Columns (name: type)

hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
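
The listing above is the flat schema of this code dataset: 25 repository-metadata columns (the max_stars/max_issues/max_forks groups), the raw content string, three basic text statistics, 41 *_quality_signal value columns, the same 41 names again without the suffix (0/1 flag columns in the sample rows below), and finally effective and hits. As a quick orientation, here is a minimal sketch of how one might load a shard and separate the two column groups; it assumes the data is stored as Parquet and readable with pandas, and data.parquet is a placeholder name, not a real file from this dataset.

    import pandas as pd

    # Placeholder shard name; substitute a real file from the dataset.
    df = pd.read_parquet("data.parquet")

    # Value columns end in "_quality_signal"; each one has a same-named
    # companion column without the suffix (0/1 flags in the sample rows).
    signal_cols = [c for c in df.columns if c.endswith("_quality_signal")]
    flag_cols = [c.removesuffix("_quality_signal") for c in signal_cols]
    assert set(flag_cols) <= set(df.columns)

    print(len(signal_cols), "signal columns")
    print(df[["lang", "size", "avg_line_length", "hits"]].head())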
Example row 1 (field: value)

hexsha: f76d71c9f25c7972983bac4a6794423f1c9053f9
size: 194
ext: py
lang: Python
max_stars_repo_path: api/admin.py
max_stars_repo_name: igor-markin/Social-Network-API
max_stars_repo_head_hexsha: d4653606b30a728030dad61d27d96b0588d5c478
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: api/admin.py
max_issues_repo_name: igor-markin/Social-Network-API
max_issues_repo_head_hexsha: d4653606b30a728030dad61d27d96b0588d5c478
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: api/admin.py
max_forks_repo_name: igor-markin/Social-Network-API
max_forks_repo_head_hexsha: d4653606b30a728030dad61d27d96b0588d5c478
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:

    from django.contrib import admin

    from .models import Comment, Follow, Group, Post

    admin.site.register(Post)
    admin.site.register(Group)
    admin.site.register(Comment)
    admin.site.register(Follow)

avg_line_length: 21.555556
max_line_length: 48
alphanum_fraction: 0.804124
qsc_code_num_words_quality_signal: 28
qsc_code_num_chars_quality_signal: 194
qsc_code_mean_word_length_quality_signal: 5.571429
qsc_code_frac_words_unique_quality_signal: 0.428571
qsc_code_frac_chars_top_2grams_quality_signal: 0.230769
qsc_code_frac_chars_top_3grams_quality_signal: 0.435897
qsc_code_frac_chars_top_4grams_quality_signal: 0.269231
qsc_code_frac_chars_dupe_5grams_quality_signal: 0
qsc_code_frac_chars_dupe_6grams_quality_signal: 0
qsc_code_frac_chars_dupe_7grams_quality_signal: 0
qsc_code_frac_chars_dupe_8grams_quality_signal: 0
qsc_code_frac_chars_dupe_9grams_quality_signal: 0
qsc_code_frac_chars_dupe_10grams_quality_signal: 0
qsc_code_frac_chars_replacement_symbols_quality_signal: 0
qsc_code_frac_chars_digital_quality_signal: 0
qsc_code_frac_chars_whitespace_quality_signal: 0.087629
qsc_code_size_file_byte_quality_signal: 194
qsc_code_num_lines_quality_signal: 8
qsc_code_num_chars_line_max_quality_signal: 49
qsc_code_num_chars_line_mean_quality_signal: 24.25
qsc_code_frac_chars_alphabet_quality_signal: 0.881356
qsc_code_frac_chars_comments_quality_signal: 0
qsc_code_cate_xml_start_quality_signal: 0
qsc_code_frac_lines_dupe_lines_quality_signal: 0
qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0
qsc_code_frac_chars_string_length_quality_signal: 0
qsc_code_frac_chars_long_word_length_quality_signal: 0
qsc_code_frac_lines_string_concat_quality_signal: 0
qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0
qsc_code_frac_lines_prompt_comments_quality_signal: 0
qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1
qsc_codepython_frac_lines_func_ratio_quality_signal: 0
qsc_codepython_cate_var_zero_quality_signal: true
qsc_codepython_frac_lines_pass_quality_signal: 0
qsc_codepython_frac_lines_import_quality_signal: 0.333333
qsc_codepython_frac_lines_simplefunc_quality_signal: 0
qsc_codepython_score_lines_no_logic_quality_signal: 0.333333
qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 1
qsc_code_num_chars: 0
qsc_code_mean_word_length: 0
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 1
qsc_code_frac_chars_top_3grams: 1
qsc_code_frac_chars_top_4grams: 1
qsc_code_frac_chars_dupe_5grams: 0
qsc_code_frac_chars_dupe_6grams: 0
qsc_code_frac_chars_dupe_7grams: 0
qsc_code_frac_chars_dupe_8grams: 0
qsc_code_frac_chars_dupe_9grams: 0
qsc_code_frac_chars_dupe_10grams: 0
qsc_code_frac_chars_replacement_symbols: 0
qsc_code_frac_chars_digital: 0
qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0
qsc_code_num_lines: 1
qsc_code_num_chars_line_max: 0
qsc_code_num_chars_line_mean: 0
qsc_code_frac_chars_alphabet: 0
qsc_code_frac_chars_comments: 0
qsc_code_cate_xml_start: 0
qsc_code_frac_lines_dupe_lines: 0
qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 0
qsc_code_frac_chars_string_length: 0
qsc_code_frac_chars_long_word_length: 0
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0
qsc_code_frac_lines_prompt_comments: 0
qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0
qsc_codepython_frac_lines_func_ratio: 0
qsc_codepython_cate_var_zero: 1
qsc_codepython_frac_lines_pass: 0
qsc_codepython_frac_lines_import: 1
qsc_codepython_frac_lines_simplefunc: 0
qsc_codepython_score_lines_no_logic: 0
qsc_codepython_frac_lines_print: 0
effective: 0
hits: 7
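
The first example row is a short Django admin.py, so some of its quality signals can be sanity-checked straight from content. The dataset's reference definitions are not included in this dump, so the recomputation below is a sketch under assumed definitions: the line-based numbers and the import fraction reproduce the sample values (8 lines, max line length 48, 2 import lines out of 6 non-blank lines gives 0.333333), while word-based signals depend on tokenization details that are not recoverable here.

    # Sketch: recompute a few simple signals from a row's `content`.
    # These formulas are assumptions, not the dataset's reference code.
    def basic_signals(text: str) -> dict:
        lines = text.splitlines()
        nonblank = [ln for ln in lines if ln.strip()]
        imports = [ln for ln in nonblank
                   if ln.lstrip().startswith(("import ", "from "))]
        return {
            # 8 for the snippet above, matching qsc_code_num_lines_quality_signal
            "num_lines": len(lines),
            # 48, matching max_line_length
            "max_line_length": max(len(ln) for ln in lines),
            # 2 / 6 = 0.333333, matching qsc_codepython_frac_lines_import_quality_signal
            "frac_lines_import": round(len(imports) / len(nonblank), 6),
            # close to 0.087629; the exact value depends on how the original
            # file was newline-terminated, which the dump does not preserve
            "frac_chars_whitespace": round(
                sum(ch.isspace() for ch in text) / len(text), 6),
        }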
Example row 2 (field: value; the dump cuts off partway through this row's content field)

hexsha: f77da335e22f3681a7058e4359eb61cbbec66d6c
size: 279213
ext: py
lang: Python
max_stars_repo_path: msgraph/cli/command_modules/teams/azext_teams/generated/_params.py
max_stars_repo_name: microsoftgraph/msgraph-cli-archived
max_stars_repo_head_hexsha: 489f70bf4ede1ce67b84bfb31e66da3e4db76062
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: msgraph/cli/command_modules/teams/azext_teams/generated/_params.py
max_issues_repo_name: microsoftgraph/msgraph-cli-archived
max_issues_repo_head_hexsha: 489f70bf4ede1ce67b84bfb31e66da3e4db76062
max_issues_repo_licenses: ["MIT"]
max_issues_count: 22
max_issues_repo_issues_event_min_datetime: 2022-03-29T22:54:37.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-29T22:55:27.000Z
max_forks_repo_path: msgraph/cli/command_modules/teams/azext_teams/generated/_params.py
max_forks_repo_name: microsoftgraph/msgraph-cli-archived
max_forks_repo_head_hexsha: 489f70bf4ede1ce67b84bfb31e66da3e4db76062
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- # pylint: disable=too-many-lines # pylint: disable=too-many-statements from azure.cli.core.commands.parameters import ( get_three_state_flag, get_enum_type, get_location_type ) from azure.cli.core.commands.validators import validate_file_or_dict from azext_teams.action import ( AddFunSettings, AddGuestSettings, AddMemberSettings, AddMessagingSettings, AddGroupsMembers, AddAssignedLabels, AddAssignedLicenses, AddLicenseProcessingState, AddOnPremisesProvisioningErrors, AddAppRoleAssignments, AddCreatedOnBehalfOf, AddMemberOf, AddMicrosoftGraphGroupMembers, AddMembersWithLicenseErrors, AddOwners, AddTransitiveMemberOf, AddTransitiveMembers, AddAcceptedSenders, AddGroupsPhoto, AddPhotos, AddRejectedSenders, AddExtensions, AddGroupLifecyclePolicies, AddOfferShiftRequests, AddOpenShiftChangeRequests, AddSchedulingGroups, AddSwapShiftsChangeRequests, AddTimeOffReasons, AddTimeOffRequests, AddTeamsMembers, AddTeamsAppDefinition, AddAppDefinitions, AddError, AddAttachments, AddBody, AddHostedContents, AddPolicyTip, AddApplication, AddConfiguration, AddSharepointIds, AddAudio, AddFileSystemInfo, AddImage, AddTeamsChannelsPhoto, AddPublication, AddVideo, AddSubscriptions, AddVersions, AddContentType, AddFields, AddMicrosoftGraphWorkbookApplication, AddFunctions, AddPackage, AddSpecialFolder, AddView, AddHashes, AddDraftOpenShift, AddActivities, AddDraftTimeOff, AddEncryption ) def load_arguments(self, _): with self.argument_context('teams chat-chat create-chat') as c: c.argument('id_', options_list=['--id'], type=str, help='Read-only.') with self.argument_context('teams chat-chat delete-chat') as c: c.argument('chat_id', type=str, help='key: id of chat') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams chat-chat list-chat') as c: c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams chat-chat show-chat') as c: c.argument('chat_id', type=str, help='key: id of chat') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams chat-chat update-chat') as c: c.argument('chat_id', type=str, help='key: id of chat') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') with self.argument_context('teams group delete-team') as c: c.argument('group_id', type=str, help='key: id of group') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams group show-team') as c: c.argument('group_id', type=str, help='key: id of group') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams group update-team') as c: c.argument('group_id', type=str, help='key: id of group') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('classification', type=str, help='An optional 
label. Typically describes the data or business ' 'sensitivity of the team. Must match one of a pre-configured set in the tenant\'s directory.') c.argument('description', type=str, help='An optional description for the team.') c.argument('display_name', type=str, help='The name of the team.') c.argument('fun_settings', action=AddFunSettings, nargs='+', help='teamFunSettings') c.argument('guest_settings', action=AddGuestSettings, nargs='+', help='teamGuestSettings') c.argument('internal_id', type=str, help='A unique ID for the team that has been used in a few places such as ' 'the audit log/Office 365 Management Activity API.') c.argument('is_archived', arg_type=get_three_state_flag(), help='Whether this team is in read-only mode.') c.argument('member_settings', action=AddMemberSettings, nargs='+', help='teamMemberSettings') c.argument('messaging_settings', action=AddMessagingSettings, nargs='+', help='teamMessagingSettings') c.argument('specialization', arg_type=get_enum_type(['none', 'educationStandard', 'educationClass', 'educationProfessionalLearningCommunity', 'educationStaff', 'healthcareStandard', 'healthcareCareCoordination', 'unknownFutureValue']), help='') c.argument('visibility', arg_type=get_enum_type(['private', 'public', 'hiddenMembership', 'unknownFutureValue']), help='') c.argument('web_url', type=str, help='A hyperlink that will go to the team in the Microsoft Teams client. This ' 'is the URL that you get when you right-click a team in the Microsoft Teams client and select Get ' 'link to team. This URL should be treated as an opaque blob, and not parsed.') c.argument('channels', type=validate_file_or_dict, help='The collection of channels & messages associated with ' 'the team. Expected value: json-string/@json-file.') c.argument('installed_apps', type=validate_file_or_dict, help='The apps installed in this team. Expected ' 'value: json-string/@json-file.') c.argument('members', action=AddGroupsMembers, nargs='+', help='Members and owners of the team.') c.argument('operations', type=validate_file_or_dict, help='The async operations that ran or are running on ' 'this team. Expected value: json-string/@json-file.') c.argument('primary_channel', type=validate_file_or_dict, help='channel Expected value: ' 'json-string/@json-file.') c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Template') c.argument('id1', type=str, help='Read-only.', arg_group='Group') c.argument('deleted_date_time', help='', arg_group='Group') c.argument('assigned_labels', action=AddAssignedLabels, nargs='+', help='The list of sensitivity label pairs ' '(label ID, label name) associated with an Microsoft 365 group. Returned only on $select. ' 'Read-only.', arg_group='Group') c.argument('assigned_licenses', action=AddAssignedLicenses, nargs='+', help='The licenses that are assigned to ' 'the group. Returned only on $select. Read-only.', arg_group='Group') c.argument('microsoft_graph_group_classification', type=str, help='Describes a classification for the group ' '(such as low, medium or high business impact). Valid values for this property are defined by ' 'creating a ClassificationList setting value, based on the template definition.Returned by default.', arg_group='Group') c.argument('created_date_time', help='Timestamp of when the group was created. The value cannot be modified ' 'and is automatically populated when the group is created. The Timestamp type represents date and ' 'time information using ISO 8601 format and is always in UTC time. 
For example, midnight UTC on Jan ' '1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('microsoft_graph_group_description', type=str, help='An optional description for the group. ' 'Returned by default.', arg_group='Group') c.argument('microsoft_graph_group_display_name', type=str, help='The display name for the group. This property ' 'is required when a group is created and cannot be cleared during updates. Returned by default. ' 'Supports $filter and $orderby.', arg_group='Group') c.argument('expiration_date_time', help='Timestamp of when the group is set to expire. The value cannot be ' 'modified and is automatically populated when the group is created. The Timestamp type represents ' 'date and time information using ISO 8601 format and is always in UTC time. For example, midnight ' 'UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('group_types', nargs='+', help='Specifies the group type and its membership. If the collection ' 'contains Unified, the group is a Microsoft 365 group; otherwise, it\'s either a security group or ' 'distribution group. For details, see groups overview.If the collection includes DynamicMembership, ' 'the group has dynamic membership; otherwise, membership is static. Returned by default. Supports ' '$filter.', arg_group='Group') c.argument('has_members_with_license_errors', arg_type=get_three_state_flag(), help='Indicates whether there ' 'are members in this group that have license errors from its group-based license assignment. This ' 'property is never returned on a GET operation. You can use it as a $filter argument to get groups ' 'that have members with license errors (that is, filter for this property being true). See an ' 'example.', arg_group='Group') c.argument('license_processing_state', action=AddLicenseProcessingState, nargs='+', help='licenseProcessingState', arg_group='Group') c.argument('mail', type=str, help='The SMTP address for the group, for example, \'serviceadmins@contoso.onmicro' 'soft.com\'. Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('mail_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is mail-enabled. ' 'Returned by default.', arg_group='Group') c.argument('mail_nickname', type=str, help='The mail alias for the group, unique in the organization. This ' 'property must be specified when a group is created. Returned by default. Supports $filter.', arg_group='Group') c.argument('membership_rule', type=str, help='The rule that determines members for this group if the group is ' 'a dynamic group (groupTypes contains DynamicMembership). For more information about the syntax of ' 'the membership rule, see Membership Rules syntax. Returned by default.', arg_group='Group') c.argument('membership_rule_processing_state', type=str, help='Indicates whether the dynamic membership ' 'processing is on or paused. Possible values are \'On\' or \'Paused\'. Returned by default.', arg_group='Group') c.argument('on_premises_domain_name', type=str, help='Contains the on-premises domain FQDN, also called ' 'dnsDomainName synchronized from the on-premises directory. The property is only populated for ' 'customers who are synchronizing their on-premises directory to Azure Active Directory via Azure AD ' 'Connect.Returned by default. 
Read-only.', arg_group='Group') c.argument('on_premises_last_sync_date_time', help='Indicates the last time at which the group was synced with ' 'the on-premises directory.The Timestamp type represents date and time information using ISO 8601 ' 'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: ' '\'2014-01-01T00:00:00Z\'. Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('on_premises_net_bios_name', type=str, help='Contains the on-premises netBios name synchronized ' 'from the on-premises directory. The property is only populated for customers who are synchronizing ' 'their on-premises directory to Azure Active Directory via Azure AD Connect.Returned by default. ' 'Read-only.', arg_group='Group') c.argument('on_premises_provisioning_errors', action=AddOnPremisesProvisioningErrors, nargs='+', help='Errors ' 'when using Microsoft synchronization product during provisioning. Returned by default.', arg_group='Group') c.argument('on_premises_sam_account_name', type=str, help='Contains the on-premises SAM account name ' 'synchronized from the on-premises directory. The property is only populated for customers who are ' 'synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect.Returned ' 'by default. Read-only.', arg_group='Group') c.argument('on_premises_security_identifier', type=str, help='Contains the on-premises security identifier ' '(SID) for the group that was synchronized from on-premises to the cloud. Returned by default. ' 'Read-only.', arg_group='Group') c.argument('on_premises_sync_enabled', arg_type=get_three_state_flag(), help='true if this group is synced ' 'from an on-premises directory; false if this group was originally synced from an on-premises ' 'directory but is no longer synced; null if this object has never been synced from an on-premises ' 'directory (default). Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('preferred_data_location', type=str, help='The preferred data location for the group. For more ' 'information, see OneDrive Online Multi-Geo. Returned by default.', arg_group='Group') c.argument('preferred_language', type=str, help='The preferred language for an Microsoft 365 group. Should ' 'follow ISO 639-1 Code; for example \'en-US\'. Returned by default.', arg_group='Group') c.argument('proxy_addresses', nargs='+', help='Email addresses for the group that direct to the same group ' 'mailbox. For example: [\'SMTP: bob@contoso.com\', \'smtp: bob@sales.contoso.com\']. The any ' 'operator is required to filter expressions on multi-valued properties. Returned by default. ' 'Read-only. Not nullable. Supports $filter.', arg_group='Group') c.argument('renewed_date_time', help='Timestamp of when the group was last renewed. This cannot be modified ' 'directly and is only updated via the renew service action. The Timestamp type represents date and ' 'time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan ' '1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('security_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is a ' 'security group. Returned by default. Supports $filter.', arg_group='Group') c.argument('security_identifier', type=str, help='Security identifier of the group, used in Windows scenarios. 
' 'Returned by default.', arg_group='Group') c.argument('theme', type=str, help='Specifies an Microsoft 365 group\'s color theme. Possible values are Teal, ' 'Purple, Green, Blue, Pink, Orange or Red. Returned by default.', arg_group='Group') c.argument('microsoft_graph_group_visibility', type=str, help='Specifies the visibility of a Microsoft 365 ' 'group. Possible values are: Private, Public, or Hiddenmembership; blank values are treated as ' 'public. See group visibility options to learn more.Visibility can be set only when a group is ' 'created; it is not editable.Visibility is supported only for unified groups; it is not supported ' 'for security groups. Returned by default.', arg_group='Group') c.argument('allow_external_senders', arg_type=get_three_state_flag(), help='Indicates if people external to ' 'the organization can send messages to the group. Default value is false. Returned only on $select.', arg_group='Group') c.argument('auto_subscribe_new_members', arg_type=get_three_state_flag(), help='Indicates if new members added ' 'to the group will be auto-subscribed to receive email notifications. You can set this property in ' 'a PATCH request for the group; do not set it in the initial POST request that creates the group. ' 'Default value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_address_lists', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in certain parts of the Outlook UI: the Address Book, address lists for selecting ' 'message recipients, and the Browse Groups dialog for searching groups; otherwise, false. Default ' 'value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_outlook_clients', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in Outlook clients, such as Outlook for Windows and Outlook on the web; otherwise, ' 'false. Default value is false. Returned only on $select.', arg_group='Group') c.argument('is_subscribed_by_mail', arg_type=get_three_state_flag(), help='Indicates whether the signed-in ' 'user is subscribed to receive email conversations. Default value is true. Returned only on ' '$select.', arg_group='Group') c.argument('unseen_count', type=int, help='Count of conversations that have received new posts since the ' 'signed-in user last visited the group. Returned only on $select.', arg_group='Group') c.argument('group_is_archived', arg_type=get_three_state_flag(), help='', arg_group='Group') c.argument('app_role_assignments', action=AddAppRoleAssignments, nargs='+', help='', arg_group='Group') c.argument('created_on_behalf_of', action=AddCreatedOnBehalfOf, nargs='+', help='Represents an Azure Active ' 'Directory object. The directoryObject type is the base type for many other directory entity types.', arg_group='Group') c.argument('member_of', action=AddMemberOf, nargs='+', help='Groups that this group is a member of. HTTP ' 'Methods: GET (supported for all groups). Read-only. Nullable.', arg_group='Group') c.argument('microsoft_graph_group_members', action=AddMicrosoftGraphGroupMembers, nargs='+', help='Users and ' 'groups that are members of this group. 
HTTP Methods: GET (supported for all groups), POST ' '(supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE ' '(supported for Microsoft 365 groups and security groups) Nullable.', arg_group='Group') c.argument('members_with_license_errors', action=AddMembersWithLicenseErrors, nargs='+', help='A list of group ' 'members with license errors from this group-based license assignment. Read-only.', arg_group='Group') c.argument('owners', action=AddOwners, nargs='+', help='The owners of the group. The owners are a set of ' 'non-admin users who are allowed to modify this object. Limited to 100 owners. HTTP Methods: GET ' '(supported for all groups), POST (supported for Microsoft 365 groups, security groups and ' 'mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security groups). ' 'Nullable.', arg_group='Group') c.argument('settings', type=validate_file_or_dict, help='Read-only. Nullable. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('transitive_member_of', action=AddTransitiveMemberOf, nargs='+', help='', arg_group='Group') c.argument('transitive_members', action=AddTransitiveMembers, nargs='+', help='', arg_group='Group') c.argument('accepted_senders', action=AddAcceptedSenders, nargs='+', help='The list of users or groups that ' 'are allowed to create post\'s or calendar events in this group. If this list is non-empty then ' 'only users or groups listed here are allowed to post.', arg_group='Group') c.argument('calendar', type=validate_file_or_dict, help='calendar Expected value: json-string/@json-file.', arg_group='Group') c.argument('calendar_view', type=validate_file_or_dict, help='The calendar view for the calendar. Read-only. ' 'Expected value: json-string/@json-file.', arg_group='Group') c.argument('conversations', type=validate_file_or_dict, help='The group\'s conversations. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('events', type=validate_file_or_dict, help='The group\'s calendar events. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('photo', action=AddGroupsPhoto, nargs='+', help='profilePhoto', arg_group='Group') c.argument('photos', action=AddPhotos, nargs='+', help='The profile photos owned by the group. Read-only. ' 'Nullable.', arg_group='Group') c.argument('rejected_senders', action=AddRejectedSenders, nargs='+', help='The list of users or groups that ' 'are not allowed to create posts or calendar events in this group. Nullable', arg_group='Group') c.argument('threads', type=validate_file_or_dict, help='The group\'s conversation threads. Nullable. Expected ' 'value: json-string/@json-file.', arg_group='Group') c.argument('drive', type=validate_file_or_dict, help='drive Expected value: json-string/@json-file.', arg_group='Group') c.argument('drives', type=validate_file_or_dict, help='The group\'s drives. Read-only. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('sites', type=validate_file_or_dict, help='The list of SharePoint sites in this group. Access the ' 'default site with /sites/root. Expected value: json-string/@json-file.', arg_group='Group') c.argument('extensions', action=AddExtensions, nargs='+', help='The collection of open extensions defined for ' 'the group. Read-only. Nullable.', arg_group='Group') c.argument('group_lifecycle_policies', action=AddGroupLifecyclePolicies, nargs='+', help='The collection of ' 'lifecycle policies for this group. Read-only. 
Nullable.', arg_group='Group') c.argument('planner', type=validate_file_or_dict, help='plannerGroup Expected value: json-string/@json-file.', arg_group='Group') c.argument('onenote', type=validate_file_or_dict, help='onenote Expected value: json-string/@json-file.', arg_group='Group') c.argument('team', type=validate_file_or_dict, help='team Expected value: json-string/@json-file.', arg_group='Group') c.argument('id2', type=str, help='Read-only.', arg_group='Schedule') c.argument('enabled', arg_type=get_three_state_flag(), help='Indicates whether the schedule is enabled for the ' 'team. Required.', arg_group='Schedule') c.argument('offer_shift_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether offer ' 'shift requests are enabled for the schedule.', arg_group='Schedule') c.argument('open_shifts_enabled', arg_type=get_three_state_flag(), help='Indicates whether open shifts are ' 'enabled for the schedule.', arg_group='Schedule') c.argument('provision_status', arg_type=get_enum_type(['NotStarted', 'Running', 'Completed', 'Failed']), help='', arg_group='Schedule') c.argument('provision_status_code', type=str, help='Additional information about why schedule provisioning ' 'failed.', arg_group='Schedule') c.argument('swap_shifts_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether swap ' 'shifts requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_clock_enabled', arg_type=get_three_state_flag(), help='Indicates whether time clock is ' 'enabled for the schedule.', arg_group='Schedule') c.argument('time_off_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether time off ' 'requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_zone', type=str, help='Indicates the time zone of the schedule team using tz database format. ' 'Required.', arg_group='Schedule') c.argument('workforce_integration_ids', nargs='+', help='', arg_group='Schedule') c.argument('offer_shift_requests', action=AddOfferShiftRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shift_change_requests', action=AddOpenShiftChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shifts', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Schedule') c.argument('scheduling_groups', action=AddSchedulingGroups, nargs='+', help='The logical grouping of users in ' 'the schedule (usually by role).', arg_group='Schedule') c.argument('shifts', type=validate_file_or_dict, help='The shifts in the schedule. Expected value: ' 'json-string/@json-file.', arg_group='Schedule') c.argument('swap_shifts_change_requests', action=AddSwapShiftsChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('time_off_reasons', action=AddTimeOffReasons, nargs='+', help='The set of reasons for a time off in ' 'the schedule.', arg_group='Schedule') c.argument('time_off_requests', action=AddTimeOffRequests, nargs='+', help='', arg_group='Schedule') c.argument('times_off', type=validate_file_or_dict, help='The instances of times off in the schedule. 
Expected ' 'value: json-string/@json-file.', arg_group='Schedule') with self.argument_context('teams team list') as c: c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team create') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('classification', type=str, help='An optional label. Typically describes the data or business ' 'sensitivity of the team. Must match one of a pre-configured set in the tenant\'s directory.') c.argument('description', type=str, help='An optional description for the team.') c.argument('display_name', type=str, help='The name of the team.') c.argument('fun_settings', action=AddFunSettings, nargs='+', help='teamFunSettings') c.argument('guest_settings', action=AddGuestSettings, nargs='+', help='teamGuestSettings') c.argument('internal_id', type=str, help='A unique ID for the team that has been used in a few places such as ' 'the audit log/Office 365 Management Activity API.') c.argument('is_archived', arg_type=get_three_state_flag(), help='Whether this team is in read-only mode.') c.argument('member_settings', action=AddMemberSettings, nargs='+', help='teamMemberSettings') c.argument('messaging_settings', action=AddMessagingSettings, nargs='+', help='teamMessagingSettings') c.argument('specialization', arg_type=get_enum_type(['none', 'educationStandard', 'educationClass', 'educationProfessionalLearningCommunity', 'educationStaff', 'healthcareStandard', 'healthcareCareCoordination', 'unknownFutureValue']), help='') c.argument('visibility', arg_type=get_enum_type(['private', 'public', 'hiddenMembership', 'unknownFutureValue']), help='') c.argument('web_url', type=str, help='A hyperlink that will go to the team in the Microsoft Teams client. This ' 'is the URL that you get when you right-click a team in the Microsoft Teams client and select Get ' 'link to team. This URL should be treated as an opaque blob, and not parsed.') c.argument('channels', type=validate_file_or_dict, help='The collection of channels & messages associated with ' 'the team. Expected value: json-string/@json-file.') c.argument('installed_apps', type=validate_file_or_dict, help='The apps installed in this team. Expected ' 'value: json-string/@json-file.') c.argument('members', action=AddGroupsMembers, nargs='+', help='Members and owners of the team.') c.argument('operations', type=validate_file_or_dict, help='The async operations that ran or are running on ' 'this team. Expected value: json-string/@json-file.') c.argument('primary_channel', type=validate_file_or_dict, help='channel Expected value: ' 'json-string/@json-file.') c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Template') c.argument('id1', type=str, help='Read-only.', arg_group='Group') c.argument('deleted_date_time', help='', arg_group='Group') c.argument('assigned_labels', action=AddAssignedLabels, nargs='+', help='The list of sensitivity label pairs ' '(label ID, label name) associated with an Microsoft 365 group. Returned only on $select. ' 'Read-only.', arg_group='Group') c.argument('assigned_licenses', action=AddAssignedLicenses, nargs='+', help='The licenses that are assigned to ' 'the group. Returned only on $select. 
Read-only.', arg_group='Group') c.argument('microsoft_graph_group_classification', type=str, help='Describes a classification for the group ' '(such as low, medium or high business impact). Valid values for this property are defined by ' 'creating a ClassificationList setting value, based on the template definition.Returned by default.', arg_group='Group') c.argument('created_date_time', help='Timestamp of when the group was created. The value cannot be modified ' 'and is automatically populated when the group is created. The Timestamp type represents date and ' 'time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan ' '1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('microsoft_graph_group_description', type=str, help='An optional description for the group. ' 'Returned by default.', arg_group='Group') c.argument('microsoft_graph_group_display_name', type=str, help='The display name for the group. This property ' 'is required when a group is created and cannot be cleared during updates. Returned by default. ' 'Supports $filter and $orderby.', arg_group='Group') c.argument('expiration_date_time', help='Timestamp of when the group is set to expire. The value cannot be ' 'modified and is automatically populated when the group is created. The Timestamp type represents ' 'date and time information using ISO 8601 format and is always in UTC time. For example, midnight ' 'UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('group_types', nargs='+', help='Specifies the group type and its membership. If the collection ' 'contains Unified, the group is a Microsoft 365 group; otherwise, it\'s either a security group or ' 'distribution group. For details, see groups overview.If the collection includes DynamicMembership, ' 'the group has dynamic membership; otherwise, membership is static. Returned by default. Supports ' '$filter.', arg_group='Group') c.argument('has_members_with_license_errors', arg_type=get_three_state_flag(), help='Indicates whether there ' 'are members in this group that have license errors from its group-based license assignment. This ' 'property is never returned on a GET operation. You can use it as a $filter argument to get groups ' 'that have members with license errors (that is, filter for this property being true). See an ' 'example.', arg_group='Group') c.argument('license_processing_state', action=AddLicenseProcessingState, nargs='+', help='licenseProcessingState', arg_group='Group') c.argument('mail', type=str, help='The SMTP address for the group, for example, \'serviceadmins@contoso.onmicro' 'soft.com\'. Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('mail_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is mail-enabled. ' 'Returned by default.', arg_group='Group') c.argument('mail_nickname', type=str, help='The mail alias for the group, unique in the organization. This ' 'property must be specified when a group is created. Returned by default. Supports $filter.', arg_group='Group') c.argument('membership_rule', type=str, help='The rule that determines members for this group if the group is ' 'a dynamic group (groupTypes contains DynamicMembership). For more information about the syntax of ' 'the membership rule, see Membership Rules syntax. 
Returned by default.', arg_group='Group') c.argument('membership_rule_processing_state', type=str, help='Indicates whether the dynamic membership ' 'processing is on or paused. Possible values are \'On\' or \'Paused\'. Returned by default.', arg_group='Group') c.argument('on_premises_domain_name', type=str, help='Contains the on-premises domain FQDN, also called ' 'dnsDomainName synchronized from the on-premises directory. The property is only populated for ' 'customers who are synchronizing their on-premises directory to Azure Active Directory via Azure AD ' 'Connect.Returned by default. Read-only.', arg_group='Group') c.argument('on_premises_last_sync_date_time', help='Indicates the last time at which the group was synced with ' 'the on-premises directory.The Timestamp type represents date and time information using ISO 8601 ' 'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: ' '\'2014-01-01T00:00:00Z\'. Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('on_premises_net_bios_name', type=str, help='Contains the on-premises netBios name synchronized ' 'from the on-premises directory. The property is only populated for customers who are synchronizing ' 'their on-premises directory to Azure Active Directory via Azure AD Connect.Returned by default. ' 'Read-only.', arg_group='Group') c.argument('on_premises_provisioning_errors', action=AddOnPremisesProvisioningErrors, nargs='+', help='Errors ' 'when using Microsoft synchronization product during provisioning. Returned by default.', arg_group='Group') c.argument('on_premises_sam_account_name', type=str, help='Contains the on-premises SAM account name ' 'synchronized from the on-premises directory. The property is only populated for customers who are ' 'synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect.Returned ' 'by default. Read-only.', arg_group='Group') c.argument('on_premises_security_identifier', type=str, help='Contains the on-premises security identifier ' '(SID) for the group that was synchronized from on-premises to the cloud. Returned by default. ' 'Read-only.', arg_group='Group') c.argument('on_premises_sync_enabled', arg_type=get_three_state_flag(), help='true if this group is synced ' 'from an on-premises directory; false if this group was originally synced from an on-premises ' 'directory but is no longer synced; null if this object has never been synced from an on-premises ' 'directory (default). Returned by default. Read-only. Supports $filter.', arg_group='Group') c.argument('preferred_data_location', type=str, help='The preferred data location for the group. For more ' 'information, see OneDrive Online Multi-Geo. Returned by default.', arg_group='Group') c.argument('preferred_language', type=str, help='The preferred language for an Microsoft 365 group. Should ' 'follow ISO 639-1 Code; for example \'en-US\'. Returned by default.', arg_group='Group') c.argument('proxy_addresses', nargs='+', help='Email addresses for the group that direct to the same group ' 'mailbox. For example: [\'SMTP: bob@contoso.com\', \'smtp: bob@sales.contoso.com\']. The any ' 'operator is required to filter expressions on multi-valued properties. Returned by default. ' 'Read-only. Not nullable. Supports $filter.', arg_group='Group') c.argument('renewed_date_time', help='Timestamp of when the group was last renewed. This cannot be modified ' 'directly and is only updated via the renew service action. 
The Timestamp type represents date and ' 'time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan ' '1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.', arg_group='Group') c.argument('security_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is a ' 'security group. Returned by default. Supports $filter.', arg_group='Group') c.argument('security_identifier', type=str, help='Security identifier of the group, used in Windows scenarios. ' 'Returned by default.', arg_group='Group') c.argument('theme', type=str, help='Specifies an Microsoft 365 group\'s color theme. Possible values are Teal, ' 'Purple, Green, Blue, Pink, Orange or Red. Returned by default.', arg_group='Group') c.argument('microsoft_graph_group_visibility', type=str, help='Specifies the visibility of a Microsoft 365 ' 'group. Possible values are: Private, Public, or Hiddenmembership; blank values are treated as ' 'public. See group visibility options to learn more.Visibility can be set only when a group is ' 'created; it is not editable.Visibility is supported only for unified groups; it is not supported ' 'for security groups. Returned by default.', arg_group='Group') c.argument('allow_external_senders', arg_type=get_three_state_flag(), help='Indicates if people external to ' 'the organization can send messages to the group. Default value is false. Returned only on $select.', arg_group='Group') c.argument('auto_subscribe_new_members', arg_type=get_three_state_flag(), help='Indicates if new members added ' 'to the group will be auto-subscribed to receive email notifications. You can set this property in ' 'a PATCH request for the group; do not set it in the initial POST request that creates the group. ' 'Default value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_address_lists', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in certain parts of the Outlook UI: the Address Book, address lists for selecting ' 'message recipients, and the Browse Groups dialog for searching groups; otherwise, false. Default ' 'value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_outlook_clients', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in Outlook clients, such as Outlook for Windows and Outlook on the web; otherwise, ' 'false. Default value is false. Returned only on $select.', arg_group='Group') c.argument('is_subscribed_by_mail', arg_type=get_three_state_flag(), help='Indicates whether the signed-in ' 'user is subscribed to receive email conversations. Default value is true. Returned only on ' '$select.', arg_group='Group') c.argument('unseen_count', type=int, help='Count of conversations that have received new posts since the ' 'signed-in user last visited the group. Returned only on $select.', arg_group='Group') c.argument('group_is_archived', arg_type=get_three_state_flag(), help='', arg_group='Group') c.argument('app_role_assignments', action=AddAppRoleAssignments, nargs='+', help='', arg_group='Group') c.argument('created_on_behalf_of', action=AddCreatedOnBehalfOf, nargs='+', help='Represents an Azure Active ' 'Directory object. The directoryObject type is the base type for many other directory entity types.', arg_group='Group') c.argument('member_of', action=AddMemberOf, nargs='+', help='Groups that this group is a member of. HTTP ' 'Methods: GET (supported for all groups). Read-only. 
Nullable.', arg_group='Group') c.argument('microsoft_graph_group_members', action=AddMicrosoftGraphGroupMembers, nargs='+', help='Users and ' 'groups that are members of this group. HTTP Methods: GET (supported for all groups), POST ' '(supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE ' '(supported for Microsoft 365 groups and security groups) Nullable.', arg_group='Group') c.argument('members_with_license_errors', action=AddMembersWithLicenseErrors, nargs='+', help='A list of group ' 'members with license errors from this group-based license assignment. Read-only.', arg_group='Group') c.argument('owners', action=AddOwners, nargs='+', help='The owners of the group. The owners are a set of ' 'non-admin users who are allowed to modify this object. Limited to 100 owners. HTTP Methods: GET ' '(supported for all groups), POST (supported for Microsoft 365 groups, security groups and ' 'mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security groups). ' 'Nullable.', arg_group='Group') c.argument('settings', type=validate_file_or_dict, help='Read-only. Nullable. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('transitive_member_of', action=AddTransitiveMemberOf, nargs='+', help='', arg_group='Group') c.argument('transitive_members', action=AddTransitiveMembers, nargs='+', help='', arg_group='Group') c.argument('accepted_senders', action=AddAcceptedSenders, nargs='+', help='The list of users or groups that ' 'are allowed to create post\'s or calendar events in this group. If this list is non-empty then ' 'only users or groups listed here are allowed to post.', arg_group='Group') c.argument('calendar', type=validate_file_or_dict, help='calendar Expected value: json-string/@json-file.', arg_group='Group') c.argument('calendar_view', type=validate_file_or_dict, help='The calendar view for the calendar. Read-only. ' 'Expected value: json-string/@json-file.', arg_group='Group') c.argument('conversations', type=validate_file_or_dict, help='The group\'s conversations. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('events', type=validate_file_or_dict, help='The group\'s calendar events. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('photo', action=AddGroupsPhoto, nargs='+', help='profilePhoto', arg_group='Group') c.argument('photos', action=AddPhotos, nargs='+', help='The profile photos owned by the group. Read-only. ' 'Nullable.', arg_group='Group') c.argument('rejected_senders', action=AddRejectedSenders, nargs='+', help='The list of users or groups that ' 'are not allowed to create posts or calendar events in this group. Nullable', arg_group='Group') c.argument('threads', type=validate_file_or_dict, help='The group\'s conversation threads. Nullable. Expected ' 'value: json-string/@json-file.', arg_group='Group') c.argument('drive', type=validate_file_or_dict, help='drive Expected value: json-string/@json-file.', arg_group='Group') c.argument('drives', type=validate_file_or_dict, help='The group\'s drives. Read-only. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('sites', type=validate_file_or_dict, help='The list of SharePoint sites in this group. Access the ' 'default site with /sites/root. Expected value: json-string/@json-file.', arg_group='Group') c.argument('extensions', action=AddExtensions, nargs='+', help='The collection of open extensions defined for ' 'the group. Read-only. 
Nullable.', arg_group='Group') c.argument('group_lifecycle_policies', action=AddGroupLifecyclePolicies, nargs='+', help='The collection of ' 'lifecycle policies for this group. Read-only. Nullable.', arg_group='Group') c.argument('planner', type=validate_file_or_dict, help='plannerGroup Expected value: json-string/@json-file.', arg_group='Group') c.argument('onenote', type=validate_file_or_dict, help='onenote Expected value: json-string/@json-file.', arg_group='Group') c.argument('team', type=validate_file_or_dict, help='team Expected value: json-string/@json-file.', arg_group='Group') c.argument('id2', type=str, help='Read-only.', arg_group='Schedule') c.argument('enabled', arg_type=get_three_state_flag(), help='Indicates whether the schedule is enabled for the ' 'team. Required.', arg_group='Schedule') c.argument('offer_shift_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether offer ' 'shift requests are enabled for the schedule.', arg_group='Schedule') c.argument('open_shifts_enabled', arg_type=get_three_state_flag(), help='Indicates whether open shifts are ' 'enabled for the schedule.', arg_group='Schedule') c.argument('provision_status', arg_type=get_enum_type(['NotStarted', 'Running', 'Completed', 'Failed']), help='', arg_group='Schedule') c.argument('provision_status_code', type=str, help='Additional information about why schedule provisioning ' 'failed.', arg_group='Schedule') c.argument('swap_shifts_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether swap ' 'shifts requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_clock_enabled', arg_type=get_three_state_flag(), help='Indicates whether time clock is ' 'enabled for the schedule.', arg_group='Schedule') c.argument('time_off_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether time off ' 'requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_zone', type=str, help='Indicates the time zone of the schedule team using tz database format. ' 'Required.', arg_group='Schedule') c.argument('workforce_integration_ids', nargs='+', help='', arg_group='Schedule') c.argument('offer_shift_requests', action=AddOfferShiftRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shift_change_requests', action=AddOpenShiftChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shifts', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Schedule') c.argument('scheduling_groups', action=AddSchedulingGroups, nargs='+', help='The logical grouping of users in ' 'the schedule (usually by role).', arg_group='Schedule') c.argument('shifts', type=validate_file_or_dict, help='The shifts in the schedule. Expected value: ' 'json-string/@json-file.', arg_group='Schedule') c.argument('swap_shifts_change_requests', action=AddSwapShiftsChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('time_off_reasons', action=AddTimeOffReasons, nargs='+', help='The set of reasons for a time off in ' 'the schedule.', arg_group='Schedule') c.argument('time_off_requests', action=AddTimeOffRequests, nargs='+', help='', arg_group='Schedule') c.argument('times_off', type=validate_file_or_dict, help='The instances of times off in the schedule. 
Expected ' 'value: json-string/@json-file.', arg_group='Schedule') with self.argument_context('teams team delete-team') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team show-team') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team archive') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('should_set_spo_site_read_only_for_members', arg_type=get_three_state_flag(), help='') with self.argument_context('teams team clone') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('display_name', type=str, help='') c.argument('description', type=str, help='') c.argument('mail_nickname', type=str, help='') c.argument('classification', type=str, help='') c.argument('visibility', arg_type=get_enum_type(['private', 'public', 'hiddenMembership', 'unknownFutureValue']), help='') c.argument('parts_to_clone', arg_type=get_enum_type(['apps', 'tabs', 'settings', 'channels', 'members']), help='') with self.argument_context('teams team create-channel') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('description', type=str, help='Optional textual description for the channel.') c.argument('display_name', type=str, help='Channel name as it will appear to the user in Microsoft Teams.') c.argument('email', type=str, help='The email address for sending messages to the channel. Read-only.') c.argument('membership_type', arg_type=get_enum_type(['standard', 'private', 'unknownFutureValue']), help='') c.argument('web_url', type=str, help='A hyperlink that will navigate to the channel in Microsoft Teams. This ' 'is the URL that you get when you right-click a channel in Microsoft Teams and select Get link to ' 'channel. This URL should be treated as an opaque blob, and not parsed. Read-only.') c.argument('files_folder', type=validate_file_or_dict, help='driveItem Expected value: json-string/@json-file.') c.argument('members', action=AddTeamsMembers, nargs='+', help='') c.argument('messages', type=validate_file_or_dict, help='A collection of all the messages in the channel. A ' 'navigation property. Nullable. Expected value: json-string/@json-file.') c.argument('tabs', type=validate_file_or_dict, help='A collection of all the tabs in the channel. A navigation ' 'property. 
Expected value: json-string/@json-file.') with self.argument_context('teams team create-installed-app') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('teams_app_definition', action=AddTeamsAppDefinition, nargs='+', help='teamsAppDefinition') c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App') c.argument('display_name', type=str, help='The name of the catalog app provided by the app developer in the ' 'Microsoft Teams zip app package.', arg_group='Teams App') c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams ' 'App') c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft ' 'Teams zip app package.', arg_group='Teams App') c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the ' 'app.', arg_group='Teams App') with self.argument_context('teams team create-member') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('display_name', type=str, help='The display name of the user.') c.argument('roles', nargs='+', help='The roles for that user.') with self.argument_context('teams team create-operation') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('id_', options_list=['--id'], type=str, help='Read-only.') c.argument('attempts_count', type=int, help='Number of times the operation was attempted before being marked ' 'successful or failed.') c.argument('created_date_time', help='Time when the operation was created.') c.argument('error', action=AddError, nargs='+', help='operationError') c.argument('last_action_date_time', help='Time when the async operation was last updated.') c.argument('operation_type', arg_type=get_enum_type(['invalid', 'cloneTeam', 'archiveTeam', 'unarchiveTeam', 'createTeam', 'unknownFutureValue']), help='') c.argument('status', arg_type=get_enum_type(['invalid', 'notStarted', 'inProgress', 'succeeded', 'failed', 'unknownFutureValue']), help='') c.argument('target_resource_id', type=str, help='The ID of the object that\'s created or modified as result of ' 'this async operation, typically a team.') c.argument('target_resource_location', type=str, help='The location of the object that\'s created or modified ' 'as result of this async operation. 
This URL should be treated as an opaque value and not parsed ' 'into its component paths.') with self.argument_context('teams team delete-channel') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('channel_id', type=str, help='key: id of channel') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-installed-app') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-member') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('conversation_member_id', type=str, help='key: id of conversationMember') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-operation') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('teams_async_operation_id', type=str, help='key: id of teamsAsyncOperation') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-primary-channel') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-ref-group') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-ref-template') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team delete-schedule') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('if_match', type=str, help='ETag') with self.argument_context('teams team list-channel') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team list-installed-app') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team list-member') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team list-operation') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('orderby', nargs='+', help='Order items by property values') c.argument('select', nargs='+', help='Select properties to be returned') c.argument('expand', nargs='+', help='Expand related entities') with self.argument_context('teams team set-ref-group') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: ' 'json-string/@json-file.') with self.argument_context('teams team set-ref-template') as c: c.argument('team_id', type=str, help='key: id of team') c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: ' 
    with self.argument_context('teams team show-channel') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-installed-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-operation') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_async_operation_id', type=str, help='key: id of teamsAsyncOperation')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-primary-channel') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-ref-group') as c:
        c.argument('team_id', type=str, help='key: id of team')

    with self.argument_context('teams team show-ref-template') as c:
        c.argument('team_id', type=str, help='key: id of team')

    with self.argument_context('teams team show-schedule') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team show-template') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team unarchive') as c:
        c.argument('team_id', type=str, help='key: id of team')
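    # Illustrative usage (hedged): 'select' and 'expand' map to the OData $select/$expand
    # query options exposed by Microsoft Graph, e.g.
    #   az teams team show-channel --team-id <team-guid> --channel-id <channel-id> \
    #       --select displayName email --expand tabs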
    with self.argument_context('teams team update-channel') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('description', type=str, help='Optional textual description for the channel.')
        c.argument('display_name', type=str, help='Channel name as it will appear to the user in Microsoft Teams.')
        c.argument('email', type=str, help='The email address for sending messages to the channel. Read-only.')
        c.argument('membership_type', arg_type=get_enum_type(['standard', 'private', 'unknownFutureValue']), help='')
        c.argument('web_url', type=str, help='A hyperlink that will navigate to the channel in Microsoft Teams. This is the URL that you get when you right-click a channel in Microsoft Teams and select Get link to channel. This URL should be treated as an opaque blob, and not parsed. Read-only.')
        c.argument('files_folder', type=validate_file_or_dict, help='driveItem Expected value: json-string/@json-file.')
        c.argument('members', action=AddTeamsMembers, nargs='+', help='')
        c.argument('messages', type=validate_file_or_dict, help='A collection of all the messages in the channel. A navigation property. Nullable. Expected value: json-string/@json-file.')
        c.argument('tabs', type=validate_file_or_dict, help='A collection of all the tabs in the channel. A navigation property. Expected value: json-string/@json-file.')

    with self.argument_context('teams team update-installed-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('teams_app_definition', action=AddTeamsAppDefinition, nargs='+', help='teamsAppDefinition')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App')
        c.argument('display_name', type=str, help='The name of the catalog app provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams App')
        c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the app.', arg_group='Teams App')

    with self.argument_context('teams team update-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('display_name', type=str, help='The display name of the user.')
        c.argument('roles', nargs='+', help='The roles for that user.')

    with self.argument_context('teams team update-operation') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_async_operation_id', type=str, help='key: id of teamsAsyncOperation')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attempts_count', type=int, help='Number of times the operation was attempted before being marked successful or failed.')
        c.argument('created_date_time', help='Time when the operation was created.')
        c.argument('error', action=AddError, nargs='+', help='operationError')
        c.argument('last_action_date_time', help='Time when the async operation was last updated.')
        c.argument('operation_type', arg_type=get_enum_type(['invalid', 'cloneTeam', 'archiveTeam', 'unarchiveTeam', 'createTeam', 'unknownFutureValue']), help='')
        c.argument('status', arg_type=get_enum_type(['invalid', 'notStarted', 'inProgress', 'succeeded', 'failed', 'unknownFutureValue']), help='')
        c.argument('target_resource_id', type=str, help='The ID of the object that\'s created or modified as a result of this async operation, typically a team.')
        c.argument('target_resource_location', type=str, help='The location of the object that\'s created or modified as a result of this async operation. This URL should be treated as an opaque value and not parsed into its component paths.')
    with self.argument_context('teams team update-primary-channel') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('description', type=str, help='Optional textual description for the channel.')
        c.argument('display_name', type=str, help='Channel name as it will appear to the user in Microsoft Teams.')
        c.argument('email', type=str, help='The email address for sending messages to the channel. Read-only.')
        c.argument('membership_type', arg_type=get_enum_type(['standard', 'private', 'unknownFutureValue']), help='')
        c.argument('web_url', type=str, help='A hyperlink that will navigate to the channel in Microsoft Teams. This is the URL that you get when you right-click a channel in Microsoft Teams and select Get link to channel. This URL should be treated as an opaque blob, and not parsed. Read-only.')
        c.argument('files_folder', type=validate_file_or_dict, help='driveItem Expected value: json-string/@json-file.')
        c.argument('members', action=AddTeamsMembers, nargs='+', help='')
        c.argument('messages', type=validate_file_or_dict, help='A collection of all the messages in the channel. A navigation property. Nullable. Expected value: json-string/@json-file.')
        c.argument('tabs', type=validate_file_or_dict, help='A collection of all the tabs in the channel. A navigation property. Expected value: json-string/@json-file.')

    with self.argument_context('teams team update-schedule') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('enabled', arg_type=get_three_state_flag(), help='Indicates whether the schedule is enabled for the team. Required.')
        c.argument('offer_shift_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether offer shift requests are enabled for the schedule.')
        c.argument('open_shifts_enabled', arg_type=get_three_state_flag(), help='Indicates whether open shifts are enabled for the schedule.')
        c.argument('provision_status', arg_type=get_enum_type(['NotStarted', 'Running', 'Completed', 'Failed']), help='')
        c.argument('provision_status_code', type=str, help='Additional information about why schedule provisioning failed.')
        c.argument('swap_shifts_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether swap shifts requests are enabled for the schedule.')
        c.argument('time_clock_enabled', arg_type=get_three_state_flag(), help='Indicates whether time clock is enabled for the schedule.')
        c.argument('time_off_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether time off requests are enabled for the schedule.')
        c.argument('time_zone', type=str, help='Indicates the time zone of the schedule team using tz database format. Required.')
        c.argument('workforce_integration_ids', nargs='+', help='')
        c.argument('offer_shift_requests', action=AddOfferShiftRequests, nargs='+', help='')
        c.argument('open_shift_change_requests', action=AddOpenShiftChangeRequests, nargs='+', help='')
        c.argument('open_shifts', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('scheduling_groups', action=AddSchedulingGroups, nargs='+', help='The logical grouping of users in the schedule (usually by role).')
        c.argument('shifts', type=validate_file_or_dict, help='The shifts in the schedule. Expected value: json-string/@json-file.')
        c.argument('swap_shifts_change_requests', action=AddSwapShiftsChangeRequests, nargs='+', help='')
        c.argument('time_off_reasons', action=AddTimeOffReasons, nargs='+', help='The set of reasons for a time off in the schedule.')
        c.argument('time_off_requests', action=AddTimeOffRequests, nargs='+', help='')
        c.argument('times_off', type=validate_file_or_dict, help='The instances of times off in the schedule. Expected value: json-string/@json-file.')
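    # Illustrative usage (hedged): the get_three_state_flag() arguments accept true/false,
    # and --time-zone expects a tz database name, e.g.
    #   az teams team update-schedule --team-id <team-guid> --enabled true \
    #       --time-clock-enabled false --time-zone "America/Los_Angeles"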
    with self.argument_context('teams team-channel create-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('display_name', type=str, help='The display name of the user.')
        c.argument('roles', nargs='+', help='The roles for that user.')

    with self.argument_context('teams team-channel create-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels, not chats.)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')
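    # Illustrative usage (hedged): AddBody is assumed to be a generated argparse action that
    # parses space-separated key=value pairs into the itemBody shape; key names are assumptions.
    #   az teams team-channel create-message --team-id <team-guid> --channel-id <channel-id> \
    #       --body content="Hello, channel!" content-type=text --importance normal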
    with self.argument_context('teams team-channel create-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('configuration', action=AddConfiguration, nargs='+', help='teamsTabConfiguration')
        c.argument('display_name', type=str, help='Name of the tab.')
        c.argument('web_url', type=str, help='Deep link URL of the tab instance. Read only.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App')
        c.argument('microsoft_graph_teams_app_display_name', type=str, help='The name of the catalog app provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams App')
        c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the app.', arg_group='Teams App')

    with self.argument_context('teams team-channel delete-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel delete-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel delete-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel delete-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('if_match', type=str, help='ETag')
    with self.argument_context('teams team-channel list-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel list-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel list-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel show-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel show-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel show-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel show-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
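    # Note: the 'update-file-folder' context below flattens the Microsoft Graph driveItem
    # model into CLI options; the arg_group values ('Parent Reference', 'List Item',
    # 'Workbook', 'Remote Item', ...) group the flattened properties back under their
    # source complex types.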
    with self.argument_context('teams team-channel update-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='Date and time of item creation. Read-only.')
        c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
        c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
        c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
        c.argument('name', type=str, help='The name of the item. Read-write.')
        c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
        c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.')
        c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.')
        c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. Read-only.', arg_group='Parent Reference')
        c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.', arg_group='Parent Reference')
        c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. Read-only.', arg_group='Parent Reference')
        c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. Read-only.', arg_group='Parent Reference')
        c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.', arg_group='Parent Reference')
        c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the [Shares][] API.', arg_group='Parent Reference')
        c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent Reference')
        c.argument('site_id', type=str, help='', arg_group='Parent Reference')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('audio', action=AddAudio, nargs='+', help='audio')
        c.argument('content', help='The content stream, if the item represents a file.')
        c.argument('c_tag', type=str, help='An eTag for the content of the item. This eTag is not changed if only the metadata is changed. Note: This property is not returned if the item is a folder. Read-only.')
        c.argument('file_system_info', action=AddFileSystemInfo, nargs='+', help='fileSystemInfo')
        c.argument('image', action=AddImage, nargs='+', help='image')
        c.argument('location', arg_type=get_location_type(self.cli_ctx))
        c.argument('photo', action=AddTeamsChannelsPhoto, nargs='+', help='photo')
        c.argument('publication', action=AddPublication, nargs='+', help='publicationFacet')
        c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/@json-file.')
        c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
        c.argument('size', type=int, help='Size of the item in bytes. Read-only.')
        c.argument('video', action=AddVideo, nargs='+', help='video')
        c.argument('web_dav_url', type=str, help='WebDAV compatible URL for the item.')
        c.argument('children', type=validate_file_or_dict, help='Collection containing Item objects for the immediate children of Item. Only items representing folders have children. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('permissions', type=validate_file_or_dict, help='The set of permissions for the item. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The set of subscriptions on the item. Only supported on the root of a drive.')
        c.argument('thumbnails', type=validate_file_or_dict, help='Collection containing [ThumbnailSet][] objects associated with the item. For more info, see [getting thumbnails][]. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('versions', action=AddVersions, nargs='+', help='The list of previous versions of the item. For more info, see [getting previous versions][]. Read-only. Nullable.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_created_date_time_created_date_time', help='Date and time of item creation. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_description', type=str, help='Provides a user-visible description of the item. Optional.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_e_tag', type=str, help='ETag for the item. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_last_modified_date_time_last_modified_date_time', help='Date and time the item was last modified. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_name', type=str, help='The name of the item. Read-write.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_web_url', type=str, help='URL that displays the resource in the browser. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_user_created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('microsoft_graph_user_last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('microsoft_graph_item_reference_drive_id', type=str, help='Unique identifier of the drive instance that contains the item. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.', arg_group='List Item Parent Reference')
        c.argument('id1', type=str, help='Unique identifier of the item in the drive. Read-only.', arg_group='List Item Parent Reference')
        c.argument('name1', type=str, help='The name of the item being referenced. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_path', type=str, help='Path that can be used to navigate to the item. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the [Shares][] API.', arg_group='List Item Parent Reference')
        c.argument('sharepoint_ids1', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='List Item Parent Reference')
        c.argument('application1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('device1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('user1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('application2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('device2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('user2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('content_type', action=AddContentType, nargs='+', help='contentTypeInfo', arg_group='List Item')
        c.argument('sharepoint_ids2', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='List Item')
        c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('drive_item', type=validate_file_or_dict, help='driveItem Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('fields', action=AddFields, nargs='+', help='fieldValueSet', arg_group='List Item')
        c.argument('microsoft_graph_list_item_versions', type=validate_file_or_dict, help='The list of previous versions of the list item. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('id2', type=str, help='Read-only.', arg_group='Analytics')
        c.argument('all_time', type=validate_file_or_dict, help='itemActivityStat Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('item_activity_stats', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('last_seven_days', type=validate_file_or_dict, help='itemActivityStat Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('id3', type=str, help='Read-only.', arg_group='Workbook')
        c.argument('microsoft_graph_workbook_application', action=AddMicrosoftGraphWorkbookApplication, nargs='+', help='workbookApplication', arg_group='Workbook')
        c.argument('comments', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('functions', action=AddFunctions, nargs='+', help='workbookFunctions', arg_group='Workbook')
        c.argument('names', type=validate_file_or_dict, help='Represents a collection of workbook scoped named items (named ranges and constants). Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('operations', type=validate_file_or_dict, help='The status of workbook operations. Getting an operation collection is not supported, but you can get the status of a long-running operation if the Location header is returned in the response. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('tables', type=validate_file_or_dict, help='Represents a collection of tables associated with the workbook. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('worksheets', type=validate_file_or_dict, help='Represents a collection of worksheets associated with the workbook. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('microsoft_graph_special_folder_name', type=str, help='The unique identifier for this item in the /drive/special collection', arg_group='Special Folder')
        c.argument('owner', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Shared')
        c.argument('scope', type=str, help='Indicates the scope of how the item is shared: anonymous, organization, or users. Read-only.', arg_group='Shared')
        c.argument('shared_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Shared')
        c.argument('shared_date_time', help='The UTC date and time when the item was shared. Read-only.', arg_group='Shared')
        c.argument('on_click_telemetry_url', type=str, help='A callback URL that can be used to record telemetry information. The application should issue a GET on this URL if the user interacts with this item to improve the quality of results.', arg_group='Search Result')
        c.argument('created_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_created_date_time_created_date_time', help='Date and time of item creation. Read-only.', arg_group='Remote Item')
        c.argument('file', type=validate_file_or_dict, help='file Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_file_system_info_file_system_info', action=AddFileSystemInfo, nargs='+', help='fileSystemInfo', arg_group='Remote Item')
        c.argument('folder', type=validate_file_or_dict, help='folder Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_id', type=str, help='Unique identifier for the remote item in its drive. Read-only.', arg_group='Remote Item')
        c.argument('microsoft_graph_image', action=AddImage, nargs='+', help='image', arg_group='Remote Item')
        c.argument('last_modified_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_last_modified_date_time_last_modified_date_time', help='Date and time the item was last modified. Read-only.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_name', type=str, help='Optional. Filename of the remote item. Read-only.', arg_group='Remote Item')
        c.argument('package', action=AddPackage, nargs='+', help='package', arg_group='Remote Item')
        c.argument('parent_reference', type=validate_file_or_dict, help='itemReference Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('shared', type=validate_file_or_dict, help='shared Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('sharepoint_ids3', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Remote Item')
        c.argument('integer_size', type=int, help='Size of the remote item. Read-only.', arg_group='Remote Item')
        c.argument('special_folder', action=AddSpecialFolder, nargs='+', help='specialFolder', arg_group='Remote Item')
        c.argument('microsoft_graph_video', action=AddVideo, nargs='+', help='video', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_web_dav_url_web_dav_url', type=str, help='DAV compatible URL for the item.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_web_url', type=str, help='URL that displays the resource in the browser. Read-only.', arg_group='Remote Item')
        c.argument('queued_date_time', help='Date and time the pending binary operation was queued in UTC time. Read-only.', arg_group='Pending Operations Pending Content Update')
        c.argument('type_', options_list=['--type'], type=str, help='A string indicating the type of package. While oneNote is the only currently defined value, you should expect other package types to be returned and handle them accordingly.', arg_group='Package')
        c.argument('child_count', type=int, help='Number of children contained immediately within this container.', arg_group='Folder')
        c.argument('view', action=AddView, nargs='+', help='folderView', arg_group='Folder')
        c.argument('hashes', action=AddHashes, nargs='+', help='hashes', arg_group='File')
        c.argument('mime_type', type=str, help='The MIME type for the file. This is determined by logic on the server and might not be the value provided when the file was uploaded. Read-only.', arg_group='File')
        c.argument('processing_metadata', arg_type=get_three_state_flag(), help='', arg_group='File')
        c.argument('state', type=str, help='Represents the state of the deleted item.', arg_group='Deleted')

    with self.argument_context('teams team-channel update-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('display_name', type=str, help='The display name of the user.')
        c.argument('roles', nargs='+', help='The roles for that user.')
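    # Note: numeric suffixes in the context above (id1/id2/id3, application1/application2,
    # sharepoint_ids1-3, ...) disambiguate Graph properties that recur under several of the
    # arg_group sections; they are otherwise identical to their unsuffixed counterparts.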
    with self.argument_context('teams team-channel update-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels, not chats.)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')
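    # Note: 'update-message' registers the same chatMessage property set as the
    # create-message contexts above; only the key argument (--chat-message-id) differs.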
    with self.argument_context('teams team-channel update-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('configuration', action=AddConfiguration, nargs='+', help='teamsTabConfiguration')
        c.argument('display_name', type=str, help='Name of the tab.')
        c.argument('web_url', type=str, help='Deep link URL of the tab instance. Read only.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App')
        c.argument('microsoft_graph_teams_app_display_name', type=str, help='The name of the catalog app provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams App')
        c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the app.', arg_group='Teams App')

    with self.argument_context('teams team-channel-message create-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
    with self.argument_context('teams team-channel-message create-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels, not chats.)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')

    with self.argument_context('teams team-channel-message delete-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel-message delete-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel-message list-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel-message list-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel-message show-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
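    # Hedged note: in the reply contexts, 'chat_message_id' keys the root channel message and
    # 'chat_message_id1' the reply entity itself (inferred from the replies navigation path), e.g.
    #   az teams team-channel-message show-reply --team-id <team-guid> --channel-id <channel-id> \
    #       --chat-message-id <root-message-id> --chat-message-id1 <reply-id>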
    with self.argument_context('teams team-channel-message show-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-channel-message update-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
    with self.argument_context('teams team-channel-message update-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels, not chats.)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')

    with self.argument_context('teams team-channel-tab delete-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-channel-tab set-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: json-string/@json-file.')

    with self.argument_context('teams team-channel-tab show-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')

    with self.argument_context('teams team-channel-tab show-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('channel_id', type=str, help='key: id of channel')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-installed-app delete-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-installed-app delete-ref-team-app-definition') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-installed-app set-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: json-string/@json-file.')
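    # Illustrative usage (hedged): a typical Graph "ref" body points at an existing teamsApp
    # via an @odata.id reference, e.g. a file teams-app-ref.json containing
    #   { "@odata.id": "https://graph.microsoft.com/beta/appCatalogs/teamsApps/<app-id>" }
    # passed as:
    #   az teams team-channel-tab set-ref-team-app --team-id <team-guid> \
    #       --channel-id <channel-id> --teams-tab-id <tab-id> --body @teams-app-ref.json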
    with self.argument_context('teams team-installed-app set-ref-team-app-definition') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: json-string/@json-file.')

    with self.argument_context('teams team-installed-app show-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')

    with self.argument_context('teams team-installed-app show-ref-team-app-definition') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')

    with self.argument_context('teams team-installed-app show-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-installed-app show-team-app-definition') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-installed-app upgrade') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_app_installation_id', type=str, help='key: id of teamsAppInstallation')

    with self.argument_context('teams team-primary-channel create-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('display_name', type=str, help='The display name of the user.')
        c.argument('roles', nargs='+', help='The roles for that user.')
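    # Note: the 'team-primary-channel' contexts below mirror the 'team-channel' ones but omit
    # --channel-id, since the primary (General) channel is addressed via the team itself.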
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels not chats)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')
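    # Illustrative invocation for `create-message` above (hypothetical IDs/values;
    # assumes the extension exposes this module as the `teams` command group;
    # `normal` is one of the registered --importance enum values):
    #   az teams team-primary-channel create-message --team-id <team-id> \
    #       --subject "Build status" --importance normal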
    with self.argument_context('teams team-primary-channel create-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('configuration', action=AddConfiguration, nargs='+', help='teamsTabConfiguration')
        c.argument('display_name', type=str, help='Name of the tab.')
        c.argument('web_url', type=str, help='Deep link URL of the tab instance. Read only.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App')
        c.argument('microsoft_graph_teams_app_display_name', type=str, help='The name of the catalog app provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams App')
        c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the app.', arg_group='Teams App')

    with self.argument_context('teams team-primary-channel delete-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel delete-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel delete-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel delete-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel list-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel list-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel list-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel show-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel show-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel show-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
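    # Illustrative --select usage for the OData query options registered above
    # (hypothetical IDs; `subject` and `body` follow the chatMessage fields this
    # module registers):
    #   az teams team-primary-channel show-message --team-id <team-id> \
    #       --chat-message-id <message-id> --select subject body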
    with self.argument_context('teams team-primary-channel show-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel update-file-folder') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='Date and time of item creation. Read-only.')
        c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
        c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
        c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
        c.argument('name', type=str, help='The name of the item. Read-write.')
        c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
        c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.')
        c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.')
        c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. Read-only.', arg_group='Parent Reference')
        c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.', arg_group='Parent Reference')
        c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. Read-only.', arg_group='Parent Reference')
        c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. Read-only.', arg_group='Parent Reference')
        c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.', arg_group='Parent Reference')
        c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the [Shares][] API.', arg_group='Parent Reference')
        c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent Reference')
        c.argument('site_id', type=str, help='', arg_group='Parent Reference')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity', arg_group='Created By')
        c.argument('audio', action=AddAudio, nargs='+', help='audio')
        c.argument('content', help='The content stream, if the item represents a file.')
        c.argument('c_tag', type=str, help='An eTag for the content of the item. This eTag is not changed if only the metadata is changed. Note This property is not returned if the item is a folder. Read-only.')
        c.argument('file_system_info', action=AddFileSystemInfo, nargs='+', help='fileSystemInfo')
        c.argument('image', action=AddImage, nargs='+', help='image')
        c.argument('location', arg_type=get_location_type(self.cli_ctx))
        c.argument('photo', action=AddTeamsChannelsPhoto, nargs='+', help='photo')
        c.argument('publication', action=AddPublication, nargs='+', help='publicationFacet')
        c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/@json-file.')
        c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
        c.argument('size', type=int, help='Size of the item in bytes. Read-only.')
        c.argument('video', action=AddVideo, nargs='+', help='video')
        c.argument('web_dav_url', type=str, help='WebDAV compatible URL for the item.')
        c.argument('children', type=validate_file_or_dict, help='Collection containing Item objects for the immediate children of Item. Only items representing folders have children. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('permissions', type=validate_file_or_dict, help='The set of permissions for the item. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The set of subscriptions on the item. Only supported on the root of a drive.')
        c.argument('thumbnails', type=validate_file_or_dict, help='Collection containing [ThumbnailSet][] objects associated with the item. For more info, see [getting thumbnails][]. Read-only. Nullable. Expected value: json-string/@json-file.')
        c.argument('versions', action=AddVersions, nargs='+', help='The list of previous versions of the item. For more info, see [getting previous versions][]. Read-only. Nullable.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_created_date_time_created_date_time', help='Date and time of item creation. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_description', type=str, help='Provides a user-visible description of the item. Optional.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_e_tag', type=str, help='ETag for the item. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_last_modified_date_time_last_modified_date_time', help='Date and time the item was last modified. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_name', type=str, help='The name of the item. Read-write.', arg_group='List Item')
        c.argument('microsoft_graph_base_item_web_url', type=str, help='URL that displays the resource in the browser. Read-only.', arg_group='List Item')
        c.argument('microsoft_graph_user_created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('microsoft_graph_user_last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user object. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('microsoft_graph_item_reference_drive_id', type=str, help='Unique identifier of the drive instance that contains the item. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.', arg_group='List Item Parent Reference')
        c.argument('id1', type=str, help='Unique identifier of the item in the drive. Read-only.', arg_group='List Item Parent Reference')
        c.argument('name1', type=str, help='The name of the item being referenced. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_path', type=str, help='Path that can be used to navigate to the item. Read-only.', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the [Shares][] API.', arg_group='List Item Parent Reference')
        c.argument('sharepoint_ids1', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='List Item Parent Reference')
        c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='List Item Parent Reference')
        c.argument('application1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('device1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('user1', action=AddApplication, nargs='+', help='identity', arg_group='List Item Last Modified By')
        c.argument('application2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('device2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('user2', action=AddApplication, nargs='+', help='identity', arg_group='List Item Created By')
        c.argument('content_type', action=AddContentType, nargs='+', help='contentTypeInfo', arg_group='List Item')
        c.argument('sharepoint_ids2', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='List Item')
        c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('drive_item', type=validate_file_or_dict, help='driveItem Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('fields', action=AddFields, nargs='+', help='fieldValueSet', arg_group='List Item')
        c.argument('microsoft_graph_list_item_versions', type=validate_file_or_dict, help='The list of previous versions of the list item. Expected value: json-string/@json-file.', arg_group='List Item')
        c.argument('id2', type=str, help='Read-only.', arg_group='Analytics')
        c.argument('all_time', type=validate_file_or_dict, help='itemActivityStat Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('item_activity_stats', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('last_seven_days', type=validate_file_or_dict, help='itemActivityStat Expected value: json-string/@json-file.', arg_group='Analytics')
        c.argument('id3', type=str, help='Read-only.', arg_group='Workbook')
        c.argument('microsoft_graph_workbook_application', action=AddMicrosoftGraphWorkbookApplication, nargs='+', help='workbookApplication', arg_group='Workbook')
        c.argument('comments', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('functions', action=AddFunctions, nargs='+', help='workbookFunctions', arg_group='Workbook')
        c.argument('names', type=validate_file_or_dict, help='Represents a collection of workbook scoped named items (named ranges and constants). Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('operations', type=validate_file_or_dict, help='The status of workbook operations. Getting an operation collection is not supported, but you can get the status of a long-running operation if the Location header is returned in the response. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('tables', type=validate_file_or_dict, help='Represents a collection of tables associated with the workbook. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('worksheets', type=validate_file_or_dict, help='Represents a collection of worksheets associated with the workbook. Read-only. Expected value: json-string/@json-file.', arg_group='Workbook')
        c.argument('microsoft_graph_special_folder_name', type=str, help='The unique identifier for this item in the /drive/special collection', arg_group='Special Folder')
        c.argument('owner', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Shared')
        c.argument('scope', type=str, help='Indicates the scope of how the item is shared: anonymous, organization, or users. Read-only.', arg_group='Shared')
        c.argument('shared_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Shared')
        c.argument('shared_date_time', help='The UTC date and time when the item was shared. Read-only.', arg_group='Shared')
        c.argument('on_click_telemetry_url', type=str, help='A callback URL that can be used to record telemetry information. The application should issue a GET on this URL if the user interacts with this item to improve the quality of results.', arg_group='Search Result')
        c.argument('created_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_created_date_time_created_date_time', help='Date and time of item creation. Read-only.', arg_group='Remote Item')
        c.argument('file', type=validate_file_or_dict, help='file Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_file_system_info_file_system_info', action=AddFileSystemInfo, nargs='+', help='fileSystemInfo', arg_group='Remote Item')
        c.argument('folder', type=validate_file_or_dict, help='folder Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_id', type=str, help='Unique identifier for the remote item in its drive. Read-only.', arg_group='Remote Item')
        c.argument('microsoft_graph_image', action=AddImage, nargs='+', help='image', arg_group='Remote Item')
        c.argument('last_modified_by', type=validate_file_or_dict, help='identitySet Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_last_modified_date_time_last_modified_date_time', help='Date and time the item was last modified. Read-only.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_name', type=str, help='Optional. Filename of the remote item. Read-only.', arg_group='Remote Item')
        c.argument('package', action=AddPackage, nargs='+', help='package', arg_group='Remote Item')
        c.argument('parent_reference', type=validate_file_or_dict, help='itemReference Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('shared', type=validate_file_or_dict, help='shared Expected value: json-string/@json-file.', arg_group='Remote Item')
        c.argument('sharepoint_ids3', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Remote Item')
        c.argument('integer_size', type=int, help='Size of the remote item. Read-only.', arg_group='Remote Item')
        c.argument('special_folder', action=AddSpecialFolder, nargs='+', help='specialFolder', arg_group='Remote Item')
        c.argument('microsoft_graph_video', action=AddVideo, nargs='+', help='video', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_web_dav_url_web_dav_url', type=str, help='DAV compatible URL for the item.', arg_group='Remote Item')
        c.argument('microsoft_graph_remote_item_web_url', type=str, help='URL that displays the resource in the browser. Read-only.', arg_group='Remote Item')
        c.argument('queued_date_time', help='Date and time the pending binary operation was queued in UTC time. Read-only.', arg_group='Pending Operations Pending Content Update')
        c.argument('type_', options_list=['--type'], type=str, help='A string indicating the type of package. While oneNote is the only currently defined value, you should expect other package types to be returned and handle them accordingly.', arg_group='Package')
        c.argument('child_count', type=int, help='Number of children contained immediately within this container.', arg_group='Folder')
        c.argument('view', action=AddView, nargs='+', help='folderView', arg_group='Folder')
        c.argument('hashes', action=AddHashes, nargs='+', help='hashes', arg_group='File')
        c.argument('mime_type', type=str, help='The MIME type for the file. This is determined by logic on the server and might not be the value provided when the file was uploaded. Read-only.', arg_group='File')
        c.argument('processing_metadata', arg_type=get_three_state_flag(), help='', arg_group='File')
        c.argument('state', type=str, help='Represents the state of the deleted item.', arg_group='Deleted')

    with self.argument_context('teams team-primary-channel update-member') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('conversation_member_id', type=str, help='key: id of conversationMember')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('display_name', type=str, help='The display name of the user.')
        c.argument('roles', nargs='+', help='The roles for that user.')
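    # Illustrative invocation for `update-member` above (hypothetical IDs; `owner`
    # is shown purely as an example role value, not a list taken from this module):
    #   az teams team-primary-channel update-member --team-id <team-id> \
    #       --conversation-member-id <member-id> --roles owner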
    with self.argument_context('teams team-primary-channel update-message') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels not chats)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')
    with self.argument_context('teams team-primary-channel update-tab') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('configuration', action=AddConfiguration, nargs='+', help='teamsTabConfiguration')
        c.argument('display_name', type=str, help='Name of the tab.')
        c.argument('web_url', type=str, help='Deep link URL of the tab instance. Read only.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Teams App')
        c.argument('microsoft_graph_teams_app_display_name', type=str, help='The name of the catalog app provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('distribution_method', arg_type=get_enum_type(['store', 'organization', 'sideloaded', 'unknownFutureValue']), help='', arg_group='Teams App')
        c.argument('external_id', type=str, help='The ID of the catalog provided by the app developer in the Microsoft Teams zip app package.', arg_group='Teams App')
        c.argument('app_definitions', action=AddAppDefinitions, nargs='+', help='The details for each version of the app.', arg_group='Teams App')

    with self.argument_context('teams team-primary-channel-message create-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
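    # Illustrative invocation for `update-tab` above (hypothetical IDs and tab name):
    #   az teams team-primary-channel update-tab --team-id <team-id> \
    #       --teams-tab-id <tab-id> --display-name "Sprint board"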
    with self.argument_context('teams team-primary-channel-message create-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels not chats)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')

    with self.argument_context('teams team-primary-channel-message delete-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel-message delete-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel-message list-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel-message list-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel-message show-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-primary-channel-message show-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
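    # Illustrative invocation for `list-reply` above (hypothetical IDs; `subject`
    # follows the chatMessage fields registered in this module):
    #   az teams team-primary-channel-message list-reply --team-id <team-id> \
    #       --chat-message-id <message-id> --select subject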
    with self.argument_context('teams team-primary-channel-message update-hosted-content') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_hosted_content_id', type=str, help='key: id of chatMessageHostedContent')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')

    with self.argument_context('teams team-primary-channel-message update-reply') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('chat_message_id', type=str, help='key: id of chatMessage')
        c.argument('chat_message_id1', type=str, help='key: id of chatMessage')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('attachments', action=AddAttachments, nargs='+', help='Attached files. Attachments are currently read-only – sending attachments is not supported.')
        c.argument('body', action=AddBody, nargs='+', help='itemBody')
        c.argument('created_date_time', help='Read only. Timestamp of when the chat message was created.')
        c.argument('deleted_date_time', help='Read only. Timestamp at which the chat message was deleted, or null if not deleted.')
        c.argument('etag', type=str, help='Read-only. Version number of the chat message.')
        c.argument('importance', arg_type=get_enum_type(['normal', 'high', 'urgent', 'unknownFutureValue']), help='')
        c.argument('last_edited_date_time', help='Read only. Timestamp when edits to the chat message were made. Triggers an \'Edited\' flag in the Microsoft Teams UI. If no edits are made the value is null.')
        c.argument('last_modified_date_time', help='Read only. Timestamp when the chat message is created (initial setting) or edited, including when a reaction is added or removed.')
        c.argument('locale', type=str, help='Locale of the chat message set by the client.')
        c.argument('mentions', type=validate_file_or_dict, help='List of entities mentioned in the chat message. Currently supports user, bot, team, channel. Expected value: json-string/@json-file.')
        c.argument('message_type', arg_type=get_enum_type(['message', 'chatEvent', 'typing', 'unknownFutureValue']), help='')
        c.argument('reactions', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('reply_to_id', type=str, help='Read-only. Id of the parent chat message or root chat message of the thread. (Only applies to chat messages in channels not chats)')
        c.argument('subject', type=str, help='The subject of the chat message, in plaintext.')
        c.argument('summary', type=str, help='Summary text of the chat message that could be used for push notifications and summary views or fall back views. Only applies to channel chat messages, not chat messages in a chat.')
        c.argument('web_url', type=str, help='')
        c.argument('hosted_contents', action=AddHostedContents, nargs='+', help='')
        c.argument('replies', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.')
        c.argument('dlp_action', arg_type=get_enum_type(['none', 'notifySender', 'blockAccess', 'blockAccessExternal']), help='', arg_group='Policy Violation')
        c.argument('justification_text', type=str, help='', arg_group='Policy Violation')
        c.argument('policy_tip', action=AddPolicyTip, nargs='+', help='chatMessagePolicyViolationPolicyTip', arg_group='Policy Violation')
        c.argument('user_action', arg_type=get_enum_type(['none', 'override', 'reportFalsePositive']), help='', arg_group='Policy Violation')
        c.argument('verdict_details', arg_type=get_enum_type(['none', 'allowFalsePositiveOverride', 'allowOverrideWithoutJustification', 'allowOverrideWithJustification']), help='', arg_group='Policy Violation')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='From')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='From')

    with self.argument_context('teams team-primary-channel-tab delete-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-primary-channel-tab set-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: json-string/@json-file.')

    with self.argument_context('teams team-primary-channel-tab show-ref-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')

    with self.argument_context('teams team-primary-channel-tab show-team-app') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('teams_tab_id', type=str, help='key: id of teamsTab')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
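    # Illustrative invocation for `set-ref-team-app` above (hypothetical IDs and
    # file name; --body accepts inline JSON or an @file per its help text):
    #   az teams team-primary-channel-tab set-ref-team-app --team-id <team-id> \
    #       --teams-tab-id <tab-id> --body @tab-ref.json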
    with self.argument_context('teams team-schedule create-offer-shift-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system', 'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']), help='')
        c.argument('recipient_action_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('recipient_action_message', type=str, help='Custom message sent by recipient of the offer shift request.')
        c.argument('recipient_user_id', type=str, help='User ID of the recipient of the offer shift request.')
        c.argument('sender_shift_id', type=str, help='User ID of the sender of the offer shift request.')

    with self.argument_context('teams team-schedule create-open-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('draft_open_shift', action=AddDraftOpenShift, nargs='+', help='openShiftItem')
        c.argument('scheduling_group_id', type=str, help='ID for the scheduling group that the open shift belongs to.')
        c.argument('shared_open_shift', action=AddDraftOpenShift, nargs='+', help='openShiftItem')
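    # Illustrative invocation for `create-open-shift` above (hypothetical IDs):
    #   az teams team-schedule create-open-shift --team-id <team-id> \
    #       --scheduling-group-id <scheduling-group-id>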
    with self.argument_context('teams team-schedule create-open-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system', 'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']), help='')
        c.argument('open_shift_id', type=str, help='ID for the open shift.')

    with self.argument_context('teams team-schedule create-scheduling-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('display_name', type=str, help='The display name for the schedulingGroup. Required.')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether the schedulingGroup can be used when creating new entities or updating existing ones. Required.')
        c.argument('user_ids', nargs='+', help='The list of user IDs that are a member of the schedulingGroup. Required.')
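    # Illustrative invocation for `create-scheduling-group` above (hypothetical IDs
    # and name; --is-active is the three-state flag registered above):
    #   az teams team-schedule create-scheduling-group --team-id <team-id> \
    #       --display-name "Cashiers" --is-active true --user-ids <user-id-1> <user-id-2>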
    with self.argument_context('teams team-schedule create-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('scheduling_group_id', type=str, help='ID of the scheduling group the shift is part of. Required.')
        c.argument('user_id', type=str, help='ID of the user assigned to the shift. Required.')
        c.argument('end_date_time', help='', arg_group='Shared Shift')
        c.argument('start_date_time', help='', arg_group='Shared Shift')
        c.argument('theme', arg_type=get_enum_type(['white', 'blue', 'green', 'purple', 'pink', 'yellow', 'gray', 'darkBlue', 'darkGreen', 'darkPurple', 'darkPink', 'darkYellow', 'unknownFutureValue']), help='', arg_group='Shared Shift')
        c.argument('activities', action=AddActivities, nargs='+', help='An incremental part of a shift which can cover details of when and where an employee is during their shift. For example, an assignment or a scheduled break or lunch. Required.', arg_group='Shared Shift')
        c.argument('display_name', type=str, help='The shift label of the shiftItem.', arg_group='Shared Shift')
        c.argument('notes', type=str, help='The shift notes for the shiftItem.', arg_group='Shared Shift')
        c.argument('microsoft_graph_schedule_entity_end_date_time_end_date_time', help='', arg_group='Draft Shift')
        c.argument('microsoft_graph_schedule_entity_start_date_time_start_date_time', help='', arg_group='Draft Shift')
        c.argument('microsoft_graph_schedule_entity_theme', arg_type=get_enum_type(['white', 'blue', 'green', 'purple', 'pink', 'yellow', 'gray', 'darkBlue', 'darkGreen', 'darkPurple', 'darkPink', 'darkYellow', 'unknownFutureValue']), help='', arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_activities', action=AddActivities, nargs='+', help='An incremental part of a shift which can cover details of when and where an employee is during their shift. For example, an assignment or a scheduled break or lunch. Required.', arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_display_name', type=str, help='The shift label of the shiftItem.', arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_notes', type=str, help='The shift notes for the shiftItem.', arg_group='Draft Shift')
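    # Illustrative invocation for `create-shift` above (hypothetical IDs and times;
    # `blue` is one of the registered --theme enum values):
    #   az teams team-schedule create-shift --team-id <team-id> --user-id <user-id> \
    #       --scheduling-group-id <scheduling-group-id> \
    #       --start-date-time 2014-01-01T00:00:00Z --end-date-time 2014-01-01T08:00:00Z \
    #       --theme blue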
    with self.argument_context('teams team-schedule create-swap-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system', 'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']), help='')
        c.argument('recipient_action_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('recipient_action_message', type=str, help='Custom message sent by recipient of the offer shift request.')
        c.argument('recipient_user_id', type=str, help='User ID of the recipient of the offer shift request.')
        c.argument('sender_shift_id', type=str, help='User ID of the sender of the offer shift request.')
        c.argument('recipient_shift_id', type=str, help='ShiftId for the recipient user with whom the request is to swap.')

    with self.argument_context('teams team-schedule create-time-off') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('draft_time_off', action=AddDraftTimeOff, nargs='+', help='timeOffItem')
        c.argument('shared_time_off', action=AddDraftTimeOff, nargs='+', help='timeOffItem')
        c.argument('user_id', type=str, help='ID of the user assigned to the timeOff. Required.')
    with self.argument_context('teams team-schedule create-time-off-reason') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('display_name', type=str, help='The name of the timeOffReason. Required.')
        c.argument('icon_type', arg_type=get_enum_type(['none', 'car', 'calendar', 'running', 'plane', 'firstAid', 'doctor', 'notWorking', 'clock', 'juryDuty', 'globe', 'cup', 'phone', 'weather', 'umbrella', 'piggyBank', 'dog', 'cake', 'trafficCone', 'pin', 'sunny', 'unknownFutureValue']), help='')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether the timeOffReason can be used when creating new entities or updating existing ones. Required.')
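    # Illustrative invocation for `create-time-off-reason` above (hypothetical
    # values; `plane` is one of the registered --icon-type enum values):
    #   az teams team-schedule create-time-off-reason --team-id <team-id> \
    #       --display-name "Vacation" --icon-type plane --is-active true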
    with self.argument_context('teams team-schedule create-time-off-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system', 'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']), help='')
        c.argument('end_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('start_date_time', help='The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('time_off_reason_id', type=str, help='The reason for the time off.')

    with self.argument_context('teams team-schedule delete-offer-shift-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('offer_shift_request_id', type=str, help='key: id of offerShiftRequest')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-open-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_id', type=str, help='key: id of openShift')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-open-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_change_request_id', type=str, help='key: id of openShiftChangeRequest')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-scheduling-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('scheduling_group_id', type=str, help='key: id of schedulingGroup')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('shift_id', type=str, help='key: id of shift')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-swap-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('swap_shifts_change_request_id', type=str, help='key: id of swapShiftsChangeRequest')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-time-off') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_id', type=str, help='key: id of timeOff')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-time-off-reason') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_reason_id', type=str, help='key: id of timeOffReason')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule delete-time-off-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_request_id', type=str, help='key: id of timeOffRequest')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams team-schedule list-offer-shift-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-open-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-open-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
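    # Illustrative invocation for the list contexts above (hypothetical ID):
    #   az teams team-schedule list-offer-shift-request --team-id <team-id>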
    with self.argument_context('teams team-schedule list-scheduling-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-swap-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-time-off') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-time-off-reason') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule list-time-off-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule share') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('notify_team', arg_type=get_three_state_flag(), help='')
        c.argument('start_date_time', help='')
        c.argument('end_date_time', help='')

    with self.argument_context('teams team-schedule show-offer-shift-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('offer_shift_request_id', type=str, help='key: id of offerShiftRequest')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-open-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_id', type=str, help='key: id of openShift')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-open-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_change_request_id', type=str, help='key: id of openShiftChangeRequest')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-scheduling-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('scheduling_group_id', type=str, help='key: id of schedulingGroup')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
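    # Example (hypothetical, placeholder IDs): the list commands surface the usual OData query
    # options, and `share` publishes the schedule for a time window:
    #   az teams team-schedule list-shift --team-id <team-guid> --select id sharedShift --orderby createdDateTime
    #   az teams team-schedule share --team-id <team-guid> --notify-team true \
    #       --start-date-time 2021-03-01T00:00:00Z --end-date-time 2021-03-08T00:00:00Z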
    with self.argument_context('teams team-schedule show-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('shift_id', type=str, help='key: id of shift')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-swap-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('swap_shifts_change_request_id', type=str, help='key: id of swapShiftsChangeRequest')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-time-off') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_id', type=str, help='key: id of timeOff')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-time-off-reason') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_reason_id', type=str, help='key: id of timeOffReason')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams team-schedule show-time-off-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_request_id', type=str, help='key: id of timeOffRequest')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
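    # Example (hypothetical, placeholder IDs): show commands accept the same --select/--expand options:
    #   az teams team-schedule show-time-off-request --team-id <team-guid> \
    #       --time-off-request-id <request-id> --select id state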
    with self.argument_context('teams team-schedule update-offer-shift-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('offer_shift_request_id', type=str, help='key: id of offerShiftRequest')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system',
                   'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']),
                   help='')
        c.argument('recipient_action_date_time', help='The Timestamp type represents date and time information '
                   'using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would '
                   'look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('recipient_action_message', type=str, help='Custom message sent by recipient of the offer shift '
                   'request.')
        c.argument('recipient_user_id', type=str, help='User ID of the recipient of the offer shift request.')
        c.argument('sender_shift_id', type=str, help='User ID of the sender of the offer shift request.')

    with self.argument_context('teams team-schedule update-open-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_id', type=str, help='key: id of openShift')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('draft_open_shift', action=AddDraftOpenShift, nargs='+', help='openShiftItem')
        c.argument('scheduling_group_id', type=str, help='ID for the scheduling group that the open shift belongs '
                   'to.')
        c.argument('shared_open_shift', action=AddDraftOpenShift, nargs='+', help='openShiftItem')

    with self.argument_context('teams team-schedule update-open-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('open_shift_change_request_id', type=str, help='key: id of openShiftChangeRequest')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system',
                   'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']),
                   help='')
        c.argument('open_shift_id', type=str, help='ID for the open shift.')
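    # Example (hypothetical, placeholder IDs and key names -- the exact key=value tokens accepted by
    # the generated AddDraftOpenShift action depend on the openShiftItem schema it was generated from):
    #   az teams team-schedule update-open-shift --team-id <team-guid> --open-shift-id <shift-id> \
    #       --scheduling-group-id <group-id> --draft-open-shift open-slot-count=2 display-name=Evening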
    with self.argument_context('teams team-schedule update-scheduling-group') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('scheduling_group_id', type=str, help='key: id of schedulingGroup')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('display_name', type=str, help='The display name for the schedulingGroup. Required.')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether the schedulingGroup can be '
                   'used when creating new entities or updating existing ones. Required.')
        c.argument('user_ids', nargs='+', help='The list of user IDs that are a member of the schedulingGroup. '
                   'Required.')
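    # Example (hypothetical, placeholder IDs): nargs='+' arguments such as --user-ids take a
    # space-separated list:
    #   az teams team-schedule update-scheduling-group --team-id <team-guid> \
    #       --scheduling-group-id <group-id> --display-name Cashiers --is-active true \
    #       --user-ids <user-guid-1> <user-guid-2>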
    with self.argument_context('teams team-schedule update-shift') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('shift_id', type=str, help='key: id of shift')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('scheduling_group_id', type=str, help='ID of the scheduling group the shift is part of. '
                   'Required.')
        c.argument('user_id', type=str, help='ID of the user assigned to the shift. Required.')
        c.argument('end_date_time', help='', arg_group='Shared Shift')
        c.argument('start_date_time', help='', arg_group='Shared Shift')
        c.argument('theme', arg_type=get_enum_type(['white', 'blue', 'green', 'purple', 'pink', 'yellow', 'gray',
                   'darkBlue', 'darkGreen', 'darkPurple', 'darkPink', 'darkYellow', 'unknownFutureValue']), help='',
                   arg_group='Shared Shift')
        c.argument('activities', action=AddActivities, nargs='+', help='An incremental part of a shift which can '
                   'cover details of when and where an employee is during their shift. For example, an assignment or '
                   'a scheduled break or lunch. Required.', arg_group='Shared Shift')
        c.argument('display_name', type=str, help='The shift label of the shiftItem.', arg_group='Shared Shift')
        c.argument('notes', type=str, help='The shift notes for the shiftItem.', arg_group='Shared Shift')
        c.argument('microsoft_graph_schedule_entity_end_date_time_end_date_time', help='', arg_group='Draft Shift')
        c.argument('microsoft_graph_schedule_entity_start_date_time_start_date_time', help='',
                   arg_group='Draft Shift')
        c.argument('microsoft_graph_schedule_entity_theme', arg_type=get_enum_type(['white', 'blue', 'green',
                   'purple', 'pink', 'yellow', 'gray', 'darkBlue', 'darkGreen', 'darkPurple', 'darkPink',
                   'darkYellow', 'unknownFutureValue']), help='', arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_activities', action=AddActivities, nargs='+', help='An incremental '
                   'part of a shift which can cover details of when and where an employee is during their shift. For '
                   'example, an assignment or a scheduled break or lunch. Required.', arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_display_name', type=str, help='The shift label of the shiftItem.',
                   arg_group='Draft Shift')
        c.argument('microsoft_graph_shift_item_notes', type=str, help='The shift notes for the shiftItem.',
                   arg_group='Draft Shift')
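    # Example (hypothetical, placeholder IDs). The 'Shared Shift' and 'Draft Shift' arg groups map to
    # the sharedShift and draftShift halves of the shift; the long microsoft_graph_* option names are
    # how the generator disambiguates the draft-side duplicates of the same properties:
    #   az teams team-schedule update-shift --team-id <team-guid> --shift-id <shift-id> \
    #       --user-id <user-guid> --scheduling-group-id <group-id> \
    #       --start-date-time 2021-03-01T08:00:00Z --end-date-time 2021-03-01T16:00:00Z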
    with self.argument_context('teams team-schedule update-swap-shift-change-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('swap_shifts_change_request_id', type=str, help='key: id of swapShiftsChangeRequest')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system',
                   'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']),
                   help='')
        c.argument('recipient_action_date_time', help='The Timestamp type represents date and time information '
                   'using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would '
                   'look like this: \'2014-01-01T00:00:00Z\'')
        c.argument('recipient_action_message', type=str, help='Custom message sent by recipient of the offer shift '
                   'request.')
        c.argument('recipient_user_id', type=str, help='User ID of the recipient of the offer shift request.')
        c.argument('sender_shift_id', type=str, help='User ID of the sender of the offer shift request.')
        c.argument('recipient_shift_id', type=str, help='ShiftId for the recipient user with whom the request is to '
                   'swap.')

    with self.argument_context('teams team-schedule update-time-off') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_id', type=str, help='key: id of timeOff')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('draft_time_off', action=AddDraftTimeOff, nargs='+', help='timeOffItem')
        c.argument('shared_time_off', action=AddDraftTimeOff, nargs='+', help='timeOffItem')
        c.argument('user_id', type=str, help='ID of the user assigned to the timeOff. Required.')
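    # Example (hypothetical, placeholder IDs): --draft-time-off and --shared-time-off use the
    # generated AddDraftTimeOff action's key=value syntax for the timeOffItem:
    #   az teams team-schedule update-time-off --team-id <team-guid> --time-off-id <id> --user-id <user-guid>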
    with self.argument_context('teams team-schedule update-time-off-reason') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_reason_id', type=str, help='key: id of timeOffReason')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('display_name', type=str, help='The name of the timeOffReason. Required.')
        c.argument('icon_type', arg_type=get_enum_type(['none', 'car', 'calendar', 'running', 'plane', 'firstAid',
                   'doctor', 'notWorking', 'clock', 'juryDuty', 'globe', 'cup', 'phone', 'weather', 'umbrella',
                   'piggyBank', 'dog', 'cake', 'trafficCone', 'pin', 'sunny', 'unknownFutureValue']), help='')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether the timeOffReason can be '
                   'used when creating new entities or updating existing ones. Required.')

    with self.argument_context('teams team-schedule update-time-off-request') as c:
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('time_off_request_id', type=str, help='key: id of timeOffRequest')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('assigned_to', arg_type=get_enum_type(['sender', 'recipient', 'manager', 'system',
                   'unknownFutureValue']), help='')
        c.argument('manager_action_date_time', help='')
        c.argument('manager_action_message', type=str, help='')
        c.argument('manager_user_id', type=str, help='')
        c.argument('sender_date_time', help='')
        c.argument('sender_message', type=str, help='')
        c.argument('sender_user_id', type=str, help='')
        c.argument('state', arg_type=get_enum_type(['pending', 'approved', 'declined', 'unknownFutureValue']),
                   help='')
        c.argument('end_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('start_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('time_off_reason_id', type=str, help='The reason for the time off.')
    with self.argument_context('teams teamwork-teamwork show-teamwork') as c:
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams teamwork-teamwork update-teamwork') as c:
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('workforce_integrations', type=validate_file_or_dict, help=' Expected value: '
                   'json-string/@json-file.')

    with self.argument_context('teams teamwork create-workforce-integration') as c:
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('api_version', type=int, help='API version for the callback URL. Start with 1.')
        c.argument('display_name', type=str, help='Name of the workforce integration.')
        c.argument('encryption', action=AddEncryption, nargs='+', help='workforceIntegrationEncryption')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether this workforce integration '
                   'is currently active and available.')
        c.argument('supported_entities', arg_type=get_enum_type(['none', 'shift', 'swapRequest',
                   'userShiftPreferences', 'openShift', 'openShiftRequest', 'offerShiftRequest',
                   'unknownFutureValue']), help='')
        c.argument('url', type=str, help='Workforce Integration URL for callbacks from the Shifts service.')

    with self.argument_context('teams teamwork delete-workforce-integration') as c:
        c.argument('workforce_integration_id', type=str, help='key: id of workforceIntegration')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams teamwork list-workforce-integration') as c:
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams teamwork show-workforce-integration') as c:
        c.argument('workforce_integration_id', type=str, help='key: id of workforceIntegration')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')
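    # Example (hypothetical, placeholder values; the key names accepted by the generated AddEncryption
    # action come from the workforceIntegrationEncryption schema it was generated from):
    #   az teams teamwork create-workforce-integration --display-name ShiftsConnector --api-version 1 \
    #       --is-active true --supported-entities shift --url https://example.com/shifts/callback \
    #       --encryption protocol=sharedSecret secret=<shared-secret>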
    with self.argument_context('teams teamwork update-workforce-integration') as c:
        c.argument('workforce_integration_id', type=str, help='key: id of workforceIntegration')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('created_date_time', help='The Timestamp type represents date and time information using ISO 8601 '
                   'format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '
                   '\'2014-01-01T00:00:00Z\'')
        c.argument('last_modified_date_time', help='The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'')
        c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
        c.argument('api_version', type=int, help='API version for the callback URL. Start with 1.')
        c.argument('display_name', type=str, help='Name of the workforce integration.')
        c.argument('encryption', action=AddEncryption, nargs='+', help='workforceIntegrationEncryption')
        c.argument('is_active', arg_type=get_three_state_flag(), help='Indicates whether this workforce integration '
                   'is currently active and available.')
        c.argument('supported_entities', arg_type=get_enum_type(['none', 'shift', 'swapRequest',
                   'userShiftPreferences', 'openShift', 'openShiftRequest', 'offerShiftRequest',
                   'unknownFutureValue']), help='')
        c.argument('url', type=str, help='Workforce Integration URL for callbacks from the Shifts service.')

    with self.argument_context('teams user create-joined-team') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('classification', type=str, help='An optional label. Typically describes the data or business '
                   'sensitivity of the team. Must match one of a pre-configured set in the tenant\'s directory.')
        c.argument('description', type=str, help='An optional description for the team.')
        c.argument('display_name', type=str, help='The name of the team.')
        c.argument('fun_settings', action=AddFunSettings, nargs='+', help='teamFunSettings')
        c.argument('guest_settings', action=AddGuestSettings, nargs='+', help='teamGuestSettings')
        c.argument('internal_id', type=str, help='A unique ID for the team that has been used in a few places such '
                   'as the audit log/Office 365 Management Activity API.')
        c.argument('is_archived', arg_type=get_three_state_flag(), help='Whether this team is in read-only mode.')
        c.argument('member_settings', action=AddMemberSettings, nargs='+', help='teamMemberSettings')
        c.argument('messaging_settings', action=AddMessagingSettings, nargs='+', help='teamMessagingSettings')
        c.argument('specialization', arg_type=get_enum_type(['none', 'educationStandard', 'educationClass',
                   'educationProfessionalLearningCommunity', 'educationStaff', 'healthcareStandard',
                   'healthcareCareCoordination', 'unknownFutureValue']), help='')
        c.argument('visibility', arg_type=get_enum_type(['private', 'public', 'hiddenMembership',
                   'unknownFutureValue']), help='')
        c.argument('web_url', type=str, help='A hyperlink that will go to the team in the Microsoft Teams client. '
                   'This is the URL that you get when you right-click a team in the Microsoft Teams client and '
                   'select Get link to team. This URL should be treated as an opaque blob, and not parsed.')
        c.argument('channels', type=validate_file_or_dict, help='The collection of channels & messages associated '
                   'with the team. Expected value: json-string/@json-file.')
        c.argument('installed_apps', type=validate_file_or_dict, help='The apps installed in this team. Expected '
                   'value: json-string/@json-file.')
        c.argument('members', action=AddGroupsMembers, nargs='+', help='Members and owners of the team.')
        c.argument('operations', type=validate_file_or_dict, help='The async operations that ran or are running on '
                   'this team. Expected value: json-string/@json-file.')
        c.argument('primary_channel', type=validate_file_or_dict, help='channel Expected value: '
                   'json-string/@json-file.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Template')
        c.argument('id1', type=str, help='Read-only.', arg_group='Group')
        c.argument('deleted_date_time', help='', arg_group='Group')
        c.argument('assigned_labels', action=AddAssignedLabels, nargs='+', help='The list of sensitivity label pairs '
                   '(label ID, label name) associated with a Microsoft 365 group. Returned only on $select. '
                   'Read-only.', arg_group='Group')
        c.argument('assigned_licenses', action=AddAssignedLicenses, nargs='+', help='The licenses that are assigned '
                   'to the group. Returned only on $select. Read-only.', arg_group='Group')
        c.argument('microsoft_graph_group_classification', type=str, help='Describes a classification for the group '
                   '(such as low, medium or high business impact). Valid values for this property are defined by '
                   'creating a ClassificationList setting value, based on the template definition. Returned by '
                   'default.', arg_group='Group')
        c.argument('created_date_time', help='Timestamp of when the group was created. The value cannot be modified '
                   'and is automatically populated when the group is created. The Timestamp type represents date and '
                   'time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on '
                   'Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.',
                   arg_group='Group')
        c.argument('microsoft_graph_group_description', type=str, help='An optional description for the group. '
                   'Returned by default.', arg_group='Group')
        c.argument('microsoft_graph_group_display_name', type=str, help='The display name for the group. This '
                   'property is required when a group is created and cannot be cleared during updates. Returned by '
                   'default. Supports $filter and $orderby.', arg_group='Group')
        c.argument('expiration_date_time', help='Timestamp of when the group is set to expire. The value cannot be '
                   'modified and is automatically populated when the group is created. The Timestamp type represents '
                   'date and time information using ISO 8601 format and is always in UTC time. For example, midnight '
                   'UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. '
                   'Read-only.', arg_group='Group')
        c.argument('group_types', nargs='+', help='Specifies the group type and its membership. If the collection '
                   'contains Unified, the group is a Microsoft 365 group; otherwise, it\'s either a security group '
                   'or distribution group. For details, see groups overview. If the collection includes '
                   'DynamicMembership, the group has dynamic membership; otherwise, membership is static. Returned '
                   'by default. Supports $filter.', arg_group='Group')
        c.argument('has_members_with_license_errors', arg_type=get_three_state_flag(), help='Indicates whether there '
                   'are members in this group that have license errors from its group-based license assignment. This '
                   'property is never returned on a GET operation. You can use it as a $filter argument to get '
                   'groups that have members with license errors (that is, filter for this property being true). See '
                   'an example.', arg_group='Group')
        c.argument('license_processing_state', action=AddLicenseProcessingState, nargs='+',
                   help='licenseProcessingState', arg_group='Group')
        c.argument('mail', type=str, help='The SMTP address for the group, for example, '
                   '\'serviceadmins@contoso.onmicrosoft.com\'. Returned by default. Read-only. Supports $filter.',
                   arg_group='Group')
        c.argument('mail_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is '
                   'mail-enabled. Returned by default.', arg_group='Group')
        c.argument('mail_nickname', type=str, help='The mail alias for the group, unique in the organization. This '
                   'property must be specified when a group is created. Returned by default. Supports $filter.',
                   arg_group='Group')
        c.argument('membership_rule', type=str, help='The rule that determines members for this group if the group '
                   'is a dynamic group (groupTypes contains DynamicMembership). For more information about the '
                   'syntax of the membership rule, see Membership Rules syntax. Returned by default.',
                   arg_group='Group')
        c.argument('membership_rule_processing_state', type=str, help='Indicates whether the dynamic membership '
                   'processing is on or paused. Possible values are \'On\' or \'Paused\'. Returned by default.',
                   arg_group='Group')
        c.argument('on_premises_domain_name', type=str, help='Contains the on-premises domain FQDN, also called '
                   'dnsDomainName, synchronized from the on-premises directory. The property is only populated for '
                   'customers who are synchronizing their on-premises directory to Azure Active Directory via Azure '
                   'AD Connect. Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_last_sync_date_time', help='Indicates the last time at which the group was synced '
                   'with the on-premises directory. The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only. Supports $filter.',
                   arg_group='Group')
        c.argument('on_premises_net_bios_name', type=str, help='Contains the on-premises netBios name synchronized '
                   'from the on-premises directory. The property is only populated for customers who are '
                   'synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect. '
                   'Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_provisioning_errors', action=AddOnPremisesProvisioningErrors, nargs='+',
                   help='Errors when using Microsoft synchronization product during provisioning. Returned by '
                   'default.', arg_group='Group')
        c.argument('on_premises_sam_account_name', type=str, help='Contains the on-premises SAM account name '
                   'synchronized from the on-premises directory. The property is only populated for customers who '
                   'are synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect. '
                   'Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_security_identifier', type=str, help='Contains the on-premises security identifier '
                   '(SID) for the group that was synchronized from on-premises to the cloud. Returned by default. '
                   'Read-only.', arg_group='Group')
        c.argument('on_premises_sync_enabled', arg_type=get_three_state_flag(), help='true if this group is synced '
                   'from an on-premises directory; false if this group was originally synced from an on-premises '
                   'directory but is no longer synced; null if this object has never been synced from an on-premises '
                   'directory (default). Returned by default. Read-only. Supports $filter.', arg_group='Group')
        c.argument('preferred_data_location', type=str, help='The preferred data location for the group. For more '
                   'information, see OneDrive Online Multi-Geo. Returned by default.', arg_group='Group')
        c.argument('preferred_language', type=str, help='The preferred language for a Microsoft 365 group. Should '
                   'follow ISO 639-1 Code; for example \'en-US\'. Returned by default.', arg_group='Group')
        c.argument('proxy_addresses', nargs='+', help='Email addresses for the group that direct to the same group '
                   'mailbox. For example: [\'SMTP: bob@contoso.com\', \'smtp: bob@sales.contoso.com\']. The any '
                   'operator is required to filter expressions on multi-valued properties. Returned by default. '
                   'Read-only. Not nullable. Supports $filter.', arg_group='Group')
        c.argument('renewed_date_time', help='Timestamp of when the group was last renewed. This cannot be modified '
                   'directly and is only updated via the renew service action. The Timestamp type represents date '
                   'and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC '
                   'on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.',
                   arg_group='Group')
        c.argument('security_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is a '
                   'security group. Returned by default. Supports $filter.', arg_group='Group')
        c.argument('security_identifier', type=str, help='Security identifier of the group, used in Windows '
                   'scenarios. Returned by default.', arg_group='Group')
        c.argument('theme', type=str, help='Specifies a Microsoft 365 group\'s color theme. Possible values are '
                   'Teal, Purple, Green, Blue, Pink, Orange or Red. Returned by default.', arg_group='Group')
        c.argument('microsoft_graph_group_visibility', type=str, help='Specifies the visibility of a Microsoft 365 '
                   'group. Possible values are: Private, Public, or Hiddenmembership; blank values are treated as '
                   'public. See group visibility options to learn more. Visibility can be set only when a group is '
                   'created; it is not editable. Visibility is supported only for unified groups; it is not '
                   'supported for security groups. Returned by default.', arg_group='Group')
        c.argument('allow_external_senders', arg_type=get_three_state_flag(), help='Indicates if people external to '
                   'the organization can send messages to the group. Default value is false. Returned only on '
                   '$select.', arg_group='Group')
        c.argument('auto_subscribe_new_members', arg_type=get_three_state_flag(), help='Indicates if new members '
                   'added to the group will be auto-subscribed to receive email notifications. You can set this '
                   'property in a PATCH request for the group; do not set it in the initial POST request that '
                   'creates the group. Default value is false. Returned only on $select.', arg_group='Group')
        c.argument('hide_from_address_lists', arg_type=get_three_state_flag(), help='True if the group is not '
                   'displayed in certain parts of the Outlook UI: the Address Book, address lists for selecting '
                   'message recipients, and the Browse Groups dialog for searching groups; otherwise, false. Default '
                   'value is false. Returned only on $select.', arg_group='Group')
        c.argument('hide_from_outlook_clients', arg_type=get_three_state_flag(), help='True if the group is not '
                   'displayed in Outlook clients, such as Outlook for Windows and Outlook on the web; otherwise, '
                   'false. Default value is false. Returned only on $select.', arg_group='Group')
        c.argument('is_subscribed_by_mail', arg_type=get_three_state_flag(), help='Indicates whether the signed-in '
                   'user is subscribed to receive email conversations. Default value is true. Returned only on '
                   '$select.', arg_group='Group')
        c.argument('unseen_count', type=int, help='Count of conversations that have received new posts since the '
                   'signed-in user last visited the group. Returned only on $select.', arg_group='Group')
        c.argument('group_is_archived', arg_type=get_three_state_flag(), help='', arg_group='Group')
        c.argument('app_role_assignments', action=AddAppRoleAssignments, nargs='+', help='', arg_group='Group')
        c.argument('created_on_behalf_of', action=AddCreatedOnBehalfOf, nargs='+', help='Represents an Azure Active '
                   'Directory object. The directoryObject type is the base type for many other directory entity '
                   'types.', arg_group='Group')
        c.argument('member_of', action=AddMemberOf, nargs='+', help='Groups that this group is a member of. HTTP '
                   'Methods: GET (supported for all groups). Read-only. Nullable.', arg_group='Group')
        c.argument('microsoft_graph_group_members', action=AddMicrosoftGraphGroupMembers, nargs='+', help='Users and '
                   'groups that are members of this group. HTTP Methods: GET (supported for all groups), POST '
                   '(supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE '
                   '(supported for Microsoft 365 groups and security groups). Nullable.', arg_group='Group')
        c.argument('members_with_license_errors', action=AddMembersWithLicenseErrors, nargs='+', help='A list of '
                   'group members with license errors from this group-based license assignment. Read-only.',
                   arg_group='Group')
        c.argument('owners', action=AddOwners, nargs='+', help='The owners of the group. The owners are a set of '
                   'non-admin users who are allowed to modify this object. Limited to 100 owners. HTTP Methods: GET '
                   '(supported for all groups), POST (supported for Microsoft 365 groups, security groups and '
                   'mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security groups). '
                   'Nullable.', arg_group='Group')
        c.argument('settings', type=validate_file_or_dict, help='Read-only. Nullable. Expected value: '
                   'json-string/@json-file.', arg_group='Group')
        c.argument('transitive_member_of', action=AddTransitiveMemberOf, nargs='+', help='', arg_group='Group')
        c.argument('transitive_members', action=AddTransitiveMembers, nargs='+', help='', arg_group='Group')
        c.argument('accepted_senders', action=AddAcceptedSenders, nargs='+', help='The list of users or groups that '
                   'are allowed to create posts or calendar events in this group. If this list is non-empty then '
                   'only users or groups listed here are allowed to post.', arg_group='Group')
        c.argument('calendar', type=validate_file_or_dict, help='calendar Expected value: json-string/@json-file.',
                   arg_group='Group')
        c.argument('calendar_view', type=validate_file_or_dict, help='The calendar view for the calendar. '
                   'Read-only. Expected value: json-string/@json-file.', arg_group='Group')
        c.argument('conversations', type=validate_file_or_dict, help='The group\'s conversations. Expected value: '
                   'json-string/@json-file.', arg_group='Group')
        c.argument('events', type=validate_file_or_dict, help='The group\'s calendar events. Expected value: '
                   'json-string/@json-file.', arg_group='Group')
        c.argument('photo', action=AddGroupsPhoto, nargs='+', help='profilePhoto', arg_group='Group')
        c.argument('photos', action=AddPhotos, nargs='+', help='The profile photos owned by the group. Read-only. '
                   'Nullable.', arg_group='Group')
        c.argument('rejected_senders', action=AddRejectedSenders, nargs='+', help='The list of users or groups that '
                   'are not allowed to create posts or calendar events in this group. Nullable.', arg_group='Group')
        c.argument('threads', type=validate_file_or_dict, help='The group\'s conversation threads. Nullable. '
                   'Expected value: json-string/@json-file.', arg_group='Group')
        c.argument('drive', type=validate_file_or_dict, help='drive Expected value: json-string/@json-file.',
                   arg_group='Group')
        c.argument('drives', type=validate_file_or_dict, help='The group\'s drives. Read-only. Expected value: '
                   'json-string/@json-file.', arg_group='Group')
        c.argument('sites', type=validate_file_or_dict, help='The list of SharePoint sites in this group. Access '
                   'the default site with /sites/root. Expected value: json-string/@json-file.', arg_group='Group')
        c.argument('extensions', action=AddExtensions, nargs='+', help='The collection of open extensions defined '
                   'for the group. Read-only. Nullable.', arg_group='Group')
        c.argument('group_lifecycle_policies', action=AddGroupLifecyclePolicies, nargs='+', help='The collection of '
                   'lifecycle policies for this group. Read-only. Nullable.', arg_group='Group')
        c.argument('planner', type=validate_file_or_dict, help='plannerGroup Expected value: '
                   'json-string/@json-file.', arg_group='Group')
        c.argument('onenote', type=validate_file_or_dict, help='onenote Expected value: json-string/@json-file.',
                   arg_group='Group')
        c.argument('team', type=validate_file_or_dict, help='team Expected value: json-string/@json-file.',
                   arg_group='Group')
        c.argument('id2', type=str, help='Read-only.', arg_group='Schedule')
        c.argument('enabled', arg_type=get_three_state_flag(), help='Indicates whether the schedule is enabled for '
                   'the team. Required.', arg_group='Schedule')
        c.argument('offer_shift_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether offer '
                   'shift requests are enabled for the schedule.', arg_group='Schedule')
        c.argument('open_shifts_enabled', arg_type=get_three_state_flag(), help='Indicates whether open shifts are '
                   'enabled for the schedule.', arg_group='Schedule')
        c.argument('provision_status', arg_type=get_enum_type(['NotStarted', 'Running', 'Completed', 'Failed']),
                   help='', arg_group='Schedule')
        c.argument('provision_status_code', type=str, help='Additional information about why schedule provisioning '
                   'failed.', arg_group='Schedule')
        c.argument('swap_shifts_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether swap '
                   'shifts requests are enabled for the schedule.', arg_group='Schedule')
        c.argument('time_clock_enabled', arg_type=get_three_state_flag(), help='Indicates whether time clock is '
                   'enabled for the schedule.', arg_group='Schedule')
        c.argument('time_off_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether time off '
                   'requests are enabled for the schedule.', arg_group='Schedule')
        c.argument('time_zone', type=str, help='Indicates the time zone of the schedule team using tz database '
                   'format. Required.', arg_group='Schedule')
        c.argument('workforce_integration_ids', nargs='+', help='', arg_group='Schedule')
        c.argument('offer_shift_requests', action=AddOfferShiftRequests, nargs='+', help='', arg_group='Schedule')
        c.argument('open_shift_change_requests', action=AddOpenShiftChangeRequests, nargs='+', help='',
                   arg_group='Schedule')
        c.argument('open_shifts', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.',
                   arg_group='Schedule')
        c.argument('scheduling_groups', action=AddSchedulingGroups, nargs='+', help='The logical grouping of users '
                   'in the schedule (usually by role).', arg_group='Schedule')
        c.argument('shifts', type=validate_file_or_dict, help='The shifts in the schedule. Expected value: '
                   'json-string/@json-file.', arg_group='Schedule')
        c.argument('swap_shifts_change_requests', action=AddSwapShiftsChangeRequests, nargs='+', help='',
                   arg_group='Schedule')
        c.argument('time_off_reasons', action=AddTimeOffReasons, nargs='+', help='The set of reasons for a time off '
                   'in the schedule.', arg_group='Schedule')
        c.argument('time_off_requests', action=AddTimeOffRequests, nargs='+', help='', arg_group='Schedule')
        c.argument('times_off', type=validate_file_or_dict, help='The instances of times off in the schedule. '
                   'Expected value: json-string/@json-file.', arg_group='Schedule')
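    # Example (hypothetical, placeholder IDs). Most of the Group/Schedule arguments above mirror
    # read-only Graph properties; a minimal creation only needs the team basics:
    #   az teams user create-joined-team --user-id <user-guid> --display-name 'Retail Team' \
    #       --description 'Store 42' --visibility private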
    with self.argument_context('teams user delete-joined-team') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('teams user list-joined-team') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams user show-joined-team') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('teams user update-joined-team') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('team_id', type=str, help='key: id of team')
        c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
        c.argument('classification', type=str, help='An optional label. Typically describes the data or business '
                   'sensitivity of the team. Must match one of a pre-configured set in the tenant\'s directory.')
        c.argument('description', type=str, help='An optional description for the team.')
        c.argument('display_name', type=str, help='The name of the team.')
        c.argument('fun_settings', action=AddFunSettings, nargs='+', help='teamFunSettings')
        c.argument('guest_settings', action=AddGuestSettings, nargs='+', help='teamGuestSettings')
        c.argument('internal_id', type=str, help='A unique ID for the team that has been used in a few places such '
                   'as the audit log/Office 365 Management Activity API.')
        c.argument('is_archived', arg_type=get_three_state_flag(), help='Whether this team is in read-only mode.')
        c.argument('member_settings', action=AddMemberSettings, nargs='+', help='teamMemberSettings')
        c.argument('messaging_settings', action=AddMessagingSettings, nargs='+', help='teamMessagingSettings')
        c.argument('specialization', arg_type=get_enum_type(['none', 'educationStandard', 'educationClass',
                   'educationProfessionalLearningCommunity', 'educationStaff', 'healthcareStandard',
                   'healthcareCareCoordination', 'unknownFutureValue']), help='')
        c.argument('visibility', arg_type=get_enum_type(['private', 'public', 'hiddenMembership',
                   'unknownFutureValue']), help='')
        c.argument('web_url', type=str, help='A hyperlink that will go to the team in the Microsoft Teams client. '
                   'This is the URL that you get when you right-click a team in the Microsoft Teams client and '
                   'select Get link to team. This URL should be treated as an opaque blob, and not parsed.')
        c.argument('channels', type=validate_file_or_dict, help='The collection of channels & messages associated '
                   'with the team. Expected value: json-string/@json-file.')
        c.argument('installed_apps', type=validate_file_or_dict, help='The apps installed in this team. Expected '
                   'value: json-string/@json-file.')
        c.argument('members', action=AddGroupsMembers, nargs='+', help='Members and owners of the team.')
        c.argument('operations', type=validate_file_or_dict, help='The async operations that ran or are running on '
                   'this team. Expected value: json-string/@json-file.')
        c.argument('primary_channel', type=validate_file_or_dict, help='channel Expected value: '
                   'json-string/@json-file.')
        c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Template')
        c.argument('id1', type=str, help='Read-only.', arg_group='Group')
        c.argument('deleted_date_time', help='', arg_group='Group')
        c.argument('assigned_labels', action=AddAssignedLabels, nargs='+', help='The list of sensitivity label pairs '
                   '(label ID, label name) associated with a Microsoft 365 group. Returned only on $select. '
                   'Read-only.', arg_group='Group')
        c.argument('assigned_licenses', action=AddAssignedLicenses, nargs='+', help='The licenses that are assigned '
                   'to the group. Returned only on $select. Read-only.', arg_group='Group')
        c.argument('microsoft_graph_group_classification', type=str, help='Describes a classification for the group '
                   '(such as low, medium or high business impact). Valid values for this property are defined by '
                   'creating a ClassificationList setting value, based on the template definition. Returned by '
                   'default.', arg_group='Group')
        c.argument('created_date_time', help='Timestamp of when the group was created. The value cannot be modified '
                   'and is automatically populated when the group is created. The Timestamp type represents date and '
                   'time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on '
                   'Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.',
                   arg_group='Group')
        c.argument('microsoft_graph_group_description', type=str, help='An optional description for the group. '
                   'Returned by default.', arg_group='Group')
        c.argument('microsoft_graph_group_display_name', type=str, help='The display name for the group. This '
                   'property is required when a group is created and cannot be cleared during updates. Returned by '
                   'default. Supports $filter and $orderby.', arg_group='Group')
        c.argument('expiration_date_time', help='Timestamp of when the group is set to expire. The value cannot be '
                   'modified and is automatically populated when the group is created. The Timestamp type represents '
                   'date and time information using ISO 8601 format and is always in UTC time. For example, midnight '
                   'UTC on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. '
                   'Read-only.', arg_group='Group')
        c.argument('group_types', nargs='+', help='Specifies the group type and its membership. If the collection '
                   'contains Unified, the group is a Microsoft 365 group; otherwise, it\'s either a security group '
                   'or distribution group. For details, see groups overview. If the collection includes '
                   'DynamicMembership, the group has dynamic membership; otherwise, membership is static. Returned '
                   'by default. Supports $filter.', arg_group='Group')
        c.argument('has_members_with_license_errors', arg_type=get_three_state_flag(), help='Indicates whether there '
                   'are members in this group that have license errors from its group-based license assignment. This '
                   'property is never returned on a GET operation. You can use it as a $filter argument to get '
                   'groups that have members with license errors (that is, filter for this property being true). See '
                   'an example.', arg_group='Group')
        c.argument('license_processing_state', action=AddLicenseProcessingState, nargs='+',
                   help='licenseProcessingState', arg_group='Group')
        c.argument('mail', type=str, help='The SMTP address for the group, for example, '
                   '\'serviceadmins@contoso.onmicrosoft.com\'. Returned by default. Read-only. Supports $filter.',
                   arg_group='Group')
        c.argument('mail_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is '
                   'mail-enabled. Returned by default.', arg_group='Group')
        c.argument('mail_nickname', type=str, help='The mail alias for the group, unique in the organization. This '
                   'property must be specified when a group is created. Returned by default. Supports $filter.',
                   arg_group='Group')
        c.argument('membership_rule', type=str, help='The rule that determines members for this group if the group '
                   'is a dynamic group (groupTypes contains DynamicMembership). For more information about the '
                   'syntax of the membership rule, see Membership Rules syntax. Returned by default.',
                   arg_group='Group')
        c.argument('membership_rule_processing_state', type=str, help='Indicates whether the dynamic membership '
                   'processing is on or paused. Possible values are \'On\' or \'Paused\'. Returned by default.',
                   arg_group='Group')
        c.argument('on_premises_domain_name', type=str, help='Contains the on-premises domain FQDN, also called '
                   'dnsDomainName, synchronized from the on-premises directory. The property is only populated for '
                   'customers who are synchronizing their on-premises directory to Azure Active Directory via Azure '
                   'AD Connect. Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_last_sync_date_time', help='Indicates the last time at which the group was synced '
                   'with the on-premises directory. The Timestamp type represents date and time information using '
                   'ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look '
                   'like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only. Supports $filter.',
                   arg_group='Group')
        c.argument('on_premises_net_bios_name', type=str, help='Contains the on-premises netBios name synchronized '
                   'from the on-premises directory. The property is only populated for customers who are '
                   'synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect. '
                   'Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_provisioning_errors', action=AddOnPremisesProvisioningErrors, nargs='+',
                   help='Errors when using Microsoft synchronization product during provisioning. Returned by '
                   'default.', arg_group='Group')
        c.argument('on_premises_sam_account_name', type=str, help='Contains the on-premises SAM account name '
                   'synchronized from the on-premises directory. The property is only populated for customers who '
                   'are synchronizing their on-premises directory to Azure Active Directory via Azure AD Connect. '
                   'Returned by default. Read-only.', arg_group='Group')
        c.argument('on_premises_security_identifier', type=str, help='Contains the on-premises security identifier '
                   '(SID) for the group that was synchronized from on-premises to the cloud. Returned by default. '
                   'Read-only.', arg_group='Group')
        c.argument('on_premises_sync_enabled', arg_type=get_three_state_flag(), help='true if this group is synced '
                   'from an on-premises directory; false if this group was originally synced from an on-premises '
                   'directory but is no longer synced; null if this object has never been synced from an on-premises '
                   'directory (default). Returned by default. Read-only. Supports $filter.', arg_group='Group')
        c.argument('preferred_data_location', type=str, help='The preferred data location for the group. For more '
                   'information, see OneDrive Online Multi-Geo. Returned by default.', arg_group='Group')
        c.argument('preferred_language', type=str, help='The preferred language for a Microsoft 365 group. Should '
                   'follow ISO 639-1 Code; for example \'en-US\'. Returned by default.', arg_group='Group')
        c.argument('proxy_addresses', nargs='+', help='Email addresses for the group that direct to the same group '
                   'mailbox. For example: [\'SMTP: bob@contoso.com\', \'smtp: bob@sales.contoso.com\']. The any '
                   'operator is required to filter expressions on multi-valued properties. Returned by default. '
                   'Read-only. Not nullable. Supports $filter.', arg_group='Group')
        c.argument('renewed_date_time', help='Timestamp of when the group was last renewed. This cannot be modified '
                   'directly and is only updated via the renew service action. The Timestamp type represents date '
                   'and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC '
                   'on Jan 1, 2014 would look like this: \'2014-01-01T00:00:00Z\'. Returned by default. Read-only.',
                   arg_group='Group')
        c.argument('security_enabled', arg_type=get_three_state_flag(), help='Specifies whether the group is a '
                   'security group. Returned by default. Supports $filter.', arg_group='Group')
        c.argument('security_identifier', type=str, help='Security identifier of the group, used in Windows '
                   'scenarios. Returned by default.', arg_group='Group')
' 'Returned by default.', arg_group='Group') c.argument('theme', type=str, help='Specifies an Microsoft 365 group\'s color theme. Possible values are Teal, ' 'Purple, Green, Blue, Pink, Orange or Red. Returned by default.', arg_group='Group') c.argument('microsoft_graph_group_visibility', type=str, help='Specifies the visibility of a Microsoft 365 ' 'group. Possible values are: Private, Public, or Hiddenmembership; blank values are treated as ' 'public. See group visibility options to learn more.Visibility can be set only when a group is ' 'created; it is not editable.Visibility is supported only for unified groups; it is not supported ' 'for security groups. Returned by default.', arg_group='Group') c.argument('allow_external_senders', arg_type=get_three_state_flag(), help='Indicates if people external to ' 'the organization can send messages to the group. Default value is false. Returned only on $select.', arg_group='Group') c.argument('auto_subscribe_new_members', arg_type=get_three_state_flag(), help='Indicates if new members added ' 'to the group will be auto-subscribed to receive email notifications. You can set this property in ' 'a PATCH request for the group; do not set it in the initial POST request that creates the group. ' 'Default value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_address_lists', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in certain parts of the Outlook UI: the Address Book, address lists for selecting ' 'message recipients, and the Browse Groups dialog for searching groups; otherwise, false. Default ' 'value is false. Returned only on $select.', arg_group='Group') c.argument('hide_from_outlook_clients', arg_type=get_three_state_flag(), help='True if the group is not ' 'displayed in Outlook clients, such as Outlook for Windows and Outlook on the web; otherwise, ' 'false. Default value is false. Returned only on $select.', arg_group='Group') c.argument('is_subscribed_by_mail', arg_type=get_three_state_flag(), help='Indicates whether the signed-in ' 'user is subscribed to receive email conversations. Default value is true. Returned only on ' '$select.', arg_group='Group') c.argument('unseen_count', type=int, help='Count of conversations that have received new posts since the ' 'signed-in user last visited the group. Returned only on $select.', arg_group='Group') c.argument('group_is_archived', arg_type=get_three_state_flag(), help='', arg_group='Group') c.argument('app_role_assignments', action=AddAppRoleAssignments, nargs='+', help='', arg_group='Group') c.argument('created_on_behalf_of', action=AddCreatedOnBehalfOf, nargs='+', help='Represents an Azure Active ' 'Directory object. The directoryObject type is the base type for many other directory entity types.', arg_group='Group') c.argument('member_of', action=AddMemberOf, nargs='+', help='Groups that this group is a member of. HTTP ' 'Methods: GET (supported for all groups). Read-only. Nullable.', arg_group='Group') c.argument('microsoft_graph_group_members', action=AddMicrosoftGraphGroupMembers, nargs='+', help='Users and ' 'groups that are members of this group. 
HTTP Methods: GET (supported for all groups), POST ' '(supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE ' '(supported for Microsoft 365 groups and security groups) Nullable.', arg_group='Group') c.argument('members_with_license_errors', action=AddMembersWithLicenseErrors, nargs='+', help='A list of group ' 'members with license errors from this group-based license assignment. Read-only.', arg_group='Group') c.argument('owners', action=AddOwners, nargs='+', help='The owners of the group. The owners are a set of ' 'non-admin users who are allowed to modify this object. Limited to 100 owners. HTTP Methods: GET ' '(supported for all groups), POST (supported for Microsoft 365 groups, security groups and ' 'mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security groups). ' 'Nullable.', arg_group='Group') c.argument('settings', type=validate_file_or_dict, help='Read-only. Nullable. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('transitive_member_of', action=AddTransitiveMemberOf, nargs='+', help='', arg_group='Group') c.argument('transitive_members', action=AddTransitiveMembers, nargs='+', help='', arg_group='Group') c.argument('accepted_senders', action=AddAcceptedSenders, nargs='+', help='The list of users or groups that ' 'are allowed to create post\'s or calendar events in this group. If this list is non-empty then ' 'only users or groups listed here are allowed to post.', arg_group='Group') c.argument('calendar', type=validate_file_or_dict, help='calendar Expected value: json-string/@json-file.', arg_group='Group') c.argument('calendar_view', type=validate_file_or_dict, help='The calendar view for the calendar. Read-only. ' 'Expected value: json-string/@json-file.', arg_group='Group') c.argument('conversations', type=validate_file_or_dict, help='The group\'s conversations. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('events', type=validate_file_or_dict, help='The group\'s calendar events. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('photo', action=AddGroupsPhoto, nargs='+', help='profilePhoto', arg_group='Group') c.argument('photos', action=AddPhotos, nargs='+', help='The profile photos owned by the group. Read-only. ' 'Nullable.', arg_group='Group') c.argument('rejected_senders', action=AddRejectedSenders, nargs='+', help='The list of users or groups that ' 'are not allowed to create posts or calendar events in this group. Nullable', arg_group='Group') c.argument('threads', type=validate_file_or_dict, help='The group\'s conversation threads. Nullable. Expected ' 'value: json-string/@json-file.', arg_group='Group') c.argument('drive', type=validate_file_or_dict, help='drive Expected value: json-string/@json-file.', arg_group='Group') c.argument('drives', type=validate_file_or_dict, help='The group\'s drives. Read-only. Expected value: ' 'json-string/@json-file.', arg_group='Group') c.argument('sites', type=validate_file_or_dict, help='The list of SharePoint sites in this group. Access the ' 'default site with /sites/root. Expected value: json-string/@json-file.', arg_group='Group') c.argument('extensions', action=AddExtensions, nargs='+', help='The collection of open extensions defined for ' 'the group. Read-only. Nullable.', arg_group='Group') c.argument('group_lifecycle_policies', action=AddGroupLifecyclePolicies, nargs='+', help='The collection of ' 'lifecycle policies for this group. Read-only. 
Nullable.', arg_group='Group') c.argument('planner', type=validate_file_or_dict, help='plannerGroup Expected value: json-string/@json-file.', arg_group='Group') c.argument('onenote', type=validate_file_or_dict, help='onenote Expected value: json-string/@json-file.', arg_group='Group') c.argument('team', type=validate_file_or_dict, help='team Expected value: json-string/@json-file.', arg_group='Group') c.argument('id2', type=str, help='Read-only.', arg_group='Schedule') c.argument('enabled', arg_type=get_three_state_flag(), help='Indicates whether the schedule is enabled for the ' 'team. Required.', arg_group='Schedule') c.argument('offer_shift_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether offer ' 'shift requests are enabled for the schedule.', arg_group='Schedule') c.argument('open_shifts_enabled', arg_type=get_three_state_flag(), help='Indicates whether open shifts are ' 'enabled for the schedule.', arg_group='Schedule') c.argument('provision_status', arg_type=get_enum_type(['NotStarted', 'Running', 'Completed', 'Failed']), help='', arg_group='Schedule') c.argument('provision_status_code', type=str, help='Additional information about why schedule provisioning ' 'failed.', arg_group='Schedule') c.argument('swap_shifts_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether swap ' 'shifts requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_clock_enabled', arg_type=get_three_state_flag(), help='Indicates whether time clock is ' 'enabled for the schedule.', arg_group='Schedule') c.argument('time_off_requests_enabled', arg_type=get_three_state_flag(), help='Indicates whether time off ' 'requests are enabled for the schedule.', arg_group='Schedule') c.argument('time_zone', type=str, help='Indicates the time zone of the schedule team using tz database format. ' 'Required.', arg_group='Schedule') c.argument('workforce_integration_ids', nargs='+', help='', arg_group='Schedule') c.argument('offer_shift_requests', action=AddOfferShiftRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shift_change_requests', action=AddOpenShiftChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('open_shifts', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.', arg_group='Schedule') c.argument('scheduling_groups', action=AddSchedulingGroups, nargs='+', help='The logical grouping of users in ' 'the schedule (usually by role).', arg_group='Schedule') c.argument('shifts', type=validate_file_or_dict, help='The shifts in the schedule. Expected value: ' 'json-string/@json-file.', arg_group='Schedule') c.argument('swap_shifts_change_requests', action=AddSwapShiftsChangeRequests, nargs='+', help='', arg_group='Schedule') c.argument('time_off_reasons', action=AddTimeOffReasons, nargs='+', help='The set of reasons for a time off in ' 'the schedule.', arg_group='Schedule') c.argument('time_off_requests', action=AddTimeOffRequests, nargs='+', help='', arg_group='Schedule') c.argument('times_off', type=validate_file_or_dict, help='The instances of times off in the schedule. Expected ' 'value: json-string/@json-file.', arg_group='Schedule')
83.198153
120
0.646689
35,831
279,213
4.917083
0.02509
0.092307
0.048262
0.033425
0.990652
0.990124
0.98908
0.987133
0.986332
0.985691
0
0.0077
0.226956
279,213
3,355
121
83.222951
0.808519
0.001812
0
0.913951
0
0.024359
0.488821
0.060679
0
0
0
0
0
1
0.000316
false
0
0.00348
0
0.003796
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f78a16bc9faad8fd388756f80a205426cf046820
167
py
Python
Udacity/aws-ml-ud090/sw-part-2/test_nearest_square.py
mohammedelzanaty/myRoad2BeFullStack
eea3a5edb6c6a999136b04fdaea6ce0c81137a58
[ "MIT" ]
2
2021-04-21T12:05:01.000Z
2022-01-19T09:58:38.000Z
Udacity/aws-ml-ud090/sw-part-2/test_nearest_square.py
mohammedelzanaty/myRoad2BeFullStack
eea3a5edb6c6a999136b04fdaea6ce0c81137a58
[ "MIT" ]
34
2019-12-26T11:21:42.000Z
2022-02-27T19:55:10.000Z
Udacity/aws-ml-ud090/sw-part-2/test_nearest_square.py
mohammedelzanaty/myRoad2BeFullStack
eea3a5edb6c6a999136b04fdaea6ce0c81137a58
[ "MIT" ]
2
2021-08-15T07:59:36.000Z
2022-01-16T06:17:32.000Z
from nearest import nearest_square

def test_nearest_square_5():
    assert nearest_square(5) == 4

def test_nearest_square_12():
    assert nearest_square(12) == 9
23.857143
35
0.748503
25
167
4.64
0.44
0.560345
0.241379
0.344828
0
0
0
0
0
0
0
0.055944
0.143713
167
7
36
23.857143
0.755245
0
0
0
0
0
0
0
0
0
0
0
0.4
1
0.4
true
0
0.2
0
0.6
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
8
f78d8b1f1461fc3ac037b0d1deb3dc40e655c301
138
py
Python
modules/generator/__init__.py
vliu15/tts-gan
6246c584a83f67dedaa25155c3b1491b99658319
[ "MIT" ]
12
2021-02-17T23:37:52.000Z
2021-09-05T08:24:58.000Z
modules/generator/__init__.py
vliu15/tts-gan
6246c584a83f67dedaa25155c3b1491b99658319
[ "MIT" ]
null
null
null
modules/generator/__init__.py
vliu15/tts-gan
6246c584a83f67dedaa25155c3b1491b99658319
[ "MIT" ]
2
2021-04-27T12:41:58.000Z
2021-08-18T08:31:32.000Z
from modules.generator.aligner import Aligner
from modules.generator.decoder import Decoder
from modules.generator.encoder import Encoder
34.5
45
0.869565
18
138
6.666667
0.388889
0.275
0.5
0
0
0
0
0
0
0
0
0
0.086957
138
3
46
46
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
e3bda4d01f8393bd9b549eac1e176cb85827edac
34
py
Python
tests/legacy_unittest/sample/test_score02/print_func/__init__.py
bayeshack2016/icon-service
36cab484d2e41548d7f2f74526f127ee3a4423fc
[ "Apache-2.0" ]
52
2018-08-24T02:28:43.000Z
2021-07-06T04:44:22.000Z
tests/legacy_unittest/sample/test_score02/print_func/__init__.py
bayeshack2016/icon-service
36cab484d2e41548d7f2f74526f127ee3a4423fc
[ "Apache-2.0" ]
62
2018-09-17T06:59:16.000Z
2021-12-15T06:02:51.000Z
tests/legacy_unittest/sample/test_score02/print_func/__init__.py
bayeshack2016/icon-service
36cab484d2e41548d7f2f74526f127ee3a4423fc
[ "Apache-2.0" ]
35
2018-09-14T02:42:10.000Z
2022-02-05T10:34:46.000Z
from .print_func import func_test
17
33
0.852941
6
34
4.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
1
0
7
540b2e5972bbc4abfd75ac02d479d7c1562e1d11
1,597
py
Python
euler008.py
TheSkeward/project_euler
1454f3181642a6f604ddb1f52574181b9d0c5dbf
[ "Unlicense" ]
null
null
null
euler008.py
TheSkeward/project_euler
1454f3181642a6f604ddb1f52574181b9d0c5dbf
[ "Unlicense" ]
null
null
null
euler008.py
TheSkeward/project_euler
1454f3181642a6f604ddb1f52574181b9d0c5dbf
[ "Unlicense" ]
null
null
null
"""Project Euler problem 8""" import math def digit_product(string): """Returns the product of the digits in the string""" return math.prod(list(map(int, list(string)))) def calculate(digits): """Returns the value of the product of the given number of adjacent digits in SERIES that have the greatest product""" products = [] for index in range(len(SERIES)): end_digit = index + digits products.append(digit_product(SERIES[index:end_digit])) answer = max(products) return answer SERIES = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450" if __name__ == "__main__": print(calculate(13))
66.541667
1,011
0.879148
82
1,597
16.97561
0.536585
0.010776
0.017241
0.021552
0
0
0
0
0
0
0
0.681849
0.078898
1,597
23
1,012
69.434783
0.264446
0.115216
0
0
0
0
0.723618
0.717875
0
1
0
0
0
1
0.153846
false
0
0.076923
0
0.384615
0.076923
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
7
541f4c1026e965876ebb7bb3f9f15ae06a9830e2
87,769
py
Python
include/runner_untrained.py
MLI-lab/Robustness-CS
8ef26795ffd02824a2cf0f9496887554484a8b08
[ "Apache-2.0" ]
18
2021-05-16T21:50:58.000Z
2021-12-23T14:52:02.000Z
include/runner_untrained.py
MLI-lab/Robustness-CS
8ef26795ffd02824a2cf0f9496887554484a8b08
[ "Apache-2.0" ]
null
null
null
include/runner_untrained.py
MLI-lab/Robustness-CS
8ef26795ffd02824a2cf0f9496887554484a8b08
[ "Apache-2.0" ]
3
2021-04-08T06:47:32.000Z
2021-10-15T12:22:03.000Z
from torch.autograd import Variable import torch import torch.optim import time import copy import pickle import random import numpy as np from scipy.linalg import hadamard from skimage.metrics import structural_similarity as ssim from .helpers import * from .mri_helpers import * from .transforms import * from .decoder_conv import * dtype = torch.cuda.FloatTensor #dtype = torch.FloatTensor def exp_lr_scheduler(optimizer, epoch, init_lr=0.001, lr_decay_epoch=500): """Decay learning rate by a factor of 0.1 every lr_decay_epoch epochs.""" lr = init_lr * (0.65**(epoch // lr_decay_epoch)) if epoch % lr_decay_epoch == 0: print('LR is set to {}'.format(lr)) for param_group in optimizer.param_groups: param_group['lr'] = lr return optimizer class MSLELoss(torch.nn.Module): def __init__(self): super(MSLELoss,self).__init__() def forward(self,x,y): criterion = nn.MSELoss() loss = torch.log(criterion(x, y)) return loss class rMSELoss(torch.nn.Module): def __init__(self): super(rMSELoss,self).__init__() def forward(self,r,x,y,lam): criterion = nn.MSELoss() loss = -criterion(x,y) + lam*torch.norm(r) # 0.01 is the regularizer parameter return loss class MyLoss(torch.nn.Module): #def __init__(self): # super(MyLoss,self).__init__() def forward(self,r,y,yr,H,lam): loss = -(torch.norm(y-yr)**2) /np.prod(H.data.cpu().numpy().shape) + lam * (torch.norm(r)**2) return loss def get_derivs(self,y,yr,yd,r,H,lam): # y : reconstruction from clean k-space # yr : reconstruction from perturbed k-space # yd : reconstruction from slightly perturbed version of the "perturbed" kspace (for numerical derivation) # r : perturbation # H : slight perturbation for computing numerical derivatives #grad1 = 1/np.prod(y.data.cpu().numpy().shape) grad1 = 1/np.prod(H.data.cpu().numpy().shape) grad1 *= (torch.norm(y-yd)**2 - torch.norm(y-yr)**2) / (2*H) grad2 = lam*r print("\ngrad norms:",torch.norm(grad1),torch.norm(grad2)) grad = -grad1 + grad2 #self.grad = grad #del(y,yr,yd,r,H,grad1,grad2,grad) #torch.cuda.empty_cache() return grad def get_scale_factor(net,num_channels,in_size,ksp_tt,ni=None,scale_out=1,scale_type="norm"): ### get norm of deep decoder output # get net input, scaling of that is irrelevant if ni is None: shape = [1,num_channels, in_size[0], in_size[1]] ni = Variable(torch.zeros(shape)).type(dtype) ni.data.uniform_() # generate random image try: out_chs = net( ni.type(dtype),scale_out=scale_out ).data.cpu().numpy()[0] except: out_chs = net( ni.type(dtype) ).data.cpu().numpy()[0] out_imgs = channels2imgs(out_chs) out_img_tt = transform.root_sum_of_squares( torch.tensor(out_imgs) , dim=0) ### get norm of least-squares reconstruction orig_tt = transform.ifft2(ksp_tt) # Apply Inverse Fourier Transform to get the complex image orig_imgs_tt = transform.complex_abs(orig_tt) # Compute absolute value to get a real image orig_img_tt = transform.root_sum_of_squares(orig_imgs_tt, dim=0) orig_img_np = orig_img_tt.cpu().numpy() if scale_type == "norm": s = np.linalg.norm(out_img_tt) / np.linalg.norm(orig_img_np) if scale_type == "mean": s = (out_img_tt.mean() / orig_img_np.mean()).numpy()[np.newaxis][0] return s,ni def data_consistency(parnet, parni, mask1d, slice_ksp_torchtensor1): img = parnet(parni.type(dtype)) s = img.shape ns = int(s[1]/2) # number of slices fimg = Variable( torch.zeros( (s[0],ns,s[2],s[3],2 ) ) ).type(dtype) for i in range(ns): fimg[0,i,:,:,0] = img[0,2*i,:,:] fimg[0,i,:,:,1] = img[0,2*i+1,:,:] Fimg = transform.fft2(fimg) # dim: (1,num_slices,x,y,2) # ksp has dim: (num_slices,x,y) meas = 
slice_ksp_torchtensor1.unsqueeze(0) # dim: (1,num_slices,x,y,2) mask = torch.from_numpy(np.array(mask1d, dtype=np.uint8)) ksp_dc = Fimg.clone() ksp_dc = ksp_dc.detach().cpu() ksp_dc[:,:,:,mask==1,:] = meas[:,:,:,mask==1,:] # after data consistency block img_dc = transform.ifft2(ksp_dc)[0] out = [] for img in img_dc.detach().cpu(): out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] par_out_chs = np.array(out) par_out_imgs = channels2imgs(par_out_chs) # deep decoder reconstruction prec = root_sum_of_squares2(par_out_imgs) if prec.shape[0] > 320: prec = crop_center2(prec,320,320) return prec def myrunner_untrained( ksp, num_iter = 20, num_iter_inner = 10000, LR = 0.01, lam = 0.1, eps = 1e2, OPTIMIZER='adam', mask = None, mask1d = None, mask2d = None, lr_decay_epoch = 0, weight_decay=0, loss_type="MSE", retain_graph = False, find_best = True, ): ################ main optimization steup: perturbation finder ################ shape = ksp.shape print("perturbation shape: ", shape) r = Variable(torch.zeros(shape).cuda(),requires_grad=True).type(dtype) r.data.uniform_() #r.data *= 1/torch.norm(ksp)#1./1e3 r.data *= torch.norm(ksp)/torch.norm(r) r = r.type(dtype) r.retain_grad() r_saved = r.data.clone() #### r.requires_grad = True optimizer = torch.optim.SGD([r], lr=LR,momentum=0.9,weight_decay=weight_decay) #### loss_ = np.zeros(num_iter) #loss = MyLoss() loss = rMSELoss() ################ ################ ################ sub optimization: fitting ConvDecoder (or any untrained network) ################ num_channels = 160 #256 num_layers = 8 strides = [1]*(num_layers-1) in_size = [8,4] kernel_size = 3 output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) pert_recs = [] 
R = [] for i in range(num_iter): ### prepare input for ConvDecoder # create the network net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("\n***fit ConvDecoder at i = {}***".format(i)) print("norms:",torch.norm(r),torch.norm(ksp)) inp = r + ksp.type(dtype) scaling_factor,_ = get_scale_factor(net, num_channels, in_size, inp.data, ni=net_input) slice_ksp_torchtensor1 = inp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask.type(dtype)) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) pert_rec = torch.from_numpy( data_consistency(pert_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_rec, mean, std = transform.normalize_instance(pert_rec, eps=1e-11) pert_rec = pert_rec.clamp(-6, 6) pert_recs.append(pert_rec.data.cpu().numpy()) def closure(): optimizer.zero_grad() loss__ = loss(r,fixed_rec.type(dtype),pert_rec.type(dtype),lam) loss__.backward(retain_graph=retain_graph) loss_[i] = loss__.data.cpu().numpy() if i % 1 == 0: print ('Iteration %05d loss %f' % (i, loss_[i]), '\r', end='') return loss__ #print("\n{}\n".format(r.requires_grad)) R.append(r.data.cpu()) loss__ = optimizer.step(closure) ### new network for computing derivatives """print("\n***fit ConvDecoder at i = {} for derivatives***".format(i)) net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon shape = inp.shape #H = Variable(torch.zeros(shape)).type(dtype) #H.data.uniform_(0.1,0.2) H /= torch.norm(H) H *= torch.norm(inp) / eps print(H.shape,unders_measurement.shape) unders_meas = unders_measurement.clone() unders_meas += H scale_out,sover,pover,par_mse_n, par_mse_t, parni, der_net = fitr(net, unders_meas.type(dtype), Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam", retain_graph=True, ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) der_rec = torch.from_numpy( data_consistency(der_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) der_rec, mean, std = 
transform.normalize_instance(der_rec, eps=1e-11) der_rec = der_rec.clamp(-6, 6) ### compute loss and perform the optimization step loss_[i] = loss(r,fixed_rec,pert_rec,H,lam) grad = loss.get_derivs(fixed_rec,pert_rec,der_rec,r,H,lam) r -= LR*grad print("\nloss at iteration{}:".format(i),loss_[i]) R.append(r.data.cpu()) print(2*"\n")""" #loss = optimizer.step(closure) with open("./outputs/untrainedrunner_test/results","wb") as fn: pickle.dump([R,ksp,loss_,fixed_rec.data.cpu().numpy(),pert_recs],fn) return R,net_input, loss_, fixed_rec.data.cpu().numpy(), pert_recs def myrunner_untrained_test3( ksp, num_iter = 20, num_iter_inner = 10000, LR = 0.01, lam = 0.1, eps = 1e2, OPTIMIZER='adam', mask = None, mask1d = None, mask2d = None, lr_decay_epoch = 0, weight_decay=0, loss_type="MSE", retain_graph = False, find_best = True, ): ################ main optimization steup: perturbation finder ################ shape = ksp.shape print("perturbation shape: ", shape) r = Variable(torch.zeros(shape)).type(dtype) r.data.uniform_() #r.data *= 1/torch.norm(ksp)#1./1e3 r.data *= torch.norm(ksp)/torch.norm(r) r = r.type(dtype) #r.retain_grad() r_saved = r.data.clone() #### r.requires_grad = True optimizer = torch.optim.SGD([r], lr=LR,momentum=0.9,weight_decay=weight_decay) #### loss_ = np.zeros(num_iter) #loss = MyLoss() loss = rMSELoss() ################ ################ ################ sub optimization: fitting ConvDecoder (or any untrained network) ################ num_channels = 160 #256 num_layers = 8 strides = [1]*(num_layers-1) in_size = [8,4] kernel_size = 3 output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp.type(dtype) * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask.type(dtype)) #unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) unders_measurement = Variable(masked_kspace[None,:]) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].data.cpu().numpy() , img[:,:,1].data.cpu().numpy() ] lsest = torch.tensor(np.array([out])) ######################### optimization for convdecoder img_noisy_var = unders_measurement.clone() img_clean_var = Variable(lsest).type(dtype) p = [x for x in net.parameters()] optimizer2 = torch.optim.Adam(p, lr=0.1,weight_decay=weight_decay) mse = torch.nn.MSELoss() mse_wrt_noisy = np.zeros(num_iter_inner) import copy if find_best: best_net = copy.deepcopy(net) best_mse = 1000000.0 SSIMs = [] PSNRs = [] for i in range(num_iter_inner): def closure2(): optimizer2.zero_grad() out = net(net_input.type(dtype)) # training loss losss = mse( forwardm(out,mask2d) , img_noisy_var ) losss.backward(retain_graph=retain_graph) mse_wrt_noisy[i] = 
losss.data.cpu().numpy() # the actual loss true_loss = mse( Variable(out.data, requires_grad=False).type(dtype), img_clean_var.type(dtype) ) if i % 100 == 0: if lsimg is not None: ### compute ssim and psnr ### out_chs = out.data.cpu().numpy()[0] out_imgs = channels2imgs(out_chs) # least squares reconstruciton orig = crop_center2( root_sum_of_squares2(var_to_np(lsimg)) , 320,320) # deep decoder reconstruction rec = crop_center2(root_sum_of_squares2(out_imgs),320,320) ssim_const = ssim(orig,rec,data_range=orig.max()) SSIMs.append(ssim_const) psnr_const = psnr(orig,rec,np.max(orig)) PSNRs.append(psnr_const) ### ### trloss = losss.data true_loss = true_loss.data print ('Iteration %05d Train loss %f Actual loss %f' % (i, trloss,true_loss), '\r', end='') return losss losss = optimizer2.step(closure2) # if training loss improves by at least one percent, we found a new best net lossval = losss.data if best_mse > 1.005*lossval: best_mse = lossval best_net = copy.deepcopy(net) net = best_net #scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, # unders_measurement, # Variable(lsest).type(dtype), # mask2d, # num_iter=num_iter_inner, # LR=0.008, # apply_f = forwardm, # lsimg = lsimg, # find_best=True, # net_input = net_input, # OPTIMIZER = "adam" # ) out_chs = net( net_input.type(dtype) )[0] sh = out_chs.shape chs = int(sh[0]/2) imgs = torch.zeros( (chs,sh[1],sh[2]) ).type(dtype) for q in range(chs): imgs[q] = torch.sqrt( out_chs[2*q]**2 + out_chs[2*q+1]**2 ) fixed_rec = root_sum_of_squares(imgs) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) #fixed_rec = torch.from_numpy( data_consistency(net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) pert_recs = [] R = [] for j in range(num_iter): ### prepare input for ConvDecoder # create the network net3 = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net3.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("\n***fit ConvDecoder at i = {}***".format(j)) print("norms:",torch.norm(r),torch.norm(ksp)) inp = r + ksp.type(dtype) #scaling_factor,_ = get_scale_factor(net3, # num_channels, # in_size, # inp.data, # ni=net_input) slice_ksp_torchtensor1 = inp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask.type(dtype)) #unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) unders_measurement = masked_kspace[None,:] sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].data.cpu().numpy() , img[:,:,1].data.cpu().numpy() ] lsest = torch.tensor(np.array([out])) ######################### optimization for convdecoder img_noisy_var = unders_measurement img_clean_var = Variable(lsest).type(dtype) p3 = [x for x in net3.parameters()] optimizer3 = torch.optim.Adam(p3, lr=0.1,weight_decay=weight_decay) #mse = torch.nn.MSELoss() mse_wrt_noisy = np.zeros(num_iter_inner) import copy if find_best: best_net = copy.deepcopy(net3) best_mse = 1000000.0 SSIMs = [] PSNRs = [] for i in 
range(num_iter_inner): #def closure3(): optimizer3.zero_grad() out = net3(net_input.type(dtype)) # training loss losss = mse( forwardm(out,mask2d) , img_noisy_var ) losss.backward(retain_graph=retain_graph) optimizer3.step() mse_wrt_noisy[i] = losss.data.cpu().numpy() # the actual loss true_loss = mse( Variable(out.data, requires_grad=False).type(dtype), img_clean_var.type(dtype) ) if i % 100 == 0: trloss = losss.data true_loss = true_loss.data print ('Iteration %05d Train loss %f Actual loss %f' % (i, trloss,true_loss), '\r', end='') # return losss #losss = optimizer3.step(closure3) # if training loss improves by at least one percent, we found a new best net lossval = losss.data if best_mse > 1.005*lossval: best_mse = lossval best_net = copy.deepcopy(net3) net3 = best_net #scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, # unders_measurement, ## Variable(lsest).type(dtype), # mask2d, # num_iter=num_iter_inner, # LR=0.008, # apply_f = forwardm, # lsimg = lsimg, # find_best=True, # net_input = net_input, # OPTIMIZER = "adam" # ) out_chs = net3( net_input.type(dtype) )[0] sh = out_chs.shape chs = int(sh[0]/2) imgs = torch.zeros( (chs,sh[1],sh[2]) ).type(dtype) for q in range(chs): imgs[q] = torch.sqrt( out_chs[2*q]**2 + out_chs[2*q+1]**2 ) pert_rec = root_sum_of_squares(imgs) #pert_rec = center_crop(pert_rec,(320,320)) #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) #pert_rec = torch.from_numpy( data_consistency(net3, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_rec, mean, std = transform.normalize_instance(pert_rec, eps=1e-11) pert_rec = pert_rec.clamp(-6, 6) pert_recs.append(pert_rec.data.cpu().numpy()) def closure(): optimizer.zero_grad() loss__ = loss(r,fixed_rec.type(dtype),pert_rec.type(dtype),lam) loss__.backward(retain_graph=retain_graph) loss_[j] = loss__.data.cpu().numpy() if i % 1 == 0: print ('Iteration %05d loss %f' % (j, loss_[j]), '\r', end='') return loss__ #print("\n{}\n".format(r.requires_grad)) R.append(r.data.cpu()) loss__ = optimizer.step(closure) print(r.grad.nonzero()) ### new network for computing derivatives """print("\n***fit ConvDecoder at i = {} for derivatives***".format(i)) net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon shape = inp.shape #H = Variable(torch.zeros(shape)).type(dtype) #H.data.uniform_(0.1,0.2) H /= torch.norm(H) H *= torch.norm(inp) / eps print(H.shape,unders_measurement.shape) unders_meas = unders_measurement.clone() unders_meas += H scale_out,sover,pover,par_mse_n, par_mse_t, parni, der_net = fitr(net, unders_meas.type(dtype), Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam", retain_graph=True, ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) der_rec = torch.from_numpy( data_consistency(der_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) der_rec, mean, std = transform.normalize_instance(der_rec, eps=1e-11) 
der_rec = der_rec.clamp(-6, 6) ### compute loss and perform the optimization step loss_[i] = loss(r,fixed_rec,pert_rec,H,lam) grad = loss.get_derivs(fixed_rec,pert_rec,der_rec,r,H,lam) r -= LR*grad print("\nloss at iteration{}:".format(i),loss_[i]) R.append(r.data.cpu()) print(2*"\n")""" #loss = optimizer.step(closure) with open("./outputs/untrainedrunner_test/results","wb") as fn: pickle.dump([R,ksp,loss_,fixed_rec.data.cpu().numpy(),pert_recs],fn) return R,net_input, loss_, fixed_rec.data.cpu().numpy(), pert_recs def myrunner_untrained_test2(ksp, net=None, num_iter = 20, num_iter_inner = 10000, LR = 0.01, lam = 0.1, eps = 1e2, OPTIMIZER='adam', mask = None, mask1d = None, mask2d = None, lr_decay_epoch = 0, weight_decay=0, loss_type="MSE", retain_graph = False, find_best = True, ): ################ main optimization steup: perturbation finder ################ shape = ksp.shape print("perturbation shape: ", shape) r = Variable(torch.zeros(shape)).type(dtype) r.data.uniform_() r.data *= torch.norm(ksp)/torch.norm(r) r = r.type(dtype) r_saved = r.data.clone() #r.requires_grad = True loss_ = np.zeros(num_iter) loss = MyLoss() ################ ################ """ with open("masks","rb") as fn: [mask,mask1d,mask2d,net_input] = pickle.load(fn) in_size = [4,4] kernel_size = 3 num_channels = 60#128 num_layers = 4#6 strides = [1]*(num_layers-1) output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=True, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) #torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.1, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) print(fixed_rec.shape,ksp.shape) """ ### fixed reconstruction from non-perturbed data masked_kspace, mask = transform.apply_mask(ksp.type(dtype), mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = 
(320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) fixed_rec = net(inp2.type(dtype))[0] pert_recs = [] R = [] for i in range(num_iter): print("perturbation norm:",torch.norm(net_input)) inp = net_input + ksp masked_kspace, mask = transform.apply_mask(inp, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) pert_rec = net(inp2.type(dtype)) pert_recs.append(pert_rec.data.cpu().numpy()[0]) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon shape = inp.shape #H = Variable(torch.zeros(shape)).type(dtype) #H.data.uniform_(0.1,0.2) H = torch.randn(shape).type(dtype)+1e-9 #H = torch.zeros(shape).type(dtype) + 1 H /= torch.norm(H) H *= torch.norm(inp) / eps inp2 = inp + H masked_kspace, mask = transform.apply_mask(inp2, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) der_rec = net(inp2.type(dtype)) ######## inp2 = inp - H masked_kspace, mask = transform.apply_mask(inp2, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) der_rec2 = net(inp2.type(dtype)) ### compute loss and perform the optimization step loss_[i] = loss(net_input,fixed_rec,pert_rec,H,lam) grad = loss.get_derivs(fixed_rec.data.cpu(),der_rec2.data.cpu(),der_rec.data.cpu(),net_input.data.cpu(),H.data.cpu(),lam) print("\nloss at iteration{}:".format(i),loss_[i]) """ num_channels = 100 #256 num_layers = 5 strides = [1]*(num_layers-1) in_size = [4,4] kernel_size = 3 output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network 
for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) pert_recs = [] R = [] indices = [] for i in range(ksp.shape[1]): for j in range(ksp.shape[2]): for k in range(ksp.shape[3]): p = random.random() if p < 0.001: indices.append((0,i,j,k)) print("\n%{} of elements picked for perturbation".format(100*len(indices)/np.prod(ksp.numpy().shape))) for i in range(num_iter): ### prepare input for ConvDecoder # create the network net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("\n***fit ConvDecoder at i = {}***".format(i)) print("norms:",torch.norm(r),torch.norm(ksp)) inp = r + ksp.type(dtype) scaling_factor,_ = get_scale_factor(net, num_channels, in_size, inp.data, ni=net_input) slice_ksp_torchtensor1 = inp.data.cpu() * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) pert_rec = torch.from_numpy( data_consistency(pert_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_rec, mean, std = 
transform.normalize_instance(pert_rec, eps=1e-11) pert_rec = pert_rec.clamp(-6, 6) pert_recs.append(pert_rec.data.cpu().numpy())""" R.append(r.data.cpu()) r -= LR*(-grad.type(dtype)+lam*r) print(2*"\n") #loss = optimizer.step(closure) with open("./outputs/untrainedrunner_test/results","wb") as fn: pickle.dump([R,ksp,loss_,fixed_rec.data.cpu().numpy(),pert_recs,mask,mask1d,mask2d],fn) #del(der_rec,pert_rec,inp,grad) #torch.cuda.empty_cache() return R,net_input, loss_, fixed_rec.data.cpu().numpy(), pert_recs def myrunner_untrained_test(ksp, net=None, num_iter = 20, num_iter_inner = 10000, LR = 0.01, lam = 0.1, eps = 1e2, OPTIMIZER='adam', mask = None, mask1d = None, mask2d = None, lr_decay_epoch = 0, weight_decay=0, loss_type="MSE", retain_graph = False, find_best = True, ): ################ main optimization steup: perturbation finder ################ shape = ksp.shape print("perturbation shape: ", shape) r = Variable(torch.zeros(shape)).type(dtype) r.data.uniform_() #r.data *= 1/torch.norm(ksp)#1./1e3 #indices = [] indices = torch.from_numpy(np.random.rand(shape[0],shape[1],shape[2],shape[3])) indices[indices>0.2] = 0 indices[indices!=0] = 1 r[indices==0] = 0 #for l in range(ksp.shape[0]): # for i in range(ksp.shape[1]): # for j in range(ksp.shape[2]): # for k in range(ksp.shape[3]): # p = random.random() # if p < 0.001: # indices.append((l,i,j,k)) # else: # r[l,i,j,k] = 0 print("\n%{} of elements picked for perturbation".format(100*indices.sum()/np.prod(ksp.numpy().shape))) inds = torch.nonzero(r) r.data *= torch.norm(ksp)/torch.norm(r) r = r.type(dtype) r_saved = r.data.clone() #r.requires_grad = True loss_ = np.zeros(num_iter) loss = MyLoss() ################ ################ with open("masks","rb") as fn: [mask,mask1d,mask2d,net_input] = pickle.load(fn) in_size = [4,4] kernel_size = 3 num_channels = 60#128 num_layers = 4#6 strides = [1]*(num_layers-1) output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=True, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) #torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.1, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = 
torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) print(fixed_rec.shape,ksp.shape) """ ### fixed reconstruction from non-perturbed data masked_kspace, mask = transform.apply_mask(ksp.type(dtype), mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) fixed_rec = net(inp2.type(dtype))[0] pert_recs = [] R = [] """ """for i in range(num_iter): print("perturbation norm:",torch.norm(net_input)) inp = net_input + ksp masked_kspace, mask = transform.apply_mask(inp, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) pert_rec = net(inp2.type(dtype)) pert_recs.append(pert_rec.data.cpu().numpy()[0]) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon shape = inp.shape #H = Variable(torch.zeros(shape)).type(dtype) #H.data.uniform_(0.1,0.2) H = torch.randn(shape).type(dtype)+1e-9 #H = torch.zeros(shape).type(dtype) + 1 H /= torch.norm(H) H *= torch.norm(inp) / eps inp2 = inp + H masked_kspace, mask = transform.apply_mask(inp2, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) der_rec = net(inp2.type(dtype)) ######## inp2 = inp - H masked_kspace, mask = transform.apply_mask(inp2, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) der_rec2 = net(inp2.type(dtype)) ### compute loss and perform the optimization step loss_[i] = loss(net_input,fixed_rec,pert_rec,H,lam) grad = 
loss.get_derivs(fixed_rec.data.cpu(),der_rec2.data.cpu(),der_rec.data.cpu(),net_input.data.cpu(),H.data.cpu(),lam) print("\nloss at iteration{}:".format(i),loss_[i])""" """ num_channels = 100 #256 num_layers = 5 strides = [1]*(num_layers-1) in_size = [4,4] kernel_size = 3 output_depth = ksp.numpy().shape[0]*2 out_size = ksp.numpy().shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = fixed_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) fixed_rec, mean, std = transform.normalize_instance(fixed_rec, eps=1e-11) fixed_rec = fixed_rec.clamp(-6, 6) pert_recs = [] R = [] indices = [] for i in range(ksp.shape[1]): for j in range(ksp.shape[2]): for k in range(ksp.shape[3]): p = random.random() if p < 0.001: indices.append((0,i,j,k)) print("\n%{} of elements picked for perturbation".format(100*len(indices)/np.prod(ksp.numpy().shape))) for i in range(num_iter): ### prepare input for ConvDecoder # create the network net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("\n***fit ConvDecoder at i = {}***".format(i)) print("norms:",torch.norm(r),torch.norm(ksp)) inp = r + ksp.type(dtype) scaling_factor,_ = get_scale_factor(net, num_channels, in_size, inp.data, ni=net_input) slice_ksp_torchtensor1 = inp.data.cpu() * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = 
torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) pert_rec = torch.from_numpy( data_consistency(pert_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_rec, mean, std = transform.normalize_instance(pert_rec, eps=1e-11) pert_rec = pert_rec.clamp(-6, 6) pert_recs.append(pert_rec.data.cpu().numpy())""" pert_recs = [] R = [] for i in range(num_iter): print("perturbation norm:",torch.norm(r)) if i>0: print("grad norm, l2 norm:",torch.norm(grad),lam*torch.norm(r)) """inp = r + ksp.type(dtype) masked_kspace, mask = transform.apply_mask(inp, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) pert_rec = net(inp2.type(dtype))[0] pert_recs.append(pert_rec.data.cpu().numpy()) """ net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=True, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("\n***fit ConvDecoder at i = {}***".format(i)) print("norms:",torch.norm(r),torch.norm(ksp)) inp = r + ksp.type(dtype) scaling_factor,_ = get_scale_factor(net, num_channels, in_size, inp.data, ni=net_input) slice_ksp_torchtensor1 = inp.data.cpu() * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.1, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) pert_rec = torch.from_numpy( data_consistency(pert_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_rec, mean, std = transform.normalize_instance(pert_rec, eps=1e-11) pert_rec = pert_rec.clamp(-6, 6) pert_recs.append(pert_rec.data.cpu().numpy()) ctr = 0 s = time.time() grad = torch.zeros(ksp.shape) #for j in range(ksp.shape[0]): #print( "j={}".format(j) ) # for m in range(ksp.shape[1]): # for n in range(ksp.shape[2]): # for q in range(ksp.shape[3]): # if (j,m,n,q) not 
in indices: # continue for ind in inds: """h = inp.mean()/eps inp2 = inp.clone() inp2[ind[0],ind[1],ind[2],ind[3]] += h masked_kspace, mask = transform.apply_mask(inp2, mask = mask.type(dtype)) sampled_image2 = transform.ifft2(masked_kspace) crop_size = (320, 320) sampled_image = transform.complex_center_crop(sampled_image2, crop_size) # Absolute value sampled_image = transform.complex_abs(sampled_image) # Apply Root-Sum-of-Squares if multicoil data sampled_image = transform.root_sum_of_squares(sampled_image) # Normalize input sampled_image, mean, std = transform.normalize_instance(sampled_image, eps=1e-11) sampled_image = sampled_image.clamp(-6, 6) inp2 = sampled_image.unsqueeze(0) der_rec = net(inp2.type(dtype)).data.cpu()[0] ### new network for computing derivatives ### right side #print("\n***fit ConvDecoder at i = {} for derivatives***".format(i)) """ net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=True, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon h = inp.mean()/eps unders_meas = unders_measurement.clone() unders_meas[0,ind[0],ind[1],ind[2],ind[3]] += h scale_out,sover,pover,par_mse_n, par_mse_t, parni, der_net = fitr(net, unders_meas.type(dtype), Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.1, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam", retain_graph=True, ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) der_rec = torch.from_numpy( data_consistency(der_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) der_rec, mean, std = transform.normalize_instance(der_rec, eps=1e-11) der_rec = der_rec.clamp(-6, 6).data.cpu() """### left side net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) ### fit this network to get an approximate derivative of f(A(x+r)) with respect to A(x+r) --> slightly perturb it with epsilon unders_meas = unders_measurement.clone() unders_meas[0,j,m,n,q] -= h scale_out,sover,pover,par_mse_n, par_mse_t, parni, der_net = fitr(net, unders_meas.type(dtype), Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam", retain_graph=True, ) #out_chs = pert_net( net_input.type(dtype) ).data.cpu().numpy()[0] #out_imgs = channels2imgs(out_chs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(out_imgs),320,320)).type(dtype) der_rec2 = torch.from_numpy( data_consistency(der_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) der_rec2, mean, std = transform.normalize_instance(der_rec2, eps=1e-11) der_rec2 = der_rec2.clamp(-6, 6).data.cpu()""" grad[ind[0],ind[1],ind[2],ind[3]] = (torch.norm(fixed_rec.data.cpu()-der_rec)**2 - torch.norm(fixed_rec.data.cpu()-pert_rec.data.cpu())**2) / (h) / np.prod(fixed_rec.data.cpu().numpy().shape) #if ctr % 100 == 0: # print('%',ctr*100/np.prod(ksp.data.cpu().numpy().shape), 
time.time()-s,"seconds") # s = time.time() #ctr += 1 #r[grad==0] = 0 R.append(r.data.cpu()) r -= LR*(-grad.type(dtype)+lam*r) print(2*"\n") #loss = optimizer.step(closure) with open("./outputs/untrainedrunner_test/results","wb") as fn: pickle.dump([R,ksp,loss_,fixed_rec.data.cpu().numpy(),pert_recs,mask,mask1d,mask2d],fn) #del(der_rec,pert_rec,inp,grad) #torch.cuda.empty_cache() return R,net_input, loss_, fixed_rec.data.cpu().numpy(), pert_recs def runner_untrained(ksp, num_iter = 20, num_iter_inner = 10000, LR = 0.01, OPTIMIZER='adam', mask = None, mask1d = None, mask2d = None, lr_decay_epoch = 0, weight_decay=0, loss_type="MSE", retain_graph = False, find_best = True, ): ################ main optimization steup: perturbation finder ################ shape = ksp.shape print("perturbation shape: ", shape) r = Variable(torch.zeros(shape)).type(dtype) r.data.uniform_() #r.data *= 1/torch.norm(ksp)#1./1e3 r.data *= torch.norm(ksp)/torch.norm(r) r = r.type(dtype) r_saved = r.data.clone() r.requires_grad = True p = [r] mse_ = np.zeros(num_iter) if OPTIMIZER == 'SGD': print("optimize with SGD", LR) optimizer = torch.optim.SGD(p, lr=LR,momentum=0.9,weight_decay=weight_decay) elif OPTIMIZER == 'adam': print("optimize with adam", LR) optimizer = torch.optim.Adam(p, lr=LR,weight_decay=weight_decay) elif OPTIMIZER == 'LBFGS': print("optimize with LBFGS", LR) optimizer = torch.optim.LBFGS(p, lr=LR) elif OPTIMIZER == "adagrad": print("optimize with adagrad", LR) optimizer = torch.optim.Adagrad(p, lr=LR,weight_decay=weight_decay) mse = rMSELoss() ################ ################ ################ sub optimization: fitting ConvDecoder (or any untrained network) ################ num_channels = 160 #256 num_layers = 8 strides = [1]*(num_layers-1) in_size = [8,4] kernel_size = 3 output_depth = ksp.shape[0]*2 out_size = ksp.shape[1:-1] width,height = in_size shape = [1,num_channels, width, height] print("network input shape: ", shape) net_input = Variable(torch.zeros(shape)).type(dtype) net_input.data.uniform_() net_input.data *= 1./10 net_input = net_input.type(dtype) ##### fit the network for reconstruction without perturbation ##### net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) torch.save(net.state_dict(), "./init") ### fix scaling for ConvDecoder scaling_factor,_ = get_scale_factor(net, num_channels, in_size, ksp, ni=net_input) slice_ksp_torchtensor1 = ksp * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, fixed_net = fitr( net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) fixed_outs = fixed_net( net_input.type(dtype) ) #outs = fixed_outs.data.cpu().numpy()[0] #fixed_imgs = channels2imgs(outs) #fixed_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(fixed_imgs),320,320)).type(dtype) fixed_rec = torch.from_numpy( data_consistency(fixed_net, 
net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_recs = [] R = [] for i in range(num_iter): ### prepare input for ConvDecoder # create the network net = convdecoder(out_size,in_size,output_depth, num_layers,strides,num_channels, act_fun = nn.ReLU(), skips=False,need_sigmoid=False,bias=False, need_last = True, kernel_size=kernel_size,upsample_mode="nearest").type(dtype) net.load_state_dict(torch.load("./init")) # f(A(x+r)) recovery print("***fit ConvDecoder at i = {}***".format(i)) print("norms:",torch.norm(r),torch.norm(ksp),'\n') inp = r + ksp.type(dtype) scaling_factor,_ = get_scale_factor(net, num_channels, in_size, inp.data, ni=net_input) slice_ksp_torchtensor1 = inp.data.cpu() * scaling_factor masked_kspace, mask = transform.apply_mask(slice_ksp_torchtensor1, mask = mask) unders_measurement = np_to_var( masked_kspace.data.cpu().numpy() ).type(dtype) sampled_image2 = transform.ifft2(masked_kspace) measurement = slice_ksp_torchtensor1.unsqueeze(0).type(dtype) lsimg = lsreconstruction(measurement) # fit the network out = [] for img in sampled_image2: out += [ img[:,:,0].numpy() , img[:,:,1].numpy() ] lsest = torch.tensor(np.array([out])) scale_out,sover,pover,par_mse_n, par_mse_t, parni, pert_net = fitr(net, unders_measurement, Variable(lsest).type(dtype), mask2d, num_iter=num_iter_inner, LR=0.008, apply_f = forwardm, lsimg = lsimg, find_best=True, net_input = net_input, OPTIMIZER = "adam" ) pert_outs = pert_net( net_input.type(dtype) ) #outs = pert_outs.data.cpu().numpy()[0] #pert_imgs = channels2imgs(outs) #pert_rec = torch.from_numpy(crop_center2(root_sum_of_squares2(pert_imgs),320,320)).type(dtype) pert_rec = torch.from_numpy( data_consistency(pert_net, net_input, mask1d, slice_ksp_torchtensor1) ).type(dtype) pert_recs.append(pert_rec.data.cpu().numpy()) if lr_decay_epoch is not 0: optimizer = exp_lr_scheduler(optimizer, i, init_lr=LR, lr_decay_epoch=lr_decay_epoch) def closure(): optimizer.zero_grad() #out = net(inp.type(dtype)) #out2 = net(inp2.type(dtype)) #loss = mse(r, fixed_rec, pert_rec) loss = mse(r, fixed_outs, pert_outs) loss.backward(retain_graph=retain_graph) mse_[i] = loss.data.cpu().numpy() if i % 1 == 0: print ('\nIteration %05d loss %f\n\n' % (i, mse_[i])) return loss R.append(r.data.cpu()) loss = optimizer.step(closure) with open("./outputs/untrainedrunner1/results","wb") as fn: pickle.dump([R,ksp,mse_,fixed_rec.data.cpu().numpy(),pert_recs],fn) return R,net_input, mse_, fixed_rec.data.cpu().numpy(), pert_recs def fitr(net, img_noisy_var, img_clean_var, mask, net_input = None, num_iter = 5000, LR = 0.01, OPTIMIZER='adam', mask_var = None, apply_f = None, lr_decay_epoch = 0, lsimg = None, target_img = None, find_best=False, weight_decay=0, totalupsample = 1, loss_type="MSE", retain_graph = False, scale_out=1, ): import copy p = [x for x in net.parameters() ] mse_wrt_noisy = np.zeros(num_iter) mse_wrt_truth = np.zeros(num_iter) if OPTIMIZER == 'SGD': print("optimize with SGD", LR) optimizer1 = torch.optim.SGD(p, lr=LR,momentum=0.9,weight_decay=weight_decay) elif OPTIMIZER == 'adam': print("optimize with adam", LR) optimizer1 = torch.optim.Adam(p, lr=LR, weight_decay=weight_decay) elif OPTIMIZER == 'LBFGS': print("optimize with LBFGS", LR) optimizer1 = torch.optim.LBFGS(p, lr=LR) elif OPTIMIZER == "adagrad": print("optimize with adagrad", LR) optimizer1 = torch.optim.Adagrad(p, lr=LR,weight_decay=weight_decay) mse1 = torch.nn.MSELoss() if find_best: best_net = copy.deepcopy(net) best_mse = 1000000.0 PSNRs = [] SSIMs = [] for i in range(num_iter): if 
lr_decay_epoch is not 0 and i % lr_decay_epoch == 0: optimizer1 = exp_lr_scheduler(optimizer, i, init_lr=LR, lr_decay_epoch=lr_decay_epoch) def closure(): optimizer1.zero_grad() try: out = net(net_input.type(dtype),scale_out=scale_out) except: out = net(net_input.type(dtype)) # training loss if mask_var is not None: loss = mse1( out * mask_var , img_noisy_var * mask_var ) elif apply_f: loss = mse1( apply_f(out,mask) , img_noisy_var ) else: loss = mse1(out, img_noisy_var) loss.backward(retain_graph=retain_graph) mse_wrt_noisy[i] = loss.data.cpu().numpy() # the actual loss true_loss = mse1( Variable(out.data, requires_grad=False).type(dtype), img_clean_var.type(dtype) ) mse_wrt_truth[i] = true_loss.data.cpu().numpy() if i % 100 == 0: if lsimg is not None: ### compute ssim and psnr ### out_chs = out.data.cpu().numpy()[0] out_imgs = channels2imgs(out_chs) # least squares reconstruciton orig = crop_center2( root_sum_of_squares2(var_to_np(lsimg)) , 320,320) # deep decoder reconstruction rec = crop_center2(root_sum_of_squares2(out_imgs),320,320) ssim_const = ssim(orig,rec,data_range=orig.max()) SSIMs.append(ssim_const) psnr_const = psnr(orig,rec,np.max(orig)) PSNRs.append(psnr_const) ### ### trloss = loss.data true_loss = true_loss.data try: out2 = net(Variable(net_input).type(dtype),scale_out=scale_out) except: out2 = net(Variable(net_input).type(dtype)) loss2 = mse1(out2, img_clean_var).data print ('Iteration %05d Train loss %f Actual loss %f Actual loss orig %f' % (i, trloss,true_loss,loss2), '\r', end='') return loss loss = optimizer1.step(closure) if find_best: # if training loss improves by at least one percent, we found a new best net lossval = loss.data if best_mse > 1.005*lossval: best_mse = lossval best_net = copy.deepcopy(net) net_input_saved = net_input.data.clone() if find_best: net = best_net return scale_out,SSIMs,PSNRs,mse_wrt_noisy, mse_wrt_truth,net_input, net
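The perturbation finder above never backpropagates through the fit-ConvDecoder-then-reconstruct pipeline; it estimates the gradient of the reconstruction discrepancy with a coordinate-wise forward difference, grad[ind] ≈ (||fixed_rec − f(x + h·e_ind)||² − ||fixed_rec − f(x)||²) / h, and then takes an ascent step with an ℓ2 penalty (r -= LR·(−grad + λ·r)). Below is a minimal, self-contained sketch of that estimator; `f` stands in for the full refit-and-reconstruct map (which the file re-runs per probed coordinate, hence its random subsampling of k-space indices), and all names here are illustrative rather than part of the original code.

import torch

def finite_diff_grad(f, x, fixed_rec, h=1e-2):
    # Forward-difference estimate of d/dx L(x) with
    # L(x) = ||fixed_rec - f(x)||^2 / numel(fixed_rec),
    # mirroring the grad[ind] update in the loop above.
    n = fixed_rec.numel()
    base = torch.norm(fixed_rec - f(x)) ** 2 / n
    grad = torch.zeros_like(x)
    flat_x, flat_g = x.view(-1), grad.view(-1)   # views share storage with x/grad
    for i in range(flat_x.numel()):              # one evaluation of f per coordinate
        old = flat_x[i].item()
        flat_x[i] = old + h
        flat_g[i] = (torch.norm(fixed_rec - f(x)) ** 2 / n - base) / h
        flat_x[i] = old
    return grad

# toy check with a linear "reconstruction" map
A = torch.randn(4, 4)
g = finite_diff_grad(lambda x: A @ x, torch.randn(4), fixed_rec=torch.zeros(4))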
49.503102
203
0.496599
9,483
87,769
4.369714
0.042708
0.04561
0.019692
0.013538
0.903784
0.893479
0.880737
0.868092
0.86148
0.854868
0
0.024733
0.395151
87,769
1,773
204
49.503102
0.755835
0.102872
0
0.743
0
0.005
0.147463
0.038125
0
0
0
0
0
1
0.02
false
0
0.017
0
0.058
0.046
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5421067be2bcec7e192ae0e280b62009b1216330
4,255
py
Python
CS-RRT/astar.py
RKJenamani/CS-RRT
7a595c468b3b746817b68498a1080d66017addf2
[ "MIT" ]
2
2020-04-10T16:40:18.000Z
2020-09-14T16:56:37.000Z
CS-RRT/astar.py
RKJenamani/CS-RRT
7a595c468b3b746817b68498a1080d66017addf2
[ "MIT" ]
1
2020-08-24T07:47:19.000Z
2020-08-24T07:59:53.000Z
CS-RRT/astar.py
RKJenamani/CS-RRT
7a595c468b3b746817b68498a1080d66017addf2
[ "MIT" ]
2
2020-04-10T16:10:22.000Z
2020-07-13T21:06:33.000Z
import heapq
from math import sqrt
import pylab as pl
import numpy as np
from matplotlib import collections as mc
import matplotlib.pyplot as plt
import helper
import time


def get_successors(G, cur):
    return G.neighbors(cur)


def compute_distance(start_config, end_config):
    s = [(start_config[i] - end_config[i])**2 for i in range(0, len(start_config))]
    return sqrt(sum(s))


def compute_distance_id(G, u, v):
    start_config = helper.state_to_numpy(G.nodes[u]['state'])
    end_config = helper.state_to_numpy(G.nodes[v]['state'])
    return compute_distance(start_config, end_config)


def get_heuristic(G, v, goal_v):
    return compute_distance_id(G, v, goal_v)


def astar(G, start_v, goal_v, occ_g, inc, h_weight=1):
    # print(start_v)
    # print(goal_v)
    queue = []
    heapq.heappush(queue, (0, 0, start_v))
    nodes = dict()
    nodes[start_v] = (0, [])
    start_time = time.time()
    count = 0
    while len(queue):
        curr_time = time.time()
        if curr_time - start_time > 100:
            return [], None
        heu, dis, cur = heapq.heappop(queue)
        if dis > nodes[cur][0]:
            continue
        if cur == goal_v:
            addv = goal_v
            plan = []
            while addv != []:
                plan.append(addv)
                addv = nodes[addv][1]
            # print(" count = ", count)
            # print(" dis = ", dis)
            return np.array(plan[::-1]), dis
        next_cur = get_successors(G, cur)
        for v in next_cur:
            # dis_v = dis + compute_distance_id(G, cur, v)
            dis_v = dis + G[cur][v]['weight']
            if (v not in nodes) or nodes[v][0] > dis_v:
                count += 1
                cost_v = dis_v + h_weight * get_heuristic(G, v, goal_v)
                node1_pos = helper.state_to_numpy(G.nodes[v]['state'])
                node2_pos = helper.state_to_numpy(G.nodes[cur]['state'])
                lines = []
                colors = []
                lines.append([node1_pos, node2_pos])
                # if not helper.is_edge_free(node1_pos, node2_pos, occ_g, inc = inc):
                #     colors.append((1,0,0,0.3))
                #     lc = mc.LineCollection(lines, colors=colors, linewidths=1)
                #     continue
                colors.append((0,1,0,0.3))
                heapq.heappush(queue, (cost_v, dis_v, v))
                nodes[v] = (dis_v, cur)
    # print(" count = ", count)
    return [], None


def astar1(G, start_v, goal_v, occ_g, inc, h_weight=1):
    # print(start_v)
    # print(goal_v)
    queue = []
    heapq.heappush(queue, (0, 0, start_v))
    nodes = dict()
    nodes[start_v] = (0, [])
    start_time = time.time()
    count = 0
    while len(queue):
        curr_time = time.time()
        if curr_time - start_time > 60:
            return [], None
        heu, dis, cur = heapq.heappop(queue)
        if dis > nodes[cur][0]:
            continue
        if cur == goal_v:
            addv = goal_v
            plan = []
            while addv != []:
                plan.append(addv)
                addv = nodes[addv][1]
            return np.array(plan[::-1]), dis
        next_cur = get_successors(G, cur)
        for v in next_cur:
            # dis_v = dis + compute_distance_id(G, cur, v)
            dis_v = dis + G[cur][v]['weight']
            if (v not in nodes) or nodes[v][0] > dis_v:
                count += 1
                cost_v = dis_v + h_weight * get_heuristic(G, v, goal_v)
                node1_pos = helper.state_to_numpy(G.nodes[v]['state'])
                node2_pos = helper.state_to_numpy(G.nodes[cur]['state'])
                lines = []
                colors = []
                lines.append([node1_pos, node2_pos])
                if not helper.is_edge_free1(node1_pos, node2_pos, occ_g, inc = inc):
                    colors.append((1,0,0,0.3))
                    lc = mc.LineCollection(lines, colors=colors, linewidths=1)
                    continue
                colors.append((0,1,0,0.3))
                heapq.heappush(queue, (cost_v, dis_v, v))
                nodes[v] = (dis_v, cur)
    # print(" count = ", count)
    return [], None
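Both search routines above share one heap discipline: entries are (f = g + h_weight·h, g, node), and stale heap entries are discarded by comparing g against the best cost recorded so far. A minimal restatement of that pattern, independent of the repo's graph and helper modules (the adjacency dict, unit weights, and heuristic below are illustrative):

import heapq

def astar_demo(neighbors, weight, heuristic, start, goal):
    queue = [(heuristic(start), 0, start)]
    best = {start: (0, None)}            # node -> (best g, parent)
    while queue:
        f, g, cur = heapq.heappop(queue)
        if g > best[cur][0]:
            continue                     # stale entry, a cheaper path was found
        if cur == goal:
            plan, v = [], goal
            while v is not None:         # walk parents back to the start
                plan.append(v)
                v = best[v][1]
            return plan[::-1], g
        for nxt in neighbors(cur):
            g2 = g + weight(cur, nxt)
            if nxt not in best or best[nxt][0] > g2:
                best[nxt] = (g2, cur)
                heapq.heappush(queue, (g2 + heuristic(nxt), g2, nxt))
    return [], None

adj = {0: [1], 1: [0, 2], 2: [1, 3], 3: [2]}
plan, cost = astar_demo(lambda v: adj[v], lambda u, v: 1, lambda v: abs(3 - v), 0, 3)
print(plan, cost)   # -> [0, 1, 2, 3] 3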
33.503937
85
0.515629
568
4,255
3.670775
0.15669
0.028777
0.019185
0.051799
0.816787
0.816787
0.774101
0.756835
0.742446
0.742446
0
0.02301
0.356522
4,255
127
86
33.503937
0.738495
0.114219
0
0.726316
0
0
0.011185
0
0
0
0
0
0
1
0.063158
false
0
0.084211
0.021053
0.252632
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5813ee815abb5801e9eabb3bd33cefa283c8cac7
6,579
py
Python
q2_api_client/clients/mobile_ws/account_client.py
jcook00/q2-api-client
4431af164eb4baf52e26e8842e017cad1609a279
[ "BSD-2-Clause" ]
null
null
null
q2_api_client/clients/mobile_ws/account_client.py
jcook00/q2-api-client
4431af164eb4baf52e26e8842e017cad1609a279
[ "BSD-2-Clause" ]
null
null
null
q2_api_client/clients/mobile_ws/account_client.py
jcook00/q2-api-client
4431af164eb4baf52e26e8842e017cad1609a279
[ "BSD-2-Clause" ]
null
null
null
from q2_api_client.clients.base_q2_client import BaseQ2Client
from q2_api_client.endpoints.mobile_ws_endpoints import AccountEndpoint


class AccountClient(BaseQ2Client):
    def get_accounts(self):
        """GET /mobilews/account

        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT.value
        return self._get(url=self._build_url(endpoint))

    def set_pfm_enrollment_to_true(self):
        """GET /mobilews/account/Pfm/setEnrolledToTrue

        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_SET_ENROLLED.value
        return self._get(url=self._build_url(endpoint))

    def get_account_groups(self):
        """GET /mobilews/account/group

        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_GROUP.value
        return self._get(url=self._build_url(endpoint))

    def create_account_group(self, request_body):
        """POST /mobilews/account/group

        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_GROUP.value
        return self._post(url=self._build_url(endpoint), json=request_body)

    def delete_account_group(self, group_id):
        """DELETE /mobilews/account/group/{id}

        :param str group_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_GROUP_ID.value.format(id=group_id)
        return self._delete(url=self._build_url(endpoint))

    def update_account_group(self, group_id, request_body):
        """PUT /mobilews/account/group/{id}

        :param str group_id: path parameter
        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_GROUP_ID.value.format(id=group_id)
        return self._put(url=self._build_url(endpoint), json=request_body)

    def get_account_labels(self):
        """GET /mobilews/account/label

        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_LABEL.value
        return self._get(url=self._build_url(endpoint))

    def create_account_label(self, request_body):
        """POST /mobilews/account/label

        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_LABEL.value
        return self._post(url=self._build_url(endpoint), json=request_body)

    def delete_account_label(self, label_id):
        """DELETE /mobilews/account/label/{id}

        :param str label_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_LABEL_ID.value.format(id=label_id)
        return self._delete(url=self._build_url(endpoint))

    def update_account_label(self, label_id, request_body):
        """PUT /mobilews/account/label/{id}

        :param str label_id: path parameter
        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_LABEL_ID.value.format(id=label_id)
        return self._put(url=self._build_url(endpoint), json=request_body)

    def get_inquiry_link_history_template(self, account_id):
        """GET /mobilews/account/{id}/inquiryLink

        :param str account_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_INQUIRY_LINK.value.format(id=account_id)
        return self._get(url=self._build_url(endpoint))

    def create_inquiry_link_history_template(self, account_id, request_body):
        """POST /mobilews/account/{id}/inquiryLink

        :param str account_id: path parameter
        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_INQUIRY_LINK.value.format(id=account_id)
        return self._post(url=self._build_url(endpoint), json=request_body)

    def get_account(self, account_id):
        """GET /mobilews/account/{id}

        :param str account_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_ID.value.format(id=account_id)
        return self._get(url=self._build_url(endpoint))

    def create_account(self, account_id, request_body):
        """POST /mobilews/account/{id}

        :param str account_id: path parameter
        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_ID.value.format(id=account_id)
        return self._post(url=self._build_url(endpoint), json=request_body)

    def delete_account(self, account_id):
        """DELETE /mobilews/account/{id}

        :param str account_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_ID.value.format(id=account_id)
        return self._delete(url=self._build_url(endpoint))

    def update_account(self, account_id, request_body):
        """PUT /mobilews/account/{id}

        :param str account_id: path parameter
        :param dict request_body: Dictionary object to send in the body of the request
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_ID.value.format(id=account_id)
        return self._put(url=self._build_url(endpoint), json=request_body)

    def get_account_detail_click_value(self, account_id, hade_id):
        """GET /mobilews/account/{id}/{hadeId}

        :param str account_id: path parameter
        :param str hade_id: path parameter
        :return: Response object
        :rtype: requests.Response
        """
        endpoint = AccountEndpoint.ACCOUNT_HADE_ID.value.format(id=account_id, hadeId=hade_id)
        return self._get(url=self._build_url(endpoint))
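Every method above follows the same template: an Enum member holds the URL path template, .format() fills the path parameters, and an inherited _get/_post/_put/_delete helper issues the request. A self-contained sketch of that pattern (the mini base client below is a stand-in; BaseQ2Client's real constructor and helpers are defined elsewhere in the package):

from enum import Enum

import requests


class DemoEndpoint(Enum):
    ACCOUNT_ID = "/mobilews/account/{id}"


class MiniClient:
    def __init__(self, base_url):
        self._base_url = base_url

    def _build_url(self, endpoint):
        return self._base_url + endpoint

    def get_account(self, account_id):
        # same shape as AccountClient.get_account above
        endpoint = DemoEndpoint.ACCOUNT_ID.value.format(id=account_id)
        return requests.get(self._build_url(endpoint))


# client = MiniClient("https://bank.example.com")   # illustrative base URL
# response = client.get_account("42")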
36.960674
94
0.676395
792
6,579
5.395202
0.087121
0.0674
0.079569
0.099462
0.89656
0.87737
0.842031
0.814182
0.814182
0.758952
0
0.000987
0.230278
6,579
177
95
37.169492
0.842812
0.368749
0
0.574074
0
0
0
0
0
0
0
0
0
1
0.314815
false
0
0.037037
0
0.685185
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
582c8c9355efb4ebb1ed986a88b02a3684ba9402
28,677
py
Python
ambra_sdk/service/entrypoints/generated/query.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
9
2020-04-20T23:45:44.000Z
2021-04-18T11:22:17.000Z
ambra_sdk/service/entrypoints/generated/query.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
13
2020-02-08T16:15:05.000Z
2021-09-13T22:55:28.000Z
ambra_sdk/service/entrypoints/generated/query.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
6
2020-03-25T17:47:45.000Z
2021-04-18T11:22:19.000Z
""" Query. Do not edit this file by hand. This is generated by parsing api.html service doc. """ from ambra_sdk.exceptions.service import FilterNotFound from ambra_sdk.exceptions.service import InvalidCondition from ambra_sdk.exceptions.service import InvalidField from ambra_sdk.exceptions.service import InvalidGroup from ambra_sdk.exceptions.service import InvalidSortField from ambra_sdk.exceptions.service import InvalidSortOrder from ambra_sdk.exceptions.service import InvalidTag from ambra_sdk.exceptions.service import InvalidType from ambra_sdk.exceptions.service import MissingFields from ambra_sdk.exceptions.service import NotConfigured from ambra_sdk.exceptions.service import NotFound from ambra_sdk.exceptions.service import NotPermitted from ambra_sdk.exceptions.service import NotPhantom from ambra_sdk.exceptions.service import Stale from ambra_sdk.exceptions.service import StudyQueryGroupChange from ambra_sdk.exceptions.service import Thumbnail from ambra_sdk.exceptions.service import TooBig from ambra_sdk.service.query import QueryO from ambra_sdk.service.query import AsyncQueryO from ambra_sdk.service.query import QueryOPSF from ambra_sdk.service.query import AsyncQueryOPSF class Query: """Query.""" def __init__(self, api): self._api = api def list( self, ): """List. """ request_data = { } errors_mapping = {} errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID') errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to') errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to') errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to') errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. The error_subtype will hold the field name this applies to') query_data = { 'api': self._api, 'url': '/query/list', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } query_data['paginated_field'] = 'queries' return QueryOPSF(**query_data) def add( self, group_id, subject, body=None, customfield_param=None, notify=None, owner_namespace_id=None, owner_user_id=None, query_type=None, recipients=None, study_id=None, ): """Add. :param group_id: Id of the group associated to a trial site to add them to :param subject: Subject line of the query :param body: The query body (optional) :param customfield_param: Custom field(s) (optional) :param notify: Comma delimited list of the emails to be notified of the query events (optional) :param owner_namespace_id: The namespace owning the query. 
The account default from the default_query_owner_namespace account setting is used if not passed through the parameter (optional) :param owner_user_id: The user owning the query (optional) :param query_type: The query type (optional) :param recipients: JSON array of the user UUIDs to add to the query as recipients (optional) :param study_id: The study the query is related to (optional) """ request_data = { 'body': body, 'group_id': group_id, 'notify': notify, 'owner_namespace_id': owner_namespace_id, 'owner_user_id': owner_user_id, 'query_type': query_type, 'recipients': recipients, 'study_id': study_id, 'subject': subject, } if customfield_param is not None: customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()} request_data.update(customfield_param_dict) errors_mapping = {} errors_mapping[('INVALID_GROUP', None)] = InvalidGroup('The group passed is not linked to a trial site') errors_mapping[('INVALID_TYPE', None)] = InvalidType('The passed type is not valid for the group and patient arm') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The group&#39;s account is not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The object was not found. The error_subtype holds the type of object not found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to add a query to the namespace') query_data = { 'api': self._api, 'url': '/query/add', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def set( self, uuid, body=None, customfield_param=None, group_id=None, notify=None, owner_user_id=None, query_type=None, recipients=None, subject=None, ): """Set. :param uuid: The query uuid :param body: The query body (optional) :param customfield_param: Custom field(s) (optional) :param group_id: Id of the group associated to a trial site to add them to. Group change is not allowed for study-related queries (optional) :param notify: Comma delimited list of the emails to be notified of the query events (optional) :param owner_user_id: The user owning the query (optional) :param query_type: The query type (optional) :param recipients: JSON array of the user UUIDs to add to the query as recipients (optional) :param subject: Subject line of the query (optional) """ request_data = { 'body': body, 'group_id': group_id, 'notify': notify, 'owner_user_id': owner_user_id, 'query_type': query_type, 'recipients': recipients, 'subject': subject, 'uuid': uuid, } if customfield_param is not None: customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()} request_data.update(customfield_param_dict) errors_mapping = {} errors_mapping[('INVALID_GROUP', None)] = InvalidGroup('The group passed is not linked to a trial site or is from another account') errors_mapping[('INVALID_TYPE', None)] = InvalidType('The passed type is not valid for the group and patient arm') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The group&#39;s account is not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The object was not found. 
The error_subtype holds the type of object not found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to edit the query') errors_mapping[('STUDY_QUERY_GROUP_CHANGE', None)] = StudyQueryGroupChange('Cannot change group for the study-related query') query_data = { 'api': self._api, 'url': '/query/set', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def get( self, uuid, ): """Get. :param uuid: The query uuid """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view the query') query_data = { 'api': self._api, 'url': '/query/get', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def delete( self, uuid, ): """Delete. :param uuid: The query uuid """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete the query') query_data = { 'api': self._api, 'url': '/query/delete', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def status_set( self, new, old, uuid, ): """Status set. :param new: The new query status value :param old: The old query status value :param uuid: Query uuid """ request_data = { 'new': new, 'old': old, 'uuid': uuid, } errors_mapping = {} errors_mapping[('INVALID_TAG', None)] = InvalidTag('The query status new value is not a valid value') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to set the status for this query') errors_mapping[('STALE', None)] = Stale('The query status you have is stale') query_data = { 'api': self._api, 'url': '/query/status/set', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def attachment_add( self, length, name, query_id, type, thumbnail_length=None, thumbnail_type=None, ): """Attachment add. :param length: The attachment size in bytes :param name: The attachment display name :param query_id: Query UUID :param type: The attachment MIME-type :param thumbnail_length: The attachment's thumbnail MIME-type (optional) :param thumbnail_type: The attachment's thumbnail size in bytes (optional) """ request_data = { 'length': length, 'name': name, 'query_id': query_id, 'thumbnail_length': thumbnail_length, 'thumbnail_type': thumbnail_type, 'type': type, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. 
The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to add attachments to this query') errors_mapping[('TOO_BIG', None)] = TooBig('The attachment size exceeds the limit') query_data = { 'api': self._api, 'url': '/query/attachment/add', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def attachment_commit( self, uuid, ): """Attachment commit. :param uuid: Attachment UUID """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query attachment can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not the creator of the query attachment') errors_mapping[('NOT_PHANTOM', None)] = NotPhantom('The attachment is not phantom') query_data = { 'api': self._api, 'url': '/query/attachment/commit', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) def attachment_delete( self, uuid, ): """Attachment delete. :param uuid: Attachment UUID """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query attachment can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete attachments from this query') errors_mapping[('THUMBNAIL', None)] = Thumbnail('The attachment is a thumbnail for the attachment, Use the main attachment to delete the thumbnail') query_data = { 'api': self._api, 'url': '/query/attachment/delete', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return QueryO(**query_data) class AsyncQuery: """AsyncQuery.""" def __init__(self, api): self._api = api def list( self, ): """List. """ request_data = { } errors_mapping = {} errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID') errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to') errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to') errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to') errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. 
The error_subtype will hold the field name this applies to') query_data = { 'api': self._api, 'url': '/query/list', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } query_data['paginated_field'] = 'queries' return AsyncQueryOPSF(**query_data) def add( self, group_id, subject, body=None, customfield_param=None, notify=None, owner_namespace_id=None, owner_user_id=None, query_type=None, recipients=None, study_id=None, ): """Add. :param group_id: Id of the group associated to a trial site to add them to :param subject: Subject line of the query :param body: The query body (optional) :param customfield_param: Custom field(s) (optional) :param notify: Comma delimited list of the emails to be notified of the query events (optional) :param owner_namespace_id: The namespace owning the query. The account default from the default_query_owner_namespace account setting is used if not passed through the parameter (optional) :param owner_user_id: The user owning the query (optional) :param query_type: The query type (optional) :param recipients: JSON array of the user UUIDs to add to the query as recipients (optional) :param study_id: The study the query is related to (optional) """ request_data = { 'body': body, 'group_id': group_id, 'notify': notify, 'owner_namespace_id': owner_namespace_id, 'owner_user_id': owner_user_id, 'query_type': query_type, 'recipients': recipients, 'study_id': study_id, 'subject': subject, } if customfield_param is not None: customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()} request_data.update(customfield_param_dict) errors_mapping = {} errors_mapping[('INVALID_GROUP', None)] = InvalidGroup('The group passed is not linked to a trial site') errors_mapping[('INVALID_TYPE', None)] = InvalidType('The passed type is not valid for the group and patient arm') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The group&#39;s account is not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The object was not found. The error_subtype holds the type of object not found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to add a query to the namespace') query_data = { 'api': self._api, 'url': '/query/add', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def set( self, uuid, body=None, customfield_param=None, group_id=None, notify=None, owner_user_id=None, query_type=None, recipients=None, subject=None, ): """Set. :param uuid: The query uuid :param body: The query body (optional) :param customfield_param: Custom field(s) (optional) :param group_id: Id of the group associated to a trial site to add them to. 
Group change is not allowed for study-related queries (optional) :param notify: Comma delimited list of the emails to be notified of the query events (optional) :param owner_user_id: The user owning the query (optional) :param query_type: The query type (optional) :param recipients: JSON array of the user UUIDs to add to the query as recipients (optional) :param subject: Subject line of the query (optional) """ request_data = { 'body': body, 'group_id': group_id, 'notify': notify, 'owner_user_id': owner_user_id, 'query_type': query_type, 'recipients': recipients, 'subject': subject, 'uuid': uuid, } if customfield_param is not None: customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()} request_data.update(customfield_param_dict) errors_mapping = {} errors_mapping[('INVALID_GROUP', None)] = InvalidGroup('The group passed is not linked to a trial site or is from another account') errors_mapping[('INVALID_TYPE', None)] = InvalidType('The passed type is not valid for the group and patient arm') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The group&#39;s account is not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The object was not found. The error_subtype holds the type of object not found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to edit the query') errors_mapping[('STUDY_QUERY_GROUP_CHANGE', None)] = StudyQueryGroupChange('Cannot change group for the study-related query') query_data = { 'api': self._api, 'url': '/query/set', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def get( self, uuid, ): """Get. :param uuid: The query uuid """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view the query') query_data = { 'api': self._api, 'url': '/query/get', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def delete( self, uuid, ): """Delete. :param uuid: The query uuid """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete the query') query_data = { 'api': self._api, 'url': '/query/delete', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def status_set( self, new, old, uuid, ): """Status set. 
:param new: The new query status value :param old: The old query status value :param uuid: Query uuid """ request_data = { 'new': new, 'old': old, 'uuid': uuid, } errors_mapping = {} errors_mapping[('INVALID_TAG', None)] = InvalidTag('The query status new value is not a valid value') errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to set the status for this query') errors_mapping[('STALE', None)] = Stale('The query status you have is stale') query_data = { 'api': self._api, 'url': '/query/status/set', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def attachment_add( self, length, name, query_id, type, thumbnail_length=None, thumbnail_type=None, ): """Attachment add. :param length: The attachment size in bytes :param name: The attachment display name :param query_id: Query UUID :param type: The attachment MIME-type :param thumbnail_length: The attachment's thumbnail MIME-type (optional) :param thumbnail_type: The attachment's thumbnail size in bytes (optional) """ request_data = { 'length': length, 'name': name, 'query_id': query_id, 'thumbnail_length': thumbnail_length, 'thumbnail_type': thumbnail_type, 'type': type, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to add attachments to this query') errors_mapping[('TOO_BIG', None)] = TooBig('The attachment size exceeds the limit') query_data = { 'api': self._api, 'url': '/query/attachment/add', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def attachment_commit( self, uuid, ): """Attachment commit. :param uuid: Attachment UUID """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query attachment can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not the creator of the query attachment') errors_mapping[('NOT_PHANTOM', None)] = NotPhantom('The attachment is not phantom') query_data = { 'api': self._api, 'url': '/query/attachment/commit', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data) def attachment_delete( self, uuid, ): """Attachment delete. :param uuid: Attachment UUID """ request_data = { 'uuid': uuid, } errors_mapping = {} errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. 
The error_subtype holds a array of all the missing fields') errors_mapping[('NOT_CONFIGURED', None)] = NotConfigured('The Azure keys are not configured') errors_mapping[('NOT_FOUND', None)] = NotFound('The query attachment can not be found') errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete attachments from this query') errors_mapping[('THUMBNAIL', None)] = Thumbnail('The attachment is a thumbnail for the attachment, Use the main attachment to delete the thumbnail') query_data = { 'api': self._api, 'url': '/query/attachment/delete', 'request_data': request_data, 'errors_mapping': errors_mapping, 'required_sid': True, } return AsyncQueryO(**query_data)
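Each generated method above builds an errors_mapping from (error_type, error_subtype) pairs to pre-constructed exception instances and hands it to QueryO/AsyncQueryO. A hedged sketch of how such a table can drive error dispatch; the real lookup lives in ambra_sdk.service.query and may differ in detail:

class NotFound(Exception):
    pass


def raise_for_error(payload, errors_mapping):
    # Try the exact (type, subtype) key first, then fall back to the
    # subtype-agnostic entry keyed with None, as the tables above use.
    key = (payload.get("error_type"), payload.get("error_subtype"))
    exc = errors_mapping.get(key) or errors_mapping.get((key[0], None))
    if exc is not None:
        raise exc


errors_mapping = {("NOT_FOUND", None): NotFound("The query can not be found")}
try:
    raise_for_error({"error_type": "NOT_FOUND"}, errors_mapping)
except NotFound as exc:
    print(exc)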
42.484444
196
0.623915
3,450
28,677
5.015362
0.052754
0.106687
0.040687
0.054095
0.97827
0.97827
0.936947
0.936947
0.936947
0.936947
0
0.000387
0.27998
28,677
675
197
42.484444
0.837611
0.142414
0
0.879208
1
0.031683
0.351909
0.007805
0
0
0
0
0
1
0.039604
false
0.015842
0.041584
0
0.120792
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
583924d6eb27503c01688d6487e1bfc9438c8a7d
123
py
Python
info/modules/passport/__init__.py
lihaoABC/FlaskProject
323f75b37e3ee5f56833925a04cb2aceb8aa74f4
[ "MIT" ]
null
null
null
info/modules/passport/__init__.py
lihaoABC/FlaskProject
323f75b37e3ee5f56833925a04cb2aceb8aa74f4
[ "MIT" ]
null
null
null
info/modules/passport/__init__.py
lihaoABC/FlaskProject
323f75b37e3ee5f56833925a04cb2aceb8aa74f4
[ "MIT" ]
null
null
null
from flask import Blueprint

passport_blue = Blueprint('passport', __name__, url_prefix='/passport')

from . import views
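A minimal sketch of how this blueprint is wired into an application; the app construction below is illustrative and not part of this package:

from flask import Flask

from info.modules.passport import passport_blue

app = Flask(__name__)
app.register_blueprint(passport_blue)  # routes defined in views now serve under /passport/...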
17.571429
71
0.772358
15
123
5.933333
0.666667
0.382022
0
0
0
0
0
0
0
0
0
0
0.130081
123
7
72
17.571429
0.831776
0
0
0
0
0
0.137097
0
0
0
0
0
0
1
0
false
0.333333
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
1
0
7
584605659e5404b5e03438b51c2e2c578b77fb29
45
py
Python
src/lib/CGIHTTPServer.py
DTenore/skulpt
098d20acfb088d6db85535132c324b7ac2f2d212
[ "MIT" ]
2,671
2015-01-03T08:23:25.000Z
2022-03-31T06:15:48.000Z
src/lib/CGIHTTPServer.py
wakeupmuyunhe/skulpt
a8fb11a80fb6d7c016bab5dfe3712517a350b347
[ "MIT" ]
972
2015-01-05T08:11:00.000Z
2022-03-29T13:47:15.000Z
src/lib/CGIHTTPServer.py
wakeupmuyunhe/skulpt
a8fb11a80fb6d7c016bab5dfe3712517a350b347
[ "MIT" ]
845
2015-01-03T19:53:36.000Z
2022-03-29T18:34:22.000Z
import _sk_fail; _sk_fail._("CGIHTTPServer")
22.5
44
0.8
6
45
5.166667
0.666667
0.387097
0
0
0
0
0
0
0
0
0
0
0.066667
45
1
45
45
0.738095
0
0
0
0
0
0.288889
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
585cc14372691030b0de9ae50db13fb0d1637568
2,952
py
Python
blog/backlog.py
dkowsikpai/librolet
7148670655157ca5f1ad6853039c9ec00e37adef
[ "MIT" ]
null
null
null
blog/backlog.py
dkowsikpai/librolet
7148670655157ca5f1ad6853039c9ec00e37adef
[ "MIT" ]
null
null
null
blog/backlog.py
dkowsikpai/librolet
7148670655157ca5f1ad6853039c9ec00e37adef
[ "MIT" ]
null
null
null
from datetime import datetime


def backloghome(ip, data, loc, namef):
    filehandle = []
    temp = ""
    if loc == "create":
        temp = str(ip)+"{title:"+str(data.title)+" content:"+str(data.content)+" date_posted:"+str(data.date_posted)+" author:"+str(data.author)+" img_url:"+str(data.image)+" booked:"+str(data.bookeditem)+" college:"+str(data.college)+" mob:"+str(data.mobileno)+" tokens:"+str(data.tokens)+" user_got:"+str(data.tokenbyuser)+" }\n"
        filehandle.append(temp)
        fname = "bkup/homepage/create/"+str(namef)+".txt"
    elif loc == "update":
        temp = str(ip)+"{title:"+str(data.title)+" content:"+str(data.content)+" date_posted:"+str(data.date_posted)+" author:"+str(data.author)+" img_url:"+str(data.image)+" booked:"+str(data.bookeditem)+" college:"+str(data.college)+" mob:"+str(data.mobileno)+" tokens:"+str(data.tokens)+" user_got:"+str(data.tokenbyuser)+" }\n"
        filehandle.append(temp)
        fname = "bkup/homepage/update/"+str(namef)+".txt"
    elif loc == "delete":
        temp = str(ip)+"{title:"+str(data.title)+" content:"+str(data.content)+" date_posted:"+str(data.date_posted)+" author:"+str(data.author)+" img_url:"+str(data.image)+" booked:"+str(data.bookeditem)+" college:"+str(data.college)+" mob:"+str(data.mobileno)+" tokens:"+str(data.tokens)+" user_got:"+str(data.tokenbyuser)+" }\n"
        filehandle.append(temp)
        fname = "bkup/homepage/delete/"+str(namef)+".txt"
    elif loc == "booked":
        temp = str(ip)+"{title:"+str(data.title)+" content:"+str(data.content)+" date_posted:"+str(data.date_posted)+" author:"+str(data.author)+" img_url:"+str(data.image)+" booked:"+str(data.bookeditem)+" college:"+str(data.college)+" mob:"+str(data.mobileno)+" tokens:"+str(data.tokens)+" user_got:"+str(data.tokenbyuser)+" }\n"
        filehandle.append(temp)
        fname = "bkup/homepage/booked/"+str(namef)+".txt"
    elif loc == "retbook":
        temp = str(ip)+"RET{title:"+str(data.title)+" content:"+str(data.content)+" date_posted:"+str(data.date_posted)+" author:"+str(data.author)+" img_url:"+str(data.image)+" booked:"+str(data.bookeditem)+" college:"+str(data.college)+" mob:"+str(data.mobileno)+" tokens:"+str(data.tokens)+" user_got:"+str(data.tokenbyuser)+" }\n"
        filehandle.append(temp)
        fname = "bkup/homepage/booked/"+str(namef)+".txt"
    filehandle.insert(0, str(datetime.now())+"\n")
    try:
        f = open(fname, "a")
        f.write("\n")
        f.writelines(filehandle)
        f.close()
    except:
        f = open(fname, "w")
        f.write("\n")
        f.writelines(filehandle)
        f.close()


def backlog(ip, folder, data, namef):
    fname = "bkup/"+folder+"/"+str(namef)+".txt"
    data = str(ip)+" "+str(datetime.now()) + data
    try:
        f = open(fname, "a")
        f.write(data)
        f.close()
    except:
        f = open(fname, "w")
        f.write(data)
        f.close()


def backlogtoken(ip, folder, pk, data):
    fname = "bkup/"+folder+"/"+pk+".txt"
    data = str(ip) + " " + str(datetime.now()) + data
    try:
        f = open(fname, "a")
        f.write(data)
        f.close()
    except:
        f = open(fname, "w")
        f.write(data)
        f.close()
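The five branches of backloghome differ only in the target subdirectory and an optional "RET" prefix, so the record can be built once from a field table. A behavior-approximating sketch of that refactor (note that mode "a" already creates a missing file, which makes the try/except fallback above redundant as long as the bkup/ directories exist):

from datetime import datetime

# (record label, attribute on the data object) pairs, in the order used above
_FIELDS = [("title", "title"), ("content", "content"),
           ("date_posted", "date_posted"), ("author", "author"),
           ("img_url", "image"), ("booked", "bookeditem"),
           ("college", "college"), ("mob", "mobileno"),
           ("tokens", "tokens"), ("user_got", "tokenbyuser")]


def backloghome_compact(ip, data, loc, namef):
    prefix = "RET" if loc == "retbook" else ""
    subdir = "booked" if loc == "retbook" else loc
    body = " ".join("%s:%s" % (label, getattr(data, attr))
                    for label, attr in _FIELDS)
    record = "%s%s{%s }\n" % (ip, prefix, body)
    fname = "bkup/homepage/%s/%s.txt" % (subdir, namef)
    with open(fname, "a") as f:                  # "a" creates the file if absent
        f.write("\n%s\n%s" % (datetime.now(), record))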
47.612903
328
0.653117
429
2,952
4.449883
0.137529
0.183342
0.034573
0.044526
0.862232
0.830278
0.830278
0.822944
0.793609
0.778418
0
0.000376
0.098238
2,952
61
329
48.393443
0.716379
0
0
0.684211
0
0
0.217892
0.035581
0
0
0
0
0
0
null
null
0
0.017544
null
null
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
544d5de7863f0e9806848ed94eec81a027fd082a
72
py
Python
twitch_listener/__init__.py
PlotThiggins/twitch-listener
92882d7afd20a3918b8e3ff613de6c83dd7931ca
[ "MIT" ]
13
2020-11-15T06:58:03.000Z
2022-03-26T14:27:53.000Z
twitch_listener/__init__.py
Oliver-Lloyd/twitch-listener
92882d7afd20a3918b8e3ff613de6c83dd7931ca
[ "MIT" ]
3
2020-09-03T18:31:29.000Z
2021-10-16T22:07:25.000Z
twitch_listener/__init__.py
lloyd334/twitch-listener
92882d7afd20a3918b8e3ff613de6c83dd7931ca
[ "MIT" ]
7
2020-06-25T02:59:17.000Z
2021-12-02T18:26:37.000Z
from twitch_listener import listener
from twitch_listener import utils
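The __init__ above makes both submodules available as package attributes. A minimal sketch, assuming only that the package is installed (no submodule API beyond the module objects themselves is assumed):

import twitch_listener

# Both submodules are exposed by the package __init__ above.
assert twitch_listener.listener is not None
assert twitch_listener.utils is not None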
18
36
0.875
10
72
6.1
0.5
0.327869
0.590164
0.786885
0
0
0
0
0
0
0
0
0.125
72
3
37
24
0.968254
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
545c001273d1e7374c7d04d2bef452deae3b1125
1,999
py
Python
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/feature_column/__init__.py
Lube-Project/ProgettoLube
cbf33971e2c2e865783ec1a2302625539186a338
[ "MIT" ]
2
2020-09-30T00:11:09.000Z
2021-10-04T13:00:38.000Z
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/feature_column/__init__.py
Lube-Project/ProgettoLube
cbf33971e2c2e865783ec1a2302625539186a338
[ "MIT" ]
null
null
null
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/feature_column/__init__.py
Lube-Project/ProgettoLube
cbf33971e2c2e865783ec1a2302625539186a338
[ "MIT" ]
1
2021-01-28T01:57:41.000Z
2021-01-28T01:57:41.000Z
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.feature_column namespace.
"""

from __future__ import print_function as _print_function

import sys as _sys

from tensorflow.python.feature_column.feature_column_v2 import bucketized_column
from tensorflow.python.feature_column.feature_column_v2 import categorical_column_with_hash_bucket
from tensorflow.python.feature_column.feature_column_v2 import categorical_column_with_identity
from tensorflow.python.feature_column.feature_column_v2 import categorical_column_with_vocabulary_file_v2 as categorical_column_with_vocabulary_file
from tensorflow.python.feature_column.feature_column_v2 import categorical_column_with_vocabulary_list
from tensorflow.python.feature_column.feature_column_v2 import crossed_column
from tensorflow.python.feature_column.feature_column_v2 import embedding_column
from tensorflow.python.feature_column.feature_column_v2 import indicator_column
from tensorflow.python.feature_column.feature_column_v2 import make_parse_example_spec_v2 as make_parse_example_spec
from tensorflow.python.feature_column.feature_column_v2 import numeric_column
from tensorflow.python.feature_column.feature_column_v2 import shared_embedding_columns_v2 as shared_embeddings
from tensorflow.python.feature_column.feature_column_v2 import weighted_categorical_column
from tensorflow.python.feature_column.sequence_feature_column import sequence_categorical_column_with_hash_bucket
from tensorflow.python.feature_column.sequence_feature_column import sequence_categorical_column_with_identity
from tensorflow.python.feature_column.sequence_feature_column import sequence_categorical_column_with_vocabulary_file
from tensorflow.python.feature_column.sequence_feature_column import sequence_categorical_column_with_vocabulary_list
from tensorflow.python.feature_column.sequence_feature_column import sequence_numeric_column

del _print_function
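A short sketch of what the re-exported API above is used for, assuming a TensorFlow 2.x install where tf.feature_column is still present (it is deprecated in recent releases). Feature names and boundaries are illustrative.

import tensorflow as tf

# Numeric input, bucketized into ranges.
price = tf.feature_column.numeric_column("price")
buckets = tf.feature_column.bucketized_column(price, boundaries=[10.0, 50.0, 100.0])

# Categorical input with a fixed vocabulary, one-hot encoded.
color = tf.feature_column.categorical_column_with_vocabulary_list(
    "color", ["red", "green", "blue"])
columns = [buckets, tf.feature_column.indicator_column(color)]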
68.931034
148
0.909455
277
1,999
6.119134
0.191336
0.268437
0.20059
0.270796
0.778171
0.775811
0.772271
0.772271
0.772271
0.654867
0
0.007941
0.055028
1,999
28
149
71.392857
0.889359
0.085043
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.95
0
0.95
0.1
0
0
0
null
1
1
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
12
5492997589d4cfa2ea646158a68c061d430c4a61
5,135
py
Python
test/pyaz/databoxedge/device/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
null
null
null
test/pyaz/databoxedge/device/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
9
2021-09-24T16:37:24.000Z
2021-12-24T00:39:19.000Z
test/pyaz/databoxedge/device/__init__.py
bigdatamoore/py-az-cli
54383a4ee7cc77556f6183e74e992eec95b28e01
[ "MIT" ]
null
null
null
import json
import subprocess

# get_cli_name is kept from the original import, though unused below.
from ...pyaz_utils import get_cli_name, get_params


def _run(command):
    # Shared runner for all the az CLI wrappers below: echo the command,
    # execute it, and return the parsed JSON output. Any stderr output is
    # raised, as in the original per-function bodies (whose print() calls
    # after return/raise were unreachable and have been dropped).
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)


def list(resource_group=None, expand=None):
    # Named after the az verb; intentionally shadows the builtin.
    params = get_params(locals())
    return _run("az databoxedge device list " + params)


def show(name, resource_group):
    params = get_params(locals())
    return _run("az databoxedge device show " + params)


def create(name, resource_group, location=None, tags=None, sku=None, etag=None, status=None, description=None, model_description=None, friendly_name=None, no_wait=None):
    params = get_params(locals())
    return _run("az databoxedge device create " + params)


def update(name, resource_group, tags=None):
    params = get_params(locals())
    return _run("az databoxedge device update " + params)


def delete(name, resource_group, yes=None, no_wait=None):
    params = get_params(locals())
    return _run("az databoxedge device delete " + params)


def download_update(name, resource_group, no_wait=None):
    params = get_params(locals())
    return _run("az databoxedge device download-update " + params)


def install_update(name, resource_group, no_wait=None):
    params = get_params(locals())
    return _run("az databoxedge device install-update " + params)


def scan_for_update(name, resource_group, no_wait=None):
    params = get_params(locals())
    return _run("az databoxedge device scan-for-update " + params)


def show_update_summary(name, resource_group):
    params = get_params(locals())
    return _run("az databoxedge device show-update-summary " + params)


def wait(name, resource_group, timeout=None, interval=None, deleted=None, created=None, updated=None, exists=None, custom=None):
    params = get_params(locals())
    return _run("az databoxedge device wait " + params)
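A hypothetical usage sketch: it assumes the Azure CLI (az) is installed and authenticated, and that this module is importable as pyaz.databoxedge.device; the resource-group name is a placeholder. Note that because commands are built by string concatenation and run with shell=True, argument values should be trusted input.

from pyaz.databoxedge import device

devices = device.list(resource_group="example-rg")  # parsed JSON, a Python list
for d in devices:
    print(d["name"])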
35.659722
169
0.667186
627
5,135
5.405104
0.103668
0.08262
0.059014
0.061965
0.870463
0.870463
0.870463
0.870463
0.870463
0.825317
0
0.004971
0.216553
5,135
143
170
35.909091
0.837435
0
0
0.833333
0
0
0.082376
0
0
0
0
0
0
1
0.075758
false
0
0.015152
0
0.166667
0.227273
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
49ab7f9838c95dd38462880b08220447e6d1ac0d
217
py
Python
social_auth/backends/pipeline/sauth.py
merutak/django-social-auth
3a6e4414da0e969fcaf625a891852a3b2d7627c0
[ "BSD-2-Clause", "BSD-3-Clause" ]
863
2015-01-01T00:42:07.000Z
2022-03-30T02:47:18.000Z
social_auth/backends/pipeline/sauth.py
merutak/django-social-auth
3a6e4414da0e969fcaf625a891852a3b2d7627c0
[ "BSD-2-Clause", "BSD-3-Clause" ]
101
2015-01-08T00:28:16.000Z
2022-03-07T03:11:19.000Z
social_auth/backends/pipeline/sauth.py
merutak/django-social-auth
3a6e4414da0e969fcaf625a891852a3b2d7627c0
[ "BSD-2-Clause", "BSD-3-Clause" ]
256
2015-01-02T16:55:36.000Z
2022-03-04T11:10:47.000Z
from social.pipeline.social_auth import social_user as social_auth_user, \
    associate_user, load_extra_data

social_auth_user, associate_user, load_extra_data  # placate pyflakes
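The bare tuple above references the re-exported names once so pyflakes does not flag the imports as unused. A generic sketch of the same pattern using __all__, which pyflakes also treats as a use; the module and names here are illustrative, not from social_auth:

from mypackage.handlers import handle_login, handle_logout  # hypothetical module

__all__ = ["handle_login", "handle_logout"]  # marks the imports as used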
54.25
74
0.705069
27
217
5.222222
0.481481
0.212766
0.198582
0.326241
0.567376
0.567376
0.567376
0.567376
0
0
0
0
0.258065
217
3
75
72.333333
0.875776
0.073733
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
b713d978a6ef6d0b004e4202bd7c28332c418937
67,937
py
Python
roboTraining/simulate.py
Gabs48/SpringMassNetworks
d917ef242ed99b02f82e9ff8697960d0d1d65178
[ "MIT" ]
3
2019-11-27T15:35:20.000Z
2021-12-09T08:20:34.000Z
roboTraining/simulate.py
Gabs48/SpringMassNetworks
d917ef242ed99b02f82e9ff8697960d0d1d65178
[ "MIT" ]
null
null
null
roboTraining/simulate.py
Gabs48/SpringMassNetworks
d917ef242ed99b02f82e9ff8697960d0d1d65178
[ "MIT" ]
1
2021-03-16T15:42:36.000Z
2021-03-16T15:42:36.000Z
from collections import deque import copy import datetime import itertools import numpy as np import matplotlib from matplotlib.mlab import * matplotlib.use('Agg') import matplotlib.pyplot as plt import matplotlib.colors as colors import matplotlib.cm as cmx import os from scipy import signal from robot import Robot from utils import * import sys class Plotter(object): """ create instance with plotting properties --- attributes--- - border: float space at the border of the plot diagram - plotCycle : int number of iterations before the plot is refreshed - startPlot : int iteration number at which plotting starts - plot: boolean should a plot be shown - pauseTime : float time that is paused on the plot """ def __init__(self, border=1.5, plotCycle=10, startPlot=0, text=False, plot=False, movie=False,\ pauseTime=0.00001, movieName="out", color=True, delete=True): self.plot = plot self.movie = movie self.color = color self.delete = delete self.first_it = True if plot: self.border = border self.plotCycle = plotCycle self.startPlot = startPlot self.pauseTime = pauseTime self.fig = plt.subplots(figsize=(10.88, 4.88), dpi=300, ) if text: self.text = plt.text(0.2,0.9,"",ha='center', va = 'center', transform=plt.gca().transAxes)#Left Align Text else: self.text = None self.init = True # plot in initial modus xlist=[]; ylist=[]; self.plt_line, = plt.plot(xlist, ylist) # init drawing plt.fill_between([-1e8, 1e8], -20, 0, facecolor='gray', edgecolor='gray') if movie: self.fileList = [] self.fps = 30 self.frame = 0 if os.name == "posix": directory = os.getenv("HOME") + '/.temp' mkdir_p(directory) self.IMGname = directory + '_tmp%04d.png' else: self.IMGname = 'temp/_tmp%04d.png' self.movieName = movieName if color: jet = cm = plt.get_cmap('spectral') cNorm = colors.Normalize(vmin = 0.5, vmax = 1.5) self.colorMap = cmx.ScalarMappable(norm=cNorm, cmap=jet) def _construct_plot_lines(self, xpos, ypos, connections): """ update properties of instance based on x and y coordinates and connections matrix""" if self.init: self.init = False; plt.ylim(-self.border, self.border + 1.2 * np.max(ypos)) self.xplotwidth = max(xpos)+self.border - (min(xpos)-self.border) """all_lines = np.dstack((np.tile(xpos[:, np.newaxis], (1, len(xpos))), np.tile(ypos[np.newaxis, :], (len(ypos), 1)))) all_lines = np.reshape(all_lines, (-1, 2))""" xlist = [] ylist = [] for i,j in itertools.product(range(len(xpos)), range(len(ypos))): if connections[i,j]: xlist.append(xpos[i]) xlist.append(xpos[j]) xlist.append(None) ylist.append(ypos[i]) ylist.append(ypos[j]) ylist.append(None) return xlist, ylist def update(self, robot, iterationCount = 0): self.draw(robot, iterationCount) def draw(self, robot, iterationCount): """ draw a plot of the robot parameters""" # draw robot if self.plot: if iterationCount % self.plotCycle == self.startPlot % self.plotCycle: xpos, ypos, connections = robot._getRobotPos2D() if self.color: stressRatio = robot.stressRatio() plt.cla() plt.fill_between([-1e8, 1e8], -20, 0, facecolor='gray', edgecolor='gray') self.text = None#plt.text(0.2,0.9,"",ha='center', va = 'center', transform=plt.gca().transAxes) #Left Align Text if self.init: self.init = False; self.maxy = np.max(ypos) plt.ylim(-self.border, self.border + 1.2 * self.maxy) #if self.first_it == True: self.xplotwidth = max(xpos) + self.border - (min(xpos) - self.border) # self.first_it = False for i,j in itertools.product(range(len(xpos)), range(len(ypos))): if connections[i,j]: colorVal = self.colorMap.to_rgba(stressRatio[i,j]) plt.plot([xpos[i], xpos[j]], 
[ypos[i], ypos[j]], color = colorVal) plt.plot(xpos, ypos, 'ko', markersize = 10) else: xlist, ylist = self._construct_plot_lines(xpos, ypos, connections) self.plt_line.set_xdata(xlist) self.plt_line.set_ydata(ylist) minx = min(xpos)-self.border plt.xlim(minx, minx + self.xplotwidth) if self.text is not None: self.text.set_text(robot.printState()) if not self.movie: plt.draw() else: self.frame+= 1; fname = self.IMGname % self.frame plt.savefig(fname) self.fileList.append(fname) plt.pause(self.pauseTime) def end(self): if self.movie: try: os.remove(self.movieName + ".mp4") except OSError: pass print "ffmpeg -r " + str(self.fps) + " -s 1100x700"+ " -i "+ self.IMGname + " -c:v libx264 -r 30 -pix_fmt yuv420p " + self.movieName + ".mp4""" os.system("ffmpeg -r " + str(self.fps) + " -s 1100x700"+ " -i "+ self.IMGname + " -c:v libx264 -r 30 -pix_fmt yuv420p " + self.movieName + ".mp4""") if self.delete: for fname in self.fileList: os.remove(fname) self.fileList = [] self.frame = 0; class SimulationEnvironment(object): """ class with general Parameters for Simulations but not bound to a specific robot""" param = ["timeStep", "simulationLength", "verlet", "refPower", "refDist"] def __init__(self,timeStep=0.005, simulationLength=10000, plot=Plotter(), verlet=True, noisy=False, \ controlPlot=True, pcaPlot=False, pcaTitle="PCA", pcaFilename="pca", pcaMat=None, perfMetr="dist", \ refDist=0 , refPower=0): self.timeStep = timeStep # time step size self.plot = plot # plotting assert isinstance(simulationLength, int), "simulation length should be integer" self.simulationLength = simulationLength # number of iterations self.verlet = verlet; self.noisy = noisy; self.controlPlot = controlPlot self.pcaPlot = pcaPlot self.pcaTitle = pcaTitle self.pcaFilename = pcaFilename self.perfMetr = perfMetr self.pcaMat = pcaMat if self.perfMetr == "powereff" or self.perfMetr == "powersat" or self.perfMetr == "distsat": assert refDist is not 0, refPower is not 0 self.refDist = refDist self.refPower = refPower else: self.refDist = 0 self.refPower = 0 def end(self): return self.plot.end() class ControlPlotter(object): " plot the generated control signals " def __init__(self, robot, simulEnv): self.ydata = np.zeros((0, robot.getNoConnections())) self.xdata = np.zeros((0,1)) self.prev_speed = SpaceList(np.zeros(robot.getShape(),float)) self.simulEnv = simulEnv self.timeStep = simulEnv.timeStep self.filename = simulEnv.pcaFilename self.simulationLength = self.simulEnv.simulationLength self.simulationTime = self.simulEnv.timeStep self.timeArray = np.linspace(0, self.simulationTime, num=self.simulationLength) self.title = simulEnv.pcaTitle self.acc = np.array([]) def addData(self, robot): currentTime = robot.state.currentTime restlength = robot.currentRestLength(array = True) self.ydata = np.vstack((self.ydata, restlength)) self.xdata = np.vstack((self.xdata, [[currentTime]])) def addPCAData(self, robot): """Store PCA values for plotting limitcycle""" speed_it = robot.state.speed acc_it = (speed_it.getArray() - self.prev_speed.getArray()) / self.timeStep if self.acc.size == 0: self.acc = acc_it else: self.acc = np.vstack((self.acc, acc_it)) return def plot(self): fig, ax = Plot.initPlot() ax.plot(self.xdata,self.ydata) Plot.configurePlot(fig,ax, "time","current SpringLength") fig.show() def plotLimitCycle(self, n=None, save=True, show=False): """Plot the limit cycle of x_training and y_trained""" gap = 2 pca = None window = int(np.ceil(self.acc.shape[0]/gap)) vec = self.acc if vec.shape[0] > vec.shape[1]: if self.simulEnv.pcaMat 
== None: pca = PCA(vec) pc1 = pca.Y[:,0] pc2 = pca.Y[:,1] else: pca = self.simulEnv.pcaMat res = pca.project(vec) pc1 = res[:,0] pc2 = res[:,1] else: pc1 = vec[:,0] pc2 = vec[:,1] print " -- Save Limit cycle plot in " + self.filename fig, ax = Plot.initPlot(proj="3d") for j in xrange(1, window): ax.plot(pc1[j*gap:(j+1)*gap+1], pc2[j*gap:(j+1)*gap+1], \ self.timeArray[j*gap:(j+1)*gap+1], \ c=plt.cm.winter(1.*j/window), linewidth=1.2, label="PCA trajectory") ax.set_xlabel('PC 1') ax.set_ylabel('PC 2') ax.set_zlabel('Time') ax.view_init(30, 60) plt.title(self.title) if show: plt.show() if save: plt.savefig(self.filename + ".png", format='png', dpi=300) plt.close() return pca class Simulation(object): """ class to run and store simulation runs, for better results the child class VerletSimulation is advised """ param = ["robot", "simulEnv", "initState"] def __init__(self,simulEnv ,robot): self.simulEnv = simulEnv self.robot = robot self.initState = robot.getState() self.endState = None self.iterationNumber = 0; self.controlPlot = simulEnv.controlPlot self.pcaPlot = simulEnv.pcaPlot if self.controlPlot or self.pcaPlot: self.controlPlotter = ControlPlotter(robot, simulEnv) def simulateStep(self): """ Euler integration for a single time step""" A = self.robot.computeAcceleration() V = self.robot.getVelocity() self.iterationNumber+=1 return self.robot.changeState(self.simulEnv.timeStep, V, A) def runSimulation(self): """ Runs a simulation over a number of iterations and returns the distance travelled""" for i in range(self.simulEnv.simulationLength): self.simulateStep() self.simulEnv.plot.update(self.robot,self.iterationNumber) if self.controlPlot: self.controlPlotter.addData(self.robot) if self.pcaPlot: self.controlPlotter.addPCAData(self.robot) self.simulEnv.end() self.endState = self.robot.getState() if self.controlPlot: self.controlPlotter.plot() if self.pcaPlot: self.simulEnv.pcaMat = self.controlPlotter.plotLimitCycle() return self.performanceMetric() def getDistance(self): """ Return the current travelled distance """ return Robot.getDistanceTraveled(self.initState, self.endState) def performanceMetric(self): """ Return a score to characterize the simulation depending on the chosen performance metric """ # TODOOO: test distsat , powersat and change to speed! 
(normalized in time) distance = self.getDistance() speed = distance / (self.iterationNumber * self.simulEnv.timeStep) power = self.robot.getPower() refPower =self.simulEnv.refPower refDist = self.simulEnv.refDist C = np.arctanh(1.0 / np.sqrt(2)) if self.simulEnv.perfMetr == 'dist': return [distance, power, distance] elif self.simulEnv.perfMetr == 'powereff': return [(np.tanh(C * refPower / power) * np.tanh(C * distance / refDist)), power, distance] elif self.simulEnv.perfMetr == 'powersat': if power > refPower: score = (np.tanh(C * refPower / power) * np.tanh(C * distance / refDist)) else: score = (np.tanh(C * distance / refDist)) return [score, power, distance] elif self.simulEnv.perfMetr == 'distsat': if distance < refDist: score = (np.tanh(C * refPower / power) * np.tanh(C * distance / refDist)) else: score = (np.tanh(C * refPower / power)) return [score, power, distance] else: raise NotImplementedError ('the requested performance metric has not been implemented') class VerletSimulation(Simulation): """ use the Verlet algorithm to obtain more accurate simulations """ def __init__(self, simulEnv, robot, reset = False): if reset: robot.reset() super(VerletSimulation, self).__init__(simulEnv, robot) self.Aold = SpaceList(np.zeros(robot.getShape(),float)) def process(self): """ extend simulateStep in child classes """ def simulateStep(self): self.process() V = self.robot.getVelocity() #print str(self.robot.state.currentTime) + " Updating the states of iteration " + str(self.iterationNumber) timeStep = self.simulEnv.timeStep self.Aold = self.robot.changeStateVerlet(timeStep, V, self.Aold) self.iterationNumber+=1 #print str(self.robot.state.currentTime) + " Updating iteration number: " + str(self.iterationNumber) return self.Aold def getTime(self): return self.robot.state.currentTime class NoisyVerletSimulation(VerletSimulation): """ Simulate noise in Verlet Update steps """ def __init__(self, simulEnv, robot, noise = 0.01, reset = False): self.noise = noise super(NoisyVerletSimulation, self).__init__(simulEnv, robot, reset = reset) def simulateStep(self): self.process() V = self.robot.getVelocity() timeStep = self.simulEnv.timeStep self.iterationNumber+=1 self.Aold = self.robot.changeStateVerlet(timeStep, V, self.Aold, self.noise) return self.Aold class NoisyImpulseVerletSimulation(VerletSimulation): """ Simulate noise in Verlet Update steps """ def __init__(self, simulEnv, robot, noise=1, reset=False, impulserate=0.05, durationRate=100): # Parent constructor super(NoisyImpulseVerletSimulation, self).__init__(simulEnv, robot, reset=reset) # Noise variables self.noise = noise self.noiseArr = None self.durationRate = durationRate self.nImpulse = self.simulEnv.simulationLength * impulserate self.noiseTime = np.random.randint(0, self.simulEnv.simulationLength, int(np.floor(self.nImpulse))) self.noiseIt = 0 def simulateStep(self): self.process() V = self.robot.getVelocity() timeStep = self.simulEnv.timeStep self.iterationNumber+=1 # If step is a noisy one: if self.iterationNumber in self.noiseTime: self.noiseIt = 1 # Estimate impulse noise as percentage of mean acceleration value n_val = self.noise * np.mean(np.abs(self.Aold.getArray())) x_noise = np.random.uniform(- n_val, n_val) y_noise = np.random.uniform(- n_val, n_val) # Select a random node node_val = np.random.randint(0, self.Aold.getnoNodes()) self.noiseArr = np.zeros((2, self.Aold.getnoNodes())) self.noiseArr[0][node_val] = x_noise self.noiseArr[1][node_val] = y_noise self.Aold = self.robot.changeStateVerlet(timeStep, V, self.Aold, 
impulsenoise=self.noiseArr) if self.noiseIt > 0: if self.noiseIt >= self.durationRate: self.noiseIt = 0 else: self.Aold = self.robot.changeStateVerlet(timeStep, V, self.Aold, impulsenoise=self.noiseArr) self.noiseIt += 1 return self.Aold if self.noiseIt == 0: self.Aold = self.robot.changeStateVerlet(timeStep, V, self.Aold) return self.Aold class TrainingSimulation(VerletSimulation): """ Extend VerletSimulation and train an output layer to produce a structured patterns The time is divided in five steps as described in the process method. Defaut training method use a one-shot regression learning from an input vector formed with the positions This class can be extended by rewriting the trainStep, runStep and train methods """ def __init__(self, simulEnv, robot, omega=5, transPhase=0.2, trainPhase=0.6, trainingPlot="all", \ signTime=None, outputFilename="sinusoid", outputFolder="RC", title="None"): """ Init the training test sim class and parent classes - omega is the desired output sinusoid frequency. It should correspond to the frequency of the MSN - transPhase is the proportion of time dedicated to transitoire dynamics before training - trainPhase is the proportion of time dedicated to training - outputFilename and outputFolder are set to save plotting if traingPlot is True - wDistPlot is set to plot the output neurons weight dustribution - signTime can be set to reduce the phase impact to a significant part of the simulation. For instance, if the simulation time is set to 50s, the signTime=30, then, the transitoire trinaing and running phases will only applys to """ super(TrainingSimulation, self).__init__(simulEnv, robot) self.omega = omega self.transPhase = transPhase self.trainPhase = trainPhase self.trainingPlot = trainingPlot self.outputFolder = outputFolder self.outputFilename = outputFilename self.title = title if not signTime: self.signLength = self.simulEnv.simulationLength else: self.signLength = int(np.floor(signTime / self.simulEnv.timeStep)) self.simulationTime = self.simulEnv.timeStep * self.simulEnv.simulationLength self.transLength = int(np.floor(self.transPhase * self.signLength)) self.trainLength = int(np.floor(self.trainPhase * self.signLength)) self.runLength = self.simulEnv.simulationLength - self.trainLength - self.transLength self.inputs = [] self.yTraining = self.create_y_training() self.xTraining = np.array([]) self.yTrained = np.array([]) self.nrmsError = None self.absError = None self.error = None self.weightMatrixDiff = None self.N = self.robot.getState().pos.getArray().shape[1] + self.robot.getState().speed.getArray().shape[1] if len(self.yTraining[self.iterationNumber].shape) != 0: self.O = self.yTraining[self.iterationNumber].shape[0] else: self.O = 1 if self.robot.control.__class__.__name__ == "ClosedLoopSineControl": self.CL = True #print " -- Closed-Loop simulation and control activated. 
Closing the loop from time = " + \ # num2str(self.simulEnv.timeStep * (self.transLength + self.trainLength)) + " s -- " else: self.CL = False mkdir_p(self.outputFolder) def create_y_training(self): """ Create a sinusoid signal of length trainLength to train the output neuron""" self.timeArray = np.linspace(0, self.simulationTime, num=self.simulEnv.simulationLength) # Single sinus # y = np.sin(self.omega * self.timeArray).reshape(-1,1) # Real input values y = [] for t in self.timeArray: line = connections2Array(self.robot.control.modulationFactorTime(t), self.robot.morph.connections) y.append(line) return np.array(y) def neuron_fct(self, x): """ The transition function of the output neuron """ y = 1 + np.tanh(x) return y def trainStep(self): """ Add training data for a given step """ posArray = self.robot.getState().pos.getArray().T if self.xTraining.size == 0: self.xTraining = posArray else: self.xTraining = np.vstack((self.xTraining, posArray)) def train(self): """ Determine the output weight matrix to minimize error """ # Copy input, and output x = self.xTraining y = self.yTraining[self.transLength:self.transLength+self.trainLength] # Compute weights w, res, rank, singVal = np.linalg.lstsq(x, y) np.set_printoptions(threshold=np.inf) self.weightMatrix = w self.yTrained = np.transpose(np.dot(w.T, x)) # Print debug print " -- Network training by linear regression performed. Sum of residues = {:.4f}".format(res[0]) + \ ". Global NRMSE = {:.4f} --".format(self.nrmse(y, np.dot(x, w))) if self.trainingPlot == "all": self.plotW() # Start Closed-Loop mode #self.robot.control.closeLoop() def runStep(self): """ Run the neuron for a given step """ # Get input state vector if hasattr(self, 'hist'): x_it = self.robot.getState().pos.getArray().T self.inputs.pop(0) self.inputs.append(x_it) posArray = np.mat(self.inputs[0]) for i in range(self.hist-1): if i < 3: posArray = np.vstack((posArray, self.inputs[i])) else: if i%4 == 0: posArray = np.vstack((posArray, self.inputs[i])) else: posArray = np.vstack((self.robot.getState().speed.getArray(), self.robot.getState().pos.getArray())) # Compute new signal estimation y_est = np.asarray(np.transpose(np.dot(self.weightMatrix.T, posArray))) # Store estimation in vector if self.yTrained.size == 0: self.yTrained = y_est else: self.yTrained = np.vstack((self.yTrained, y_est)) # Pass the signal estimation to the controller to close the loop stepInput = array2ModFactor(y_est, self.robot.morph.connections) self.robot.control.setStepInput(stepInput) def process(self): """ Add the training process to the normal simulation. 5 steps: - Transiant phase: nothing is done - Training phase: adding the data to the training vector - Training point: linear regression to compute the output neurons weight matrix - Runnning point: replace the control inputs by the trained values - End of sim: save and plot """ it = self.iterationNumber # Training phase (add data to trianing vector) if it >= self.transLength and it < (self.transLength + self.trainLength - 1): #print "2. Training phase. It: " + str(it) self.trainStep() # Training time (linear regression for output neurons) if it == (self.transLength + self.trainLength - 1): #print "3. Training time. It: " + str(it) self.train() self.runStep() # Running phase (connect the neurons output to the robot) if it >= (self.transLength + self.trainLength) and it < (self.simulEnv.simulationLength - 1) : #print "4. Running phase. 
It: " + str(it) self.runStep() # End of simulation (plot everything) if it == self.simulEnv.simulationLength - 1: #print "5. End of simulation. It: " + str(it) self.runStep() self.save() ## If save?? if self.trainingPlot == "all": self.plotWDiff(-self.trainLength) self.plotError(-self.trainLength+50) self.plotInputs() self.plot(n=6000) self.plotLimitCycle() if self.trainingPlot == "cont": self.plotWDiff(-self.trainLength*2/3) self.plotInputs() self.plotError(-self.trainLength+50) self.plot(n=6000, comp=2) if self.trainingPlot == "lc": self.plotLimitCycle() def mse(self, arr1, arr2): """ Compute MSE between two matrices """ assert arr1.shape == arr2.shape, "Mean Square Error can only be computed on matrices with same size" a, b = arr2.shape return np.sum((arr2 - arr1) ** 2) / float(a * b) def nrmse(self, arr1, arr2): """ Compute NRMSE between two matrices """ # Center signals around 0 arr1 = arr1 - 1 arr2 = arr2 - 1 rmse = np.sqrt(self.mse(arr1, arr2)) max_val = max(np.max(arr1), np.max(arr2)) min_val = min(np.min(arr1), np.min(arr2)) return (rmse / (max_val - min_val)) def _numPoints(self, n): """Give the number of points to plot in each phase when using n points""" # Compute number of points of each phase if not n or n > self.simulEnv.simulationLength: n = self.simulEnv.simulationLength n_tot = self.simulEnv.simulationLength n_trans = self.transLength n_train = self.trainLength n_run = self.runLength if n_run > n: n_run = n return [n_trans, n_train, n_run, n_tot, n] def get_training_error(self): """ Fill and return rms error and absolute for all actuators """ y_err = [] if self.nrmsError == None: for i in range(self.O): y_err.append(self.nrmse(self.yTraining[:,i].reshape(-1,1), self.yTrained[:,i].reshape(-1,1))) self.nrmsError = sum(y_err) / float(len(y_err)) print " -- Computing NRMS Error: " + str(self.nrmsError) + " --" y_err = [] if self.absError == None: for i in range(self.O): y_norm = np.max(self.yTraining[:,i]) - np.min(self.yTraining[:,i]) y_err.append(100 * np.max(np.abs(self.yTraining[:,i].reshape(-1,1) - \ self.yTrained[:,i].reshape(-1,1)) / y_norm)) self.absError = sum(y_err) / float(len(y_err)) print " -- Computing Max Absolute Error: {:.2f} %".format(self.absError) + " --" return self.nrmsError, self.absError def plot(self, n=None, comp=None, show=False, save=True): """ Print the driving and trained signals as a fct of time in a file""" [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) # Some arrays init nrmse = 0 y_err = np.zeros(1) # Get number of graphs to print if comp == None: comp = self.O for i in range(comp): # Compute error vector if n_run != 0: nrmse = self.nrmse(self.yTraining[-n_run:, i].reshape(-1,1), self.yTrained[-n_run:, i].reshape(-1,1)) y_norm = np.max(self.yTraining[-n_run:, i]) - np.min(self.yTraining[-n_run:, i]) y_err = np.abs(self.yTraining[-n_run:, i].reshape(-1,1) - self.yTrained[-n_run:, i].reshape(-1,1)) / y_norm # Plot print " -- Generating training graph " + str(i+1) + "/" + str(self.yTraining.shape[1]) + ". 
NRMSE = " + \ str(nrmse) + " -- " fig, ax = Plot.initPlot() for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()): item.set_fontsize(17) plt.plot(self.timeArray[-n:], self.yTraining[-n:, i] ,"r--", label="Training signal") plt.plot(self.timeArray[-n:], np.zeros(n) ,"k-") plt.plot(self.timeArray[-n:-n_run-1], self.yTrained[-n:-n_run-1, i] ,"y-", label="Trained sig (train)") if n_run != 0: plt.plot(self.timeArray[-n_run-1:], self.yTrained[-n_run-1:, i] ,"b-", label="Trained sig (run)") #plt.plot(self.timeArray[-n_run:], y_err[-n_run:] ,"g-", label="Error signal") plt.title("Spring control force " + str(i+1) + ". Maximum error = {:.2f} %".format(100 * np.max(y_err)) + \ ". NRMSE = {:.2f}".format(nrmse)) Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False, legendLocation='lower center') plt.xlabel('Simulation step') plt.ylabel('Signal value') if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_sin_" + str(i+1) + ".png", format='png', dpi=300) plt.close() def plotInputs(self): """ Plot inputs evolution to monitor feddback effect """ plt.plot(self.timeArray, self.acc, "b-") plt.title("Node 1 acceleration evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_acc.png", format='png', dpi=300) plt.close() plt.plot(self.timeArray, self.pos, "") plt.title("Node 1 position evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_pos.png", format='png', dpi=300) plt.close() plt.plot(self.timeArray, self.speed, "") plt.title("Node 1 speed evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_speed.png", format='png', dpi=300) plt.close() def plotW(self, show=False, save=True): """ Plot distribution of wieight matrix W """ fig, ax = Plot.initPlot() plt.hist(self.weightMatrix.reshape(-1, 1), bins=40) plt.title("Distribution of output layer weight matrix") plt.xlabel("Value") plt.ylabel("Frequency") Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_w_hist.png", format='png', dpi=300) plt.close() def save(self, filename=None): """ Save the weights matrix for further simulations """ if filename == None: filename = "weight_matrix_" + timestamp() + ".pkl" #dump_pickle(self, filename) def plotWDiff(self, n=None, show=False, save=True): """ Plot evolution of the weight matrix differences """ neg = False if n != None: if n < 0: neg = True n = abs(n) [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) if n > n_train - 2: n = n_train - 2 fig, ax = Plot.initPlot() if neg: plt.plot(self.timeArray[n_tot-n_run-n:n_tot-n_run], self.weightMatrixDiff[-n:]) else: plt.plot(self.timeArray[n_trans:n_trans+n], self.weightMatrixDiff[0:n]) plt.title("Evolution of the trained weights with time") plt.xlabel("Time") plt.ylabel("Weight matrix derivative") Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_w_diff.png", format='png', dpi=300) def plotError(self, n=None, show=False, save=True): """ Plot error evolution""" neg = False if n != None: if n < 0: neg = True n = abs(n) [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) if n > n_train - 2: n = n_train - 2 fig, ax = Plot.initPlot() if neg: ax.plot(self.timeArray[n_tot-n_run-n:n_tot-n_run], 100*self.error[-n:], linewidth=1.2) else: ax.semilogy(self.timeArray[n_trans:n_trans+n], 100*self.error[0:n], linewidth=1.2) 
Plot.configurePlot(fig, ax, "Time [s]", "Mean Percentage Error on trained signal [\%]", legend = False) if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_error.png", format='png', dpi=300) def plotLimitCycle(self, n=None, show=False, save=True): """Plot the limit cycle of x_training and y_trained""" [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) gap = 100 window = int(np.ceil((n-n_run-n_trans-1)/gap)) vec = self.xTraining.T if vec.shape[0] > vec.shape[1]: res = PCA(vec) pc1 = res.Y[:,0] pc2 = res.Y[:,1] else: pc1 = vec[:,0] pc2 = vec[:,1] print " -- Plot Limit cycle -- " fig, ax = Plot.initPlot()#proj="3d") for j in xrange(1, window): print j, window ax.plot(pc1[j*gap:(j+1)*gap+1], pc2[j*gap:(j+1)*gap+1], \ #self.timeArray[n_trans+1+j*gap:n_trans+1+(j+1)*gap+1], \ c=plt.cm.hot(0.75-0.75*float(j)/window), linewidth=1.2, label="PCA trajectory") ax.set_xlabel('Node acceleration PC 1') ax.set_ylabel('Node acceleration PC 2') plt.title("Limit cycle of " + self.title + " nodes structure") #ax.set_zlabel('Time') #ax.view_init(30, 60) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_lc.png", format='png', dpi=300) plt.close() return class ForceTrainingSimulation(TrainingSimulation): """ Extend the TrainingSimulation class to use FORCE online learning """ ## Parameters to discuss: # Beta and alpha # closing the loop completely # open loop previous to training # Memory of previous accelerations def __init__(self, simulEnv, robot, transPhase=0.2, trainPhase=0.6, trainingPlot=True, \ alpha=1, beta=0.1, openPhase=0.1, signTime=None, wDistPlot=True, outputFilename="sinusoid", \ outputFolder="RC", printPhase=True, title="None"): """ Init class: phases are reparted like this - Transition phase: nothing happens here, we let the dynamics stabilizes - Training phase: here we do online weights value training. 
The trianing phase itself is divided in three parts: - OpenLoop phase: here, we start training but we won't feed the generated signal until it stabilizes - Closing phase: here, we gradually mix the feedback signal sith the target signal - The rest is dedicates to Run phase """ # Fix here the training and running phase if needed super(ForceTrainingSimulation, self).__init__(simulEnv, robot, transPhase=transPhase, \ trainPhase=trainPhase, trainingPlot=trainingPlot, title=title, \ signTime=signTime, outputFilename=outputFilename, outputFolder=outputFolder) # Class variables self.openPhase = openPhase self.openLength = int(np.floor(self.openPhase * self.trainLength)) self.closedPhase = 1 - self.openPhase self.closedLength = int(np.floor(self.closedPhase * self.trainLength)) self.printPhase = printPhase # Algorithm constants self.alpha = alpha self.beta = beta self.hist = 3 self.eWin = 5 # Algorithm matrices self.trainIt = 0 self.a = 0 self.po = 0 self.v = 0 self.w = None self.p = None self.error = None self.yTrained = np.array([]) self.acc = np.array([]) self.accf = np.array([]) self.speed = np.array([]) self.pos = np.array([]) # Create actuator threshold and low pass filtering self.fc = 3 # Hz ## REPLACE THOSE ABSOLUTE VALUES self.thh = 1 + 0.5 # Higher value of thresholding self.thl = 1 - 0.5 # Lower value for thresholding self.order = 120 # Filter order self.buffLen = 3 * self.order # Signal buffer length for filtering self.filt_b = signal.firwin(self.order, self.fc * self.simulEnv.timeStep) self.filt_a = [1] #signal.butter(self.order, self.fc * self.simulEnv.timeStep, 'low') self.filt_fifo = [] # Create a fifo self.filt_fifo_2 = [] # Create a fifo def createNeuronLayer(self, size, x): """ Create the weights values for an intermediate layer of neurons""" self.neuron_layer_size = size self.neuron_layer_norm_factor = 10 self.neuron_layer_w = [] self.neuron_layer_b = [] for i in range(size): self.neuron_layer_w.append(2 * np.array(np.random.rand(x.shape[0])) - 1) self.neuron_layer_b.append(0.5 * np.random.rand() - 0.25) def neuronLayerFct(self, x): """ Run non-linear function on input vector through an neuronal layer""" y = [] for n in range(self.neuron_layer_size): x = x / self.neuron_layer_norm_factor y.append(np.tanh(np.dot(self.neuron_layer_w[n], x)))# + self.neuron_layer_b[n])) return np.mat(y).T def physActFilter(self, predSig): """ Apply filtering and thresholding to model physical actuation properties and avoid numerical instabilities """ # Threshold current value #print "Predicted signal: " + str(predSig) thSig = np.clip(predSig, self.thl, self.thh) #print "Thresholded signal: " + str(thSig) # Get prev time_steps and convert to numpy matrix if not self.filt_fifo: for i in range(self.buffLen): self.filt_fifo.append(np.zeros(predSig.shape)) self.filt_fifo.pop(0) self.filt_fifo.append(thSig) sigMat = np.mat(self.filt_fifo[0]) for i in range(len(self.filt_fifo)): sigMat = np.vstack((sigMat, self.filt_fifo[i])) # Filter (TODO: filtering too much useless points each timestep here) filtSig = np.zeros(sigMat.shape[1]) for i in range(sigMat.shape[1]): filtSig[i] = signal.lfilter(self.filt_b, self.filt_a, sigMat[:, i].T)[:, -1] return filtSig def physSensFilter(self, sig): """ Apply filtering and thresholding to model physical sensor properties and avoid numerical instabilities """ # Get prev time_steps and convert to numpy matrix if not self.filt_fifo_2: for i in range(self.buffLen): self.filt_fifo_2.append(np.zeros(sig.shape)) self.filt_fifo_2.pop(0) self.filt_fifo_2.append(sig) sigMat = 
np.mat(self.filt_fifo_2[0]) for i in range(len(self.filt_fifo_2)): sigMat = np.vstack((sigMat, self.filt_fifo_2[i])) # Filter (TODO: filtering too much useless points each timestep here) filtSig = np.zeros(sigMat.shape[1]) for i in range(sigMat.shape[1]): filtSig[i] = signal.lfilter(self.filt_b, self.filt_a, sigMat[:, i].T)[:, -1] return filtSig def runStep(self): """ Run the neuron for a given step """ # Get robot current state a_it = self.Aold.getArray()[0, :] v_it = self.robot.getState().speed.getArray()[0, :] pos_it = self.robot.getState().pos.getArray()[0, :] da_it = a_it - self.a self.a = a_it self.po = pos_it self.v = v_it # Process acceleration vector through a non-linear layer x_it = np.mat(self.physSensFilter(a_it)).T #self.neuronLayerFct(pos_it) # Get input vector if not self.inputs: raise('Error: No trianing before running') self.inputs.pop(0) self.inputs.append(x_it) x = np.mat(self.inputs[0]) for i in range(self.hist-1): #if i < 4: x = np.vstack((x, self.inputs[i])) #else: #if i%4 == 0: # x = np.vstack((x, self.inputs[i])) # Compute new estimation and filter it y_est = self.neuron_fct(np.asarray(self.w_prev.T * x).T) filt_y_est = y_est[0] # Store estimation in vector if self.yTrained.size == 0: self.yTrained = filt_y_est else: self.yTrained = np.vstack((self.yTrained, filt_y_est)) self.acc = np.vstack((self.acc, x[0:1].T)) self.speed = np.vstack((self.speed, v_it)) self.pos = np.vstack((self.pos, pos_it)) # Filter and pass the signal estimation to the controller to close the loop self.robot.control.setStepInput(filt_y_est) # Print if self.iterationNumber == self.simulEnv.simulationLength - 1: if self.printPhase: self.printSim() def trainStep(self): """ Add training data for a given step """ # Get robot current state #print str(self.robot.state.currentTime) + " Getting states of iteration " + str(self.iterationNumber) a_it = self.Aold.getArray()[0, :] v_it = self.robot.getState().speed.getArray()[0, :] pos_it = self.robot.getState().pos.getArray()[0, :] da_it = a_it - self.a self.a = a_it self.po = pos_it self.v = v_it # Create a non-linear layer of neurons and process acc vector through it #if self.trainIt == 0: # self.createNeuronLayer(20, pos_it) x_it = np.mat(self.physSensFilter(a_it)).T # self.neuronLayerFct(pos_it) # If the inputs fifo hasen't been created, do it if not self.inputs: self.N = x_it.shape[0] for i in range(self.hist): self.inputs.append(np.zeros((self.N, 1))) # Update the inputs fifo (TODO: useless if the whole xtraining is already savec below) self.inputs.pop(0) self.inputs.append(x_it) # Fill the xTraining vector (usefull for plotting limit cycle) if self.xTraining.size == 0: self.xTraining = x_it else: self.xTraining = np.hstack((self.xTraining, x_it)) # Get current learning algo inputs and supervized ouput x = np.mat(self.inputs[0]) for i in range(self.hist-1): #if i < 4: x = np.vstack((x, self.inputs[i])) #else: #if i%4 == 0: # x = np.vstack((x, self.inputs[i])) y = np.mat(self.yTraining[self.iterationNumber+1]) # If first iteration, init with random weights if self.trainIt == 0: w = np.random.rand(x.shape[0], self.O) p = np.identity(x.shape[0]) / self.alpha yTrained = self.neuron_fct(np.asarray(w.T * x).T) filtYTrained = yTrained[0] self.yTrained = filtYTrained self.acc = a_it[0] self.speed = v_it self.pos = pos_it #self.error = y - yTrained # Else update else: # Update inverse Correlation Matrix of the network states p_prev = np.mat(self.p_prev) den = 1 + x.T * p_prev * x num = p_prev * x * x.T * p_prev p = p_prev - num / den # Update weight matrix 
w_prev = np.mat(self.w_prev) # Compute minimal error a window if self.iterationNumber > self.eWin: e_lim_min = self.eWin else: e_lim_min = self.iterationNumber if self.iterationNumber < self.simulEnv.simulationLength - self.eWin: e_lim_max = self.eWin else: e_lim_max = self.simulEnv.simulationLength - self.iterationNumber e_i = self.neuron_fct(w_prev.T * x) - np.mat(self.yTraining[self.iterationNumber-e_lim_min]).T for i in range(self.iterationNumber-e_lim_min+1, self.iterationNumber+e_lim_max): e_i = np.hstack((e_i, self.neuron_fct(w_prev.T * x) - np.mat(self.yTraining[i]).T)) e_p = np.amin(e_i, axis=1) #e_p = w_prev.T * x - y.T #e_p = e_i[:, e_p_arg] # Update weight matrix l = (p * x) / (1 + x.T * p * x) w = w_prev - p * x * e_p.T # Fill the w error vector (usefull for plotting W convergence) if self.weightMatrixDiff == None: self.weightMatrixDiff = np.max(np.abs(w - w_prev)) else: self.weightMatrixDiff = np.hstack((self.weightMatrixDiff, np.max(np.abs(w - w_prev)))) # Update output yTrained = self.neuron_fct(np.asarray(w.T * x).T) filtYTrained = yTrained[0] self.yTrained = np.vstack((self.yTrained, np.asarray(filtYTrained))) self.acc = np.vstack((self.acc, a_it[0])) self.speed = np.vstack((self.speed, v_it)) self.pos = np.vstack((self.pos, pos_it)) # Update error (usefull for plotting error evolution) if self.error == None: self.error = np.mean(np.abs(y - filtYTrained)) else: self.error = np.hstack((self.error, np.mean(np.abs(y - filtYTrained)))) # start Closed-Loop mode if self.trainIt == self.openLength: self.robot.control.closeLoop(self.closedLength, beta=self.beta) # Pass the signal estimation to the controller to close the loop self.robot.control.setStepInput(filtYTrained) # Update iteration self.trainIt += 1 self.w_prev = w self.p_prev = p return def printSim(self): """ Save some useful information regarding the simulation proceeding """ with open("./" + self.outputFolder + "/" + self.outputFilename + ".txt", "w+") as file: file.write(" Phase name | length | t_start | t_stop |\n") file.write("------------------------------------------------\n") file.write(" Transitoire | " + str(self.transLength) + " | " + \ str(0.0) + " | " + \ str(self.transLength*self.simulEnv.timeStep) + " |\n") file.write(" Training | " + str(self.trainLength) + " | " + \ str(self.transLength*self.simulEnv.timeStep) + " | " + \ str((self.trainLength+self.transLength)*self.simulEnv.timeStep) + " |\n") file.write(" Running | " + str(self.runLength) + " | " + \ str((self.trainLength+self.transLength)*self.simulEnv.timeStep) + " | " + \ str(self.simulEnv.simulationLength*self.simulEnv.timeStep) + " |\n") file.write("------------------------------------------------\n") file.write(" Open Loop | " + str((self.openLength)) + " | " + \ str(self.transLength*self.simulEnv.timeStep) + " | " + \ str((self.transLength+self.openLength)*self.simulEnv.timeStep) + " |\n") file.write(" Closing Loop | " + str(self.closedLength) + " | " + \ str((self.transLength+self.openLength)*self.simulEnv.timeStep) + " | " + \ str((self.transLength+self.openLength+self.closedLength)*self.simulEnv.timeStep) + " |\n") file.write("------------------------------------------------\n") file.write("Parameters: alpha=" + str(self.alpha) + " beta=" + str(self.beta)) file.close() def train(self): """ Nothing to do here as FORCE is an online method """ self.weightMatrix = self.w_prev if self.trainingPlot == "cont" or self.trainingPlot == "all" : self.plotW() # Start Closed-Loop mode print " -- Training phase finished -- " #self.robot.control.closeLoop() 
return ### Absolutely remove this shit in inheritance!!! class NoisyTrainingSimulation(NoisyVerletSimulation): """ Extend VerletSimulation and train an output layer to produce a structured patterns The time is divided in five steps as described in the process method. Defaut training method use a one-shot regression learning from an input vector formed with the positions This class can be extended by rewriting the trainStep, runStep and train methods """ def __init__(self, simulEnv, robot, omega=5, transPhase=0.2, trainPhase=0.6, trainingPlot="all", \ simNoise=0.01, signTime=None, outputFilename="sinusoid", outputFolder="RC"): """ Init the training test sim class and parent classes - omega is the desired output sinusoid frequency. It should correspond to the frequency of the MSN - transPhase is the proportion of time dedicated to transitoire dynamics before training - trainPhase is the proportion of time dedicated to training - outputFilename and outputFolder are set to save plotting if traingPlot is True - wDistPlot is set to plot the output neurons weight dustribution - signTime can be set to reduce the phase impact to a significant part of the simulation. For instance, if the simulation time is set to 50s, the signTime=30, then, the transitoire trinaing and running phases will only applys to """ super(NoisyTrainingSimulation, self).__init__(simulEnv, robot, noise=simNoise) self.omega = omega self.transPhase = transPhase self.trainPhase = trainPhase self.trainingPlot = trainingPlot self.outputFolder = outputFolder self.outputFilename = outputFilename if not signTime: self.signLength = self.simulEnv.simulationLength else: self.signLength = int(np.floor(signTime / self.simulEnv.timeStep)) self.simulationTime = self.simulEnv.timeStep * self.simulEnv.simulationLength self.transLength = int(np.floor(self.transPhase * self.signLength)) self.trainLength = int(np.floor(self.trainPhase * self.signLength)) self.runLength = self.simulEnv.simulationLength - self.trainLength - self.transLength self.inputs = [] self.yTraining = self.create_y_training() self.xTraining = np.array([]) self.yTrained = np.array([]) self.nrmsError = None self.absError = None self.error = None self.weightMatrixDiff = None self.N = self.robot.getState().pos.getArray().shape[1] + self.robot.getState().speed.getArray().shape[1] if len(self.yTraining[self.iterationNumber].shape) != 0: self.O = self.yTraining[self.iterationNumber].shape[0] else: self.O = 1 if self.robot.control.__class__.__name__ == "ClosedLoopSineControl": self.CL = True #print " -- Closed-Loop simulation and control activated. 
Closing the loop from time = " + \ # num2str(self.simulEnv.timeStep * (self.transLength + self.trainLength)) + " s -- " else: self.CL = False mkdir_p(self.outputFolder) def create_y_training(self): """ Create a sinusoid signal of length trainLength to train the output neuron""" self.timeArray = np.linspace(0, self.simulationTime, num=self.simulEnv.simulationLength) # Single sinus # y = np.sin(self.omega * self.timeArray).reshape(-1,1) # Real input values y = [] for t in self.timeArray: line = connections2Array(self.robot.control.modulationFactorTime(t), self.robot.morph.connections) y.append(line) return np.array(y) def neuron_fct(self, x): """ The transition function of the output neuron """ y = 1 + np.tanh(x) return y def trainStep(self): """ Add training data for a given step """ posArray = self.robot.getState().pos.getArray().T if self.xTraining.size == 0: self.xTraining = posArray else: self.xTraining = np.vstack((self.xTraining, posArray)) def train(self): """ Determine the output weight matrix to minimize error """ # Copy input, and output x = self.xTraining y = self.yTraining[self.transLength:self.transLength+self.trainLength] # Compute weights w, res, rank, singVal = np.linalg.lstsq(x, y) np.set_printoptions(threshold=np.inf) self.weightMatrix = w self.yTrained = np.transpose(np.dot(w.T, x)) # Print debug print " -- Network training by linear regression performed. Sum of residues = {:.4f}".format(res[0]) + \ ". Global NRMSE = {:.4f} --".format(self.nrmse(y, np.dot(x, w))) if self.trainingPlot == "all": self.plotW() # Start Closed-Loop mode #self.robot.control.closeLoop() def runStep(self): """ Run the neuron for a given step """ # Get input state vector if hasattr(self, 'hist'): x_it = self.robot.getState().pos.getArray().T self.inputs.pop(0) self.inputs.append(x_it) posArray = np.mat(self.inputs[0]) for i in range(self.hist-1): if i < 3: posArray = np.vstack((posArray, self.inputs[i])) else: if i%4 == 0: posArray = np.vstack((posArray, self.inputs[i])) else: posArray = np.vstack((self.robot.getState().speed.getArray(), self.robot.getState().pos.getArray())) # Compute new signal estimation y_est = np.asarray(np.transpose(np.dot(self.weightMatrix.T, posArray))) # Store estimation in vector if self.yTrained.size == 0: self.yTrained = y_est else: self.yTrained = np.vstack((self.yTrained, y_est)) # Pass the signal estimation to the controller to close the loop stepInput = array2ModFactor(y_est, self.robot.morph.connections) self.robot.control.setStepInput(stepInput) def process(self): """ Add the training process to the normal simulation. 5 steps: - Transiant phase: nothing is done - Training phase: adding the data to the training vector - Training point: linear regression to compute the output neurons weight matrix - Runnning point: replace the control inputs by the trained values - End of sim: save and plot """ it = self.iterationNumber # Training phase (add data to trianing vector) if it >= self.transLength and it < (self.transLength + self.trainLength - 1): #print "2. Training phase. It: " + str(it) self.trainStep() # Training time (linear regression for output neurons) if it == (self.transLength + self.trainLength - 1): #print "3. Training time. It: " + str(it) self.train() self.runStep() # Running phase (connect the neurons output to the robot) if it >= (self.transLength + self.trainLength) and it < (self.simulEnv.simulationLength - 1) : #print "4. Running phase. 
It: " + str(it) self.runStep() # End of simulation (plot everything) if it == self.simulEnv.simulationLength - 1: #print "5. End of simulation. It: " + str(it) self.runStep() self.save() ## If save?? if self.trainingPlot == "all": self.plotWDiff(-self.trainLength) self.plotError(-self.trainLength+50) self.plotInputs() self.plot(n=6000) self.plotLimitCycle() if self.trainingPlot == "cont": self.plotWDiff(-self.trainLength*2/3) self.plotInputs() self.plotError(-self.trainLength+50) self.plot(n=6000, comp=2) self.plotLimitCycle() def mse(self, arr1, arr2): """ Compute MSE between two matrices """ assert arr1.shape == arr2.shape, "Mean Square Error can only be computed on matrices with same size" a, b = arr2.shape return np.sum((arr2 - arr1) ** 2) / float(a * b) def nrmse(self, arr1, arr2): """ Compute NRMSE between two matrices """ # Center signals around 0 arr1 = arr1 - 1 arr2 = arr2 - 1 rmse = np.sqrt(self.mse(arr1, arr2)) max_val = max(np.max(arr1), np.max(arr2)) min_val = min(np.min(arr1), np.min(arr2)) return 1 - (rmse / (max_val - min_val)) def _numPoints(self, n): """Give the number of points to plot in each phase when using n points""" # Compute number of points of each phase if not n or n > self.simulEnv.simulationLength: n = self.simulEnv.simulationLength n_tot = self.simulEnv.simulationLength n_trans = self.transLength n_train = self.trainLength n_run = self.runLength if n_run > n: n_run = n return [n_trans, n_train, n_run, n_tot, n] def get_training_error(self): """ Fill and return rms error and absolute for all actuators """ y_err = [] if self.nrmsError == None: for i in range(self.O): print self.yTraining[:,i].reshape(-1,1), self.yTraining[:,i].reshape(-1,1)-1 y_err.append(self.nrmse(self.yTraining[:,i].reshape(-1,1), self.yTrained[:,i].reshape(-1,1),)) self.nrmsError = sum(y_err) / float(len(y_err)) print " -- Computing NRMS Error: " + str(self.nrmsError) + " --" y_err = [] if self.absError == None: for i in range(self.O): y_norm = np.max(self.yTraining[:,i]) - np.min(self.yTraining[:,i]) y_err.append(100 * np.max(np.abs(self.yTraining[:,i].reshape(-1,1) - \ self.yTrained[:,i].reshape(-1,1)) / y_norm)) self.absError = sum(y_err) / float(len(y_err)) print " -- Computing Max Absolute Error: {:.2f} %".format(self.absError) + " --" return self.nrmsError, self.absError def plot(self, n=None, comp=None, show=False, save=True): """ Print the driving and trained signals as a fct of time in a file""" [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) # Some arrays init nrmse = 0 y_err = np.zeros(1) # Get number of graphs to print if comp == None: comp = self.O for i in range(comp): # Compute error vector if n_run != 0: nrmse = self.nrmse(self.yTraining[-n_run:, i].reshape(-1,1), self.yTrained[-n_run:, i].reshape(-1,1)) y_norm = np.max(self.yTraining[-n_run:, i]) - np.min(self.yTraining[-n_run:, i]) y_err = np.abs(self.yTraining[-n_run:, i].reshape(-1,1) - self.yTrained[-n_run:, i].reshape(-1,1)) / y_norm # Plot print " -- Generating training graph " + str(i+1) + "/" + str(self.yTraining.shape[1]) + ". 
NRMSE = " + \ str(nrmse) + " -- " fig, ax = Plot.initPlot() for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()): item.set_fontsize(17) plt.plot(self.timeArray[-n:], self.yTraining[-n:, i] ,"r--", label="Training signal") plt.plot(self.timeArray[-n:], np.zeros(n) ,"k-") plt.plot(self.timeArray[-n:-n_run-1], self.yTrained[-n:-n_run-1, i] ,"y-", label="Trained sig (train)") if n_run != 0: plt.plot(self.timeArray[-n_run-1:], self.yTrained[-n_run-1:, i] ,"b-", label="Trained sig (run)") #plt.plot(self.timeArray[-n_run:], y_err[-n_run:] ,"g-", label="Error signal") plt.title("Spring control force " + str(i+1) + ". Maximum error = {:.2f} %".format(100 * np.max(y_err)) + \ ". NRMSE = {:.2f}".format(nrmse)) Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False, legendLocation='lower center') plt.xlabel('Simulation step') plt.ylabel('Signal value') if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_sin_" + str(i+1) + ".png", format='png', dpi=300) plt.close() def plotInputs(self): """ Plot inputs evolution to monitor feddback effect """ plt.plot(self.timeArray, self.acc, "b-") plt.title("Node 1 acceleration evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_acc.png", format='png', dpi=300) plt.close() plt.plot(self.timeArray, self.pos, "") plt.title("Node 1 position evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_pos.png", format='png', dpi=300) plt.close() plt.plot(self.timeArray, self.speed, "") plt.title("Node 1 speed evolution") plt.savefig(self.outputFolder + "/" + self.outputFilename + "_speed.png", format='png', dpi=300) plt.close() def plotW(self, show=False, save=True): """ Plot distribution of wieight matrix W """ fig, ax = Plot.initPlot() plt.hist(self.weightMatrix.reshape(-1, 1), bins=40) plt.title("Distribution of output layer weight matrix") plt.xlabel("Value") plt.ylabel("Frequency") Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_w_hist.png", format='png', dpi=300) plt.close() def save(self, filename=None): """ Save the weights matrix for further simulations """ if filename == None: filename = "weight_matrix_" + timestamp() + ".pkl" #dump_pickle(self, filename) def plotWDiff(self, n=None, show=False, save=True): """ Plot evolution of the weight matrix differences """ neg = False if n != None: if n < 0: neg = True n = abs(n) [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) if n > n_train - 2: n = n_train - 2 fig, ax = Plot.initPlot() if neg: plt.plot(self.timeArray[n_tot-n_run-n:n_tot-n_run], self.weightMatrixDiff[-n:]) else: plt.plot(self.timeArray[n_trans:n_trans+n], self.weightMatrixDiff[0:n]) plt.title("Evolution of the trained weights with time") plt.xlabel("Time") plt.ylabel("Weight matrix derivative") Plot.configurePlot(fig, ax, 'Temp', 'Temp', legend=False) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_w_diff.png", format='png', dpi=300) def plotError(self, n=None, show=False, save=True): """ Plot error evolution""" neg = False if n != None: if n < 0: neg = True n = abs(n) [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) if n > n_train - 2: n = n_train - 2 fig, ax = Plot.initPlot() if neg: plt.plot(self.timeArray[n_tot-n_run-n:n_tot-n_run], self.error[-n:]) else: plt.plot(self.timeArray[n_trans:n_trans+n], self.error[0:n]) plt.title("Error evolution") plt.xlabel("Time") 
plt.ylabel("Error") if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_error.png", format='png', dpi=300) def plotLimitCycle(self, n=None, show=False, save=True): """Plot the limit cycle of x_training and y_trained""" [n_trans, n_train, n_run, n_tot, n] = self._numPoints(n) gap = 2 window = int(np.ceil((n-n_run-n_trans-1)/gap)) vec = self.xTraining.T if vec.shape[0] > vec.shape[1]: res = PCA(vec) pc1 = res.Y[:,0] pc2 = res.Y[:,1] else: pc1 = vec[:,0] pc2 = vec[:,1] fig, ax = Plot.initPlot()#proj="3d") for j in xrange(1, window): ax.plot(pc1[j*gap:(j+1)*gap+1], pc2[j*gap:(j+1)*gap+1], \ #self.timeArray[n_trans+1+j*gap:n_trans+1+(j+1)*gap+1], \ c=plt.cm.winter(1.*j/window), linewidth=1.2, label="PCA trajectory") ax.set_xlabel('PC 1') ax.set_ylabel('PC 2') #ax.set_zlabel('Time') #ax.view_init(30, 60) if show: plt.show() if save: plt.savefig(self.outputFolder + "/" + self.outputFilename + "_lc.png", format='png', dpi=300) plt.close() return class NoisyForceTrainingSimulation(NoisyTrainingSimulation): """ Extend the TrainingSimulation class to use FORCE online learning """ ## Parameters to discuss: # Beta and alpha # closing the loop completely # open loop previous to training # Memory of previous accelerations def __init__(self, simulEnv, robot, transPhase=0.2, trainPhase=0.6, trainingPlot=True, \ alpha=1, beta=0.1, openPhase=0.1, signTime=None, wDistPlot=True, outputFilename="sinusoid", \ simNoise=0.01, outputFolder="RC", printPhase=True): """ Init class: phases are reparted like this - Transition phase: nothing happens here, we let the dynamics stabilizes - Training phase: here we do online weights value training. The trianing phase itself is divided in three parts: - OpenLoop phase: here, we start training but we won't feed the generated signal until it stabilizes - Closing phase: here, we gradually mix the feedback signal sith the target signal - The rest is dedicates to Run phase """ # Fix here the training and running phase if needed super(NoisyForceTrainingSimulation, self).__init__(simulEnv, robot, transPhase=transPhase, \ simNoise=simNoise, trainPhase=trainPhase, trainingPlot=trainingPlot, \ signTime=signTime, outputFilename=outputFilename, outputFolder=outputFolder) # Class variables self.openPhase = openPhase self.openLength = int(np.floor(self.openPhase * self.trainLength)) self.closedPhase = 1 - self.openPhase self.closedLength = int(np.floor(self.closedPhase * self.trainLength)) self.printPhase = printPhase # Algorithm constants self.alpha = alpha self.beta = beta self.hist = 3 self.eWin = 5 # Algorithm matrices self.trainIt = 0 self.a = 0 self.po = 0 self.v = 0 self.w = None self.p = None self.error = None self.yTrained = np.array([]) self.acc = np.array([]) self.accf = np.array([]) self.speed = np.array([]) self.pos = np.array([]) # Create actuator threshold and low pass filtering self.fc = 3 # Hz ## REPLACE THOSE ABSOLUTE VALUES self.thh = 1 + 0.5 # Higher value of thresholding self.thl = 1 - 0.5 # Lower value for thresholding self.order = 120 # Filter order self.buffLen = 3 * self.order # Signal buffer length for filtering self.filt_b = signal.firwin(self.order, self.fc * self.simulEnv.timeStep) self.filt_a = [1] #signal.butter(self.order, self.fc * self.simulEnv.timeStep, 'low') self.filt_fifo = [] # Create a fifo self.filt_fifo_2 = [] # Create a fifo def createNeuronLayer(self, size, x): """ Create the weights values for an intermediate layer of neurons""" self.neuron_layer_size = size self.neuron_layer_norm_factor = 10 
    def createNeuronLayer(self, size, x):
        """ Create the weight values for an intermediate layer of neurons """
        self.neuron_layer_size = size
        self.neuron_layer_norm_factor = 10
        self.neuron_layer_w = []
        self.neuron_layer_b = []
        for i in range(size):
            self.neuron_layer_w.append(2 * np.array(np.random.rand(x.shape[0])) - 1)
            self.neuron_layer_b.append(0.5 * np.random.rand() - 0.25)

    def neuronLayerFct(self, x):
        """ Run the input vector through a non-linear neuronal layer """
        y = []
        for n in range(self.neuron_layer_size):
            x = x / self.neuron_layer_norm_factor
            y.append(np.tanh(np.dot(self.neuron_layer_w[n], x)))  # + self.neuron_layer_b[n]))
        return np.mat(y).T

    def physActFilter(self, predSig):
        """ Apply filtering and thresholding to model physical actuation
            properties and avoid numerical instabilities """
        # Threshold the current value
        # print "Predicted signal: " + str(predSig)
        thSig = np.clip(predSig, self.thl, self.thh)
        # print "Thresholded signal: " + str(thSig)

        # Get the previous time steps and convert them to a numpy matrix
        if not self.filt_fifo:
            for i in range(self.buffLen):
                self.filt_fifo.append(np.zeros(predSig.shape))
        self.filt_fifo.pop(0)
        self.filt_fifo.append(thSig)
        sigMat = np.mat(self.filt_fifo[0])
        for i in range(len(self.filt_fifo)):
            sigMat = np.vstack((sigMat, self.filt_fifo[i]))

        # Filter (TODO: too many useless points are filtered each timestep here)
        filtSig = np.zeros(sigMat.shape[1])
        for i in range(sigMat.shape[1]):
            filtSig[i] = signal.lfilter(self.filt_b, self.filt_a, sigMat[:, i].T)[:, -1]
        return filtSig

    def physSensFilter(self, sig):
        """ Apply filtering and thresholding to model physical sensor
            properties and avoid numerical instabilities """
        # Get the previous time steps and convert them to a numpy matrix
        if not self.filt_fifo_2:
            for i in range(self.buffLen):
                self.filt_fifo_2.append(np.zeros(sig.shape))
        self.filt_fifo_2.pop(0)
        self.filt_fifo_2.append(sig)
        sigMat = np.mat(self.filt_fifo_2[0])
        for i in range(len(self.filt_fifo_2)):
            sigMat = np.vstack((sigMat, self.filt_fifo_2[i]))

        # Filter (TODO: too many useless points are filtered each timestep here)
        filtSig = np.zeros(sigMat.shape[1])
        for i in range(sigMat.shape[1]):
            filtSig[i] = signal.lfilter(self.filt_b, self.filt_a, sigMat[:, i].T)[:, -1]
        return filtSig

    def runStep(self):
        """ Run the neuron for a given step """
        # Get the robot's current state
        a_it = self.Aold.getArray()[0, :]
        v_it = self.robot.getState().speed.getArray()[0, :]
        pos_it = self.robot.getState().pos.getArray()[0, :]
        da_it = a_it - self.a
        self.a = a_it
        self.po = pos_it
        self.v = v_it

        # Process the acceleration vector through a non-linear layer
        x_it = np.mat(self.physSensFilter(a_it)).T  # self.neuronLayerFct(pos_it)

        # Get the input vector
        if not self.inputs:
            raise RuntimeError('Error: no training was performed before running')
        self.inputs.pop(0)
        self.inputs.append(x_it)
        x = np.mat(self.inputs[0])
        for i in range(self.hist - 1):
            # if i < 4:
            x = np.vstack((x, self.inputs[i]))
            # else:
            #     if i % 4 == 0:
            #         x = np.vstack((x, self.inputs[i]))

        # Compute a new estimation and filter it
        y_est = self.neuron_fct(np.asarray(self.w_prev.T * x).T)
        filt_y_est = y_est[0]

        # Store the estimation in a vector
        if self.yTrained.size == 0:
            self.yTrained = filt_y_est
        else:
            self.yTrained = np.vstack((self.yTrained, filt_y_est))
        self.acc = np.vstack((self.acc, x[0:1].T))
        self.speed = np.vstack((self.speed, v_it))
        self.pos = np.vstack((self.pos, pos_it))

        # Filter and pass the signal estimation to the controller to close the loop
        self.robot.control.setStepInput(filt_y_est)

        # Print
        if self.iterationNumber == self.simulEnv.simulationLength - 1:
            if self.printPhase:
                self.printSim()
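    # Illustrative sketch (not part of the original class): the two filters
    # above smooth each channel with an FIR low-pass filter designed by
    # scipy.signal.firwin and applied with signal.lfilter. Minimal standalone
    # version of that step on a noisy sine; fs, fc and the filter order are
    # made-up illustration values.
    @staticmethod
    def _fir_lowpass_sketch():
        fs, fc, order = 100.0, 3.0, 120            # sample rate (Hz), cutoff (Hz), taps
        t = np.arange(0, 10, 1.0 / fs)
        noisy = np.sin(2 * np.pi * 0.5 * t) + 0.1 * np.random.randn(t.size)
        b = signal.firwin(order, fc / (fs / 2.0))  # cutoff normalised to Nyquist
        return signal.lfilter(b, [1], noisy)       # FIR filter: denominator is just [1]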
    def trainStep(self):
        """ Add training data for a given step """
        # Get the robot's current state
        # print str(self.robot.state.currentTime) + " Getting states of iteration " + str(self.iterationNumber)
        a_it = self.Aold.getArray()[0, :]
        v_it = self.robot.getState().speed.getArray()[0, :]
        pos_it = self.robot.getState().pos.getArray()[0, :]
        da_it = a_it - self.a
        self.a = a_it
        self.po = pos_it
        self.v = v_it

        # Create a non-linear layer of neurons and process the acc vector through it
        # if self.trainIt == 0:
        #     self.createNeuronLayer(20, pos_it)
        x_it = np.mat(self.physSensFilter(a_it)).T  # self.neuronLayerFct(pos_it)

        # If the inputs fifo hasn't been created, do it
        if not self.inputs:
            self.N = x_it.shape[0]
            for i in range(self.hist):
                self.inputs.append(np.zeros((self.N, 1)))

        # Update the inputs fifo (TODO: useless if the whole xTraining is already saved below)
        self.inputs.pop(0)
        self.inputs.append(x_it)

        # Fill the xTraining vector (useful for plotting the limit cycle)
        if self.xTraining.size == 0:
            self.xTraining = x_it
        else:
            self.xTraining = np.hstack((self.xTraining, x_it))

        # Get the current learning algorithm inputs and the supervised output
        x = np.mat(self.inputs[0])
        for i in range(self.hist - 1):
            # if i < 4:
            x = np.vstack((x, self.inputs[i]))
            # else:
            #     if i % 4 == 0:
            #         x = np.vstack((x, self.inputs[i]))
        y = np.mat(self.yTraining[self.iterationNumber + 1])

        # If first iteration, init with random weights
        if self.trainIt == 0:
            w = np.random.rand(x.shape[0], self.O)
            p = np.identity(x.shape[0]) / self.alpha
            yTrained = self.neuron_fct(np.asarray(w.T * x).T)
            filtYTrained = yTrained[0]
            self.yTrained = filtYTrained
            self.acc = a_it[0]
            self.speed = v_it
            self.pos = pos_it
            # self.error = y - yTrained

        # Else update
        else:
            # Update the inverse correlation matrix of the network states
            p_prev = np.mat(self.p_prev)
            den = 1 + x.T * p_prev * x
            num = p_prev * x * x.T * p_prev
            p = p_prev - num / den

            # Update the weight matrix
            w_prev = np.mat(self.w_prev)

            # Compute the minimal error in a window
            if self.iterationNumber > self.eWin:
                e_lim_min = self.eWin
            else:
                e_lim_min = self.iterationNumber
            if self.iterationNumber < self.simulEnv.simulationLength - self.eWin:
                e_lim_max = self.eWin
            else:
                e_lim_max = self.simulEnv.simulationLength - self.iterationNumber
            e_i = self.neuron_fct(w_prev.T * x) - np.mat(self.yTraining[self.iterationNumber - e_lim_min]).T
            for i in range(self.iterationNumber - e_lim_min + 1, self.iterationNumber + e_lim_max):
                e_i = np.hstack((e_i, self.neuron_fct(w_prev.T * x) - np.mat(self.yTraining[i]).T))
            e_p = np.amin(e_i, axis=1)
            # e_p = w_prev.T * x - y.T
            # e_p = e_i[:, e_p_arg]

            # Update the weight matrix
            l = (p * x) / (1 + x.T * p * x)
            w = w_prev - p * x * e_p.T

            # Fill the w error vector (useful for plotting W convergence)
            if self.weightMatrixDiff is None:
                self.weightMatrixDiff = np.max(np.abs(w - w_prev))
            else:
                self.weightMatrixDiff = np.hstack((self.weightMatrixDiff, np.max(np.abs(w - w_prev))))

            # Update the output
            yTrained = self.neuron_fct(np.asarray(w.T * x).T)
            filtYTrained = yTrained[0]
            self.yTrained = np.vstack((self.yTrained, np.asarray(filtYTrained)))
            self.acc = np.vstack((self.acc, a_it[0]))
            self.speed = np.vstack((self.speed, v_it))
            self.pos = np.vstack((self.pos, pos_it))

            # Update the error (useful for plotting the error evolution)
            if self.error is None:
                self.error = np.mean(np.abs(y - filtYTrained))
            else:
                self.error = np.hstack((self.error, np.mean(np.abs(y - filtYTrained))))

        # Start Closed-Loop mode
        if self.trainIt == self.openLength:
            self.robot.control.closeLoop(self.closedLength, beta=self.beta)

        # Pass the signal estimation to the controller to close the loop
        self.robot.control.setStepInput(filtYTrained)

        # Update iteration
        self.trainIt += 1
        self.w_prev = w
        self.p_prev = p
        return
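    # Illustrative sketch (not part of the original class): the core of
    # trainStep above is the recursive-least-squares (RLS) update used by
    # FORCE learning. After P is updated, P*x equals the classic RLS gain, so
    # w -= P*x*e.T is the standard weight correction. Standalone version that
    # learns a small linear map online; all names and constants here are
    # illustrative only.
    @staticmethod
    def _rls_update_sketch():
        rng = np.random.RandomState(1)
        n = 4
        w_true = rng.rand(n, 1)
        w = np.zeros((n, 1))
        p = np.identity(n) / 1e-6                  # large initial P = weak regularization
        for _ in range(200):
            x = rng.rand(n, 1)
            y = np.dot(w_true.T, x)                # noise-free supervised target
            p = p - np.dot(np.dot(p, x), np.dot(x.T, p)) / (1 + np.dot(np.dot(x.T, p), x))
            e = np.dot(w.T, x) - y                 # error with the current weights
            w = w - np.dot(np.dot(p, x), e.T)      # same form as in trainStep
        assert np.allclose(w, w_true, atol=1e-4)   # converges to the true map
        return w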
    def printSim(self):
        """ Save some useful information regarding the simulation phases """
        with open("./" + self.outputFolder + "/" + self.outputFilename + ".txt", "w+") as f:
            f.write(" Phase name | length | t_start | t_stop |\n")
            f.write("------------------------------------------------\n")
            f.write(" Transient | " + str(self.transLength) + " | " +
                    str(0.0) + " | " +
                    str(self.transLength * self.simulEnv.timeStep) + " |\n")
            f.write(" Training | " + str(self.trainLength) + " | " +
                    str(self.transLength * self.simulEnv.timeStep) + " | " +
                    str((self.trainLength + self.transLength) * self.simulEnv.timeStep) + " |\n")
            f.write(" Running | " + str(self.runLength) + " | " +
                    str((self.trainLength + self.transLength) * self.simulEnv.timeStep) + " | " +
                    str(self.simulEnv.simulationLength * self.simulEnv.timeStep) + " |\n")
            f.write("------------------------------------------------\n")
            f.write(" Open Loop | " + str(self.openLength) + " | " +
                    str(self.transLength * self.simulEnv.timeStep) + " | " +
                    str((self.transLength + self.openLength) * self.simulEnv.timeStep) + " |\n")
            f.write(" Closing Loop | " + str(self.closedLength) + " | " +
                    str((self.transLength + self.openLength) * self.simulEnv.timeStep) + " | " +
                    str((self.transLength + self.openLength + self.closedLength) * self.simulEnv.timeStep) + " |\n")
            f.write("------------------------------------------------\n")
            f.write("Parameters: alpha=" + str(self.alpha) + " beta=" + str(self.beta))

    def train(self):
        """ Nothing to do here, as FORCE is an online method """
        self.weightMatrix = self.w_prev
        if self.trainingPlot == "cont" or self.trainingPlot == "all":
            self.plotW()
        # Start Closed-Loop mode
        print " -- Training phase finished -- "
        # self.robot.control.closeLoop()
        return
34.381073
147
0.675729
9,893
67,937
4.570909
0.078945
0.022556
0.015038
0.007541
0.827112
0.817559
0.80429
0.800862
0.797634
0.792923
0
0.014283
0.178651
67,937
1,976
148
34.381073
0.796111
0.11521
0
0.778824
0
0
0.068009
0.006626
0
0
0
0.00253
0.003137
0
null
null
0.000784
0.011765
null
null
0.021961
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
b73b272b52174b7da8ea03724dce60fcd8e7661b
49,664
py
Python
tests/test_api.py
Arize-ai/client_python
b80afbcafd243c693791bbb77f534eb6def731f1
[ "BSD-3-Clause" ]
12
2020-03-31T17:42:45.000Z
2022-03-31T07:30:24.000Z
tests/test_api.py
Arize-ai/client_python
b80afbcafd243c693791bbb77f534eb6def731f1
[ "BSD-3-Clause" ]
22
2021-08-18T20:16:09.000Z
2022-03-24T22:50:21.000Z
tests/test_api.py
Arize-ai/client_python
b80afbcafd243c693791bbb77f534eb6def731f1
[ "BSD-3-Clause" ]
2
2021-08-18T18:39:54.000Z
2021-08-30T23:14:59.000Z
import pytest import datetime import time import uuid from pathlib import Path import numpy as np import pandas as pd import arize.public_pb2 as public__pb2 from arize.model import TrainingRecords, ValidationRecords from arize.utils.types import ModelTypes from arize.api import Client NUM_VAL = 20.20 STR_VAL = 'arize' BOOL_VAL = True INT_VAL = 0 NP_FLOAT = float(1.2) file_to_open = Path(__file__).parent / "fixtures/mpg.csv" expected = { 'model': 'model_v0', 'model_version': 'v1.2.3.4', 'batch': 'batch1234', 'api_key': 'API_KEY', 'prediction_id': 'prediction_0', 'value_binary': BOOL_VAL, 'value_categorical': STR_VAL, 'value_numeric': NUM_VAL, 'organization_key': 'test_org', 'features': { 'feature_str': STR_VAL, 'feature_double': NUM_VAL, 'feature_int': INT_VAL, 'feature_bool': BOOL_VAL, 'feature_None': None }, 'feature_importances': { 'feature_str': NUM_VAL, 'feature_double': NUM_VAL, 'feature_int': NUM_VAL, 'feature_bool': NUM_VAL, 'feature_numpy_float': NP_FLOAT, } } def mock_dataframes_clean_nan(file): features, labels, ids = mock_dataframes(file) features = features.fillna('backfill') return features, labels, ids def mock_dataframes(file): features = pd.read_csv(file) labels = pd.DataFrame( np.random.randint(1, 100, size=(features.shape[0], 1))) ids = pd.DataFrame([str(uuid.uuid4()) for _ in range(len(labels.index))]) return features, labels, ids def mock_series(file): features = pd.read_csv(file) labels = pd.Series(np.random.randint(1, 100, size=features.shape[0])) ids = pd.Series([str(uuid.uuid4()) for _ in range(len(labels.index))]) return features, labels, ids def get_stubbed_client(): c = Client(organization_key="test_org", api_key="API_KEY", uri="https://localhost:443") def _post(record, uri, indexes): return record def _post_bulk(records, uri): return records def _post_preprod(records): return records c._post = _post c._post_bulk = _post_bulk c._post_preprod = _post_preprod return c # TODO for each existing test that has been modified to call Client.log, add a call # to the pre-existing method that should map to the identical cacll to Client.log to # assert that they are equivalent def test_build_binary_prediction_features(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_type=ModelTypes.BINARY, model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_binary'], features=expected['features'], prediction_timestamp=None) assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction, public__pb2.Prediction) assert isinstance(record.prediction.label, public__pb2.Label) for feature in record.prediction.features: assert isinstance(record.prediction.features[feature], public__pb2.Value) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.prediction.model_version == expected['model_version'] assert record.prediction.label.binary == expected['value_binary'] assert record.prediction.features['feature_str'].WhichOneof( 'data') == 'string' assert record.prediction.features['feature_double'].WhichOneof( 'data') == 'double' assert record.prediction.features['feature_int'].WhichOneof( 'data') == 'int' assert record.prediction.features['feature_bool'].WhichOneof( 'data') == 'string' assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 def test_numeric_prediction_id(): c = get_stubbed_client() record = 
c.log(model_id=expected['model'], model_type=ModelTypes.BINARY, model_version=expected['model_version'], prediction_id=12345, prediction_label=expected['value_binary'], features=expected['features'], prediction_timestamp=None) assert record.prediction_id == "12345" record = c.log(model_id=expected['model'], model_type=ModelTypes.BINARY, model_version=expected['model_version'], prediction_id=1.2345, prediction_label=expected['value_binary'], features=expected['features'], prediction_timestamp=None) assert record.prediction_id == "1.2345" def test_numeric_feature_name(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_type=ModelTypes.BINARY, model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_binary'], features={1: "hello", 2.0: "world"}, prediction_timestamp=None) for feature in record.prediction.features: assert isinstance(record.prediction.features[feature], public__pb2.Value) def test_build_binary_prediction_zero_ones(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_type=ModelTypes.BINARY, model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=1, features=expected['features'], prediction_timestamp=None) assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction, public__pb2.Prediction) assert isinstance(record.prediction.label, public__pb2.Label) for feature in record.prediction.features: assert isinstance(record.prediction.features[feature], public__pb2.Value) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.prediction.model_version == expected['model_version'] assert record.prediction.label.binary == expected['value_binary'] assert record.prediction.features['feature_str'].WhichOneof( 'data') == 'string' assert record.prediction.features['feature_double'].WhichOneof( 'data') == 'double' assert record.prediction.features['feature_int'].WhichOneof( 'data') == 'int' assert record.prediction.features['feature_bool'].WhichOneof( 'data') == 'string' assert
record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 def test_build_categorical_prediction(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_categorical'], features=expected['features'], prediction_timestamp=None) assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction, public__pb2.Prediction) assert isinstance(record.prediction.label, public__pb2.Label) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.prediction.model_version == expected['model_version'] assert bool(record.prediction.features) assert record.prediction.label.categorical == expected['value_categorical'] def test_build_scored_prediction(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_type=ModelTypes.SCORE_CATEGORICAL, model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=(expected['value_categorical'], expected['value_numeric']), features=expected['features'], prediction_timestamp=None) assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction, public__pb2.Prediction) assert isinstance(record.prediction.label, public__pb2.Label) assert isinstance(record.prediction.label.score_categorical, public__pb2.ScoreCategorical) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.prediction.model_version == expected['model_version'] assert bool(record.prediction.features) assert record.prediction.label.score_categorical.HasField('score_category') assert record.prediction.label.score_categorical.score_category.category == expected['value_categorical'] assert record.prediction.label.score_categorical.score_category.score == expected['value_numeric'] def test_build_scored_actual(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_type=ModelTypes.SCORE_CATEGORICAL, prediction_id=expected['prediction_id'], actual_label=expected['value_categorical']) assert isinstance(record, public__pb2.Record) assert isinstance(record.actual, public__pb2.Actual) assert isinstance(record.actual.label, public__pb2.Label) assert isinstance(record.actual.label.score_categorical, public__pb2.ScoreCategorical) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.actual.label.score_categorical.HasField('category') def test_build_numeric_prediction(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_numeric'], features=expected['features'], prediction_timestamp=None) assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction, public__pb2.Prediction) assert isinstance(record.prediction.label, public__pb2.Label) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.prediction.model_version == expected['model_version'] assert bool(record.prediction.features) assert record.prediction.label.numeric == 
expected['value_numeric'] def test_build_prediction_no_features(): c = get_stubbed_client() record = c.log(model_id=expected['model'], model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_numeric'], features=None, prediction_timestamp=None) assert isinstance(record.prediction, public__pb2.Prediction) assert not bool(record.prediction.features) def test_build_numeric_actual(): c = get_stubbed_client() record = c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], actual_label=expected['value_numeric']) assert isinstance(record, public__pb2.Record) assert isinstance(record.actual, public__pb2.Actual) assert isinstance(record.actual.label, public__pb2.Label) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.actual.label.numeric == expected['value_numeric'] assert record.actual.timestamp.seconds == 0 assert record.actual.timestamp.nanos == 0 def test_build_categorical_actual(): c = get_stubbed_client() record = c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], actual_label=expected['value_categorical']) assert isinstance(record, public__pb2.Record) assert isinstance(record.actual, public__pb2.Actual) assert isinstance(record.actual.label, public__pb2.Label) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.actual.label.categorical == expected['value_categorical'] def test_build_binary_actual(): c = get_stubbed_client() record = c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], actual_label=expected['value_binary']) assert isinstance(record, public__pb2.Record) assert isinstance(record.actual, public__pb2.Actual) assert isinstance(record.actual.label, public__pb2.Label) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert record.actual.label.binary == expected['value_binary'] ###################### ## Bulk Log Tests #### ###################### def test_build_bulk_predictions_dataframes(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) bulk_records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes == (0, len(ids)) assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert bulk.model_version == expected['model_version'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'numeric' assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def test_numeric_prediction_ids(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame([i for i in range(len(labels.index))]) bulk_records = 
c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes == (0, len(ids)) assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert bulk.model_version == expected['model_version'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'numeric' assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def test_numeric_feature_names(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) # some features left with string name to test mix features = features.rename( columns={"mpg": 1, "cylinders": 2, "displacement": 3, "horsepower": 4, "weight": 5, "acceleration": 6}) bulk_records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes == (0, len(ids)) assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert bulk.model_version == expected['model_version'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'numeric' assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def test_build_bulk_scored_predictions(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) scores = pd.DataFrame(data=np.random.random(size=(features.shape[0], 1))) labels.columns = ["category"] scores.columns = ["score"] labels = labels.astype(str) score_labels = pd.concat([labels, scores], axis=1) bulk_records = c.bulk_log(model_id=expected['model'], model_type=ModelTypes.SCORE_CATEGORICAL, model_version=expected['model_version'], prediction_ids=ids, prediction_labels=score_labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes[1] - indexes[0] == len(ids) / 2 assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert bulk.model_version == expected['model_version'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert isinstance(record.prediction.label.score_categorical, public__pb2.ScoreCategorical) assert len(record.prediction.features) == features.shape[1] assert isinstance(record.prediction.label.score_categorical.score_category.score, float) assert record.prediction.label.score_categorical.score_category.score == scores["score"][record_count] assert 
isinstance(record.prediction.label.score_categorical.score_category.category, str) assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def test_build_bulk_predictions_dataframes_with_nans(): c = get_stubbed_client() features, labels, ids = mock_dataframes(file_to_open) features.horsepower = np.nan bulk_records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes == (0, len(ids)) assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert bulk.model_version == expected['model_version'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == (features.shape[1] - 1) assert record.prediction.label.WhichOneof('data') == 'numeric' assert record.prediction.timestamp.seconds == 0 assert record.prediction.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def test_build_bulk_predictions_no_features(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=None, feature_names_overwrite=None, prediction_timestamps=None) for _, bulk in records.items(): assert isinstance(bulk, public__pb2.BulkRecord) for r in bulk.records: assert isinstance(r, public__pb2.Record) assert not bool(r.organization_key) assert not bool(r.model_id) assert not bool(r.prediction.features) def test_build_bulk_prediction_with_feature_names_overwrites(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) feature_names_overwrite = [ 'mask_' + str(i) for i in range(len(features.columns)) ] records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=feature_names_overwrite, prediction_timestamps=None) for _, bulk in records.items(): assert isinstance(bulk, public__pb2.BulkRecord) for r in bulk.records: assert isinstance(r, public__pb2.Record) assert not bool(r.organization_key) assert not bool(r.model_id) assert bool(r.prediction.features) for feature in r.prediction.features: assert feature in feature_names_overwrite def test_build_bulk_actuals_dataframes(): c = get_stubbed_client() _, labels, ids = mock_dataframes_clean_nan(file_to_open) bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, actual_labels=labels) record_count = 0 for indexes, bulk in bulk_records.items(): assert indexes == (0, len(ids)) assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.actual.label, public__pb2.Label) assert record.prediction_id == ids[0][record_count] assert record.actual.label.WhichOneof('data') == 'numeric' assert record.actual.timestamp.seconds == 0 assert record.actual.timestamp.nanos == 0 record_count += 1 assert record_count == len(ids) def 
test_validate_bulk_predictions_timestamp_out_of_range(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) current_time = datetime.datetime.now().timestamp() earlier_time = (datetime.datetime.now() - datetime.timedelta(days=30)).timestamp() prediction_timestamps = np.linspace(earlier_time, current_time, num=len(ids)) prediction_timestamps = pd.Series(prediction_timestamps.astype(int)) # break one of the timestamps prediction_timestamps.iloc[4] = int(current_time) + (366 * 24 * 60 * 60) ex = None try: c.bulk_log( model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, prediction_timestamps=prediction_timestamps, ) except Exception as err: ex = err assert isinstance(ex, ValueError) def test_validate_bulk_predictions_with_nan(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) # intentionally assign np.nan to labels labels.loc[labels.sample(frac=0.1).index, 0] = np.nan with pytest.raises(ValueError) as excinfo: c.bulk_log( model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=None) assert str(excinfo.value) == "prediction labels cannot contain null value" def test_validate_bulk_predictions_mismatched_shapes(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) feature_names_overwrite = [ 'mask_' + str(i) for i in range(len(features.columns)) ] id_ex, feature_ex, label_ex, overwrite_ex = None, None, None, None try: c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids[3:], prediction_labels=labels, features=features, feature_names_overwrite=feature_names_overwrite, prediction_timestamps=None) except Exception as err: id_ex = err try: c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features[3:], feature_names_overwrite=None, prediction_timestamps=None) except Exception as err: feature_ex = err try: c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels[3:], features=None, feature_names_overwrite=None, prediction_timestamps=None) except Exception as err: label_ex = err try: c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=feature_names_overwrite[3:], prediction_timestamps=None) except Exception as err: overwrite_ex = err assert isinstance(id_ex, ValueError) assert isinstance(feature_ex, ValueError) assert isinstance(label_ex, ValueError) assert isinstance(overwrite_ex, ValueError) def test_build_bulk_prediction_with_prediction_timestamps(): c = get_stubbed_client() features, labels, ids = mock_dataframes_clean_nan(file_to_open) t = [int(time.time()) + i for i in range(features.shape[0])] records = c.bulk_log(model_id=expected['model'], model_version=expected['model_version'], prediction_ids=ids, prediction_labels=labels, features=features, feature_names_overwrite=None, prediction_timestamps=t) for _, bulk in records.items(): assert isinstance(bulk, public__pb2.BulkRecord) for r in bulk.records: assert isinstance(r, public__pb2.Record) assert not bool(r.organization_key) assert not bool(r.model_id) assert bool(r.prediction.features) assert 
r.prediction.timestamp is not None def test_handle_log_prediction_with_prediction_timestamps(): t = int(time.time()) c = get_stubbed_client() record = c.log(model_id=expected['model'], model_version=expected['model_version'], prediction_id=expected['prediction_id'], prediction_label=expected['value_binary'], features=expected['features'], prediction_timestamp=t) assert isinstance(record.prediction, public__pb2.Prediction) assert bool(record.prediction.features) assert record.prediction.timestamp.seconds == t def test_build_bulk_predictions_index(): c = get_stubbed_client() features, labels, idx = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame(index=idx.values, data=idx.values).index.to_series() bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, prediction_labels=labels, features=features, model_version=expected['model_version'], feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'numeric' assert record.prediction_id in idx.values record_count += 1 assert record_count == len(ids) def test_build_bulk_actuals_index(): c = get_stubbed_client() _, labels, idx = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame(index=idx.values, data=idx.values).index.to_series() bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, actual_labels=labels) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.actual.label, public__pb2.Label) assert record.prediction_id == ids[record_count][0] assert record.actual.label.WhichOneof('data') == 'numeric' assert record.prediction_id in idx.values record_count += 1 assert record_count == len(ids) def test_build_bulk_binary_predictions(): c = get_stubbed_client() features, _, idx = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame(index=idx.values, data=idx.values).index.to_series() features['pred'] = features['mpg'].apply(lambda x: x > 15) bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, prediction_labels=features['pred'], features=features, model_version=expected['model_version'], feature_names_overwrite=None, prediction_timestamps=None) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'binary' assert record.prediction_id in idx.values record_count += 1 assert record_count == len(ids) def test_build_bulk_binary_actuals(): c = get_stubbed_client() features, _, idx = mock_dataframes_clean_nan(file_to_open) features['actual'] = features['mpg'].apply(lambda x: x > 15) ids = pd.DataFrame(index=idx.values, 
data=idx.values).index.to_series() bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, actual_labels=features['actual']) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.actual.label, public__pb2.Label) assert record.prediction_id == ids[record_count][0] assert record.actual.label.WhichOneof('data') == 'binary' assert record.prediction_id in idx.values record_count += 1 assert record_count == len(ids) def test_build_feature_importances(): c = get_stubbed_client() record = c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], shap_values=expected['feature_importances']) assert isinstance(record, public__pb2.Record) assert isinstance(record.feature_importances, public__pb2.FeatureImportances) assert record.organization_key == expected['organization_key'] assert record.model_id == expected['model'] assert record.prediction_id == expected['prediction_id'] assert len(record.feature_importances.feature_importances) == len(expected['feature_importances']) def test_prediction_timestamp_out_of_range(): c = get_stubbed_client() ex = None try: c.log( model_id=expected['model'], prediction_id=expected['prediction_id'], model_version=expected['model_version'], model_type=ModelTypes.CATEGORICAL, prediction_label='HOTDOG', features=expected['features'], prediction_timestamp=int(time.time()) + (380 * 24 * 60 * 60), ) except Exception as err: ex = err assert isinstance(ex, ValueError) def test_build_missing_data(): c = get_stubbed_client() ex = None try: c.log(model_id=expected['model'], prediction_id=expected['prediction_id']) except Exception as err: # Error because everything is None ex = err assert isinstance(ex, ValueError) def test_build_feature_importances_error_empty_data(): c = get_stubbed_client() ex = None try: c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], shap_values={} ) except Exception as err: # Error because no feature_importances were provided ex = err assert isinstance(ex, ValueError) def test_build_feature_importances_error_wrong_data_type(): c = get_stubbed_client() ex = None try: c.log(model_id=expected['model'], prediction_id=expected['prediction_id'], shap_values={"a": "string"} # feature importances should be float, so this will produce an error ) except Exception as err: ex = err assert isinstance(ex, TypeError) def test_build_bulk_feature_importances(): c = get_stubbed_client() features, _, pred_ids = mock_dataframes_clean_nan(file_to_open) data = np.random.rand(len(pred_ids), len(features.columns)) feature_importances = pd.DataFrame(data=data, columns=features.columns) ids = pd.DataFrame(index=pred_ids.values, data=pred_ids.values).index.to_series() bulk_records = c.bulk_log(model_id=expected['model'], prediction_ids=ids, shap_values=feature_importances) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) fi = record.feature_importances assert isinstance(fi, public__pb2.FeatureImportances) assert len(fi.feature_importances) == len(features.columns) assert record.prediction_id == ids[record_count][0] assert record.prediction_id in pred_ids.values 
record_count += 1 assert record_count == len(ids) def test_build_bulk_feature_importances_error_mismatch(): c = get_stubbed_client() features, _, pred_ids = mock_dataframes_clean_nan(file_to_open) # Make the length of feature importances data array mismatch the number of prediction ids data = np.random.rand(len(pred_ids) - 1, len(features.columns)) feature_importances = pd.DataFrame(data=data, columns=features.columns) ids = pd.DataFrame(index=pred_ids.values, data=pred_ids.values).index.to_series() ex = None try: c.bulk_log(model_id=expected['model'], prediction_ids=ids, shap_values=feature_importances) except Exception as err: # feature importances data length and number of prediction ids mismatch should cause this error ex = err assert isinstance(ex, ValueError) # def test_build_bulk_feature_importances_error_wrong_data_type(): # features, _, pred_ids = mock_dataframes(file_to_open) # # # Replace one of the rows in the feature importances data with values of the wrong data type (i.e. not float) # data = np.random.rand(len(pred_ids) - 1, len(features.columns)) # data_wrong_type = np.ones(len(features.columns), dtype=bool) # # data = np.vstack((data, data_wrong_type)) # feature_importances = pd.DataFrame(data=data, columns=features.columns) # ids = pd.DataFrame(index=pred_ids.values, data=pred_ids.values).index.to_series() # # ex = None # try: # bulk_fi = BulkFeatureImportances(organization_key=expected['organization_key'], # model_id=expected['model'], # prediction_ids=ids, # feature_importances=feature_importances) # # bulk_fi.validate_inputs() # except Exception as err: # # caused by wrong type # ex = err # # assert isinstance(ex, ValueError) def test_build_training_records(): features, labels, _ = mock_dataframes_clean_nan(file_to_open) recs = TrainingRecords(organization_key=expected['organization_key'], model_id=expected['model'], model_type=ModelTypes.NUMERIC, model_version=expected['model_version'], prediction_labels=labels, actual_labels=labels, features=features) bundles = recs.build_proto() record_count = 0 for _, recs in bundles.items(): for rec in recs: record_count += 1 assert isinstance(rec, public__pb2.PreProductionRecord) assert isinstance(rec.training_record, public__pb2.PreProductionRecord.TrainingRecord) assert isinstance(rec.training_record.record, public__pb2.Record) assert rec.training_record.record.organization_key == expected['organization_key'] assert rec.training_record.record.model_id == expected['model'] assert rec.training_record.record.prediction_and_actual.prediction.model_version == expected[ 'model_version'] assert isinstance(rec.training_record.record.prediction_and_actual.prediction.label, public__pb2.Label) assert len(rec.training_record.record.prediction_and_actual.prediction.features) == features.shape[1] assert rec.training_record.record.prediction_and_actual.prediction.label.WhichOneof('data') == 'numeric' assert rec.training_record.record.prediction_and_actual.prediction.timestamp.seconds == 0 assert rec.training_record.record.prediction_and_actual.prediction.timestamp.nanos == 0 assert record_count == len(labels) def test_send_validation_records(): c = get_stubbed_client() features, labels, pred_ids = mock_dataframes_clean_nan(file_to_open) t = [int(time.time()) + i for i in range(features.shape[0])] # make life a bit easier and just take the first record features = features[:1] labels = labels[:1] pred_ids = pred_ids[:1] t = t[:1] result = c.log_validation_records( model_id=expected['model'], model_version=expected['model_version'], 
batch_id=expected['batch'], prediction_labels=labels, actual_labels=labels, prediction_ids=pred_ids, model_type=ModelTypes.NUMERIC, features=features, prediction_timestamps=t, ) # test values in single record expected_prediction_id = pred_ids[0][0] for _, recs in result.items(): for rec in recs: assert isinstance(rec, public__pb2.PreProductionRecord) assert isinstance(rec.validation_record, public__pb2.PreProductionRecord.ValidationRecord) assert isinstance(rec.validation_record.record, public__pb2.Record) assert rec.validation_record.batch_id == expected['batch'] assert rec.validation_record.record.organization_key == expected['organization_key'] assert rec.validation_record.record.model_id == expected['model'] assert rec.validation_record.record.prediction_and_actual.prediction.model_version == expected[ 'model_version'] assert isinstance(rec.validation_record.record.prediction_and_actual.prediction.label, public__pb2.Label) assert len(rec.validation_record.record.prediction_and_actual.prediction.features) == features.shape[1] assert rec.validation_record.record.prediction_and_actual.prediction.label.WhichOneof('data') == 'numeric' assert rec.validation_record.record.prediction_and_actual.prediction.timestamp.seconds == t[0] assert rec.validation_record.record.prediction_and_actual.prediction.timestamp.nanos == 0 assert rec.validation_record.record.prediction_id == expected_prediction_id # now test a bunch of records at once features, labels, pred_ids = mock_dataframes_clean_nan(file_to_open) result = c.log_validation_records( model_id=expected['model'], model_version=expected['model_version'], batch_id=expected['batch'], prediction_labels=labels, actual_labels=labels, prediction_ids=pred_ids, model_type=ModelTypes.NUMERIC, features=features, ) records_count = 0 for _, recs in result.items(): for _ in recs: records_count += 1 assert len(labels) == records_count def test_send_validation_records_without_prediction_id(): c = get_stubbed_client() features, labels, pred_ids = mock_dataframes_clean_nan(file_to_open) # expect no exceptions c.log_validation_records( model_id=expected['model'], model_version=expected['model_version'], batch_id=expected['batch'], prediction_labels=labels, actual_labels=labels, model_type=ModelTypes.NUMERIC, features=features, ) def test_build_bulk_binary_predictions_deprecated_method(): c = get_stubbed_client() features, _, idx = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame(index=idx.values, data=idx.values).index.to_series() features['pred'] = features['mpg'].apply(lambda x: x > 15) bulk_records = c.log_bulk_predictions(model_id=expected['model'], prediction_ids=ids, prediction_labels=features['pred'], features=features, model_version=expected['model_version']) record_count = 0 for _, bulk in bulk_records.items(): assert bulk.organization_key == expected['organization_key'] assert bulk.model_id == expected['model'] assert not hasattr(bulk, 'timestamp') for record in bulk.records: assert isinstance(record, public__pb2.Record) assert isinstance(record.prediction.label, public__pb2.Label) assert len(record.prediction.features) == features.shape[1] assert record.prediction.label.WhichOneof('data') == 'binary' assert record.prediction_id in idx.values record_count += 1 assert record_count == len(ids) def test_validation_predictions_ids_as_index_series(): c = get_stubbed_client() features, labels, idx = mock_dataframes_clean_nan(file_to_open) ids = pd.DataFrame(index=idx.values, data=idx.values).index.to_series() result = c.log_validation_records( 
model_id=expected['model'], model_version=expected['model_version'], batch_id=expected['batch'], prediction_labels=labels, actual_labels=labels, prediction_ids=ids, features=features, ) record_count = 0 for _, recs in result.items(): for rec in recs: record_count += 1 assert isinstance(rec, public__pb2.PreProductionRecord) assert isinstance(rec.validation_record, public__pb2.PreProductionRecord.ValidationRecord) assert isinstance(rec.validation_record.record, public__pb2.Record) assert rec.validation_record.batch_id == expected['batch'] assert rec.validation_record.record.organization_key == expected['organization_key'] assert rec.validation_record.record.model_id == expected['model'] assert rec.validation_record.record.prediction_and_actual.prediction.model_version == expected[ 'model_version'] assert isinstance(rec.validation_record.record.prediction_and_actual.prediction.label, public__pb2.Label) assert len(rec.validation_record.record.prediction_and_actual.prediction.features) == features.shape[1] assert rec.validation_record.record.prediction_and_actual.prediction.label.WhichOneof('data') == 'numeric' assert rec.validation_record.record.prediction_id in idx.values assert len(labels) == record_count if __name__ == "__main__": raise SystemExit(pytest.main([__file__]))
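# Illustrative sketch (not part of the original test module): the stubbing in
# get_stubbed_client() above swaps the client's private transport methods
# (_post, _post_bulk, _post_preprod) for identity functions, so log() and
# bulk_log() return the protobuf records they would have sent instead of
# hitting the network. The same pattern on a made-up class, for reference:
def _stub_pattern_sketch():
    class FakeTransport(object):
        def _post(self, record):
            raise RuntimeError("would hit the network")

        def send(self, record):
            return self._post(record)

    t = FakeTransport()
    t._post = lambda record: record  # identity stub, as in get_stubbed_client()
    assert t.send({"model": "model_v0"}) == {"model": "model_v0"}
    return t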
43.795414
118
0.652163
5,526
49,664
5.597539
0.05266
0.0719
0.035885
0.047847
0.863378
0.843334
0.818279
0.804604
0.787372
0.758276
0
0.007332
0.250302
49,664
1,133
119
43.834069
0.823436
0.03719
0
0.753396
0
0
0.070969
0
0
0
0
0.000883
0.328109
1
0.051202
false
0
0.028213
0.003135
0.086729
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
3fabe3fb45c5d2a5673211e7e037c597e5035b09
4,864
py
Python
scripts/01_MEGRE/08_merge_echoes.py
ofgulban/meso-MRI
15ef8e19aae6218833a06bf01418d3d83eafd8c7
[ "BSD-3-Clause" ]
1
2022-01-21T13:48:01.000Z
2022-01-21T13:48:01.000Z
scripts/01_MEGRE/08_merge_echoes.py
ofgulban/meso-MRI
15ef8e19aae6218833a06bf01418d3d83eafd8c7
[ "BSD-3-Clause" ]
null
null
null
scripts/01_MEGRE/08_merge_echoes.py
ofgulban/meso-MRI
15ef8e19aae6218833a06bf01418d3d83eafd8c7
[ "BSD-3-Clause" ]
1
2022-01-21T13:48:08.000Z
2022-01-21T13:48:08.000Z
"""Split each echo to prepare for registration.""" import os import subprocess import numpy as np import nibabel as nb # ============================================================================= NII_NAMES = [ [ "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo1_ups2X.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo2_ups2X.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo3_ups2X.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo4_ups2X.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo5_ups2X.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/06_upsample_echos/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_echo6_ups2X.nii.gz" ], [ "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo1_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo2_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo3_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo4_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo5_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_echo6_ups2X_reg.nii.gz" ], [ "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo1_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo2_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo3_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo4_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo5_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_echo6_ups2X_reg.nii.gz" ], [ "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo1_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo2_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo3_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo4_ups2X_reg.nii.gz", "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo5_ups2X_reg.nii.gz", 
"/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/07_apply_reg/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_echo6_ups2X_reg.nii.gz" ] ] OUTDIR = "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/08_merge_echos" OUT_NAMES = [ "sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop_ups2X_prepped.nii.gz", "sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop_ups2X_prepped.nii.gz", "sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop_ups2X_prepped.nii.gz", "sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop_ups2X_prepped.nii.gz", ]; # ============================================================================= print("Step_08: Merge echoes.") # Output directory if not os.path.exists(OUTDIR): os.makedirs(OUTDIR) print(" Output directory: {}".format(OUTDIR)) # Average across echoes dims = nb.load(NII_NAMES[0][0]).shape for i in range(len(NII_NAMES)): print("Merging file {}...".format(i+1)) temp = np.zeros(dims + (6,)) for j in range(len(NII_NAMES[i])): # Load data nii = nb.load(NII_NAMES[i][j]) temp[..., j] = np.squeeze(np.asanyarray(nii.dataobj)) # Save echos as timeseries out_name = os.path.join(OUTDIR, OUT_NAMES[i]) img = nb.Nifti1Image(temp, affine=nii.affine, header=nii.header) nb.save(img, out_name) print(' Finished.')
66.630137
143
0.760485
920
4,864
3.638043
0.108696
0.079175
0.066926
0.092023
0.843741
0.832985
0.824022
0.824022
0.818942
0.813564
0
0.071571
0.069285
4,864
72
144
67.555556
0.667771
0.056538
0
0.052632
0
0.421053
0.799476
0.78353
0
0
0
0
0
1
0
false
0
0.070175
0
0.070175
0.070175
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
3fe10ed184b888788fc7beb6b3470bb6eea7e5d4
3,378
py
Python
tests/player/challenge_player/strategy/mode/test_bottom_up_mode.py
Murthy10/pyschieber
f9db28c9553b8f321f6ed71cff04eff7879af5f6
[ "MIT" ]
5
2018-01-17T08:11:14.000Z
2018-11-27T11:37:15.000Z
tests/player/challenge_player/strategy/mode/test_bottom_up_mode.py
Murthy10/pyschieber
f9db28c9553b8f321f6ed71cff04eff7879af5f6
[ "MIT" ]
4
2018-05-09T08:41:05.000Z
2018-11-16T08:07:39.000Z
tests/player/challenge_player/strategy/mode/test_bottom_up_mode.py
Murthy10/pyschieber
f9db28c9553b8f321f6ed71cff04eff7879af5f6
[ "MIT" ]
3
2018-04-20T07:39:30.000Z
2018-11-10T12:44:08.000Z
import pytest

from pyschieber.card import Card
from pyschieber.suit import Suit
from pyschieber.player.challenge_player.strategy.mode.bottom_up_mode import BottomUpMode


@pytest.fixture
def bum():
    return BottomUpMode()


@pytest.mark.parametrize("cards, score", [
    ([Card(Suit.BELL, 13), Card(Suit.BELL, 12), Card(Suit.BELL, 11), Card(Suit.BELL, 8), Card(Suit.ACORN, 12),
      Card(Suit.ACORN, 11), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)], 0),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 12), Card(Suit.BELL, 9), Card(Suit.BELL, 8), Card(Suit.BELL, 6),
      Card(Suit.ACORN, 11), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)], 13),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 11), Card(Suit.BELL, 9), Card(Suit.ACORN, 12), Card(Suit.ACORN, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)], 26),
    ([Card(Suit.BELL, 11), Card(Suit.BELL, 9), Card(Suit.BELL, 6), Card(Suit.ACORN, 13), Card(Suit.ACORN, 12),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 14), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)], 13),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 13), Card(Suit.BELL, 12), Card(Suit.BELL, 10), Card(Suit.BELL, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 13), Card(Suit.ROSE, 13), Card(Suit.ROSE, 12)], 13),
    ([Card(Suit.BELL, 6), Card(Suit.BELL, 7), Card(Suit.BELL, 8), Card(Suit.BELL, 10), Card(Suit.ACORN, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 14), Card(Suit.ROSE, 13), Card(Suit.ROSE, 12)], 75),
])
def test_calculate_score_bottom_up(bum, cards, score):
    s = bum.calculate_mode_score(cards, geschoben=False)
    assert s == score


@pytest.mark.parametrize("cards, lowest, highest", [
    ([Card(Suit.BELL, 13), Card(Suit.BELL, 12), Card(Suit.BELL, 11), Card(Suit.BELL, 8), Card(Suit.ACORN, 12),
      Card(Suit.ACORN, 11), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)],
     Card(Suit.BELL, 13), [Card(Suit.SHIELD, 7)]),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 12), Card(Suit.BELL, 9), Card(Suit.BELL, 8), Card(Suit.BELL, 6),
      Card(Suit.ACORN, 11), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)],
     Card(Suit.BELL, 14), [Card(Suit.BELL, 6)]),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 11), Card(Suit.BELL, 9), Card(Suit.ACORN, 12), Card(Suit.ACORN, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 12), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)],
     Card(Suit.BELL, 14), [Card(Suit.ACORN, 6)]),
    ([Card(Suit.BELL, 11), Card(Suit.BELL, 9), Card(Suit.BELL, 6), Card(Suit.ACORN, 13), Card(Suit.ACORN, 12),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 14), Card(Suit.SHIELD, 7), Card(Suit.ROSE, 9)],
     Card(Suit.SHIELD, 14), [Card(Suit.BELL, 6)]),
    ([Card(Suit.BELL, 14), Card(Suit.BELL, 13), Card(Suit.BELL, 12), Card(Suit.BELL, 10), Card(Suit.BELL, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 13), Card(Suit.ROSE, 13), Card(Suit.ROSE, 12)],
     Card(Suit.BELL, 14), [Card(Suit.BELL, 6)]),
    ([Card(Suit.BELL, 6), Card(Suit.BELL, 7), Card(Suit.BELL, 8), Card(Suit.BELL, 10), Card(Suit.ACORN, 6),
      Card(Suit.ACORN, 7), Card(Suit.SHIELD, 14), Card(Suit.ROSE, 13), Card(Suit.ROSE, 12)],
     Card(Suit.SHIELD, 14), [Card(Suit.ACORN, 6), Card(Suit.BELL, 6)]),
])
def test_sort_by_rank_bottom_up(bum, cards, lowest, highest):
    sorted_cards = bum.sort_by_rank(cards)
    assert sorted_cards[-1] == lowest and sorted_cards[0] in highest
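The second set of cases implies that bottom-up ("Undenufe") ranking is inverted: the 6 is the strongest card, so sort_by_rank places the strongest first and the weakest last. A minimal sketch of such a sort, with a hypothetical stand-in Card rather than pyschieber's actual implementation:

from collections import namedtuple

Card = namedtuple("Card", ["suit", "value"])  # hypothetical stand-in


def sort_by_rank_bottom_up(cards):
    # Ascending face value: in bottom-up mode the lowest face value ranks highest.
    return sorted(cards, key=lambda card: card.value)


hand = [Card("BELL", 13), Card("SHIELD", 7), Card("ROSE", 9)]
ranked = sort_by_rank_bottom_up(hand)
assert ranked[0] == Card("SHIELD", 7)   # strongest card in bottom-up terms
assert ranked[-1] == Card("BELL", 13)   # weakest card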
71.87234
159
0.648609
577
3,378
3.769497
0.090121
0.445057
0.308966
0.071724
0.787586
0.787126
0.773793
0.773793
0.758161
0.758161
0
0.0676
0.132919
3,378
47
160
71.87234
0.674974
0
0
0.341463
0
0
0.010062
0
0
0
0
0
0.04878
1
0.073171
false
0
0.097561
0.02439
0.195122
0
0
0
0
null
1
1
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
b7587d9f0b462fcfc07d20b17d6f3aedf950c637
2,942
py
Python
asv_bench/benchmarks/commit_and_checkout.py
KarenImmanuel/hangar-py
2a5caff259ad699db56676f14a70cb94e75d8a5b
[ "Apache-2.0" ]
null
null
null
asv_bench/benchmarks/commit_and_checkout.py
KarenImmanuel/hangar-py
2a5caff259ad699db56676f14a70cb94e75d8a5b
[ "Apache-2.0" ]
null
null
null
asv_bench/benchmarks/commit_and_checkout.py
KarenImmanuel/hangar-py
2a5caff259ad699db56676f14a70cb94e75d8a5b
[ "Apache-2.0" ]
null
null
null
from tempfile import mkdtemp
from shutil import rmtree

import numpy as np

from hangar import Repository


class MakeCommit(object):

    params = [(5_000, 20_000), (5_000, 20_000)]
    param_names = ['num_samples', 'num_metadata']
    processes = 2
    number = 1
    repeat = 2
    warmup_time = 0

    def setup(self, num_samples, num_metadata):
        self.tmpdir = mkdtemp()
        self.repo = Repository(path=self.tmpdir, exists=False)
        self.repo.init('tester', 'foo@test.bar', remove_old=True)
        self.co = self.repo.checkout(write=True)
        arr = np.array([0,], dtype=np.uint8)
        try:
            aset = self.co.arraysets.init_arrayset(
                'aset', prototype=arr, backend_opts='10')
        except TypeError:
            aset = self.co.arraysets.init_arrayset(
                'aset', prototype=arr, backend='10')
        with aset as cm_aset:
            for i in range(num_samples):
                arr[:] = i % 255
                cm_aset[i] = arr
        with self.co.metadata as cm_meta:
            for i in range(num_metadata):
                cm_meta[i] = f'{i % 500} data'

    def teardown(self, num_samples, num_metadata):
        self.co.close()
        self.repo._env._close_environments()
        rmtree(self.tmpdir)

    def time_commit(self, num_samples, num_metadata):
        self.co.commit('hello')


class CheckoutCommit(object):

    params = [(5_000, 20_000), (5_000, 20_000)]
    param_names = ['num_samples', 'num_metadata']
    processes = 2
    number = 1
    repeat = 2
    warmup_time = 0

    def setup(self, num_samples, num_metadata):
        self.tmpdir = mkdtemp()
        self.repo = Repository(path=self.tmpdir, exists=False)
        self.repo.init('tester', 'foo@test.bar', remove_old=True)
        self.co = self.repo.checkout(write=True)
        arr = np.array([0,], dtype=np.uint8)
        try:
            aset = self.co.arraysets.init_arrayset(
                'aset', prototype=arr, backend_opts='10')
        except TypeError:
            aset = self.co.arraysets.init_arrayset(
                'aset', prototype=arr, backend='10')
        with aset as cm_aset:
            for i in range(num_samples):
                arr[:] = i % 255
                cm_aset[i] = arr
        with self.co.metadata as cm_meta:
            for i in range(num_metadata):
                cm_meta[i] = f'{i % 500} data'
        self.co.commit('first')
        self.co.close()
        self.co = None

    def teardown(self, num_samples, num_metadata):
        try:
            self.co.close()
        except PermissionError:
            pass
        self.repo._env._close_environments()
        rmtree(self.tmpdir)

    def time_checkout_read_only(self, num_samples, num_metadata):
        self.co = self.repo.checkout(write=False)

    def time_checkout_write_enabled(self, num_samples, num_metadata):
        self.co = self.repo.checkout(write=True)
        self.co.close()
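These classes follow airspeed velocity (asv) conventions: params/param_names parametrize each benchmark, setup/teardown run around every sample, and any method whose name starts with time_ is what gets timed. A minimal self-contained sketch of the same shape, benchmarking plain dict inserts so it runs without hangar (the class and numbers are illustrative only):

from timeit import timeit


class TimeDictInsert(object):
    # asv-style parametrization: each time_ method runs once per parameter value.
    params = [1_000, 10_000]
    param_names = ['num_items']

    def setup(self, num_items):
        self.data = {}

    def time_insert(self, num_items):
        for i in range(num_items):
            self.data[i] = i


# asv would drive this itself; a rough manual equivalent:
bench = TimeDictInsert()
bench.setup(10_000)
print(timeit(lambda: bench.time_insert(10_000), number=1))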
31.978261
69
0.589395
381
2,942
4.385827
0.233596
0.061041
0.070018
0.113106
0.832436
0.832436
0.832436
0.764811
0.764811
0.764811
0
0.03274
0.294018
2,942
92
70
31.978261
0.771786
0
0
0.805195
0
0
0.04893
0
0
0
0
0
0
1
0.090909
false
0.012987
0.051948
0
0.324675
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
b79f083b781e955d692380389e544cf37900bf6d
8,390
py
Python
model/parts/v2_hydra_utils.py
inventandchill/HydraDX-simulations
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
[ "Apache-2.0" ]
4
2021-08-12T21:33:26.000Z
2022-03-04T22:51:33.000Z
model/parts/v2_hydra_utils.py
inventandchill/HydraDX-simulations
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
[ "Apache-2.0" ]
31
2021-10-31T20:18:57.000Z
2022-03-25T16:01:41.000Z
model/parts/v2_hydra_utils.py
inventandchill/HydraDX-simulations
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
[ "Apache-2.0" ]
4
2021-08-13T06:59:59.000Z
2021-12-13T17:47:57.000Z
import numpy as np


def addLiquidity_Sq(params, substep, state_history, prev_state, policy_input):
    """
    This function updates and returns shares Sq of the pool after a liquidity add.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    Sq = prev_state['Sq']
    delta_R = policy_input['ri_deposit']

    pool = prev_state['pool']
    R = pool.get_reserve(asset_id)
    S = pool.get_share(asset_id)

    delta_S = S * (delta_R / R)

    return ('Sq', Sq + delta_S)


def addLiquidity_Qh(params, substep, state_history, prev_state, policy_input):
    """
    This function updates and returns quantity Q after a deposit in a risk asset.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_R = policy_input['ri_deposit']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    P = pool.get_price(asset_id)

    Ri_plus = Ri + delta_R
    Ci_plus = Ci * ((Ri + delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri + delta_R) ** (-a))) ** (- (1 / a))
    Q_plus = Q * (Ci / Ci_plus) * ((Y / Y_plus) ** (a)) * ((Ri_plus / Ri) ** (a + 1))

    return ('Q', Q_plus)


def addLiquidity_pool(params, substep, state_history, prev_state, policy_input):
    """
    Updates pool values after an add liquidity event.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    delta_R = policy_input['ri_deposit']
    a = params['a']

    R = pool.get_reserve(asset_id)
    S = pool.get_share(asset_id)
    C = pool.get_coefficient(asset_id)

    delta_S = S * (delta_R / R)
    delta_C = C * (((R + delta_R) / R) ** (a + 1) - 1)

    pool.add_liquidity_pool(asset_id, delta_R, delta_S, delta_C)

    return ('pool', pool)


def removeLiquidity_Sq(params, substep, state_history, prev_state, policy_input):
    """
    This function returns shares Sq after a liquidity removal in a specific risk asset.
    The delta_Sq is taken from the policy_input as the amount 'HYDRA_burn'.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    delta_S = policy_input['HYDRA_burn']
    Sq = prev_state['Sq']

    return ('Sq', Sq - delta_S)


def resolve_addLiquidity_H(params, substep, state_history, prev_state, policy_input):
    """
    This function returns the total amount of H in the system after a deposit
    in a specific risk asset. Works the same as Q.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_R = policy_input['ri_deposit']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    P = pool.get_price(asset_id)

    Ri_plus = Ri + delta_R
    Ci_plus = Ci * ((Ri + delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri + delta_R) ** (-a))) ** (- (1 / a))
    Q_plus = Q * (Ci / Ci_plus) * ((Y / Y_plus) ** (a)) * ((Ri_plus / Ri) ** (a + 1))

    return ('H', Q_plus)


def resolve_remove_Liquidity_H(params, substep, state_history, prev_state, policy_input):
    """
    This function returns the total amount of H in the system after a removal
    in a specific risk asset. Works the same as Q.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_S = policy_input['HYDRA_burn']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    P = pool.get_price(asset_id)
    Si = pool.get_share(asset_id)

    delta_R = (delta_S / Si) * (Q / P)

    Ri_plus = Ri - delta_R
    Ci_plus = Ci * ((Ri - delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri - delta_R) ** (-a))) ** (- (1 / a))
    Q_plus = Q * (Ci / Ci_plus) * ((Y / Y_plus) ** (a)) * ((Ri_plus / Ri) ** (a + 1))

    return ('H', (Q_plus))


def removeLiquidity_Qh(params, substep, state_history, prev_state, policy_input):
    """
    This function updates and returns the amount Q after a liquidity removal
    in a specific risk asset; spec 6-28-21. As delta_R is assumed to be positive,
    the signs are reversed.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_S = policy_input['HYDRA_burn']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    P = pool.get_price(asset_id)
    Si = pool.get_share(asset_id)

    delta_R = (delta_S / Si) * (Q / P)

    Ri_plus = Ri - delta_R
    Ci_plus = Ci * ((Ri - delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri - delta_R) ** (-a))) ** (- (1 / a))
    Q_plus = Q * (Ci / Ci_plus) * ((Y / Y_plus) ** (a)) * ((Ri_plus / Ri) ** (a + 1))

    return ('Q', Q_plus)


def removeLiquidity_pool(params, substep, state_history, prev_state, policy_input):
    """
    Updates pool values after a remove liquidity event.
    Amended 9 July, 2021 to V2 Spec.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    delta_S = policy_input['HYDRA_burn']
    pool = prev_state['pool']

    R = pool.get_reserve(asset_id)
    S = pool.get_share(asset_id)
    C = pool.get_coefficient(asset_id)
    a = params['a']
    Sq = prev_state['Sq']

    delta_R = R * (delta_S / S)
    delta_C = C * (((R - delta_R) / R) ** (a + 1) - 1)

    # JS July 9, 2021: Note the minus sign added to delta_C in the call below:
    # delta_C < 0, but remove_liquidity_pool expects a positive number.
    pool.remove_liquidity_pool(asset_id, delta_R, delta_S, -delta_C, a)

    return ('pool', pool)


def q_to_r_pool(params, substep, state_history, prev_state, policy_input):
    """
    This function calculates and returns the pool variable after a trade
    where delta_Q is the amount being sold.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    delta_Q = policy_input['q_sold']  # amount of Q being sold by the user
    pool = prev_state['pool']
    Q = prev_state['Q']
    Y = prev_state['Y']
    Ri = pool.get_reserve(asset_id)
    Ci = pool.get_coefficient(asset_id)
    a = params['a']

    if delta_Q <= 0:
        return ('pool', pool)

    delta_Ri = ((1 / Ci) * ((Q * Y) / (Q + delta_Q)) ** (-a) - (Y ** (-a) / Ci) + Ri ** (-a)) ** (-1 / a) - Ri
    pool.q_to_r_pool(asset_id, delta_Ri)
    return ('pool', pool)


def addLiquidity_Y(params, substep, state_history, prev_state, policy_input):
    """
    This function updates and returns Y after a liquidity add; according to spec 6-28-21.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_R = policy_input['ri_deposit']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    P = pool.get_price(asset_id)

    Ri_plus = Ri + delta_R
    Ci_plus = Ci * ((Ri + delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri + delta_R) ** (-a))) ** (- (1 / a))

    return ('Y', Y_plus)


def removeLiquidity_Y(params, substep, state_history, prev_state, policy_input):
    """
    This function updates and returns Y after a liquidity remove; according to spec 6-28-21.
    """
    asset_id = policy_input['asset_id']  # defines asset subscript
    pool = prev_state['pool']
    a = params['a']

    delta_S = policy_input['HYDRA_burn']
    Ri = pool.pool[asset_id]['R']
    Y = prev_state['Y']
    Ci = pool.get_coefficient(asset_id)
    Q = prev_state['Q']
    Sq = prev_state['Sq']
    Si = pool.get_share(asset_id)
    P = pool.get_price(asset_id)

    delta_R = (delta_S / Si) * (Q / P)

    Ri_plus = Ri - delta_R
    print('Ri_plus = ', Ri_plus)
    Ci_plus = Ci * ((Ri - delta_R) / Ri) ** (a + 1)
    Y_plus = ((Y ** (-a)) - Ci * (Ri ** (-a)) + Ci_plus * ((Ri - delta_R) ** (-a))) ** (- (1 / a))

    return ('Y', Y_plus)
33.426295
123
0.602503
1,294
8,390
3.661515
0.087326
0.082735
0.030393
0.030393
0.809413
0.798649
0.757915
0.757915
0.75095
0.722668
0
0.008038
0.243743
8,390
250
124
33.56
0.738692
0.189392
0
0.821656
0
0
0.048153
0
0
0
0
0
0
1
0.070064
false
0
0.006369
0
0.152866
0.006369
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
b7d117cced02b6a682ca5b7f340dc395ffeec7b3
2,561
py
Python
WordsRestService/app/tests/test_colors.py
ernestocasanova/WordsRestService
ac60aaf773abfa174da8730082df005df5afc7f0
[ "MIT" ]
null
null
null
WordsRestService/app/tests/test_colors.py
ernestocasanova/WordsRestService
ac60aaf773abfa174da8730082df005df5afc7f0
[ "MIT" ]
null
null
null
WordsRestService/app/tests/test_colors.py
ernestocasanova/WordsRestService
ac60aaf773abfa174da8730082df005df5afc7f0
[ "MIT" ]
null
null
null
from django.urls import reverse, resolve
from mixer.backend.django import mixer
from app.utils.colors import Colors
import pytest


@pytest.mark.django_db
class TestColors:
    gray = '#D3D3D3'

    def test_color_nonestring_returns_random(self):
        words = ""
        color = Colors.get_color(words, len(words.split(' ')))
        assert color != self.gray

    def test_color_spacesinstring_returns_random(self):
        words = " ksjd jsakd jkasjd lka s "
        color = Colors.get_color(words, len(words.split(' ')))
        assert color != self.gray

    def test_color_spaceinstring_returns_random(self):
        words = " "
        color = Colors.get_color(words, len(words.split(' ')))
        assert color != self.gray

    def test_color_lesshundred_returns_notgray(self):
        words = "My test case one"
        color = Colors.get_color(words, len(words.split(' ')))
        assert color != self.gray

    def test_color_lesshundred_returns_random(self):
        words = "My test case one"
        color = Colors.get_color(words, len(words.split(' ')))
        assert color != self.gray

    def test_color_overthanhundred_returns_gray(self):
        words = "My test case one My test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case one My test case one My test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case one"
        color = Colors.get_color(words, len(words.split(' ')))
        assert color == self.gray

    def test_color_equaltohundred_returns_gray(self):
        words = "My test case one My test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case one My test case one My test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy test case oneMy"
        color = Colors.get_color(words, len(words.split(' ')))
        assert color == self.gray
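The tests pin down only two behaviours: a gray fallback '#D3D3D3' once the word count reaches roughly one hundred, and a random colour otherwise. A minimal sketch of an implementation consistent with those tests (hypothetical, not the actual app.utils.colors source):

import random

GRAY = '#D3D3D3'


def get_color(words, word_count):
    # Hypothetical reconstruction: long texts get the gray fallback,
    # everything else a random hex colour.
    if word_count >= 100:
        return GRAY
    return '#{:06X}'.format(random.randint(0, 0xFFFFFF))


assert get_color("My test case one", 4) != GRAY
assert get_color("x " * 150, 150) == GRAY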
58.204545
597
0.716127
393
2,561
4.575064
0.114504
0.311457
0.441046
0.567297
0.842047
0.842047
0.842047
0.842047
0.842047
0.842047
0
0.001504
0.221007
2,561
44
598
58.204545
0.899749
0
0
0.514286
0
0.057143
0.466432
0
0
0
0
0
0.2
1
0.2
false
0
0.114286
0
0.371429
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
b7ed61b4db1dd48eb21c03159701af531cf94765
3,920
py
Python
sanicbe/src/helpers/schema/user.py
fairuztahir/mieagro-be
6b591ecbad7d146f2c252677ba09b9afe2e6e921
[ "MIT" ]
null
null
null
sanicbe/src/helpers/schema/user.py
fairuztahir/mieagro-be
6b591ecbad7d146f2c252677ba09b9afe2e6e921
[ "MIT" ]
null
null
null
sanicbe/src/helpers/schema/user.py
fairuztahir/mieagro-be
6b591ecbad7d146f2c252677ba09b9afe2e6e921
[ "MIT" ]
null
null
null
user_post_schema = {
    'first_name': {'required': True, 'type': 'list',
                   'schema': {'type': 'string', 'empty': False, 'maxlength': 30}},
    'last_name': {'required': False, 'type': 'list',
                  'schema': {'type': 'string', 'empty': True, 'maxlength': 30}},
    'email': {'required': True, 'type': 'list',
              'schema': {'type': 'string', 'empty': False,
                         'regex': r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'}},
    'challenge': {'required': True, 'type': 'list',
                  'schema': {'type': 'string', 'empty': False, 'maxlength': 60}},
    'role': {'required': False, 'type': 'list',
             'schema': {'type': 'string', 'empty': False, 'maxlength': 30}},
}

user_upd_schema = {
    'first_name': {'required': False, 'type': 'string', 'empty': False, 'maxlength': 30},
    'last_name': {'required': False, 'type': 'string', 'empty': True, 'maxlength': 30},
    'email': {'required': False, 'type': 'string', 'empty': False,
              'regex': r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'},
    'challenge': {'required': False, 'type': 'string', 'empty': False, 'maxlength': 60},
    'role': {'required': False, 'type': 'string', 'empty': False, 'maxlength': 30},
    'status': {'required': False, 'type': 'boolean', 'empty': False},
}

user_reg_schema = {
    'first_name': {'required': True, 'type': 'string', 'empty': False, 'maxlength': 30},
    'last_name': {'required': False, 'type': 'string', 'empty': True, 'maxlength': 30},
    'email': {'required': True, 'type': 'string', 'empty': False,
              'regex': r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'},
    'challenge': {'required': True, 'type': 'string', 'empty': False, 'maxlength': 60},
    'role': {'required': False, 'type': 'string', 'empty': False, 'maxlength': 30},
    'x-key': {'required': True, 'type': 'string', 'empty': False, 'maxlength': 30},
}

users_post_schema = {
    'data': {
        'type': 'list',
        'required': True,
        'schema': {
            'type': 'dict',
            'schema': {
                'first_name': {'required': True, 'type': 'string', 'empty': False, 'maxlength': 30},
                'last_name': {'required': False, 'type': 'string', 'empty': True, 'maxlength': 30},
                'email': {'required': True, 'type': 'string', 'empty': False,
                          'regex': r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'},
                'challenge': {'required': True, 'type': 'string', 'empty': False, 'maxlength': 60},
                'role': {'required': False, 'type': 'string', 'empty': False, 'maxlength': 30},
            },
        },
    },
}
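The 'required'/'type'/'empty'/'maxlength'/'regex' vocabulary matches the Cerberus validation library, which Sanic services commonly pair with schemas like these; a minimal usage sketch under that assumption, validating a payload against the user_reg_schema defined above:

from cerberus import Validator

v = Validator(user_reg_schema)
ok = v.validate({
    'first_name': 'Ada',
    'email': 'ada@example.com',                 # must satisfy the email regex above
    'challenge': 'correct-horse-battery-staple',
    'x-key': 'abc123',
})
print(ok, v.errors)  # True, {} for a well-formed payload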
23.473054
83
0.341582
301
3,920
4.38206
0.122924
0.159212
0.238817
0.257771
0.90144
0.90144
0.883245
0.879454
0.829416
0.790751
0
0.026049
0.471173
3,920
166
84
23.614458
0.610227
0
0
0.730061
0
0.02454
0.294133
0.052041
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
b7fe19592fd9d26f2f4d8471bb2a92a44fe11e2c
1,309
py
Python
airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/airavata/api/ttypes.py
st3h3n/airavata
007e251bb2dea831f8acf1075e38afd9c3e1eafc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/airavata/api/ttypes.py
st3h3n/airavata
007e251bb2dea831f8acf1075e38afd9c3e1eafc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/airavata/api/ttypes.py
st3h3n/airavata
007e251bb2dea831f8acf1075e38afd9c3e1eafc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
#
# Autogenerated by Thrift Compiler (0.10.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
#  options string: py
#

from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
import sys

import airavata.api.error.ttypes
import airavata.model.security.ttypes
import airavata.model.ttypes
import airavata.model.credential.store.ttypes
import airavata.model.status.ttypes
import airavata.model.job.ttypes
import airavata.model.experiment.ttypes
import airavata.model.workspace.ttypes
import airavata.model.scheduling.ttypes
import airavata.model.application.io.ttypes
import airavata.model.appcatalog.appdeployment.ttypes
import airavata.model.appcatalog.appinterface.ttypes
import airavata.model.appcatalog.accountprovisioning.ttypes
import airavata.model.appcatalog.computeresource.ttypes
import airavata.model.appcatalog.storageresource.ttypes
import airavata.model.appcatalog.gatewayprofile.ttypes
import airavata.model.appcatalog.userresourceprofile.ttypes
import airavata.model.data.movement.ttypes
import airavata.model.workflow.ttypes
import airavata.model.data.replica.ttypes
import airavata.model.group.ttypes
import airavata.model.user.ttypes

from thrift.transport import TTransport
36.361111
93
0.854851
166
1,309
6.740964
0.373494
0.275246
0.375335
0.469169
0.270777
0
0
0
0
0
0
0.003292
0.071811
1,309
35
94
37.4
0.917695
0.096257
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
4d2b8f0fd1b92f8797555049e7095667e5e72354
21,401
py
Python
angr/procedures/definitions/win32_ktmw32.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_ktmw32.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_ktmw32.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
# pylint:disable=line-too-long import logging from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64 from .. import SIM_PROCEDURES as P from . import SimLibrary _l = logging.getLogger(name=__name__) lib = SimLibrary() lib.set_default_cc('X86', SimCCStdcall) lib.set_default_cc('AMD64', SimCCMicrosoftAMD64) lib.set_library_names("ktmw32.dll") prototypes = \ { # 'CreateTransaction': SimTypeFunction([SimTypePointer(SimStruct({"nLength": SimTypeInt(signed=False, label="UInt32"), "lpSecurityDescriptor": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "bInheritHandle": SimTypeInt(signed=True, label="Int32")}, name="SECURITY_ATTRIBUTES", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpTransactionAttributes", "UOW", "CreateOptions", "IsolationLevel", "IsolationFlags", "Timeout", "Description"]), # 'OpenTransaction': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["dwDesiredAccess", "TransactionId"]), # 'CommitTransaction': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle"]), # 'CommitTransactionAsync': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle"]), # 'RollbackTransaction': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle"]), # 'RollbackTransactionAsync': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle"]), # 'GetTransactionId': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle", "TransactionId"]), # 'GetTransactionInformation': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle", "Outcome", "IsolationLevel", "IsolationFlags", "Timeout", "BufferLength", "Description"]), # 'SetTransactionInformation': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), 
SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionHandle", "IsolationLevel", "IsolationFlags", "Timeout", "Description"]), # 'CreateTransactionManager': SimTypeFunction([SimTypePointer(SimStruct({"nLength": SimTypeInt(signed=False, label="UInt32"), "lpSecurityDescriptor": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "bInheritHandle": SimTypeInt(signed=True, label="Int32")}, name="SECURITY_ATTRIBUTES", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpTransactionAttributes", "LogFileName", "CreateOptions", "CommitStrength"]), # 'OpenTransactionManager': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["LogFileName", "DesiredAccess", "OpenOptions"]), # 'OpenTransactionManagerById': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["TransactionManagerId", "DesiredAccess", "OpenOptions"]), # 'RenameTransactionManager': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["LogFileName", "ExistingTransactionManagerGuid"]), # 'RollforwardTransactionManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionManagerHandle", "TmVirtualClock"]), # 'RecoverTransactionManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionManagerHandle"]), # 'GetCurrentClockTransactionManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionManagerHandle", "TmVirtualClock"]), # 'GetTransactionManagerId': 
SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["TransactionManagerHandle", "TransactionManagerId"]), # 'CreateResourceManager': SimTypeFunction([SimTypePointer(SimStruct({"nLength": SimTypeInt(signed=False, label="UInt32"), "lpSecurityDescriptor": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "bInheritHandle": SimTypeInt(signed=True, label="Int32")}, name="SECURITY_ATTRIBUTES", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpResourceManagerAttributes", "ResourceManagerId", "CreateOptions", "TmHandle", "Description"]), # 'OpenResourceManager': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["dwDesiredAccess", "TmHandle", "ResourceManagerId"]), # 'RecoverResourceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ResourceManagerHandle"]), # 'GetNotificationResourceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"TransactionKey": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "TransactionNotification": SimTypeInt(signed=False, label="UInt32"), "TmVirtualClock": SimTypeBottom(label="LARGE_INTEGER"), "ArgumentLength": SimTypeInt(signed=False, label="UInt32")}, name="TRANSACTION_NOTIFICATION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ResourceManagerHandle", "TransactionNotification", "NotificationLength", "dwMilliseconds", "ReturnLength"]), # 'GetNotificationResourceManagerAsync': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"TransactionKey": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "TransactionNotification": SimTypeInt(signed=False, label="UInt32"), "TmVirtualClock": SimTypeBottom(label="LARGE_INTEGER"), "ArgumentLength": SimTypeInt(signed=False, label="UInt32")}, name="TRANSACTION_NOTIFICATION", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, 
label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ResourceManagerHandle", "TransactionNotification", "TransactionNotificationLength", "ReturnLength", "lpOverlapped"]), # 'SetResourceManagerCompletionPort': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ResourceManagerHandle", "IoCompletionPortHandle", "CompletionKey"]), # 'CreateEnlistment': SimTypeFunction([SimTypePointer(SimStruct({"nLength": SimTypeInt(signed=False, label="UInt32"), "lpSecurityDescriptor": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "bInheritHandle": SimTypeInt(signed=True, label="Int32")}, name="SECURITY_ATTRIBUTES", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpEnlistmentAttributes", "ResourceManagerHandle", "TransactionHandle", "NotificationMask", "CreateOptions", "EnlistmentKey"]), # 'OpenEnlistment': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["dwDesiredAccess", "ResourceManagerHandle", "EnlistmentId"]), # 'RecoverEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "EnlistmentKey"]), # 'GetEnlistmentRecoveryInformation': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "BufferSize", "Buffer", "BufferUsed"]), # 'GetEnlistmentId': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "EnlistmentId"]), # 'SetEnlistmentRecoveryInformation': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "BufferSize", "Buffer"]), # 'PrepareEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, 
label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'PrePrepareEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'CommitEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'RollbackEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'PrePrepareComplete': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'PrepareComplete': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, 
align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'ReadOnlyEnlistment': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'CommitComplete': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'RollbackComplete': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), # 'SinglePhaseReject': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["EnlistmentHandle", "TmVirtualClock"]), } lib.set_prototypes(prototypes)
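The table above maps each ktmw32.dll export to a SimTypeFunction describing its argument and return types. A minimal sketch of declaring one more entry in the same style, placed before the final lib.set_prototypes(prototypes) call (the export name 'FakeKtmCall' and its signature are hypothetical, shown only to illustrate the shape):

# Hypothetical entry, same constructors as the table above; not a real ktmw32 export.
prototypes['FakeKtmCall'] = SimTypeFunction(
    [SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0),  # handle-like argument
     SimTypeInt(signed=False, label="UInt32")],                                       # flags argument
    SimTypeInt(signed=True, label="Int32"),                                           # BOOL-like return value
    arg_names=["TransactionHandle", "Flags"])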
214.01
1,451
0.733517
2,207
21,401
7.03942
0.077481
0.177137
0.111032
0.165744
0.853566
0.851506
0.846679
0.841594
0.836187
0.832067
0
0.019522
0.080884
21,401
99
1,452
216.171717
0.77031
0.001308
0
0
0
0
0.255107
0.04903
0
0
0
0
0
1
0
false
0
0.09434
0
0.09434
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
4d2d7717d5234b0c586e75aeb461a24d74b8261e
179
py
Python
pyexcel_io/plugin_api/__init__.py
vinraspa/pyexcel-io
1b4fde5b79c42c57ebed54ed94272d700c6f9317
[ "BSD-3-Clause" ]
52
2016-06-15T17:11:23.000Z
2022-02-07T12:44:07.000Z
pyexcel_io/plugin_api/__init__.py
vinraspa/pyexcel-io
1b4fde5b79c42c57ebed54ed94272d700c6f9317
[ "BSD-3-Clause" ]
100
2015-12-28T17:58:50.000Z
2022-01-29T19:48:39.000Z
pyexcel_io/plugin_api/__init__.py
vinraspa/pyexcel-io
1b4fde5b79c42c57ebed54ed94272d700c6f9317
[ "BSD-3-Clause" ]
20
2016-05-09T16:44:36.000Z
2021-09-27T11:54:00.000Z
from .abstract_sheet import ISheet, ISheetWriter, NamedContent  # noqa: F401
from .abstract_reader import IReader  # noqa: F401
from .abstract_writer import IWriter  # noqa: F401
44.75
76
0.793296
23
179
6.043478
0.565217
0.258993
0.172662
0.28777
0
0
0
0
0
0
0
0.058824
0.145251
179
3
77
59.666667
0.849673
0.178771
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
4d80bb45ba1e2aebd0b9f7ae8cbe286056bb760e
15,584
py
Python
tests/test_hap_handler.py
bdraco/HAP-python
a2a5ce109d08af2f4f5bda4075f2176a98123806
[ "Apache-2.0" ]
null
null
null
tests/test_hap_handler.py
bdraco/HAP-python
a2a5ce109d08af2f4f5bda4075f2176a98123806
[ "Apache-2.0" ]
null
null
null
tests/test_hap_handler.py
bdraco/HAP-python
a2a5ce109d08af2f4f5bda4075f2176a98123806
[ "Apache-2.0" ]
null
null
null
"""Tests for the HAPServerHandler.""" from unittest.mock import patch from uuid import UUID import json import pytest from pyhap import hap_handler from pyhap.accessory import Accessory, Bridge from pyhap.characteristic import CharacteristicError import pyhap.tlv as tlv CLIENT_UUID = UUID("7d0d1ee9-46fe-4a56-a115-69df3f6860c1") PUBLIC_KEY = b"\x99\x98d%\x8c\xf6h\x06\xfa\x85\x9f\x90\x82\xf2\xe8\x18\x9f\xf8\xc75\x1f>~\xc32\xc1OC\x13\xbfH\xad" def test_response(): """Test object creation of HAPResponse.""" response = hap_handler.HAPResponse() assert response.status_code == 500 assert "500" in str(response) def test_list_pairings_unencrypted(driver): """Verify an unencrypted list pairings request fails.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.REQUEST_TYPE, hap_handler.HAP_TLV_STATES.M5 ) handler.handle_pairings() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.AUTHENTICATION, } def test_list_pairings(driver): """Verify an encrypted list pairings request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.REQUEST_TYPE, hap_handler.HAP_TLV_STATES.M5 ) handler.handle_pairings() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2, hap_handler.HAP_TLV_TAGS.USERNAME: str(CLIENT_UUID).encode("utf8"), hap_handler.HAP_TLV_TAGS.PUBLIC_KEY: PUBLIC_KEY, hap_handler.HAP_TLV_TAGS.PERMISSIONS: hap_handler.HAP_PERMISSIONS.ADMIN, } def test_add_pairing(driver): """Verify an encrypted add pairing request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.REQUEST_TYPE, hap_handler.HAP_TLV_STATES.M3, hap_handler.HAP_TLV_TAGS.USERNAME, str(CLIENT_UUID).encode("utf-8"), hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, hap_handler.HAP_TLV_TAGS.PERMISSIONS, hap_handler.HAP_PERMISSIONS.ADMIN, ) assert driver.state.paired is False handler.handle_pairings() assert tlv.decode(response.body) == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2 } assert driver.state.paired is True assert CLIENT_UUID in driver.state.paired_clients def test_remove_pairing(driver): """Verify an encrypted remove pairing request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert driver.state.paired is True assert CLIENT_UUID in driver.state.paired_clients for _ in range(2): response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.REQUEST_TYPE, hap_handler.HAP_TLV_STATES.M4, 
hap_handler.HAP_TLV_TAGS.USERNAME, str(CLIENT_UUID).encode("utf-8"), hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, ) handler.handle_pairings() assert tlv.decode(response.body) == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2 } assert CLIENT_UUID not in driver.state.paired_clients assert driver.state.paired is False def test_invalid_pairings_request(driver): """Verify an encrypted invalid pairings request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.REQUEST_TYPE, hap_handler.HAP_TLV_STATES.M6 ) with pytest.raises(ValueError): handler.handle_pairings() def test_pair_verify_one(driver): """Verify an unencrypted pair verify one.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M1, hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, ) handler.handle_pair_verify() tlv_objects = tlv.decode(response.body) assert ( tlv_objects[hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM] == hap_handler.HAP_TLV_STATES.M2 ) def test_pair_verify_one_not_paired(driver): """Verify an unencrypted pair verify one.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M1, hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, ) handler.handle_pair_verify() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.AUTHENTICATION, } def test_pair_verify_two_invaild_state(driver): """Verify an unencrypted pair verify two.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M1, hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, ) handler.handle_pair_verify() tlv_objects = tlv.decode(response.body) assert ( tlv_objects[hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM] == hap_handler.HAP_TLV_STATES.M2 ) response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M3, hap_handler.HAP_TLV_TAGS.ENCRYPTED_DATA, b"invalid", ) handler.handle_pair_verify() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M4, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.AUTHENTICATION, } def test_invalid_pairing_request(driver): """Verify an unencrypted pair verify with an 
invalid sequence fails.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False driver.pair( CLIENT_UUID, PUBLIC_KEY, ) assert CLIENT_UUID in driver.state.paired_clients response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M6, hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, PUBLIC_KEY, ) with pytest.raises(ValueError): handler.handle_pair_verify() def test_handle_set_handle_set_characteristics_unencrypted(driver): """Verify an unencrypted set_characteristics.""" acc = Accessory(driver, "TestAcc", aid=1) assert acc.aid == 1 service = acc.driver.loader.get_service("GarageDoorOpener") acc.add_service(service) driver.add_accessory(acc) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False response = hap_handler.HAPResponse() handler.response = response handler.request_body = b'{"characteristics":[{"aid":1,"iid":9,"ev":true}]}' handler.handle_set_characteristics() assert response.status_code == 401 def test_handle_set_handle_set_characteristics_encrypted(driver): """Verify an encrypted set_characteristics.""" acc = Accessory(driver, "TestAcc", aid=1) assert acc.aid == 1 service = acc.driver.loader.get_service("GarageDoorOpener") acc.add_service(service) driver.add_accessory(acc) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True response = hap_handler.HAPResponse() handler.response = response handler.request_body = b'{"characteristics":[{"aid":1,"iid":9,"ev":true}]}' handler.handle_set_characteristics() assert response.status_code == 204 assert response.body == b"" def test_handle_set_handle_set_characteristics_encrypted_with_exception(driver): """Verify an encrypted set_characteristics.""" acc = Accessory(driver, "TestAcc", aid=1) assert acc.aid == 1 def _mock_failure(*_): raise ValueError service = acc.driver.loader.get_service("GarageDoorOpener") service.setter_callback = _mock_failure acc.add_service(service) driver.add_accessory(acc) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True response = hap_handler.HAPResponse() handler.response = response handler.request_body = b'{"characteristics":[{"aid":1,"iid":9,"value":1}]}' handler.handle_set_characteristics() assert response.status_code == 207 assert b"-70402" in response.body def test_handle_snapshot_encrypted_non_existant_accessory(driver): """Verify an encrypted snapshot with non-existant accessory.""" bridge = Bridge(driver, "Test Bridge") driver.add_accessory(bridge) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True response = hap_handler.HAPResponse() handler.response = response handler.request_body = b'{"image-height":360,"resource-type":"image","image-width":640,"aid":1411620844}' with pytest.raises(ValueError): handler.handle_resource() def test_attempt_to_pair_when_already_paired(driver): """Verify we respond with unavailable if already paired.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False driver.pair( CLIENT_UUID, PUBLIC_KEY, ) response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M1, ) handler.handle_pairing() tlv_objects = tlv.decode(response.body) assert tlv_objects == { 
hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M2, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.UNAVAILABLE, } def test_handle_get_characteristics_encrypted(driver): """Verify an encrypted get_characteristics.""" acc = Accessory(driver, "TestAcc", aid=1) assert acc.aid == 1 service = acc.driver.loader.get_service("GarageDoorOpener") acc.add_service(service) driver.add_accessory(acc) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = True response = hap_handler.HAPResponse() handler.response = response handler.path = "/characteristics?id=1.9" handler.handle_get_characteristics() assert response.status_code == 200 decoded_response = json.loads(response.body.decode()) assert "characteristics" in decoded_response assert "status" not in decoded_response["characteristics"][0] assert b'"value":0' in response.body with patch.object(acc.iid_manager, "get_obj", side_effect=CharacteristicError): response = hap_handler.HAPResponse() handler.response = response handler.path = "/characteristics?id=1.9" handler.handle_get_characteristics() assert response.status_code == 207 decoded_response = json.loads(response.body.decode()) assert "characteristics" in decoded_response assert "status" in decoded_response["characteristics"][0] assert decoded_response["characteristics"][0]["status"] == -70402 def test_invalid_pairing_two(driver): """Verify we respond with error with invalid request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M3, hap_handler.HAP_TLV_TAGS.ENCRYPTED_DATA, b"", hap_handler.HAP_TLV_TAGS.PUBLIC_KEY, b"", hap_handler.HAP_TLV_TAGS.PASSWORD_PROOF, b"", ) handler.accessory_handler.setup_srp_verifier() handler.handle_pairing() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M4, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.AUTHENTICATION, } def test_invalid_pairing_three(driver): """Verify we respond with error with invalid request.""" driver.add_accessory(Accessory(driver, "TestAcc")) handler = hap_handler.HAPServerHandler(driver, "peername") handler.is_encrypted = False response = hap_handler.HAPResponse() handler.response = response handler.request_body = tlv.encode( hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM, hap_handler.HAP_TLV_STATES.M5, hap_handler.HAP_TLV_TAGS.ENCRYPTED_DATA, b"", ) handler.accessory_handler.setup_srp_verifier() handler.accessory_handler.srp_verifier.set_A(b"") handler.handle_pairing() tlv_objects = tlv.decode(response.body) assert tlv_objects == { hap_handler.HAP_TLV_TAGS.SEQUENCE_NUM: hap_handler.HAP_TLV_STATES.M6, hap_handler.HAP_TLV_TAGS.ERROR_CODE: hap_handler.HAP_TLV_ERRORS.AUTHENTICATION, }
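Every pairing test above drives the handler through `tlv.encode`/`tlv.decode`. For readers unfamiliar with the wire format, the following is a minimal sketch of the TLV8 encoding these helpers operate on, under the assumption that tags are single-byte `bytes` values (as the `HAP_TLV_TAGS` constants are) and that values longer than 255 bytes are split into consecutive fragments sharing the same tag. It is an illustration, not pyhap's implementation.

```python
def tlv8_encode(*pairs: bytes) -> bytes:
    """Encode alternating (tag, value) byte pairs as TLV8 (sketch)."""
    out = bytearray()
    for tag, value in zip(pairs[::2], pairs[1::2]):
        if not value:
            out += tag + b"\x00"  # zero-length item
            continue
        for i in range(0, len(value), 255):
            chunk = value[i : i + 255]  # fragment long values at 255 bytes
            out += tag + bytes([len(chunk)]) + chunk
    return bytes(out)


def tlv8_decode(data: bytes) -> dict:
    """Decode TLV8 bytes into {tag: value} (sketch).

    Repeated tags are concatenated, which merges the fragments
    produced by the encoder above.
    """
    items: dict = {}
    i = 0
    while i < len(data):
        tag, length = data[i : i + 1], data[i + 1]
        items[tag] = items.get(tag, b"") + data[i + 2 : i + 2 + length]
        i += 2 + length
    return items


# Round trip, including a value long enough to be fragmented:
encoded = tlv8_encode(b"\x06", b"\x01", b"\x03", b"A" * 300)
assert tlv8_decode(encoded)[b"\x03"] == b"A" * 300
```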
32.198347
114
0.715798
1,933
15,584
5.473875
0.092085
0.110576
0.097061
0.116435
0.866175
0.8451
0.807013
0.795955
0.770154
0.768453
0
0.0116
0.186858
15,584
483
115
32.26501
0.82339
0.054928
0
0.735537
0
0.00551
0.059322
0.027747
0
0
0
0
0.118457
1
0.052342
false
0.002755
0.022039
0
0.07438
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4d8bff28b0313a3fb7a6fbcbe2346a6d2b9c4010
989
py
Python
tests/basic_stages/test_val_keep.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
438
2017-03-02T16:55:34.000Z
2019-12-20T03:29:36.000Z
tests/basic_stages/test_val_keep.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
68
2019-12-21T12:51:51.000Z
2022-03-13T13:06:14.000Z
tests/basic_stages/test_val_keep.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
30
2019-12-21T12:18:18.000Z
2022-03-17T05:53:19.000Z
"""Testing ValKeep pipeline stages.""" import pandas as pd from pdpipe.basic_stages import ValKeep def test_valkeep_with_columns(): """Testing the ColDrop pipeline stage.""" df = pd.DataFrame([[1, 4], [4, 5], [5, 11]], [1, 2, 3], ['a', 'b']) res_df = ValKeep([4, 5], 'a').apply(df) assert 1 not in res_df.index assert 2 in res_df.index assert 3 in res_df.index def test_valkeep_with_columns_verbose(): """Testing the ColDrop pipeline stage.""" df = pd.DataFrame([[1, 4], [4, 5], [5, 11]], [1, 2, 3], ['a', 'b']) res_df = ValKeep([4, 5], 'a').apply(df, verbose=True) assert 1 not in res_df.index assert 2 in res_df.index assert 3 in res_df.index def test_valkeep_without_columns(): """Testing the ColDrop pipeline stage.""" df = pd.DataFrame([[1, 4], [4, 5], [5, 11]], [1, 2, 3], ['a', 'b']) res_df = ValKeep([4, 5]).apply(df) assert 1 not in res_df.index assert 2 in res_df.index assert 3 not in res_df.index
29.969697
71
0.616785
170
989
3.452941
0.217647
0.102215
0.107325
0.183986
0.824532
0.749574
0.749574
0.749574
0.749574
0.749574
0
0.057545
0.209302
989
32
72
30.90625
0.693095
0.141557
0
0.55
0
0
0.009662
0
0
0
0
0
0.45
1
0.15
false
0
0.1
0
0.25
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
4dea46905e8db5b5e8bbfd2a289f99c91195f21f
204,397
py
Python
dask-fargate/.env/lib/python3.6/site-packages/aws_cdk/aws_ssm/__init__.py
chriscoombs/amazon-sagemaker-cdk-examples
ba848218dab59abb03f68dc92bcad7929841fcc9
[ "Apache-2.0" ]
41
2019-08-22T13:03:42.000Z
2022-02-24T05:07:32.000Z
dask-fargate/.env/lib/python3.6/site-packages/aws_cdk/aws_ssm/__init__.py
chriscoombs/amazon-sagemaker-cdk-examples
ba848218dab59abb03f68dc92bcad7929841fcc9
[ "Apache-2.0" ]
1
2020-06-17T17:44:28.000Z
2021-02-12T22:40:01.000Z
dask-fargate/.env/lib/python3.6/site-packages/aws_cdk/aws_ssm/__init__.py
chriscoombs/amazon-sagemaker-cdk-examples
ba848218dab59abb03f68dc92bcad7929841fcc9
[ "Apache-2.0" ]
31
2019-08-23T17:33:41.000Z
2022-03-28T09:20:07.000Z
""" ## AWS Systems Manager Construct Library <!--BEGIN STABILITY BANNER-->--- ![Stability: Stable](https://img.shields.io/badge/stability-Stable-success.svg?style=for-the-badge) --- <!--END STABILITY BANNER--> This module is part of the [AWS Cloud Development Kit](https://github.com/aws/aws-cdk) project. ### Installation Install the module: ```console $ npm i @aws-cdk/aws-ssm ``` Import it into your code: ```python # Example automatically generated. See https://github.com/aws/jsii/issues/826 import aws_cdk.aws_ssm as ssm ``` ### Using existing SSM Parameters in your CDK app You can reference existing SSM Parameter Store values that you want to use in your CDK app by using `ssm.ParameterStoreString`: ```python # Example automatically generated. See https://github.com/aws/jsii/issues/826 # Retrieve the latest value of the non-secret parameter # with name "/My/String/Parameter". string_value = ssm.StringParameter.from_string_parameter_attributes(self, "MyValue", parameter_name="/My/Public/Parameter" ).string_value # Retrieve a specific version of the secret (SecureString) parameter. # 'version' is always required. secret_value = ssm.StringParameter.from_secure_string_parameter_attributes(self, "MySecureValue", parameter_name="/My/Secret/Parameter", version=5 ) ``` ### Creating new SSM Parameters in your CDK app You can create either `ssm.StringParameter` or `ssm.StringListParameter`s in a CDK app. These are public (not secret) values. Parameters of type *SecretString* cannot be created directly from a CDK application; if you want to provision secrets automatically, use Secrets Manager Secrets (see the `@aws-cdk/aws-secretsmanager` package). ```python # Example automatically generated. See https://github.com/aws/jsii/issues/826 # Create a new SSM Parameter holding a String param = ssm.StringParameter(stack, "StringParameter", # description: 'Some user-friendly description', # name: 'ParameterName', string_value="Initial parameter value" ) # Grant read access to some Role param.grant_read(role) # Create a new SSM Parameter holding a StringList list_parameter = ssm.StringListParameter(stack, "StringListParameter", # description: 'Some user-friendly description', # name: 'ParameterName', string_list_value=["Initial parameter value A", "Initial parameter value B"] ) ``` When specifying an `allowedPattern`, the values provided as string literals are validated against the pattern and an exception is raised if a value provided does not comply. """ import abc import datetime import enum import typing import jsii import jsii.compat import publication from jsii.python import classproperty import aws_cdk.aws_iam import aws_cdk.aws_kms import aws_cdk.core import aws_cdk.cx_api __jsii_assembly__ = jsii.JSIIAssembly.load("@aws-cdk/aws-ssm", "1.18.0", __name__, "aws-ssm@1.18.0.jsii.tgz") @jsii.implements(aws_cdk.core.IInspectable) class CfnAssociation(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnAssociation"): """A CloudFormation ``AWS::SSM::Association``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html cloudformationResource: :cloudformationResource:: AWS::SSM::Association """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, name: str, association_name: typing.Optional[str]=None, document_version: typing.Optional[str]=None, instance_id: typing.Optional[str]=None, output_location: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["InstanceAssociationOutputLocationProperty"]]]=None, parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.Mapping[str,typing.Union[aws_cdk.core.IResolvable, "ParameterValuesProperty"]]]]]=None, schedule_expression: typing.Optional[str]=None, targets: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetProperty"]]]]]=None) -> None: """Create a new ``AWS::SSM::Association``. :param scope: - scope in which this resource is defined. :param id: - scoped id of the resource. :param props: - resource properties. :param name: ``AWS::SSM::Association.Name``. :param association_name: ``AWS::SSM::Association.AssociationName``. :param document_version: ``AWS::SSM::Association.DocumentVersion``. :param instance_id: ``AWS::SSM::Association.InstanceId``. :param output_location: ``AWS::SSM::Association.OutputLocation``. :param parameters: ``AWS::SSM::Association.Parameters``. :param schedule_expression: ``AWS::SSM::Association.ScheduleExpression``. :param targets: ``AWS::SSM::Association.Targets``. """ props = CfnAssociationProps(name=name, association_name=association_name, document_version=document_version, instance_id=instance_id, output_location=output_location, parameters=parameters, schedule_expression=schedule_expression, targets=targets) jsii.create(CfnAssociation, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="name") def name(self) -> str: """``AWS::SSM::Association.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-name """ return jsii.get(self, "name") @name.setter def name(self, value: str): return jsii.set(self, "name", value) @property @jsii.member(jsii_name="associationName") def association_name(self) -> typing.Optional[str]: """``AWS::SSM::Association.AssociationName``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-associationname """ return jsii.get(self, "associationName") @association_name.setter def association_name(self, value: typing.Optional[str]): return jsii.set(self, "associationName", value) @property @jsii.member(jsii_name="documentVersion") def document_version(self) -> typing.Optional[str]: """``AWS::SSM::Association.DocumentVersion``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-documentversion """ return jsii.get(self, "documentVersion") @document_version.setter def document_version(self, value: typing.Optional[str]): return jsii.set(self, "documentVersion", value) @property @jsii.member(jsii_name="instanceId") def instance_id(self) -> typing.Optional[str]: """``AWS::SSM::Association.InstanceId``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-instanceid """ return jsii.get(self, "instanceId") @instance_id.setter def instance_id(self, value: typing.Optional[str]): return jsii.set(self, "instanceId", value) @property @jsii.member(jsii_name="outputLocation") def output_location(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["InstanceAssociationOutputLocationProperty"]]]: """``AWS::SSM::Association.OutputLocation``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-outputlocation """ return jsii.get(self, "outputLocation") @output_location.setter def output_location(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["InstanceAssociationOutputLocationProperty"]]]): return jsii.set(self, "outputLocation", value) @property @jsii.member(jsii_name="parameters") def parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.Mapping[str,typing.Union[aws_cdk.core.IResolvable, "ParameterValuesProperty"]]]]]: """``AWS::SSM::Association.Parameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-parameters """ return jsii.get(self, "parameters") @parameters.setter def parameters(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.Mapping[str,typing.Union[aws_cdk.core.IResolvable, "ParameterValuesProperty"]]]]]): return jsii.set(self, "parameters", value) @property @jsii.member(jsii_name="scheduleExpression") def schedule_expression(self) -> typing.Optional[str]: """``AWS::SSM::Association.ScheduleExpression``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-scheduleexpression """ return jsii.get(self, "scheduleExpression") @schedule_expression.setter def schedule_expression(self, value: typing.Optional[str]): return jsii.set(self, "scheduleExpression", value) @property @jsii.member(jsii_name="targets") def targets(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetProperty"]]]]]: """``AWS::SSM::Association.Targets``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-targets """ return jsii.get(self, "targets") @targets.setter def targets(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetProperty"]]]]]): return jsii.set(self, "targets", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnAssociation.InstanceAssociationOutputLocationProperty", jsii_struct_bases=[], name_mapping={'s3_location': 's3Location'}) class InstanceAssociationOutputLocationProperty(): def __init__(self, *, s3_location: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAssociation.S3OutputLocationProperty"]]]=None): """ :param s3_location: ``CfnAssociation.InstanceAssociationOutputLocationProperty.S3Location``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-instanceassociationoutputlocation.html """ self._values = { } if s3_location is not None: self._values["s3_location"] = s3_location @property def s3_location(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAssociation.S3OutputLocationProperty"]]]: """``CfnAssociation.InstanceAssociationOutputLocationProperty.S3Location``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-instanceassociationoutputlocation.html#cfn-ssm-association-instanceassociationoutputlocation-s3location """ return self._values.get('s3_location') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'InstanceAssociationOutputLocationProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnAssociation.ParameterValuesProperty", jsii_struct_bases=[], name_mapping={'parameter_values': 'parameterValues'}) class ParameterValuesProperty(): def __init__(self, *, parameter_values: typing.List[str]): """ :param parameter_values: ``CfnAssociation.ParameterValuesProperty.ParameterValues``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-parametervalues.html """ self._values = { 'parameter_values': parameter_values, } @property def parameter_values(self) -> typing.List[str]: """``CfnAssociation.ParameterValuesProperty.ParameterValues``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-parametervalues.html#cfn-ssm-association-parametervalues-parametervalues """ return self._values.get('parameter_values') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'ParameterValuesProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnAssociation.S3OutputLocationProperty", jsii_struct_bases=[], name_mapping={'output_s3_bucket_name': 'outputS3BucketName', 'output_s3_key_prefix': 'outputS3KeyPrefix'}) class S3OutputLocationProperty(): def __init__(self, *, output_s3_bucket_name: typing.Optional[str]=None, output_s3_key_prefix: typing.Optional[str]=None): """ :param output_s3_bucket_name: ``CfnAssociation.S3OutputLocationProperty.OutputS3BucketName``. :param output_s3_key_prefix: ``CfnAssociation.S3OutputLocationProperty.OutputS3KeyPrefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-s3outputlocation.html """ self._values = { } if output_s3_bucket_name is not None: self._values["output_s3_bucket_name"] = output_s3_bucket_name if output_s3_key_prefix is not None: self._values["output_s3_key_prefix"] = output_s3_key_prefix @property def output_s3_bucket_name(self) -> typing.Optional[str]: """``CfnAssociation.S3OutputLocationProperty.OutputS3BucketName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-s3outputlocation.html#cfn-ssm-association-s3outputlocation-outputs3bucketname """ return self._values.get('output_s3_bucket_name') @property def output_s3_key_prefix(self) -> typing.Optional[str]: """``CfnAssociation.S3OutputLocationProperty.OutputS3KeyPrefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-s3outputlocation.html#cfn-ssm-association-s3outputlocation-outputs3keyprefix """ return self._values.get('output_s3_key_prefix') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'S3OutputLocationProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnAssociation.TargetProperty", jsii_struct_bases=[], name_mapping={'key': 'key', 'values': 'values'}) class TargetProperty(): def __init__(self, *, key: str, values: typing.List[str]): """ :param key: ``CfnAssociation.TargetProperty.Key``. :param values: ``CfnAssociation.TargetProperty.Values``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-target.html """ self._values = { 'key': key, 'values': values, } @property def key(self) -> str: """``CfnAssociation.TargetProperty.Key``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-target.html#cfn-ssm-association-target-key """ return self._values.get('key') @property def values(self) -> typing.List[str]: """``CfnAssociation.TargetProperty.Values``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-association-target.html#cfn-ssm-association-target-values """ return self._values.get('values') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'TargetProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnAssociationProps", jsii_struct_bases=[], name_mapping={'name': 'name', 'association_name': 'associationName', 'document_version': 'documentVersion', 'instance_id': 'instanceId', 'output_location': 'outputLocation', 'parameters': 'parameters', 'schedule_expression': 'scheduleExpression', 'targets': 'targets'}) class CfnAssociationProps(): def __init__(self, *, name: str, association_name: typing.Optional[str]=None, document_version: typing.Optional[str]=None, instance_id: typing.Optional[str]=None, output_location: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAssociation.InstanceAssociationOutputLocationProperty"]]]=None, parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.Mapping[str,typing.Union[aws_cdk.core.IResolvable, "CfnAssociation.ParameterValuesProperty"]]]]]=None, schedule_expression: typing.Optional[str]=None, targets: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAssociation.TargetProperty"]]]]]=None): """Properties for defining a ``AWS::SSM::Association``. :param name: ``AWS::SSM::Association.Name``. :param association_name: ``AWS::SSM::Association.AssociationName``. :param document_version: ``AWS::SSM::Association.DocumentVersion``. :param instance_id: ``AWS::SSM::Association.InstanceId``. :param output_location: ``AWS::SSM::Association.OutputLocation``. :param parameters: ``AWS::SSM::Association.Parameters``. :param schedule_expression: ``AWS::SSM::Association.ScheduleExpression``. :param targets: ``AWS::SSM::Association.Targets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html """ self._values = { 'name': name, } if association_name is not None: self._values["association_name"] = association_name if document_version is not None: self._values["document_version"] = document_version if instance_id is not None: self._values["instance_id"] = instance_id if output_location is not None: self._values["output_location"] = output_location if parameters is not None: self._values["parameters"] = parameters if schedule_expression is not None: self._values["schedule_expression"] = schedule_expression if targets is not None: self._values["targets"] = targets @property def name(self) -> str: """``AWS::SSM::Association.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-name """ return self._values.get('name') @property def association_name(self) -> typing.Optional[str]: """``AWS::SSM::Association.AssociationName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-associationname """ return self._values.get('association_name') @property def document_version(self) -> typing.Optional[str]: """``AWS::SSM::Association.DocumentVersion``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-documentversion """ return self._values.get('document_version') @property def instance_id(self) -> typing.Optional[str]: """``AWS::SSM::Association.InstanceId``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-instanceid """ return self._values.get('instance_id') @property def output_location(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAssociation.InstanceAssociationOutputLocationProperty"]]]: """``AWS::SSM::Association.OutputLocation``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-outputlocation """ return self._values.get('output_location') @property def parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.Mapping[str,typing.Union[aws_cdk.core.IResolvable, "CfnAssociation.ParameterValuesProperty"]]]]]: """``AWS::SSM::Association.Parameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-parameters """ return self._values.get('parameters') @property def schedule_expression(self) -> typing.Optional[str]: """``AWS::SSM::Association.ScheduleExpression``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-scheduleexpression """ return self._values.get('schedule_expression') @property def targets(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAssociation.TargetProperty"]]]]]: """``AWS::SSM::Association.Targets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-association.html#cfn-ssm-association-targets """ return self._values.get('targets') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CfnAssociationProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(aws_cdk.core.IInspectable) class CfnDocument(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnDocument"): """A CloudFormation ``AWS::SSM::Document``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html cloudformationResource: :cloudformationResource:: AWS::SSM::Document """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, content: typing.Any, document_type: typing.Optional[str]=None, tags: typing.Optional[typing.List[aws_cdk.core.CfnTag]]=None) -> None: """Create a new ``AWS::SSM::Document``. :param scope: - scope in which this resource is defined. :param id: - scoped id of the resource. :param props: - resource properties. :param content: ``AWS::SSM::Document.Content``. :param document_type: ``AWS::SSM::Document.DocumentType``. :param tags: ``AWS::SSM::Document.Tags``. """ props = CfnDocumentProps(content=content, document_type=document_type, tags=tags) jsii.create(CfnDocument, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. 
:param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="tags") def tags(self) -> aws_cdk.core.TagManager: """``AWS::SSM::Document.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-tags """ return jsii.get(self, "tags") @property @jsii.member(jsii_name="content") def content(self) -> typing.Any: """``AWS::SSM::Document.Content``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-content """ return jsii.get(self, "content") @content.setter def content(self, value: typing.Any): return jsii.set(self, "content", value) @property @jsii.member(jsii_name="documentType") def document_type(self) -> typing.Optional[str]: """``AWS::SSM::Document.DocumentType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-documenttype """ return jsii.get(self, "documentType") @document_type.setter def document_type(self, value: typing.Optional[str]): return jsii.set(self, "documentType", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnDocumentProps", jsii_struct_bases=[], name_mapping={'content': 'content', 'document_type': 'documentType', 'tags': 'tags'}) class CfnDocumentProps(): def __init__(self, *, content: typing.Any, document_type: typing.Optional[str]=None, tags: typing.Optional[typing.List[aws_cdk.core.CfnTag]]=None): """Properties for defining a ``AWS::SSM::Document``. :param content: ``AWS::SSM::Document.Content``. :param document_type: ``AWS::SSM::Document.DocumentType``. :param tags: ``AWS::SSM::Document.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html """ self._values = { 'content': content, } if document_type is not None: self._values["document_type"] = document_type if tags is not None: self._values["tags"] = tags @property def content(self) -> typing.Any: """``AWS::SSM::Document.Content``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-content """ return self._values.get('content') @property def document_type(self) -> typing.Optional[str]: """``AWS::SSM::Document.DocumentType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-documenttype """ return self._values.get('document_type') @property def tags(self) -> typing.Optional[typing.List[aws_cdk.core.CfnTag]]: """``AWS::SSM::Document.Tags``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-document.html#cfn-ssm-document-tags """ return self._values.get('tags') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CfnDocumentProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(aws_cdk.core.IInspectable) class CfnMaintenanceWindow(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindow"): """A CloudFormation ``AWS::SSM::MaintenanceWindow``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html cloudformationResource: :cloudformationResource:: AWS::SSM::MaintenanceWindow """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, allow_unassociated_targets: typing.Union[bool, aws_cdk.core.IResolvable], cutoff: jsii.Number, duration: jsii.Number, name: str, schedule: str, description: typing.Optional[str]=None, end_date: typing.Optional[str]=None, schedule_timezone: typing.Optional[str]=None, start_date: typing.Optional[str]=None, tags: typing.Optional[typing.List[aws_cdk.core.CfnTag]]=None) -> None: """Create a new ``AWS::SSM::MaintenanceWindow``. :param scope: - scope in which this resource is defined. :param id: - scoped id of the resource. :param props: - resource properties. :param allow_unassociated_targets: ``AWS::SSM::MaintenanceWindow.AllowUnassociatedTargets``. :param cutoff: ``AWS::SSM::MaintenanceWindow.Cutoff``. :param duration: ``AWS::SSM::MaintenanceWindow.Duration``. :param name: ``AWS::SSM::MaintenanceWindow.Name``. :param schedule: ``AWS::SSM::MaintenanceWindow.Schedule``. :param description: ``AWS::SSM::MaintenanceWindow.Description``. :param end_date: ``AWS::SSM::MaintenanceWindow.EndDate``. :param schedule_timezone: ``AWS::SSM::MaintenanceWindow.ScheduleTimezone``. :param start_date: ``AWS::SSM::MaintenanceWindow.StartDate``. :param tags: ``AWS::SSM::MaintenanceWindow.Tags``. """ props = CfnMaintenanceWindowProps(allow_unassociated_targets=allow_unassociated_targets, cutoff=cutoff, duration=duration, name=name, schedule=schedule, description=description, end_date=end_date, schedule_timezone=schedule_timezone, start_date=start_date, tags=tags) jsii.create(CfnMaintenanceWindow, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="tags") def tags(self) -> aws_cdk.core.TagManager: """``AWS::SSM::MaintenanceWindow.Tags``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-tags """ return jsii.get(self, "tags") @property @jsii.member(jsii_name="allowUnassociatedTargets") def allow_unassociated_targets(self) -> typing.Union[bool, aws_cdk.core.IResolvable]: """``AWS::SSM::MaintenanceWindow.AllowUnassociatedTargets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-allowunassociatedtargets """ return jsii.get(self, "allowUnassociatedTargets") @allow_unassociated_targets.setter def allow_unassociated_targets(self, value: typing.Union[bool, aws_cdk.core.IResolvable]): return jsii.set(self, "allowUnassociatedTargets", value) @property @jsii.member(jsii_name="cutoff") def cutoff(self) -> jsii.Number: """``AWS::SSM::MaintenanceWindow.Cutoff``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-cutoff """ return jsii.get(self, "cutoff") @cutoff.setter def cutoff(self, value: jsii.Number): return jsii.set(self, "cutoff", value) @property @jsii.member(jsii_name="duration") def duration(self) -> jsii.Number: """``AWS::SSM::MaintenanceWindow.Duration``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-duration """ return jsii.get(self, "duration") @duration.setter def duration(self, value: jsii.Number): return jsii.set(self, "duration", value) @property @jsii.member(jsii_name="name") def name(self) -> str: """``AWS::SSM::MaintenanceWindow.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-name """ return jsii.get(self, "name") @name.setter def name(self, value: str): return jsii.set(self, "name", value) @property @jsii.member(jsii_name="schedule") def schedule(self) -> str: """``AWS::SSM::MaintenanceWindow.Schedule``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-schedule """ return jsii.get(self, "schedule") @schedule.setter def schedule(self, value: str): return jsii.set(self, "schedule", value) @property @jsii.member(jsii_name="description") def description(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.Description``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-description """ return jsii.get(self, "description") @description.setter def description(self, value: typing.Optional[str]): return jsii.set(self, "description", value) @property @jsii.member(jsii_name="endDate") def end_date(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.EndDate``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-enddate """ return jsii.get(self, "endDate") @end_date.setter def end_date(self, value: typing.Optional[str]): return jsii.set(self, "endDate", value) @property @jsii.member(jsii_name="scheduleTimezone") def schedule_timezone(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.ScheduleTimezone``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-scheduletimezone """ return jsii.get(self, "scheduleTimezone") @schedule_timezone.setter def schedule_timezone(self, value: typing.Optional[str]): return jsii.set(self, "scheduleTimezone", value) @property @jsii.member(jsii_name="startDate") def start_date(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.StartDate``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-startdate """ return jsii.get(self, "startDate") @start_date.setter def start_date(self, value: typing.Optional[str]): return jsii.set(self, "startDate", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowProps", jsii_struct_bases=[], name_mapping={'allow_unassociated_targets': 'allowUnassociatedTargets', 'cutoff': 'cutoff', 'duration': 'duration', 'name': 'name', 'schedule': 'schedule', 'description': 'description', 'end_date': 'endDate', 'schedule_timezone': 'scheduleTimezone', 'start_date': 'startDate', 'tags': 'tags'}) class CfnMaintenanceWindowProps(): def __init__(self, *, allow_unassociated_targets: typing.Union[bool, aws_cdk.core.IResolvable], cutoff: jsii.Number, duration: jsii.Number, name: str, schedule: str, description: typing.Optional[str]=None, end_date: typing.Optional[str]=None, schedule_timezone: typing.Optional[str]=None, start_date: typing.Optional[str]=None, tags: typing.Optional[typing.List[aws_cdk.core.CfnTag]]=None): """Properties for defining a ``AWS::SSM::MaintenanceWindow``. :param allow_unassociated_targets: ``AWS::SSM::MaintenanceWindow.AllowUnassociatedTargets``. :param cutoff: ``AWS::SSM::MaintenanceWindow.Cutoff``. :param duration: ``AWS::SSM::MaintenanceWindow.Duration``. :param name: ``AWS::SSM::MaintenanceWindow.Name``. :param schedule: ``AWS::SSM::MaintenanceWindow.Schedule``. :param description: ``AWS::SSM::MaintenanceWindow.Description``. :param end_date: ``AWS::SSM::MaintenanceWindow.EndDate``. :param schedule_timezone: ``AWS::SSM::MaintenanceWindow.ScheduleTimezone``. :param start_date: ``AWS::SSM::MaintenanceWindow.StartDate``. :param tags: ``AWS::SSM::MaintenanceWindow.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html """ self._values = { 'allow_unassociated_targets': allow_unassociated_targets, 'cutoff': cutoff, 'duration': duration, 'name': name, 'schedule': schedule, } if description is not None: self._values["description"] = description if end_date is not None: self._values["end_date"] = end_date if schedule_timezone is not None: self._values["schedule_timezone"] = schedule_timezone if start_date is not None: self._values["start_date"] = start_date if tags is not None: self._values["tags"] = tags @property def allow_unassociated_targets(self) -> typing.Union[bool, aws_cdk.core.IResolvable]: """``AWS::SSM::MaintenanceWindow.AllowUnassociatedTargets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-allowunassociatedtargets """ return self._values.get('allow_unassociated_targets') @property def cutoff(self) -> jsii.Number: """``AWS::SSM::MaintenanceWindow.Cutoff``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-cutoff """ return self._values.get('cutoff') @property def duration(self) -> jsii.Number: """``AWS::SSM::MaintenanceWindow.Duration``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-duration """ return self._values.get('duration') @property def name(self) -> str: """``AWS::SSM::MaintenanceWindow.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-name """ return self._values.get('name') @property def schedule(self) -> str: """``AWS::SSM::MaintenanceWindow.Schedule``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-schedule """ return self._values.get('schedule') @property def description(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.Description``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-description """ return self._values.get('description') @property def end_date(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.EndDate``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-enddate """ return self._values.get('end_date') @property def schedule_timezone(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.ScheduleTimezone``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-scheduletimezone """ return self._values.get('schedule_timezone') @property def start_date(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindow.StartDate``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-startdate """ return self._values.get('start_date') @property def tags(self) -> typing.Optional[typing.List[aws_cdk.core.CfnTag]]: """``AWS::SSM::MaintenanceWindow.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindow.html#cfn-ssm-maintenancewindow-tags """ return self._values.get('tags') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CfnMaintenanceWindowProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(aws_cdk.core.IInspectable) class CfnMaintenanceWindowTarget(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTarget"): """A CloudFormation ``AWS::SSM::MaintenanceWindowTarget``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html cloudformationResource: :cloudformationResource:: AWS::SSM::MaintenanceWindowTarget """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, resource_type: str, targets: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetsProperty"]]], window_id: str, description: typing.Optional[str]=None, name: typing.Optional[str]=None, owner_information: typing.Optional[str]=None) -> None: """Create a new ``AWS::SSM::MaintenanceWindowTarget``. :param scope: - scope in which this resource is defined. :param id: - scoped id of the resource. :param props: - resource properties. :param resource_type: ``AWS::SSM::MaintenanceWindowTarget.ResourceType``. :param targets: ``AWS::SSM::MaintenanceWindowTarget.Targets``. :param window_id: ``AWS::SSM::MaintenanceWindowTarget.WindowId``. :param description: ``AWS::SSM::MaintenanceWindowTarget.Description``. :param name: ``AWS::SSM::MaintenanceWindowTarget.Name``. :param owner_information: ``AWS::SSM::MaintenanceWindowTarget.OwnerInformation``. """ props = CfnMaintenanceWindowTargetProps(resource_type=resource_type, targets=targets, window_id=window_id, description=description, name=name, owner_information=owner_information) jsii.create(CfnMaintenanceWindowTarget, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="resourceType") def resource_type(self) -> str: """``AWS::SSM::MaintenanceWindowTarget.ResourceType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-resourcetype """ return jsii.get(self, "resourceType") @resource_type.setter def resource_type(self, value: str): return jsii.set(self, "resourceType", value) @property @jsii.member(jsii_name="targets") def targets(self) -> typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetsProperty"]]]: """``AWS::SSM::MaintenanceWindowTarget.Targets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-targets """ return jsii.get(self, "targets") @targets.setter def targets(self, value: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetsProperty"]]]): return jsii.set(self, "targets", value) @property @jsii.member(jsii_name="windowId") def window_id(self) -> str: """``AWS::SSM::MaintenanceWindowTarget.WindowId``. 
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-windowid
        """
        return jsii.get(self, "windowId")

    @window_id.setter
    def window_id(self, value: str):
        return jsii.set(self, "windowId", value)

    @property
    @jsii.member(jsii_name="description")
    def description(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.Description``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-description
        """
        return jsii.get(self, "description")

    @description.setter
    def description(self, value: typing.Optional[str]):
        return jsii.set(self, "description", value)

    @property
    @jsii.member(jsii_name="name")
    def name(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.Name``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-name
        """
        return jsii.get(self, "name")

    @name.setter
    def name(self, value: typing.Optional[str]):
        return jsii.set(self, "name", value)

    @property
    @jsii.member(jsii_name="ownerInformation")
    def owner_information(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.OwnerInformation``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-ownerinformation
        """
        return jsii.get(self, "ownerInformation")

    @owner_information.setter
    def owner_information(self, value: typing.Optional[str]):
        return jsii.set(self, "ownerInformation", value)

    @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTarget.TargetsProperty", jsii_struct_bases=[], name_mapping={'key': 'key', 'values': 'values'})
    class TargetsProperty():
        def __init__(self, *, key: str, values: typing.Optional[typing.List[str]]=None):
            """
            :param key: ``CfnMaintenanceWindowTarget.TargetsProperty.Key``.
            :param values: ``CfnMaintenanceWindowTarget.TargetsProperty.Values``.
            :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtarget-targets.html
            """
            self._values = {
                'key': key,
            }
            if values is not None:
                self._values["values"] = values

        @property
        def key(self) -> str:
            """``CfnMaintenanceWindowTarget.TargetsProperty.Key``.
            :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtarget-targets.html#cfn-ssm-maintenancewindowtarget-targets-key
            """
            return self._values.get('key')

        @property
        def values(self) -> typing.Optional[typing.List[str]]:
            """``CfnMaintenanceWindowTarget.TargetsProperty.Values``.
            :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtarget-targets.html#cfn-ssm-maintenancewindowtarget-targets-values
            """
            return self._values.get('values')

        def __eq__(self, rhs) -> bool:
            return isinstance(rhs, self.__class__) and rhs._values == self._values

        def __ne__(self, rhs) -> bool:
            return not (rhs == self)

        def __repr__(self) -> str:
            return 'TargetsProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())


@jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTargetProps", jsii_struct_bases=[], name_mapping={'resource_type': 'resourceType', 'targets': 'targets', 'window_id': 'windowId', 'description': 'description', 'name': 'name', 'owner_information': 'ownerInformation'})
class CfnMaintenanceWindowTargetProps():
    def __init__(self, *, resource_type: str, targets: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnMaintenanceWindowTarget.TargetsProperty"]]], window_id: str, description: typing.Optional[str]=None, name: typing.Optional[str]=None, owner_information: typing.Optional[str]=None):
        """Properties for defining a ``AWS::SSM::MaintenanceWindowTarget``.
        :param resource_type: ``AWS::SSM::MaintenanceWindowTarget.ResourceType``.
        :param targets: ``AWS::SSM::MaintenanceWindowTarget.Targets``.
        :param window_id: ``AWS::SSM::MaintenanceWindowTarget.WindowId``.
        :param description: ``AWS::SSM::MaintenanceWindowTarget.Description``.
        :param name: ``AWS::SSM::MaintenanceWindowTarget.Name``.
        :param owner_information: ``AWS::SSM::MaintenanceWindowTarget.OwnerInformation``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html
        """
        self._values = {
            'resource_type': resource_type,
            'targets': targets,
            'window_id': window_id,
        }
        if description is not None:
            self._values["description"] = description
        if name is not None:
            self._values["name"] = name
        if owner_information is not None:
            self._values["owner_information"] = owner_information

    @property
    def resource_type(self) -> str:
        """``AWS::SSM::MaintenanceWindowTarget.ResourceType``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-resourcetype
        """
        return self._values.get('resource_type')

    @property
    def targets(self) -> typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnMaintenanceWindowTarget.TargetsProperty"]]]:
        """``AWS::SSM::MaintenanceWindowTarget.Targets``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-targets
        """
        return self._values.get('targets')

    @property
    def window_id(self) -> str:
        """``AWS::SSM::MaintenanceWindowTarget.WindowId``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-windowid
        """
        return self._values.get('window_id')

    @property
    def description(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.Description``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-description
        """
        return self._values.get('description')

    @property
    def name(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.Name``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-name
        """
        return self._values.get('name')

    @property
    def owner_information(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTarget.OwnerInformation``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtarget.html#cfn-ssm-maintenancewindowtarget-ownerinformation
        """
        return self._values.get('owner_information')

    def __eq__(self, rhs) -> bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs) -> bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return 'CfnMaintenanceWindowTargetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
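
# Editorial usage sketch (not part of the generated bindings): a minimal
# example, never invoked, of registering tagged instances with an existing
# ``AWS::SSM::MaintenanceWindow``. The construct id "ExampleTarget", the tag
# key "Patch", and the caller-supplied ``window_id`` are illustrative
# assumptions; "INSTANCE" is one of the values CloudFormation accepts for
# ``resource_type`` (see the linked documentation for the full list).
def _example_maintenance_window_target(scope: aws_cdk.core.Construct, window_id: str) -> CfnMaintenanceWindowTarget:
    return CfnMaintenanceWindowTarget(
        scope, "ExampleTarget",
        resource_type="INSTANCE",
        window_id=window_id,
        # Select every instance carrying the tag Patch=true.
        targets=[CfnMaintenanceWindowTarget.TargetsProperty(key="tag:Patch", values=["true"])],
    )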
""" props = CfnMaintenanceWindowTaskProps(max_concurrency=max_concurrency, max_errors=max_errors, priority=priority, targets=targets, task_arn=task_arn, task_type=task_type, window_id=window_id, description=description, logging_info=logging_info, name=name, service_role_arn=service_role_arn, task_invocation_parameters=task_invocation_parameters, task_parameters=task_parameters) jsii.create(CfnMaintenanceWindowTask, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="maxConcurrency") def max_concurrency(self) -> str: """``AWS::SSM::MaintenanceWindowTask.MaxConcurrency``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-maxconcurrency """ return jsii.get(self, "maxConcurrency") @max_concurrency.setter def max_concurrency(self, value: str): return jsii.set(self, "maxConcurrency", value) @property @jsii.member(jsii_name="maxErrors") def max_errors(self) -> str: """``AWS::SSM::MaintenanceWindowTask.MaxErrors``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-maxerrors """ return jsii.get(self, "maxErrors") @max_errors.setter def max_errors(self, value: str): return jsii.set(self, "maxErrors", value) @property @jsii.member(jsii_name="priority") def priority(self) -> jsii.Number: """``AWS::SSM::MaintenanceWindowTask.Priority``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-priority """ return jsii.get(self, "priority") @priority.setter def priority(self, value: jsii.Number): return jsii.set(self, "priority", value) @property @jsii.member(jsii_name="targets") def targets(self) -> typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetProperty"]]]: """``AWS::SSM::MaintenanceWindowTask.Targets``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-targets """ return jsii.get(self, "targets") @targets.setter def targets(self, value: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "TargetProperty"]]]): return jsii.set(self, "targets", value) @property @jsii.member(jsii_name="taskArn") def task_arn(self) -> str: """``AWS::SSM::MaintenanceWindowTask.TaskArn``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskarn """ return jsii.get(self, "taskArn") @task_arn.setter def task_arn(self, value: str): return jsii.set(self, "taskArn", value) @property @jsii.member(jsii_name="taskParameters") def task_parameters(self) -> typing.Any: """``AWS::SSM::MaintenanceWindowTask.TaskParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskparameters """ return jsii.get(self, "taskParameters") @task_parameters.setter def task_parameters(self, value: typing.Any): return jsii.set(self, "taskParameters", value) @property @jsii.member(jsii_name="taskType") def task_type(self) -> str: """``AWS::SSM::MaintenanceWindowTask.TaskType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-tasktype """ return jsii.get(self, "taskType") @task_type.setter def task_type(self, value: str): return jsii.set(self, "taskType", value) @property @jsii.member(jsii_name="windowId") def window_id(self) -> str: """``AWS::SSM::MaintenanceWindowTask.WindowId``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-windowid """ return jsii.get(self, "windowId") @window_id.setter def window_id(self, value: str): return jsii.set(self, "windowId", value) @property @jsii.member(jsii_name="description") def description(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindowTask.Description``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-description """ return jsii.get(self, "description") @description.setter def description(self, value: typing.Optional[str]): return jsii.set(self, "description", value) @property @jsii.member(jsii_name="loggingInfo") def logging_info(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["LoggingInfoProperty"]]]: """``AWS::SSM::MaintenanceWindowTask.LoggingInfo``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-logginginfo """ return jsii.get(self, "loggingInfo") @logging_info.setter def logging_info(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["LoggingInfoProperty"]]]): return jsii.set(self, "loggingInfo", value) @property @jsii.member(jsii_name="name") def name(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindowTask.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-name """ return jsii.get(self, "name") @name.setter def name(self, value: typing.Optional[str]): return jsii.set(self, "name", value) @property @jsii.member(jsii_name="serviceRoleArn") def service_role_arn(self) -> typing.Optional[str]: """``AWS::SSM::MaintenanceWindowTask.ServiceRoleArn``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-servicerolearn """ return jsii.get(self, "serviceRoleArn") @service_role_arn.setter def service_role_arn(self, value: typing.Optional[str]): return jsii.set(self, "serviceRoleArn", value) @property @jsii.member(jsii_name="taskInvocationParameters") def task_invocation_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["TaskInvocationParametersProperty"]]]: """``AWS::SSM::MaintenanceWindowTask.TaskInvocationParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters """ return jsii.get(self, "taskInvocationParameters") @task_invocation_parameters.setter def task_invocation_parameters(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["TaskInvocationParametersProperty"]]]): return jsii.set(self, "taskInvocationParameters", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.LoggingInfoProperty", jsii_struct_bases=[], name_mapping={'region': 'region', 's3_bucket': 's3Bucket', 's3_prefix': 's3Prefix'}) class LoggingInfoProperty(): def __init__(self, *, region: str, s3_bucket: str, s3_prefix: typing.Optional[str]=None): """ :param region: ``CfnMaintenanceWindowTask.LoggingInfoProperty.Region``. :param s3_bucket: ``CfnMaintenanceWindowTask.LoggingInfoProperty.S3Bucket``. :param s3_prefix: ``CfnMaintenanceWindowTask.LoggingInfoProperty.S3Prefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-logginginfo.html """ self._values = { 'region': region, 's3_bucket': s3_bucket, } if s3_prefix is not None: self._values["s3_prefix"] = s3_prefix @property def region(self) -> str: """``CfnMaintenanceWindowTask.LoggingInfoProperty.Region``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-logginginfo.html#cfn-ssm-maintenancewindowtask-logginginfo-region """ return self._values.get('region') @property def s3_bucket(self) -> str: """``CfnMaintenanceWindowTask.LoggingInfoProperty.S3Bucket``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-logginginfo.html#cfn-ssm-maintenancewindowtask-logginginfo-s3bucket """ return self._values.get('s3_bucket') @property def s3_prefix(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.LoggingInfoProperty.S3Prefix``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-logginginfo.html#cfn-ssm-maintenancewindowtask-logginginfo-s3prefix """ return self._values.get('s3_prefix') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'LoggingInfoProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty", jsii_struct_bases=[], name_mapping={'document_version': 'documentVersion', 'parameters': 'parameters'}) class MaintenanceWindowAutomationParametersProperty(): def __init__(self, *, document_version: typing.Optional[str]=None, parameters: typing.Any=None): """ :param document_version: ``CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty.DocumentVersion``. :param parameters: ``CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty.Parameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowautomationparameters.html """ self._values = { } if document_version is not None: self._values["document_version"] = document_version if parameters is not None: self._values["parameters"] = parameters @property def document_version(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty.DocumentVersion``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowautomationparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowautomationparameters-documentversion """ return self._values.get('document_version') @property def parameters(self) -> typing.Any: """``CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty.Parameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowautomationparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowautomationparameters-parameters """ return self._values.get('parameters') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'MaintenanceWindowAutomationParametersProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty", jsii_struct_bases=[], name_mapping={'client_context': 'clientContext', 'payload': 'payload', 'qualifier': 'qualifier'}) class MaintenanceWindowLambdaParametersProperty(): def __init__(self, *, client_context: typing.Optional[str]=None, payload: typing.Optional[str]=None, qualifier: typing.Optional[str]=None): """ :param client_context: ``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.ClientContext``. :param payload: ``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.Payload``. :param qualifier: ``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.Qualifier``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowlambdaparameters.html """ self._values = { } if client_context is not None: self._values["client_context"] = client_context if payload is not None: self._values["payload"] = payload if qualifier is not None: self._values["qualifier"] = qualifier @property def client_context(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.ClientContext``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowlambdaparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowlambdaparameters-clientcontext """ return self._values.get('client_context') @property def payload(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.Payload``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowlambdaparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowlambdaparameters-payload """ return self._values.get('payload') @property def qualifier(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty.Qualifier``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowlambdaparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowlambdaparameters-qualifier """ return self._values.get('qualifier') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'MaintenanceWindowLambdaParametersProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty", jsii_struct_bases=[], name_mapping={'comment': 'comment', 'document_hash': 'documentHash', 'document_hash_type': 'documentHashType', 'notification_config': 'notificationConfig', 'output_s3_bucket_name': 'outputS3BucketName', 'output_s3_key_prefix': 'outputS3KeyPrefix', 'parameters': 'parameters', 'service_role_arn': 'serviceRoleArn', 'timeout_seconds': 'timeoutSeconds'}) class MaintenanceWindowRunCommandParametersProperty(): def __init__(self, *, comment: typing.Optional[str]=None, document_hash: typing.Optional[str]=None, document_hash_type: typing.Optional[str]=None, notification_config: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.NotificationConfigProperty"]]]=None, output_s3_bucket_name: typing.Optional[str]=None, output_s3_key_prefix: typing.Optional[str]=None, parameters: typing.Any=None, service_role_arn: typing.Optional[str]=None, timeout_seconds: typing.Optional[jsii.Number]=None): """ :param comment: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.Comment``. :param document_hash: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.DocumentHash``. :param document_hash_type: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.DocumentHashType``. :param notification_config: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.NotificationConfig``. 
:param output_s3_bucket_name: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.OutputS3BucketName``. :param output_s3_key_prefix: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.OutputS3KeyPrefix``. :param parameters: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.Parameters``. :param service_role_arn: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.ServiceRoleArn``. :param timeout_seconds: ``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.TimeoutSeconds``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html """ self._values = { } if comment is not None: self._values["comment"] = comment if document_hash is not None: self._values["document_hash"] = document_hash if document_hash_type is not None: self._values["document_hash_type"] = document_hash_type if notification_config is not None: self._values["notification_config"] = notification_config if output_s3_bucket_name is not None: self._values["output_s3_bucket_name"] = output_s3_bucket_name if output_s3_key_prefix is not None: self._values["output_s3_key_prefix"] = output_s3_key_prefix if parameters is not None: self._values["parameters"] = parameters if service_role_arn is not None: self._values["service_role_arn"] = service_role_arn if timeout_seconds is not None: self._values["timeout_seconds"] = timeout_seconds @property def comment(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.Comment``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-comment """ return self._values.get('comment') @property def document_hash(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.DocumentHash``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-documenthash """ return self._values.get('document_hash') @property def document_hash_type(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.DocumentHashType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-documenthashtype """ return self._values.get('document_hash_type') @property def notification_config(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.NotificationConfigProperty"]]]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.NotificationConfig``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-notificationconfig """ return self._values.get('notification_config') @property def output_s3_bucket_name(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.OutputS3BucketName``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-outputs3bucketname """ return self._values.get('output_s3_bucket_name') @property def output_s3_key_prefix(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.OutputS3KeyPrefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-outputs3keyprefix """ return self._values.get('output_s3_key_prefix') @property def parameters(self) -> typing.Any: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.Parameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-parameters """ return self._values.get('parameters') @property def service_role_arn(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.ServiceRoleArn``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-servicerolearn """ return self._values.get('service_role_arn') @property def timeout_seconds(self) -> typing.Optional[jsii.Number]: """``CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty.TimeoutSeconds``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowruncommandparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowruncommandparameters-timeoutseconds """ return self._values.get('timeout_seconds') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'MaintenanceWindowRunCommandParametersProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty", jsii_struct_bases=[], name_mapping={'input': 'input', 'name': 'name'}) class MaintenanceWindowStepFunctionsParametersProperty(): def __init__(self, *, input: typing.Optional[str]=None, name: typing.Optional[str]=None): """ :param input: ``CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty.Input``. :param name: ``CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowstepfunctionsparameters.html """ self._values = { } if input is not None: self._values["input"] = input if name is not None: self._values["name"] = name @property def input(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty.Input``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowstepfunctionsparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowstepfunctionsparameters-input """ return self._values.get('input') @property def name(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-maintenancewindowstepfunctionsparameters.html#cfn-ssm-maintenancewindowtask-maintenancewindowstepfunctionsparameters-name """ return self._values.get('name') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'MaintenanceWindowStepFunctionsParametersProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.NotificationConfigProperty", jsii_struct_bases=[], name_mapping={'notification_arn': 'notificationArn', 'notification_events': 'notificationEvents', 'notification_type': 'notificationType'}) class NotificationConfigProperty(): def __init__(self, *, notification_arn: str, notification_events: typing.Optional[typing.List[str]]=None, notification_type: typing.Optional[str]=None): """ :param notification_arn: ``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationArn``. :param notification_events: ``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationEvents``. :param notification_type: ``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationType``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-notificationconfig.html """ self._values = { 'notification_arn': notification_arn, } if notification_events is not None: self._values["notification_events"] = notification_events if notification_type is not None: self._values["notification_type"] = notification_type @property def notification_arn(self) -> str: """``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationArn``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-notificationconfig.html#cfn-ssm-maintenancewindowtask-notificationconfig-notificationarn """ return self._values.get('notification_arn') @property def notification_events(self) -> typing.Optional[typing.List[str]]: """``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationEvents``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-notificationconfig.html#cfn-ssm-maintenancewindowtask-notificationconfig-notificationevents """ return self._values.get('notification_events') @property def notification_type(self) -> typing.Optional[str]: """``CfnMaintenanceWindowTask.NotificationConfigProperty.NotificationType``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-notificationconfig.html#cfn-ssm-maintenancewindowtask-notificationconfig-notificationtype """ return self._values.get('notification_type') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'NotificationConfigProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.TargetProperty", jsii_struct_bases=[], name_mapping={'key': 'key', 'values': 'values'}) class TargetProperty(): def __init__(self, *, key: str, values: typing.Optional[typing.List[str]]=None): """ :param key: ``CfnMaintenanceWindowTask.TargetProperty.Key``. :param values: ``CfnMaintenanceWindowTask.TargetProperty.Values``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-target.html """ self._values = { 'key': key, } if values is not None: self._values["values"] = values @property def key(self) -> str: """``CfnMaintenanceWindowTask.TargetProperty.Key``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-target.html#cfn-ssm-maintenancewindowtask-target-key """ return self._values.get('key') @property def values(self) -> typing.Optional[typing.List[str]]: """``CfnMaintenanceWindowTask.TargetProperty.Values``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-target.html#cfn-ssm-maintenancewindowtask-target-values """ return self._values.get('values') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'TargetProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTask.TaskInvocationParametersProperty", jsii_struct_bases=[], name_mapping={'maintenance_window_automation_parameters': 'maintenanceWindowAutomationParameters', 'maintenance_window_lambda_parameters': 'maintenanceWindowLambdaParameters', 'maintenance_window_run_command_parameters': 'maintenanceWindowRunCommandParameters', 'maintenance_window_step_functions_parameters': 'maintenanceWindowStepFunctionsParameters'}) class TaskInvocationParametersProperty(): def __init__(self, *, maintenance_window_automation_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty"]]]=None, maintenance_window_lambda_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty"]]]=None, maintenance_window_run_command_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty"]]]=None, maintenance_window_step_functions_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty"]]]=None): """ :param maintenance_window_automation_parameters: 
``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowAutomationParameters``. :param maintenance_window_lambda_parameters: ``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowLambdaParameters``. :param maintenance_window_run_command_parameters: ``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowRunCommandParameters``. :param maintenance_window_step_functions_parameters: ``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowStepFunctionsParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-taskinvocationparameters.html """ self._values = { } if maintenance_window_automation_parameters is not None: self._values["maintenance_window_automation_parameters"] = maintenance_window_automation_parameters if maintenance_window_lambda_parameters is not None: self._values["maintenance_window_lambda_parameters"] = maintenance_window_lambda_parameters if maintenance_window_run_command_parameters is not None: self._values["maintenance_window_run_command_parameters"] = maintenance_window_run_command_parameters if maintenance_window_step_functions_parameters is not None: self._values["maintenance_window_step_functions_parameters"] = maintenance_window_step_functions_parameters @property def maintenance_window_automation_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowAutomationParametersProperty"]]]: """``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowAutomationParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-taskinvocationparameters.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters-maintenancewindowautomationparameters """ return self._values.get('maintenance_window_automation_parameters') @property def maintenance_window_lambda_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowLambdaParametersProperty"]]]: """``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowLambdaParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-taskinvocationparameters.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters-maintenancewindowlambdaparameters """ return self._values.get('maintenance_window_lambda_parameters') @property def maintenance_window_run_command_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty"]]]: """``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowRunCommandParameters``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-taskinvocationparameters.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters-maintenancewindowruncommandparameters """ return self._values.get('maintenance_window_run_command_parameters') @property def maintenance_window_step_functions_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.MaintenanceWindowStepFunctionsParametersProperty"]]]: """``CfnMaintenanceWindowTask.TaskInvocationParametersProperty.MaintenanceWindowStepFunctionsParameters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-maintenancewindowtask-taskinvocationparameters.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters-maintenancewindowstepfunctionsparameters """ return self._values.get('maintenance_window_step_functions_parameters') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'TaskInvocationParametersProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTaskProps", jsii_struct_bases=[], name_mapping={'max_concurrency': 'maxConcurrency', 'max_errors': 'maxErrors', 'priority': 'priority', 'targets': 'targets', 'task_arn': 'taskArn', 'task_type': 'taskType', 'window_id': 'windowId', 'description': 'description', 'logging_info': 'loggingInfo', 'name': 'name', 'service_role_arn': 'serviceRoleArn', 'task_invocation_parameters': 'taskInvocationParameters', 'task_parameters': 'taskParameters'}) class CfnMaintenanceWindowTaskProps(): def __init__(self, *, max_concurrency: str, max_errors: str, priority: jsii.Number, targets: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnMaintenanceWindowTask.TargetProperty"]]], task_arn: str, task_type: str, window_id: str, description: typing.Optional[str]=None, logging_info: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.LoggingInfoProperty"]]]=None, name: typing.Optional[str]=None, service_role_arn: typing.Optional[str]=None, task_invocation_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.TaskInvocationParametersProperty"]]]=None, task_parameters: typing.Any=None): """Properties for defining a ``AWS::SSM::MaintenanceWindowTask``. :param max_concurrency: ``AWS::SSM::MaintenanceWindowTask.MaxConcurrency``. :param max_errors: ``AWS::SSM::MaintenanceWindowTask.MaxErrors``. :param priority: ``AWS::SSM::MaintenanceWindowTask.Priority``. :param targets: ``AWS::SSM::MaintenanceWindowTask.Targets``. :param task_arn: ``AWS::SSM::MaintenanceWindowTask.TaskArn``. :param task_type: ``AWS::SSM::MaintenanceWindowTask.TaskType``. :param window_id: ``AWS::SSM::MaintenanceWindowTask.WindowId``. :param description: ``AWS::SSM::MaintenanceWindowTask.Description``. :param logging_info: ``AWS::SSM::MaintenanceWindowTask.LoggingInfo``. :param name: ``AWS::SSM::MaintenanceWindowTask.Name``. :param service_role_arn: ``AWS::SSM::MaintenanceWindowTask.ServiceRoleArn``. :param task_invocation_parameters: ``AWS::SSM::MaintenanceWindowTask.TaskInvocationParameters``. :param task_parameters: ``AWS::SSM::MaintenanceWindowTask.TaskParameters``. 
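
# Editorial usage sketch (not part of the generated bindings): a never-invoked
# example of a run-command task attached to an existing window. The caller
# supplies ``window_id``, a registered target id, and a service role ARN; the
# document name "AWS-RunShellScript", the construct id, and all numeric limits
# below are illustrative assumptions, not recommendations.
def _example_maintenance_window_task(scope: aws_cdk.core.Construct, window_id: str, target_id: str, service_role_arn: str) -> CfnMaintenanceWindowTask:
    run_command = CfnMaintenanceWindowTask.MaintenanceWindowRunCommandParametersProperty(
        comment="Editorial example invocation",
        service_role_arn=service_role_arn,
        timeout_seconds=600,
    )
    return CfnMaintenanceWindowTask(
        scope, "ExampleTask",
        window_id=window_id,
        task_type="RUN_COMMAND",
        task_arn="AWS-RunShellScript",
        priority=1,
        max_concurrency="2",   # run on at most two targets at once
        max_errors="1",        # stop scheduling after one failure
        # "WindowTargetIds" points the task at targets already registered
        # with the window (e.g. via CfnMaintenanceWindowTarget above).
        targets=[CfnMaintenanceWindowTask.TargetProperty(key="WindowTargetIds", values=[target_id])],
        task_invocation_parameters=CfnMaintenanceWindowTask.TaskInvocationParametersProperty(
            maintenance_window_run_command_parameters=run_command,
        ),
    )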
@jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnMaintenanceWindowTaskProps", jsii_struct_bases=[], name_mapping={'max_concurrency': 'maxConcurrency', 'max_errors': 'maxErrors', 'priority': 'priority', 'targets': 'targets', 'task_arn': 'taskArn', 'task_type': 'taskType', 'window_id': 'windowId', 'description': 'description', 'logging_info': 'loggingInfo', 'name': 'name', 'service_role_arn': 'serviceRoleArn', 'task_invocation_parameters': 'taskInvocationParameters', 'task_parameters': 'taskParameters'})
class CfnMaintenanceWindowTaskProps():
    def __init__(self, *, max_concurrency: str, max_errors: str, priority: jsii.Number, targets: typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnMaintenanceWindowTask.TargetProperty"]]], task_arn: str, task_type: str, window_id: str, description: typing.Optional[str]=None, logging_info: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.LoggingInfoProperty"]]]=None, name: typing.Optional[str]=None, service_role_arn: typing.Optional[str]=None, task_invocation_parameters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.TaskInvocationParametersProperty"]]]=None, task_parameters: typing.Any=None):
        """Properties for defining a ``AWS::SSM::MaintenanceWindowTask``.
        :param max_concurrency: ``AWS::SSM::MaintenanceWindowTask.MaxConcurrency``.
        :param max_errors: ``AWS::SSM::MaintenanceWindowTask.MaxErrors``.
        :param priority: ``AWS::SSM::MaintenanceWindowTask.Priority``.
        :param targets: ``AWS::SSM::MaintenanceWindowTask.Targets``.
        :param task_arn: ``AWS::SSM::MaintenanceWindowTask.TaskArn``.
        :param task_type: ``AWS::SSM::MaintenanceWindowTask.TaskType``.
        :param window_id: ``AWS::SSM::MaintenanceWindowTask.WindowId``.
        :param description: ``AWS::SSM::MaintenanceWindowTask.Description``.
        :param logging_info: ``AWS::SSM::MaintenanceWindowTask.LoggingInfo``.
        :param name: ``AWS::SSM::MaintenanceWindowTask.Name``.
        :param service_role_arn: ``AWS::SSM::MaintenanceWindowTask.ServiceRoleArn``.
        :param task_invocation_parameters: ``AWS::SSM::MaintenanceWindowTask.TaskInvocationParameters``.
        :param task_parameters: ``AWS::SSM::MaintenanceWindowTask.TaskParameters``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html
        """
        self._values = {
            'max_concurrency': max_concurrency,
            'max_errors': max_errors,
            'priority': priority,
            'targets': targets,
            'task_arn': task_arn,
            'task_type': task_type,
            'window_id': window_id,
        }
        if description is not None:
            self._values["description"] = description
        if logging_info is not None:
            self._values["logging_info"] = logging_info
        if name is not None:
            self._values["name"] = name
        if service_role_arn is not None:
            self._values["service_role_arn"] = service_role_arn
        if task_invocation_parameters is not None:
            self._values["task_invocation_parameters"] = task_invocation_parameters
        if task_parameters is not None:
            self._values["task_parameters"] = task_parameters

    @property
    def max_concurrency(self) -> str:
        """``AWS::SSM::MaintenanceWindowTask.MaxConcurrency``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-maxconcurrency
        """
        return self._values.get('max_concurrency')

    @property
    def max_errors(self) -> str:
        """``AWS::SSM::MaintenanceWindowTask.MaxErrors``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-maxerrors
        """
        return self._values.get('max_errors')

    @property
    def priority(self) -> jsii.Number:
        """``AWS::SSM::MaintenanceWindowTask.Priority``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-priority
        """
        return self._values.get('priority')

    @property
    def targets(self) -> typing.Union[aws_cdk.core.IResolvable, typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnMaintenanceWindowTask.TargetProperty"]]]:
        """``AWS::SSM::MaintenanceWindowTask.Targets``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-targets
        """
        return self._values.get('targets')

    @property
    def task_arn(self) -> str:
        """``AWS::SSM::MaintenanceWindowTask.TaskArn``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskarn
        """
        return self._values.get('task_arn')

    @property
    def task_type(self) -> str:
        """``AWS::SSM::MaintenanceWindowTask.TaskType``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-tasktype
        """
        return self._values.get('task_type')

    @property
    def window_id(self) -> str:
        """``AWS::SSM::MaintenanceWindowTask.WindowId``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-windowid
        """
        return self._values.get('window_id')

    @property
    def description(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTask.Description``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-description
        """
        return self._values.get('description')

    @property
    def logging_info(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.LoggingInfoProperty"]]]:
        """``AWS::SSM::MaintenanceWindowTask.LoggingInfo``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-logginginfo
        """
        return self._values.get('logging_info')

    @property
    def name(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTask.Name``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-name
        """
        return self._values.get('name')

    @property
    def service_role_arn(self) -> typing.Optional[str]:
        """``AWS::SSM::MaintenanceWindowTask.ServiceRoleArn``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-servicerolearn
        """
        return self._values.get('service_role_arn')

    @property
    def task_invocation_parameters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnMaintenanceWindowTask.TaskInvocationParametersProperty"]]]:
        """``AWS::SSM::MaintenanceWindowTask.TaskInvocationParameters``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskinvocationparameters
        """
        return self._values.get('task_invocation_parameters')

    @property
    def task_parameters(self) -> typing.Any:
        """``AWS::SSM::MaintenanceWindowTask.TaskParameters``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-maintenancewindowtask.html#cfn-ssm-maintenancewindowtask-taskparameters
        """
        return self._values.get('task_parameters')

    def __eq__(self, rhs) -> bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs) -> bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return 'CfnMaintenanceWindowTaskProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.implements(aws_cdk.core.IInspectable)
class CfnParameter(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnParameter"):
    """A CloudFormation ``AWS::SSM::Parameter``.
    :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html
    :cloudformationResource: AWS::SSM::Parameter
    """
    def __init__(self, scope: aws_cdk.core.Construct, id: str, *, type: str, value: str, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, name: typing.Optional[str]=None, policies: typing.Optional[str]=None, tags: typing.Any=None, tier: typing.Optional[str]=None) -> None:
        """Create a new ``AWS::SSM::Parameter``.
        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param type: ``AWS::SSM::Parameter.Type``.
        :param value: ``AWS::SSM::Parameter.Value``.
        :param allowed_pattern: ``AWS::SSM::Parameter.AllowedPattern``.
        :param description: ``AWS::SSM::Parameter.Description``.
        :param name: ``AWS::SSM::Parameter.Name``.
        :param policies: ``AWS::SSM::Parameter.Policies``.
        :param tags: ``AWS::SSM::Parameter.Tags``.
        :param tier: ``AWS::SSM::Parameter.Tier``.
        """
        props = CfnParameterProps(type=type, value=value, allowed_pattern=allowed_pattern, description=description, name=name, policies=policies, tags=tags, tier=tier)
        jsii.create(CfnParameter, self, [scope, id, props])

    @jsii.member(jsii_name="inspect")
    def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None:
        """Examines the CloudFormation resource and discloses attributes.
        :param inspector: - tree inspector to collect and process attributes.
        :stability: experimental
        """
        return jsii.invoke(self, "inspect", [inspector])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(self, props: typing.Mapping[str, typing.Any]) -> typing.Mapping[str, typing.Any]:
        """
        :param props: -
        """
        return jsii.invoke(self, "renderProperties", [props])

    @classproperty
    @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
    def CFN_RESOURCE_TYPE_NAME(cls) -> str:
        """The CloudFormation resource type name for this resource class."""
        return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME")

    @property
    @jsii.member(jsii_name="attrType")
    def attr_type(self) -> str:
        """
        :cloudformationAttribute: Type
        """
        return jsii.get(self, "attrType")

    @property
    @jsii.member(jsii_name="attrValue")
    def attr_value(self) -> str:
        """
        :cloudformationAttribute: Value
        """
        return jsii.get(self, "attrValue")

    @property
    @jsii.member(jsii_name="cfnProperties")
    def _cfn_properties(self) -> typing.Mapping[str, typing.Any]:
        return jsii.get(self, "cfnProperties")

    @property
    @jsii.member(jsii_name="tags")
    def tags(self) -> aws_cdk.core.TagManager:
        """``AWS::SSM::Parameter.Tags``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-tags
        """
        return jsii.get(self, "tags")

    @property
    @jsii.member(jsii_name="type")
    def type(self) -> str:
        """``AWS::SSM::Parameter.Type``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-type
        """
        return jsii.get(self, "type")

    @type.setter
    def type(self, value: str):
        return jsii.set(self, "type", value)

    @property
    @jsii.member(jsii_name="value")
    def value(self) -> str:
        """``AWS::SSM::Parameter.Value``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-value
        """
        return jsii.get(self, "value")

    @value.setter
    def value(self, value: str):
        return jsii.set(self, "value", value)

    @property
    @jsii.member(jsii_name="allowedPattern")
    def allowed_pattern(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.AllowedPattern``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-allowedpattern
        """
        return jsii.get(self, "allowedPattern")

    @allowed_pattern.setter
    def allowed_pattern(self, value: typing.Optional[str]):
        return jsii.set(self, "allowedPattern", value)

    @property
    @jsii.member(jsii_name="description")
    def description(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Description``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-description
        """
        return jsii.get(self, "description")

    @description.setter
    def description(self, value: typing.Optional[str]):
        return jsii.set(self, "description", value)

    @property
    @jsii.member(jsii_name="name")
    def name(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Name``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-name
        """
        return jsii.get(self, "name")

    @name.setter
    def name(self, value: typing.Optional[str]):
        return jsii.set(self, "name", value)

    @property
    @jsii.member(jsii_name="policies")
    def policies(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Policies``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-policies
        """
        return jsii.get(self, "policies")

    @policies.setter
    def policies(self, value: typing.Optional[str]):
        return jsii.set(self, "policies", value)

    @property
    @jsii.member(jsii_name="tier")
    def tier(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Tier``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-tier
        """
        return jsii.get(self, "tier")

    @tier.setter
    def tier(self, value: typing.Optional[str]):
        return jsii.set(self, "tier", value)


@jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnParameterProps", jsii_struct_bases=[], name_mapping={'type': 'type', 'value': 'value', 'allowed_pattern': 'allowedPattern', 'description': 'description', 'name': 'name', 'policies': 'policies', 'tags': 'tags', 'tier': 'tier'})
class CfnParameterProps():
    def __init__(self, *, type: str, value: str, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, name: typing.Optional[str]=None, policies: typing.Optional[str]=None, tags: typing.Any=None, tier: typing.Optional[str]=None):
        """Properties for defining a ``AWS::SSM::Parameter``.
        :param type: ``AWS::SSM::Parameter.Type``.
        :param value: ``AWS::SSM::Parameter.Value``.
        :param allowed_pattern: ``AWS::SSM::Parameter.AllowedPattern``.
        :param description: ``AWS::SSM::Parameter.Description``.
        :param name: ``AWS::SSM::Parameter.Name``.
        :param policies: ``AWS::SSM::Parameter.Policies``.
        :param tags: ``AWS::SSM::Parameter.Tags``.
        :param tier: ``AWS::SSM::Parameter.Tier``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html
        """
        self._values = {
            'type': type,
            'value': value,
        }
        if allowed_pattern is not None:
            self._values["allowed_pattern"] = allowed_pattern
        if description is not None:
            self._values["description"] = description
        if name is not None:
            self._values["name"] = name
        if policies is not None:
            self._values["policies"] = policies
        if tags is not None:
            self._values["tags"] = tags
        if tier is not None:
            self._values["tier"] = tier

    @property
    def type(self) -> str:
        """``AWS::SSM::Parameter.Type``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-type
        """
        return self._values.get('type')

    @property
    def value(self) -> str:
        """``AWS::SSM::Parameter.Value``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-value
        """
        return self._values.get('value')

    @property
    def allowed_pattern(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.AllowedPattern``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-allowedpattern
        """
        return self._values.get('allowed_pattern')

    @property
    def description(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Description``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-description
        """
        return self._values.get('description')

    @property
    def name(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Name``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-name
        """
        return self._values.get('name')

    @property
    def policies(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Policies``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-policies
        """
        return self._values.get('policies')

    @property
    def tags(self) -> typing.Any:
        """``AWS::SSM::Parameter.Tags``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-tags
        """
        return self._values.get('tags')

    @property
    def tier(self) -> typing.Optional[str]:
        """``AWS::SSM::Parameter.Tier``.
        :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-parameter.html#cfn-ssm-parameter-tier
        """
        return self._values.get('tier')

    def __eq__(self, rhs) -> bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs) -> bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return 'CfnParameterProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
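
# Editorial usage sketch (not part of the generated bindings): a never-invoked
# example of a String parameter with a validation pattern. The parameter name,
# value, and construct id are illustrative assumptions; note that through
# CloudFormation ``type`` is limited to "String" and "StringList" (SecureString
# is not supported by the AWS::SSM::Parameter resource).
def _example_parameter(scope: aws_cdk.core.Construct) -> CfnParameter:
    return CfnParameter(
        scope, "ExampleParameter",
        name="/example/database/port",
        type="String",
        value="5432",
        allowed_pattern="^\\d+$",  # reject non-numeric updates
        description="Editorial example parameter",
        tier="Standard",
    )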
""" props = CfnPatchBaselineProps(name=name, approval_rules=approval_rules, approved_patches=approved_patches, approved_patches_compliance_level=approved_patches_compliance_level, approved_patches_enable_non_security=approved_patches_enable_non_security, description=description, global_filters=global_filters, operating_system=operating_system, patch_groups=patch_groups, rejected_patches=rejected_patches, rejected_patches_action=rejected_patches_action, sources=sources, tags=tags) jsii.create(CfnPatchBaseline, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="tags") def tags(self) -> aws_cdk.core.TagManager: """``AWS::SSM::PatchBaseline.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-tags """ return jsii.get(self, "tags") @property @jsii.member(jsii_name="name") def name(self) -> str: """``AWS::SSM::PatchBaseline.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-name """ return jsii.get(self, "name") @name.setter def name(self, value: str): return jsii.set(self, "name", value) @property @jsii.member(jsii_name="approvalRules") def approval_rules(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["RuleGroupProperty"]]]: """``AWS::SSM::PatchBaseline.ApprovalRules``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvalrules """ return jsii.get(self, "approvalRules") @approval_rules.setter def approval_rules(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["RuleGroupProperty"]]]): return jsii.set(self, "approvalRules", value) @property @jsii.member(jsii_name="approvedPatches") def approved_patches(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.ApprovedPatches``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatches """ return jsii.get(self, "approvedPatches") @approved_patches.setter def approved_patches(self, value: typing.Optional[typing.List[str]]): return jsii.set(self, "approvedPatches", value) @property @jsii.member(jsii_name="approvedPatchesComplianceLevel") def approved_patches_compliance_level(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.ApprovedPatchesComplianceLevel``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatchescompliancelevel """ return jsii.get(self, "approvedPatchesComplianceLevel") @approved_patches_compliance_level.setter def approved_patches_compliance_level(self, value: typing.Optional[str]): return jsii.set(self, "approvedPatchesComplianceLevel", value) @property @jsii.member(jsii_name="approvedPatchesEnableNonSecurity") def approved_patches_enable_non_security(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]: """``AWS::SSM::PatchBaseline.ApprovedPatchesEnableNonSecurity``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatchesenablenonsecurity """ return jsii.get(self, "approvedPatchesEnableNonSecurity") @approved_patches_enable_non_security.setter def approved_patches_enable_non_security(self, value: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]): return jsii.set(self, "approvedPatchesEnableNonSecurity", value) @property @jsii.member(jsii_name="description") def description(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.Description``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-description """ return jsii.get(self, "description") @description.setter def description(self, value: typing.Optional[str]): return jsii.set(self, "description", value) @property @jsii.member(jsii_name="globalFilters") def global_filters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["PatchFilterGroupProperty"]]]: """``AWS::SSM::PatchBaseline.GlobalFilters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-globalfilters """ return jsii.get(self, "globalFilters") @global_filters.setter def global_filters(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["PatchFilterGroupProperty"]]]): return jsii.set(self, "globalFilters", value) @property @jsii.member(jsii_name="operatingSystem") def operating_system(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.OperatingSystem``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-operatingsystem """ return jsii.get(self, "operatingSystem") @operating_system.setter def operating_system(self, value: typing.Optional[str]): return jsii.set(self, "operatingSystem", value) @property @jsii.member(jsii_name="patchGroups") def patch_groups(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.PatchGroups``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-patchgroups """ return jsii.get(self, "patchGroups") @patch_groups.setter def patch_groups(self, value: typing.Optional[typing.List[str]]): return jsii.set(self, "patchGroups", value) @property @jsii.member(jsii_name="rejectedPatches") def rejected_patches(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.RejectedPatches``. 
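A short illustrative sketch; ``baseline`` is assumed to be an existing ``CfnPatchBaseline``, the patch ID is a made-up sample, and the companion ``rejected_patches_action`` values (``ALLOW_AS_DEPENDENCY``, ``BLOCK``) come from the general SSM documentation, not from this file::

    baseline.rejected_patches = ["KB4012212"]
    baseline.rejected_patches_action = "BLOCK"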
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-rejectedpatches """ return jsii.get(self, "rejectedPatches") @rejected_patches.setter def rejected_patches(self, value: typing.Optional[typing.List[str]]): return jsii.set(self, "rejectedPatches", value) @property @jsii.member(jsii_name="rejectedPatchesAction") def rejected_patches_action(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.RejectedPatchesAction``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-rejectedpatchesaction """ return jsii.get(self, "rejectedPatchesAction") @rejected_patches_action.setter def rejected_patches_action(self, value: typing.Optional[str]): return jsii.set(self, "rejectedPatchesAction", value) @property @jsii.member(jsii_name="sources") def sources(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "PatchSourceProperty"]]]]]: """``AWS::SSM::PatchBaseline.Sources``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-sources """ return jsii.get(self, "sources") @sources.setter def sources(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "PatchSourceProperty"]]]]]): return jsii.set(self, "sources", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaseline.PatchFilterGroupProperty", jsii_struct_bases=[], name_mapping={'patch_filters': 'patchFilters'}) class PatchFilterGroupProperty(): def __init__(self, *, patch_filters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.PatchFilterProperty"]]]]]=None): """ :param patch_filters: ``CfnPatchBaseline.PatchFilterGroupProperty.PatchFilters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchfiltergroup.html """ self._values = { } if patch_filters is not None: self._values["patch_filters"] = patch_filters @property def patch_filters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.PatchFilterProperty"]]]]]: """``CfnPatchBaseline.PatchFilterGroupProperty.PatchFilters``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchfiltergroup.html#cfn-ssm-patchbaseline-patchfiltergroup-patchfilters """ return self._values.get('patch_filters') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'PatchFilterGroupProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaseline.PatchFilterProperty", jsii_struct_bases=[], name_mapping={'key': 'key', 'values': 'values'}) class PatchFilterProperty(): def __init__(self, *, key: typing.Optional[str]=None, values: typing.Optional[typing.List[str]]=None): """ :param key: ``CfnPatchBaseline.PatchFilterProperty.Key``. :param values: ``CfnPatchBaseline.PatchFilterProperty.Values``. 
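Example (an illustrative sketch; the key ``"PRODUCT"`` and its value are sample inputs, and ``ssm`` is assumed to be ``aws_cdk.aws_ssm``)::

    product_filter = ssm.CfnPatchBaseline.PatchFilterProperty(
        key="PRODUCT",
        values=["AmazonLinux2"]
    )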
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchfilter.html """ self._values = { } if key is not None: self._values["key"] = key if values is not None: self._values["values"] = values @property def key(self) -> typing.Optional[str]: """``CfnPatchBaseline.PatchFilterProperty.Key``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchfilter.html#cfn-ssm-patchbaseline-patchfilter-key """ return self._values.get('key') @property def values(self) -> typing.Optional[typing.List[str]]: """``CfnPatchBaseline.PatchFilterProperty.Values``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchfilter.html#cfn-ssm-patchbaseline-patchfilter-values """ return self._values.get('values') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'PatchFilterProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaseline.PatchSourceProperty", jsii_struct_bases=[], name_mapping={'configuration': 'configuration', 'name': 'name', 'products': 'products'}) class PatchSourceProperty(): def __init__(self, *, configuration: typing.Optional[str]=None, name: typing.Optional[str]=None, products: typing.Optional[typing.List[str]]=None): """ :param configuration: ``CfnPatchBaseline.PatchSourceProperty.Configuration``. :param name: ``CfnPatchBaseline.PatchSourceProperty.Name``. :param products: ``CfnPatchBaseline.PatchSourceProperty.Products``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchsource.html """ self._values = { } if configuration is not None: self._values["configuration"] = configuration if name is not None: self._values["name"] = name if products is not None: self._values["products"] = products @property def configuration(self) -> typing.Optional[str]: """``CfnPatchBaseline.PatchSourceProperty.Configuration``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchsource.html#cfn-ssm-patchbaseline-patchsource-configuration """ return self._values.get('configuration') @property def name(self) -> typing.Optional[str]: """``CfnPatchBaseline.PatchSourceProperty.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchsource.html#cfn-ssm-patchbaseline-patchsource-name """ return self._values.get('name') @property def products(self) -> typing.Optional[typing.List[str]]: """``CfnPatchBaseline.PatchSourceProperty.Products``. 
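Example of where this property fits (an illustrative sketch; the repository name, product list and yum configuration string are made-up sample values, and ``ssm`` is assumed to be ``aws_cdk.aws_ssm``)::

    source = ssm.CfnPatchBaseline.PatchSourceProperty(
        name="my-custom-repo",
        products=["AmazonLinux2"],
        configuration="[main]\nname=MyCustomRepo\nbaseurl=https://example.com/repo\nenabled=1"
    )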
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-patchsource.html#cfn-ssm-patchbaseline-patchsource-products """ return self._values.get('products') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'PatchSourceProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaseline.RuleGroupProperty", jsii_struct_bases=[], name_mapping={'patch_rules': 'patchRules'}) class RuleGroupProperty(): def __init__(self, *, patch_rules: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.RuleProperty"]]]]]=None): """ :param patch_rules: ``CfnPatchBaseline.RuleGroupProperty.PatchRules``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rulegroup.html """ self._values = { } if patch_rules is not None: self._values["patch_rules"] = patch_rules @property def patch_rules(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.RuleProperty"]]]]]: """``CfnPatchBaseline.RuleGroupProperty.PatchRules``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rulegroup.html#cfn-ssm-patchbaseline-rulegroup-patchrules """ return self._values.get('patch_rules') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'RuleGroupProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaseline.RuleProperty", jsii_struct_bases=[], name_mapping={'approve_after_days': 'approveAfterDays', 'compliance_level': 'complianceLevel', 'enable_non_security': 'enableNonSecurity', 'patch_filter_group': 'patchFilterGroup'}) class RuleProperty(): def __init__(self, *, approve_after_days: typing.Optional[jsii.Number]=None, compliance_level: typing.Optional[str]=None, enable_non_security: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]=None, patch_filter_group: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.PatchFilterGroupProperty"]]]=None): """ :param approve_after_days: ``CfnPatchBaseline.RuleProperty.ApproveAfterDays``. :param compliance_level: ``CfnPatchBaseline.RuleProperty.ComplianceLevel``. :param enable_non_security: ``CfnPatchBaseline.RuleProperty.EnableNonSecurity``. :param patch_filter_group: ``CfnPatchBaseline.RuleProperty.PatchFilterGroup``. 
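Example (an illustrative sketch; all values are samples, and ``ssm`` is assumed to be ``aws_cdk.aws_ssm``)::

    rule = ssm.CfnPatchBaseline.RuleProperty(
        approve_after_days=7,
        compliance_level="CRITICAL",
        enable_non_security=False,
        patch_filter_group=ssm.CfnPatchBaseline.PatchFilterGroupProperty(
            patch_filters=[ssm.CfnPatchBaseline.PatchFilterProperty(
                key="CLASSIFICATION",
                values=["Security"]
            )]
        )
    )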
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rule.html """ self._values = { } if approve_after_days is not None: self._values["approve_after_days"] = approve_after_days if compliance_level is not None: self._values["compliance_level"] = compliance_level if enable_non_security is not None: self._values["enable_non_security"] = enable_non_security if patch_filter_group is not None: self._values["patch_filter_group"] = patch_filter_group @property def approve_after_days(self) -> typing.Optional[jsii.Number]: """``CfnPatchBaseline.RuleProperty.ApproveAfterDays``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rule.html#cfn-ssm-patchbaseline-rule-approveafterdays """ return self._values.get('approve_after_days') @property def compliance_level(self) -> typing.Optional[str]: """``CfnPatchBaseline.RuleProperty.ComplianceLevel``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rule.html#cfn-ssm-patchbaseline-rule-compliancelevel """ return self._values.get('compliance_level') @property def enable_non_security(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]: """``CfnPatchBaseline.RuleProperty.EnableNonSecurity``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rule.html#cfn-ssm-patchbaseline-rule-enablenonsecurity """ return self._values.get('enable_non_security') @property def patch_filter_group(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.PatchFilterGroupProperty"]]]: """``CfnPatchBaseline.RuleProperty.PatchFilterGroup``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ssm-patchbaseline-rule.html#cfn-ssm-patchbaseline-rule-patchfiltergroup """ return self._values.get('patch_filter_group') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'RuleProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnPatchBaselineProps", jsii_struct_bases=[], name_mapping={'name': 'name', 'approval_rules': 'approvalRules', 'approved_patches': 'approvedPatches', 'approved_patches_compliance_level': 'approvedPatchesComplianceLevel', 'approved_patches_enable_non_security': 'approvedPatchesEnableNonSecurity', 'description': 'description', 'global_filters': 'globalFilters', 'operating_system': 'operatingSystem', 'patch_groups': 'patchGroups', 'rejected_patches': 'rejectedPatches', 'rejected_patches_action': 'rejectedPatchesAction', 'sources': 'sources', 'tags': 'tags'}) class CfnPatchBaselineProps(): def __init__(self, *, name: str, approval_rules: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.RuleGroupProperty"]]]=None, approved_patches: typing.Optional[typing.List[str]]=None, approved_patches_compliance_level: typing.Optional[str]=None, approved_patches_enable_non_security: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]=None, description: typing.Optional[str]=None, global_filters: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.PatchFilterGroupProperty"]]]=None, operating_system: typing.Optional[str]=None, patch_groups: typing.Optional[typing.List[str]]=None, rejected_patches: typing.Optional[typing.List[str]]=None, rejected_patches_action: typing.Optional[str]=None, sources: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.PatchSourceProperty"]]]]]=None, tags: typing.Optional[typing.List[aws_cdk.core.CfnTag]]=None): """Properties for defining an ``AWS::SSM::PatchBaseline``. :param name: ``AWS::SSM::PatchBaseline.Name``. :param approval_rules: ``AWS::SSM::PatchBaseline.ApprovalRules``. :param approved_patches: ``AWS::SSM::PatchBaseline.ApprovedPatches``. :param approved_patches_compliance_level: ``AWS::SSM::PatchBaseline.ApprovedPatchesComplianceLevel``. :param approved_patches_enable_non_security: ``AWS::SSM::PatchBaseline.ApprovedPatchesEnableNonSecurity``. :param description: ``AWS::SSM::PatchBaseline.Description``. :param global_filters: ``AWS::SSM::PatchBaseline.GlobalFilters``. :param operating_system: ``AWS::SSM::PatchBaseline.OperatingSystem``. :param patch_groups: ``AWS::SSM::PatchBaseline.PatchGroups``. :param rejected_patches: ``AWS::SSM::PatchBaseline.RejectedPatches``. :param rejected_patches_action: ``AWS::SSM::PatchBaseline.RejectedPatchesAction``. :param sources: ``AWS::SSM::PatchBaseline.Sources``. :param tags: ``AWS::SSM::PatchBaseline.Tags``.
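Example (an illustrative sketch; the values are samples and ``ssm`` is assumed to be ``aws_cdk.aws_ssm``)::

    props = ssm.CfnPatchBaselineProps(
        name="my-baseline",
        description="Baseline for the web fleet",
        operating_system="AMAZON_LINUX_2"
    )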
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html """ self._values = { 'name': name, } if approval_rules is not None: self._values["approval_rules"] = approval_rules if approved_patches is not None: self._values["approved_patches"] = approved_patches if approved_patches_compliance_level is not None: self._values["approved_patches_compliance_level"] = approved_patches_compliance_level if approved_patches_enable_non_security is not None: self._values["approved_patches_enable_non_security"] = approved_patches_enable_non_security if description is not None: self._values["description"] = description if global_filters is not None: self._values["global_filters"] = global_filters if operating_system is not None: self._values["operating_system"] = operating_system if patch_groups is not None: self._values["patch_groups"] = patch_groups if rejected_patches is not None: self._values["rejected_patches"] = rejected_patches if rejected_patches_action is not None: self._values["rejected_patches_action"] = rejected_patches_action if sources is not None: self._values["sources"] = sources if tags is not None: self._values["tags"] = tags @property def name(self) -> str: """``AWS::SSM::PatchBaseline.Name``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-name """ return self._values.get('name') @property def approval_rules(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.RuleGroupProperty"]]]: """``AWS::SSM::PatchBaseline.ApprovalRules``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvalrules """ return self._values.get('approval_rules') @property def approved_patches(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.ApprovedPatches``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatches """ return self._values.get('approved_patches') @property def approved_patches_compliance_level(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.ApprovedPatchesComplianceLevel``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatchescompliancelevel """ return self._values.get('approved_patches_compliance_level') @property def approved_patches_enable_non_security(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]: """``AWS::SSM::PatchBaseline.ApprovedPatchesEnableNonSecurity``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-approvedpatchesenablenonsecurity """ return self._values.get('approved_patches_enable_non_security') @property def description(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.Description``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-description """ return self._values.get('description') @property def global_filters(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnPatchBaseline.PatchFilterGroupProperty"]]]: """``AWS::SSM::PatchBaseline.GlobalFilters``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-globalfilters """ return self._values.get('global_filters') @property def operating_system(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.OperatingSystem``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-operatingsystem """ return self._values.get('operating_system') @property def patch_groups(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.PatchGroups``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-patchgroups """ return self._values.get('patch_groups') @property def rejected_patches(self) -> typing.Optional[typing.List[str]]: """``AWS::SSM::PatchBaseline.RejectedPatches``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-rejectedpatches """ return self._values.get('rejected_patches') @property def rejected_patches_action(self) -> typing.Optional[str]: """``AWS::SSM::PatchBaseline.RejectedPatchesAction``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-rejectedpatchesaction """ return self._values.get('rejected_patches_action') @property def sources(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnPatchBaseline.PatchSourceProperty"]]]]]: """``AWS::SSM::PatchBaseline.Sources``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-sources """ return self._values.get('sources') @property def tags(self) -> typing.Optional[typing.List[aws_cdk.core.CfnTag]]: """``AWS::SSM::PatchBaseline.Tags``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-patchbaseline.html#cfn-ssm-patchbaseline-tags """ return self._values.get('tags') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CfnPatchBaselineProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(aws_cdk.core.IInspectable) class CfnResourceDataSync(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.CfnResourceDataSync"): """A CloudFormation ``AWS::SSM::ResourceDataSync``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html cloudformationResource: :cloudformationResource:: AWS::SSM::ResourceDataSync """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, bucket_name: str, bucket_region: str, sync_format: str, sync_name: str, bucket_prefix: typing.Optional[str]=None, kms_key_arn: typing.Optional[str]=None) -> None: """Create a new ``AWS::SSM::ResourceDataSync``. :param scope: - scope in which this resource is defined. :param id: - scoped id of the resource. :param props: - resource properties. :param bucket_name: ``AWS::SSM::ResourceDataSync.BucketName``. :param bucket_region: ``AWS::SSM::ResourceDataSync.BucketRegion``. :param sync_format: ``AWS::SSM::ResourceDataSync.SyncFormat``. :param sync_name: ``AWS::SSM::ResourceDataSync.SyncName``. 
:param bucket_prefix: ``AWS::SSM::ResourceDataSync.BucketPrefix``. :param kms_key_arn: ``AWS::SSM::ResourceDataSync.KMSKeyArn``. """ props = CfnResourceDataSyncProps(bucket_name=bucket_name, bucket_region=bucket_region, sync_format=sync_format, sync_name=sync_name, bucket_prefix=bucket_prefix, kms_key_arn=kms_key_arn) jsii.create(CfnResourceDataSync, self, [scope, id, props]) @jsii.member(jsii_name="inspect") def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None: """Examines the CloudFormation resource and discloses attributes. :param inspector: - tree inspector to collect and process attributes. stability :stability: experimental """ return jsii.invoke(self, "inspect", [inspector]) @jsii.member(jsii_name="renderProperties") def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]: """ :param props: - """ return jsii.invoke(self, "renderProperties", [props]) @classproperty @jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME") def CFN_RESOURCE_TYPE_NAME(cls) -> str: """The CloudFormation resource type name for this resource class.""" return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME") @property @jsii.member(jsii_name="cfnProperties") def _cfn_properties(self) -> typing.Mapping[str,typing.Any]: return jsii.get(self, "cfnProperties") @property @jsii.member(jsii_name="bucketName") def bucket_name(self) -> str: """``AWS::SSM::ResourceDataSync.BucketName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketname """ return jsii.get(self, "bucketName") @bucket_name.setter def bucket_name(self, value: str): return jsii.set(self, "bucketName", value) @property @jsii.member(jsii_name="bucketRegion") def bucket_region(self) -> str: """``AWS::SSM::ResourceDataSync.BucketRegion``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketregion """ return jsii.get(self, "bucketRegion") @bucket_region.setter def bucket_region(self, value: str): return jsii.set(self, "bucketRegion", value) @property @jsii.member(jsii_name="syncFormat") def sync_format(self) -> str: """``AWS::SSM::ResourceDataSync.SyncFormat``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-syncformat """ return jsii.get(self, "syncFormat") @sync_format.setter def sync_format(self, value: str): return jsii.set(self, "syncFormat", value) @property @jsii.member(jsii_name="syncName") def sync_name(self) -> str: """``AWS::SSM::ResourceDataSync.SyncName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-syncname """ return jsii.get(self, "syncName") @sync_name.setter def sync_name(self, value: str): return jsii.set(self, "syncName", value) @property @jsii.member(jsii_name="bucketPrefix") def bucket_prefix(self) -> typing.Optional[str]: """``AWS::SSM::ResourceDataSync.BucketPrefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketprefix """ return jsii.get(self, "bucketPrefix") @bucket_prefix.setter def bucket_prefix(self, value: typing.Optional[str]): return jsii.set(self, "bucketPrefix", value) @property @jsii.member(jsii_name="kmsKeyArn") def kms_key_arn(self) -> typing.Optional[str]: """``AWS::SSM::ResourceDataSync.KMSKeyArn``. 
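Example of a sync created with this property set (an illustrative sketch; the bucket, region and key ARN are made-up samples, ``stack`` is assumed to be an existing ``aws_cdk.core.Stack`` and ``ssm`` to be ``aws_cdk.aws_ssm``; ``"JsonSerDe"`` is the sync format named in the SSM documentation)::

    sync = ssm.CfnResourceDataSync(stack, "Sync",
        sync_name="my-inventory-sync",
        bucket_name="my-inventory-bucket",
        bucket_region="us-east-1",
        sync_format="JsonSerDe",
        kms_key_arn="arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID"
    )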
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-kmskeyarn """ return jsii.get(self, "kmsKeyArn") @kms_key_arn.setter def kms_key_arn(self, value: typing.Optional[str]): return jsii.set(self, "kmsKeyArn", value) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CfnResourceDataSyncProps", jsii_struct_bases=[], name_mapping={'bucket_name': 'bucketName', 'bucket_region': 'bucketRegion', 'sync_format': 'syncFormat', 'sync_name': 'syncName', 'bucket_prefix': 'bucketPrefix', 'kms_key_arn': 'kmsKeyArn'}) class CfnResourceDataSyncProps(): def __init__(self, *, bucket_name: str, bucket_region: str, sync_format: str, sync_name: str, bucket_prefix: typing.Optional[str]=None, kms_key_arn: typing.Optional[str]=None): """Properties for defining a ``AWS::SSM::ResourceDataSync``. :param bucket_name: ``AWS::SSM::ResourceDataSync.BucketName``. :param bucket_region: ``AWS::SSM::ResourceDataSync.BucketRegion``. :param sync_format: ``AWS::SSM::ResourceDataSync.SyncFormat``. :param sync_name: ``AWS::SSM::ResourceDataSync.SyncName``. :param bucket_prefix: ``AWS::SSM::ResourceDataSync.BucketPrefix``. :param kms_key_arn: ``AWS::SSM::ResourceDataSync.KMSKeyArn``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html """ self._values = { 'bucket_name': bucket_name, 'bucket_region': bucket_region, 'sync_format': sync_format, 'sync_name': sync_name, } if bucket_prefix is not None: self._values["bucket_prefix"] = bucket_prefix if kms_key_arn is not None: self._values["kms_key_arn"] = kms_key_arn @property def bucket_name(self) -> str: """``AWS::SSM::ResourceDataSync.BucketName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketname """ return self._values.get('bucket_name') @property def bucket_region(self) -> str: """``AWS::SSM::ResourceDataSync.BucketRegion``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketregion """ return self._values.get('bucket_region') @property def sync_format(self) -> str: """``AWS::SSM::ResourceDataSync.SyncFormat``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-syncformat """ return self._values.get('sync_format') @property def sync_name(self) -> str: """``AWS::SSM::ResourceDataSync.SyncName``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-syncname """ return self._values.get('sync_name') @property def bucket_prefix(self) -> typing.Optional[str]: """``AWS::SSM::ResourceDataSync.BucketPrefix``. see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-bucketprefix """ return self._values.get('bucket_prefix') @property def kms_key_arn(self) -> typing.Optional[str]: """``AWS::SSM::ResourceDataSync.KMSKeyArn``. 
see :see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ssm-resourcedatasync.html#cfn-ssm-resourcedatasync-kmskeyarn """ return self._values.get('kms_key_arn') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CfnResourceDataSyncProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.CommonStringParameterAttributes", jsii_struct_bases=[], name_mapping={'parameter_name': 'parameterName', 'simple_name': 'simpleName'}) class CommonStringParameterAttributes(): def __init__(self, *, parameter_name: str, simple_name: typing.Optional[bool]=None): """Common attributes for string parameters. :param parameter_name: The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` """ self._values = { 'parameter_name': parameter_name, } if simple_name is not None: self._values["simple_name"] = simple_name @property def parameter_name(self) -> str: """The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'CommonStringParameterAttributes(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.interface(jsii_type="@aws-cdk/aws-ssm.IParameter") class IParameter(aws_cdk.core.IResource, jsii.compat.Protocol): """An SSM Parameter reference.""" @staticmethod def __jsii_proxy_class__(): return _IParameterProxy @property @jsii.member(jsii_name="parameterArn") def parameter_arn(self) -> str: """The ARN of the SSM Parameter resource. attribute: :attribute:: true """ ... @property @jsii.member(jsii_name="parameterName") def parameter_name(self) -> str: """The name of the SSM Parameter resource. attribute: :attribute:: true """ ... @property @jsii.member(jsii_name="parameterType") def parameter_type(self) -> str: """The type of the SSM Parameter resource. attribute: :attribute:: true """ ... 
@jsii.member(jsii_name="grantRead") def grant_read(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants read (DescribeParameter, GetParameter, GetParameterHistory) permissions on the SSM Parameter. :param grantee: the role to be granted read-only access to the parameter. """ ... @jsii.member(jsii_name="grantWrite") def grant_write(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants write (PutParameter) permissions on the SSM Parameter. :param grantee: the role to be granted write access to the parameter. """ ... class _IParameterProxy(jsii.proxy_for(aws_cdk.core.IResource)): """An SSM Parameter reference.""" __jsii_type__ = "@aws-cdk/aws-ssm.IParameter" @property @jsii.member(jsii_name="parameterArn") def parameter_arn(self) -> str: """The ARN of the SSM Parameter resource. attribute: :attribute:: true """ return jsii.get(self, "parameterArn") @property @jsii.member(jsii_name="parameterName") def parameter_name(self) -> str: """The name of the SSM Parameter resource. attribute: :attribute:: true """ return jsii.get(self, "parameterName") @property @jsii.member(jsii_name="parameterType") def parameter_type(self) -> str: """The type of the SSM Parameter resource. attribute: :attribute:: true """ return jsii.get(self, "parameterType") @jsii.member(jsii_name="grantRead") def grant_read(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants read (DescribeParameter, GetParameter, GetParameterHistory) permissions on the SSM Parameter. :param grantee: the role to be granted read-only access to the parameter. """ return jsii.invoke(self, "grantRead", [grantee]) @jsii.member(jsii_name="grantWrite") def grant_write(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants write (PutParameter) permissions on the SSM Parameter. :param grantee: the role to be granted write access to the parameter. """ return jsii.invoke(self, "grantWrite", [grantee]) @jsii.interface(jsii_type="@aws-cdk/aws-ssm.IStringListParameter") class IStringListParameter(IParameter, jsii.compat.Protocol): """A StringList SSM Parameter.""" @staticmethod def __jsii_proxy_class__(): return _IStringListParameterProxy @property @jsii.member(jsii_name="stringListValue") def string_list_value(self) -> typing.List[str]: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. Values in the array cannot contain commas (``,``). attribute: :attribute:: Value """ ... class _IStringListParameterProxy(jsii.proxy_for(IParameter)): """A StringList SSM Parameter.""" __jsii_type__ = "@aws-cdk/aws-ssm.IStringListParameter" @property @jsii.member(jsii_name="stringListValue") def string_list_value(self) -> typing.List[str]: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. Values in the array cannot contain commas (``,``). attribute: :attribute:: Value """ return jsii.get(self, "stringListValue") @jsii.interface(jsii_type="@aws-cdk/aws-ssm.IStringParameter") class IStringParameter(IParameter, jsii.compat.Protocol): """A String SSM Parameter.""" @staticmethod def __jsii_proxy_class__(): return _IStringParameterProxy @property @jsii.member(jsii_name="stringValue") def string_value(self) -> str: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. attribute: :attribute:: Value """ ... 
class _IStringParameterProxy(jsii.proxy_for(IParameter)): """A String SSM Parameter.""" __jsii_type__ = "@aws-cdk/aws-ssm.IStringParameter" @property @jsii.member(jsii_name="stringValue") def string_value(self) -> str: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. attribute: :attribute:: Value """ return jsii.get(self, "stringValue") @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.ParameterOptions", jsii_struct_bases=[], name_mapping={'allowed_pattern': 'allowedPattern', 'description': 'description', 'parameter_name': 'parameterName', 'simple_name': 'simpleName'}) class ParameterOptions(): def __init__(self, *, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, parameter_name: typing.Optional[str]=None, simple_name: typing.Optional[bool]=None): """Properties needed to create a new SSM Parameter. :param allowed_pattern: A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` Default: no validation is performed :param description: Information about the parameter that you want to add to the system. Default: none :param parameter_name: The name of the parameter. Default: - a name will be generated by CloudFormation :param simple_name: Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` """ self._values = { } if allowed_pattern is not None: self._values["allowed_pattern"] = allowed_pattern if description is not None: self._values["description"] = description if parameter_name is not None: self._values["parameter_name"] = parameter_name if simple_name is not None: self._values["simple_name"] = simple_name @property def allowed_pattern(self) -> typing.Optional[str]: """A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` default :default: no validation is performed """ return self._values.get('allowed_pattern') @property def description(self) -> typing.Optional[str]: """Information about the parameter that you want to add to the system. default :default: none """ return self._values.get('description') @property def parameter_name(self) -> typing.Optional[str]: """The name of the parameter. default :default: - a name will be generated by CloudFormation """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name.
default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'ParameterOptions(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.enum(jsii_type="@aws-cdk/aws-ssm.ParameterType") class ParameterType(enum.Enum): """SSM parameter type.""" STRING = "STRING" """String.""" SECURE_STRING = "SECURE_STRING" """Secure String. Parameter Store uses an AWS Key Management Service (KMS) customer master key (CMK) to encrypt the parameter value.""" STRING_LIST = "STRING_LIST" """String List.""" AWS_EC2_IMAGE_ID = "AWS_EC2_IMAGE_ID" """An Amazon EC2 image ID, such as ami-0ff8a91507f77f867.""" @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.SecureStringParameterAttributes", jsii_struct_bases=[CommonStringParameterAttributes], name_mapping={'parameter_name': 'parameterName', 'simple_name': 'simpleName', 'version': 'version', 'encryption_key': 'encryptionKey'}) class SecureStringParameterAttributes(CommonStringParameterAttributes): def __init__(self, *, parameter_name: str, simple_name: typing.Optional[bool]=None, version: jsii.Number, encryption_key: typing.Optional[aws_cdk.aws_kms.IKey]=None): """ :param parameter_name: The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). :param simple_name: Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` :param version: The version number of the value you wish to retrieve. This is required for secure strings. :param encryption_key: The encryption key that is used to encrypt this parameter. Default: - default master key """ self._values = { 'parameter_name': parameter_name, 'version': version, } if simple_name is not None: self._values["simple_name"] = simple_name if encryption_key is not None: self._values["encryption_key"] = encryption_key @property def parameter_name(self) -> str: """The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') @property def version(self) -> jsii.Number: """The version number of the value you wish to retrieve. This is required for secure strings.
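An illustrative sketch of where this attribute is used; ``self`` is assumed to be a Construct, and the parameter name and version are sample values::

    secret = StringParameter.from_secure_string_parameter_attributes(
        self, "DbPassword",
        parameter_name="/my/app/db-password",
        version=1
    )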
""" return self._values.get('version') @property def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]: """The encryption key that is used to encrypt this parameter. default :default: - default master key """ return self._values.get('encryption_key') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'SecureStringParameterAttributes(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(IStringListParameter, IParameter) class StringListParameter(aws_cdk.core.Resource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.StringListParameter"): """Creates a new StringList SSM Parameter. resource: :resource:: AWS::SSM::Parameter """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, string_list_value: typing.List[str], allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, parameter_name: typing.Optional[str]=None, simple_name: typing.Optional[bool]=None) -> None: """ :param scope: - :param id: - :param props: - :param string_list_value: The values of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. :param allowed_pattern: A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` Default: no validation is performed :param description: Information about the parameter that you want to add to the system. Default: none :param parameter_name: The name of the parameter. Default: - a name will be generated by CloudFormation :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` """ props = StringListParameterProps(string_list_value=string_list_value, allowed_pattern=allowed_pattern, description=description, parameter_name=parameter_name, simple_name=simple_name) jsii.create(StringListParameter, self, [scope, id, props]) @jsii.member(jsii_name="fromStringListParameterName") @classmethod def from_string_list_parameter_name(cls, scope: aws_cdk.core.Construct, id: str, string_list_parameter_name: str) -> "IStringListParameter": """Imports an external parameter of type string list. :param scope: - :param id: - :param string_list_parameter_name: - """ return jsii.sinvoke(cls, "fromStringListParameterName", [scope, id, string_list_parameter_name]) @jsii.member(jsii_name="grantRead") def grant_read(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants read (DescribeParameter, GetParameter, GetParameterHistory) permissions on the SSM Parameter. :param grantee: - """ return jsii.invoke(self, "grantRead", [grantee]) @jsii.member(jsii_name="grantWrite") def grant_write(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants write (PutParameter) permissions on the SSM Parameter. 
:param grantee: - """ return jsii.invoke(self, "grantWrite", [grantee]) @property @jsii.member(jsii_name="parameterArn") def parameter_arn(self) -> str: """The ARN of the SSM Parameter resource.""" return jsii.get(self, "parameterArn") @property @jsii.member(jsii_name="parameterName") def parameter_name(self) -> str: """The name of the SSM Parameter resource.""" return jsii.get(self, "parameterName") @property @jsii.member(jsii_name="parameterType") def parameter_type(self) -> str: """The type of the SSM Parameter resource.""" return jsii.get(self, "parameterType") @property @jsii.member(jsii_name="stringListValue") def string_list_value(self) -> typing.List[str]: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. Values in the array cannot contain commas (``,``). """ return jsii.get(self, "stringListValue") @property @jsii.member(jsii_name="encryptionKey") def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]: return jsii.get(self, "encryptionKey") @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.StringListParameterProps", jsii_struct_bases=[ParameterOptions], name_mapping={'allowed_pattern': 'allowedPattern', 'description': 'description', 'parameter_name': 'parameterName', 'simple_name': 'simpleName', 'string_list_value': 'stringListValue'}) class StringListParameterProps(ParameterOptions): def __init__(self, *, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, parameter_name: typing.Optional[str]=None, simple_name: typing.Optional[bool]=None, string_list_value: typing.List[str]): """Properties needed to create a StringList SSM Parameter. :param allowed_pattern: A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` Default: no validation is performed :param description: Information about the parameter that you want to add to the system. Default: none :param parameter_name: The name of the parameter. Default: - a name will be generated by CloudFormation :param simple_name: Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` :param string_list_value: The values of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. """ self._values = { 'string_list_value': string_list_value, } if allowed_pattern is not None: self._values["allowed_pattern"] = allowed_pattern if description is not None: self._values["description"] = description if parameter_name is not None: self._values["parameter_name"] = parameter_name if simple_name is not None: self._values["simple_name"] = simple_name @property def allowed_pattern(self) -> typing.Optional[str]: """A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` default :default: no validation is performed """ return self._values.get('allowed_pattern') @property def description(self) -> typing.Optional[str]: """Information about the parameter that you want to add to the system.
default :default: none """ return self._values.get('description') @property def parameter_name(self) -> typing.Optional[str]: """The name of the parameter. default :default: - a name will be generated by CloudFormation """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') @property def string_list_value(self) -> typing.List[str]: """The values of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. """ return self._values.get('string_list_value') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'StringListParameterProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.implements(IStringParameter, IParameter) class StringParameter(aws_cdk.core.Resource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-ssm.StringParameter"): """Creates a new String SSM Parameter. resource: :resource:: AWS::SSM::Parameter """ def __init__(self, scope: aws_cdk.core.Construct, id: str, *, string_value: str, type: typing.Optional["ParameterType"]=None, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, parameter_name: typing.Optional[str]=None, simple_name: typing.Optional[bool]=None) -> None: """ :param scope: - :param id: - :param props: - :param string_value: The value of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. :param type: The type of the string parameter. Default: ParameterType.STRING :param allowed_pattern: A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` Default: no validation is performed :param description: Information about the parameter that you want to add to the system. Default: none :param parameter_name: The name of the parameter. Default: - a name will be generated by CloudFormation :param simple_name: Indicates whether the parameter name is a simple name (i.e. does not include "/" separators). This is required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name.
Default: - auto-detect based on ``parameterName`` """ props = StringParameterProps(string_value=string_value, type=type, allowed_pattern=allowed_pattern, description=description, parameter_name=parameter_name, simple_name=simple_name) jsii.create(StringParameter, self, [scope, id, props]) @jsii.member(jsii_name="fromSecureStringParameterAttributes") @classmethod def from_secure_string_parameter_attributes(cls, scope: aws_cdk.core.Construct, id: str, *, version: jsii.Number, encryption_key: typing.Optional[aws_cdk.aws_kms.IKey]=None, parameter_name: str, simple_name: typing.Optional[bool]=None) -> "IStringParameter": """Imports a secure string parameter from the SSM parameter store. :param scope: - :param id: - :param attrs: - :param version: The version number of the value you wish to retrieve. This is required for secure strings. :param encryption_key: The encryption key that is used to encrypt this parameter. Default: - default master key :param parameter_name: The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` """ attrs = SecureStringParameterAttributes(version=version, encryption_key=encryption_key, parameter_name=parameter_name, simple_name=simple_name) return jsii.sinvoke(cls, "fromSecureStringParameterAttributes", [scope, id, attrs]) @jsii.member(jsii_name="fromStringParameterAttributes") @classmethod def from_string_parameter_attributes(cls, scope: aws_cdk.core.Construct, id: str, *, type: typing.Optional["ParameterType"]=None, version: typing.Optional[jsii.Number]=None, parameter_name: str, simple_name: typing.Optional[bool]=None) -> "IStringParameter": """Imports an external string parameter with name and optional version. :param scope: - :param id: - :param attrs: - :param type: The type of the string parameter. Default: ParameterType.STRING :param version: The version number of the value you wish to retrieve. Default: The latest version will be retrieved. :param parameter_name: The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. 
Default: - auto-detect based on ``parameterName`` """ attrs = StringParameterAttributes(type=type, version=version, parameter_name=parameter_name, simple_name=simple_name) return jsii.sinvoke(cls, "fromStringParameterAttributes", [scope, id, attrs]) @jsii.member(jsii_name="fromStringParameterName") @classmethod def from_string_parameter_name(cls, scope: aws_cdk.core.Construct, id: str, string_parameter_name: str) -> "IStringParameter": """Imports an external string parameter by name. :param scope: - :param id: - :param string_parameter_name: - """ return jsii.sinvoke(cls, "fromStringParameterName", [scope, id, string_parameter_name]) @jsii.member(jsii_name="valueForSecureStringParameter") @classmethod def value_for_secure_string_parameter(cls, scope: aws_cdk.core.Construct, parameter_name: str, version: jsii.Number) -> str: """Returns a token that will resolve (during deployment). :param scope: Some scope within a stack. :param parameter_name: The name of the SSM parameter. :param version: The parameter version (required for secure strings). """ return jsii.sinvoke(cls, "valueForSecureStringParameter", [scope, parameter_name, version]) @jsii.member(jsii_name="valueForStringParameter") @classmethod def value_for_string_parameter(cls, scope: aws_cdk.core.Construct, parameter_name: str, version: typing.Optional[jsii.Number]=None) -> str: """Returns a token that will resolve (during deployment) to the string value of an SSM string parameter. :param scope: Some scope within a stack. :param parameter_name: The name of the SSM parameter. :param version: The parameter version (recommended in order to ensure that the value won't change during deployment). """ return jsii.sinvoke(cls, "valueForStringParameter", [scope, parameter_name, version]) @jsii.member(jsii_name="valueForTypedStringParameter") @classmethod def value_for_typed_string_parameter(cls, scope: aws_cdk.core.Construct, parameter_name: str, type: typing.Optional["ParameterType"]=None, version: typing.Optional[jsii.Number]=None) -> str: """Returns a token that will resolve (during deployment) to the string value of an SSM string parameter. :param scope: Some scope within a stack. :param parameter_name: The name of the SSM parameter. :param type: The type of the SSM parameter. :param version: The parameter version (recommended in order to ensure that the value won't change during deployment). """ return jsii.sinvoke(cls, "valueForTypedStringParameter", [scope, parameter_name, type, version]) @jsii.member(jsii_name="valueFromLookup") @classmethod def value_from_lookup(cls, scope: aws_cdk.core.Construct, parameter_name: str) -> str: """Reads the value of an SSM parameter during synthesis through an environmental context provider. Requires that the stack this scope is defined in will have explicit account/region information. Otherwise, it will fail during synthesis. :param scope: - :param parameter_name: - """ return jsii.sinvoke(cls, "valueFromLookup", [scope, parameter_name]) @jsii.member(jsii_name="grantRead") def grant_read(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants read (DescribeParameter, GetParameter, GetParameterHistory) permissions on the SSM Parameter. :param grantee: - """ return jsii.invoke(self, "grantRead", [grantee]) @jsii.member(jsii_name="grantWrite") def grant_write(self, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant: """Grants write (PutParameter) permissions on the SSM Parameter. 
:param grantee: - """ return jsii.invoke(self, "grantWrite", [grantee]) @property @jsii.member(jsii_name="parameterArn") def parameter_arn(self) -> str: """The ARN of the SSM Parameter resource.""" return jsii.get(self, "parameterArn") @property @jsii.member(jsii_name="parameterName") def parameter_name(self) -> str: """The name of the SSM Parameter resource.""" return jsii.get(self, "parameterName") @property @jsii.member(jsii_name="parameterType") def parameter_type(self) -> str: """The type of the SSM Parameter resource.""" return jsii.get(self, "parameterType") @property @jsii.member(jsii_name="stringValue") def string_value(self) -> str: """The parameter value. Value must not nest another parameter. Do not use {{}} in the value. """ return jsii.get(self, "stringValue") @property @jsii.member(jsii_name="encryptionKey") def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]: return jsii.get(self, "encryptionKey") @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.StringParameterAttributes", jsii_struct_bases=[CommonStringParameterAttributes], name_mapping={'parameter_name': 'parameterName', 'simple_name': 'simpleName', 'type': 'type', 'version': 'version'}) class StringParameterAttributes(CommonStringParameterAttributes): def __init__(self, *, parameter_name: str, simple_name: typing.Optional[bool]=None, type: typing.Optional["ParameterType"]=None, version: typing.Optional[jsii.Number]=None): """ :param parameter_name: The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` :param type: The type of the string parameter. Default: ParameterType.STRING :param version: The version number of the value you wish to retrieve. Default: The latest version will be retrieved. """ self._values = { 'parameter_name': parameter_name, } if simple_name is not None: self._values["simple_name"] = simple_name if type is not None: self._values["type"] = type if version is not None: self._values["version"] = version @property def parameter_name(self) -> str: """The name of the parameter store value. This value can be a token or a concrete string. If it is a concrete string and includes "/" it must also be prefixed with a "/" (fully-qualified). """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') @property def type(self) -> typing.Optional["ParameterType"]: """The type of the string parameter. 
default :default: ParameterType.STRING """ return self._values.get('type') @property def version(self) -> typing.Optional[jsii.Number]: """The version number of the value you wish to retrieve. default :default: The latest version will be retrieved. """ return self._values.get('version') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'StringParameterAttributes(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) @jsii.data_type(jsii_type="@aws-cdk/aws-ssm.StringParameterProps", jsii_struct_bases=[ParameterOptions], name_mapping={'allowed_pattern': 'allowedPattern', 'description': 'description', 'parameter_name': 'parameterName', 'simple_name': 'simpleName', 'string_value': 'stringValue', 'type': 'type'}) class StringParameterProps(ParameterOptions): def __init__(self, *, allowed_pattern: typing.Optional[str]=None, description: typing.Optional[str]=None, parameter_name: typing.Optional[str]=None, simple_name: typing.Optional[bool]=None, string_value: str, type: typing.Optional["ParameterType"]=None): """Properties needed to create a String SSM parameter. :param allowed_pattern: A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` Default: no validation is performed :param description: Information about the parameter that you want to add to the system. Default: none :param parameter_name: The name of the parameter. Default: - a name will be generated by CloudFormation :param simple_name: Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. Default: - auto-detect based on ``parameterName`` :param string_value: The value of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. :param type: The type of the string parameter. Default: ParameterType.STRING """ self._values = { 'string_value': string_value, } if allowed_pattern is not None: self._values["allowed_pattern"] = allowed_pattern if description is not None: self._values["description"] = description if parameter_name is not None: self._values["parameter_name"] = parameter_name if simple_name is not None: self._values["simple_name"] = simple_name if type is not None: self._values["type"] = type @property def allowed_pattern(self) -> typing.Optional[str]: """A regular expression used to validate the parameter value. For example, for String types with values restricted to numbers, you can specify the following: ``^\d+$`` default :default: no validation is performed """ return self._values.get('allowed_pattern') @property def description(self) -> typing.Optional[str]: """Information about the parameter that you want to add to the system. default :default: none """ return self._values.get('description') @property def parameter_name(self) -> typing.Optional[str]: """The name of the parameter. 
default :default: - a name will be generated by CloudFormation """ return self._values.get('parameter_name') @property def simple_name(self) -> typing.Optional[bool]: """Indicates of the parameter name is a simple name (i.e. does not include "/" separators). This is only required only if ``parameterName`` is a token, which means we are unable to detect if the name is simple or "path-like" for the purpose of rendering SSM parameter ARNs. If ``parameterName`` is not specified, ``simpleName`` must be ``true`` (or undefined) since the name generated by AWS CloudFormation is always a simple name. default :default: - auto-detect based on ``parameterName`` """ return self._values.get('simple_name') @property def string_value(self) -> str: """The value of the parameter. It may not reference another parameter and ``{{}}`` cannot be used in the value. """ return self._values.get('string_value') @property def type(self) -> typing.Optional["ParameterType"]: """The type of the string parameter. default :default: ParameterType.STRING """ return self._values.get('type') def __eq__(self, rhs) -> bool: return isinstance(rhs, self.__class__) and rhs._values == self._values def __ne__(self, rhs) -> bool: return not (rhs == self) def __repr__(self) -> str: return 'StringParameterProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items()) __all__ = ["CfnAssociation", "CfnAssociationProps", "CfnDocument", "CfnDocumentProps", "CfnMaintenanceWindow", "CfnMaintenanceWindowProps", "CfnMaintenanceWindowTarget", "CfnMaintenanceWindowTargetProps", "CfnMaintenanceWindowTask", "CfnMaintenanceWindowTaskProps", "CfnParameter", "CfnParameterProps", "CfnPatchBaseline", "CfnPatchBaselineProps", "CfnResourceDataSync", "CfnResourceDataSyncProps", "CommonStringParameterAttributes", "IParameter", "IStringListParameter", "IStringParameter", "ParameterOptions", "ParameterType", "SecureStringParameterAttributes", "StringListParameter", "StringListParameterProps", "StringParameter", "StringParameterAttributes", "StringParameterProps", "__jsii_assembly__"] publication.publish()
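The jsii module in the record above documents the main SSM entry points (the StringParameter construct, the value_for_string_parameter classmethod, grant_read). What follows is a minimal usage sketch, not part of the record: it assumes the CDK v1 Python packages the module binds (aws_cdk.core, aws_cdk.aws_iam, aws_cdk.aws_ssm) are installed, and the stack, role, and parameter names are illustrative only.

# Hypothetical usage sketch for the API documented above (CDK v1 Python).
import aws_cdk.core as core
import aws_cdk.aws_iam as iam
import aws_cdk.aws_ssm as ssm


class DemoStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Provision a String parameter; parameter_name is optional and
        # CloudFormation generates a name when it is omitted.
        param = ssm.StringParameter(
            self, "Config",
            string_value="hello",
            parameter_name="/demo/config",  # illustrative name
            description="Example configuration value",
        )

        # Grant read (DescribeParameter, GetParameter, GetParameterHistory)
        # on the parameter to some principal; the role is illustrative.
        role = iam.Role(
            self, "Reader",
            assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        )
        param.grant_read(role)

        # Resolve an existing parameter's value at deployment time.
        other_value = ssm.StringParameter.value_for_string_parameter(
            self, "/demo/other"
        )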
48.735575
1,113
0.690617
22,478
204,397
6.138891
0.025492
0.04667
0.015581
0.021813
0.833647
0.806957
0.774259
0.7617
0.746395
0.737169
0
0.000777
0.181906
204,397
4,193
1,114
48.747198
0.824443
0.404825
0
0.586006
0
0
0.159985
0.073558
0
0
0
0
0
1
0.279883
false
0
0.006997
0.100292
0.570845
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
9
15098d81374eaa632d3b86bb9b0644d2ce366a1d
868
py
Python
stubs/loboris-v3_2_9-esp32/time.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
stubs/loboris-v3_2_9-esp32/time.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
stubs/loboris-v3_2_9-esp32/time.py
mattytrentini/micropython-stubs
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
[ "MIT" ]
null
null
null
""" Module: 'time' on esp32_LoBo 3.2.9 """ # MCU: (sysname='esp32_LoBo', nodename='esp32_LoBo', release='3.2.9', version='ESP32_LoBo_v3.2.9 on 2018-04-12', machine='ESP32 board with ESP32') # Stubber: 1.1.2 - updated from typing import Any def block_sleep(*args) -> Any: pass def gmtime(*args) -> Any: pass def localtime(*args) -> Any: pass def mktime(*args) -> Any: pass def sleep(*args) -> Any: pass def sleep_ms(*args) -> Any: pass def sleep_us(*args) -> Any: pass def strftime(*args) -> Any: pass def ticks_add(*args) -> Any: pass def ticks_base(*args) -> Any: pass def ticks_cpu(*args) -> Any: pass def ticks_diff(*args) -> Any: pass def ticks_ms(*args) -> Any: pass def ticks_us(*args) -> Any: pass def tickscpu_diff(*args) -> Any: pass def time(*args) -> Any: pass
12.225352
146
0.602535
133
868
3.819549
0.330827
0.220472
0.346457
0.413386
0.488189
0
0
0
0
0
0
0.048193
0.235023
868
70
147
12.4
0.716867
0.236175
0
0.484848
0
0
0
0
0
0
0
0
0
1
0.484848
true
0.484848
0.030303
0
0.515152
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
1
0
0
7
12bf0e3ea8d36c0f8466c7110d1ca8693bd9f323
18,836
py
Python
senslify/verify.py
gollum18/senslify
a003ccf8febdf0ba2122200866599fb6a2616a03
[ "MIT" ]
2
2019-08-07T16:50:58.000Z
2019-08-08T12:31:56.000Z
senslify/verify.py
gollum18/senslify-web
a003ccf8febdf0ba2122200866599fb6a2616a03
[ "MIT" ]
null
null
null
senslify/verify.py
gollum18/senslify-web
a003ccf8febdf0ba2122200866599fb6a2616a03
[ "MIT" ]
null
null
null
# THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
# APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
# HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
# WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
# PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
# DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
# CORRECTION.

# Name: verify.py
# Since: Aug. 8th, 2019
# Author: Christen Ford
# Description: Contains useful methods for verifying Senslify data objects.

import simplejson


async def _verify_find_request(request, params):
    """Verifies a received 'find' REST command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the REST
            command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "target" not in params:
        return False, "ERROR: Request params requires 'target' field!"
    target = params["target"]
    if target != "groups" and target != "rtypes" and target != "sensors" and target != "readings":
        return False, "ERROR: Request parameter 'target' must be one of {'groups', 'rtypes', 'sensors', 'readings'}!"
    if target == "sensors":
        if "groupid" not in params:
            return False, "ERROR: Request params requires 'groupid' field!"
        try:
            groupid = int(params["groupid"])
        except Exception:
            return False, "ERROR: A parameter is of incorrect type!"
        if groupid < 0:
            return False, "ERROR: Request parameter 'groupid' must be >= 0!"
        if not await request.app["db"].does_group_exist(groupid):
            return False, "ERROR: No such group provisioned into the system!"
    elif target == "readings":
        if "groupid" not in params:
            return False, "ERROR: Request params requires 'groupid' field!"
        if "sensorid" not in params:
            return False, "ERROR: Request params requires 'sensorid' field!"
        try:
            groupid = int(params["groupid"])
            sensorid = int(params["sensorid"])
        except Exception:
            return False, "ERROR: A parameter is of incorrect type!"
        if groupid < 0:
            return False, "ERROR: Request parameter 'groupid' must be >= 0!"
        if sensorid < 0:
            return False, "ERROR: Request parameter 'sensorid' must be >= 0!"
        if not await request.app["db"].does_group_exist(groupid):
            return False, "ERROR: No such group provisioned into the system!"
        if not await request.app["db"].does_sensor_exist(sensorid, groupid):
            return False, "ERROR: No such sensor provisioned into the system!"
    return True, None


async def _verify_stats_request(request, params):
    """Verifies a received RQST_STATS WebSocket command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the
            WebSocket command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "target" not in params:
        return False, "ERROR: Request params requires 'target' field!"
    if "groupid" not in params:
        return False, "ERROR: Request params requires 'groupid' field!"
    if "rtypeid" not in params:
        return False, "ERROR: Request params requires 'rtypeid' field!"
    if "start_ts" not in params:
        return False, "ERROR: Request params requires 'start_ts' field!"
    if "end_ts" not in params:
        return False, "ERROR: Request params requires 'end_ts' field!"
    target = params["target"]
    if target != "group" and target != "sensor":
        return False, "ERROR: Request parameter 'target' must be one of {'group', 'sensor'}!"
    if target == "sensor":
        if "sensorid" not in params:
            return False, "ERROR: Request params requires 'sensorid' field!"
    try:
        groupid = int(params["groupid"])
        rtypeid = int(params["rtypeid"])
        start_ts = int(params["start_ts"])
        end_ts = int(params["end_ts"])
        if target == "sensor":
            sensorid = int(params["sensorid"])
            if sensorid < 0:
                return False, "ERROR: Request parameter 'sensorid' must be >= 0!"
    except Exception:
        return False, "ERROR: A parameter is of incorrect type!"
    if groupid < 0:
        return False, "ERROR: Request parameter 'groupid' must be >= 0!"
    if rtypeid < 0:
        return False, "ERROR: Request parameter 'rtypeid' must be >= 0!"
    if start_ts < 0:
        return False, "ERROR: Request parameter 'start_ts' must be >= 0!"
    if end_ts < 0:
        return False, "ERROR: Request parameter 'end_ts' must be >= 0!"
    if start_ts >= end_ts:
        return False, "ERROR: Request parameter 'start_ts' must be < 'end_ts'!"
    if not await request.app["db"].does_group_exist(groupid):
        return False, "ERROR: No such group provisioned into the system!"
    if target == "sensor":
        if sensorid < 0:
            return False, "ERROR: Request parameter 'sensorid' must be >= 0!"
        if not await request.app["db"].does_sensor_exist(sensorid, groupid):
            return False, "ERROR: No such sensor provisioned into the system!"
    if not await request.app["db"].does_rtype_exist(rtypeid):
        return False, "ERROR: No such reading type provisioned into the system!"
    return True, None


async def _verify_download_request(request, params):
    """Verifies a received RQST_DOWNLOAD WebSocket command or 'download' REST command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the
            WebSocket command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "sensorid" not in params:
        return False, "ERROR: Request params requires 'sensorid' field!"
    if "groupid" not in params:
        return False, "ERROR: Request params requires 'groupid' field!"
    if "start_ts" not in params:
        return False, "ERROR: Request params requires 'start_ts' field!"
    if "end_ts" not in params:
        return False, "ERROR: Request params requires 'end_ts' field!"
    try:
        groupid = int(params["groupid"])
        sensorid = int(params["sensorid"])
        start_ts = int(params["start_ts"])
        end_ts = int(params["end_ts"])
    except Exception:
        return False, "ERROR: A parameter is of incorrect type!"
    if groupid < 0:
        return False, "ERROR: Request parameter 'groupid' must be >= 0!"
    if sensorid < 0:
        return False, "ERROR: Request parameter 'sensorid' must be >= 0!"
    if start_ts < 0:
        return False, "ERROR: Request parameter 'start_ts' must be >= 0!"
    if end_ts < 0:
        return False, "ERROR: Request parameter 'end_ts' must be >= 0!"
    if start_ts >= end_ts:
        return False, "ERROR: Request parameter 'start_ts' must be < 'end_ts'!"
    if not await request.app["db"].does_group_exist(groupid):
        return False, "ERROR: No such group provisioned into the system!"
    if not await request.app["db"].does_sensor_exist(sensorid, groupid):
        return False, "ERROR: No such sensor provisioned into the system!"
    return True, None


async def _verify_upload_request(request, params):
    """Verifies a received 'upload' REST command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the REST
            command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "readings" not in params:
        return False, "ERROR: Request params requires 'readings' field!"
    readings = params["readings"]
    if not isinstance(readings, list):
        return False, "ERROR: Request parameter 'readings' must be a JSON array!"
    for reading in readings:
        if "groupid" not in reading:
            return False, "ERROR: Request params requires 'groupid' field!"
        if "sensorid" not in reading:
            return False, "ERROR: Request params requires 'sensorid' field!"
        if "rtypeid" not in reading:
            return False, "ERROR: Request params requires 'rtypeid' field!"
        if "val" not in reading:
            return False, "ERROR: Request params requires 'val' field!"
        if "ts" not in reading:
            return False, "ERROR: Request params requires 'ts' field!"
        try:
            groupid = int(reading["groupid"])
            sensorid = int(reading["sensorid"])
            rtypeid = int(reading["rtypeid"])
            val = float(reading["val"])
            ts = int(reading["ts"])
        except Exception:
            return False, "ERROR: A parameter is of incorrect type!"
        if groupid < 0:
            return False, "ERROR: Request parameter 'groupid' must be >= 0!"
        if sensorid < 0:
            return False, "ERROR: Request parameter 'sensorid' must be >= 0!"
        if rtypeid < 0:
            return False, "ERROR: Request parameter 'rtypeid' must be >= 0!"
        if ts < 0:
            return False, "ERROR: Request parameter 'ts' must be >= 0!"
        if not await request.app["db"].does_group_exist(groupid):
            return False, "ERROR: No such group provisioned into the system!"
        if not await request.app["db"].does_sensor_exist(sensorid, groupid):
            return False, "ERROR: No such sensor provisioned into the system!"
        if not await request.app["db"].does_rtype_exist(rtypeid):
            return False, "ERROR: No such reading type provisioned into the system!"
    return True, None


async def _verify_provision_request(request, params):
    """Verifies a received 'provision' REST command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the REST
            command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "target" not in params:
        return False, "ERROR: Request params requires 'target' field!"
    target = params["target"]
    if target != "sensor" and target != "group":
        return False, "ERROR: Invalid 'target' specified! Must be one of {'sensor', 'group'}."
    if target == "sensor":
        if "groupid" not in params:
            return False, "ERROR: Request params requires 'groupid' field!"
        try:
            groupid = int(params["groupid"])
        except Exception:
            return False, "ERROR: Request parameter 'groupid' must be an integer!"
        if groupid < 0:
            return False, "ERROR: Request parameter 'groupid' must be >= 0!"
    if "alias" in params:
        if not params["alias"]:
            return False, "ERROR: Request parameter 'alias' must contain at least one (1) character!"
    return True, None


async def _verify_join_command(request, params):
    """Verifies a received RQST_JOIN WebSocket command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the
            WebSocket command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "groupid" not in params:
        return False, "ERROR: Request requires 'groupid' field!"
    if "sensorid" not in params:
        return False, "ERROR: Request requires 'sensorid' field!"
    try:
        groupid = int(params["groupid"])
        sensorid = int(params["sensorid"])
    except Exception:
        return False, "ERROR: A parameter is of incorrect type!"
    if groupid < 0:
        return False, "ERROR: Request parameter 'groupid' must be >= 0."
    if sensorid < 0:
        return False, "ERROR: Request parameter 'sensorid' must be >= 0."
    if not await request.app["db"].does_group_exist(groupid):
        return False, "ERROR: No such group provisioned into the system!"
    if not await request.app["db"].does_sensor_exist(sensorid, groupid):
        return False, "ERROR: No such sensor provisioned into the system!"
    return True, None


async def _verify_close_command(request, params):
    """Verifies a received RQST_CLOSE WebSocket command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the
            WebSocket command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "groupid" not in params:
        return False, "ERROR: Request requires 'groupid' field!"
    if "sensorid" not in params:
        return False, "ERROR: Request requires 'sensorid' field!"
    try:
        groupid = int(params["groupid"])
        sensorid = int(params["sensorid"])
    except Exception:
        return False, "ERROR: A parameter is of incorrect type!"
    if groupid < 0:
        return False, "ERROR: Request parameter 'groupid' must be >= 0."
    if sensorid < 0:
        return False, "ERROR: Request parameter 'sensorid' must be >= 0."
    if not await request.app["db"].does_group_exist(groupid):
        return False, "ERROR: No such group provisioned into the system!"
    if not await request.app["db"].does_sensor_exist(sensorid, groupid):
        return False, "ERROR: No such sensor provisioned into the system!"
    return True, None


async def _verify_stream_command(request, params):
    """Verifies a received RQST_STREAM WebSocket command.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        params (dict-like): A dictionary like object containing the
            WebSocket command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if not params:
        return False, "ERROR: Request parameters must not be null!"
    if not isinstance(params, dict):
        return False, "ERROR: Request parameters must be a JSON object!"
    if "rtypeid" not in params:
        return False, "ERROR: Request requires 'rtypeid' field!"
    try:
        rtypeid = int(params["rtypeid"])
    except Exception:
        return False, "ERROR: A parameter is of incorrect type!"
    if rtypeid < 0:
        return False, "ERROR: Request parameter 'rtypeid' must be >= 0!"
    if not await request.app["db"].does_rtype_exist(rtypeid):
        return False, "ERROR: No such reading type provisioned into the system!"
    return True, None


async def verify_rest_request(request):
    """Determines if a REST request is valid.

    Arguments:
        request (aiohttp.web.Request): The REST request to validate.

    Returns:
        A tuple containing (boolean, str) indicating whether the REST
        request is valid as well as a status string.
    """
    json = await request.json()
    # check if the command and parameters are present
    if "cmd" not in json:
        return False, "ERROR: Request requires 'cmd' field!"
    if "params" not in json:
        return False, "ERROR: Request requires 'params' field!"
    cmd = json["cmd"]
    params = json["params"]
    if cmd == "find":
        return await _verify_find_request(request, params)
    elif cmd == "stats":
        return await _verify_stats_request(request, params)
    elif cmd == "download":
        return await _verify_download_request(request, params)
    elif cmd == "upload":
        return await _verify_upload_request(request, params)
    elif cmd == "provision":
        return await _verify_provision_request(request, params)
    else:
        return False, "ERROR: 'cmd' must be one of {'find', 'stats', 'download', 'upload', 'provision'}!"


async def verify_ws_request(request, json):
    """Verifies a received WebSocket request.

    This function routes requests to other verification functions based on
    the 'cmd' field specified in the json message accompanying the request.

    Args:
        request (aiohttp.Web.Request): The request from the client.
        json (dict-like): A dictionary like object containing the WebSocket
            command request parameters.

    Returns:
        (boolean, str): A boolean indicating if the request is valid. The
            other parameter is an error message if the boolean is False,
            and is None otherwise.
    """
    if "cmd" not in json:
        return False, "ERROR: Request requires 'cmd' field!"
    cmd = json["cmd"]
    if cmd == "RQST_JOIN":
        return await _verify_join_command(request, json)
    elif cmd == "RQST_CLOSE":
        return await _verify_close_command(request, json)
    elif cmd == "RQST_STREAM":
        return await _verify_stream_command(request, json)
    elif cmd == "RQST_SENSOR_STATS":
        return await _verify_stats_request(request, json)
    elif cmd == "RQST_DOWNLOAD":
        return await _verify_download_request(request, json)
    else:
        return False, "ERROR: 'cmd' must be one of {'RQST_JOIN', 'RQST_CLOSE', 'RQST_STREAM', 'RQST_SENSOR_STATS', 'RQST_DOWNLOAD'}!"
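Not part of verify.py itself: a minimal sketch of how these validators are presumably wired into an aiohttp handler. The handler name and response shape are illustrative; only the (bool, str) return contract and the request.app["db"] dependency come from the module above, and the import path assumes the package layout implied by the record's file path (senslify/verify.py).

# Hypothetical aiohttp handler calling the validators defined above.
from aiohttp import web

from senslify.verify import verify_rest_request


async def rest_handler(request: web.Request) -> web.Response:
    status, msg = await verify_rest_request(request)
    if not status:
        # msg carries the "ERROR: ..." string produced by the validator
        return web.json_response({"error": msg}, status=400)
    # ... dispatch the validated command here ...
    return web.json_response({"status": "ok"})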
51.889807
135
0.677639
2,526
18,836
4.998021
0.079177
0.089743
0.130535
0.138455
0.841109
0.812515
0.792554
0.759604
0.753505
0.717782
0
0.003566
0.225738
18,836
362
136
52.033149
0.862109
0.038596
0
0.704846
0
0.013216
0.40537
0
0
0
0
0
0
1
0
false
0
0.004405
0
0.246696
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
12d275c26a44dcfd64a4be5f3b4553618bd3b6ca
10,564
py
Python
jmetal/util/test/test_neighborhood.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
335
2017-03-16T19:44:50.000Z
2022-03-30T08:50:46.000Z
jmetal/util/test/test_neighborhood.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
85
2017-05-16T06:40:51.000Z
2022-02-05T23:43:49.000Z
jmetal/util/test/test_neighborhood.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
130
2017-02-08T01:19:15.000Z
2022-03-25T08:32:08.000Z
import unittest import numpy from jmetal.core.solution import Solution from jmetal.util.ckecking import NoneParameterException, InvalidConditionException from jmetal.util.neighborhood import WeightVectorNeighborhood, TwoDimensionalMesh, L5 class WeightVectorNeighborhoodTestCases(unittest.TestCase): def test_should_constructor_work_properly(self) -> None: number_of_weight_vectors = 100 neighborhood_size = 20 neighborhood: WeightVectorNeighborhood = WeightVectorNeighborhood(number_of_weight_vectors, neighborhood_size) self.assertEqual(number_of_weight_vectors, neighborhood.number_of_weight_vectors) self.assertEqual(neighborhood_size, neighborhood.neighborhood_size) self.assertEqual(2, neighborhood.weight_vector_size) self.assertEqual(0.0, neighborhood.weight_vectors[0][0]) self.assertEqual(1.0, neighborhood.weight_vectors[0][1]) self.assertEqual(0.0101010101010101010101, neighborhood.weight_vectors[1][0]) self.assertEqual(0.989898989898989898, neighborhood.weight_vectors[1][1]) self.assertEqual(1.0, neighborhood.weight_vectors[99][0]) self.assertEqual(0.0, neighborhood.weight_vectors[99][1]) self.assertTrue(numpy.array_equal(numpy.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]), neighborhood.neighborhood[0])) self.assertTrue(numpy.array_equal(numpy.array([69, 70, 68, 71, 67, 72, 66, 73, 65, 64, 74, 75, 63, 76, 62, 77, 61, 78, 60, 79]), neighborhood.neighborhood[69])) def test_should_get_neighbors_work_properly_with_two_objectives(self): number_of_weight_vectors = 100 neighborhood_size = 20 neighborhood: WeightVectorNeighborhood = WeightVectorNeighborhood(number_of_weight_vectors, neighborhood_size) solution_list = [Solution(2, 2) for _ in range(number_of_weight_vectors)] neighbors = neighborhood.get_neighbors(0, solution_list) self.assertEqual(neighborhood_size, len(neighbors)) self.assertTrue(solution_list[0] == neighbors[0]) self.assertTrue(solution_list[19] == neighbors[19]) neighbors = neighborhood.get_neighbors(69, solution_list) self.assertEqual(neighborhood_size, len(neighbors)) self.assertTrue(solution_list[69] == neighbors[0]) self.assertTrue(solution_list[79] == neighbors[19]) class TwoDimensionalMeshTestCases(unittest.TestCase): def test_should_get_neighbors_throw_an_exception_if_the_solution_list_is_none(self): """ Topology: north = -1, 0 south = 1, 0 east = 0, 1 west = 0, -1 :return: """ neighborhood = TwoDimensionalMesh(3, 3, [[-1, 0], [1, 0], [0, 1], [0, -1]]) with self.assertRaises(NoneParameterException): neighborhood.get_neighbors(0, None) def test_should_get_neighbors_throw_an_exception_if_the_solution_list_is_empty(self): """ Topology: north = -1, 0 south = 1, 0 east = 0, 1 west = 0, -1 """ neighborhood = TwoDimensionalMesh(3, 3, [[-1, 0], [1, 0], [0, 1], [0, -1]]) with self.assertRaises(InvalidConditionException): neighborhood.get_neighbors(0, []) def test_should_get_neighbors_return_four_neighbors_case1(self): """ Case 1 Solution list: 0 1 2 3 4 5 6 7 8 The solution location is 1, so the neighborhood is 7, 0, 2, 4 """ rows = 3 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(1, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[7] in result) self.assertTrue(solution_list[0] in result) self.assertTrue(solution_list[2] in result) self.assertTrue(solution_list[4] in result) def test_should_get_neighbors_return_four_neighbors_case2(self): """ Case 1 Solution list: 0 1 2 3 
4 5 6 7 8 The solution location is 4, so the neighborhood is 1, 3, 5, 7 """ rows = 3 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(4, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[1] in result) self.assertTrue(solution_list[3] in result) self.assertTrue(solution_list[5] in result) self.assertTrue(solution_list[7] in result) def test_should_get_neighbors_return_four_neighbors_case3(self): """ Case 1 Solution list: 0 1 2 3 4 5 6 7 8 The solution location is 0, so the neighborhood is 1, 3, 2, 6 """ rows = 3 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(0, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[1] in result) self.assertTrue(solution_list[3] in result) self.assertTrue(solution_list[2] in result) self.assertTrue(solution_list[6] in result) def test_should_get_neighbors_return_four_neighbors_case4(self): """ Case 1 Solution list: 0 1 2 3 4 5 6 7 8 The solution location is 2, so the neighborhood is 1, 5, 8, 0 """ rows = 3 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(2, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[1] in result) self.assertTrue(solution_list[5] in result) self.assertTrue(solution_list[8] in result) self.assertTrue(solution_list[0] in result) def test_should_get_neighbors_return_four_neighbors_case5(self): """ Case 1 Solution list: 0 1 2 3 4 5 6 7 8 The solution location is 8, so the neighborhood is 2, 5, 6, 7 """ rows = 3 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(8, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[2] in result) self.assertTrue(solution_list[5] in result) self.assertTrue(solution_list[6] in result) self.assertTrue(solution_list[7] in result) def test_should_get_neighbors_return_four_neighbors_case6(self): """ Case 1 Solution list: 0 1 2 3 4 5 The solution location is 0, so the neighborhood is 1, 3, 3, 2 """ rows = 2 columns = 3 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]]) result = neighborhood.get_neighbors(0, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[1] in result) self.assertTrue(solution_list[3] in result) self.assertTrue(solution_list[2] in result) class L5TestCases(unittest.TestCase): def test_should_get_neighbors_return_four_neighbors_case1(self): rows = 1 columns = 1 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = L5(rows, columns) result = neighborhood.get_neighbors(0, solution_list) self.assertEqual(4, len(result)) def test_should_get_neighbors_return_four_neighbors_case2(self): """ Solution list: 0, 1 Solution location: 0; the neighborhood is: 0, 1 """ rows = 1 columns = 2 solution_list = [] for i in range(rows * columns): solution = Solution(i, 2) solution.variables = [i, i+1] solution_list.append(solution) neighborhood = L5(rows, columns) result = 
neighborhood.get_neighbors(0, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[0] in result) self.assertTrue(solution_list[1] in result) self.assertEqual(2, result.count(solution_list[0])) self.assertEqual(2, result.count(solution_list[1])) def test_should_get_neighbors_return_four_neighbors_case3(self): """ Solution list: 0, 1 Solution location: 1; the neighborhood is: 0, 1 """ rows = 1 columns = 2 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = L5(rows, columns) result = neighborhood.get_neighbors(1, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[0] in result) self.assertTrue(solution_list[1] in result) self.assertEqual(2, result.count(solution_list[0])) self.assertEqual(2, result.count(solution_list[1])) def test_should_get_neighbors_return_four_neighbors_case4(self): """ Solution list: 0 1 2 3 Solution location: 0; the neighborhood is: 1, 2 """ rows = 2 columns = 2 solution_list = [Solution(i, 2) for i in range(rows * columns)] neighborhood = L5(rows, columns) result = neighborhood.get_neighbors(0, solution_list) self.assertEqual(4, len(result)) self.assertTrue(solution_list[1] in result) self.assertTrue(solution_list[2] in result) self.assertTrue(solution_list[3] not in result) self.assertTrue(solution_list[0] not in result) self.assertEqual(2, result.count(solution_list[1])) self.assertEqual(2, result.count(solution_list[2])) if __name__ == '__main__': unittest.main()
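A standalone sketch mirroring the constructor and get_neighbors calls the test suite above exercises; it shows nothing beyond what the tests already demonstrate, and the variable names are illustrative.

# Minimal standalone use of TwoDimensionalMesh, as exercised by the tests.
from jmetal.core.solution import Solution
from jmetal.util.neighborhood import TwoDimensionalMesh

rows, columns = 3, 3
solutions = [Solution(2, 2) for _ in range(rows * columns)]
# North/south/east/west offsets, i.e. a von Neumann neighborhood.
mesh = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
neighbors = mesh.get_neighbors(4, solutions)
print(len(neighbors))  # the centre cell of the 3x3 mesh has 4 neighbors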
37.594306
136
0.625426
1,324
10,564
4.814199
0.095921
0.143081
0.120803
0.142767
0.815657
0.797772
0.771729
0.706934
0.702698
0.687794
0
0.060472
0.270541
10,564
280
137
37.728571
0.766675
0.102707
0
0.660377
0
0
0.000901
0
0
0
0
0
0.415094
1
0.08805
false
0
0.031447
0
0.138365
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
12f8ca8da942a364bf26e6f85fd934d42ca4c29e
34,115
py
Python
python/test/test_binsearch_col.py
tecnickcom/binsearch
1ff439ed6c48be1b549dc6d1080b83761beea2b8
[ "MIT" ]
5
2017-12-29T12:56:21.000Z
2020-10-13T21:39:29.000Z
python/test/test_binsearch_col.py
tecnickcom/binsearch
1ff439ed6c48be1b549dc6d1080b83761beea2b8
[ "MIT" ]
11
2017-12-16T18:28:37.000Z
2018-09-06T16:09:16.000Z
python/test/test_binsearch_col.py
tecnickcom/binsearch
1ff439ed6c48be1b549dc6d1080b83761beea2b8
[ "MIT" ]
1
2018-01-02T17:48:10.000Z
2018-01-02T17:48:10.000Z
"""Tests for binsearch module - column mode.""" import binsearch as bs import os from unittest import TestCase nrows = 251 testDataCol8 = [ (0, 251, 0x00, 0, 0, 0, 1, 2, 2), (1, 251, 0x00, 1, 1, 1, 1, 2, 2), (0, 251, 0x01, 2, 2, 2, 2, 3, 3), (0, 251, 0x0F, 16, 16, 16, 16, 17, 17), (0, 251, 0x10, 17, 17, 17, 17, 18, 18), (0, 251, 0x1F, 32, 32, 32, 32, 33, 33), (0, 251, 0x20, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F, 48, 48, 48, 48, 49, 49), (0, 251, 0x30, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F, 64, 64, 64, 64, 65, 65), (0, 251, 0x40, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F, 80, 80, 80, 80, 81, 81), (0, 251, 0x50, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F, 96, 96, 96, 96, 97, 97), (0, 251, 0x60, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F, 112, 112, 112, 112, 113, 113), (0, 251, 0x70, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F, 128, 128, 128, 128, 129, 129), (0, 251, 0x80, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F, 144, 144, 144, 144, 145, 145), (0, 251, 0x90, 145, 145, 145, 145, 146, 146), (0, 251, 0x9F, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0, 161, 161, 161, 161, 162, 162), (0, 251, 0xAF, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0, 177, 177, 177, 177, 178, 178), (0, 251, 0xBF, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0, 193, 193, 193, 193, 194, 194), (0, 251, 0xCF, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0, 209, 209, 209, 209, 210, 210), (0, 251, 0xDF, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0, 225, 225, 225, 225, 226, 226), (0, 251, 0xEF, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8, 249, 249, 249, 249, 250, 250), (0, 251, 0xFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9, 251, 249, 250, 251, 249, 250), (0, 51, 0x70, 51, 50, 51, 51, 50, 51), (150, 251, 0x70, 251, 149, 150, 251, 149, 150), ] testDataColSub8 = [ (0, 251, 0x00, 0, 0, 0, 1, 2, 2), (1, 251, 0x00, 1, 1, 1, 1, 2, 2), (0, 251, 0x01, 2, 2, 2, 2, 3, 3), (0, 251, 0x0F, 16, 16, 16, 16, 17, 17), (0, 251, 0x10, 17, 17, 17, 17, 18, 18), (0, 251, 0x1F, 32, 32, 32, 32, 33, 33), (0, 251, 0x20, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F, 48, 48, 48, 48, 49, 49), (0, 251, 0x30, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F, 64, 64, 64, 64, 65, 65), (0, 251, 0x40, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F, 80, 80, 80, 80, 81, 81), (0, 251, 0x50, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F, 96, 96, 96, 96, 97, 97), (0, 251, 0x60, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F, 112, 112, 112, 112, 113, 113), (0, 251, 0x70, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F, 128, 128, 128, 128, 129, 129), (0, 251, 0x80, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F, 144, 144, 144, 144, 145, 145), (0, 251, 0x90, 145, 145, 145, 145, 146, 146), (0, 251, 0x9F, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0, 161, 161, 161, 161, 162, 162), (0, 251, 0xAF, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0, 177, 177, 177, 177, 178, 178), (0, 251, 0xBF, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0, 193, 193, 193, 193, 194, 194), (0, 251, 0xCF, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0, 209, 209, 209, 209, 210, 210), (0, 251, 0xDF, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0, 225, 225, 225, 225, 226, 226), (0, 251, 0xEF, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8, 249, 249, 249, 249, 250, 250), (0, 251, 0xFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9, 251, 249, 250, 251, 249, 250), (0, 51, 0x70, 51, 50, 51, 51, 50, 51), (150, 251, 0x70, 251, 149, 150, 251, 149, 150), ] testDataCol16 = [ (0, 251, 0x0000, 0, 0, 0, 0, 1, 1), (1, 251, 0x0001, 1, 1, 1, 1, 2, 2), (0, 251, 0x0102, 2, 2, 2, 2, 
3, 3), (0, 251, 0x0F10, 16, 16, 16, 16, 17, 17), (0, 251, 0x1011, 17, 17, 17, 17, 18, 18), (0, 251, 0x1F20, 32, 32, 32, 32, 33, 33), (0, 251, 0x2021, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F30, 48, 48, 48, 48, 49, 49), (0, 251, 0x3031, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F40, 64, 64, 64, 64, 65, 65), (0, 251, 0x4041, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F50, 80, 80, 80, 80, 81, 81), (0, 251, 0x5051, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F60, 96, 96, 96, 96, 97, 97), (0, 251, 0x6061, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F70, 112, 112, 112, 112, 113, 113), (0, 251, 0x7071, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F80, 128, 128, 128, 128, 129, 129), (0, 251, 0x8081, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F90, 144, 144, 144, 144, 145, 145), (0, 251, 0x9091, 145, 145, 145, 145, 146, 146), (0, 251, 0x9FA0, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0A1, 161, 161, 161, 161, 162, 162), (0, 251, 0xAFB0, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0B1, 177, 177, 177, 177, 178, 178), (0, 251, 0xBFC0, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0C1, 193, 193, 193, 193, 194, 194), (0, 251, 0xCFD0, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0D1, 209, 209, 209, 209, 210, 210), (0, 251, 0xDFE0, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0E1, 225, 225, 225, 225, 226, 226), (0, 251, 0xEFF0, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0F1, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8F9, 249, 249, 249, 249, 250, 250), (0, 251, 0xFFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9F9, 251, 249, 250, 251, 249, 250), (0, 51, 0x7071, 51, 50, 51, 51, 50, 51), (150, 251, 0x7071, 251, 149, 150, 251, 149, 150), ] testDataColSub16 = [ (0, 251, 0x0000, 0, 0, 0, 0, 1, 1), (1, 251, 0x0001, 1, 1, 1, 1, 2, 2), (0, 251, 0x0102, 2, 2, 2, 2, 3, 3), (0, 251, 0x0F10, 16, 16, 16, 16, 17, 17), (0, 251, 0x1011, 17, 17, 17, 17, 18, 18), (0, 251, 0x1F20, 32, 32, 32, 32, 33, 33), (0, 251, 0x2021, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F30, 48, 48, 48, 48, 49, 49), (0, 251, 0x3031, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F40, 64, 64, 64, 64, 65, 65), (0, 251, 0x4041, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F50, 80, 80, 80, 80, 81, 81), (0, 251, 0x5051, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F60, 96, 96, 96, 96, 97, 97), (0, 251, 0x6061, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F70, 112, 112, 112, 112, 113, 113), (0, 251, 0x7071, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F80, 128, 128, 128, 128, 129, 129), (0, 251, 0x8081, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F90, 144, 144, 144, 144, 145, 145), (0, 251, 0x9091, 145, 145, 145, 145, 146, 146), (0, 251, 0x9FA0, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0A1, 161, 161, 161, 161, 162, 162), (0, 251, 0xAFB0, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0B1, 177, 177, 177, 177, 178, 178), (0, 251, 0xBFC0, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0C1, 193, 193, 193, 193, 194, 194), (0, 251, 0xCFD0, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0D1, 209, 209, 209, 209, 210, 210), (0, 251, 0xDFE0, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0E1, 225, 225, 225, 225, 226, 226), (0, 251, 0xEFF0, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0F1, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8F9, 249, 249, 249, 249, 250, 250), (0, 251, 0xFFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9F9, 251, 249, 250, 251, 249, 250), (0, 51, 0x7071, 51, 50, 51, 51, 50, 51), (150, 251, 0x7071, 251, 149, 150, 251, 149, 150), ] testDataCol32 = [ (0, 251, 0x00000000, 0, 0, 0, 0, 1, 1), (1, 251, 0x00010203, 1, 1, 1, 1, 2, 2), (0, 251, 0x01020304, 2, 2, 2, 2, 3, 3), (0, 251, 0x0F101112, 16, 16, 16, 16, 17, 17), (0, 251, 0x10111213, 17, 
17, 17, 17, 18, 18), (0, 251, 0x1F202122, 32, 32, 32, 32, 33, 33), (0, 251, 0x20212223, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F303132, 48, 48, 48, 48, 49, 49), (0, 251, 0x30313233, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F404142, 64, 64, 64, 64, 65, 65), (0, 251, 0x40414243, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F505152, 80, 80, 80, 80, 81, 81), (0, 251, 0x50515253, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F606162, 96, 96, 96, 96, 97, 97), (0, 251, 0x60616263, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F707172, 112, 112, 112, 112, 113, 113), (0, 251, 0x70717273, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F808182, 128, 128, 128, 128, 129, 129), (0, 251, 0x80818283, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F909192, 144, 144, 144, 144, 145, 145), (0, 251, 0x90919293, 145, 145, 145, 145, 146, 146), (0, 251, 0x9FA0A1A2, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0A1A2A3, 161, 161, 161, 161, 162, 162), (0, 251, 0xAFB0B1B2, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0B1B2B3, 177, 177, 177, 177, 178, 178), (0, 251, 0xBFC0C1C2, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0C1C2C3, 193, 193, 193, 193, 194, 194), (0, 251, 0xCFD0D1D2, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0D1D2D3, 209, 209, 209, 209, 210, 210), (0, 251, 0xDFE0E1E2, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0E1E2E3, 225, 225, 225, 225, 226, 226), (0, 251, 0xEFF0F1F2, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0F1F2F3, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8F9FAFB, 249, 249, 249, 249, 250, 250), (0, 251, 0xFFFFFFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9F9FAFB, 251, 249, 250, 251, 249, 250), (0, 51, 0x70717273, 51, 50, 51, 51, 50, 51), (150, 251, 0x70717273, 251, 149, 150, 251, 149, 150), ] testDataColSub32 = [ (0, 251, 0x00000000, 0, 0, 0, 0, 1, 1), (1, 251, 0x00000102, 1, 1, 1, 1, 2, 2), (0, 251, 0x00000203, 2, 2, 2, 2, 3, 3), (0, 251, 0x00001011, 16, 16, 16, 16, 17, 17), (0, 251, 0x00001112, 17, 17, 17, 17, 18, 18), (0, 251, 0x00002021, 32, 32, 32, 32, 33, 33), (0, 251, 0x00002122, 33, 33, 33, 33, 34, 34), (0, 251, 0x00003031, 48, 48, 48, 48, 49, 49), (0, 251, 0x00003132, 49, 49, 49, 49, 50, 50), (0, 251, 0x00004041, 64, 64, 64, 64, 65, 65), (0, 251, 0x00004142, 65, 65, 65, 65, 66, 66), (0, 251, 0x00005051, 80, 80, 80, 80, 81, 81), (0, 251, 0x00005152, 81, 81, 81, 81, 82, 82), (0, 251, 0x00006061, 96, 96, 96, 96, 97, 97), (0, 251, 0x00006162, 97, 97, 97, 97, 98, 98), (0, 251, 0x00007071, 112, 112, 112, 112, 113, 113), (0, 251, 0x00007172, 113, 113, 113, 113, 114, 114), (0, 251, 0x00008081, 128, 128, 128, 128, 129, 129), (0, 251, 0x00008182, 129, 129, 129, 129, 130, 130), (0, 251, 0x00009091, 144, 144, 144, 144, 145, 145), (0, 251, 0x00009192, 145, 145, 145, 145, 146, 146), (0, 251, 0x0000A0A1, 160, 160, 160, 160, 161, 161), (0, 251, 0x0000A1A2, 161, 161, 161, 161, 162, 162), (0, 251, 0x0000B0B1, 176, 176, 176, 176, 177, 177), (0, 251, 0x0000B1B2, 177, 177, 177, 177, 178, 178), (0, 251, 0x0000C0C1, 192, 192, 192, 192, 193, 193), (0, 251, 0x0000C1C2, 193, 193, 193, 193, 194, 194), (0, 251, 0x0000D0D1, 208, 208, 208, 208, 209, 209), (0, 251, 0x0000D1D2, 209, 209, 209, 209, 210, 210), (0, 251, 0x0000E0E1, 224, 224, 224, 224, 225, 225), (0, 251, 0x0000E1E2, 225, 225, 225, 225, 226, 226), (0, 251, 0x0000F0F1, 240, 240, 240, 240, 241, 241), (0, 251, 0x0000F1F2, 241, 241, 241, 241, 242, 242), (0, 251, 0x0000F9FA, 249, 249, 249, 249, 250, 250), (0, 251, 0x0000FFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0x0000F9FA, 249, 249, 249, 249, 250, 250), (0, 51, 0x00007172, 51, 50, 51, 51, 50, 51), (150, 251, 0x00007172, 251, 149, 150, 251, 149, 
150), ] testDataCol64 = [ (0, 251, 0x0000000000000000, 0, 0, 0, 0, 1, 1), (1, 251, 0x0001020304050607, 1, 1, 1, 1, 2, 2), (0, 251, 0x0102030405060708, 2, 2, 2, 2, 3, 3), (0, 251, 0x0F10111213141516, 16, 16, 16, 16, 17, 17), (0, 251, 0x1011121314151617, 17, 17, 17, 17, 18, 18), (0, 251, 0x1F20212223242526, 32, 32, 32, 32, 33, 33), (0, 251, 0x2021222324252627, 33, 33, 33, 33, 34, 34), (0, 251, 0x2F30313233343536, 48, 48, 48, 48, 49, 49), (0, 251, 0x3031323334353637, 49, 49, 49, 49, 50, 50), (0, 251, 0x3F40414243444546, 64, 64, 64, 64, 65, 65), (0, 251, 0x4041424344454647, 65, 65, 65, 65, 66, 66), (0, 251, 0x4F50515253545556, 80, 80, 80, 80, 81, 81), (0, 251, 0x5051525354555657, 81, 81, 81, 81, 82, 82), (0, 251, 0x5F60616263646566, 96, 96, 96, 96, 97, 97), (0, 251, 0x6061626364656667, 97, 97, 97, 97, 98, 98), (0, 251, 0x6F70717273747576, 112, 112, 112, 112, 113, 113), (0, 251, 0x7071727374757677, 113, 113, 113, 113, 114, 114), (0, 251, 0x7F80818283848586, 128, 128, 128, 128, 129, 129), (0, 251, 0x8081828384858687, 129, 129, 129, 129, 130, 130), (0, 251, 0x8F90919293949596, 144, 144, 144, 144, 145, 145), (0, 251, 0x9091929394959697, 145, 145, 145, 145, 146, 146), (0, 251, 0x9FA0A1A2A3A4A5A6, 160, 160, 160, 160, 161, 161), (0, 251, 0xA0A1A2A3A4A5A6A7, 161, 161, 161, 161, 162, 162), (0, 251, 0xAFB0B1B2B3B4B5B6, 176, 176, 176, 176, 177, 177), (0, 251, 0xB0B1B2B3B4B5B6B7, 177, 177, 177, 177, 178, 178), (0, 251, 0xBFC0C1C2C3C4C5C6, 192, 192, 192, 192, 193, 193), (0, 251, 0xC0C1C2C3C4C5C6C7, 193, 193, 193, 193, 194, 194), (0, 251, 0xCFD0D1D2D3D4D5D6, 208, 208, 208, 208, 209, 209), (0, 251, 0xD0D1D2D3D4D5D6D7, 209, 209, 209, 209, 210, 210), (0, 251, 0xDFE0E1E2E3E4E5E6, 224, 224, 224, 224, 225, 225), (0, 251, 0xE0E1E2E3E4E5E6E7, 225, 225, 225, 225, 226, 226), (0, 251, 0xEFF0F1F2F3F4F5F6, 240, 240, 240, 240, 241, 241), (0, 251, 0xF0F1F2F3F4F5F6F7, 241, 241, 241, 241, 242, 242), (0, 251, 0xF8F9FAFBFCFDFEFF, 249, 249, 249, 249, 250, 250), (0, 251, 0xFFFFFFFFFFFFFFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0xF9F9FAFBFCFDFEFF, 251, 249, 250, 251, 249, 250), (0, 51, 0x7071727374757677, 51, 50, 51, 51, 50, 51), (150, 251, 0x7071727374757677, 251, 149, 150, 251, 149, 150), ] testDataColSub64 = [ (0, 251, 0x0000000000000000, 0, 0, 0, 0, 1, 1), (1, 251, 0x0000000002030405, 1, 1, 1, 1, 2, 2), (0, 251, 0x0000000003040506, 2, 2, 2, 2, 3, 3), (0, 251, 0x0000000011121314, 16, 16, 16, 16, 17, 17), (0, 251, 0x0000000012131415, 17, 17, 17, 17, 18, 18), (0, 251, 0x0000000021222324, 32, 32, 32, 32, 33, 33), (0, 251, 0x0000000022232425, 33, 33, 33, 33, 34, 34), (0, 251, 0x0000000031323334, 48, 48, 48, 48, 49, 49), (0, 251, 0x0000000032333435, 49, 49, 49, 49, 50, 50), (0, 251, 0x0000000041424344, 64, 64, 64, 64, 65, 65), (0, 251, 0x0000000042434445, 65, 65, 65, 65, 66, 66), (0, 251, 0x0000000051525354, 80, 80, 80, 80, 81, 81), (0, 251, 0x0000000052535455, 81, 81, 81, 81, 82, 82), (0, 251, 0x0000000061626364, 96, 96, 96, 96, 97, 97), (0, 251, 0x0000000062636465, 97, 97, 97, 97, 98, 98), (0, 251, 0x0000000071727374, 112, 112, 112, 112, 113, 113), (0, 251, 0x0000000072737475, 113, 113, 113, 113, 114, 114), (0, 251, 0x0000000081828384, 128, 128, 128, 128, 129, 129), (0, 251, 0x0000000082838485, 129, 129, 129, 129, 130, 130), (0, 251, 0x0000000091929394, 144, 144, 144, 144, 145, 145), (0, 251, 0x0000000092939495, 145, 145, 145, 145, 146, 146), (0, 251, 0x00000000A1A2A3A4, 160, 160, 160, 160, 161, 161), (0, 251, 0x00000000A2A3A4A5, 161, 161, 161, 161, 162, 162), (0, 251, 0x00000000B1B2B3B4, 176, 176, 176, 176, 177, 177), (0, 
251, 0x00000000B2B3B4B5, 177, 177, 177, 177, 178, 178), (0, 251, 0x00000000C1C2C3C4, 192, 192, 192, 192, 193, 193), (0, 251, 0x00000000C2C3C4C5, 193, 193, 193, 193, 194, 194), (0, 251, 0x00000000D1D2D3D4, 208, 208, 208, 208, 209, 209), (0, 251, 0x00000000D2D3D4D5, 209, 209, 209, 209, 210, 210), (0, 251, 0x00000000E1E2E3E4, 224, 224, 224, 224, 225, 225), (0, 251, 0x00000000E2E3E4E5, 225, 225, 225, 225, 226, 226), (0, 251, 0x00000000F1F2F3F4, 240, 240, 240, 240, 241, 241), (0, 251, 0x00000000F2F3F4F5, 241, 241, 241, 241, 242, 242), (0, 251, 0x00000000FAFBFCFD, 249, 249, 249, 249, 250, 250), (0, 251, 0x00000000FFFFFFFF, 250, 250, 250, 250, 251, 251), (0, 251, 0x00000000FAFBFCFD, 249, 249, 249, 249, 250, 250), (0, 51, 0x0000000072737475, 51, 50, 51, 51, 50, 51), (150, 251, 0x0000000072737475, 251, 149, 150, 251, 149, 150), ] class TestFunctions(TestCase): @classmethod def setUpClass(cls): global src, fd, size, doffset, dlength, nrows, ncols, index inputfile = os.path.realpath( os.path.dirname(os.path.realpath(__file__)) + "/../../c/test/data/test_data_col.bin" ) src, fd, size, doffset, dlength, nrows, ncols, index, idx = bs.mmap_binfile( inputfile, [1, 2, 4, 8] ) if fd < 0 or size != 3776: assert False, "Unable to open the file" @classmethod def tearDownClass(cls): global src, fd, size h = bs.munmap_binfile(src, fd, size) if h != 0: assert False, "Error while closing the memory-mapped file" def test_col_find_first_uint8(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol8: rp, rf, rl = bs.col_find_first_uint8(src, index[0], first, last, search) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_uint8(src, index[0], pos, last, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_uint16(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol16: rp, rf, rl = bs.col_find_first_uint16(src, index[1], first, last, search) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_uint16(src, index[1], pos, last, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_uint32(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol32: rp, rf, rl = bs.col_find_first_uint32(src, index[2], first, last, search) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_uint32(src, index[2], pos, last, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_uint64(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol64: rp, rf, rl = bs.col_find_first_uint64(src, index[3], first, last, search) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_uint64(src, index[3], pos, last, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_uint8(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol8: rp, rf, rl = bs.col_find_last_uint8(src, index[0], first, last, search) self.assertEqual(rp, fL) 
self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_uint8(src, index[0], first, pos, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_uint16(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol16: rp, rf, rl = bs.col_find_last_uint16(src, index[1], first, last, search) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_uint16(src, index[1], first, pos, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_uint32(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol32: rp, rf, rl = bs.col_find_last_uint32(src, index[2], first, last, search) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_uint32(src, index[2], first, pos, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_uint64(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataCol64: rp, rf, rl = bs.col_find_last_uint64(src, index[3], first, last, search) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_uint64(src, index[3], first, pos, search) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_sub_uint8(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub8: rp, rf, rl = bs.col_find_first_sub_uint8( src, index[0], 0, 7, first, last, search ) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_sub_uint8( src, index[0], 0, 7, pos, last, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_sub_uint16(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub16: rp, rf, rl = bs.col_find_first_sub_uint16( src, index[1], 0, 15, first, last, search ) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_sub_uint16( src, index[1], 0, 15, pos, last, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_sub_uint32(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub32: rp, rf, rl = bs.col_find_first_sub_uint32( src, index[2], 8, 23, first, last, search ) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_sub_uint32( src, index[2], 8, 23, pos, last, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_first_sub_uint64(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub64: rp, rf, rl = bs.col_find_first_sub_uint64( src, index[3], 16, 47, first, last, search ) self.assertEqual(rp, fF) self.assertEqual(rf, fFF) 
self.assertEqual(rl, fFL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_next_sub_uint64( src, index[3], 16, 47, pos, last, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_sub_uint8(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub8: rp, rf, rl = bs.col_find_last_sub_uint8( src, index[0], 0, 7, first, last, search ) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_sub_uint8( src, index[0], 0, 7, first, pos, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_sub_uint16(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub16: rp, rf, rl = bs.col_find_last_sub_uint16( src, index[1], 0, 15, first, last, search ) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_sub_uint16( src, index[1], 0, 15, first, pos, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_sub_uint32(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub32: rp, rf, rl = bs.col_find_last_sub_uint32( src, index[2], 8, 23, first, last, search ) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_sub_uint32( src, index[2], 8, 23, first, pos, search ) counter = counter + 1 self.assertEqual(counter, numitems) def test_col_find_last_sub_uint64(self): for first, last, search, fF, fFF, fFL, fL, fLF, fLL in testDataColSub64: rp, rf, rl = bs.col_find_last_sub_uint64( src, index[3], 16, 47, first, last, search ) self.assertEqual(rp, fL) self.assertEqual(rf, fLF) self.assertEqual(rl, fLL) numitems = fL - fF + 1 if (rp < last) and (numitems > 0): pos = rp ret = True counter = 0 while ret: ret, pos = bs.col_has_prev_sub_uint64( src, index[3], 16, 47, first, pos, search ) counter = counter + 1 self.assertEqual(counter, numitems) class TestBenchmark(object): global setup def setup(): global src, fd, size, doffset, dlength, nrows, ncols, index if fd >= 0: pass bs.munmap_binfile(src, fd, size) inputfile = os.path.realpath( os.path.dirname(os.path.realpath(__file__)) + "/../../c/test/data/test_data_col.bin" ) src, fd, size, doffset, dlength, nrows, ncols, index, idx = bs.mmap_binfile( inputfile, [1, 2, 4, 8] ) if fd < 0 or size != 3776: assert False, "Unable to open the file" def test_col_find_first_uint8_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_uint8, args=[src, index[0], 0, 251, 0x2F], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_uint16_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_uint16, args=[src, index[1], 0, 251, 0x2F30], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_uint32_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_uint32, args=[src, index[2], 0, 251, 0x2F303132], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_uint64_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_uint64, args=[src, index[3], 0, 251, 0x2F30313233343536], setup=setup, iterations=1, rounds=10000, ) def 
test_col_find_last_uint8_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_uint8, args=[src, index[0], 0, 250, 0x2F], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_uint16_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_uint16, args=[src, index[1], 0, 251, 0x2F30], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_uint32_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_uint32, args=[src, index[2], 0, 251, 0x2F303132], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_uint64_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_uint64, args=[src, index[3], 0, 251, 0x2F30313233343536], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_sub_uint8_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_sub_uint8, args=[src, index[0], 0, 7, 0, 251, 0x2F], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_sub_uint16_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_sub_uint16, args=[src, index[1], 0, 15, 0, 251, 0x2F30], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_sub_uint32_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_sub_uint32, args=[src, index[2], 8, 23, 0, 251, 0x00003031], setup=setup, iterations=1, rounds=10000, ) def test_col_find_first_sub_uint64_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_first_sub_uint64, args=[src, index[3], 16, 47, 0, 251, 0x0000000031323334], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_sub_uint8_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_sub_uint8, args=[src, index[0], 0, 7, 0, 251, 0x2F], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_sub_uint16_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_sub_uint16, args=[src, index[1], 0, 15, 0, 251, 0x2F30], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_sub_uint32_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_sub_uint32, args=[src, index[2], 8, 23, 0, 251, 0x00003031], setup=setup, iterations=1, rounds=10000, ) def test_col_find_last_sub_uint64_benchmark(self, benchmark): benchmark.pedantic( bs.col_find_last_sub_uint64, args=[src, index[3], 16, 47, 0, 251, 0x0000000031323334], setup=setup, iterations=1, rounds=10000, ) def test_col_tearDown(self): global src, fd, size h = bs.munmap_binfile(src, fd, size) fd = -1 size = 0 if h != 0: assert False, "Error while closing the memory-mapped file"
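The col_find_first_*/col_find_last_* pairs exercised above locate the first and last occurrence of a key inside a sorted, memory-mapped column, and the col_has_next_*/col_has_prev_* calls walk the run of duplicates between those two bounds. As a minimal pure-Python sketch of that first/last-occurrence semantics (illustrative only; first_last is a hypothetical helper, not part of the binsearch bindings):

from bisect import bisect_left, bisect_right

def first_last(values, lo, hi, key):
    # First and last index of `key` in the sorted slice values[lo:hi],
    # or (None, None) when the key is absent.
    left = bisect_left(values, key, lo, hi)
    if left == hi or values[left] != key:
        return None, None
    return left, bisect_right(values, key, lo, hi) - 1

# A run of duplicates: the value 5 occupies indices 2..4.
assert first_last([1, 3, 5, 5, 5, 7], 0, 6, 5) == (2, 4)
assert first_last([1, 3, 5, 5, 5, 7], 0, 6, 4) == (None, None)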
41.654457
88
0.525751
4,839
34,115
3.634842
0.078322
0.067087
0.018762
0.02547
0.861561
0.857866
0.850304
0.84206
0.684064
0.679061
0
0.371545
0.325458
34,115
818
89
41.705379
0.392795
0.001202
0
0.605744
0
0
0.005929
0.002113
0
0
0.089233
0
0.088773
1
0.046997
false
0.001305
0.003916
0
0.053525
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4207b50bef78cc2d170b52540c86c61f5b69971b
5,859
py
Python
codewars_kata/tests/test_battleships_field_validator.py
gretkierewicz/gret_code_examples
623dce8fd3319091e3bdea946af7093442abb1bf
[ "MIT" ]
null
null
null
codewars_kata/tests/test_battleships_field_validator.py
gretkierewicz/gret_code_examples
623dce8fd3319091e3bdea946af7093442abb1bf
[ "MIT" ]
null
null
null
codewars_kata/tests/test_battleships_field_validator.py
gretkierewicz/gret_code_examples
623dce8fd3319091e3bdea946af7093442abb1bf
[ "MIT" ]
null
null
null
from ..battleships_field_validator.validator import validate_battlefield


def test_proper_setup():
    assert validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_l_shaped_four_field_ship():
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_wrong_shaped_five_field_ship():
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [1, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_wrong_neighbour_by_diagonal():
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 1, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        ]
    )
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        ]
    )
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        ]
    )
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_five_long_ship():
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_too_many_one_field_ships():
    assert not validate_battlefield(
        [
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )


def test_too_few_one_field_ships():
    assert not validate_battlefield(
        [
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [1, 1, 0, 0, 0, 1, 0, 1, 0, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        ]
    )
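For context, here is a sketch of a validator consistent with these tests: flood-fill each connected group of cells with diagonals included (so diagonal contact merges two ships into one invalid shape), then check the fleet composition. This is an illustration under assumed rules, not the repository's actual validator.

from collections import Counter

def validate_battlefield_sketch(field):
    # Rules assumed: ships are straight 1-4 cell runs; ships may not
    # touch, even diagonally; the fleet is one 4-cell, two 3-cell,
    # three 2-cell, and four 1-cell ships.
    rows, cols = len(field), len(field[0])
    seen = set()
    sizes = []
    for r in range(rows):
        for c in range(cols):
            if field[r][c] != 1 or (r, c) in seen:
                continue
            # Collect the 8-connected component so diagonal contact
            # between two ships shows up as one non-straight shape.
            stack, cells = [(r, c)], []
            seen.add((r, c))
            while stack:
                y, x = stack.pop()
                cells.append((y, x))
                for dy in (-1, 0, 1):
                    for dx in (-1, 0, 1):
                        ny, nx = y + dy, x + dx
                        if (0 <= ny < rows and 0 <= nx < cols
                                and field[ny][nx] == 1
                                and (ny, nx) not in seen):
                            seen.add((ny, nx))
                            stack.append((ny, nx))
            ys = {y for y, _ in cells}
            xs = {x for _, x in cells}
            # A legal ship occupies a single row or a single column.
            if min(len(ys), len(xs)) != 1 or len(cells) > 4:
                return False
            sizes.append(len(cells))
    return Counter(sizes) == Counter({1: 4, 2: 3, 3: 2, 4: 1})

Against the boards above, this rejects the L-shaped ship, the five-cell ship, the diagonal contacts, and both wrong fleet counts, and accepts the proper setup.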
33.101695
72
0.288445
1,194
5,859
1.379397
0.025126
0.864602
1.107468
1.226472
0.913783
0.913783
0.913783
0.913783
0.876746
0.876746
0
0.357375
0.474654
5,859
176
73
33.289773
0.177713
0
0
0.703704
0
0
0
0
0
0
0
0
0.067901
1
0.04321
true
0
0.006173
0
0.049383
0
0
0
1
null
1
1
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
14
425e61075a3524761ec63b315fe6c85f9315e667
42
py
Python
pnerf/utils/__init__.py
clintonjwang/pixel-nerf
a64dd8856e587addab350278d1a5eef5c7084668
[ "BSD-2-Clause" ]
null
null
null
pnerf/utils/__init__.py
clintonjwang/pixel-nerf
a64dd8856e587addab350278d1a5eef5c7084668
[ "BSD-2-Clause" ]
null
null
null
pnerf/utils/__init__.py
clintonjwang/pixel-nerf
a64dd8856e587addab350278d1a5eef5c7084668
[ "BSD-2-Clause" ]
null
null
null
from . import args
# from . import recon
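Because the package __init__ re-exports the submodule, callers can pull args straight off the package. A usage sketch (the package path is assumed from the repository layout):

from pnerf.utils import args  # binds the pnerf.utils.args module object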
14
22
0.690476
6
42
4.833333
0.666667
0.689655
0
0
0
0
0
0
0
0
0
0
0.238095
42
2
23
21
0.90625
0.452381
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
426b6b52ce822df3da1332611377b29194c98124
1,032
py
Python
tests/module/test_arcsecond_root.py
onekiloparsec/arcsecond.python
e4b22bf055c7f089ca9f0d6c4bda6314350878e0
[ "MIT" ]
7
2018-08-29T15:31:25.000Z
2022-01-08T14:08:39.000Z
tests/module/test_arcsecond_root.py
onekiloparsec/arcsecond-python
e4b22bf055c7f089ca9f0d6c4bda6314350878e0
[ "MIT" ]
2
2018-10-21T07:42:26.000Z
2020-02-24T10:11:22.000Z
tests/module/test_arcsecond_root.py
onekiloparsec/arcsecond-python
e4b22bf055c7f089ca9f0d6c4bda6314350878e0
[ "MIT" ]
null
null
null
from arcsecond import ArcsecondAPI
from tests.utils import save_test_credentials, clear_test_credentials


def test_default_empty_state():
    clear_test_credentials()
    assert ArcsecondAPI.is_logged_in(debug=True, test=True) is False
    assert ArcsecondAPI.username(debug=True, test=True) == ''
    assert ArcsecondAPI.memberships(debug=True, test=True) == {}


def test_default_logged_in_state():
    clear_test_credentials()
    save_test_credentials('cedric')
    assert ArcsecondAPI.is_logged_in(debug=True, test=True) is True
    assert ArcsecondAPI.username(debug=True, test=True) == 'cedric'
    assert ArcsecondAPI.memberships(debug=True, test=True) == {}


def test_default_logged_in_with_membership_state():
    clear_test_credentials()
    save_test_credentials('cedric', {'saao': 'superadmin'})
    assert ArcsecondAPI.is_logged_in(debug=True, test=True) is True
    assert ArcsecondAPI.username(debug=True, test=True) == 'cedric'
    assert ArcsecondAPI.memberships(debug=True, test=True) == {'saao': 'superadmin'}
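All three tests repeat the same clear-then-save setup; one way to factor it out is a pytest fixture built on the same helpers. A sketch, not code from the repository:

import pytest
from tests.utils import save_test_credentials, clear_test_credentials

@pytest.fixture
def logged_in_user():
    # Start from a clean slate, log a test user in, and always clean up.
    clear_test_credentials()
    save_test_credentials('cedric')
    yield 'cedric'
    clear_test_credentials()

A test can then accept logged_in_user as a parameter instead of calling the helpers inline.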
39.692308
84
0.761628
131
1,032
5.748092
0.21374
0.215139
0.155378
0.203187
0.759628
0.759628
0.759628
0.702523
0.569721
0.569721
0
0
0.125969
1,032
25
85
41.28
0.834812
0
0
0.473684
0
0
0.050388
0
0
0
0
0
0.473684
1
0.157895
true
0
0.105263
0
0.263158
0
0
0
0
null
1
0
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
7
4296f76d741dbc0dd9f3e720be63d66c77442190
55,758
py
Python
tests/components/manual_mqtt/test_alarm_control_panel.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
tests/components/manual_mqtt/test_alarm_control_panel.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
24,710
2016-04-13T08:27:26.000Z
2020-03-02T12:59:13.000Z
tests/components/manual_mqtt/test_alarm_control_panel.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""The tests for the manual_mqtt Alarm Control Panel component.""" from datetime import timedelta from unittest.mock import patch from freezegun import freeze_time from homeassistant.components import alarm_control_panel from homeassistant.const import ( STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, ) from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( assert_setup_component, async_fire_mqtt_message, async_fire_time_changed, ) from tests.components.alarm_control_panel import common CODE = "HELLO_CODE" async def test_fail_setup_without_state_topic(hass, mqtt_mock_entry_with_yaml_config): """Test for failing with no state topic.""" with assert_setup_component(0, alarm_control_panel.DOMAIN) as config: assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "mqtt_alarm", "command_topic": "alarm/command", } }, ) assert not config[alarm_control_panel.DOMAIN] async def test_fail_setup_without_command_topic(hass, mqtt_mock_entry_with_yaml_config): """Test failing with no command topic.""" with assert_setup_component(0, alarm_control_panel.DOMAIN): assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "mqtt_alarm", "state_topic": "alarm/state", } }, ) async def test_arm_home_no_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME async def test_arm_home_no_pending_when_code_not_req( hass, mqtt_mock_entry_with_yaml_config ): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "code_arm_required": False, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, 0) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME async def test_arm_home_with_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, CODE, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) assert 
state.attributes["post_pending_state"] == STATE_ALARM_ARMED_HOME future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME async def test_arm_home_with_invalid_code(hass, mqtt_mock_entry_with_yaml_config): """Attempt to arm home without a valid code.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, f"{CODE}2") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_arm_away_no_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_arm_away_no_pending_when_code_not_req( hass, mqtt_mock_entry_with_yaml_config ): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code_arm_required": False, "code": CODE, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, 0, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_arm_home_with_template_code(hass, mqtt_mock_entry_with_yaml_config): """Attempt to arm with a template-based code.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code_template": '{{ "abc" }}', "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, "abc") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_HOME async def test_arm_away_with_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": 
"alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_arm_away_with_invalid_code(hass, mqtt_mock_entry_with_yaml_config): """Attempt to arm away without a valid code.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, f"{CODE}2") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_arm_night_no_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm night method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_night(hass, CODE, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT async def test_arm_night_no_pending_when_code_not_req( hass, mqtt_mock_entry_with_yaml_config ): """Test arm night method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code_arm_required": False, "code": CODE, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_night(hass, 0, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT async def test_arm_night_with_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm night method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_night(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) assert 
state.attributes["post_pending_state"] == STATE_ALARM_ARMED_NIGHT future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT # Do not go to the pending state when updating to the same state await common.async_alarm_arm_night(hass, CODE, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT async def test_arm_night_with_invalid_code(hass, mqtt_mock_entry_with_yaml_config): """Attempt to arm night without a valid code.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_night(hass, f"{CODE}2") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_no_pending(hass, mqtt_mock_entry_with_yaml_config): """Test triggering when no pending submitted method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 1, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=60) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED async def test_trigger_with_delay(hass, mqtt_mock_entry_with_yaml_config): """Test trigger method and switch from pending to triggered.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "delay_time": 1, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED 
async def test_trigger_zero_trigger_time(hass, mqtt_mock_entry_with_yaml_config): """Test disabled trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 0, "trigger_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_zero_trigger_time_with_pending( hass, mqtt_mock_entry_with_yaml_config ): """Test disabled trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 2, "trigger_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 2, "trigger_time": 3, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING state = hass.states.get(entity_id) assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=2) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_disarm_after_trigger( hass, mqtt_mock_entry_with_yaml_config ): """Test disarm after trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 5, "pending_time": 0, "disarm_after_trigger": True, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), 
return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_zero_specific_trigger_time( hass, mqtt_mock_entry_with_yaml_config ): """Test trigger method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 5, "disarmed": {"trigger_time": 0}, "pending_time": 0, "disarm_after_trigger": True, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_unused_zero_specific_trigger_time( hass, mqtt_mock_entry_with_yaml_config ): """Test disarm after trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 5, "armed_home": {"trigger_time": 0}, "pending_time": 0, "disarm_after_trigger": True, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_trigger_with_specific_trigger_time( hass, mqtt_mock_entry_with_yaml_config ): """Test disarm after trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "disarmed": {"trigger_time": 5}, "pending_time": 0, "disarm_after_trigger": True, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_back_to_back_trigger_with_no_disarm_after_trigger( hass, mqtt_mock_entry_with_yaml_config ): """Test no disarm after back to back trigger.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 5, "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await 
common.async_alarm_arm_away(hass, CODE, entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_disarm_while_pending_trigger(hass, mqtt_mock_entry_with_yaml_config): """Test disarming while pending state.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "trigger_time": 5, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_disarm_during_trigger_with_invalid_code( hass, mqtt_mock_entry_with_yaml_config ): """Test disarming while code is invalid.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 5, "code": f"{CODE}2", "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING await common.async_alarm_disarm(hass, entity_id=entity_id) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED async def test_trigger_with_unused_specific_delay( hass, mqtt_mock_entry_with_yaml_config ): """Test trigger method and switch from pending to triggered.""" assert await async_setup_component( hass, 
alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "delay_time": 5, "pending_time": 0, "armed_home": {"delay_time": 10}, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_specific_delay(hass, mqtt_mock_entry_with_yaml_config): """Test trigger method and switch from pending to triggered.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "delay_time": 10, "pending_time": 0, "armed_away": {"delay_time": 1}, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_delay(hass, mqtt_mock_entry_with_yaml_config): """Test trigger method and switch from pending to triggered.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "delay_time": 1, "pending_time": 0, "triggered": {"pending_time": 1}, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( 
("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED async def test_trigger_with_pending_and_specific_delay( hass, mqtt_mock_entry_with_yaml_config ): """Test trigger method and switch from pending to triggered.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "delay_time": 10, "pending_time": 0, "armed_away": {"delay_time": 1}, "triggered": {"pending_time": 1}, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED async def test_armed_home_with_specific_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 10, "armed_home": {"pending_time": 2}, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" await common.async_alarm_arm_home(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME async def test_armed_away_with_specific_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 10, "armed_away": {"pending_time": 2}, 
"command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" await common.async_alarm_arm_away(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_armed_night_with_specific_pending( hass, mqtt_mock_entry_with_yaml_config ): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 10, "armed_night": {"pending_time": 2}, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" await common.async_alarm_arm_night(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT async def test_trigger_with_specific_pending(hass, mqtt_mock_entry_with_yaml_config): """Test arm home method.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "pending_time": 10, "triggered": {"pending_time": 2}, "trigger_time": 3, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING future = dt_util.utcnow() + timedelta(seconds=2) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED future = dt_util.utcnow() + timedelta(seconds=5) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_arm_away_after_disabled_disarmed(hass, mqtt_mock_entry_with_yaml_config): """Test pending state with and without zero trigger time.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code": CODE, "pending_time": 0, "delay_time": 1, "armed_away": {"pending_time": 1}, "disarmed": {"trigger_time": 0}, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_away(hass, CODE) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert 
state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["pre_pending_state"] == STATE_ALARM_DISARMED assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY future = dt_util.utcnow() + timedelta(seconds=1) with freeze_time(future): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_AWAY await common.async_alarm_trigger(hass, entity_id=entity_id) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_PENDING assert state.attributes["pre_pending_state"] == STATE_ALARM_ARMED_AWAY assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED future += timedelta(seconds=1) with freeze_time(future): async_fire_time_changed(hass, future) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_TRIGGERED async def test_disarm_with_template_code(hass, mqtt_mock_entry_with_yaml_config): """Attempt to disarm with a valid or invalid template-based code.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { "alarm_control_panel": { "platform": "manual_mqtt", "name": "test", "code_template": '{{ "" if from_state == "disarmed" else "abc" }}', "pending_time": 0, "disarm_after_trigger": False, "command_topic": "alarm/command", "state_topic": "alarm/state", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_arm_home(hass, "def") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_HOME await common.async_alarm_disarm(hass, "def") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_HOME await common.async_alarm_disarm(hass, "abc") await hass.async_block_till_done() state = hass.states.get(entity_id) assert state.state == STATE_ALARM_DISARMED async def test_arm_home_via_command_topic(hass, mqtt_mock_entry_with_yaml_config): """Test arming home via command topic.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "manual_mqtt", "name": "test", "pending_time": 1, "state_topic": "alarm/state", "command_topic": "alarm/command", "payload_arm_home": "ARM_HOME", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # Fire the arm command via MQTT; ensure state changes to pending async_fire_mqtt_message(hass, "alarm/command", "ARM_HOME") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME async def test_arm_away_via_command_topic(hass, mqtt_mock_entry_with_yaml_config): """Test arming away via command topic.""" assert await async_setup_component( hass, 
alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "manual_mqtt", "name": "test", "pending_time": 1, "state_topic": "alarm/state", "command_topic": "alarm/command", "payload_arm_away": "ARM_AWAY", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # Fire the arm command via MQTT; ensure state changes to pending async_fire_mqtt_message(hass, "alarm/command", "ARM_AWAY") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY async def test_arm_night_via_command_topic(hass, mqtt_mock_entry_with_yaml_config): """Test arming night via command topic.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "manual_mqtt", "name": "test", "pending_time": 1, "state_topic": "alarm/state", "command_topic": "alarm/command", "payload_arm_night": "ARM_NIGHT", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # Fire the arm command via MQTT; ensure state changes to pending async_fire_mqtt_message(hass, "alarm/command", "ARM_NIGHT") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_NIGHT async def test_disarm_pending_via_command_topic(hass, mqtt_mock_entry_with_yaml_config): """Test disarming pending alarm via command topic.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "manual_mqtt", "name": "test", "pending_time": 1, "state_topic": "alarm/state", "command_topic": "alarm/command", "payload_disarm": "DISARM", } }, ) await hass.async_block_till_done() entity_id = "alarm_control_panel.test" assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED await common.async_alarm_trigger(hass) await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_PENDING # Now that we're pending, receive a command to disarm async_fire_mqtt_message(hass, "alarm/command", "DISARM") await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED async def test_state_changes_are_published_to_mqtt( hass, mqtt_mock_entry_with_yaml_config ): """Test publishing of MQTT messages when state changes.""" assert await async_setup_component( hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { "platform": "manual_mqtt", "name": "test", "pending_time": 1, "trigger_time": 1, "state_topic": "alarm/state", "command_topic": "alarm/command", } }, ) await hass.async_block_till_done() # Component should send disarmed alarm state on startup await hass.async_block_till_done() mqtt_mock = await mqtt_mock_entry_with_yaml_config() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", 
STATE_ALARM_DISARMED, 0, True ) mqtt_mock.async_publish.reset_mock() # Arm in home mode await common.async_alarm_arm_home(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_ARMED_HOME, 0, True ) mqtt_mock.async_publish.reset_mock() # Arm in away mode await common.async_alarm_arm_away(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True ) mqtt_mock.async_publish.reset_mock() # Arm in night mode await common.async_alarm_arm_night(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_PENDING, 0, True ) mqtt_mock.async_publish.reset_mock() # Fast-forward a little bit future = dt_util.utcnow() + timedelta(seconds=1) with patch( ("homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow"), return_value=future, ): async_fire_time_changed(hass, future) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True ) mqtt_mock.async_publish.reset_mock() # Disarm await common.async_alarm_disarm(hass) await hass.async_block_till_done() mqtt_mock.async_publish.assert_called_once_with( "alarm/state", STATE_ALARM_DISARMED, 0, True )
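
The tests above repeat one fast-forward idiom: patch the platform's clock so it reports a future time, then fire a time-changed event so any pending-state timer expires. As a minimal sketch (not part of the original test file), assuming the same names the tests already have in scope (hass, dt_util, async_fire_time_changed), the idiom could be factored into a single helper:

    from datetime import timedelta
    from unittest.mock import patch

    async def fast_forward(hass, seconds):
        # Pretend `seconds` have elapsed for the manual_mqtt platform, then
        # deliver a time-changed event so its pending/trigger timers fire.
        future = dt_util.utcnow() + timedelta(seconds=seconds)
        with patch(
            "homeassistant.components.manual_mqtt.alarm_control_panel.dt_util.utcnow",
            return_value=future,
        ):
            async_fire_time_changed(hass, future)
            await hass.async_block_till_done()

Each patch/async_fire_time_changed pair above would then collapse to, e.g., `await fast_forward(hass, 2)`.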
32.174264
88
0.641397
6,665
55,758
4.984696
0.022656
0.0484
0.081871
0.07549
0.958101
0.955964
0.950516
0.947777
0.941848
0.940162
0
0.003076
0.25955
55,758
1,732
89
32.192841
0.801628
0.011371
0
0.78534
0
0
0.174085
0.05874
0
0
0
0
0.142109
1
0
false
0
0.006731
0
0.006731
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
67288a847fff08a3e01f2fa542d72b183eb7ae50
4,428
py
Python
test/unit/test_citrusd_data_shims.py
citruscoin/Sentinel
85470f79ce0f49a05794df6ad7528dbf6825dc26
[ "MIT" ]
null
null
null
test/unit/test_citrusd_data_shims.py
citruscoin/Sentinel
85470f79ce0f49a05794df6ad7528dbf6825dc26
[ "MIT" ]
null
null
null
test/unit/test_citrusd_data_shims.py
citruscoin/Sentinel
85470f79ce0f49a05794df6ad7528dbf6825dc26
[ "MIT" ]
null
null
null
import pytest
import sys
import os
os.environ['SENTINEL_CONFIG'] = os.path.normpath(os.path.join(os.path.dirname(__file__), '../test_sentinel.conf'))
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../../lib')))
import citruslib


@pytest.fixture
def sentinel_proposal_hex():
    return '5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313032323830302c20226e616d65223a2022626565722d7265696d62757273656d656e742d37222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a20372e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202275726c223a202268747470733a2f2f6461736863656e7472616c2e636f6d2f626565722d7265696d62757273656d656e742d37227d5d'


@pytest.fixture
def sentinel_superblock_hex():
    return '5b227375706572626c6f636b222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33227d5d'


@pytest.fixture
def citrusd_proposal_hex():
    return '5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313336383430302c20226e616d65223a2022626565722d7265696d62757273656d656e742d39222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202274797065223a20312c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f702f626565722d7265696d62757273656d656e742d39227d5d5d'


@pytest.fixture
def citrusd_superblock_hex():
    return '5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d5d5d'


# ========================================================================


def test_SHIM_deserialise_from_citrusd(citrusd_proposal_hex, citrusd_superblock_hex):
    assert citruslib.SHIM_deserialise_from_citrusd(citrusd_proposal_hex) == '5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313336383430302c20226e616d65223a2022626565722d7265696d62757273656d656e742d39222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a2034392e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202275726c223a202268747470733a2f2f7777772e6461736863656e7472616c2e6f72672f702f626565722d7265696d62757273656d656e742d39227d5d'
    assert citruslib.SHIM_deserialise_from_citrusd(citrusd_superblock_hex) == '5b227375706572626c6f636b222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33227d5d'


def test_SHIM_serialise_for_citrusd(sentinel_proposal_hex, sentinel_superblock_hex):
    assert citruslib.SHIM_serialise_for_citrusd(sentinel_proposal_hex) == '5b5b2270726f706f73616c222c207b22656e645f65706f6368223a20313439313032323830302c20226e616d65223a2022626565722d7265696d62757273656d656e742d37222c20227061796d656e745f61646472657373223a2022795965384b77796155753559737753596d4233713372797838585455753979375569222c20227061796d656e745f616d6f756e74223a20372e30303030303030302c202273746172745f65706f6368223a20313438333235303430302c202274797065223a20312c202275726c223a202268747470733a2f2f6461736863656e7472616c2e636f6d2f626565722d7265696d62757273656d656e742d37227d5d5d'
    assert citruslib.SHIM_serialise_for_citrusd(sentinel_superblock_hex) == '5b5b2274726967676572222c207b226576656e745f626c6f636b5f686569676874223a2036323530302c20227061796d656e745f616464726573736573223a2022795965384b77796155753559737753596d42337133727978385854557539793755697c795443363268755234595145506e39414a486a6e517878726548536267416f617456222c20227061796d656e745f616d6f756e7473223a2022357c33222c202274797065223a20327d5d5d'
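
All of the fixture and expected values above are hex-encoded JSON ('5b' is '[', so sentinel payloads begin with '[' and citrusd payloads with '[['); judging from the expected strings, the citrusd form wraps the payload in an extra outer list and carries an added integer "type" field (1 for proposals, 2 for triggers/superblocks). A small standalone sketch (not part of the test file; standard library only, independent of citruslib) for making the fixtures human-readable:

    import json

    def decode_hex_payload(hex_str):
        # Hex -> bytes -> JSON; e.g. the sentinel proposal fixture decodes to
        # ["proposal", {"end_epoch": ..., "name": "beer-reimbursement-7", ...}]
        return json.loads(bytes.fromhex(hex_str).decode('utf-8'))

Decoding an expected value next to its input fixture shows exactly the nesting/"type" difference the SHIM_* assertions encode.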
113.538462
582
0.934508
135
4,428
30.251852
0.266667
0.008815
0.015671
0.019099
0.098923
0.092556
0.092556
0.020078
0.020078
0.020078
0
0.686243
0.023261
4,428
38
583
116.526316
0.258035
0.01626
0
0.166667
0
0
0.78204
0.776527
0
1
0
0
0.166667
1
0.25
false
0
0.166667
0.166667
0.583333
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
0
0
0
0
1
1
null
1
0
0
0
0
1
0
0
0
1
1
0
0
10
672c713e78d11a599103e0e2d3ba17d84d048061
72,998
py
Python
sdk/NewsSearch/news_search_client/operations/_news_operations.py
WMRamadan/bing-search-sdk-for-python
276d9cd6963c939081b3dec91bdd9aded42b3b35
[ "MIT" ]
12
2021-03-11T20:24:12.000Z
2022-02-10T22:55:03.000Z
sdk/NewsSearch/news_search_client/operations/_news_operations.py
WMRamadan/bing-search-sdk-for-python
276d9cd6963c939081b3dec91bdd9aded42b3b35
[ "MIT" ]
null
null
null
sdk/NewsSearch/news_search_client/operations/_news_operations.py
WMRamadan/bing-search-sdk-for-python
276d9cd6963c939081b3dec91bdd9aded42b3b35
[ "MIT" ]
10
2021-03-09T17:02:48.000Z
2022-02-12T18:40:01.000Z
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6320, generator: {generator})
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse

from .. import models as _models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union

    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]


class NewsOperations(object):
    """NewsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~news_search_client.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def search(
        self,
        query,  # type: str
        x_bing_apis_sdk=True,  # type: Union[str, "_models.XBingApisSDK"]
        accept=None,  # type: Optional[str]
        accept_language=None,  # type: Optional[str]
        user_agent_parameter=None,  # type: Optional[str]
        client_id=None,  # type: Optional[str]
        client_ip=None,  # type: Optional[str]
        location=None,  # type: Optional[str]
        country_code=None,  # type: Optional[str]
        count=None,  # type: Optional[int]
        freshness=None,  # type: Optional[Union[str, "_models.Freshness"]]
        market=None,  # type: Optional[str]
        offset=None,  # type: Optional[int]
        original_image=None,  # type: Optional[bool]
        safe_search=None,  # type: Optional[Union[str, "_models.SafeSearch"]]
        set_lang=None,  # type: Optional[str]
        sort_by=None,  # type: Optional[str]
        text_decorations=None,  # type: Optional[bool]
        text_format=None,  # type: Optional[Union[str, "_models.TextFormat"]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.News"
        """The News Search API lets you send a search query to Bing and get back a list of news that are relevant to the search query. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        The News Search API lets you send a search query to Bing and get back a list of news that are relevant to the search query. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        :param x_bing_apis_sdk: Activate swagger compliance.
:type x_bing_apis_sdk: str or ~news_search_client.models.XBingApisSDK :param query: The user's search query string. The query string cannot be empty. The query string may contain `Bing Advanced Operators <http://msdn.microsoft.com/library/ff795620.aspx>`_. For example, to limit news to a specific domain, use the `site: <http://msdn.microsoft.com/library/ff795613.aspx>`_ operator. Use this parameter only with the News Search API. Do not specify this parameter when calling the Trending Topics API or News Category API. :type query: str :param accept: The default media type is application/json. To specify that the response use `JSON-LD <http://json-ld.org/>`_\ , set the Accept header to application/ld+json. :type accept: str :param accept_language: A comma-delimited list of one or more languages to use for user interface strings. The list is in decreasing order of preference. For additional information, including expected format, see `RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. This header and the `setLang <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive; do not specify both. If you set this header, you must also specify the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter. To determine the market to return results for, Bing uses the first supported language it finds from the list and combines it with the cc parameter value. If the list does not include a supported language, Bing finds the closest language and market that supports the request or it uses an aggregated or default market for the results. To determine the market that Bing used, see the BingAPIs-Market header. Use this header and the cc query parameter only if you specify multiple languages. Otherwise, use the `mkt <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ and `setLang <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ query parameters. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Any links to Bing.com properties in the response objects apply the specified language. :type accept_language: str :param user_agent_parameter: The user agent originating the request. Bing uses the user agent to provide mobile users with an optimized experience. Although optional, you are encouraged to always specify this header. The user-agent should be the same string that any commonly used browser sends. For information about user agents, see `RFC 2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. The following are examples of user- agent strings. Windows Phone: Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 822). Android: Mozilla / 5.0 (Linux; U; Android 2.3.5; en - us; SCH - I500 Build / GINGERBREAD) AppleWebKit / 533.1 (KHTML; like Gecko) Version / 4.0 Mobile Safari / 533.1. iPhone: Mozilla / 5.0 (iPhone; CPU iPhone OS 6_1 like Mac OS X) AppleWebKit / 536.26 (KHTML; like Gecko) Mobile / 10B142 iPhone4; 1 BingWeb / 3.03.1428.20120423. PC: Mozilla / 5.0 (Windows NT 6.3; WOW64; Trident / 7.0; Touch; rv:11.0) like Gecko. iPad: Mozilla / 5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit / 537.51.1 (KHTML, like Gecko) Version / 7.0 Mobile / 11A465 Safari / 9537.53. :type user_agent_parameter: str :param client_id: Bing uses this header to provide users with consistent behavior across Bing API calls. 
Bing often flights new features and improvements, and it uses the client ID as a key for assigning traffic on different flights. If you do not use the same client ID for a user across multiple requests, then Bing may assign the user to multiple conflicting flights. Being assigned to multiple conflicting flights can lead to an inconsistent user experience. For example, if the second request has a different flight assignment than the first, the experience may be unexpected. Also, Bing can use the client ID to tailor web results to that client ID’s search history, providing a richer experience for the user. Bing also uses this header to help improve result rankings by analyzing the activity generated by a client ID. The relevance improvements help with better quality of results delivered by Bing APIs and in turn enables higher click-through rates for the API consumer. IMPORTANT: Although optional, you should consider this header required. Persisting the client ID across multiple requests for the same end user and device combination enables 1) the API consumer to receive a consistent user experience, and 2) higher click-through rates via better quality of results from the Bing APIs. Each user that uses your application on the device must have a unique, Bing generated client ID. If you do not include this header in the request, Bing generates an ID and returns it in the X-MSEdge-ClientID response header. The only time that you should NOT include this header in a request is the first time the user uses your app on that device. Use the client ID for each Bing API request that your app makes for this user on the device. Persist the client ID. To persist the ID in a browser app, use a persistent HTTP cookie to ensure the ID is used across all sessions. Do not use a session cookie. For other apps such as mobile apps, use the device's persistent storage to persist the ID. The next time the user uses your app on that device, get the client ID that you persisted. Bing responses may or may not include this header. If the response includes this header, capture the client ID and use it for all subsequent Bing requests for the user on that device. If you include the X-MSEdge-ClientID, you must not include cookies in the request. :type client_id: str :param client_ip: The IPv4 or IPv6 address of the client device. The IP address is used to discover the user's location. Bing uses the location information to determine safe search behavior. Although optional, you are encouraged to always specify this header and the X-Search- Location header. Do not obfuscate the address (for example, by changing the last octet to 0). Obfuscating the address results in the location not being anywhere near the device's actual location, which may result in Bing serving erroneous results. :type client_ip: str :param location: A semicolon-delimited list of key/value pairs that describe the client's geographical location. Bing uses the location information to determine safe search behavior and to return relevant local content. Specify the key/value pair as :code:`<key>`::code:`<value>`. The following are the keys that you use to specify the user's location. lat (required): The latitude of the client's location, in degrees. The latitude must be greater than or equal to -90.0 and less than or equal to +90.0. Negative values indicate southern latitudes and positive values indicate northern latitudes. long (required): The longitude of the client's location, in degrees. 
The longitude must be greater than or equal to -180.0 and less than or equal to +180.0. Negative values indicate western longitudes and positive values indicate eastern longitudes. re (required): The radius, in meters, which specifies the horizontal accuracy of the coordinates. Pass the value returned by the device's location service. Typical values might be 22m for GPS/Wi-Fi, 380m for cell tower triangulation, and 18,000m for reverse IP lookup. ts (optional): The UTC UNIX timestamp of when the client was at the location. (The UNIX timestamp is the number of seconds since January 1, 1970.) head (optional): The client's relative heading or direction of travel. Specify the direction of travel as degrees from 0 through 360, counting clockwise relative to true north. Specify this key only if the sp key is nonzero. sp (optional): The horizontal velocity (speed), in meters per second, that the client device is traveling. alt (optional): The altitude of the client device, in meters. are (optional): The radius, in meters, that specifies the vertical accuracy of the coordinates. Specify this key only if you specify the alt key. Although many of the keys are optional, the more information that you provide, the more accurate the location results are. Although optional, you are encouraged to always specify the user's geographical location. Providing the location is especially important if the client's IP address does not accurately reflect the user's physical location (for example, if the client uses VPN). For optimal results, you should include this header and the X-MSEdge-ClientIP header, but at a minimum, you should include this header. :type location: str :param country_code: A 2-character country code of the country where the results come from. This API supports only the United States market. If you specify this query parameter, it must be set to us. If you set this parameter, you must also specify the Accept-Language header. Bing uses the first supported language it finds from the languages list, and combine that language with the country code that you specify to determine the market to return results for. If the languages list does not include a supported language, Bing finds the closest language and market that supports the request, or it may use an aggregated or default market for the results instead of a specified one. You should use this query parameter and the Accept-Language query parameter only if you specify multiple languages; otherwise, you should use the mkt and setLang query parameters. This parameter and the mkt query parameter are mutually exclusive—do not specify both. :type country_code: str :param count: The number of news articles to return in the response. The actual number delivered may be less than requested. The default is 10 and the maximum value is 100. The actual number delivered may be less than requested.You may use this parameter along with the offset parameter to page results. For example, if your user interface displays 20 articles per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. :type count: int :param freshness: Filter news by the date and time that Bing discovered the news. The following are the possible filter values. Day: Return news discovered within the last 24 hours. Week: Return news discovered within the last 7 days. Month: Return news discovered within the last 30 days. 
Use this parameter only with the News Search API. Do not specify this parameter when calling the News Category API or the Trending Topics API. :type freshness: str or ~news_search_client.models.Freshness :param market: The market where the results come from. Typically, mkt is the country where the user is making the request from. However, it could be a different country if the user is not located in a country where Bing delivers results. The market must be in the form :code:`<language code>`-:code:`<country code>`. For example, en-US. The string is case insensitive. For a list of possible market values, see `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. NOTE: If known, you are encouraged to always specify the market. Specifying the market helps Bing route the request and return an appropriate and optimal response. If you specify a market that is not listed in `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_\ , Bing uses a best fit market code based on an internal mapping that is subject to change. This parameter and the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive—do not specify both. :type market: str :param offset: The zero-based offset that indicates the number of news to skip before returning news. The default is 0. The offset should be less than (\ `totalEstimatedMatches <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ - count). Use this parameter along with the count parameter to page results. For example, if your user interface displays 20 news per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. :type offset: int :param original_image: A Boolean value that determines whether the image's contentUrl contains a URL that points to a thumbnail of the original article's image or the image itself. If the article includes an image, and this parameter is set to true, the image's contentUrl property contains a URL that you may use to download the original image from the publisher's website. Otherwise, if this parameter is false, the image's contentUrl and thumbnailUrl URLs both point to the same thumbnail image. Use this parameter only with the News Search API. Do not specify this parameter when calling the Trending Topics API or News Category API. :type original_image: bool :param safe_search: Filter news for adult content. The following are the possible filter values. Off: Return news articles with adult text, images, or videos. Moderate: Return news articles with adult text but not adult images or videos. Strict: Do not return news articles with adult text, images, or videos. If the request comes from a market that Bing's adult policy requires that safeSearch is set to Strict, Bing ignores the safeSearch value and uses Strict. If you use the site: query operator, there is the chance that the response may contain adult content regardless of what the safeSearch query parameter is set to. Use site: only if you are aware of the content on the site and your scenario supports the possibility of adult content. :type safe_search: str or ~news_search_client.models.SafeSearch :param set_lang: The language to use for user interface strings. Specify the language using the ISO 639-1 2-letter language code. For example, the language code for English is EN. The default is EN (English). 
Although optional, you should always specify the language. Typically, you set setLang to the same language specified by mkt unless the user wants the user interface strings displayed in a different language. This parameter and the `Accept-Language <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ header are mutually exclusive; do not specify both. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Also, any links to Bing.com properties in the response objects apply the specified language. :type set_lang: str :param sort_by: The order to return the news in. The following are the possible case- insensitive values. Date: If the request is through the News Search API, the response returns news articles sorted by date from the most recent to the oldest. If the request is through the News Trending Topics API, the response returns trending topics sorted by date from the most recent to the oldest. :type sort_by: str :param text_decorations: A Boolean value that determines whether display strings contain decoration markers such as hit highlighting characters. If true, the strings may include markers. The default is false. To specify whether to use Unicode characters or HTML tags as the markers, see the `textFormat <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ query parameter. For information about hit highlighting, see `Hit Highlighting <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. :type text_decorations: bool :param text_format: The type of markers to use for text decorations (see the textDecorations query parameter). Possible values are Raw—Use Unicode characters to mark content that needs special formatting. The Unicode characters are in the range E000 through E019. For example, Bing uses E000 and E001 to mark the beginning and end of query terms for hit highlighting. HTML—Use HTML tags to mark content that needs special formatting. For example, use :code:`<b>` tags to highlight query terms in display strings. The default is Raw. For display strings that contain escapable HTML characters such as <, >, and &, if textFormat is set to HTML, Bing escapes the characters as appropriate (for example, < is escaped to &lt;). 
        :type text_format: str or ~news_search_client.models.TextFormat
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: News, or the result of cls(response)
        :rtype: ~news_search_client.models.News
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.News"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Construct URL
        url = self.search.metadata['url']  # type: ignore

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if country_code is not None:
            query_parameters['cc'] = self._serialize.query("country_code", country_code, 'str')
        if count is not None:
            query_parameters['count'] = self._serialize.query("count", count, 'int')
        if freshness is not None:
            query_parameters['freshness'] = self._serialize.query("freshness", freshness, 'str')
        if market is not None:
            query_parameters['mkt'] = self._serialize.query("market", market, 'str')
        if offset is not None:
            query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
        if original_image is not None:
            query_parameters['originalImg'] = self._serialize.query("original_image", original_image, 'bool')
        query_parameters['q'] = self._serialize.query("query", query, 'str')
        if safe_search is not None:
            query_parameters['safeSearch'] = self._serialize.query("safe_search", safe_search, 'str')
        if set_lang is not None:
            query_parameters['setLang'] = self._serialize.query("set_lang", set_lang, 'str')
        if sort_by is not None:
            query_parameters['sortBy'] = self._serialize.query("sort_by", sort_by, 'str')
        if text_decorations is not None:
            query_parameters['textDecorations'] = self._serialize.query("text_decorations", text_decorations, 'bool')
        if text_format is not None:
            query_parameters['textFormat'] = self._serialize.query("text_format", text_format, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['X-BingApis-SDK'] = self._serialize.header("x_bing_apis_sdk", x_bing_apis_sdk, 'str')
        if accept is not None:
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("accept_language", accept_language, 'str')
        if user_agent_parameter is not None:
            header_parameters['User-Agent'] = self._serialize.header("user_agent_parameter", user_agent_parameter, 'str')
        if client_id is not None:
            header_parameters['X-MSEdge-ClientID'] = self._serialize.header("client_id", client_id, 'str')
        if client_ip is not None:
            header_parameters['X-MSEdge-ClientIP'] = self._serialize.header("client_ip", client_ip, 'str')
        if location is not None:
            header_parameters['X-Search-Location'] = self._serialize.header("location", location, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('News', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    search.metadata = {'url': '/news/search'}  # type: ignore

    def category(
        self,
        x_bing_apis_sdk=True,  # type: Union[str, "_models.XBingApisSDK"]
        accept=None,  # type: Optional[str]
        accept_language=None,  # type: Optional[str]
        user_agent_parameter=None,  # type: Optional[str]
        client_id=None,  # type: Optional[str]
        client_ip=None,  # type: Optional[str]
        location=None,  # type: Optional[str]
        country_code=None,  # type: Optional[str]
        category=None,  # type: Optional[str]
        count=None,  # type: Optional[int]
        headline_count=None,  # type: Optional[int]
        market=None,  # type: Optional[str]
        offset=None,  # type: Optional[int]
        original_image=None,  # type: Optional[bool]
        safe_search=None,  # type: Optional[Union[str, "_models.SafeSearch"]]
        set_lang=None,  # type: Optional[str]
        text_decorations=None,  # type: Optional[bool]
        text_format=None,  # type: Optional[Union[str, "_models.TextFormat"]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.News"
        """The News Category API lets you search on Bing and get back a list of top news articles by category. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        The News Category API lets you search on Bing and get back a list of top news articles by category. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        :param x_bing_apis_sdk: Activate swagger compliance.
        :type x_bing_apis_sdk: str or ~news_search_client.models.XBingApisSDK
        :param accept: The default media type is application/json. To specify that the response use `JSON-LD <http://json-ld.org/>`_\ , set the Accept header to application/ld+json.
        :type accept: str
        :param accept_language: A comma-delimited list of one or more languages to use for user interface strings. The list is in decreasing order of preference. For additional information, including expected format, see `RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. This header and the `setLang <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive; do not specify both. If you set this header, you must also specify the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter. To determine the market to return results for, Bing uses the first supported language it finds from the list and combines it with the cc parameter value. If the list does not include a supported language, Bing finds the closest language and market that supports the request or it uses an aggregated or default market for the results. To determine the market that Bing used, see the BingAPIs-Market header. Use this header and the cc query parameter only if you specify multiple languages. Otherwise, use the `mkt <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ and `setLang <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameters. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Any links to Bing.com properties in the response objects apply the specified language.
        :type accept_language: str
        :param user_agent_parameter: The user agent originating the request.
Bing uses the user agent to provide mobile users with an optimized experience. Although optional, you are encouraged to always specify this header. The user-agent should be the same string that any commonly used browser sends. For information about user agents, see `RFC 2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. The following are examples of user- agent strings. Windows Phone: Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 822). Android: Mozilla / 5.0 (Linux; U; Android 2.3.5; en - us; SCH - I500 Build / GINGERBREAD) AppleWebKit / 533.1 (KHTML; like Gecko) Version / 4.0 Mobile Safari / 533.1. iPhone: Mozilla / 5.0 (iPhone; CPU iPhone OS 6_1 like Mac OS X) AppleWebKit / 536.26 (KHTML; like Gecko) Mobile / 10B142 iPhone4; 1 BingWeb / 3.03.1428.20120423. PC: Mozilla / 5.0 (Windows NT 6.3; WOW64; Trident / 7.0; Touch; rv:11.0) like Gecko. iPad: Mozilla / 5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit / 537.51.1 (KHTML, like Gecko) Version / 7.0 Mobile / 11A465 Safari / 9537.53. :type user_agent_parameter: str :param client_id: Bing uses this header to provide users with consistent behavior across Bing API calls. Bing often flights new features and improvements, and it uses the client ID as a key for assigning traffic on different flights. If you do not use the same client ID for a user across multiple requests, then Bing may assign the user to multiple conflicting flights. Being assigned to multiple conflicting flights can lead to an inconsistent user experience. For example, if the second request has a different flight assignment than the first, the experience may be unexpected. Also, Bing can use the client ID to tailor web results to that client ID’s search history, providing a richer experience for the user. Bing also uses this header to help improve result rankings by analyzing the activity generated by a client ID. The relevance improvements help with better quality of results delivered by Bing APIs and in turn enables higher click-through rates for the API consumer. IMPORTANT: Although optional, you should consider this header required. Persisting the client ID across multiple requests for the same end user and device combination enables 1) the API consumer to receive a consistent user experience, and 2) higher click-through rates via better quality of results from the Bing APIs. Each user that uses your application on the device must have a unique, Bing generated client ID. If you do not include this header in the request, Bing generates an ID and returns it in the X-MSEdge-ClientID response header. The only time that you should NOT include this header in a request is the first time the user uses your app on that device. Use the client ID for each Bing API request that your app makes for this user on the device. Persist the client ID. To persist the ID in a browser app, use a persistent HTTP cookie to ensure the ID is used across all sessions. Do not use a session cookie. For other apps such as mobile apps, use the device's persistent storage to persist the ID. The next time the user uses your app on that device, get the client ID that you persisted. Bing responses may or may not include this header. If the response includes this header, capture the client ID and use it for all subsequent Bing requests for the user on that device. If you include the X-MSEdge-ClientID, you must not include cookies in the request. :type client_id: str :param client_ip: The IPv4 or IPv6 address of the client device. 
The IP address is used to discover the user's location. Bing uses the location information to determine safe search behavior. Although optional, you are encouraged to always specify this header and the X-Search- Location header. Do not obfuscate the address (for example, by changing the last octet to 0). Obfuscating the address results in the location not being anywhere near the device's actual location, which may result in Bing serving erroneous results. :type client_ip: str :param location: A semicolon-delimited list of key/value pairs that describe the client's geographical location. Bing uses the location information to determine safe search behavior and to return relevant local content. Specify the key/value pair as :code:`<key>`::code:`<value>`. The following are the keys that you use to specify the user's location. lat (required): The latitude of the client's location, in degrees. The latitude must be greater than or equal to -90.0 and less than or equal to +90.0. Negative values indicate southern latitudes and positive values indicate northern latitudes. long (required): The longitude of the client's location, in degrees. The longitude must be greater than or equal to -180.0 and less than or equal to +180.0. Negative values indicate western longitudes and positive values indicate eastern longitudes. re (required): The radius, in meters, which specifies the horizontal accuracy of the coordinates. Pass the value returned by the device's location service. Typical values might be 22m for GPS/Wi-Fi, 380m for cell tower triangulation, and 18,000m for reverse IP lookup. ts (optional): The UTC UNIX timestamp of when the client was at the location. (The UNIX timestamp is the number of seconds since January 1, 1970.) head (optional): The client's relative heading or direction of travel. Specify the direction of travel as degrees from 0 through 360, counting clockwise relative to true north. Specify this key only if the sp key is nonzero. sp (optional): The horizontal velocity (speed), in meters per second, that the client device is traveling. alt (optional): The altitude of the client device, in meters. are (optional): The radius, in meters, that specifies the vertical accuracy of the coordinates. Specify this key only if you specify the alt key. Although many of the keys are optional, the more information that you provide, the more accurate the location results are. Although optional, you are encouraged to always specify the user's geographical location. Providing the location is especially important if the client's IP address does not accurately reflect the user's physical location (for example, if the client uses VPN). For optimal results, you should include this header and the X-MSEdge-ClientIP header, but at a minimum, you should include this header. :type location: str :param country_code: A 2-character country code of the country where the results come from. This API supports only the United States market. If you specify this query parameter, it must be set to us. If you set this parameter, you must also specify the Accept-Language header. Bing uses the first supported language it finds from the languages list, and combine that language with the country code that you specify to determine the market to return results for. If the languages list does not include a supported language, Bing finds the closest language and market that supports the request, or it may use an aggregated or default market for the results instead of a specified one. 
You should use this query parameter and the Accept-Language query parameter only if you specify multiple languages; otherwise, you should use the mkt and setLang query parameters. This parameter and the mkt query parameter are mutually exclusive—do not specify both. :type country_code: str :param category: The category of articles to return. For example, Sports articles or Entertainment articles. For a list of possible categories, see `News Categories by Market <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. Use this parameter only with News Category API. If you do not specify this parameter, the response includes both: Headline articles typically published in the last 24 hours from any category and articles from each parent category (up to four articles). If the article is a headline, the article's headline field is set to true. By default, the response includes up to 12 headline articles. To specify the number of headline articles to return, set the `headlineCount <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter. :type category: str :param count: The number of news articles to return in the response. The actual number delivered may be less than requested. The default is 10 and the maximum value is 100. The actual number delivered may be less than requested.You may use this parameter along with the offset parameter to page results. For example, if your user interface displays 20 articles per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. If you do not specify the `category <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ parameter, Bing ignores this parameter. :type count: int :param headline_count: The number of headline articles to return in the response. The default is 12. Specify this parameter only if you do not specify the `category <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ parameter. :type headline_count: int :param market: The market where the results come from. Typically, mkt is the country where the user is making the request from. However, it could be a different country if the user is not located in a country where Bing delivers results. The market must be in the form :code:`<language code>`-:code:`<country code>`. For example, en-US. The string is case insensitive. For a list of possible market values, see `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. NOTE: If known, you are encouraged to always specify the market. Specifying the market helps Bing route the request and return an appropriate and optimal response. If you specify a market that is not listed in `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_\ , Bing uses a best fit market code based on an internal mapping that is subject to change. This parameter and the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive—do not specify both. :type market: str :param offset: The zero-based offset that indicates the number of news to skip before returning news. The default is 0. The offset should be less than (\ `totalEstimatedMatches <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ - count). Use this parameter along with the count parameter to page results. 
For example, if your user interface displays 20 news per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. If you do not specify the `category <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ parameter, Bing ignores this parameter. :type offset: int :param original_image: A Boolean value that determines whether the image's contentUrl contains a URL that points to a thumbnail of the original article's image or the image itself. If the article includes an image, and this parameter is set to true, the image's contentUrl property contains a URL that you may use to download the original image from the publisher's website. Otherwise, if this parameter is false, the image's contentUrl and thumbnailUrl URLs both point to the same thumbnail image. Use this parameter only with the News Search API or News Category API. Trending Topics API ignore this parameter. :type original_image: bool :param safe_search: Filter news for adult content. The following are the possible filter values. Off: Return news articles with adult text, images, or videos. Moderate: Return news articles with adult text but not adult images or videos. Strict: Do not return news articles with adult text, images, or videos. If the request comes from a market that Bing's adult policy requires that safeSearch is set to Strict, Bing ignores the safeSearch value and uses Strict. If you use the site: query operator, there is the chance that the response may contain adult content regardless of what the safeSearch query parameter is set to. Use site: only if you are aware of the content on the site and your scenario supports the possibility of adult content. :type safe_search: str or ~news_search_client.models.SafeSearch :param set_lang: The language to use for user interface strings. Specify the language using the ISO 639-1 2-letter language code. For example, the language code for English is EN. The default is EN (English). Although optional, you should always specify the language. Typically, you set setLang to the same language specified by mkt unless the user wants the user interface strings displayed in a different language. This parameter and the `Accept-Language <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ header are mutually exclusive; do not specify both. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Also, any links to Bing.com properties in the response objects apply the specified language. :type set_lang: str :param text_decorations: A Boolean value that determines whether display strings contain decoration markers such as hit highlighting characters. If true, the strings may include markers. The default is false. To specify whether to use Unicode characters or HTML tags as the markers, see the `textFormat <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ query parameter. For information about hit highlighting, see `Hit Highlighting <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. :type text_decorations: bool :param text_format: The type of markers to use for text decorations (see the textDecorations query parameter). Possible values are Raw—Use Unicode characters to mark content that needs special formatting. The Unicode characters are in the range E000 through E019. 
        For example, Bing uses E000 and E001 to mark the beginning and end of query terms for hit highlighting. HTML—Use HTML tags to mark content that needs special formatting. For example, use :code:`<b>` tags to highlight query terms in display strings. The default is Raw. For display strings that contain escapable HTML characters such as <, >, and &, if textFormat is set to HTML, Bing escapes the characters as appropriate (for example, < is escaped to &lt;).
        :type text_format: str or ~news_search_client.models.TextFormat
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: News, or the result of cls(response)
        :rtype: ~news_search_client.models.News
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.News"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Construct URL
        url = self.category.metadata['url']  # type: ignore

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if country_code is not None:
            query_parameters['cc'] = self._serialize.query("country_code", country_code, 'str')
        if category is not None:
            query_parameters['category'] = self._serialize.query("category", category, 'str')
        if count is not None:
            query_parameters['count'] = self._serialize.query("count", count, 'int')
        if headline_count is not None:
            query_parameters['headlineCount'] = self._serialize.query("headline_count", headline_count, 'int')
        if market is not None:
            query_parameters['mkt'] = self._serialize.query("market", market, 'str')
        if offset is not None:
            query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
        if original_image is not None:
            query_parameters['originalImg'] = self._serialize.query("original_image", original_image, 'bool')
        if safe_search is not None:
            query_parameters['safeSearch'] = self._serialize.query("safe_search", safe_search, 'str')
        if set_lang is not None:
            query_parameters['setLang'] = self._serialize.query("set_lang", set_lang, 'str')
        if text_decorations is not None:
            query_parameters['textDecorations'] = self._serialize.query("text_decorations", text_decorations, 'bool')
        if text_format is not None:
            query_parameters['textFormat'] = self._serialize.query("text_format", text_format, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['X-BingApis-SDK'] = self._serialize.header("x_bing_apis_sdk", x_bing_apis_sdk, 'str')
        if accept is not None:
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if accept_language is not None:
            header_parameters['Accept-Language'] = self._serialize.header("accept_language", accept_language, 'str')
        if user_agent_parameter is not None:
            header_parameters['User-Agent'] = self._serialize.header("user_agent_parameter", user_agent_parameter, 'str')
        if client_id is not None:
            header_parameters['X-MSEdge-ClientID'] = self._serialize.header("client_id", client_id, 'str')
        if client_ip is not None:
            header_parameters['X-MSEdge-ClientIP'] = self._serialize.header("client_ip", client_ip, 'str')
        if location is not None:
            header_parameters['X-Search-Location'] = self._serialize.header("location", location, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('News', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    category.metadata = {'url': '/news'}  # type: ignore

    def trending(
        self,
        x_bing_apis_sdk=True,  # type: Union[str, "_models.XBingApisSDK"]
        accept=None,  # type: Optional[str]
        accept_language=None,  # type: Optional[str]
        user_agent_parameter=None,  # type: Optional[str]
        client_id=None,  # type: Optional[str]
        client_ip=None,  # type: Optional[str]
        location=None,  # type: Optional[str]
        country_code=None,  # type: Optional[str]
        count=None,  # type: Optional[int]
        market=None,  # type: Optional[str]
        offset=None,  # type: Optional[int]
        safe_search=None,  # type: Optional[Union[str, "_models.SafeSearch"]]
        set_lang=None,  # type: Optional[str]
        since=None,  # type: Optional[int]
        sort_by=None,  # type: Optional[str]
        text_decorations=None,  # type: Optional[bool]
        text_format=None,  # type: Optional[Union[str, "_models.TextFormat"]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.TrendingTopics"
        """The News Trending Topics API lets you search on Bing and get back a list of trending news topics that are currently trending on Bing. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        The News Trending Topics API lets you search on Bing and get back a list of trending news topics that are currently trending on Bing. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.

        :param x_bing_apis_sdk: Activate swagger compliance.
        :type x_bing_apis_sdk: str or ~news_search_client.models.XBingApisSDK
        :param accept: The default media type is application/json. To specify that the response use `JSON-LD <http://json-ld.org/>`_\ , set the Accept header to application/ld+json.
        :type accept: str
        :param accept_language: A comma-delimited list of one or more languages to use for user interface strings. The list is in decreasing order of preference. For additional information, including expected format, see `RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. This header and the `setLang <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive; do not specify both. If you set this header, you must also specify the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter. To determine the market to return results for, Bing uses the first supported language it finds from the list and combines it with the cc parameter value. If the list does not include a supported language, Bing finds the closest language and market that supports the request or it uses an aggregated or default market for the results. To determine the market that Bing used, see the BingAPIs-Market header. Use this header and the cc query parameter only if you specify multiple languages.
Otherwise, use the `mkt <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ and `setLang <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ query parameters. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Any links to Bing.com properties in the response objects apply the specified language. :type accept_language: str :param user_agent_parameter: The user agent originating the request. Bing uses the user agent to provide mobile users with an optimized experience. Although optional, you are encouraged to always specify this header. The user-agent should be the same string that any commonly used browser sends. For information about user agents, see `RFC 2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>`_. The following are examples of user- agent strings. Windows Phone: Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 822). Android: Mozilla / 5.0 (Linux; U; Android 2.3.5; en - us; SCH - I500 Build / GINGERBREAD) AppleWebKit / 533.1 (KHTML; like Gecko) Version / 4.0 Mobile Safari / 533.1. iPhone: Mozilla / 5.0 (iPhone; CPU iPhone OS 6_1 like Mac OS X) AppleWebKit / 536.26 (KHTML; like Gecko) Mobile / 10B142 iPhone4; 1 BingWeb / 3.03.1428.20120423. PC: Mozilla / 5.0 (Windows NT 6.3; WOW64; Trident / 7.0; Touch; rv:11.0) like Gecko. iPad: Mozilla / 5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit / 537.51.1 (KHTML, like Gecko) Version / 7.0 Mobile / 11A465 Safari / 9537.53. :type user_agent_parameter: str :param client_id: Bing uses this header to provide users with consistent behavior across Bing API calls. Bing often flights new features and improvements, and it uses the client ID as a key for assigning traffic on different flights. If you do not use the same client ID for a user across multiple requests, then Bing may assign the user to multiple conflicting flights. Being assigned to multiple conflicting flights can lead to an inconsistent user experience. For example, if the second request has a different flight assignment than the first, the experience may be unexpected. Also, Bing can use the client ID to tailor web results to that client ID’s search history, providing a richer experience for the user. Bing also uses this header to help improve result rankings by analyzing the activity generated by a client ID. The relevance improvements help with better quality of results delivered by Bing APIs and in turn enables higher click-through rates for the API consumer. IMPORTANT: Although optional, you should consider this header required. Persisting the client ID across multiple requests for the same end user and device combination enables 1) the API consumer to receive a consistent user experience, and 2) higher click-through rates via better quality of results from the Bing APIs. Each user that uses your application on the device must have a unique, Bing generated client ID. If you do not include this header in the request, Bing generates an ID and returns it in the X-MSEdge-ClientID response header. The only time that you should NOT include this header in a request is the first time the user uses your app on that device. Use the client ID for each Bing API request that your app makes for this user on the device. Persist the client ID. To persist the ID in a browser app, use a persistent HTTP cookie to ensure the ID is used across all sessions. Do not use a session cookie. 
For other apps such as mobile apps, use the device's persistent storage to persist the ID. The next time the user uses your app on that device, get the client ID that you persisted. Bing responses may or may not include this header. If the response includes this header, capture the client ID and use it for all subsequent Bing requests for the user on that device. If you include the X-MSEdge-ClientID, you must not include cookies in the request. :type client_id: str :param client_ip: The IPv4 or IPv6 address of the client device. The IP address is used to discover the user's location. Bing uses the location information to determine safe search behavior. Although optional, you are encouraged to always specify this header and the X-Search- Location header. Do not obfuscate the address (for example, by changing the last octet to 0). Obfuscating the address results in the location not being anywhere near the device's actual location, which may result in Bing serving erroneous results. :type client_ip: str :param location: A semicolon-delimited list of key/value pairs that describe the client's geographical location. Bing uses the location information to determine safe search behavior and to return relevant local content. Specify the key/value pair as :code:`<key>`::code:`<value>`. The following are the keys that you use to specify the user's location. lat (required): The latitude of the client's location, in degrees. The latitude must be greater than or equal to -90.0 and less than or equal to +90.0. Negative values indicate southern latitudes and positive values indicate northern latitudes. long (required): The longitude of the client's location, in degrees. The longitude must be greater than or equal to -180.0 and less than or equal to +180.0. Negative values indicate western longitudes and positive values indicate eastern longitudes. re (required): The radius, in meters, which specifies the horizontal accuracy of the coordinates. Pass the value returned by the device's location service. Typical values might be 22m for GPS/Wi-Fi, 380m for cell tower triangulation, and 18,000m for reverse IP lookup. ts (optional): The UTC UNIX timestamp of when the client was at the location. (The UNIX timestamp is the number of seconds since January 1, 1970.) head (optional): The client's relative heading or direction of travel. Specify the direction of travel as degrees from 0 through 360, counting clockwise relative to true north. Specify this key only if the sp key is nonzero. sp (optional): The horizontal velocity (speed), in meters per second, that the client device is traveling. alt (optional): The altitude of the client device, in meters. are (optional): The radius, in meters, that specifies the vertical accuracy of the coordinates. Specify this key only if you specify the alt key. Although many of the keys are optional, the more information that you provide, the more accurate the location results are. Although optional, you are encouraged to always specify the user's geographical location. Providing the location is especially important if the client's IP address does not accurately reflect the user's physical location (for example, if the client uses VPN). For optimal results, you should include this header and the X-MSEdge-ClientIP header, but at a minimum, you should include this header. :type location: str :param country_code: A 2-character country code of the country where the results come from. This API supports only the United States market. 
If you specify this query parameter, it must be set to us. If you set this parameter, you must also specify the Accept-Language header. Bing uses the first supported language it finds from the languages list, and combines that language with the country code that you specify to determine the market to return results for. If the languages list does not include a supported language, Bing finds the closest language and market that supports the request, or it may use an aggregated or default market for the results instead of a specified one. You should use this query parameter and the Accept-Language query parameter only if you specify multiple languages; otherwise, you should use the mkt and setLang query parameters. This parameter and the mkt query parameter are mutually exclusive—do not specify both. :type country_code: str :param count: The number of news articles to return in the response. The actual number delivered may be less than requested. The default is 10 and the maximum value is 100. You may use this parameter along with the offset parameter to page results. For example, if your user interface displays 20 articles per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. :type count: int :param market: The market where the results come from. Typically, mkt is the country where the user is making the request from. However, it could be a different country if the user is not located in a country where Bing delivers results. The market must be in the form :code:`<language code>`-:code:`<country code>`. For example, en-US. The string is case insensitive. For a list of possible market values, see `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. NOTE: If known, you are encouraged to always specify the market. Specifying the market helps Bing route the request and return an appropriate and optimal response. If you specify a market that is not listed in `Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_\ , Bing uses a best fit market code based on an internal mapping that is subject to change. This parameter and the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter are mutually exclusive—do not specify both. :type market: str :param offset: The zero-based offset that indicates the number of news articles to skip before returning results. The default is 0. The offset should be less than (\ `totalEstimatedMatches <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ - count). Use this parameter along with the count parameter to page results. For example, if your user interface displays 20 articles per page, set count to 20 and offset to 0 to get the first page of results. For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for multiple pages to include some overlap in results. :type offset: int :param safe_search: Filter news for adult content. The following are the possible filter values. Off: Return news articles with adult text, images, or videos. Moderate: Return news articles with adult text but not adult images or videos. Strict: Do not return news articles with adult text, images, or videos. 
If the request comes from a market that Bing's adult policy requires that safeSearch is set to Strict, Bing ignores the safeSearch value and uses Strict. If you use the site: query operator, there is the chance that the response may contain adult content regardless of what the safeSearch query parameter is set to. Use site: only if you are aware of the content on the site and your scenario supports the possibility of adult content. :type safe_search: str or ~news_search_client.models.SafeSearch :param set_lang: The language to use for user interface strings. Specify the language using the ISO 639-1 2-letter language code. For example, the language code for English is EN. The default is EN (English). Although optional, you should always specify the language. Typically, you set setLang to the same language specified by mkt unless the user wants the user interface strings displayed in a different language. This parameter and the `Accept-Language <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ header are mutually exclusive; do not specify both. A user interface string is a string that's used as a label in a user interface. There are few user interface strings in the JSON response objects. Also, any links to Bing.com properties in the response objects apply the specified language. :type set_lang: str :param since: The Unix epoch time (Unix timestamp) that Bing uses to select the trending topics. Bing returns trending topics that it discovered on or after the specified date and time, not the date the topic was published. To use this parameter, also specify the sortBy parameter. Use this parameter only with the News Trending Topics API. Do not specify this parameter when calling the News Search API or News Category API. :type since: long :param sort_by: The order to return the news in. The following are the possible case- insensitive values. Date: If the request is through the News Search API, the response returns news articles sorted by date from the most recent to the oldest. If the request is through the News Trending Topics API, the response returns trending topics sorted by date from the most recent to the oldest. :type sort_by: str :param text_decorations: A Boolean value that determines whether display strings contain decoration markers such as hit highlighting characters. If true, the strings may include markers. The default is false. To specify whether to use Unicode characters or HTML tags as the markers, see the `textFormat <https://docs.microsoft.com/en-us/bing/bing-news- search/overview>`_ query parameter. For information about hit highlighting, see `Hit Highlighting <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. :type text_decorations: bool :param text_format: The type of markers to use for text decorations (see the textDecorations query parameter). Possible values are Raw—Use Unicode characters to mark content that needs special formatting. The Unicode characters are in the range E000 through E019. For example, Bing uses E000 and E001 to mark the beginning and end of query terms for hit highlighting. HTML—Use HTML tags to mark content that needs special formatting. For example, use :code:`<b>` tags to highlight query terms in display strings. The default is Raw. For display strings that contain escapable HTML characters such as <, >, and &, if textFormat is set to HTML, Bing escapes the characters as appropriate (for example, < is escaped to &lt;). 
:type text_format: str or ~news_search_client.models.TextFormat :keyword callable cls: A custom type or function that will be passed the direct response :return: TrendingTopics, or the result of cls(response) :rtype: ~news_search_client.models.TrendingTopics :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.TrendingTopics"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) # Construct URL url = self.trending.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] if country_code is not None: query_parameters['cc'] = self._serialize.query("country_code", country_code, 'str') if count is not None: query_parameters['count'] = self._serialize.query("count", count, 'int') if market is not None: query_parameters['mkt'] = self._serialize.query("market", market, 'str') if offset is not None: query_parameters['offset'] = self._serialize.query("offset", offset, 'int') if safe_search is not None: query_parameters['safeSearch'] = self._serialize.query("safe_search", safe_search, 'str') if set_lang is not None: query_parameters['setLang'] = self._serialize.query("set_lang", set_lang, 'str') if since is not None: query_parameters['since'] = self._serialize.query("since", since, 'long') if sort_by is not None: query_parameters['sortBy'] = self._serialize.query("sort_by", sort_by, 'str') if text_decorations is not None: query_parameters['textDecorations'] = self._serialize.query("text_decorations", text_decorations, 'bool') if text_format is not None: query_parameters['textFormat'] = self._serialize.query("text_format", text_format, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['X-BingApis-SDK'] = self._serialize.header("x_bing_apis_sdk", x_bing_apis_sdk, 'str') if accept is not None: header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if accept_language is not None: header_parameters['Accept-Language'] = self._serialize.header("accept_language", accept_language, 'str') if user_agent_parameter is not None: header_parameters['User-Agent'] = self._serialize.header("user_agent_parameter", user_agent_parameter, 'str') if client_id is not None: header_parameters['X-MSEdge-ClientID'] = self._serialize.header("client_id", client_id, 'str') if client_ip is not None: header_parameters['X-MSEdge-ClientIP'] = self._serialize.header("client_ip", client_ip, 'str') if location is not None: header_parameters['X-Search-Location'] = self._serialize.header("location", location, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('TrendingTopics', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized trending.metadata = {'url': '/news/trendingtopics'} # type: ignore
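A minimal sketch of the count/offset paging contract the docstrings above describe, against the trending operation; `news_ops` stands for an instance of the operations class shown, and its construction (endpoint, credentials) is an assumption not covered by this excerpt.
# Hedged paging sketch; only the method names and parameters come from the
# generated code above.
page_size = 20
for page_index in range(3):
    topics = news_ops.trending(
        market="en-US",
        count=page_size,                # ask for 20 topics per page
        offset=page_index * page_size,  # 0, 20, 40 -- increment by count
    )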
78.072727
448
0.704088
10,543
72,998
4.823674
0.067438
0.013371
0.015731
0.018169
0.943822
0.939653
0.935937
0.932889
0.932043
0.931198
0
0.012615
0.231171
72,998
934
449
78.156317
0.893324
0.752473
0
0.821862
1
0
0.105174
0
0
0
0
0
0
1
0.016194
false
0
0.02834
0
0.076923
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
675ad3a1114e5d1e3d6282e029dc9c81c3f263b5
132
py
Python
qshell/__init__.py
QSoloX/qshell
3e68d1b048e8424130e296fb34ff602e067eb3b0
[ "MIT" ]
2
2020-12-01T01:41:37.000Z
2020-12-01T18:35:44.000Z
qshell/__init__.py
QSoloX/qshell
3e68d1b048e8424130e296fb34ff602e067eb3b0
[ "MIT" ]
null
null
null
qshell/__init__.py
QSoloX/qshell
3e68d1b048e8424130e296fb34ff602e067eb3b0
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .shell import Shell
from .command import register
from .command import commands
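A heavily hedged sketch of this exported surface; nothing in the __init__ above shows how Shell, register, or commands actually behave, so every call below is an assumption about qshell's API.
from qshell import Shell, register, commands

@register                 # assumed: decorator that records a command
def hello():
    print("hello from qshell")

print(commands)           # assumed: the registry populated by @register
shell = Shell()           # assumed: interactive shell entry point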
18.857143
29
0.719697
19
132
5
0.684211
0.231579
0.357895
0
0
0
0
0
0
0
0
0.008929
0.151515
132
6
30
22
0.839286
0.318182
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
675fea5aab111f762823634dd7dda2a67eb80253
356
py
Python
colossalai/context/random/__init__.py
xdjiangkai/ColossalAI
4a3d3446b04065fa1c89b78cba673e96115c6325
[ "Apache-2.0" ]
null
null
null
colossalai/context/random/__init__.py
xdjiangkai/ColossalAI
4a3d3446b04065fa1c89b78cba673e96115c6325
[ "Apache-2.0" ]
null
null
null
colossalai/context/random/__init__.py
xdjiangkai/ColossalAI
4a3d3446b04065fa1c89b78cba673e96115c6325
[ "Apache-2.0" ]
1
2022-01-06T17:16:32.000Z
2022-01-06T17:16:32.000Z
from ._helper import (seed, set_mode, with_seed, add_seed, get_seeds, get_states,
                      get_current_mode, set_seed_states, sync_states, moe_set_seed)

__all__ = [
    'seed', 'set_mode', 'with_seed', 'add_seed', 'get_seeds', 'get_states',
    'get_current_mode', 'set_seed_states', 'sync_states', 'moe_set_seed'
]
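A heavily hedged sketch of the seed-management surface re-exported above. The argument types and semantics are assumptions based only on the names; ColossalAI's real signatures may take parallel-mode arguments rather than plain strings.
from colossalai.context.random import add_seed, seed, get_seeds

add_seed('dropout', 1024)   # assumed: register a named RNG state
with seed('dropout'):       # assumed: seed() switches the active state
    pass                    # random ops here would draw from 'dropout'
print(get_seeds())          # assumed: inspect all registered states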
35.6
71
0.640449
48
356
4.145833
0.3125
0.140704
0.110553
0.150754
0.904523
0.904523
0.904523
0.904523
0.904523
0.904523
0
0
0.235955
356
9
72
39.555556
0.731618
0
0
0
0
0
0.286517
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
678ca60b144ee57d41101739220b458d1807931e
8,309
py
Python
tests/int/test_redeem.py
keyko-io/defi-crawler-py
132fd0a4f4e72cebafd40a884a882d184d9dfb5a
[ "Apache-2.0" ]
45
2021-05-08T03:01:52.000Z
2022-03-21T05:50:45.000Z
tests/int/test_redeem.py
vv111y/defi-crawler-py
135b3d4a89e9382b9f428f35c3fe7432ec03e581
[ "Apache-2.0" ]
22
2021-04-07T13:52:58.000Z
2022-03-29T22:01:36.000Z
tests/int/test_redeem.py
keyko-io/defi-crawler-py
132fd0a4f4e72cebafd40a884a882d184d9dfb5a
[ "Apache-2.0" ]
15
2021-06-25T16:06:15.000Z
2022-02-23T02:36:54.000Z
from deficrawler.lending import Lending def test_redeem_aave_2_eth(): aave = Lending(protocol="Aave", chain="Ethereum", version=2) redeem = aave.get_data_from_date_range( '30/08/2021 00:00:01', '30/08/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Aave") assert(redeem[0]['chain'] == "Ethereum") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(redeem[0]['amount'] > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_aave_2_eth_user(): aave = Lending(protocol="Aave", chain="Ethereum", version=2) redeems = aave.get_data_from_date_range( '30/08/2021 00:00:01', '31/08/2021 18:01:00', "redeem", "0x27239549dd40e1d60f5b80b0c4196923745b1fd2") for redeem in redeems: assert(redeem['user'] == "0x27239549dd40e1d60f5b80b0c4196923745b1fd2") def test_redeem_aave_2_polygon(): aave = Lending(protocol="Aave", chain="Polygon", version=2) redeem = aave.get_data_from_date_range( '30/08/2021 00:00:01', '30/08/2021 02:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Aave") assert(redeem[0]['chain'] == "Polygon") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(redeem[0]['amount'] > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_aave_2_avalanche(): aave = Lending(protocol="Aave", chain="Avalanche", version=2) redeem = aave.get_data_from_date_range( '17/10/2021 00:00:01', '17/10/2021 09:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Aave") assert(redeem[0]['chain'] == "Avalanche") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(redeem[0]['amount'] > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_aave_2_polygon_user(): aave = Lending(protocol="Aave", chain="Polygon", version=2) redeems = aave.get_data_from_date_range( '30/08/2021 00:00:01', '31/08/2021 02:01:00', "redeem", "0x3fcd5de6a9fc8a99995c406c77dda3ed7e406f81") for redeem in redeems: assert(redeem['user'] == "0x3fcd5de6a9fc8a99995c406c77dda3ed7e406f81") def test_redeem_compound_2_eth(): compound = Lending(protocol="Compound", chain="Ethereum", version=2) redeem = compound.get_data_from_date_range( '28/07/2021 00:00:01', '30/07/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Compound") assert(redeem[0]['chain'] == "Ethereum") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_compound_2_eth_user(): compound = Lending(protocol="Compound", chain="Ethereum", version=2) redeems = compound.get_data_from_date_range( '28/07/2021 00:00:01', '3/07/2021 18:01:00', "redeem", "0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5") for redeem in redeems: assert(redeem['user'] == "0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5") def test_redeem_cream_2_eth(): cream = Lending(protocol="Cream", chain="Ethereum", version=2) redeem = cream.get_data_from_date_range( '28/07/2021 00:00:01', '30/07/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "Ethereum") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_polygon(): cream = Lending(protocol="Cream", chain="Polygon", version=2) redeem = 
cream.get_data_from_date_range( '25/09/2021 00:00:01', '26/09/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "Polygon") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_arbitrum(): cream = Lending(protocol="Cream", chain="Arbitrum", version=2) redeem = cream.get_data_from_date_range( '25/09/2021 00:00:01', '26/09/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "Arbitrum") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_fantom(): cream = Lending(protocol="Cream", chain="fantom", version=2) redeem = cream.get_data_from_date_range( '18/10/2021 00:00:01', '19/10/2021 00:00:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "fantom") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_avalanche(): cream = Lending(protocol="Cream", chain="avalanche", version=2) redeem = cream.get_data_from_date_range( '18/10/2021 00:00:01', '19/10/2021 00:00:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "avalanche") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_eth_user(): cream = Lending(protocol="Cream", chain="Ethereum", version=2) redeems = cream.get_data_from_date_range( '28/07/2021 00:00:01', '30/07/2021 18:01:00', "redeem", "0x85759961b116f1d36fd697855c57a6ae40793d9b") for redeem in redeems: assert(redeem['user'] == "0x85759961b116f1d36fd697855c57a6ae40793d9b") def test_redeem_cream_2_bsc(): cream = Lending(protocol="Cream", chain="Bsc", version=2) redeem = cream.get_data_from_date_range( '28/07/2021 00:00:01', '30/07/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Cream") assert(redeem[0]['chain'] == "Bsc") assert(redeem[0]['version'] == 2) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_cream_2_bsc_user(): cream = Lending(protocol="Cream", chain="Bsc", version=2) redeems = cream.get_data_from_date_range( '28/07/2021 00:00:01', '30/07/2021 18:01:00', "redeem", "0x1ffe17b99b439be0afc831239ddecda2a790ff3a") for redeem in redeems: assert(redeem['user'] == "0x1ffe17b99b439be0afc831239ddecda2a790ff3a") def test_redeem_kashi_1_eth(): kashi = Lending(protocol="Kashi", chain="Ethereum", version=1) redeem = kashi.get_data_from_date_range( '25/09/2021 00:00:01', '30/09/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Kashi") assert(redeem[0]['chain'] == "Ethereum") assert(redeem[0]['version'] == 1) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0) def test_redeem_kashi_1_polygon(): kashi = Lending(protocol="Kashi", chain="Polygon", version=1) redeem = 
kashi.get_data_from_date_range( '25/09/2021 00:00:01', '30/09/2021 18:01:00', "redeem") assert(redeem[0]['tx_id'] != "") assert(redeem[0]['protocol'] == "Kashi") assert(redeem[0]['chain'] == "Polygon") assert(redeem[0]['version'] == 1) assert(redeem[0]['user'] != "") assert(redeem[0]['token'] != "") assert(float(redeem[0]['amount']) > 0) assert(redeem[0]['timestamp'] > 0)
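Every test above exercises the same call pattern; as a standalone sketch (date strings in the DD/MM/YYYY form the tests use): construct a Lending crawler for a protocol/chain/version, then fetch "redeem" events over a date range, optionally filtered by user address.
from deficrawler.lending import Lending

aave = Lending(protocol="Aave", chain="Ethereum", version=2)
events = aave.get_data_from_date_range(
    '30/08/2021 00:00:01', '30/08/2021 18:01:00', "redeem")
for event in events:
    print(event['tx_id'], event['user'], event['amount'])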
36.126087
109
0.618847
1,098
8,309
4.543716
0.055556
0.134696
0.226699
0.051112
0.884746
0.828022
0.787332
0.753257
0.675486
0.664863
0
0.130921
0.171741
8,309
229
110
36.283843
0.594013
0
0
0.744444
0
0
0.256469
0.050548
0
0
0.050548
0
0.561111
1
0.094444
false
0
0.005556
0
0.1
0
0
0
0
null
0
1
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
67c1beb17f67d4f92bf06967461ea3fa3c299100
5,164
py
Python
SymbLang.py
IstsPrograms/SymbLang
3c29511e9ae8a885d4f71d2852605c6d4628f08d
[ "MIT" ]
1
2022-01-08T10:42:45.000Z
2022-01-08T10:42:45.000Z
SymbLang.py
IstsPrograms/SymbLang
3c29511e9ae8a885d4f71d2852605c6d4628f08d
[ "MIT" ]
null
null
null
SymbLang.py
IstsPrograms/SymbLang
3c29511e9ae8a885d4f71d2852605c6d4628f08d
[ "MIT" ]
null
null
null
# SymbLang try: hl_file = open(input("Symblang file name... (file.sl)"), "r") except: print("Error: error in open file") quit(0) hl_loaded = hl_file.read() hl_file.close() cursor_position = 0 array = [] for i in range(30000): array.append(0) functs = [[], []] execute_code = False def executeFunctions(lines: str): global cursor_position, array for i in range(len(lines)): match lines[i]: case ">": cursor_position += 1 case "<": if cursor_position == 0: print( f"Error: cursor position < 0 (error in symbol {i + 1})") break else: cursor_position -= 1 case "+": array[cursor_position] += 1 case ".": try: print(chr(array[cursor_position])) except: print(f"Error: error in symbol {i + 1}") break case "-": if array[cursor_position] < 0 or array[cursor_position] == 0: print(f"Error: error in symbol {i + 1}") else: array[cursor_position] -= 1 case "*": try: array[cursor_position] = int( array[cursor_position] * lines[i+1]) except: print(f"Error: error in symbol {i + 1}") break case "/": try: array[cursor_position] = int( array[cursor_position]) / lines[i+1] except: print(f"Error: error in symbol {i + 1}") break case ",": try: array[cursor_position] = int(input("> ")) except: print(f"Error: error in symbol {i + 1}") break case ";": try: for u in range(30000): array[u] = 0 cursor_position = 0 except: print(f"Error: unknown error in symbol {i + 1}") case "?": print(array[cursor_position]) for i in range(len(hl_loaded)): if hl_loaded[i] == "&": execute_code = True if hl_loaded[i] == "@": functs[0].append(int(hl_loaded[i+1])) line = "" for p in range(i+2, 1000): if hl_loaded[p] == ":": break else: line += hl_loaded[p] functs[1].append(line) line = "" if execute_code == True: match hl_loaded[i]: case ">": cursor_position += 1 case "<": if cursor_position == 0: print( f"Error: cursor position < 0 (error in symbol {i + 1})") break else: cursor_position -= 1 case "+": array[cursor_position] += 1 case ".": try: print(chr(array[cursor_position])) except: print(f"Error: error in symbol {i + 1}") break case "-": if array[cursor_position] < 0 or array[cursor_position] == 0: print(f"Error: error in symbol {i + 1}") else: array[cursor_position] -= 1 case "*": try: array[cursor_position] = int( array[cursor_position] * hl_loaded[i+1]) except: print(f"Error: error in symbol {i + 1}") break case "/": try: array[cursor_position] = int( array[cursor_position]) / hl_loaded[i+1] except: print(f"Error: error in symbol {i + 1}") break case ",": try: array[cursor_position] = int(input("> ")) except: print(f"Error: error in symbol {i + 1}") break case ";": try: for u in range(30000): array[u] = 0 cursor_position = 0 except: print(f"Error: unknown error in symbol {i + 1}") case "%": print("Line ended!") break case "$": try: if int(hl_loaded[i+1]) in functs[0]: executeFunctions(functs[1][int(hl_loaded[i+1])]) except: print(f"Error: error in symbol {i + 1}") case "?": print(array[cursor_position])
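The interpreter above is a tape machine in the Brainfuck family: a 30000-cell array, a cursor, and single-character opcodes. A minimal sketch of just the core dispatch, reduced from the file's match statements:
tape = [0] * 30000   # the cell array the interpreter walks
cursor = 0
for op in "+++.":    # '+' increments the current cell, '.' prints it as chr
    if op == "+":
        tape[cursor] += 1
    elif op == ">":
        cursor += 1
    elif op == "<":
        cursor -= 1
    elif op == ".":
        print(chr(tape[cursor]))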
32.477987
81
0.380519
474
5,164
4.040084
0.116034
0.248564
0.218277
0.109661
0.766057
0.725326
0.725326
0.725326
0.725326
0.725326
0
0.027545
0.514911
5,164
158
82
32.683544
0.736926
0.001549
0
0.762238
0
0
0.121497
0
0
0
0
0
0
1
0.006993
false
0
0
0
0.006993
0.146853
0
0
0
null
1
1
0
0
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
67c7cca0011310cb8f38580ded76c23ba7b93ea5
8,499
py
Python
notion_extensions/base/props/block/heading.py
yuta0306/notion-extensions
8a26f8c2abeef03a55c4d433439ce35ae5728a75
[ "MIT" ]
1
2022-01-12T15:53:43.000Z
2022-01-12T15:53:43.000Z
notion_extensions/base/props/block/heading.py
yuta0306/notion-extensions
8a26f8c2abeef03a55c4d433439ce35ae5728a75
[ "MIT" ]
1
2022-03-24T01:38:42.000Z
2022-03-24T01:38:42.000Z
notion_extensions/base/props/block/heading.py
yuta0306/notion-extensions
8a26f8c2abeef03a55c4d433439ce35ae5728a75
[ "MIT" ]
null
null
null
from typing import Dict, List, Union from .block import Block from ..common import Text, RichText __all__ = [ "Heading1", "Heading2", "Heading3", ] class Heading1(Block): """ Heading1 Heading1 property values of block Attributes ---------- Methods ------- clear() Clear data of title json() Return this class as dictionary """ TEMPLATE: Dict[str, Union[str, Dict]] = { "object": "block", "type": "heading_1", "heading_1": { "text": [], }, } def __init__( self, *text: Union[RichText, Text], ): super().__init__() base = [] for t in text: if isinstance(t, RichText): base.extend(list(t[t.key])) elif isinstance(t, Text): base.append(t) else: raise ValueError( f"Expected type is `RichText` or `Text`, but {type(t)} is given" ) self.__texts = RichText(key="text", *base) self.update( { "object": "block", "type": "heading_1", "heading_1": self.__texts, }, ) def __add__(self, other: Union[Text, List[Text]]): if isinstance(other, list): self.extend(other) return self self.append(other) return self def __iadd__(self, other: Union[Text, List[Text]]): return self.__add__(other) def append(self, text: Text) -> None: """ append(text: Text) Append Text to existing list of Text Parameters ---------- text : Text Text you append to RichText """ self.__texts.append(text) self["heading_1"] = self.__texts def extend(self, texts: List[Text]) -> None: """ extens(texts: Text) Append Text to existing list of Text Parameters ---------- text : list of Text List of text you append to RichText """ self.__texts.extend(texts) self["heading_1"] = self.__texts def insert(self, index: int, text: Text) -> None: """ insert(index: int, text: Text) Append Text to existing list of Text Parameters ---------- index : int Index you insert Text into text : Text Text you insert into RichText """ self.__texts.insert(index, text) self["heading_1"] = self.__texts def pop(self, index=None): """ pop(text: Text) Pop Text to existing list of Text Parameters ---------- index : int, default=None Text you pop from RichText """ item = self.__texts.pop(index) self["heading_1"] = self.__texts return item class Heading2(Block): """ Heading2 Heading2 property values of block Attributes ---------- Methods ------- clear() Clear data of title json() Return this class as dictionary """ TEMPLATE: Dict[str, Union[str, Dict]] = { "object": "block", "type": "heading_2", "heading_2": { "text": [], }, } def __init__( self, *text: Union[RichText, Text], ): super().__init__() base = [] for t in text: if isinstance(t, RichText): base.extend(list(t[t.key])) elif isinstance(t, Text): base.append(t) else: raise ValueError( f"Expected type is `RichText` or `Text`, but {type(t)} is given" ) self.__texts = RichText(key="text", *base) self.update( { "object": "block", "type": "heading_2", "heading_2": self.__texts, }, ) def __add__(self, other: Union[Text, List[Text]]): if isinstance(other, list): self.extend(other) return self self.append(other) return self def __iadd__(self, other: Union[Text, List[Text]]): return self.__add__(other) def append(self, text: Text) -> None: """ append(text: Text) Append Text to existing list of Text Parameters ---------- text : Text Text you append to RichText """ self.__texts.append(text) self["heading_2"] = self.__texts def extend(self, texts: List[Text]) -> None: """ extens(texts: Text) Append Text to existing list of Text Parameters ---------- text : list of Text List of text you append to RichText """ self.__texts.extend(texts) self["heading_2"] = self.__texts def insert(self, index: int, text: Text) -> None: """ insert(index: int, 
text: Text) Append Text to existing list of Text Parameters ---------- index : int Index you insert Text into text : Text Text you insert into RichText """ self.__texts.insert(index, text) self["heading_2"] = self.__texts def pop(self, index=None): """ pop(text: Text) Pop Text to existing list of Text Parameters ---------- index : int, default=None Text you pop from RichText """ item = self.__texts.pop(index) self["heading_2"] = self.__texts return item class Heading3(Block): """ Heading3 Heading3 property values of block Attributes ---------- Methods ------- clear() Clear data of title json() Return this class as dictionary """ TEMPLATE: Dict[str, Union[str, Dict]] = { "object": "block", "type": "heading_3", "heading_3": { "text": [], }, } def __init__( self, *text: Union[RichText, Text], ): super().__init__() base = [] for t in text: if isinstance(t, RichText): base.extend(list(t[t.key])) elif isinstance(t, Text): base.append(t) else: raise ValueError( f"Expected type is `RichText` or `Text`, but {type(t)} is given" ) self.__texts = RichText(key="text", *base) self.update( { "object": "block", "type": "heading_3", "heading_3": self.__texts, }, ) def __add__(self, other: Union[Text, List[Text]]): if isinstance(other, list): self.extend(other) return self self.append(other) return self def __iadd__(self, other: Union[Text, List[Text]]): return self.__add__(other) def append(self, text: Text) -> None: """ append(text: Text) Append Text to existing list of Text Parameters ---------- text : Text Text you append to RichText """ self.__texts.append(text) self["heading_3"] = self.__texts def extend(self, texts: List[Text]) -> None: """ extens(texts: Text) Append Text to existing list of Text Parameters ---------- text : list of Text List of text you append to RichText """ self.__texts.extend(texts) self["heading_3"] = self.__texts def insert(self, index: int, text: Text) -> None: """ insert(index: int, text: Text) Append Text to existing list of Text Parameters ---------- index : int Index you insert Text into text : Text Text you insert into RichText """ self.__texts.insert(index, text) self["heading_3"] = self.__texts def pop(self, index=None): """ pop(text: Text) Pop Text to existing list of Text Parameters ---------- index : int, default=None Text you pop from RichText """ item = self.__texts.pop(index) self["heading_3"] = self.__texts return item
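A hedged usage sketch for the heading classes above. The import path follows the repo layout in this record, and append/insert/json come from the methods and docstrings shown; the Text constructor signature is an assumption, since only its name and role appear in this file.
from notion_extensions.base.props.block.heading import Heading1
from notion_extensions.base.props.common import Text

heading = Heading1(Text("Release notes"))   # Text("...") signature is assumed
heading += Text(" (draft)")                 # __iadd__ delegates to append()
heading.insert(0, Text(">> "))              # insert at index 0
print(heading.json())                       # docstring: return block as dict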
23.873596
84
0.484292
877
8,499
4.524515
0.087799
0.074849
0.045363
0.054435
0.948085
0.932964
0.932208
0.917087
0.917087
0.917087
0
0.006993
0.394282
8,499
355
85
23.940845
0.763792
0.26356
0
0.786585
0
0
0.101302
0
0
0
0
0
0
1
0.128049
false
0
0.018293
0.018293
0.256098
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
67f38454dd8443f0b3721ee0802f2ed82a45cc5e
4,120
py
Python
asnets/experiments/det_parking.py
xf1590281/ASNets
5f4b29fb62a5e72004b813228442d06246c9ec33
[ "MIT" ]
21
2017-12-05T13:27:36.000Z
2021-11-16T20:32:33.000Z
asnets/experiments/det_parking.py
xf1590281/ASNets
5f4b29fb62a5e72004b813228442d06246c9ec33
[ "MIT" ]
2
2018-07-16T12:15:46.000Z
2020-10-31T00:02:49.000Z
asnets/experiments/det_parking.py
xf1590281/ASNets
5f4b29fb62a5e72004b813228442d06246c9ec33
[ "MIT" ]
7
2018-03-19T13:45:13.000Z
2022-03-24T07:52:20.000Z
"""For experiments on parking from IPC'08 learning track.""" PDDL_DIR = '../problems/ipc08-learn/parking/' COMMON_PDDLS = ['learning/parking-typed.pddl'] TRAIN_PDDLS = [ # 'learning/bootstrap/typed/bootstrap-typed-01.pddl', # 'learning/bootstrap/typed/bootstrap-typed-02.pddl', # 'learning/bootstrap/typed/bootstrap-typed-03.pddl', # 'learning/bootstrap/typed/bootstrap-typed-04.pddl', # 'learning/bootstrap/typed/bootstrap-typed-05.pddl', # 'learning/bootstrap/typed/bootstrap-typed-06.pddl', 'learning/bootstrap/typed/bootstrap-typed-07.pddl', # 'learning/bootstrap/typed/bootstrap-typed-08.pddl', 'learning/bootstrap/typed/bootstrap-typed-09.pddl', # 'learning/bootstrap/typed/bootstrap-typed-10.pddl', 'learning/bootstrap/typed/bootstrap-typed-11.pddl', # 'learning/bootstrap/typed/bootstrap-typed-12.pddl', 'learning/bootstrap/typed/bootstrap-typed-13.pddl', # 'learning/bootstrap/typed/bootstrap-typed-14.pddl', 'learning/bootstrap/typed/bootstrap-typed-15.pddl', # 'learning/bootstrap/typed/bootstrap-typed-16.pddl', 'learning/bootstrap/typed/bootstrap-typed-17.pddl', # 'learning/bootstrap/typed/bootstrap-typed-18.pddl', 'learning/bootstrap/typed/bootstrap-typed-19.pddl', # 'learning/bootstrap/typed/bootstrap-typed-20.pddl', 'learning/bootstrap/typed/bootstrap-typed-21.pddl', # 'learning/bootstrap/typed/bootstrap-typed-22.pddl', 'learning/bootstrap/typed/bootstrap-typed-23.pddl', # 'learning/bootstrap/typed/bootstrap-typed-24.pddl', 'learning/bootstrap/typed/bootstrap-typed-25.pddl', # 'learning/bootstrap/typed/bootstrap-typed-26.pddl', 'learning/bootstrap/typed/bootstrap-typed-27.pddl', # 'learning/bootstrap/typed/bootstrap-typed-28.pddl', # 'learning/bootstrap/typed/bootstrap-typed-29.pddl', # 'learning/bootstrap/typed/bootstrap-typed-30.pddl', ] # yapf: disable TRAIN_NAMES = None TEST_RUNS = [ (['testing/target/typed/parking-target--c10-05-typed.pddl'], None), (['testing/target/typed/parking-target--c10-07-typed.pddl'], None), (['testing/target/typed/parking-target--c10-08-typed.pddl'], None), (['testing/target/typed/parking-target--c10-09-typed.pddl'], None), (['testing/target/typed/parking-target--c10-10-typed.pddl'], None), (['testing/target/typed/parking-target--c12-06-typed.pddl'], None), (['testing/target/typed/parking-target--c12-07-typed.pddl'], None), (['testing/target/typed/parking-target--c12-08-typed.pddl'], None), (['testing/target/typed/parking-target--c12-09-typed.pddl'], None), (['testing/target/typed/parking-target--c12-10-typed.pddl'], None), (['testing/target/typed/parking-target--c13-06-typed.pddl'], None), (['testing/target/typed/parking-target--c13-07-typed.pddl'], None), (['testing/target/typed/parking-target--c13-08-typed.pddl'], None), (['testing/target/typed/parking-target--c13-09-typed.pddl'], None), (['testing/target/typed/parking-target--c13-10-typed.pddl'], None), (['testing/target/typed/parking-target--c15-01-typed.pddl'], None), (['testing/target/typed/parking-target--c15-02-typed.pddl'], None), (['testing/target/typed/parking-target--c15-03-typed.pddl'], None), (['testing/target/typed/parking-target--c15-04-typed.pddl'], None), (['testing/target/typed/parking-target--c15-05-typed.pddl'], None), (['testing/target/typed/parking-target--c15-06-typed.pddl'], None), (['testing/target/typed/parking-target--c15-07-typed.pddl'], None), (['testing/target/typed/parking-target--c15-08-typed.pddl'], None), (['testing/target/typed/parking-target--c15-09-typed.pddl'], None), (['testing/target/typed/parking-target--c15-10-typed.pddl'], None), 
(['testing/target/typed/parking-target--c20-01-typed.pddl'], None), (['testing/target/typed/parking-target--c20-02-typed.pddl'], None), (['testing/target/typed/parking-target--c20-03-typed.pddl'], None), (['testing/target/typed/parking-target--c20-04-typed.pddl'], None), (['testing/target/typed/parking-target--c20-05-typed.pddl'], None), ] # yapf: disable
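A hedged sketch of how a harness might resolve these settings; the experiment runner itself is not part of this file, so the loop below is illustrative only.
import os

for problem_pddls, names in TEST_RUNS:
    # each run pairs problem PDDLs with an optional plan-name subset;
    # names is None for every run above, i.e. no subset is requested
    paths = [os.path.join(PDDL_DIR, rel) for rel in COMMON_PDDLS + problem_pddls]
    print(paths, names)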
58.857143
71
0.704126
542
4,120
5.343173
0.112546
0.290055
0.227901
0.321133
0.915401
0.90297
0.490677
0.490677
0.490677
0
0
0.049237
0.092961
4,120
69
72
59.710145
0.725716
0.259951
0
0
0
0
0.730069
0.730069
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e1db3116ac364106b3eba50bc2bb72069308945b
8,891
py
Python
pooling/legacy_sort.py
jiforcen/orderedweightedpooling
8cf13f86fcfb132080b5dd56463701f597bf3b60
[ "MIT" ]
1
2020-01-15T21:17:57.000Z
2020-01-15T21:17:57.000Z
pooling/legacy_sort.py
jiforcen/orderedweightedpooling
8cf13f86fcfb132080b5dd56463701f597bf3b60
[ "MIT" ]
null
null
null
pooling/legacy_sort.py
jiforcen/orderedweightedpooling
8cf13f86fcfb132080b5dd56463701f597bf3b60
[ "MIT" ]
2
2020-07-03T01:56:09.000Z
2021-12-20T20:44:49.000Z
import numpy as np import tensorflow as tf def sort_p2x2(x): _, pool_height, pool_width, channels, elems = x.get_shape().as_list() x = tf.reshape(x, [-1, elems]) rows, _ = x.get_shape().as_list() # 1st stage x_1 = tf.slice(x, [0, 0], [-1, 1]) x_2 = tf.slice(x, [0, 1], [-1, 1]) x_3 = tf.slice(x, [0, 2], [-1, 1]) x_4 = tf.slice(x, [0, 3], [-1, 1]) x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4], axis=1)) # 2nd stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4], axis=1)) # 3rd stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4], axis=1)) x = tf.reshape(x, [-1, pool_height, pool_width, channels, elems]) # Reshape tensor return x def sort_p3x3(x): _, pool_height, pool_width, channels, elems = x.get_shape().as_list() x = tf.reshape(x, [-1, elems]) rows, _ = x.get_shape().as_list() # 1st stage x_1 = tf.slice(x, [0, 0], [-1, 1]) x_2 = tf.slice(x, [0, 1], [-1, 1]) x_3 = tf.slice(x, [0, 2], [-1, 1]) x_4 = tf.slice(x, [0, 3], [-1, 1]) x_5 = tf.slice(x, [0, 4], [-1, 1]) x_6 = tf.slice(x, [0, 5], [-1, 1]) x_7 = tf.slice(x, [0, 6], [-1, 1]) x_8 = tf.slice(x, [0, 7], [-1, 1]) x_9 = tf.slice(x, [0, 8], [-1, 1]) x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x_45_greater = tf.greater(x_4, x_5) x_aux = tf.where(x_45_greater, x_4, x_5) x_5 = tf.where(tf.logical_not(x_45_greater), x_4, x_5) x_4 = x_aux x_56_greater = tf.greater(x_5, x_6) x_aux = tf.where(x_56_greater, x_5, x_6) x_6 = tf.where(tf.logical_not(x_56_greater), x_5, x_6) x_5 = x_aux x_67_greater = tf.greater(x_6, x_7) x_aux = tf.where(x_67_greater, x_6, x_7) x_7 = tf.where(tf.logical_not(x_67_greater), x_6, x_7) x_6 = x_aux x_78_greater = tf.greater(x_7, x_8) x_aux = tf.where(x_78_greater, x_7, x_8) x_8 = tf.where(tf.logical_not(x_78_greater), x_7, x_8) x_7 = x_aux x_89_greater = tf.greater(x_8, x_9) x_aux = tf.where(x_89_greater, x_8, x_9) x_9 = tf.where(tf.logical_not(x_89_greater), x_8, x_9) x_8 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 2nd stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = 
tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x_45_greater = tf.greater(x_4, x_5) x_aux = tf.where(x_45_greater, x_4, x_5) x_5 = tf.where(tf.logical_not(x_45_greater), x_4, x_5) x_4 = x_aux x_56_greater = tf.greater(x_5, x_6) x_aux = tf.where(x_56_greater, x_5, x_6) x_6 = tf.where(tf.logical_not(x_56_greater), x_5, x_6) x_5 = x_aux x_67_greater = tf.greater(x_6, x_7) x_aux = tf.where(x_67_greater, x_6, x_7) x_7 = tf.where(tf.logical_not(x_67_greater), x_6, x_7) x_6 = x_aux x_78_greater = tf.greater(x_7, x_8) x_aux = tf.where(x_78_greater, x_7, x_8) x_8 = tf.where(tf.logical_not(x_78_greater), x_7, x_8) x_7 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 3rd stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x_45_greater = tf.greater(x_4, x_5) x_aux = tf.where(x_45_greater, x_4, x_5) x_5 = tf.where(tf.logical_not(x_45_greater), x_4, x_5) x_4 = x_aux x_56_greater = tf.greater(x_5, x_6) x_aux = tf.where(x_56_greater, x_5, x_6) x_6 = tf.where(tf.logical_not(x_56_greater), x_5, x_6) x_5 = x_aux x_67_greater = tf.greater(x_6, x_7) x_aux = tf.where(x_67_greater, x_6, x_7) x_7 = tf.where(tf.logical_not(x_67_greater), x_6, x_7) x_6 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 4th stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x_45_greater = tf.greater(x_4, x_5) x_aux = tf.where(x_45_greater, x_4, x_5) x_5 = tf.where(tf.logical_not(x_45_greater), x_4, x_5) x_4 = x_aux x_56_greater = tf.greater(x_5, x_6) x_aux = tf.where(x_56_greater, x_5, x_6) x_6 = tf.where(tf.logical_not(x_56_greater), x_5, x_6) x_5 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 5th stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x_45_greater = tf.greater(x_4, x_5) x_aux = tf.where(x_45_greater, x_4, x_5) x_5 = tf.where(tf.logical_not(x_45_greater), x_4, x_5) x_4 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 6th stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x_34_greater = tf.greater(x_3, x_4) x_aux = tf.where(x_34_greater, 
x_3, x_4) x_4 = tf.where(tf.logical_not(x_34_greater), x_3, x_4) x_3 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 7th stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x_23_greater = tf.greater(x_2, x_3) x_aux = tf.where(x_23_greater, x_2, x_3) x_3 = tf.where(tf.logical_not(x_23_greater), x_2, x_3) x_2 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # 8th stage x_12_greater = tf.greater(x_1, x_2) x_aux = tf.where(x_12_greater, x_1, x_2) x_2 = tf.where(tf.logical_not(x_12_greater),x_1, x_2) x_1 = x_aux x = tf.squeeze(tf.stack([x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9], axis=1)) # Reshape x = tf.reshape(x, [-1, pool_height, pool_width, channels, elems]) # Reshape tensor return x
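Each repeated stanza above is one compare-exchange step of a sorting network over the pooled elements. The greater/where pair computes a max and a min, so the same step can be expressed, as a hedged refactoring sketch, with tf.maximum/tf.minimum:
import tensorflow as tf

def compare_exchange(a, b):
    # tf.where(a > b, a, b) is tf.maximum(a, b); the logical_not branch is
    # tf.minimum(a, b) -- together they reproduce one x_aux/x_n swap above.
    return tf.maximum(a, b), tf.minimum(a, b)

# e.g. the first 1st-stage step of sort_p2x2:
# x_1, x_2 = compare_exchange(x_1, x_2)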
31.528369
86
0.631088
2,073
8,891
2.304872
0.028461
0.210967
0.05023
0.036836
0.966722
0.96463
0.958141
0.958141
0.951863
0.951863
0
0.110265
0.217636
8,891
281
87
31.640569
0.576625
0.016534
0
0.936893
0
0
0
0
0
0
0
0
0
1
0.009709
false
0
0.009709
0
0.029126
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
e1dc258245d5fc011e0dd3a9f4cc7cc663c0a2cd
2,110
py
Python
pytest_pilot/test_cases/basic/test_basic.py
The-Compiler/python-pytest-pilot
30be93c0b21fce225f5aa6d33687f4628af6b124
[ "BSD-3-Clause" ]
1
2020-10-30T04:44:41.000Z
2020-10-30T04:44:41.000Z
pytest_pilot/test_cases/basic/test_basic.py
The-Compiler/python-pytest-pilot
30be93c0b21fce225f5aa6d33687f4628af6b124
[ "BSD-3-Clause" ]
18
2019-08-09T16:03:43.000Z
2020-10-31T20:41:42.000Z
pytest_pilot/test_cases/basic/test_basic.py
The-Compiler/python-pytest-pilot
30be93c0b21fce225f5aa6d33687f4628af6b124
[ "BSD-3-Clause" ]
2
2020-10-01T17:11:20.000Z
2020-10-29T14:00:37.000Z
import pytest

from .conftest import flavour, envid, silo, hardfilter


@silo
def test_silo(easymarkers):
    assert easymarkers.silo is True
    assert easymarkers.hf is False
    assert easymarkers.envid is None or isinstance(easymarkers.envid, str)
    assert easymarkers.flavour is None or easymarkers.flavour in ("red", "yellow")


@hardfilter
def test_hf(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.hf is False or easymarkers.hf is True
    assert easymarkers.envid is None or isinstance(easymarkers.envid, str)
    assert easymarkers.flavour is None or easymarkers.flavour in ("red", "yellow")


@flavour('yellow')
@hardfilter
def test_yellow_noenv(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.flavour is None or easymarkers.flavour == 'yellow'
    assert easymarkers.envid is None or isinstance(easymarkers.envid, str)
    assert easymarkers.hf is False or easymarkers.hf is True


@flavour('yellow')
@envid('env1')
def test_yellow_env1(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.flavour is None or easymarkers.flavour == 'yellow'
    assert easymarkers.envid == 'env1'
    assert easymarkers.hf is False


@envid('env2')
def test_env2(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.envid == 'env2'
    assert easymarkers.hf is False
    assert easymarkers.flavour is None or easymarkers.flavour in ("red", "yellow")


@flavour('red')
def test_red_noenv(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.envid is None or isinstance(easymarkers.envid, str)
    assert easymarkers.flavour is None or easymarkers.flavour == 'red'
    assert easymarkers.hf is False


def test_nomark(easymarkers):
    """we use this opportunity to test that this raises a value error"""
    assert easymarkers.silo is False
    assert easymarkers.envid is None or isinstance(easymarkers.envid, str)
    assert easymarkers.hf is False
    assert easymarkers.flavour is None or easymarkers.flavour in ("red", "yellow")
    with pytest.raises(ValueError):
        flavour('pink')
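A hedged sketch of adding one more test in the same style; the marker decorators and the easymarkers fixture come from the local conftest imported above, and the assertion pattern mirrors the existing tests.
@flavour('red')
@envid('env1')
def test_red_env1(easymarkers):
    assert easymarkers.silo is False
    assert easymarkers.flavour is None or easymarkers.flavour == 'red'
    assert easymarkers.envid == 'env1'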
31.492537
82
0.747867
280
2,110
5.6
0.142857
0.303571
0.061224
0.137755
0.797194
0.742347
0.742347
0.73023
0.705357
0.653061
0
0.003429
0.170616
2,110
66
83
31.969697
0.892571
0.029384
0
0.595745
0
0
0.042157
0
0
0
0
0
0.595745
1
0.148936
false
0
0.042553
0
0.191489
0
0
0
0
null
1
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
c015aea69c2a26f60e4edb4ad8fe0a3ccefd82d8
2,880
py
Python
interview/candidate_field.py
Jacklovely/django
4c4d0d37868d774311fbb25cfab13fa9de38720e
[ "Apache-2.0" ]
63
2020-10-14T13:13:58.000Z
2022-03-22T15:55:16.000Z
interview/candidate_field.py
Jacklovely/django
4c4d0d37868d774311fbb25cfab13fa9de38720e
[ "Apache-2.0" ]
5
2021-04-08T20:24:53.000Z
2021-09-22T19:38:40.000Z
interview/candidate_field.py
pywjh/-recruitment-django
5ad7a0b725f0a1557b5b220d3197b1d9dbda5de4
[ "Apache-2.0" ]
28
2020-10-14T13:23:45.000Z
2022-02-15T09:21:17.000Z
# Group the displayed fields into blocks: basic information, first-round
# interview record, second-round interview (professional re-interview), and
# HR re-interview.
default_fieldsets = (
    (None, {'fields': (
        "userid",
        ("username", "city", "phone"),
        ("email", "apply_position", "born_address", "gender", "candidate_remark"),
        ("bachelor_school", "master_school", "doctor_school"),
        ("major", "degree"),
        "test_score_of_general_ability",
        "paper_score",)}),
    ('第一轮面试', {'fields': (  # "First-round interview"
        ("first_score", "first_learning_ability", "first_professional_competency"),
        "first_advantage", "first_disadvantage", "first_result",
        "first_recommend_position", "first_interviewer_user", "first_remark",)}),
    ('第二轮面试(专业复试)', {'fields': (  # "Second-round interview (professional)"
        "second_score",
        ("second_learning_ability", "second_professional_competency"),
        ("second_pursue_of_excellence", "second_communication_ability",
         "second_pressure_score"),
        "second_advantage", "second_disadvantage", "second_result",
        "second_recommend_position", "second_interviewer_user", "second_remark",)}),
    ('HR复试', {'fields': (  # "HR re-interview"
        "hr_score",
        ("hr_responsibility", "hr_communication_ability", "hr_logic_ability"),
        ("hr_potential", "hr_stability"),
        "hr_advantage", "hr_disadvantage", "hr_result",
        "hr_interviewer_user", "hr_remark",)}),
)

default_fieldsets_first = (
    (None, {'fields': (
        "userid",
        ("username", "city", "phone"),
        ("email", "apply_position", "born_address", "gender", "candidate_remark"),
        ("bachelor_school", "master_school", "doctor_school"),
        ("major", "degree"),
        "test_score_of_general_ability",
        "paper_score",)}),
    ('第一轮面试', {'fields': (
        ("first_score", "first_learning_ability", "first_professional_competency"),
        "first_advantage", "first_disadvantage", "first_result",
        "first_recommend_position", "first_interviewer_user", "first_remark",)}),
)

default_fieldsets_second = (
    (None, {'fields': (
        "userid",
        ("username", "city", "phone"),
        ("email", "apply_position", "born_address", "gender", "candidate_remark"),
        ("bachelor_school", "master_school", "doctor_school"),
        ("major", "degree"),
        "test_score_of_general_ability",
        "paper_score",)}),
    ('第一轮面试', {'fields': (
        ("first_score", "first_learning_ability", "first_professional_competency"),
        "first_advantage", "first_disadvantage", "first_result",
        "first_recommend_position", "first_interviewer_user", "first_remark",)}),
    ('第二轮面试(专业复试)', {'fields': (
        "second_score",
        ("second_learning_ability", "second_professional_competency"),
        ("second_pursue_of_excellence", "second_communication_ability",
         "second_pressure_score"),
        "second_advantage", "second_disadvantage", "second_result",
        "second_recommend_position", "second_interviewer_user", "second_remark",)}),
)
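A hedged sketch of how these tuples might be consumed. ModelAdmin.get_fieldsets is standard Django admin API; the Candidate model and the group names are illustrative assumptions not present in this file.
from django.contrib import admin

class CandidateAdmin(admin.ModelAdmin):
    def get_fieldsets(self, request, obj=None):
        # show the reduced layouts to the matching interviewer groups
        # (group names are assumptions); fall back to the full layout
        groups = {g.name for g in request.user.groups.all()}
        if 'first_interviewer' in groups:
            return default_fieldsets_first
        if 'second_interviewer' in groups:
            return default_fieldsets_second
        return default_fieldsets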
70.243902
120
0.643056
271
2,880
6.350554
0.206642
0.052295
0.027891
0.041836
0.859965
0.859965
0.859965
0.859965
0.859965
0.859965
0
0
0.181944
2,880
41
121
70.243902
0.730475
0.013889
0
0.605263
0
0
0.615358
0.266291
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
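The record above only defines the fieldset tuples; in a Django project they are typically handed to a ModelAdmin, e.g. by overriding get_fieldsets. A hedged sketch of that wiring, assuming a Candidate model and a group-based choice of layout — both are illustrative, not from the record:

# admin.py -- hypothetical sketch of how the fieldset tuples above are
# usually consumed; Candidate and the group names are assumptions.
from django.contrib import admin

from .candidate_field import (default_fieldsets, default_fieldsets_first,
                              default_fieldsets_second)
from .models import Candidate  # assumed model


@admin.register(Candidate)
class CandidateAdmin(admin.ModelAdmin):

    def get_fieldsets(self, request, obj=None):
        # pick the layout by the requesting user's group (assumed convention)
        groups = {g.name for g in request.user.groups.all()}
        if "interviewer_first" in groups:
            return default_fieldsets_first
        if "interviewer_second" in groups:
            return default_fieldsets_second
        return default_fieldsets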
c01f3d53ae4e89ed0fbb634d431490b2b75929a9
14,810
py
Python
aiotgbot/handler_table.py
gleb-chipiga/aiotgbot
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
[ "MIT" ]
null
null
null
aiotgbot/handler_table.py
gleb-chipiga/aiotgbot
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
[ "MIT" ]
null
null
null
aiotgbot/handler_table.py
gleb-chipiga/aiotgbot
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
[ "MIT" ]
null
null
null
import re
from typing import Callable, Final, Iterable, List, Optional, Union

from frozenlist import FrozenList

from .bot import Bot, FilterProtocol, Handler, HandlerCallable
from .bot_update import BotUpdate
from .constants import ContentType, UpdateType
from .filters import (CallbackQueryDataFilter, CommandsFilter,
                      ContentTypeFilter, MessageTextFilter, StateFilter,
                      UpdateTypeFilter)

__all__ = ('HandlerTable',)

HandlerDecorator = Callable[[HandlerCallable], HandlerCallable]


class HandlerTable:

    def __init__(self) -> None:
        self._handlers: Final[FrozenList[Handler]] = FrozenList()

    def freeze(self) -> None:
        self._handlers.freeze()

    @property
    def frozen(self) -> bool:
        return self._handlers.frozen

    async def get_handler(
        self, bot: Bot, update: BotUpdate
    ) -> Optional[HandlerCallable]:
        for handler in self._handlers:
            if await handler.check(bot, update):
                return handler.callable
        return None

    def message_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        commands: Optional[Iterable[str]] = None,
        content_types: Optional[Iterable[ContentType]] = None,
        text_match: Union[str, 're.Pattern[str]', None] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.MESSAGE)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if commands is not None:
            handler_filters.append(CommandsFilter(tuple(commands)))
        if content_types is not None:
            handler_filters.append(ContentTypeFilter(tuple(content_types)))
        if isinstance(text_match, re.Pattern):
            handler_filters.append(MessageTextFilter(text_match))
        elif isinstance(text_match, str):
            handler_filters.append(MessageTextFilter(re.compile(text_match)))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def message(
        self,
        state: Optional[str] = None,
        commands: Optional[Iterable[str]] = None,
        content_types: Optional[Iterable[ContentType]] = None,
        text_match: Union[str, 're.Pattern[str]', None] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.message_handler(handler=handler, state=state,
                                 commands=commands,
                                 content_types=content_types,
                                 text_match=text_match, filters=filters)
            return handler
        return decorator

    def edited_message_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.EDITED_MESSAGE)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def edited_message(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.edited_message_handler(handler=handler, state=state,
                                        filters=filters)
            return handler
        return decorator

    def channel_post_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.CHANNEL_POST)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def channel_post(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.channel_post_handler(handler=handler, state=state,
                                      filters=filters)
            return handler
        return decorator

    def edited_channel_post_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.EDITED_CHANNEL_POST)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def edited_channel_post(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.edited_channel_post_handler(handler=handler, state=state,
                                             filters=filters)
            return handler
        return decorator

    def inline_query_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.INLINE_QUERY)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def inline_query(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.inline_query_handler(handler=handler, state=state,
                                      filters=filters)
            return handler
        return decorator

    def chosen_inline_result_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.CHOSEN_INLINE_RESULT)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def chosen_inline_result(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.chosen_inline_result_handler(handler=handler, state=state,
                                              filters=filters)
            return handler
        return decorator

    def callback_query_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        data_match: Union[str, 're.Pattern[str]', None] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.CALLBACK_QUERY)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if isinstance(data_match, re.Pattern):
            handler_filters.append(CallbackQueryDataFilter(data_match))
        elif isinstance(data_match, str):
            handler_filters.append(CallbackQueryDataFilter(
                re.compile(data_match)))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def callback_query(
        self,
        state: Optional[str] = None,
        data_match: Union[str, 're.Pattern[str]', None] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.callback_query_handler(handler=handler, state=state,
                                        data_match=data_match,
                                        filters=filters)
            return handler
        return decorator

    def shipping_query_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.SHIPPING_QUERY)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def shipping_query(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.shipping_query_handler(handler=handler, state=state,
                                        filters=filters)
            return handler
        return decorator

    def pre_checkout_query_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.PRE_CHECKOUT_QUERY)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def pre_checkout_query(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.pre_checkout_query_handler(handler=handler, state=state,
                                            filters=filters)
            return handler
        return decorator

    def poll_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.POLL)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def poll(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.poll_handler(handler=handler, state=state, filters=filters)
            return handler
        return decorator

    def poll_answer_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.POLL_ANSWER)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def poll_answer(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.poll_answer_handler(handler=handler, state=state,
                                     filters=filters)
            return handler
        return decorator

    def my_chat_member_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.MY_CHAT_MEMBER)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def my_chat_member(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.my_chat_member_handler(handler=handler, state=state,
                                        filters=filters)
            return handler
        return decorator

    def chat_member_handler(
        self,
        handler: HandlerCallable,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> None:
        update_type_filter = UpdateTypeFilter(UpdateType.CHAT_MEMBER)
        handler_filters: List[FilterProtocol] = [update_type_filter]
        if state is not None:
            handler_filters.append(StateFilter(state))
        if filters is not None:
            handler_filters.extend(filters)
        self._handlers.append(Handler(handler, tuple(handler_filters)))

    def chat_member(
        self,
        state: Optional[str] = None,
        filters: Optional[Iterable[FilterProtocol]] = None
    ) -> HandlerDecorator:
        def decorator(handler: HandlerCallable) -> HandlerCallable:
            self.chat_member_handler(handler=handler, state=state,
                                     filters=filters)
            return handler
        return decorator
41.368715
79
0.6447
1,439
14,810
6.464906
0.059069
0.087284
0.027088
0.048157
0.862088
0.856068
0.830915
0.830915
0.818876
0.809739
0
0
0.270628
14,810
357
80
41.484594
0.861229
0
0
0.70405
0
0
0.004862
0
0
0
0
0
0
1
0.130841
false
0
0.021807
0.003115
0.246106
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
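For context on the record above: HandlerTable is registration-only, so a bot builds a table, decorates handlers, and freezes it before updates are dispatched through get_handler. A rough usage sketch, assuming the aiotgbot package re-exports these names at top level and that handlers receive (bot, update); the send_message call and the update attributes are assumptions about the surrounding API, not from the record:

# hypothetical usage sketch of the HandlerTable above
from aiotgbot import Bot, BotUpdate, HandlerTable  # assumed package exports

table = HandlerTable()


@table.message(commands=['start'])
async def start(bot: Bot, update: BotUpdate) -> None:
    # assumed Bot/BotUpdate API; the record only defines dispatch
    await bot.send_message(update.message.chat.id, 'Hello!')


@table.callback_query(data_match=r'^page:\d+$')
async def paginate(bot: Bot, update: BotUpdate) -> None:
    ...  # dispatched when the callback data matches the pattern


table.freeze()  # the FrozenList must be frozen before the bot starts polling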
c02b4fd11e56d48cd46819bb3b195159abcf64c8
3,625
py
Python
boofuzz/requests/rendezvous.py
youngcraft/boofuzz-modbus
bfeb48345b56797b48079e0620e7b06b27085789
[ "Apache-2.0" ]
23
2018-08-11T12:12:33.000Z
2022-01-28T10:22:49.000Z
boofuzz/requests/rendezvous.py
ctf-fuzzer/boofuzz-modbus
bfeb48345b56797b48079e0620e7b06b27085789
[ "Apache-2.0" ]
2
2018-07-24T15:15:40.000Z
2020-07-12T13:06:56.000Z
boofuzz/requests/rendezvous.py
ctf-fuzzer/boofuzz-modbus
bfeb48345b56797b48079e0620e7b06b27085789
[ "Apache-2.0" ]
10
2018-04-02T13:21:36.000Z
2022-01-17T09:20:27.000Z
from boofuzz import *

s_initialize("trillian 1")
s_static("\x00\x00")  # transaction ID
s_static("\x00\x00")  # flags (standard query)
s_word(1, endian=">")  # number of questions
s_word(0, endian=">", fuzzable=False)  # answer RRs
s_word(0, endian=">", fuzzable=False)  # authority RRs
s_word(0, endian=">", fuzzable=False)  # additional RRs

# queries
s_lego("dns_hostname", "_presence._tcp.local")
s_word(0x000c, endian=">")  # type = pointer
s_word(0x8001, endian=">")  # class = flush

s_initialize("trillian 2")
if s_block_start("pamini.local"):
    if s_block_start("header"):
        s_static("\x00\x00")  # transaction ID
        s_static("\x00\x00")  # flags (standard query)
        s_word(2, endian=">")  # number of questions
        s_word(0, endian=">", fuzzable=False)  # answer RRs
        s_word(2, endian=">", fuzzable=False)  # authority RRs
        s_word(0, endian=">", fuzzable=False)  # additional RRs
    s_block_end()

    # queries
    s_lego("dns_hostname", "pamini.local")
    s_word(0x00ff, endian=">")  # type = any
    s_word(0x8001, endian=">")  # class = flush
s_block_end()

s_lego("dns_hostname", "pedram@PAMINI._presence._tcp")
s_word(0x00ff, endian=">")  # type = any
s_word(0x8001, endian=">")  # class = flush

# authoritative nameservers
s_static("\xc0")  # offset specifier
s_size("header", length=1)  # offset to pamini.local
s_static("\x00\x01")  # type = A (host address)
s_static("\x00\x01")  # class = in
s_static("\x00\x00\x00\xf0")  # ttl 4 minutes
s_static("\x00\x04")  # data length
s_static(chr(152) + chr(67) + chr(137) + chr(53))  # ip address
s_static("\xc0")  # offset specifier
s_size("pamini.local", length=1)  # offset to pedram@PAMINI...
s_static("\x00\x21")  # type = SRV (service location)
s_static("\x00\x01")  # class = in
s_static("\x00\x00\x00\xf0")  # ttl 4 minutes
s_static("\x00\x08")  # data length
s_static("\x00\x00")  # priority
s_static("\x00\x00")  # weight
s_static("\x14\xb2")  # port
s_static("\xc0")  # offset specifier
s_size("header", length=1)  # offset to pamini.local

s_initialize("trillian 3")
if s_block_start("pamini.local"):
    if s_block_start("header"):
        s_static("\x00\x00")  # transaction ID
        s_static("\x00\x00")  # flags (standard query)
        s_word(2, endian=">")  # number of questions
        s_word(0, endian=">", fuzzable=False)  # answer RRs
        s_word(2, endian=">", fuzzable=False)  # authority RRs
        s_word(0, endian=">", fuzzable=False)  # additional RRs
    s_block_end()

    # queries
    s_lego("dns_hostname", "pamini.local")
    s_word(0x00ff, endian=">")  # type = any
    s_word(0x0001, endian=">")  # class = in
s_block_end()

s_lego("dns_hostname", "pedram@PAMINI._presence._tcp")
s_word(0x00ff, endian=">")  # type = any
s_word(0x0001, endian=">")  # class = in

# authoritative nameservers
s_static("\xc0")  # offset specifier
s_size("header", length=1)  # offset to pamini.local
s_static("\x00\x01")  # type = A (host address)
s_static("\x00\x01")  # class = in
s_static("\x00\x00\x00\xf0")  # ttl 4 minutes
s_static("\x00\x04")  # data length
s_static(chr(152) + chr(67) + chr(137) + chr(53))  # ip address
s_static("\xc0")  # offset specifier
s_size("pamini.local", length=1)  # offset to pedram@PAMINI...
s_static("\x00\x21")  # type = SRV (service location)
s_static("\x00\x01")  # class = in
s_static("\x00\x00\x00\xf0")  # ttl 4 minutes
s_static("\x00\x08")  # data length
s_static("\x00\x00")  # priority
s_static("\x00\x00")  # weight
s_static("\x14\xb2")  # port
s_static("\xc0")  # offset specifier
s_size("header", length=1)  # offset to pamini.local
34.855769
63
0.651862
539
3,625
4.200371
0.159555
0.111307
0.114841
0.080389
0.946996
0.936837
0.936837
0.924028
0.924028
0.924028
0
0.071191
0.170759
3,625
104
64
34.855769
0.681969
0.294897
0
0.91358
0
0
0.231604
0.022517
0
0
0.024125
0
0
1
0
true
0
0.012346
0
0.012346
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
8
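The record above only declares the three mDNS request definitions; boofuzz needs a session to actually mutate and send them. A minimal driver sketch, assuming a multicast-DNS target at 224.0.0.251:5353 and the UDPSocketConnection class from recent boofuzz releases — both are assumptions, since the record includes no session code:

# hypothetical driver for the request definitions above
from boofuzz import Session, Target, UDPSocketConnection, s_get

session = Session(
    target=Target(connection=UDPSocketConnection("224.0.0.251", 5353)),
)
# each s_initialize(...) name above becomes a fuzzable node in the graph
for name in ("trillian 1", "trillian 2", "trillian 3"):
    session.connect(s_get(name))
session.fuzz()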
c06658612a6d2c5efcb501e98d48abd96c186b00
178
py
Python
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLE/__init__.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLE/__init__.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLE/__init__.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
"""GL Extrusion Routine Library (GLE) wrapper for OpenGL-ctypes""" from OpenGL.raw.GLE import * from OpenGL.raw.GLE.annotations import * from OpenGL.GLE.exceptional import *
35.6
67
0.758427
25
178
5.4
0.56
0.222222
0.192593
0.237037
0
0
0
0
0
0
0
0
0.134831
178
5
68
35.6
0.876623
0.337079
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
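Since the record above is a pure re-export shim, GLE extrusion calls are used directly from OpenGL.GLE. A tentative sketch using glePolyCylinder, which GLE provides for sweeping a polyline into a cylinder; the exact argument shapes are an assumption about the PyOpenGL wrapper, and the call only works inside a live GL context:

# hypothetical use of the re-exported GLE API; requires an active GL
# rendering context (e.g. a GLUT window) -- not part of the record.
from OpenGL.GLE import glePolyCylinder

points = [(0.0, 0.0, -1.0), (0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, 2.0)]
colors = [(1.0, 0.0, 0.0)] * len(points)  # one RGB triple per point
glePolyCylinder(points, colors, 0.25)     # radius 0.25, drawn at the current origin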
2204b8a5df3cf1755b48d3b125a8e9f87633a029
247,334
py
Python
py2_file.py
yesdolphin/soda
70f0fd5c58554d26eae606576263392f87be301b
[ "MIT" ]
null
null
null
py2_file.py
yesdolphin/soda
70f0fd5c58554d26eae606576263392f87be301b
[ "MIT" ]
null
null
null
py2_file.py
yesdolphin/soda
70f0fd5c58554d26eae606576263392f87be301b
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # 1. Moved Workflows into Services # 2. Sudo on consistent refresh # 3. Copy over cp and killall and cat # 4. Move ScriptMonitor to Backup # 5. Curl Brew # 6. Brew venv pypi mysql database bash_profile brew pypi phantomjs chromedriver geckodriver Chrome Firefox ChromeProfile # 7. Accounts # 8. Cookiess # 9. (Sublime Text Install Package: PackageResourceViewer, Glue, ImprovedMacros, Default File Type, PackageResourceViewer: Extract Package / PAULFONT / ) # Required: Mac OS Mojave class Setter_Upper(object): def __init__(self): developer_mode = False SETTINGS = '{\n\t"auto_complete": true, //#\n\t"auto_match_enabled": true,\n\t"color_scheme": "Packages/Color Scheme - Default/Monokai.tmTheme",\n\t"draw_indent_guides": true, //#\n\t"font_face": "Menlo",\n\t"font_size": 16,\n\t"ignored_packages":\n\t[\n\t\t"Vintage"\n\t],\n\t"line_numbers": true, //#\n\t"line_padding_bottom": 0,\n\t"line_padding_top": 0,\n\t"show_definitions": true, //#\n\t"tab_size": 2,\n\t"theme": "Adaptive.sublime-theme",\n\t"translate_tabs_to_spaces": true,\n\t"update_check": false\n}\n' KEYBINDINGS = '[\n { "keys": ["ctrl+,", "ctrl+1"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":1} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+2"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":2} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+3"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":3} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+4"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":4} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+5"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":5} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+6"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":6} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+7"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":7} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+8"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":8} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+9"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "fold_by_level", "args":{"level":9} } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+0"], "command": "run_multiple_commands", "args": { "commands": [ {"command": "unfold_all", } , {"command": "move_to", "args":{"to":"bof"}} ] } },\n { "keys": ["ctrl+,", "ctrl+t"], "command": "fold_tag_attributes" },\n { "keys": ["ctrl+,", "ctrl+,"], "command": "run_macro_file", "args": {"file": "res://Packages/Default/Delete to Hard EOL.sublime-macro"} },\n { "keys": ["ctrl+shift+tab"], "command": "prev_view" },\n { "keys": ["ctrl+tab"], "command": "next_view" },\n { "keys": ["super+[", "super+["], "command": "noop"},\n { "keys": ["super+,", "super+["], "command": "noop"},\n { "keys": ["super+,", "super+]"], 
"command": "noop"},\n { "keys": ["super+]", "super+]"], "command": "noop"},\n { "keys": ["super+]", "super+,"], "command": "noop"},\n { "keys": ["super+]", "super+["], "command": "noop"},\n { "keys": ["super+[", "super+]"], "command": "noop"},\n { "keys": ["super+backspace"], "command": "delete_word", "args": { "forward": false, "sub_words": true } },\n { "keys": ["super+delete"], "command": "delete_word", "args": { "forward": true, "sub_words": true } },\n { "keys": ["ctrl+backspace"], "command": "run_macro_file", "args": {"file": "res://Packages/Default/Delete to Hard BOL.sublime-macro"} },\n { "keys": ["ctrl+delete"], "command": "run_macro_file", "args": {"file": "res://Packages/Default/Delete to Hard EOL.sublime-macro"} },\n { "keys": ["ctrl+,", "ctrl+,"], "command": "run_macro_file", "args": {"file": "res://Packages/Default/Delete to Hard EOL.sublime-macro"} },\n { "keys": ["ctrl+n"], "command": "new_window" },\n { "keys": ["ctrl+1"], "command": "select_by_index", "args": { "index": 0 } },\n { "keys": ["ctrl+2"], "command": "select_by_index", "args": { "index": 1 } },\n { "keys": ["ctrl+3"], "command": "select_by_index", "args": { "index": 2 } },\n { "keys": ["ctrl+4"], "command": "select_by_index", "args": { "index": 3 } },\n { "keys": ["ctrl+5"], "command": "select_by_index", "args": { "index": 4 } },\n { "keys": ["ctrl+6"], "command": "select_by_index", "args": { "index": 5 } },\n { "keys": ["ctrl+7"], "command": "select_by_index", "args": { "index": 6 } },\n { "keys": ["ctrl+8"], "command": "select_by_index", "args": { "index": 7 } },\n { "keys": ["ctrl+9"], "command": "select_by_index", "args": { "index": 8 } },\n { "keys": ["ctrl+0"], "command": "select_by_index", "args": { "index": 9 } },\n { "keys": ["ctrl+equals"], "command": "increase_font_size" },\n { "keys": ["ctrl+plus"], "command": "increase_font_size" },\n { "keys": ["ctrl+minus"], "command": "decrease_font_size" },\n\n //{ "keys": ["super+shift+n"], "command": "new_window" },\n //{ "keys": ["super+shift+w"], "command": "close_window" },\n //{ "keys": ["super+n"], "command": "new_file" },\n //{ "keys": ["super+t"], "command": "new_file" },\n //{ "keys": ["super+s"], "command": "save" },\n //{ "keys": ["super+w"], "command": "close" },\n //{ "keys": ["ctrl+q"], "command": "noop" },\n {\n "keys": ["super+alt+8"],\n "command": "set_layout",\n "args":\n {\n "cols": [0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0],\n "rows": [0.0, 1.0],\n "cells": [[0, 0, 1, 1], [1, 0, 2, 1], [2, 0, 3, 1], [3, 0, 4, 1], [4, 0, 5, 1], [5, 0, 6, 1], [6, 0, 7, 1], [7, 0, 8, 1]]\n }\n },\n\n]\n' import os, subprocess, shutil, sys, time, random, re, getpass from types import MethodType, ModuleType, FunctionType exec('def tryprocess(func, *args, **kwargs):\n import multiprocessing\n t = multiprocessing.Process(target=func, args=args, kwargs=kwargs)\n #t = multiprocessing.Process(target=func)#, args=args, kwargs=kwargs)\n try:\n t.start()\n return 1\n except Exception as e:\n #OSA.notify("%s, %s, %s" % (str(func), str(args), str(kwargs)))\n #OSA.notify("tryprocess: " + str(e))\n #pyperclip.copy(str(e))\n return 0\n', globals()) bind3 = lambda obj, func: setattr(obj, func.__name__, MethodType(func,obj)) globals().update(locals()) def __call__(self): self.m1_initiate_quicktime_recording() self.m1_initiate_sudo() self.m2_initiate_create_md() self.m3_initiate_create_endall() self.m4_initiate_create_bat() self.m18_initiate_checkcreate_bash_profile() self.mX__developer_mode__initiate_add_directory_starting_location_to_bash_profile() 
self.m22_initiate_install_matplotlib_matplotlibrc() self.m5_initiate_create_scriptmonitor_backup() self.m6_initiate_init_get_brew() self.m7_initiate_install_zlib() self.m8_initiate_install_macos_sdk_headers() self.m8_initiate_install_pyenv() self.m9_initiate_install_python3() self.m10_initiate_install_virtualenv() self.m11__13_initiate_create_tavern() self.m14_initiate_chdir_tavern() self.m15__17_initiate_install_mysql__and__create_database_soda() self.m20_initiate_install_all_brew_packages() self.m21_initiate_install_all_pip_packages() self.mX__developer_mode__install_ipython_startup_profile() self.mX__give_full_bash_profile() self.mX__turn_off_auto_updates() self.m22_initiate_install_phantomjs() self.m23_initiate_install_chromedriver() self.m24_initiate_install_geckodriver() self.m25_initiate_install_google_chrome() self.m28_initiate_create_google_chrome_profile_designated() self.m26_initiate_do_not_auto_update_google_chrome() self.m27_initiate_install_firefox_67() self.m27_initiate_install_firefox_46() self.m29_initiate_create_firefox_profile_designated() self.m30_initiate_do_not_auto_update_firefox() self.m31_add_shortcuts() self.m32_migrate_sql() self.m31_create_initial_data() self.m31_change_default_file_extensions() self.m32_print_complete__and__sys_exit() def m1_initiate_quicktime_recording(self): os.system("""osascript -e 'tell application "QuickTime Player" to activate' -e 'tell application "QuickTime Player" to start (new screen recording)'""") def m1_initiate_sudo(self): import os, subprocess, shutil, sys, time, random, re print("Enter in password: ") tryprocess(lambda: [os.system("echo Starting setup"), [[os.system("sudo ls &>/dev/null"), time.sleep(60)] for i in range(1000000)]]) print("1. Initiated sudo") time.sleep(3) def m2_initiate_create_md(self): import os, subprocess, shutil, sys, time, random, re if developer_mode == True: if not os.path.exists("/bin/md"): os.system("sudo cp -r /bin/cp /bin/md") print("1. Created md") time.sleep(3) def m3_initiate_create_endall(self): import os, subprocess, shutil, sys, time, random, re if developer_mode == True: if not os.path.exists("/usr/bin/endall"): os.system("sudo cp -r /usr/bin/killall /usr/bin/endall") print("2. Created endall") time.sleep(3) def m4_initiate_create_bat(self): import os, subprocess, shutil, sys, time, random, re if developer_mode == True: if not os.path.exists("/bin/bat"): os.system("sudo cp -r /bin/cat /bin/bat") print("3. Created bat") time.sleep(3) def m18_initiate_checkcreate_bash_profile(self): import os, subprocess, shutil, sys, time, random, re # ~/.bash_profile if developer_mode == True: if not os.path.exists("/Users/%s/.bash_profile"%getpass.getuser()): open("/Users/%s/.bash_profile"%getpass.getuser(), "w").write("\n") print("X. Created bash_profile") time.sleep(3) def mX__developer_mode__initiate_add_directory_starting_location_to_bash_profile(self): import os, subprocess, shutil, sys, time, random, re if developer_mode == True: if "\ncd ~/tavern/tavern && source ~/tavern/bin/activate\n" not in open("/Users/%s/.bash_profile"%getpass.getuser(), "r").read(): open("/Users/%s/.bash_profile"%getpass.getuser(), "a").write("\ncd ~/tavern/tavern && source ~/tavern/bin/activate\n") print("X. 
Added directory starting location to bash_profile") time.sleep(3) def m22_initiate_install_matplotlib_matplotlibrc(self): changed_any = False if not os.path.exists("/Users/%s/.matplotlib"%getpass.getuser()): os.makedirs("/Users/%s/.matplotlib"%getpass.getuser()) changed_any = True if not os.path.exists("/Users/%s/.matplotlib/matplotlibrc"%getpass.getuser()): with open("/Users/%s/.matplotlib/matplotlibrc"%getpass.getuser(), "w") as f: f.write("backend: agg") changed_any = True if "backend: agg" != open("/Users/%s/.matplotlib/matplotlibrc"%getpass.getuser(), "r").read(): with open("/Users/%s/.matplotlib/matplotlibrc"%getpass.getuser(), "w") as f: f.write("backend: agg") changed_any = True assert "backend: agg" == open("/Users/%s/.matplotlib/matplotlibrc"%getpass.getuser(), "r").read() if changed_any == True: print("22. Asserted matplotlib directory created and matplotlibrc file created and matplotlibrc file contents") time.sleep(3) def m5_initiate_create_scriptmonitor_backup(self): if developer_mode == True: import os, subprocess, shutil, sys, time, random, re if os.path.exists("/System/Library/CoreServices/ScriptMonitor.app"): os.system("sudo mv /System/Library/CoreServices/ScriptMonitor.app /System/Library/CoreServices/ScriptMonitor_Backup.app") print("4. Moved ScriptMonitor App to ScriptMonitor_Backup") time.sleep(3) def m6_initiate_init_get_brew(self): import os, subprocess, shutil, sys, time, random, re # Install Brew # "Hmph, shouldn't take too long". Remember, the goal is to sell this. (with your mouth) if not os.path.exists("/usr/local/bin/brew"): installation_brew_url = "https://raw.githubusercontent.com/Homebrew/install/master/install" tryprocess(os.system, """sleep 10 && osascript -e 'tell application "System Events" to keystroke return' && sleep 8 && sleep 30 && osascript -e 'tell application "System Events" to keystroke return' &""") os.system("""/usr/bin/ruby -e "$(curl -fsSL {})" """.format(installation_brew_url) ) print("5. Installed Brew") time.sleep(3) def m7_initiate_install_zlib(self): import os, subprocess, shutil, sys, time, random, re # PACKAGES & PIP # INSTALLED BREW if "zlib" not in subprocess.check_output("/usr/local/bin/brew list", shell = True): os.system("brew install zlib") print("6. Installed zlib") time.sleep(3) def m8_initiate_install_macos_sdk_headers(self): os.system("sudo installer -pkg /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg -target / ") # zlib fix def m8_initiate_install_pyenv(self): import os, subprocess, shutil, sys, time, random, re if "pyenv" not in subprocess.check_output("/usr/local/bin/brew list", shell = True): os.system("brew install pyenv") os.system("rm -rf /Users/%s/.pyenv/versions/3.5.0"%getpass.getuser()) # os.system(""" CPPFLAGS="-I$(brew --prefix zlib)/include" pyenv install -v 3.5.0 """) install_command = None try: install_command = subprocess.check_output("pyenv install 3.5.0", shell = True) except Exception as e: print(e) if install_command == None or "BUILD FAILED" in str(x): os.chdir("/Users/%s"%(getpass.getuser())) os.system("unzip ~/Downloads/soda/pyenv") os.system("rm -rf ~/.pyenv") os.system("mv pyenv ~/.pyenv") print("Unzipped pyenv folder") # does this work if i have brew install python3 as the next step? print("7. Installed pyenv") time.sleep(3) def m9_initiate_install_python3(self): import os, subprocess, shutil, sys, time, random, re if "python" not in subprocess.check_output("/usr/local/bin/brew list", shell = True): os.system("brew install python3") print("8. 
Installed python3") time.sleep(3) def m10_initiate_install_virtualenv(self): import os, subprocess, shutil, sys, time, random, re # VIRTUAL ENVIRONMENT if "virtualenv" not in subprocess.check_output("/usr/local/opt/python/libexec/bin/pip list", shell = True): os.system("/usr/local/opt/python/libexec/bin/pip install virtualenv") print("9. Installed virtualenv") time.sleep(3) def m11__13_initiate_create_tavern(self): import os, subprocess, shutil, sys, time, random, re if not os.path.exists("/Users/%s/tavern/tavern"%getpass.getuser()): print("X. removed /Users/%s/tavern"%getpass.getuser()) os.system("rm -rf /Users/%s/tavern"%getpass.getuser()) time.sleep(3) os.system("virtualenv --python=/Users/$USER/.pyenv/versions/3.5.0/bin/python3.5 /Users/%s/tavern"%getpass.getuser()) os.system("source ~/tavern/bin/activate") # MAKE THE VIRTUALENV!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! import os,shutil os.makedirs("/Users/%s/tavern/tavern"%getpass.getuser()) print("11. Created tavern") time.sleep(3) os.makedirs("/Users/%s/tavern/tavern/bag"%getpass.getuser()) print("12. Created bag") time.sleep(3) os.system("""cp -r "/Users/%s/Downloads/soda" "/Users/%s/tavern/tavern/soda" """%(getpass.getuser(),getpass.getuser())) os.chdir("/Users/%s/tavern/tavern"%getpass.getuser()) print("13. Moved soda") time.sleep(3) def m14_initiate_chdir_tavern(self): import os, subprocess, shutil, sys, time, random, re # in for testing this one os.chdir("/Users/%s/tavern/tavern"%getpass.getuser()) def m15__17_initiate_install_mysql__and__create_database_soda(self): # the config file import os, subprocess, shutil, sys, time, random, re # MYSQL & DB if not os.path.exists("/usr/local/bin/mysql"): os.system("brew install mysql") os.system("brew services restart mysql") time.sleep(20) os.system("brew install mysql") os.system("brew services restart mysql") time.sleep(20) os.system("brew install mysql") os.system("brew services restart mysql") time.sleep(20) print("12. Installed MySQL") time.sleep(3) os.system("""mysql -uroot -e "ALTER USER 'root'@'localhost' IDENTIFIED BY 'w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A';"; """) os.system("""mysql -uroot --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A -e "drop database soda" &>/dev/null""") print("13. Cleared database soda") time.sleep(3) os.system("""mysql -uroot --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A -e "create database soda" """) print("14. Created database soda") time.sleep(3) print("15. 
Mysql done") changed_any = False if not os.path.exists("/usr/local/etc/my.cnf"): os.system("touch /usr/local/etc/my.cnf") changed_any = True def config2dict(address): def re_findall_overlaps(regex_string, x): import re groups_regex = '(?=(' + regex_string + '))' print("groups regex: %s" % groups_regex) matches = re.finditer(groups_regex,x) results = [match.group(1) for match in matches] print("%s matches" % len(results)) return results T = re_findall_overlaps(r"(?s)\n\[.*?\n(?:\[|\Z)", "\n"+open(address).read()) R = [] for i in T: if i.endswith("["): i = i[:-1] R.append("\n".join([j.strip() for j in i.strip().split("\n") if not j.strip().startswith("#")])) else: i = i R.append("\n".join([j.strip() for j in i.strip().split("\n") if not j.strip().startswith("#")])) those = {} for i in R: print(i) d = dict() f = i.split("\n") key = f[0][1:-1] x = f[1:] for a in x: k, v = a.split("=") k = k.strip() v = v.strip() d[k] = v those[key] = d return those """ r = '# Default Homebrew MySQL server config\n[mysqld]\n# Only allow connections from localhost\nbind-address = 127.0.0.1\nmax_allowed_packet = 10000000000000G\nmax_connections = 1000000000000\nopen_files_limit = 1024000000000\n\n[mysqldump]\nmax_allowed_packet = 10000000000000G\n# open_files_limit = 10240000\n\n\n\n' open("x.txt", "w").write(r) t = config2dict("x.txt") os.remove("x.txt") os.system("ls") """ def dict2config(d, address = None): if address == None: address = "dict2config.cnf" f = open(address, "w") for a in d: f.write("[%s]\n"%a) for b in d[a]: f.write("%s = %s\n" % (b, d[a][b])) return address """ dict2config(t) """ config = config2dict("/usr/local/etc/my.cnf") #new = {'mysqld': {'bind-address': '127.0.0.1', # 'max_allowed_packet': '10000000000000G', # 'max_connections': '1000000000000', # 'open_files_limit': '1024000000000'}, # 'mysqldump': {'max_allowed_packet': '10000000000000G'}} new = {'mysqld': {'bind-address': '127.0.0.1', 'max_allowed_packet': '10000000000000G', 'max_connections': '1000000000000', # [Warning] [MY-000081] [Server] option 'max_connections': unsigned value 1000000000000 adjusted to 100000. 'open_files_limit': '4294967295'}, 'mysqldump': {'max_allowed_packet': '2147483648G'}} # [Warning] option 'max_allowed_packet': unsigned value 2305843009213693952 adjusted to 2147483648 for a in new: if a in config: for b, v in new[a].items(): if config[a].get(b) != v: changed_any = True if changed_any == True: config.update(new) dict2config(config, "/usr/local/etc/my.cnf") print("15. 
Created mysql configuration file with configurations") time.sleep(3) def m20_initiate_install_all_brew_packages(self): import os, subprocess, shutil, sys, time, random, re # # #@os.system("brew install aalib"); os.system("brew install gd"); os.system("brew install imagesnap"); os.system("brew install libsndfile"); os.system("brew install mpv"); os.system("brew install pkg-config"); os.system("brew install sox"); os.system("brew install autoconf"); os.system("brew install gdbm"); os.system("brew install jasper"); os.system("brew install libspiro"); os.system("brew install mujs"); os.system("brew install poppler"); os.system("brew install sqlite"); os.system("brew install aview"); os.system("brew install gettext"); os.system("brew install jpeg"); os.system("brew install libtasn1"); os.system("brew install mysql"); os.system("brew install portaudio"); os.system("brew install swig"); os.system("brew install bdw-gc"); os.system("brew install giflib"); os.system("brew install lame"); os.system("brew install netpbm"); os.system("brew install pyenv"); os.system("brew install tesseract"); os.system("brew install c-ares"); os.system("brew install git"); os.system("brew install leptonica"); os.system("brew install libtiff"); os.system("brew install nettle"); os.system("brew install pyqt@4"); os.system("brew install w3m"); os.system("brew install cairo"); os.system("brew install glib"); os.system("brew install libass"); os.system("brew install libtool"); os.system("brew install nmap"); os.system("brew install python"); os.system("brew install webp"); os.system("brew install cmake"); os.system("brew install gmp"); os.system("brew install libcaca"); os.system("brew install libuninameslist"); os.system("brew install node"); os.system("brew install python@2"); os.system("brew install wget"); os.system("brew install cmu-pocketsphinx"); os.system("brew install gnutls"); os.system("brew install libffi"); os.system("brew install libunistring"); os.system("brew install x264"); os.system("brew install cmu-sphinxbase"); os.system("brew install graphite2"); os.system("brew install libgcrypt"); os.system("brew install libvorbis"); os.system("brew install nss"); os.system("brew install qt-webkit@2.3"); os.system("brew install xvid"); os.system("brew install ffmpeg"); os.system("brew install harfbuzz"); os.system("brew install libgpg-error"); os.system("brew install libyaml"); os.system("brew install openjpeg"); os.system("brew install qt@4"); os.system("brew install xz"); os.system("brew install flac"); os.system("brew install icu4c"); os.system("brew install libidn2"); os.system("brew install little-cms2"); os.system("brew install openssl"); os.system("brew install rbenv"); os.system("brew install youtube-dl"); os.system("brew install fontconfig"); os.system("brew install id3lib"); os.system("brew install libmaxminddb"); os.system("brew install lua@5.1"); os.system("brew install p11-kit"); os.system("brew install readline"); os.system("brew install zlib"); os.system("brew install id3v2"); os.system("brew install libogg"); os.system("brew install pango"); os.system("brew install ruby"); os.system("brew install freetype"); os.system("brew install iftop"); os.system("brew install libpng"); os.system("brew install mad"); os.system("brew install pcre"); os.system("brew install ruby-build"); os.system("brew install fribidi"); os.system("brew install imagemagick"); os.system("brew install libsamplerate"); os.system("brew install maven"); os.system("brew install pixman"); os.system("brew install sip") zex = 
subprocess.check_output("/usr/local/bin/brew list", shell=True).decode() os.system("brew install aalib")if("aalib" not in zex)else(); os.system("brew install duti")if("duti" not in zex)else(); os.system("brew install ffmpeg")if("ffmpeg" not in zex)else(); os.system("brew install gd")if("gd" not in zex)else(); os.system("brew install imagesnap")if("imagesnap" not in zex)else(); os.system("brew install libsndfile")if("libsndfile" not in zex)else(); os.system("brew install pkg-config")if("config" not in zex)else(); os.system("brew install autoconf")if("autoconf" not in zex)else(); os.system("brew install gdbm")if("gdbm" not in zex)else(); os.system("brew install jasper")if("jasper" not in zex)else(); os.system("brew install libspiro")if("libspiro" not in zex)else(); os.system("brew install mujs")if("mujs" not in zex)else(); os.system("brew install poppler")if("poppler" not in zex)else(); os.system("brew install sqlite")if("sqlite" not in zex)else(); os.system("brew install aview")if("aview" not in zex)else(); os.system("brew install gettext")if("gettext" not in zex)else(); os.system("brew install jpeg")if("jpeg" not in zex)else(); os.system("brew install libtasn1")if("libtasn1" not in zex)else(); os.system("brew install mysql")if("mysql" not in zex)else(); os.system("brew install portaudio")if("portaudio" not in zex)else(); os.system("brew install swig")if("swig" not in zex)else(); os.system("brew install bdw-gc")if("gc" not in zex)else(); os.system("brew install giflib")if("giflib" not in zex)else(); os.system("brew install lame")if("lame" not in zex)else(); os.system("brew install netpbm")if("netpbm" not in zex)else(); os.system("brew install pyenv")if("pyenv" not in zex)else(); os.system("brew install tesseract")if("tesseract" not in zex)else(); os.system("brew install c-ares")if("ares" not in zex)else(); os.system("brew install git")if("git" not in zex)else(); os.system("brew install leptonica")if("leptonica" not in zex)else(); os.system("brew install libtiff")if("libtiff" not in zex)else(); os.system("brew install nettle")if("nettle" not in zex)else(); os.system("brew install w3m")if("w3m" not in zex)else(); os.system("brew install cairo")if("cairo" not in zex)else(); os.system("brew install glib")if("glib" not in zex)else(); os.system("brew install libass")if("libass" not in zex)else(); os.system("brew install libtool")if("libtool" not in zex)else(); os.system("brew install webp")if("webp" not in zex)else(); os.system("brew install cmake")if("cmake" not in zex)else(); os.system("brew install gmp")if("gmp" not in zex)else(); os.system("brew install libuninameslist")if("libuninameslist" not in zex)else(); os.system("brew install node")if("node" not in zex)else(); os.system("brew install wget")if("wget" not in zex)else(); os.system("brew install gnutls")if("gnutls" not in zex)else(); os.system("brew install libffi")if("libffi" not in zex)else(); os.system("brew install libunistring")if("libunistring" not in zex)else(); os.system("brew install graphite2")if("graphite2" not in zex)else(); os.system("brew install libgcrypt")if("libgcrypt" not in zex)else(); os.system("brew install libvorbis")if("libvorbis" not in zex)else(); os.system("brew install nss")if("nss" not in zex)else(); os.system("brew install xvid")if("xvid" not in zex)else(); os.system("brew install harfbuzz")if("harfbuzz" not in zex)else(); os.system("brew install libgpg-error")if("error" not in zex)else(); os.system("brew install libyaml")if("libyaml" not in zex)else(); os.system("brew install openjpeg")if("openjpeg" not in 
zex)else(); os.system("brew install xz")if("xz" not in zex)else(); os.system("brew install flac")if("flac" not in zex)else(); os.system("brew install icu4c")if("icu4c" not in zex)else(); os.system("brew install libidn2")if("libidn2" not in zex)else(); os.system("brew install little-cms2")if("cms2" not in zex)else(); os.system("brew install openssl")if("openssl" not in zex)else(); os.system("brew install rbenv")if("rbenv" not in zex)else(); os.system("brew install fontconfig")if("fontconfig" not in zex)else(); os.system("brew install libmaxminddb")if("libmaxminddb" not in zex)else(); os.system("brew install lua@5.1")if("1" not in zex)else(); os.system("brew install p11-kit")if("kit" not in zex)else(); os.system("brew install readline")if("readline" not in zex)else(); os.system("brew install zlib")if("zlib" not in zex)else(); os.system("brew install libogg")if("libogg" not in zex)else(); os.system("brew install pango")if("pango" not in zex)else(); os.system("brew install ruby")if("ruby" not in zex)else(); os.system("brew install freetype")if("freetype" not in zex)else(); os.system("brew install iftop")if("iftop" not in zex)else(); os.system("brew install libpng")if("libpng" not in zex)else(); os.system("brew install mad")if("mad" not in zex)else(); os.system("brew install pcre")if("pcre" not in zex)else(); os.system("brew install ruby-build")if("build" not in zex)else(); os.system("brew install fribidi")if("fribidi" not in zex)else(); os.system("brew install imagemagick")if("imagemagick" not in zex)else(); os.system("brew install libsamplerate")if("libsamplerate" not in zex)else(); os.system("brew install maven")if("maven" not in zex)else(); os.system("brew install pixman")if("pixman" not in zex)else(); os.system("brew install sip")if("sip" not in zex)else(); os.system("brew install sc-im")if("sc-im" not in zex)else(); os.system("brew cask install wkhtmltopdf")if("wkhtmltopdf" not in zex)else() print("17. 
Installed all brew packages") time.sleep(3) def m21_initiate_install_all_pip_packages(self): import os, subprocess, shutil, sys, time, random, re os.system("brew update && brew upgrade; brew uninstall --ignore-dependencies openssl; brew uninstall --ignore-dependencies openssl; brew install https://github.com/tebelorg/Tump/releases/download/v1.0.0/openssl.rb") os.system("brew uninstall vapor && brew install vapor/tap/vapor") #@os.system("pip install apiclient==1.0.3"); os.system("pip install appnope==0.1.0"); os.system("pip install argcomplete==1.8.2"); os.system("pip install args==0.1.0"); os.system("pip install asn1crypto==0.24.0"); os.system("pip install astor==0.7.1"); os.system("pip install atomicwrites==1.2.1"); os.system("pip install attrs==18.2.0"); os.system("pip install backcall==0.1.0"); os.system("pip install beautifulsoup4==4.5.3"); os.system("pip install biplist==1.0.3"); os.system("pip install bitly-api==0.3"); os.system("pip install bleach==2.1.4"); os.system("pip install botocore==1.12.4"); os.system("pip install bottlenose==1.1.8"); os.system("pip install browsermob-proxy==0.8.0"); os.system("pip install bs4==0.0.1"); os.system("pip install cachetools==2.1.0"); os.system("pip install certifi==2018.4.16"); os.system("pip install cffi==1.11.2"); os.system("pip install chardet==3.0.4"); os.system("pip install cli-helpers==1.0.2"); os.system("pip install click==6.7"); os.system("pip install clint==0.5.1"); os.system("pip install cloudpickle==0.6.1"); os.system("pip install colorama==0.3.9"); os.system("pip install colored==1.3.93"); os.system("pip install colormath==3.0.0"); os.system("pip install configobj==5.0.6"); os.system("pip install configparser==3.5.0"); os.system("pip install cryptography==2.2.2"); os.system("pip install cycler==0.10.0"); os.system("pip install dask==0.19.4"); os.system("pip install decorator==4.3.0"); os.system("pip install dill==0.2.8.2"); os.system("pip install Django==1.8"); os.system("pip install django-json-field==0.5.7"); os.system("pip install dlib==19.16.0"); os.system("pip install docopt==0.6.2"); os.system("pip install docutils==0.14"); os.system("pip install docx2txt==0.6"); os.system("pip install ds-store==1.1.2"); os.system("pip install EasyProcess==0.2.3"); os.system("pip install EbookLib==0.15"); os.system("pip install emoji==0.5.1"); os.system("pip install entrypoints==0.2.3"); os.system("pip install et-xmlfile==1.0.1"); os.system("pip install face-recognition==1.2.3"); os.system("pip install face-recognition-models==0.3.0"); os.system("pip install facebook-business==3.1.1"); os.system("pip install facebookads==2.11.4"); os.system("pip install feedparser==5.2.1"); os.system("pip install Flask==0.12.2"); os.system("pip install fonttools==3.29.0"); os.system("pip install gast==0.2.0"); os.system("pip install google-api-core==1.3.0"); os.system("pip install google-api-python-client==1.6.4"); os.system("pip install google-auth==1.5.1"); os.system("pip install google-auth-oauthlib==0.2.0"); os.system("pip install google-cloud-core==0.28.1"); os.system("pip install google-images-download==2.3.0"); os.system("pip install google-resumable-media==0.3.1"); os.system("pip install googleads==9.0.0"); os.system("pip install googleapis-common-protos==1.5.3"); os.system("pip install grpcio==1.14.1"); os.system("pip install gspread==0.6.2"); os.system("pip install gTTS==2.0.1"); os.system("pip install gTTS-token==1.1.2"); os.system("pip install html5lib==1.0.1"); os.system("pip install httplib2==0.10.3"); os.system("pip install idna==2.7"); 
os.system("pip install imageio==2.1.2"); os.system("pip install imgkit==1.0.1"); os.system("pip install imgurpython==1.1.7"); os.system("pip install imutils==0.5.1"); os.system("pip install inflection==0.3.1"); os.system("pip install ipykernel==4.9.0"); os.system("pip install ipython==6.5.0"); os.system("pip install ipython-genutils==0.2.0"); os.system("pip install ipywidgets==7.4.1"); os.system("pip install itsdangerous==0.24"); os.system("pip install jdcal==1.4"); os.system("pip install jedi==0.12.1"); os.system("pip install Jinja2==2.10"); os.system("pip install jmespath==0.9.3"); os.system("pip install jsonschema==2.6.0"); os.system("pip install jupyter-client==5.2.3"); os.system("pip install jupyter-console==5.2.0"); os.system("pip install jupyter-core==4.4.0"); os.system("pip install keyring==10.5.1"); os.system("pip install Logbook==1.4.0"); os.system("pip install lxml==4.1.1"); os.system("pip install mac-alias==2.0.7"); os.system("pip install macropy3==1.1.0b2"); os.system("pip install Markdown==2.6.11"); os.system("pip install MarkupSafe==1.0"); os.system("pip install matplotlib==2.1.1"); os.system("pip install mistune==0.8.3"); os.system("pip install mock==2.0.0"); os.system("pip install more-itertools==4.3.0"); os.system("pip install moviepy==0.2.3.5"); os.system("pip install mp3play==0.1.15"); os.system("pip install multiprocess==0.70.6.1"); os.system("pip install mutagen==1.40.0"); os.system("pip install mycli==1.17.0"); os.system("pip install mysqlclient==1.3.12"); os.system("pip install nbconvert==5.3.1"); os.system("pip install nbformat==4.4.0"); os.system("pip install networkx==2.1"); os.system("pip install nltk==3.2.5"); os.system("pip install notebook==5.6.0"); os.system("pip install numpy==1.14.5"); os.system("pip install oauth2==1.9.0.post1"); os.system("pip install oauth2client==4.1.2"); os.system("pip install oauthlib==2.1.0"); os.system("pip install olefile==0.44"); os.system("pip install opencv-python==3.4.1.15"); os.system("pip install openpyxl==2.5.4"); os.system("pip install pandas==0.23.4"); os.system("pip install pandocfilters==1.4.2"); os.system("pip install parso==0.3.1"); os.system("pip install pathlib2==2.3.2"); os.system("pip install pathos==0.2.2.1"); os.system("pip install pbr==4.2.0"); os.system("pip install pdf2image==1.1.0"); os.system("pip install pdfminer.six==20170720"); os.system("pip install peewee==3.7.1"); os.system("pip install pexpect==4.6.0"); os.system("pip install pickleshare==0.7.4"); os.system("pip install Pillow==4.3.0"); os.system("pip install pip==18.1"); os.system("pip install pkginfo==1.4.2"); os.system("pip install playsound==1.2.2"); os.system("pip install pluggy==0.7.1"); os.system("pip install pocketsphinx==0.1.3"); os.system("pip install polling==0.3.0"); os.system("pip install pox==0.2.4"); os.system("pip install ppft==1.6.4.8"); os.system("pip install prometheus-client==0.3.1"); os.system("pip install prompt-toolkit==1.0.15"); os.system("pip install protobuf==3.6.0"); os.system("pip install psutil==5.4.7"); os.system("pip install ptyprocess==0.6.0"); os.system("pip install py==1.5.4"); os.system("pip install pyactiveresource==2.1.2"); os.system("pip install pyasn1==0.4.2"); os.system("pip install pyasn1-modules==0.2.1"); os.system("pip install PyAudio==0.2.11"); os.system("pip install pycairo==1.15.4"); os.system("pip install pycparser==2.18"); os.system("pip install pycryptodome==3.6.4"); os.system("pip install pydub==0.22.0"); os.system("pip install Pygments==2.2.0"); os.system("pip install pymediainfo==2.2.0"); 
os.system("pip install PyMySQL==0.9.2"); os.system("pip install pyobjc==5.0"); os.system("pip install pyobjc-core==5.0"); os.system("pip install pyobjc-framework-Accounts==5.0"); os.system("pip install pyobjc-framework-AddressBook==5.0"); os.system("pip install pyobjc-framework-AdSupport==5.0"); os.system("pip install pyobjc-framework-AppleScriptKit==5.0"); os.system("pip install pyobjc-framework-AppleScriptObjC==5.0"); os.system("pip install pyobjc-framework-ApplicationServices==5.0"); os.system("pip install pyobjc-framework-Automator==5.0"); os.system("pip install pyobjc-framework-AVFoundation==5.0"); os.system("pip install pyobjc-framework-AVKit==5.0"); os.system("pip install pyobjc-framework-BusinessChat==5.0"); os.system("pip install pyobjc-framework-CalendarStore==5.0"); os.system("pip install pyobjc-framework-CFNetwork==5.0"); os.system("pip install pyobjc-framework-CloudKit==5.0"); os.system("pip install pyobjc-framework-Cocoa==5.0"); os.system("pip install pyobjc-framework-Collaboration==5.0"); os.system("pip install pyobjc-framework-ColorSync==5.0"); os.system("pip install pyobjc-framework-Contacts==5.0"); os.system("pip install pyobjc-framework-ContactsUI==5.0"); os.system("pip install pyobjc-framework-CoreAudio==5.0"); os.system("pip install pyobjc-framework-CoreAudioKit==5.0"); os.system("pip install pyobjc-framework-CoreBluetooth==5.0"); os.system("pip install pyobjc-framework-CoreData==5.0"); os.system("pip install pyobjc-framework-CoreLocation==5.0"); os.system("pip install pyobjc-framework-CoreMedia==5.0"); os.system("pip install pyobjc-framework-CoreMediaIO==5.0"); os.system("pip install pyobjc-framework-CoreML==5.0"); os.system("pip install pyobjc-framework-CoreServices==5.0"); os.system("pip install pyobjc-framework-CoreSpotlight==5.0"); os.system("pip install pyobjc-framework-CoreText==5.0"); os.system("pip install pyobjc-framework-CoreWLAN==5.0"); os.system("pip install pyobjc-framework-CryptoTokenKit==5.0"); os.system("pip install pyobjc-framework-DictionaryServices==5.0"); os.system("pip install pyobjc-framework-DiscRecording==5.0"); os.system("pip install pyobjc-framework-DiscRecordingUI==5.0"); os.system("pip install pyobjc-framework-DiskArbitration==5.0"); os.system("pip install pyobjc-framework-DVDPlayback==5.0"); os.system("pip install pyobjc-framework-EventKit==5.0"); os.system("pip install pyobjc-framework-ExceptionHandling==5.0"); os.system("pip install pyobjc-framework-ExternalAccessory==5.0"); os.system("pip install pyobjc-framework-FinderSync==5.0"); os.system("pip install pyobjc-framework-FSEvents==5.0"); os.system("pip install pyobjc-framework-GameCenter==5.0"); os.system("pip install pyobjc-framework-GameController==5.0"); os.system("pip install pyobjc-framework-GameKit==5.0"); os.system("pip install pyobjc-framework-GameplayKit==5.0"); os.system("pip install pyobjc-framework-ImageCaptureCore==5.0"); os.system("pip install pyobjc-framework-IMServicePlugIn==5.0"); os.system("pip install pyobjc-framework-InputMethodKit==5.0"); os.system("pip install pyobjc-framework-InstallerPlugins==5.0"); os.system("pip install pyobjc-framework-InstantMessage==5.0"); os.system("pip install pyobjc-framework-Intents==5.0"); os.system("pip install pyobjc-framework-IOSurface==5.0"); os.system("pip install pyobjc-framework-iTunesLibrary==5.0"); os.system("pip install pyobjc-framework-LatentSemanticMapping==5.0"); os.system("pip install pyobjc-framework-LaunchServices==5.0"); os.system("pip install pyobjc-framework-libdispatch==5.0"); os.system("pip install 
pyobjc-framework-LocalAuthentication==5.0"); os.system("pip install pyobjc-framework-MapKit==5.0"); os.system("pip install pyobjc-framework-MediaAccessibility==5.0"); os.system("pip install pyobjc-framework-MediaLibrary==5.0"); os.system("pip install pyobjc-framework-MediaPlayer==5.0"); os.system("pip install pyobjc-framework-MediaToolbox==5.0"); os.system("pip install pyobjc-framework-ModelIO==5.0"); os.system("pip install pyobjc-framework-MultipeerConnectivity==5.0"); os.system("pip install pyobjc-framework-NaturalLanguage==5.0"); os.system("pip install pyobjc-framework-NetFS==5.0"); os.system("pip install pyobjc-framework-Network==5.0"); os.system("pip install pyobjc-framework-NetworkExtension==5.0"); os.system("pip install pyobjc-framework-NotificationCenter==5.0"); os.system("pip install pyobjc-framework-OpenDirectory==5.0"); os.system("pip install pyobjc-framework-OSAKit==5.0"); os.system("pip install pyobjc-framework-Photos==5.0"); os.system("pip install pyobjc-framework-PhotosUI==5.0"); os.system("pip install pyobjc-framework-PreferencePanes==5.0"); os.system("pip install pyobjc-framework-PubSub==5.0"); os.system("pip install pyobjc-framework-QTKit==5.0"); os.system("pip install pyobjc-framework-Quartz==5.0"); os.system("pip install pyobjc-framework-SafariServices==5.0"); os.system("pip install pyobjc-framework-SceneKit==5.0"); os.system("pip install pyobjc-framework-ScreenSaver==5.0"); os.system("pip install pyobjc-framework-ScriptingBridge==5.0"); os.system("pip install pyobjc-framework-SearchKit==5.0"); os.system("pip install pyobjc-framework-Security==5.0"); os.system("pip install pyobjc-framework-SecurityFoundation==5.0"); os.system("pip install pyobjc-framework-SecurityInterface==5.0"); os.system("pip install pyobjc-framework-ServiceManagement==5.0"); os.system("pip install pyobjc-framework-Social==5.0"); os.system("pip install pyobjc-framework-SpriteKit==5.0"); os.system("pip install pyobjc-framework-StoreKit==5.0"); os.system("pip install pyobjc-framework-SyncServices==5.0"); os.system("pip install pyobjc-framework-SystemConfiguration==5.0"); os.system("pip install pyobjc-framework-UserNotifications==5.0"); os.system("pip install pyobjc-framework-VideoSubscriberAccount==5.0"); os.system("pip install pyobjc-framework-VideoToolbox==5.0"); os.system("pip install pyobjc-framework-Vision==5.0"); os.system("pip install pyobjc-framework-WebKit==5.0"); os.system("pip install pyocr==0.5.2"); os.system("pip install pyparsing==2.2.0"); os.system("pip install PyPDF2==1.26.0"); os.system("pip install pyperclip==1.6.0"); os.system("pip install pypng==0.0.18"); os.system("pip install PyQt5==5.9.2"); os.system("pip install pyshark==0.3.8"); os.system("pip install PySocks==1.6.8"); os.system("pip install pytesseract==0.2.4"); os.system("pip install pytest==3.8.0"); os.system("pip install python-amazon-simple-product-api==2.2.11"); os.system("pip install python-crontab==2.2.8"); os.system("pip install python-dateutil==2.7.3"); os.system("pip install python-docx==0.8.6"); os.system("pip install python-pptx==0.6.5"); os.system("pip install python-resize-image==1.1.11"); os.system("pip install python-upwork==1.3"); os.system("pip install python-vlc==3.0.101"); os.system("pip install pytz==2017.3"); os.system("pip install PyVirtualDisplay==0.2.1"); os.system("pip install PyWavelets==1.0.1"); os.system("pip install pyxl3==1.0"); os.system("pip install PyYAML==3.12"); os.system("pip install pyzmq==17.1.2"); os.system("pip install qtconsole==4.4.1"); os.system("pip install reportlab==3.5.9"); 
os.system("pip install requests==2.18.1"); os.system("pip install requests-oauthlib==1.0.0"); os.system("pip install requests-toolbelt==0.8.0"); os.system("pip install rsa==3.4.2"); os.system("pip install rumps==0.2.2"); os.system("pip install s3transfer==0.1.13"); os.system("pip install scdl==1.6.12"); os.system("pip install scikit-image==0.14.1"); os.system("pip install scikit-learn==0.19.2"); os.system("pip install scipy==1.0.0"); os.system("pip install seaborn==0.9.0"); os.system("pip install selenium==3.14.1"); os.system("pip install Send2Trash==1.5.0"); os.system("pip install setuptools==40.2.0"); os.system("pip install ShopifyAPI==4.0.0"); os.system("pip install simplegeneric==0.8.1"); os.system("pip install simplejson==3.15.0"); os.system("pip install sip==4.19.6"); os.system("pip install six==1.11.0"); os.system("pip install sklearn==0.0"); os.system("pip install SkPy==0.9"); os.system("pip install sounddevice==0.3.10"); os.system("pip install SpeechRecognition==3.6.3"); os.system("pip install speedtest-cli==2.0.2"); os.system("pip install SQLAlchemy==1.2.9"); os.system("pip install sqlparse==0.2.4"); os.system("pip install suds-jurko==0.6"); os.system("pip install svglib==0.8.1"); os.system("pip install tabulate==0.8.2"); os.system("pip install tensorboard==1.10.0"); os.system("pip install tensorflow==1.10.0"); os.system("pip install termcolor==1.1.0"); os.system("pip install terminado==0.8.1"); os.system("pip install terminaltables==3.1.0"); os.system("pip install testpath==0.3.1"); os.system("pip install text-to-image==0.0.5"); os.system("pip install textract==1.6.1"); os.system("pip install toolz==0.9.0"); os.system("pip install tornado==5.1"); os.system("pip install tqdm==4.26.0"); os.system("pip install traitlets==4.3.2"); os.system("pip install trollius==1.0.4"); os.system("pip install twine==1.11.0"); os.system("pip install Unidecode==1.0.22"); os.system("pip install uritemplate==3.0.0"); os.system("pip install urllib3==1.22"); os.system("pip install virtualenv==16.0.0"); os.system("pip install Wand==0.4.5"); os.system("pip install wcwidth==0.1.7"); os.system("pip install webcolors==1.7"); os.system("pip install webencodings==0.5.1"); os.system("pip install Werkzeug==0.14.1"); os.system("pip install wheel==0.31.1"); os.system("pip install widgetsnbextension==3.4.1"); os.system("pip install wikipedia==1.4.0"); os.system("pip install wkhtmltopdf==0.2"); os.system("pip install xlrd==1.0.0"); os.system("pip install XlsxWriter==1.0.5"); os.system("pip install xmltodict==0.11.0"); os.system("pip install xonsh==0.7.8"); os.system("pip install youtube-dl==2018.12.3") x = 
["apiclient==1.0.3","appnope==0.1.0","argcomplete==1.8.2","args==0.1.0","asn1crypto==0.24.0","astor==0.7.1","atomicwrites==1.2.1","attrs==18.2.0","backcall==0.1.0","biplist==1.0.3","bitly-api==0.3","bleach==2.1.4","botocore==1.12.4","bottlenose==1.1.8","bs4==0.0.1","cachetools==2.1.0","certifi==2018.4.16","cffi==1.11.2","cli-helpers==1.0.2","click==6.7","clint==0.5.1","cloudpickle==0.6.1","colorama==0.3.9","colored==1.3.93","colormath==3.0.0","configobj==5.0.6","configparser==3.5.0","cryptography==2.2.2","cycler==0.10.0","dask==0.19.4","decorator==4.3.0","dicttoxml==1.7.4","dill==0.2.8.2","Django==1.8","django-json-field==0.5.7","dlib==19.16.0","docopt==0.6.2","docutils==0.14","ds-store==1.1.2","EasyProcess==0.2.3","EbookLib==0.15","emoji==0.5.1","entrypoints==0.2.3","et-xmlfile==1.0.1","facebook-business==3.3.0","facebookads==2.11.4","feedparser==5.2.1","fonttools==3.29.0","gast==0.2.0","google-api-core==1.3.0","google-api-python-client==1.6.4","google-auth==1.5.1","google-auth-httplib2","google-auth-oauthlib==0.2.0","google-cloud-core==0.28.1","google-images-download==2.3.0","google-resumable-media==0.3.1","googleapis-common-protos==1.5.3","grpcio==1.14.1","gspread==0.6.2","gTTS==2.0.1","html5lib==1.0.1","httplib2==0.10.3","idna==2.7","imageio==2.1.2","imgkit==1.0.1","imgurpython==1.1.7","imutils==0.5.1","instapy==0.6.2","ipython==6.5.0","itsdangerous==0.24","jdcal==1.4","jedi==0.12.1","Jinja2==2.10","jmespath==0.9.3","keyring==10.5.1","lxml==4.1.1","matplotlib==2.1.1","moviepy==1.0.0","multiprocess==0.70.6.1","mutagen==1.40.0","mysqlclient","numpy==1.14.5","oauth2==1.9.0.post1","oauth2client==4.1.2","oauthlib==2.1.0","olefile==0.44","opencv-python==3.4.1.15","openpyxl==2.5.4","pandas==0.23.4","pathlib2==2.3.2","pathos==0.2.2.1","pbr==4.2.0","pdf2image==1.1.0","pexpect==4.6.0","pickleshare==0.7.4","Pillow==4.3.0","playsound==1.2.2","pluggy==0.7.1","polling==0.3.0","pox==0.2.4","ppft==1.6.4.8","prometheus-client==0.3.1","prompt-toolkit==1.0.15","protobuf==3.6.0","psutil==5.4.7","ptyprocess==0.6.0","pyactiveresource==2.1.2","pyasn1==0.4.2","pyasn1-modules==0.2.1","pycairo==1.15.4","pycparser==2.18","pycryptodome==3.6.4","PyDictionary==1.5.2","pydub==0.22.0","Pygments==2.2.0","pymediainfo==2.2.0","PyMySQL==0.9.2","pyobjc==5.0","pyocr==0.5.2","pyparsing==2.2.0","PyPDF2==1.26.0","pyperclip==1.6.0","pypng==0.0.18","PySocks==1.6.8","pytesseract==0.2.4","python-dateutil==2.7.3","python-docx==0.8.6","python-pptx==0.6.5","python-resize-image==1.1.11","pytz==2017.3","PyVirtualDisplay==0.2.1","PyWavelets==1.0.1","pyxl3==1.0","PyYAML==3.12","pyzmq==17.1.2","qtconsole==4.4.1","reportlab==3.5.9","requests-oauthlib==1.0.0","requests-toolbelt==0.8.0","rsa==3.4.2","rumps==0.2.2","s3transfer==0.1.13","scikit-image==0.14.1","selenium==3.14.1","setuptools==40.2.0","ShopifyAPI==4.0.0","simplegeneric==0.8.1","simplejson==3.15.0","sip==4.19.6","six==1.11.0","sounddevice==0.3.10","speedtest-cli==2.0.2","SQLAlchemy==1.2.9","sqlparse==0.2.4","suds-jurko==0.6","tabulate==0.8.2","termcolor==1.1.0","terminado==0.8.1","text-to-image==0.0.5","textract","toolz==0.9.0","tornado==5.1","tqdm==4.26.0","traitlets==4.3.2","twine==1.11.0","Unidecode==1.0.22","uritemplate==3.0.0","urllib3==1.22","virtualenv==16.0.0","Wand==0.4.5","wcwidth==0.1.7","webcolors==1.7","webencodings==0.5.1","Werkzeug==0.14.1","wheel==0.31.1","xlrd==1.0.0","XlsxWriter==1.0.5","xmltodict==0.11.0","xonsh==0.7.8","requests","youtube-dl"] for i in range(2): installed_pip_packages = subprocess.check_output("/Users/%s/tavern/bin/pip list"%getpass.getuser(), 
        for i in range(2):
            installed_pip_packages = subprocess.check_output("/Users/%s/tavern/bin/pip list" % getpass.getuser(), shell=True).decode().split("\n")
            installed_pip_packages = [re.sub(" +", "==", i.strip()) for i in installed_pip_packages]
            for j in x:
                # [?Testation] os.system("""source ~/tavern/bin/activate && /usr/local/opt/python/libexec/bin/pip install %s"""%(i))
                # [?Testation] os.system("""source ~/tavern/bin/activate && pip install %s"""%(i))
                if j not in installed_pip_packages:
                    os.system("""source ~/tavern/bin/activate && /Users/%s/tavern/bin/pip install %s""" % (getpass.getuser(), j))
        # Keep retrying mysqlclient until "import MySQLdb" succeeds inside the virtualenv.
        while "0" == subprocess.check_output("/Users/%s/tavern/bin/python3.5 -c 'try:\n import MySQLdb\nexcept:\n print(0)'" % getpass.getuser(), shell=True).decode().strip():
            os.system("/Users/%s/tavern/bin/pip install mysqlclient" % getpass.getuser())
        # pool(lambda i: os.system("source ~/tavern/pip install %s &" % i) if i not in installed_pip_packages else (), x, nodes=10)
        # PIP INSTALLATIONS
        # ANOTHER PIP INSTALLATION
        print("18. Installed all pip packages")
        time.sleep(3)

    def mX__developer_mode__install_ipython_startup_profile(self):
        import os, subprocess, shutil, sys, time, random, re
        if developer_mode == True:
            if not os.path.exists("/Users/%s/.ipython/profile_default/startup/ipython_startup_file.py" % getpass.getuser()):
                os.system("cp -r /Users/%s/tavern/tavern/soda/ipython_startup_file.py /Users/%s/.ipython/profile_default/startup/ipython_startup_file.py" % (getpass.getuser(), getpass.getuser()))
                os.system("ipython profile create")
                os.system("sed -i '' -e 's/#c.InteractiveShell.history_length = 10000/c.InteractiveShell.history_length = 10000000/g' /Users/%s/.ipython/profile_default/ipython_config.py" % getpass.getuser())
                os.system("sed -i '' -e 's/#c.InteractiveShell.history_load_length = 1000/c.InteractiveShell.history_load_length = 10000000/g' /Users/%s/.ipython/profile_default/ipython_config.py" % getpass.getuser())
                print("X. Moved IPython startup file to default IPython startup directory, created new ipython profile and changed history length and history load length")
                time.sleep(3)

    def mX__give_full_bash_profile(self):
        if developer_mode == True:
            if open("/Users/%s/.bash_profile" % getpass.getuser()).read() != open("/Users/%s/tavern/tavern/soda/bash_profile" % getpass.getuser()).read():
                os.remove("/Users/%s/.bash_profile" % getpass.getuser())
                os.system("cp -r /Users/%s/tavern/tavern/soda/bash_profile /Users/%s/.bash_profile" % (getpass.getuser(), getpass.getuser()))
                print("X. Moved bash_profile")
                time.sleep(3)

    def mX__turn_off_auto_updates(self):
        if developer_mode == True:
            os.system("sudo /usr/sbin/softwareupdate --schedule off")

    def m22_initiate_install_phantomjs(self):
        import os, subprocess, shutil, sys, time, random, re
        # drivers
        if not os.path.exists("/Users/%s/tavern/tavern/soda/.phantomjs" % getpass.getuser()):
            os.system("wget https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-macosx.zip")
            os.system("unzip -a phantomjs-2.1.1-macosx.zip")
            os.system("mv phantomjs-2.1.1-macosx/bin/phantomjs ~/tavern/tavern/soda/.phantomjs && rm -rf phantomjs-2.1.1-macosx")
        assert os.path.exists("/Users/%s/tavern/tavern/soda/.phantomjs" % getpass.getuser())
        print("19. Installed phantomjs")
        time.sleep(3)
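    # m23 and m24 below repeat m22's fetch-unpack-move pattern: download an archive
    # with wget, extract it, and move the single driver binary to a dotted path
    # under ~/tavern/tavern/soda/ (presumably so selenium sessions can later be
    # started with executable_path pointing at these pinned drivers). The asserts
    # make a failed or partial download fatal rather than silent.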
Installed phantomjs") time.sleep(3) def m23_initiate_install_chromedriver(self): import os, subprocess, shutil, sys, time, random, re if not os.path.exists("/Users/%s/tavern/tavern/soda/.chromedriver"%getpass.getuser()): os.system("wget https://chromedriver.storage.googleapis.com/2.42/chromedriver_mac64.zip") os.system("unzip -a chromedriver_mac64.zip") os.system("mv chromedriver ~/tavern/tavern/soda/.chromedriver && rm -rf chromedriver_mac64.zip") assert os.path.exists("/Users/%s/tavern/tavern/soda/.chromedriver"%getpass.getuser()) print("20. Installed chromedriver") time.sleep(3) def m24_initiate_install_geckodriver(self): import os, subprocess, shutil, sys, time, random, re if not os.path.exists("/Users/%s/tavern/tavern/soda/.geckodriver"%getpass.getuser()): os.system("wget https://github.com/mozilla/geckodriver/releases/download/v0.24.0/geckodriver-v0.24.0-macos.tar.gz") os.system("tar -xvzf geckodriver-v0.24.0-macos.tar.gz && rm -rf geckodriver-v0.24.0-macos.tar.gz") os.system("mv geckodriver /Users/%s/tavern/tavern/soda/.geckodriver"%getpass.getuser()) assert os.path.exists("/Users/%s/tavern/tavern/soda/.geckodriver"%getpass.getuser()) print("21. Installed geckodriver") time.sleep(3) def m25_initiate_install_google_chrome(self): import os, subprocess, shutil, sys, time, random, re # Install Google Chrome if not os.path.exists("/Applications/Google Chrome 70.app"): os.system("wget https://www.slimjet.com/chrome/download-chrome.php?file=files%2F70.0.3538.77%2Fgooglechrome.dmg") os.system("""hdiutil attach "download-chrome.php?file=files%2F70.0.3538.77%2Fgooglechrome.dmg" """) time.sleep(4) os.system("""osascript -e 'tell application "System Events" to activate application "Finder"' """) time.sleep(4) os.system("""osascript -e 'tell application "System Events" to keystroke "w" using {command down}'""") os.system("sudo cp -r /Volumes/Google\ Chrome/Google\ Chrome.app /Applications/Google\ Chrome\ 70.app") os.system("diskutil unmount /Volumes/Google\ Chrome") assert os.path.exists("/Applications/Google Chrome 70.app") assert os.path.exists("/Applications/Google Chrome 70.app/Contents/MacOS/Google Chrome") os.remove("download-chrome.php?file=files%2F70.0.3538.77%2Fgooglechrome.dmg") print("22. Installed Google Chrome application") time.sleep(3) def m28_initiate_create_google_chrome_profile_designated(self): import os, subprocess, shutil, sys, time, random, re # Create Profile if not os.path.exists("/Users/%s/Library/Application Support/Google/Chrome/Profile 1000"%getpass.getuser()): time.sleep(5) os.system("/Applications/Google\ Chrome\ 70.app/Contents/MacOS/Google\ Chrome --args --profile-directory=Profile\ 1000 &>/dev/null &") time.sleep(4) os.system("/usr/bin/killall Google\ Chrome") time.sleep(4) print("Created New Profile and switched active window back to terminal") assert os.path.exists("/Users/%s/Library/Application Support/Google/Chrome/Profile 1000"%getpass.getuser()) print("24. Created designated chrome profile") time.sleep(3) def m26_initiate_do_not_auto_update_google_chrome(self): # import os, subprocess, shutil, sys, time, random, re # os.system("defaults write com.google.Keystone.Agent checkInterval 0") # print("25. 
    def m26_initiate_do_not_auto_update_google_chrome(self):
        # import os, subprocess, shutil, sys, time, random, re
        # os.system("defaults write com.google.Keystone.Agent checkInterval 0")
        # print("25. Turned off auto update for Google Chrome")
        # time.sleep(3)
        # Neuter Keystone by replacing the GoogleSoftwareUpdate directory with a zip of itself.
        os.system("zip -r /Users/%s/Library/Google/GoogleSoftwareUpdate.zip /Users/%s/Library/Google/GoogleSoftwareUpdate" % (getpass.getuser(), getpass.getuser()))
        os.system("sudo rm -rf /Users/%s/Library/Google/GoogleSoftwareUpdate" % getpass.getuser())
        os.system("mv /Users/%s/Library/Google/GoogleSoftwareUpdate.zip /Users/%s/Library/Google/GoogleSoftwareUpdate" % (getpass.getuser(), getpass.getuser()))
        time.sleep(3)

    def m27_initiate_install_firefox_67(self):
        import os, subprocess, shutil, sys, time, random, re
        # Install Firefox
        if not os.path.exists("/Applications/Firefox 67.app"):
            os.system("wget https://ftp.mozilla.org/pub/firefox/releases/67.0/mac/en-US/Firefox%2067.0.dmg")
            os.system("""hdiutil attach "Firefox 67.0.dmg" """)
            time.sleep(4)
            os.system("""osascript -e 'tell application "System Events" to activate application "Finder"' """)
            time.sleep(4)
            os.system("""osascript -e 'tell application "System Events" to keystroke "w" using {command down}'""")
            os.system("sudo cp -r /Volumes/Firefox/Firefox.app /Applications/Firefox\ 67.app")
            os.system("diskutil unmount /Volumes/Firefox")
            assert os.path.exists("/Applications/Firefox 67.app")
            assert os.path.exists("/Applications/Firefox 67.app/Contents/MacOS/firefox-bin")
            os.remove("Firefox 67.0.dmg")
            print("22. Installed Firefox 67 application")
        time.sleep(3)

    def m27_initiate_install_firefox_46(self):
        import os, subprocess, shutil, sys, time, random, re
        # Install Firefox
        if not os.path.exists("/Applications/Firefox 46.app"):
            os.system("wget https://ftp.mozilla.org/pub/firefox/releases/46.0/mac/en-US/Firefox%2046.0.dmg")
            os.system("""hdiutil attach "Firefox 46.0.dmg" """)
            time.sleep(4)
            os.system("""osascript -e 'tell application "System Events" to activate application "Finder"' """)
            time.sleep(4)
            os.system("""osascript -e 'tell application "System Events" to keystroke "w" using {command down}'""")
            os.system("sudo cp -r /Volumes/Firefox/Firefox.app /Applications/Firefox\ 46.app")
            os.system("diskutil unmount /Volumes/Firefox")
            assert os.path.exists("/Applications/Firefox 46.app")
            assert os.path.exists("/Applications/Firefox 46.app/Contents/MacOS/firefox-bin")
            os.remove("Firefox 46.0.dmg")
            print("22. Installed Firefox 46 application")
        time.sleep(3)
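    # Firefox profile plumbing used by m29/m30 below: "firefox-bin -CreateProfile
    # sele" creates a directory named "<hash>.sele" under
    # ~/Library/Application Support/Firefox/Profiles, and a short "-p sele" launch
    # lets Firefox populate it (prefs.js in particular) before it is killed.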
    def m29_initiate_create_firefox_profile_designated(self):
        import os, subprocess, shutil, sys, time, random, re
        if not os.path.exists("/Users/%s/Library/Application Support/Firefox/Profiles" % getpass.getuser()):
            os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -CreateProfile sele")
            profiles = os.listdir("/Users/%s/Library/Application Support/Firefox/Profiles" % getpass.getuser())
            profile_names = ["".join(i.split(".")[1:]) for i in profiles]
            assert "sele" in profile_names
            designated_profile_name = [i for i in profiles if "sele" == "".join(i.split(".")[1:])][0]
            designated_profile_full_path = "/Users/%s/Library/Application Support/Firefox/Profiles/%s" % (getpass.getuser(), designated_profile_name)
            assert os.path.exists(designated_profile_full_path)
            os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -p sele &")
            time.sleep(15)
            os.system("killall firefox-bin")
            time.sleep(5)
            print("25. Created Firefox Profile and initiated files")
            time.sleep(3)
        profiles = os.listdir("/Users/%s/Library/Application Support/Firefox/Profiles" % getpass.getuser())
        profile_names = ["".join(i.split(".")[1:]) for i in profiles]
        if "sele" not in profile_names:
            os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -CreateProfile sele")
            profiles = os.listdir("/Users/%s/Library/Application Support/Firefox/Profiles" % getpass.getuser())
            profile_names = ["".join(i.split(".")[1:]) for i in profiles]
            assert "sele" in profile_names
            designated_profile_name = [i for i in profiles if "sele" == "".join(i.split(".")[1:])][0]
            designated_profile_full_path = "/Users/%s/Library/Application Support/Firefox/Profiles/%s" % (getpass.getuser(), designated_profile_name)
            assert os.path.exists(designated_profile_full_path)
            os.system("/Applications/Firefox\ 46.app/Contents/MacOS/firefox-bin -p sele &")
            time.sleep(15)
            os.system("killall firefox-bin")
            time.sleep(5)
            print("25. Created Firefox profile and initiated files")
            time.sleep(3)

    def m30_initiate_do_not_auto_update_firefox(self):
        import os, subprocess, shutil, sys, time, random, re
        profiles = os.listdir("/Users/%s/Library/Application Support/Firefox/Profiles" % getpass.getuser())
        profiles = ["/Users/%s/Library/Application Support/Firefox/Profiles/%s" % (getpass.getuser(), i) for i in profiles]
        changed_any = False
        for i in profiles:
            # profiles cannot be created with "/" in them.
            profile_name = i.split("/")[-1]
            pref_file_location = "%s/prefs.js" % (i)
            pref_file_read = open(pref_file_location, "r").read()
            if 'user_pref("app.update.enabled", true);' in pref_file_read:
                os.system("""sed -i '' -e s/'user_pref("app.update.enabled", true);'/'user_pref("app.update.enabled", false);'/g ~/Library/Application\ Support/Firefox/Profiles/{}/prefs.js """.format(profile_name))
                changed_any = True
        for i in profiles:
            profile_name = i.split("/")[-1]
            pref_file_location = "%s/prefs.js" % (i)
            pref_file_read = open(pref_file_location, "r").read()
            if 'user_pref("app.update.enabled",' not in pref_file_read:
                open(pref_file_location, "a").write('\nuser_pref("app.update.enabled", false);\n')
                changed_any = True
        designated_profile = [i for i in profiles if "sele" == "".join(i.split("/")[-1].split(".")[1:])][0]
        designated_profile_pref_file = "%s/prefs.js" % (designated_profile)
        designated_profile_pref_file_read = open(designated_profile_pref_file, "r").read()
        if 'user_pref("browser.shell.checkDefaultBrowser", false);' not in designated_profile_pref_file_read:
            open(designated_profile_pref_file, "a").write('\nuser_pref("browser.shell.checkDefaultBrowser", false);')
        if changed_any == True:
            print("26. Turned off or added auto update for all firefox profiles and set check default browser to false")
            # may need to change existing profiles/add it to a new profile
        time.sleep(3)
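    # m30 above edits prefs.js with sed/append; Firefox rewrites prefs.js on exit,
    # so these edits only stick while Firefox is closed. The same toggle in pure
    # Python (sketch, assuming pref_file_location as in m30):
    #   text = open(pref_file_location).read()
    #   text = text.replace('user_pref("app.update.enabled", true);',
    #                       'user_pref("app.update.enabled", false);')
    #   open(pref_file_location, "w").write(text)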
"^s"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Close Tab" "^w"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Quit Google Chrome" "^q"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Undo" "^z"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Cut" "^x"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Select All" "^a"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Redo Typing" "^\$z"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Undo Typing" "^z"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Redo" "^\$z"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Find..." "^f"') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Zoom In" "^="') os.system('defaults write com.google.Chrome NSUserKeyEquivalents -dict-add "Zoom Out" "^-"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "New" "^n"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Open..." "^o"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Quit Automator" "^q"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Close" "^w"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Save..." "^s"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Save" "^s"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Undo" "^z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Undo Paste" "^z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Undo Typing" "^z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Redo" "^\$z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Redo Paste" "^\$z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Redo Typing" "^\$z"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Cut" "^x"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Select All" "^a"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write com.apple.Automator NSUserKeyEquivalents -dict-add "Find..." "^f"') os.system('defaults write com.apple.systempreferences NSUserKeyEquivalents -dict-add "Quit System Preferences" "^q"') os.system('defaults write com.apple.systempreferences NSUserKeyEquivalents -dict-add "Close" "^w"') os.system('defaults write com.apple.systempreferences NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write com.apple.QuickTimePlayerX NSUserKeyEquivalents -dict-add "Open File..." 
"^o"') os.system('defaults write com.apple.QuickTimePlayerX NSUserKeyEquivalents -dict-add "Quit QuickTime Player" "^q"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Quit TextEdit" "^q"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "New" "^n"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Open..." "^o"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Close" "^w"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Save..." "^s"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Undo" "^z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Undo Paste" "^z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Undo Typing" "^z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Redo" "^\$z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Redo Paste" "^\$z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Redo Typing" "^\$z"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Cut" "^x"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Select All" "^a"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write com.apple.TextEdit NSUserKeyEquivalents -dict-add "Find..." "^f"') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "New Finder Window" "^n" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "New Tab" "^t" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Open" "^o" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Close Window" "^w" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Undo" "^z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Undo Rename" "^z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Undo New Folder" "^z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Redo" "^\$z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Redo Rename" "^\$z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Redo New Folder" "^\$z" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Cut" "^x" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Copy" "^c" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Paste" "^v" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Select All" "^a" &') #os.system('defaults write com.apple.finder NSUserKeyEquivalents -dict-add "Minimize" "^m" &') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Quit Firefox" "^q"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "New Tab" "^t"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "New Window" "^n"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "New Private Window" "^\$p"') 
os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Open File..." "^o"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Close Tab" "^w"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Save Page As..." "^s"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Undo" "^z"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Redo" "^\$z"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Cut" "^x"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Select All" "^a"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Find in This Page..." "^f"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Zoom In" "^="') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Zoom Out" "^-"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write org.mozilla.firefox NSUserKeyEquivalents -dict-add "Select Location Bar" "^l"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Quit Terminal" "^q"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "\033Shell\033New Window\033Basic" "^n"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "\033Shell\033New Tab\033Basic" "^t"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Close Window" "^w"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Close Tab" "^w"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Find..." "^f"') os.system('defaults write com.apple.Terminal NSUserKeyEquivalents -dict-add "Minimize" "^m"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Quit Sublime Text" "^q"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "New File" "^t"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Open..." 
"^o"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Reopen Closed File" "^\$t"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Save" "^s"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "New Window" "^n"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Close File" "^w"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Insert Snippet" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Insert Characters" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Paste" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Cut" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Insert Best Completion" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Left Delete" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Delete to Hard EOL" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Delete Word" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Yank" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Delete Left Right" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Reindent" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Add Line" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Indent" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Undo Toggle Comment" "^z"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Insert Snippet" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Insert Characters" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Paste" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Cut" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Insert Best Completion" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Left Delete" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Delete to Hard EOL" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Delete Word" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Yank" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Delete Left Right" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Reindent" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Add Line" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Indent" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Toggle Comment" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat" "^y"') os.system('defaults write com.sublimetext.3 
NSUserKeyEquivalents -dict-add "Repeat Insert Snippet" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Insert Characters" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Paste" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Cut" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Insert Best Completion" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Left Delete" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Delete to Hard EOL" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Delete Word" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Yank" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Delete Left Right" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Reindent" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Repeat Add Line" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Indent" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Redo Toggle Comment" "^y"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Copy" "^c"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Cut" "^x"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Paste" "^v"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Toggle Comment" "^/"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Select All" "^a"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Expand Selection to Line" "^l"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Expand Selection to Word" "^d"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Find..." 
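    # NSUserKeyEquivalents key syntax used above: "^" is Control, "$" is Shift,
    # "@" is Command and "~" is Option, so "^\$t" (the "\$" survives the shell as
    # a literal "$") registers Control-Shift-T. Verify from a terminal with:
    #   defaults read com.google.Chrome NSUserKeyEquivalents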
"^f"') os.system('defaults write com.sublimetext.3 NSUserKeyEquivalents -dict-add "Minimize" "^m"') def m32_migrate_sql(self): import json, os, subprocess, shutil, sys, time, random, re class SQL(object): def getTables(self, db_name): return [i for i in subprocess.check_output("mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A %s -e 'show tables;'"%(db_name), shell=True).split("\n") if i.startswith("soda")] def showColumns(self, db_name, table): os.system("mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A %s -e 'show columns from %s;'"%(db_name, table)) def restore_db_1(self, db, infile): os.system('mysql --max_allowed_packet=1000000000M -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A %s < "%s"'%(db, infile)) def migrate(self, database, modelstring="on_clipboard"): modelstrings = [re.findall(r"(?s)(.*):End:", ("class" + modelstring))[0] for modelstring in modelstring.split("\nclass")] # [TESTING] return modelstrings for modelstring in modelstrings: test_modelstring = """ class Audience(Worksheet): id = AutoField() name = CharField() pcs = FloatField() roi = FloatField() spent = FloatField() pcv = FloatField() flex = FloatField() state = IntegerField() fb_page_id = CharField() niche = CharField() objects = BaseModelManager()""" base = "mysql -u root --password=w24uyLMGU2TWdkBdUKMWySQiAcfdjB1A {} -e '%s'".format(database) import string q = "\n".join([i for i in modelstring.strip().split("\n") if "Field(" not in i]) print(q) modelstring = "\n".join([i for i in modelstring.strip().split("\n") if "Field(" in i or i.startswith("class")]) table = re.findall(r"class (.*)\(", modelstring)[0].lower() table = database + "_" + table ### print(table) fieldnames = re.findall(r" ([a-zA-Z0-9_].*?) =", modelstring) fieldtypes = re.findall(r"= (.*?\))", modelstring) from collections import OrderedDict fields = OrderedDict(zip(fieldnames, fieldtypes)) fields.pop("objects", "") def generate_sql_fields(database, fields): sql_fields = OrderedDict() unique_field_count = 0 unique_field_names = [] for k,v in fields.items(): if ("unique=True") in v: unique_field_count = unique_field_count + 1 unique_field_names.append(k) print("unique_field_count: %s | unique_field_names: %s | " % (unique_field_count, unique_field_names)) for k, v in fields.items(): dtype = None if "AutoField()" in v: dtype = " int(11) AUTO_INCREMENT PRIMARY KEY" elif ("BigIntegerField" in v) and ("primary_key" in v): dtype = "bigint(20) PRIMARY KEY" elif ("BigIntegerField" in v) and ("unique=True" in v) and (unique_field_count == 1): # if unique_field_count > 1:, add Constraint @ End. dtype = "bigint(20) UNIQUE" elif "BigIntegerField" in v: dtype = "bigint(20)" elif ("IntegerField" in v) and ("primary_key" in v): dtype = "int(11) PRIMARY KEY" elif ("IntegerField" in v) and ("unique=True" in v) and (unique_field_count == 1): # if unique_field_count > 1:, add Constraint @ End. dtype = "int(11) UNIQUE" elif "IntegerField" in v: dtype = "int(11)" elif "DecimalField" in v: dtype = "decimal(16,3)" elif ("FloatField" in v) and ("unique=True" in v) and (unique_field_count == 1): dtype = "double UNIQUE" elif "FloatField" in v: dtype = "double" elif ("DateTimeField" in v) and ("null=True" in v): dtype = "datetime null default null" elif ("DateTimeField" in v) and ("unique=True" in v): dtype = "datetime null default null UNIQUE" elif "DateTimeField" in v: dtype = "datetime(6)" elif ("DateField" in v) and ("unique=True" in v) and (unique_field_count == 1): # if unique_field_count >1:, add Constraint @ End. 
dtype = "date UNIQUE" elif "DateField" in v: dtype = "date" # CharField's length not too long cuz of primary_keys. elif ("CharField" in v) and ("primary_key" in v): max_length = int(v[v.index("max_length")+len("max_length")+2:v.index("max_length")+len("max_length")+6]) if ("max_length" in v) else 512 ; dtype = "varchar({}) PRIMARY KEY".format(str(int(max_length))) if ("unique" in v or "primary_key" in v) else "varchar({}) PRIMARY KEY".format(str(int(max_length))); #dtype = "varchar({}) PRIMARY KEY".format(str(int(max_length))) elif ("CharField" in v) and ("unique" in v) and ("True" in v) and (unique_field_count == 1): max_length = int(v[v.index("max_length")+len("max_length")+2:v.index("max_length")+len("max_length")+6]) if ("max_length" in v) else 512 ; dtype = "varchar({}) UNIQUE".format(str(int(max_length))) if ("unique" in v or "primary_key" in v) else "varchar({}) UNIQUE".format(str(int(max_length))); #dtype = "varchar({}) UNIQUE".format(str(int(max_length))) elif "CharField" in v: max_length = int(v[v.index("max_length")+len("max_length")+2:v.index("max_length")+len("max_length")+6]) if ("max_length" in v) else 512 ; dtype = "varchar({})".format(str(int(max_length))) if ("unique" in v or "primary_key" in v) else "varchar({})".format(str(int(max_length))); #dtype = "varchar({})".format(str(int(max_length))) elif "NullBooleanField()" in v: dtype = "tinyint(1)" #@[2018.11.23 10:03 AM] I had previously not included a unique=True here OR a if unique_field_count == 1... Remember that you Must add unique_field_count at End if it is above 1 unique_field, `oh well.` i woulda not been bustered by BooleanField. &*I DO NOT HAVE THE THONGS TO TEST THIS RIGHT NOW. LOL. you use-test it. elif ("BooleanField" in v) and ("unique=True" in v) and (unique_field_count == 1): # if unique_field_count >1:, add Constraint @ End. dtype = "" elif ("BooleanField" in v) and ("unique=True" in v) and ("default=True" in v) and (unique_field_count == 1): # if unique_field_count >1:, add Constraint @ End. dtype = "tinyint(1) NOT NULL DEFAULT TRUE UNIQUE" elif ("BooleanField" in v) and ("unique=True" in v) and ("default=False" in v) and (unique_field_count == 1): # if unique_field_count >1:, add Constraint @ End. 
dtype = "tinyint(1) NOT NULL DEFAULT FALSE UNIQUE" elif ("BooleanField" in v) and ("default=True" in v): dtype = "tinyint(1) NOT NULL DEFAULT TRUE" elif ("BooleanField" in v) and ("default=False" in v): dtype = "tinyint(1) NOT NULL DEFAULT FALSE" elif ("BooleanField" in v) and ("default" not in v): dtype = "tinyint(1) NOT NULL DEFAULT FALSE" elif "JSONField()" in v: dtype = "longtext DEFAULT NULL" # confused if this works elif "BinaryField" in v: dtype = "blob({})".format(4294967295) sql_fields[k] = dtype print('sql fields: "%s"' % json.dumps(sql_fields,indent=4)) if (unique_field_count != 0) and (unique_field_count != 1): constraint_name = "_".join(unique_field_names) constraint_values = ",".join(unique_field_names) sql_fields["unique_constraint"] = "CONSTRAINT %s UNIQUE (%s)" % (constraint_name, constraint_values) #< return sql_fields sql_fields = generate_sql_fields(database, fields) time.sleep(0.1) def create_database(): print("\n\n == Create Database ==") sql_str = base % "create database %s;" % database print(sql_str) os.system(sql_str) create_database() time.sleep(0.1) def create_table(): print("\n\n == Create Table ==") sql_str = "create table {} (%s);".format(table) column_strs = "" for k,v in sql_fields.items(): if k != "unique_constraint": column_strs += "%s %s," % (k, v) elif k == "unique_constraint": column_strs += v sql_str = sql_str % column_strs sql_str = sql_str.replace(",);", ");") sql_str = base % sql_str print(sql_str) os.system(sql_str) create_table() time.sleep(0.1) def add_columns(): print("\n\n == Add Columns ==") for k,v in sql_fields.items(): if k != "unique_constraint": column_str = "alter table %s add column %s %s" % (table, k, v) sql_str = base % column_str print(sql_str) os.system(sql_str) add_columns() time.sleep(0.1) def alter_columns(): print("\n\n == Alter Columns ==") for k,v in sql_fields.items(): if k != "unique_constraint": column_str = "alter table %s modify column %s %s" % (table, k, v) sql_str = base % column_str print(sql_str) os.system(sql_str) alter_columns() time.sleep(0.1) def rearrange_columns(): print("\n\n == Alter Columns ==") for i, j in zip(list(sql_fields.keys())[1:], list(sql_fields.keys())[0:-1] ): if i != "unique_constraint": column_str = "alter table %s modify column %s %s after %s" % (table, i, sql_fields[i], j) print(column_str) sql_str = base % column_str print(sql_str) os.system(sql_str) rearrange_columns() time.sleep(0.1) print("\n\n == Show Columns ==") self.showColumns(database, table) print(modelstring) time.sleep(0.1) #if SQL().getTables("soda") != ['soda_aceinthehole', 'soda_adset', 'soda_adset_duplicate', 'soda_adsethourlyinsight', 'soda_adsethourlyinsightdata', 'soda_adsetinsight', 'soda_adsettest', 'soda_aliexpressaffiliatelink', 'soda_aliexpressorder', 'soda_aliexpressorder_product', 'soda_aliexpresspayment', 'soda_aliexpressproduct', 'soda_aliexpressstore', 'soda_aliexpressvendor', 'soda_audience', 'soda_binarydata', 'soda_combination', 'soda_cookies', 'soda_cruxsupplier', 'soda_emit', 'soda_executabletext', 'soda_facebookadaccountspend', 'soda_facebookimageupload', 'soda_facebookkeyword', 'soda_facebookkeywordlist', 'soda_facebookpage', 'soda_ghostproduct', 'soda_handle', 'soda_interest', 'soda_interestinsight', 'soda_lastcheckedtime', 'soda_lineitem', 'soda_muta', 'soda_new_email', 'soda_niche', 'soda_order', 'soda_password', 'soda_patchtrack', 'soda_pricedecrease', 'soda_priceincrease', 'soda_product', 'soda_proxy', 'soda_recompilate', 'soda_reposit', 'soda_request', 'soda_routing', 'soda_sciencevessel', 
        # "w" mode added below: the original opened the dump file read-only, so .write() would fail.
        open(homepath("~/tavern/tavern/soda/soda.sql"), "w").write("-- MySQL dump 10.13 Distrib 8.0.15, for osx10.14 (x86_64)\n--\n-- Host: localhost Database: soda\n-- ------------------------------------------------------\n-- Server version\t8.0.15\n\n/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;\n/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;\n/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;\n SET NAMES utf8mb4 ;\n/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;\n/*!40103 SET TIME_ZONE='+00:00' */;\n/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;\n/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;\n/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;\n/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;\n\n--\n-- Table structure for table `soda_aceinthehole`\n--\n\nDROP TABLE IF EXISTS `soda_aceinthehole`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_aceinthehole` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `account` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `date` date DEFAULT NULL,\n `amount` double DEFAULT NULL,\n `type` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `tag` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `description` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=246 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_aceinthehole`\n--\n\nLOCK TABLES `soda_aceinthehole` WRITE;\n/*!40000 ALTER TABLE `soda_aceinthehole` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_aceinthehole` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_aceintheholeheadercolumns`\n--\n\nDROP TABLE IF EXISTS `soda_aceintheholeheadercolumns`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_aceintheholeheadercolumns` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `account` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `header_column_dict` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `active` int(11) DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_aceintheholeheadercolumns`\n--\n\nLOCK TABLES `soda_aceintheholeheadercolumns` WRITE;\n/*!40000 ALTER TABLE `soda_aceintheholeheadercolumns` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_aceintheholeheadercolumns` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_aceintheholetype`\n--\n\nDROP TABLE IF EXISTS `soda_aceintheholetype`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = 
utf8mb4 ;
CREATE TABLE `soda_aceintheholetype` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `name` (`name`),
 UNIQUE KEY `name_2` (`name`),
 UNIQUE KEY `name_3` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=24 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aceintheholetype`
--

LOCK TABLES `soda_aceintheholetype` WRITE;
/*!40000 ALTER TABLE `soda_aceintheholetype` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aceintheholetype` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aceintheholetypetag`
--

DROP TABLE IF EXISTS `soda_aceintheholetypetag`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aceintheholetypetag` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `type` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `tag` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `sign` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `type_tag` (`type`,`tag`)
) ENGINE=InnoDB AUTO_INCREMENT=65 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aceintheholetypetag`
--

LOCK TABLES `soda_aceintheholetypetag` WRITE;
/*!40000 ALTER TABLE `soda_aceintheholetypetag` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aceintheholetypetag` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_additemstest`
--

DROP TABLE IF EXISTS `soda_additemstest`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_additemstest` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `x` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1163 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_additemstest`
--

LOCK TABLES `soda_additemstest` WRITE;
/*!40000 ALTER TABLE `soda_additemstest` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_additemstest` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_addproduct`
--

DROP TABLE IF EXISTS `soda_addproduct`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_addproduct` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `is_free_plus_ship` int(11) DEFAULT NULL,
 `page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `x` varchar(4196) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `y` longblob,
 `url` varchar(4196) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `sent` int(11) DEFAULT NULL,
 `created_at` datetime(6) DEFAULT NULL,
 `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `image_idx` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `image_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `option_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `variant_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `size_chart` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `my_description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `logo` int(11) DEFAULT NULL,
 `logo_size` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `logo_scaling_dimension_size` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `append_pictures` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `aliexpressvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=320 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_addproduct`
--

LOCK TABLES `soda_addproduct` WRITE;
/*!40000 ALTER TABLE `soda_addproduct` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_addproduct` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adset`
--

DROP TABLE IF EXISTS `soda_adset`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adset` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `last_check` datetime(6) DEFAULT NULL,
 `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `shop_abbreviation` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `facebook_page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `product_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `image_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `complete_create` tinyint(1) NOT NULL DEFAULT '0',
 `icon` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `is_created` tinyint(1) NOT NULL DEFAULT '0',
 `ad_account_id` bigint(20) DEFAULT NULL,
 `campaign_id` bigint(20) DEFAULT NULL,
 `adset_id` bigint(20) DEFAULT NULL,
 `created_time` datetime(6) DEFAULT NULL,
 `date_last_requested_keyword_stats` int(11) DEFAULT NULL,
 `click_attribution` int(11) DEFAULT NULL,
 `view_attribution` int(11) DEFAULT NULL,
 `custom_event_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `billing_event` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `optimization_goal` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `recommendations` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `bid_info` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `device_platforms` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `publisher_platforms` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `facebook_positions` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `targeting_optimization` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `user_device` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `user_os` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `age_min` int(11) DEFAULT NULL,
 `age_max` int(11) DEFAULT NULL,
 `genders` int(11) DEFAULT NULL,
 `geo_locations` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `daily_budget` double DEFAULT NULL,
 `body` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `effective_object_story_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `source_adset_id` bigint(20) DEFAULT NULL,
 `custom_audiences` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `original_caid` bigint(20) DEFAULT NULL,
 `interest_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec1` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec2` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec3` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec4` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec5` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `handle` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `campaign_id_adset_id` (`campaign_id`,`adset_id`)
) ENGINE=InnoDB AUTO_INCREMENT=61 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adset`
--

LOCK TABLES `soda_adset` WRITE;
/*!40000 ALTER TABLE `soda_adset` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adset` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adset_duplicate`
--

DROP TABLE IF EXISTS `soda_adset_duplicate`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adset_duplicate` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `original_adset_id` bigint(20) DEFAULT NULL,
 `adset_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `budgets` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `to_duplicate` tinyint(1) NOT NULL DEFAULT '0',
 `duplicate_count` int(11) DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `original_adset_id` (`original_adset_id`),
 UNIQUE KEY `original_adset_id_2` (`original_adset_id`),
 UNIQUE KEY `original_adset_id_3` (`original_adset_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adset_duplicate`
--

LOCK TABLES `soda_adset_duplicate` WRITE;
/*!40000 ALTER TABLE `soda_adset_duplicate` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adset_duplicate` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adsethourlyinsight`
--

DROP TABLE IF EXISTS `soda_adsethourlyinsight`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adsethourlyinsight` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `ad_account_id` bigint(20) DEFAULT NULL,
 `adset_id` bigint(20) DEFAULT NULL,
 `date` double DEFAULT NULL,
 `frequency` double DEFAULT NULL,
 `impression` int(11) DEFAULT NULL,
 `impression_rate` double DEFAULT NULL,
 `impression_cost` double DEFAULT NULL,
 `post_click` int(11) DEFAULT NULL,
 `post_click_cost` double DEFAULT NULL,
 `post_click_rate` double DEFAULT NULL,
 `click` int(11) DEFAULT NULL,
 `click_cost` double DEFAULT NULL,
 `click_rate` double DEFAULT NULL,
 `add_to_cart` int(11) DEFAULT NULL,
 `add_to_cart_cost` double DEFAULT NULL,
 `add_to_cart_rate` double DEFAULT NULL,
 `website_purchase` int(11) DEFAULT NULL,
 `offsite_conversion` int(11) DEFAULT NULL,
 `website_purchase_cost` double DEFAULT NULL,
 `website_purchase_rate` double DEFAULT NULL,
 `spend` double DEFAULT NULL,
 `website_purchase_value` double DEFAULT NULL,
 `return_on_investment` double DEFAULT NULL,
 `reach` int(11) DEFAULT NULL,
 `reach_cost` double DEFAULT NULL,
 `reach_rate` double DEFAULT NULL,
 `landing_page_view` int(11) DEFAULT NULL,
 `landing_page_view_cost` double DEFAULT NULL,
 `landing_page_view_rate` double DEFAULT NULL,
 `fb_pixel_view_content` int(11) DEFAULT NULL,
 `fb_pixel_view_content_cost` double DEFAULT NULL,
 `fb_pixel_view_content_rate` double DEFAULT NULL,
 `fb_pixel_initiate_checkout` int(11) DEFAULT NULL,
 `fb_pixel_initiate_checkout_cost` double DEFAULT NULL,
 `fb_pixel_initiate_checkout_rate` double DEFAULT NULL,
 `page_engagement` int(11) DEFAULT NULL,
 `page_engagement_cost` double DEFAULT NULL,
 `page_engagement_rate` double DEFAULT NULL,
 `post_engagement` int(11) DEFAULT NULL,
 `post_engagement_cost` double DEFAULT NULL,
 `post_engagement_rate` double DEFAULT NULL,
 `post_reaction` int(11) DEFAULT NULL,
 `post_reaction_cost` double DEFAULT NULL,
 `post_reaction_rate` double DEFAULT NULL,
 `order_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`),
 UNIQUE KEY `adset_id_date` (`adset_id`,`date`)
) ENGINE=InnoDB AUTO_INCREMENT=376831 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adsethourlyinsight`
--

LOCK TABLES `soda_adsethourlyinsight` WRITE;
/*!40000 ALTER TABLE `soda_adsethourlyinsight` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adsethourlyinsight` ENABLE KEYS */;
UNLOCK TABLES;
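--
-- Illustrative query (not part of the original dump): a read-only sanity check,
-- assuming `return_on_investment` is derived as `website_purchase_value` / `spend`.
-- That formula is an assumption, not confirmed by the dump; NULLIF guards against
-- division by zero, and a SELECT stays side-effect free if the dump is replayed.
--
SELECT adset_id, date, spend, website_purchase_value, return_on_investment,
       website_purchase_value / NULLIF(spend, 0) AS computed_roi
FROM soda_adsethourlyinsight
ORDER BY date DESC;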

--
-- Table structure for table `soda_adsethourlyinsightdata`
--

DROP TABLE IF EXISTS `soda_adsethourlyinsightdata`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adsethourlyinsightdata` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `ad_account_id` bigint(20) DEFAULT NULL,
 `adset_id` bigint(20) DEFAULT NULL,
 `date` double DEFAULT NULL,
 `spend` double DEFAULT NULL,
 `website_purchase_value` double DEFAULT NULL,
 `impression_move` int(11) DEFAULT NULL,
 `post_click_move` int(11) DEFAULT NULL,
 `click_move` int(11) DEFAULT NULL,
 `add_to_cart_move` int(11) DEFAULT NULL,
 `website_purchase_move` int(11) DEFAULT NULL,
 `reach_move` int(11) DEFAULT NULL,
 `landing_page_view_move` int(11) DEFAULT NULL,
 `fb_pixel_view_content_move` int(11) DEFAULT NULL,
 `fb_pixel_initiate_checkout_move` int(11) DEFAULT NULL,
 `page_engagement_move` int(11) DEFAULT NULL,
 `post_engagement_move` int(11) DEFAULT NULL,
 `post_reaction_move` int(11) DEFAULT NULL,
 `impression_adspendvg` double DEFAULT NULL,
 `post_click_adspendvg` double DEFAULT NULL,
 `click_adspendvg` double DEFAULT NULL,
 `add_to_cart_adspendvg` double DEFAULT NULL,
 `website_purchase_adspendvg` double DEFAULT NULL,
 `reach_adspendvg` double DEFAULT NULL,
 `landing_page_view_adspendvg` double DEFAULT NULL,
 `fb_pixel_view_content_adspendvg` double DEFAULT NULL,
 `fb_pixel_initiate_checkout_adspendvg` double DEFAULT NULL,
 `page_engagement_adspendvg` double DEFAULT NULL,
 `post_engagement_adspendvg` double DEFAULT NULL,
 `post_reaction_adspendvg` double DEFAULT NULL,
 `impression_ratevg` double DEFAULT NULL,
 `post_click_ratevg` double DEFAULT NULL,
 `click_ratevg` double DEFAULT NULL,
 `add_to_cart_ratevg` double DEFAULT NULL,
 `website_purchase_ratevg` double DEFAULT NULL,
 `reach_ratevg` double DEFAULT NULL,
 `landing_page_view_ratevg` double DEFAULT NULL,
 `fb_pixel_view_content_ratevg` double DEFAULT NULL,
 `fb_pixel_initiate_checkout_ratevg` double DEFAULT NULL,
 `page_engagement_ratevg` double DEFAULT NULL,
 `post_engagement_ratevg` double DEFAULT NULL,
 `post_reaction_ratevg` double DEFAULT NULL,
 `frequency` double DEFAULT NULL,
 `offsite_conversion` int(11) DEFAULT NULL,
 `return_on_investment` double DEFAULT NULL,
 `order_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `shop_abbreviation` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `facebook_page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `product_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `image_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `complete_create` tinyint(1) NOT NULL DEFAULT '0',
 `is_created` tinyint(1) NOT NULL DEFAULT '0',
 `campaign_id` bigint(20) DEFAULT NULL,
 `created_time` datetime(6) DEFAULT NULL,
 `date_last_requested_keyword_stats` int(11) DEFAULT NULL,
 `click_attribution` int(11) DEFAULT NULL,
 `view_attribution` int(11) DEFAULT NULL,
 `custom_event_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `billing_event` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `optimization_goal` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `recommendations` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `bid_info` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `bid_strategy` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `device_platforms` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `publisher_platforms` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `facebook_positions` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `targeting_optimization` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `user_device` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `user_os` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `age_min` int(11) DEFAULT NULL,
 `age_max` int(11) DEFAULT NULL,
 `genders` int(11) DEFAULT NULL,
 `geo_locations` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `daily_budget` double DEFAULT NULL,
 `body` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `effective_object_story_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `source_adset_id` bigint(20) DEFAULT NULL,
 `custom_audiences` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec1` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec2` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec3` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec4` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `flexible_spec5` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `notes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`),
 UNIQUE KEY `adset_id_date` (`adset_id`,`date`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adsethourlyinsightdata`
--

LOCK TABLES `soda_adsethourlyinsightdata` WRITE;
/*!40000 ALTER TABLE `soda_adsethourlyinsightdata` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adsethourlyinsightdata` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adsetinsight`
--

DROP TABLE IF EXISTS `soda_adsetinsight`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adsetinsight` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `ad_account_id` bigint(20) DEFAULT NULL,
 `adset_id` bigint(20) DEFAULT NULL,
 `date` int(11) DEFAULT NULL,
 `frequency` double DEFAULT NULL,
 `impression` int(11) DEFAULT NULL,
 `impression_rate` double DEFAULT NULL,
 `impression_cost` double DEFAULT NULL,
 `post_click` int(11) DEFAULT NULL,
 `post_click_cost` double DEFAULT NULL,
 `post_click_rate` double DEFAULT NULL,
 `click` int(11) DEFAULT NULL,
 `click_cost` double DEFAULT NULL,
 `click_rate` double DEFAULT NULL,
 `add_to_cart` int(11) DEFAULT NULL,
 `add_to_cart_cost` double DEFAULT NULL,
 `add_to_cart_rate` double DEFAULT NULL,
 `website_purchase` int(11) DEFAULT NULL,
 `website_purchase_cost` double DEFAULT NULL,
 `website_purchase_rate` double DEFAULT NULL,
 `spend` double DEFAULT NULL,
 `website_purchase_value` double DEFAULT NULL,
 `return_on_investment` double DEFAULT NULL,
 `reach` int(11) DEFAULT NULL,
 `reach_cost` double DEFAULT NULL,
 `reach_rate` double DEFAULT NULL,
 `landing_page_view` int(11) DEFAULT NULL,
 `landing_page_view_cost` double DEFAULT NULL,
 `landing_page_view_rate` double DEFAULT NULL,
 `fb_pixel_view_content` int(11) DEFAULT NULL,
 `fb_pixel_view_content_cost` double DEFAULT NULL,
 `fb_pixel_view_content_rate` double DEFAULT NULL,
 `fb_pixel_initiate_checkout` int(11) DEFAULT NULL,
 `fb_pixel_initiate_checkout_cost` double DEFAULT NULL,
 `fb_pixel_initiate_checkout_rate` double DEFAULT NULL,
 `page_engagement` int(11) DEFAULT NULL,
 `page_engagement_cost` double DEFAULT NULL,
 `page_engagement_rate` double DEFAULT NULL,
 `post_engagement` int(11) DEFAULT NULL,
 `post_engagement_cost` double DEFAULT NULL,
 `post_engagement_rate` double DEFAULT NULL,
 `post_reaction` int(11) DEFAULT NULL,
 `post_reaction_cost` double DEFAULT NULL,
 `post_reaction_rate` double DEFAULT NULL,
 `order_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`),
 UNIQUE KEY `adset_id_date` (`adset_id`,`date`)
) ENGINE=InnoDB AUTO_INCREMENT=34 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adsetinsight`
--

LOCK TABLES `soda_adsetinsight` WRITE;
/*!40000 ALTER TABLE `soda_adsetinsight` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adsetinsight` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adsettest`
--

DROP TABLE IF EXISTS `soda_adsettest`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adsettest` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `original_adset_id` bigint(20) DEFAULT NULL,
 `factor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `adset_id_1` bigint(20) DEFAULT NULL,
 `adset_id_2` bigint(20) DEFAULT NULL,
 `date_start` int(11) DEFAULT NULL,
 `winner` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `is_created` tinyint(1) NOT NULL DEFAULT '0',
 PRIMARY KEY (`id`),
 UNIQUE KEY `original_adset_id_factor` (`original_adset_id`,`factor`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adsettest`
--

LOCK TABLES `soda_adsettest` WRITE;
/*!40000 ALTER TABLE `soda_adsettest` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adsettest` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_adstip`
--

DROP TABLE IF EXISTS `soda_adstip`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_adstip` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `adstip` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_adstip`
--

LOCK TABLES `soda_adstip` WRITE;
/*!40000 ALTER TABLE `soda_adstip` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_adstip` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpress_dispute`
--

DROP TABLE IF EXISTS `soda_aliexpress_dispute`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpress_dispute` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `dispute_id` bigint(20) DEFAULT NULL,
 `idx_idx` int(11) DEFAULT NULL,
 `account` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `store` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `order_id` bigint(20) DEFAULT NULL,
 `lineitem_id` bigint(20) DEFAULT NULL,
 `ali_order_number` bigint(20) DEFAULT NULL,
 `quantity` int(11) DEFAULT NULL,
 `is_refund` int(11) DEFAULT NULL,
 `is_return` int(11) DEFAULT NULL,
 `received_order` int(11) DEFAULT NULL,
 `dispute_reason` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `refund_amount` double DEFAULT NULL,
 `dispute_details` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `dispute_reason_specification` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `dispute_status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `end` int(11) DEFAULT NULL,
 `is_accept` int(11) DEFAULT NULL,
 `dispute_order_total` double DEFAULT NULL,
 `date` datetime(6) DEFAULT NULL,
 `response` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `initiator` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `action` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `reason_and_detail` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `attachment` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `proposal` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `comments` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `evidence` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `dispute_id_idx_idx` (`dispute_id`,`idx_idx`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpress_dispute`
--

LOCK TABLES `soda_aliexpress_dispute` WRITE;
/*!40000 ALTER TABLE `soda_aliexpress_dispute` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpress_dispute` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressaffiliatelink`
--

DROP TABLE IF EXISTS `soda_aliexpressaffiliatelink`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressaffiliatelink` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `page_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `tracking_link` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressaffiliatelink`
--

LOCK TABLES `soda_aliexpressaffiliatelink` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressaffiliatelink` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressaffiliatelink` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressorder`
--

DROP TABLE IF EXISTS `soda_aliexpressorder`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressorder` (
 `id` bigint(20) NOT NULL,
 `email_address` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `order_time` double DEFAULT NULL,
 `order_status` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `order_reminder` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `contact_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `address1` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `address2` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `zip_code` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `mobile` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `total_amount` double DEFAULT NULL,
 `datetime` double DEFAULT NULL,
 `tracking_number` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `tracking_method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `comments` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressorder`
--

LOCK TABLES `soda_aliexpressorder` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressorder` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressorder` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressorder_event`
--

DROP TABLE IF EXISTS `soda_aliexpressorder_event`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressorder_event` (
 `id` bigint(20) NOT NULL,
 `order_time` double DEFAULT NULL,
 `order_status` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `order_reminder` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `contact_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `address1` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `address2` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `zip_code` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `mobile` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `total_amount` double DEFAULT NULL,
 `datetime` double DEFAULT NULL,
 `tracking_number` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `tracking_method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `comments` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `event` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `reorder_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressorder_event`
--

LOCK TABLES `soda_aliexpressorder_event` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressorder_event` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressorder_event` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressorder_product`
--

DROP TABLE IF EXISTS `soda_aliexpressorder_product`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressorder_product` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `aliexpress_order_id` bigint(20) DEFAULT NULL,
 `aliexpress_order_id_idx` int(11) DEFAULT NULL,
 `product_id` bigint(20) DEFAULT NULL,
 `ali_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `item_price` double DEFAULT NULL,
 `price` double DEFAULT NULL,
 `quantity` int(11) DEFAULT NULL,
 `options` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `shipping_method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `shipping_price` double DEFAULT NULL,
 `comment` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `status` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`),
 UNIQUE KEY `aliexpress_order_id_aliexpress_order_id_idx` (`aliexpress_order_id`,`aliexpress_order_id_idx`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressorder_product`
--

LOCK TABLES `soda_aliexpressorder_product` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressorder_product` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressorder_product` ENABLE KEYS */;
UNLOCK TABLES;
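--
-- Illustrative query (not in the original dump): the composite unique key
-- `aliexpress_order_id_aliexpress_order_id_idx` suggests each order's products are
-- stored as an ordered list. A read-only sketch joining orders to their line items:
--
SELECT o.id AS order_id, o.contact_name, p.aliexpress_order_id_idx,
       p.title, p.quantity, p.price
FROM soda_aliexpressorder AS o
JOIN soda_aliexpressorder_product AS p ON p.aliexpress_order_id = o.id
ORDER BY o.id, p.aliexpress_order_id_idx;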

--
-- Table structure for table `soda_aliexpressorder_update`
--

DROP TABLE IF EXISTS `soda_aliexpressorder_update`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressorder_update` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `last_check` datetime(6) DEFAULT NULL,
 `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressorder_update`
--

LOCK TABLES `soda_aliexpressorder_update` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressorder_update` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressorder_update` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressstore`
--

DROP TABLE IF EXISTS `soda_aliexpressstore`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressstore` (
 `id` bigint(20) NOT NULL,
 `search_string` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `followers` bigint(20) DEFAULT NULL,
 `ratings` bigint(20) DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressstore`
--

LOCK TABLES `soda_aliexpressstore` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressstore` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressstore` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_aliexpressvendor`
--

DROP TABLE IF EXISTS `soda_aliexpressvendor`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_aliexpressvendor` (
 `id` bigint(20) NOT NULL,
 `count_orders` int(11) DEFAULT NULL,
 `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `product_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `average_days_to_ship` int(11) DEFAULT NULL,
 `average_days_to_arrive` int(11) DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_aliexpressvendor`
--

LOCK TABLES `soda_aliexpressvendor` WRITE;
/*!40000 ALTER TABLE `soda_aliexpressvendor` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_aliexpressvendor` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_application`
--

DROP TABLE IF EXISTS `soda_application`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_application` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `field` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `location` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `time` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `contact` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `compensation` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `employment_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `got_contact` int(11) DEFAULT NULL,
 `responded` int(11) DEFAULT NULL,
 `visited` int(11) DEFAULT NULL,
 `not_respond_to_title` int(11) DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_application`
--

LOCK TABLES `soda_application` WRITE;
/*!40000 ALTER TABLE `soda_application` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_application` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_application2`
--

DROP TABLE IF EXISTS `soda_application2`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_application2` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `contact` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `info` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `responded` int(11) DEFAULT NULL,
 `msg` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`),
 UNIQUE KEY `url` (`url`),
 UNIQUE KEY `url_2` (`url`),
 UNIQUE KEY `url_3` (`url`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_application2`
--

LOCK TABLES `soda_application2` WRITE;
/*!40000 ALTER TABLE `soda_application2` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_application2` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_approvedtransaction`
--

DROP TABLE IF EXISTS `soda_approvedtransaction`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_approvedtransaction` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `account` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `date` date DEFAULT NULL,
 `price` double DEFAULT NULL,
 `type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `tag` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `description` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `what_i_think_my_current_balance_is` double DEFAULT NULL,
 `approved` int(11) DEFAULT NULL,
 `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=246 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_approvedtransaction`
--

LOCK TABLES `soda_approvedtransaction` WRITE;
/*!40000 ALTER TABLE `soda_approvedtransaction` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_approvedtransaction` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_audience`
--

DROP TABLE IF EXISTS `soda_audience`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_audience` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `pcs` decimal(16,3) DEFAULT NULL,
 `roi` decimal(16,3) DEFAULT NULL,
 `spent` decimal(16,3) DEFAULT NULL,
 `pcv` decimal(16,3) DEFAULT NULL,
 `flex` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 `state` int(11) DEFAULT NULL,
 `fb_page_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_audience`
--

LOCK TABLES `soda_audience` WRITE;
/*!40000 ALTER TABLE `soda_audience` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_audience` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_binarydata`
--

DROP TABLE IF EXISTS `soda_binarydata`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_binarydata` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `filetype` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `filename` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `datatype` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `binarydata` longblob,
 PRIMARY KEY (`id`),
 UNIQUE KEY `filename` (`filename`),
 UNIQUE KEY `filename_2` (`filename`),
 UNIQUE KEY `filename_3` (`filename`)
) ENGINE=InnoDB AUTO_INCREMENT=22 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_binarydata`
--

LOCK TABLES `soda_binarydata` WRITE;
/*!40000 ALTER TABLE `soda_binarydata` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_binarydata` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_combination`
--

DROP TABLE IF EXISTS `soda_combination`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_combination` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `slot` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `combination` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_combination`
--

LOCK TABLES `soda_combination` WRITE;
/*!40000 ALTER TABLE `soda_combination` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_combination` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_content`
--

DROP TABLE IF EXISTS `soda_content`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_content` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `content_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `text` longblob,
 `page_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `collection_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=94 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_content`
--

LOCK TABLES `soda_content` WRITE;
/*!40000 ALTER TABLE `soda_content` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_content` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_content_image`
--

DROP TABLE IF EXISTS `soda_content_image`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
 SET character_set_client = utf8mb4 ;
CREATE TABLE `soda_content_image` (
 `id` int(11) NOT NULL AUTO_INCREMENT,
 `source` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `page_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `collection_name` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
 `used_times` int(11) DEFAULT NULL,
 PRIMARY KEY (`id`),
 UNIQUE KEY `source_url_collection_name` (`source`,`url`,`collection_name`)
) ENGINE=InnoDB AUTO_INCREMENT=18299 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_content_image`
--

LOCK TABLES `soda_content_image` WRITE;
/*!40000 ALTER TABLE `soda_content_image` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_content_image` ENABLE KEYS */;
UNLOCK TABLES;
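--
-- Illustrative statement (not in the original dump), left commented out so replaying
-- the dump stays side-effect free: the unique key on (`source`,`url`,`collection_name`)
-- makes an idempotent usage counter possible. All values below are placeholders.
--
-- INSERT INTO soda_content_image (source, url, page_name, collection_name, used_times)
-- VALUES ('example_source', 'https://example.com/image.jpg', 'example_page', 'example_collection', 1)
-- ON DUPLICATE KEY UPDATE used_times = used_times + 1;
--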
`status_codeNotNot` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `headers` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `self_function` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `company_user` (`company`,`user`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_cookies`\n--\n\nLOCK TABLES `soda_cookies` WRITE;\n/*!40000 ALTER TABLE `soda_cookies` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_cookies` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_cruxsupplier`\n--\n\nDROP TABLE IF EXISTS `soda_cruxsupplier`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_cruxsupplier` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `supplier` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `supplier` (`supplier`),\n UNIQUE KEY `supplier_2` (`supplier`),\n UNIQUE KEY `supplier_3` (`supplier`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_cruxsupplier`\n--\n\nLOCK TABLES `soda_cruxsupplier` WRITE;\n/*!40000 ALTER TABLE `soda_cruxsupplier` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_cruxsupplier` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_degradantlist`\n--\n\nDROP TABLE IF EXISTS `soda_degradantlist`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_degradantlist` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `degradantlist` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `is_completed` int(11) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_degradantlist`\n--\n\nLOCK TABLES `soda_degradantlist` WRITE;\n/*!40000 ALTER TABLE `soda_degradantlist` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_degradantlist` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_dictionaryentry`\n--\n\nDROP TABLE IF EXISTS `soda_dictionaryentry`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_dictionaryentry` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `word` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `definition` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `videod` int(11) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_dictionaryentry`\n--\n\nLOCK TABLES `soda_dictionaryentry` WRITE;\n/*!40000 ALTER TABLE `soda_dictionaryentry` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_dictionaryentry` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_emailaccount`\n--\n\nDROP TABLE IF EXISTS `soda_emailaccount`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_emailaccount` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `username` varchar(512) CHARACTER SET utf8mb4 COLLATE 
utf8mb4_general_ci DEFAULT NULL,\n `password` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_emailaccount`\n--\n\nLOCK TABLES `soda_emailaccount` WRITE;\n/*!40000 ALTER TABLE `soda_emailaccount` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_emailaccount` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_emit`\n--\n\nDROP TABLE IF EXISTS `soda_emit`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_emit` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `true_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `direction` int(11) DEFAULT NULL,\n `point_a` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `point_b` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `moment` double DEFAULT NULL,\n `payload` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `answertime` double DEFAULT NULL,\n `e1` double DEFAULT NULL,\n `summary` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `answer` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `true_id` (`true_id`),\n UNIQUE KEY `true_id_2` (`true_id`),\n UNIQUE KEY `true_id_3` (`true_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_emit`\n--\n\nLOCK TABLES `soda_emit` WRITE;\n/*!40000 ALTER TABLE `soda_emit` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_emit` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_executabletext`\n--\n\nDROP TABLE IF EXISTS `soda_executabletext`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_executabletext` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `w` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `x` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n PRIMARY KEY (`id`),\n UNIQUE KEY `w` (`w`),\n UNIQUE KEY `w_2` (`w`),\n UNIQUE KEY `w_3` (`w`)\n) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_executabletext`\n--\n\nLOCK TABLES `soda_executabletext` WRITE;\n/*!40000 ALTER TABLE `soda_executabletext` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_executabletext` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_facebookadaccountspend`\n--\n\nDROP TABLE IF EXISTS `soda_facebookadaccountspend`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_facebookadaccountspend` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `date` date DEFAULT NULL,\n `ad_account_id` bigint(20) DEFAULT NULL,\n `facebookadaccountspend` double DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `date_ad_account_id` (`date`,`ad_account_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_facebookadaccountspend`\n--\n\nLOCK TABLES `soda_facebookadaccountspend` 
WRITE;\n/*!40000 ALTER TABLE `soda_facebookadaccountspend` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_facebookadaccountspend` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_facebookimageupload`\n--\n\nDROP TABLE IF EXISTS `soda_facebookimageupload`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_facebookimageupload` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `origin` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `destination` varchar(8192) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `origin` (`origin`),\n UNIQUE KEY `origin_2` (`origin`),\n UNIQUE KEY `origin_3` (`origin`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_facebookimageupload`\n--\n\nLOCK TABLES `soda_facebookimageupload` WRITE;\n/*!40000 ALTER TABLE `soda_facebookimageupload` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_facebookimageupload` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_facebookkeyword`\n--\n\nDROP TABLE IF EXISTS `soda_facebookkeyword`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_facebookkeyword` (\n `id` bigint(20) NOT NULL,\n `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `audience_size` bigint(20) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_facebookkeyword`\n--\n\nLOCK TABLES `soda_facebookkeyword` WRITE;\n/*!40000 ALTER TABLE `soda_facebookkeyword` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_facebookkeyword` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_facebookkeywordlist`\n--\n\nDROP TABLE IF EXISTS `soda_facebookkeywordlist`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_facebookkeywordlist` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `keywordlist` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `audience_size` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `purchases` int(11) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=14 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_facebookkeywordlist`\n--\n\nLOCK TABLES `soda_facebookkeywordlist` WRITE;\n/*!40000 ALTER TABLE `soda_facebookkeywordlist` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_facebookkeywordlist` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_facebookpage`\n--\n\nDROP TABLE IF EXISTS `soda_facebookpage`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_facebookpage` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `facebook_id` bigint(20) DEFAULT NULL,\n `name` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `url` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `settings` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `publish_times` longtext CHARACTER SET 
utf8mb4 COLLATE utf8mb4_general_ci,\n `token` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `facebook_id` (`facebook_id`),\n UNIQUE KEY `facebook_id_2` (`facebook_id`),\n UNIQUE KEY `facebook_id_3` (`facebook_id`),\n UNIQUE KEY `facebook_id_4` (`facebook_id`),\n UNIQUE KEY `facebook_id_5` (`facebook_id`)\n) ENGINE=InnoDB AUTO_INCREMENT=2521 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_facebookpage`\n--\n\nLOCK TABLES `soda_facebookpage` WRITE;\n/*!40000 ALTER TABLE `soda_facebookpage` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_facebookpage` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_ghostproduct`\n--\n\nDROP TABLE IF EXISTS `soda_ghostproduct`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_ghostproduct` (\n `id` bigint(20) NOT NULL,\n `last_check` datetime(6) DEFAULT NULL,\n `size_chart` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `created_at` datetime(6) DEFAULT NULL,\n `image` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `images` longblob,\n `options` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `variants` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `product_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `vendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `body_html` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `seller_platform` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `last_quantity_check` datetime(6) DEFAULT NULL,\n `tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `handle` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `similar_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `ca_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `ca_views` int(11) DEFAULT NULL,\n `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `short_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `story_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `pcs` decimal(16,3) DEFAULT NULL,\n `roi` decimal(16,3) DEFAULT NULL,\n `spent` decimal(16,3) DEFAULT NULL,\n `pcv` decimal(16,3) DEFAULT NULL,\n `item_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazon_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `ali_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazonvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `aliexpressvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazon_json` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `amazon_inspect` tinyint(1) NOT NULL DEFAULT '0',\n `state` int(11) DEFAULT NULL,\n `adset_ids` longtext CHARACTER 
SET utf8mb4 COLLATE utf8mb4_general_ci,\n `adset_id` bigint(20) DEFAULT NULL,\n `size_chart_power_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `freelancer_description` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `my_description` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `original` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `shifted` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `HTML` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `url_title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `video_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `icon_image` longblob,\n `product_tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `sku_image_dict` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_ghostproduct`\n--\n\nLOCK TABLES `soda_ghostproduct` WRITE;\n/*!40000 ALTER TABLE `soda_ghostproduct` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_ghostproduct` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_ghostproductupdate`\n--\n\nDROP TABLE IF EXISTS `soda_ghostproductupdate`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_ghostproductupdate` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `last_check` datetime(6) DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=190 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_ghostproductupdate`\n--\n\nLOCK TABLES `soda_ghostproductupdate` WRITE;\n/*!40000 ALTER TABLE `soda_ghostproductupdate` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_ghostproductupdate` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_handle`\n--\n\nDROP TABLE IF EXISTS `soda_handle`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_handle` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `handle` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `custom_audience_id` bigint(20) DEFAULT NULL,\n `has_adset` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `reach` bigint(20) DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `handle` (`handle`),\n UNIQUE KEY `handle_2` (`handle`),\n UNIQUE KEY `handle_3` (`handle`),\n UNIQUE KEY `handle_4` (`handle`),\n UNIQUE KEY `handle_5` (`handle`)\n) ENGINE=InnoDB AUTO_INCREMENT=16 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_handle`\n--\n\nLOCK TABLES `soda_handle` WRITE;\n/*!40000 ALTER TABLE `soda_handle` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_handle` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_hourslogged`\n--\n\nDROP TABLE IF EXISTS `soda_hourslogged`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_hourslogged` 
(\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `hours_logged` double DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_hourslogged`\n--\n\nLOCK TABLES `soda_hourslogged` WRITE;\n/*!40000 ALTER TABLE `soda_hourslogged` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_hourslogged` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_inceptedproduct`\n--\n\nDROP TABLE IF EXISTS `soda_inceptedproduct`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_inceptedproduct` (\n `id` bigint(20) NOT NULL,\n `last_check` datetime(6) DEFAULT NULL,\n `size_chart` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `created_at` datetime(6) DEFAULT NULL,\n `image` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `images` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `options` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `variants` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `product_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `vendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `body_html` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `seller_platform` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `last_quantity_check` datetime(6) DEFAULT NULL,\n `tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `handle` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `similar_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `ca_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `ca_views` int(11) DEFAULT NULL,\n `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `short_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `story_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `pcs` decimal(16,3) DEFAULT NULL,\n `roi` decimal(16,3) DEFAULT NULL,\n `spent` decimal(16,3) DEFAULT NULL,\n `pcv` decimal(16,3) DEFAULT NULL,\n `item_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazon_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `ali_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazonvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `aliexpressvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `amazon_json` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `amazon_inspect` tinyint(1) NOT NULL DEFAULT '0',\n `state` int(11) DEFAULT NULL,\n `adset_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `adset_id` bigint(20) DEFAULT NULL,\n `size_chart_power_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `freelancer_description` varchar(2048) CHARACTER SET utf8mb4 COLLATE 
  `freelancer_description` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `my_description` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `original` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `shifted` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `HTML` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `url_title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `video_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `icon_image` longblob,
  `product_tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `sku_image_dict` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_inceptedproduct`
--

LOCK TABLES `soda_inceptedproduct` WRITE;
/*!40000 ALTER TABLE `soda_inceptedproduct` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_inceptedproduct` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_interest`
--

DROP TABLE IF EXISTS `soda_interest`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_interest` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `interest_id` bigint(20) DEFAULT NULL,
  `interest_name` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `spend` double DEFAULT NULL,
  `reach` int(11) DEFAULT NULL,
  `impression` int(11) DEFAULT NULL,
  `click` int(11) DEFAULT NULL,
  `post_click` int(11) DEFAULT NULL,
  `add_to_cart` int(11) DEFAULT NULL,
  `website_purchase` int(11) DEFAULT NULL,
  `page_engagement` int(11) DEFAULT NULL,
  `photo_view` int(11) DEFAULT NULL,
  `post_engagement` int(11) DEFAULT NULL,
  `post_like` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `interest_id_interest_name` (`interest_id`,`interest_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_interest`
--

LOCK TABLES `soda_interest` WRITE;
/*!40000 ALTER TABLE `soda_interest` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_interest` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_interestinsight`
--

DROP TABLE IF EXISTS `soda_interestinsight`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_interestinsight` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `interest_id` bigint(20) DEFAULT NULL,
  `interest_name` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `date` int(11) DEFAULT NULL,
  `adset_id` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `spend` double DEFAULT NULL,
  `reach` int(11) DEFAULT NULL,
  `impression` int(11) DEFAULT NULL,
  `click` int(11) DEFAULT NULL,
  `post_click` int(11) DEFAULT NULL,
  `add_to_cart` int(11) DEFAULT NULL,
  `website_purchase` int(11) DEFAULT NULL,
  `page_engagement` int(11) DEFAULT NULL,
  `photo_view` int(11) DEFAULT NULL,
  `post_engagement` int(11) DEFAULT NULL,
  `post_like` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `interest_name_date_adset_id` (`interest_name`,`date`,`adset_id`)
) ENGINE=InnoDB AUTO_INCREMENT=295 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_interestinsight`
--

LOCK TABLES `soda_interestinsight` WRITE;
/*!40000 ALTER TABLE `soda_interestinsight` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_interestinsight` ENABLE KEYS */;
UNLOCK TABLES;
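
--
-- Illustrative example (not part of the original dump; values hypothetical):
-- the composite UNIQUE KEY `interest_name_date_adset_id` keeps one row per
-- interest, day (stored as an int like 20200101), and adset, so the table can
-- be refreshed idempotently with an upsert:
--
--   INSERT INTO `soda_interestinsight`
--     (`interest_name`, `date`, `adset_id`, `spend`, `reach`, `click`)
--   VALUES ('camping', 20200101, '23841234567890123', 12.34, 1500, 42)
--   ON DUPLICATE KEY UPDATE
--     `spend` = VALUES(`spend`), `reach` = VALUES(`reach`), `click` = VALUES(`click`);
--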

--
-- Table structure for table `soda_keep_exchange`
--

DROP TABLE IF EXISTS `soda_keep_exchange`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_keep_exchange` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `x` datetime(6) DEFAULT NULL,
  `order_id` bigint(20) DEFAULT NULL,
  `lineitem_id` bigint(20) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `send_back_tracking_number` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `send_back_tracking_arrive` int(11) DEFAULT NULL,
  `exchange_ali_order_number` bigint(20) DEFAULT NULL,
  `exchange_ali_tracking_num` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `exchange_tracking_timers_` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `exchange_tracking_arrived` int(11) DEFAULT NULL,
  `ali_price` double DEFAULT NULL,
  `quantity` int(11) DEFAULT NULL,
  `discount_price` double DEFAULT NULL,
  `discount_coupon` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `discount_used` int(11) DEFAULT NULL,
  `is_keep` int(11) DEFAULT NULL,
  `is_kept` int(11) DEFAULT NULL,
  `is_send_back` int(11) DEFAULT NULL,
  `is_sendback_started` int(11) DEFAULT NULL,
  `is_sendback_complete` int(11) DEFAULT NULL,
  `is_refund` int(11) DEFAULT NULL,
  `is_refunded` int(11) DEFAULT NULL,
  `is_exchange` int(11) DEFAULT NULL,
  `is_exchange_started` int(11) DEFAULT NULL,
  `is_exchange_complete` int(11) DEFAULT NULL,
  `is_discount` int(11) DEFAULT NULL,
  `is_discount_completed` int(11) DEFAULT NULL,
  `is_complete` int(11) DEFAULT NULL,
  `creation_time` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `order_id_lineitem_id` (`order_id`,`lineitem_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_keep_exchange`
--

LOCK TABLES `soda_keep_exchange` WRITE;
/*!40000 ALTER TABLE `soda_keep_exchange` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_keep_exchange` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_lineitem`
--

DROP TABLE IF EXISTS `soda_lineitem`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_lineitem` (
  `id` bigint(20) NOT NULL,
  `last_check` datetime(6) DEFAULT NULL,
  `date` date DEFAULT NULL,
  `created_at` datetime(6) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `financial_status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `variant_id` bigint(20) DEFAULT NULL,
  `product_id` bigint(20) DEFAULT NULL,
  `order_id` bigint(20) DEFAULT NULL,
  `quantity` int(11) DEFAULT NULL,
  `grams` int(11) DEFAULT NULL,
  `sku` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `variant_title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `shipping_address` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `updated_shipping_address` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `price` double DEFAULT NULL,
  `ali_price` double DEFAULT NULL,
  `ali_tracking_number` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ali_tracking_method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ali_order_number` bigint(20) DEFAULT NULL,
  `ali_return_negotiation` longblob,
  `ali_dispute_id` bigint(20) DEFAULT NULL,
  `keep_exchange_id` bigint(20) DEFAULT NULL,
  `ali_event` longblob,
  `fulfillment_id` bigint(20) DEFAULT NULL,
  `fulfillment_tracking_numbers` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `fulfillment_status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `fulfillment_service` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `tracking_events` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `saved_address_screenshot` longblob,
  `e3` double DEFAULT NULL,
  `e2` double DEFAULT NULL,
  `e1` double DEFAULT NULL,
  `t4` double DEFAULT NULL,
  `t3` double DEFAULT NULL,
  `t2` double DEFAULT NULL,
  `t1` double DEFAULT NULL,
  `timesofar` int(11) DEFAULT NULL,
  `stage` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_lineitem`
--

LOCK TABLES `soda_lineitem` WRITE;
/*!40000 ALTER TABLE `soda_lineitem` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_lineitem` ENABLE KEYS */;
UNLOCK TABLES;
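
--
-- Illustrative example (not part of the original dump; shop name hypothetical):
-- `soda_lineitem` references its parent order through `order_id` with no
-- declared FOREIGN KEY, so order-level reporting is a plain join against
-- `soda_order` (defined later in this dump), e.g. unfulfilled items per shop:
--
--   SELECT o.`order_number`, li.`title`, li.`quantity`
--   FROM `soda_lineitem` li
--   JOIN `soda_order` o ON o.`id` = li.`order_id`
--   WHERE li.`fulfillment_status` IS NULL AND li.`shop` = 'example-shop';
--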

--
-- Table structure for table `soda_lineitemsfeed`
--

DROP TABLE IF EXISTS `soda_lineitemsfeed`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_lineitemsfeed` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `last_check` datetime(6) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=22087 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_lineitemsfeed`
--

LOCK TABLES `soda_lineitemsfeed` WRITE;
/*!40000 ALTER TABLE `soda_lineitemsfeed` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_lineitemsfeed` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_log`
--

DROP TABLE IF EXISTS `soda_log`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_log` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `log` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_log`
--

LOCK TABLES `soda_log` WRITE;
/*!40000 ALTER TABLE `soda_log` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_log` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_meal`
--

DROP TABLE IF EXISTS `soda_meal`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_meal` (
  `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ounces` double DEFAULT NULL,
  `price` double DEFAULT NULL,
  `time` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_meal`
--

LOCK TABLES `soda_meal` WRITE;
/*!40000 ALTER TABLE `soda_meal` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_meal` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_mealinventory`
--

DROP TABLE IF EXISTS `soda_mealinventory`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_mealinventory` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `price_per_ounce` double DEFAULT NULL,
  `date` date DEFAULT NULL,
  `status` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_mealinventory`
--

LOCK TABLES `soda_mealinventory` WRITE;
/*!40000 ALTER TABLE `soda_mealinventory` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_mealinventory` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_message_game`
--

DROP TABLE IF EXISTS `soda_message_game`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_message_game` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `text` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_message_game`
--

LOCK TABLES `soda_message_game` WRITE;
/*!40000 ALTER TABLE `soda_message_game` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_message_game` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_motto`
--

DROP TABLE IF EXISTS `soda_motto`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_motto` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `motto` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_motto`
--

LOCK TABLES `soda_motto` WRITE;
/*!40000 ALTER TABLE `soda_motto` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_motto` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_muta`
--

DROP TABLE IF EXISTS `soda_muta`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_muta` (
  `current_shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `is_reconfigured_results` int(11) DEFAULT NULL,
  `is_reconfigured_suggestions` int(11) DEFAULT NULL,
  `is_reconfigured_interest_box` int(11) DEFAULT NULL,
  `is_results_is_running` int(11) DEFAULT NULL,
  `is_suggestions_is_running` int(11) DEFAULT NULL,
  `is_interest_box_is_running` int(11) DEFAULT NULL,
  `Active_AceInTheHole_header_column_account_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `store_abbre` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `is_free_plus_ship` int(11) DEFAULT NULL,
  `page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `image_idx` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `image_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `option_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `variant_indexes` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `size_chart` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `my_description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `amplitude` int(11) DEFAULT NULL,
  `fig_changed` int(11) DEFAULT NULL,
  `new_menulet_changed` int(11) DEFAULT NULL,
  `fig_on` int(11) DEFAULT NULL,
  `addproducts_on` int(11) DEFAULT NULL,
  `sciencevessels_on` int(11) DEFAULT NULL,
  `support_on` int(11) DEFAULT NULL,
  `job_search_on` int(11) DEFAULT NULL,
  `incept_product_on` int(11) DEFAULT NULL,
  `hours_logged_on` int(11) DEFAULT NULL,
  `get_product_source_mode` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `product_adding_mode` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `logo` int(11) DEFAULT NULL,
  `logo_size` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `logo_scaling_dimension_size` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `puffer` int(11) DEFAULT NULL,
  `interest_box` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `interest_results` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `interest_suggestions` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `username` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `bitly_access_token` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `targeting_search_url` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `targeting_suggestions_url` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_muta`
--

LOCK TABLES `soda_muta` WRITE;
/*!40000 ALTER TABLE `soda_muta` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_muta` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_new_email`
--

DROP TABLE IF EXISTS `soda_new_email`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_new_email` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `emailer` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `emailer_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `true_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `hidden_message` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `binarydata` longblob,
  `date` datetime(6) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `responded` int(11) DEFAULT NULL,
  `tagged_order_numbers` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `tags` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `actions_taken` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `response` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`),
  UNIQUE KEY `true_id` (`true_id`),
  UNIQUE KEY `true_id_2` (`true_id`),
  UNIQUE KEY `true_id_3` (`true_id`)
) ENGINE=InnoDB AUTO_INCREMENT=89 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_new_email`
--

LOCK TABLES `soda_new_email` WRITE;
/*!40000 ALTER TABLE `soda_new_email` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_new_email` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_new_email_tag`
--

DROP TABLE IF EXISTS `soda_new_email_tag`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_new_email_tag` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `tag` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_new_email_tag`
--

LOCK TABLES `soda_new_email_tag` WRITE;
/*!40000 ALTER TABLE `soda_new_email_tag` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_new_email_tag` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_new_email_template`
--

DROP TABLE IF EXISTS `soda_new_email_template`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_new_email_template` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `template` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `shop` varchar(16) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `subject` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `matter` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `shop_subject_matter` (`shop`,`subject`,`matter`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_new_email_template`
--

LOCK TABLES `soda_new_email_template` WRITE;
/*!40000 ALTER TABLE `soda_new_email_template` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_new_email_template` ENABLE KEYS */;
UNLOCK TABLES;
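
--
-- Illustrative example (not part of the original dump; values hypothetical):
-- the UNIQUE KEY `shop_subject_matter` means a reply template resolves to at
-- most one row per (shop, subject, matter) triple, so a lookup is simply:
--
--   SELECT `template` FROM `soda_new_email_template`
--   WHERE `shop` = 'abc' AND `subject` = 'Order status' AND `matter` = 'shipping';
--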

--
-- Table structure for table `soda_niche`
--

DROP TABLE IF EXISTS `soda_niche`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_niche` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `facebook_page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `date_added` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `niche` (`niche`),
  UNIQUE KEY `niche_2` (`niche`),
  UNIQUE KEY `niche_3` (`niche`)
) ENGINE=InnoDB AUTO_INCREMENT=50 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_niche`
--

LOCK TABLES `soda_niche` WRITE;
/*!40000 ALTER TABLE `soda_niche` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_niche` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_order`
--

DROP TABLE IF EXISTS `soda_order`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_order` (
  `id` bigint(20) NOT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `created_at` datetime(6) DEFAULT NULL,
  `date` date DEFAULT NULL,
  `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `line_items` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `fulfillments` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `shipping_address` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `order_number` int(11) DEFAULT NULL,
  `total_price` decimal(16,3) DEFAULT NULL,
  `total_line_items_price` double DEFAULT NULL,
  `fulfillment_status` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `gateway` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `note` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `financial_status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `billing_address_contact_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `posted_to_paypal` tinyint(1) NOT NULL DEFAULT '0',
  `state` int(11) DEFAULT NULL,
  `field_in_event_of_unavailability` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `is_checked` int(11) DEFAULT NULL,
  `refunded_amount` double DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_order`
--

LOCK TABLES `soda_order` WRITE;
/*!40000 ALTER TABLE `soda_order` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_order` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_packet`
--

DROP TABLE IF EXISTS `soda_packet`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_packet` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `v` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `w` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `x` longblob,
  PRIMARY KEY (`id`),
  UNIQUE KEY `v` (`v`),
  UNIQUE KEY `v_2` (`v`),
  UNIQUE KEY `v_3` (`v`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_packet`
--

LOCK TABLES `soda_packet` WRITE;
/*!40000 ALTER TABLE `soda_packet` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_packet` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_paidcardnumber`
--

DROP TABLE IF EXISTS `soda_paidcardnumber`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_paidcardnumber` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `order_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `price` double DEFAULT NULL,
  `card_id` int(11) DEFAULT NULL,
  `paid_at` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=24 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_paidcardnumber`
--

LOCK TABLES `soda_paidcardnumber` WRITE;
/*!40000 ALTER TABLE `soda_paidcardnumber` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_paidcardnumber` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_password`
--

DROP TABLE IF EXISTS `soda_password`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_password` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `company` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `username` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `password` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `company_username` (`company`,`username`)
) ENGINE=InnoDB AUTO_INCREMENT=47 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_password`
--

LOCK TABLES `soda_password` WRITE;
/*!40000 ALTER TABLE `soda_password` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_password` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_payment`
--

DROP TABLE IF EXISTS `soda_payment`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_payment` (
  `id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
  `created_at` datetime(6) DEFAULT NULL,
  `amount` double DEFAULT NULL,
  `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `invoice_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `order_id` bigint(20) DEFAULT NULL,
  `payment_gateway_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_payment`
--

LOCK TABLES `soda_payment` WRITE;
/*!40000 ALTER TABLE `soda_payment` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_payment` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_payout`
--

DROP TABLE IF EXISTS `soda_payout`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_payout` (
  `id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
  `created_at` datetime(6) DEFAULT NULL,
  `amount` double DEFAULT NULL,
  `account_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `account_last_4_digits` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `payment_gateway_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_payout`
--

LOCK TABLES `soda_payout` WRITE;
/*!40000 ALTER TABLE `soda_payout` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_payout` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_price`
--

DROP TABLE IF EXISTS `soda_price`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_price` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `product_id` bigint(20) DEFAULT NULL,
  `variant_id` bigint(20) DEFAULT NULL,
  `ap` double DEFAULT NULL,
  `sp` double DEFAULT NULL,
  `sales` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `product_id_variant_id` (`product_id`,`variant_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_price`
--

LOCK TABLES `soda_price` WRITE;
/*!40000 ALTER TABLE `soda_price` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_price` ENABLE KEYS */;
UNLOCK TABLES;
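
--
-- Illustrative example (not part of the original dump; IDs and prices
-- hypothetical): `soda_price` is unique per (`product_id`,`variant_id`), so
-- per-variant price records can be maintained with an idempotent upsert:
--
--   INSERT INTO `soda_price` (`product_id`, `variant_id`, `ap`, `sp`, `sales`)
--   VALUES (1234567890123, 9876543210987, 2.50, 9.99, 0)
--   ON DUPLICATE KEY UPDATE
--     `ap` = VALUES(`ap`), `sp` = VALUES(`sp`), `sales` = VALUES(`sales`);
--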

--
-- Table structure for table `soda_pricechange`
--

DROP TABLE IF EXISTS `soda_pricechange`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_pricechange` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `argument` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_pricechange`
--

LOCK TABLES `soda_pricechange` WRITE;
/*!40000 ALTER TABLE `soda_pricechange` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_pricechange` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_pricedecrease`
--

DROP TABLE IF EXISTS `soda_pricedecrease`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_pricedecrease` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `product_id` bigint(20) DEFAULT NULL,
  `variant_id` bigint(20) DEFAULT NULL,
  `corn` double DEFAULT NULL,
  `medium` double DEFAULT NULL,
  `date_added` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=14320 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_pricedecrease`
--

LOCK TABLES `soda_pricedecrease` WRITE;
/*!40000 ALTER TABLE `soda_pricedecrease` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_pricedecrease` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_priceincrease`
--

DROP TABLE IF EXISTS `soda_priceincrease`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_priceincrease` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `product_id` bigint(20) DEFAULT NULL,
  `variant_id` bigint(20) DEFAULT NULL,
  `corn` double DEFAULT NULL,
  `medium` double DEFAULT NULL,
  `date_added` datetime(6) DEFAULT NULL,
  `approved` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=14320 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_priceincrease`
--

LOCK TABLES `soda_priceincrease` WRITE;
/*!40000 ALTER TABLE `soda_priceincrease` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_priceincrease` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_product`
--

DROP TABLE IF EXISTS `soda_product`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_product` (
  `id` bigint(20) NOT NULL,
  `last_check` datetime(6) DEFAULT NULL,
  `size_chart` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `created_at` datetime(6) DEFAULT NULL,
  `image` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `images` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `options` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `variants` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `product_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `vendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `body_html` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `seller_platform` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `last_quantity_check` datetime(6) DEFAULT NULL,
  `tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `handle` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `similar_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `ca_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ca_views` int(11) DEFAULT NULL,
  `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `short_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `story_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `pcs` decimal(16,3) DEFAULT NULL,
  `roi` decimal(16,3) DEFAULT NULL,
  `spent` decimal(16,3) DEFAULT NULL,
  `pcv` decimal(16,3) DEFAULT NULL,
  `item_type` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `amazon_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ali_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `amazonvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `aliexpressvendor` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `amazon_json` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `amazon_inspect` tinyint(1) NOT NULL DEFAULT '0',
  `state` int(11) DEFAULT NULL,
  `adset_ids` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `adset_id` bigint(20) DEFAULT NULL,
  `size_chart_power_urls` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `freelancer_description` varchar(2048) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `my_description` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `original` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `shifted` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `HTML` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `url_title` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `video_url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `icon_image` longblob,
  `product_tags` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `sku_image_dict` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_product`
--

LOCK TABLES `soda_product` WRITE;
/*!40000 ALTER TABLE `soda_product` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_product` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_productsfeed`
--

DROP TABLE IF EXISTS `soda_productsfeed`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_productsfeed` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `last_check` datetime(6) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3955 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_productsfeed`
--

LOCK TABLES `soda_productsfeed` WRITE;
/*!40000 ALTER TABLE `soda_productsfeed` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_productsfeed` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_producttalk`
--

DROP TABLE IF EXISTS `soda_producttalk`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_producttalk` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `customer` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `order_id` bigint(20) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `binarydata` longblob,
  `thread_id` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `lineitem_id` bigint(20) DEFAULT NULL,
  `product_id` bigint(20) DEFAULT NULL,
  `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `supplier` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `feeling` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `thread_id_lineitem_id` (`thread_id`,`lineitem_id`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_producttalk`
--

LOCK TABLES `soda_producttalk` WRITE;
/*!40000 ALTER TABLE `soda_producttalk` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_producttalk` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_proxy`
--

DROP TABLE IF EXISTS `soda_proxy`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_proxy` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `time_added` double DEFAULT NULL,
  `proxy_ip` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_proxy`
--

LOCK TABLES `soda_proxy` WRITE;
/*!40000 ALTER TABLE `soda_proxy` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_proxy` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_pyperclip`
--
DROP TABLE IF EXISTS `soda_pyperclip`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_pyperclip` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `pyperclip` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_pyperclip`
--

LOCK TABLES `soda_pyperclip` WRITE;
/*!40000 ALTER TABLE `soda_pyperclip` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_pyperclip` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_question`
--

DROP TABLE IF EXISTS `soda_question`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_question` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `question` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `answered` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `question` (`question`),
  UNIQUE KEY `question_2` (`question`),
  UNIQUE KEY `question_3` (`question`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_question`
--

LOCK TABLES `soda_question` WRITE;
/*!40000 ALTER TABLE `soda_question` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_question` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_recompilate`
--

DROP TABLE IF EXISTS `soda_recompilate`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_recompilate` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `w` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `x` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `w_x` (`w`,`x`)
) ENGINE=InnoDB AUTO_INCREMENT=48 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_recompilate`
--

LOCK TABLES `soda_recompilate` WRITE;
/*!40000 ALTER TABLE `soda_recompilate` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_recompilate` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_reorder`
--

DROP TABLE IF EXISTS `soda_reorder`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_reorder` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `order_id` bigint(20) DEFAULT NULL,
  `lineitem_id` bigint(20) DEFAULT NULL,
  `identifier` bigint(20) DEFAULT NULL,
  `quantity` int(11) DEFAULT NULL,
  `price` double DEFAULT NULL,
  `x` datetime(6) DEFAULT NULL,
  `reorder_ali_tracking_num` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `reorder_tracking_timers_` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `reorder_tracking_arrived` int(11) DEFAULT NULL,
  `is_complete` int(11) DEFAULT NULL,
  `creation_time` datetime(6) DEFAULT NULL,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `identifier` (`identifier`),
  UNIQUE KEY `identifier_2` (`identifier`),
  UNIQUE KEY `identifier_3` (`identifier`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_reorder`
--

LOCK TABLES `soda_reorder` WRITE;
/*!40000 ALTER TABLE `soda_reorder` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_reorder` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_reposit`
--

DROP TABLE IF EXISTS `soda_reposit`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_reposit` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `state_a` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `action_x` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_reposit`
--

LOCK TABLES `soda_reposit` WRITE;
/*!40000 ALTER TABLE `soda_reposit` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_reposit` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_request`
--

DROP TABLE IF EXISTS `soda_request`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_request` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `cookies_label` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `time_added` datetime(6) DEFAULT NULL,
  `w` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `x` tinyint(1) NOT NULL DEFAULT '0',
  `notInText` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `notInUrl` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `y` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_request`
--

LOCK TABLES `soda_request` WRITE;
/*!40000 ALTER TABLE `soda_request` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_request` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_savedtime`
--

DROP TABLE IF EXISTS `soda_savedtime`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_savedtime` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `seconds` int(11) DEFAULT NULL,
  `date` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_savedtime`
--

LOCK TABLES `soda_savedtime` WRITE;
/*!40000 ALTER TABLE `soda_savedtime` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_savedtime` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_sciencevessel`
--

DROP TABLE IF EXISTS `soda_sciencevessel`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_sciencevessel` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `niche` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `is_free_plus_ship` int(11) DEFAULT NULL,
  `page` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `x` varchar(4196) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `y` longblob,
  `url` varchar(4196) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `warped` int(11) DEFAULT NULL,
  `created_at` datetime(6) DEFAULT NULL,
  `caption` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `append_pictures` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_sciencevessel`
--

LOCK TABLES `soda_sciencevessel` WRITE;
/*!40000 ALTER TABLE `soda_sciencevessel` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_sciencevessel` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_secondaryaction`
--

DROP TABLE IF EXISTS `soda_secondaryaction`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_secondaryaction` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `refund_id` bigint(20) DEFAULT NULL,
  `order_id` bigint(20) DEFAULT NULL,
  `product_id` bigint(20) DEFAULT NULL,
  `lineitem_id` bigint(20) DEFAULT NULL,
  `price` double DEFAULT NULL,
  `quantity` int(11) DEFAULT NULL,
  `date` date DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `refund_id` (`refund_id`),
  UNIQUE KEY `refund_id_2` (`refund_id`),
  UNIQUE KEY `refund_id_3` (`refund_id`)
) ENGINE=InnoDB AUTO_INCREMENT=36454 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_secondaryaction`
--

LOCK TABLES `soda_secondaryaction` WRITE;
/*!40000 ALTER TABLE `soda_secondaryaction` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_secondaryaction` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_selfhelptip`
--

DROP TABLE IF EXISTS `soda_selfhelptip`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_selfhelptip` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `selfhelptip` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_selfhelptip`
--

LOCK TABLES `soda_selfhelptip` WRITE;
/*!40000 ALTER TABLE `soda_selfhelptip` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_selfhelptip` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_settlement`
--

DROP TABLE IF EXISTS `soda_settlement`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_settlement` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `date` date DEFAULT NULL,
  `products_added` int(11) DEFAULT NULL,
  `adsets_added` int(11) DEFAULT NULL,
  `sales` double DEFAULT NULL,
  `ads` double DEFAULT NULL,
  `cogs` double DEFAULT NULL,
  `refunds` double DEFAULT NULL,
  `chargebacks` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `rake` double DEFAULT NULL,
  `settlement` double DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `date` (`date`),
  UNIQUE KEY `date_2` (`date`),
  UNIQUE KEY `date_3` (`date`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_settlement`
--

LOCK TABLES `soda_settlement` WRITE;
/*!40000 ALTER TABLE `soda_settlement` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_settlement` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_shop`
--

DROP TABLE IF EXISTS `soda_shop`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_shop` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `shop_abbreviation` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `product_page_general_information` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `product_use_guidelines_1` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `location_id` bigint(20) DEFAULT NULL,
  `Has_Gmail_Account` int(11) DEFAULT NULL,
  `Has_Shopify_Store` int(11) DEFAULT NULL,
  `Has_Namecheap_Account` int(11) DEFAULT NULL,
  `Has_GSuite_Account` int(11) DEFAULT NULL,
  `Has_Facebook_Account` int(11) DEFAULT NULL,
  `Has_Facebook_Business_Manager_Account` int(11) DEFAULT NULL,
  `Has_Facebook_Ad_Account_ID` int(11) DEFAULT NULL,
  `Has_Facebook_Pixel` int(11) DEFAULT NULL,
  `Has_Facebook_Developer_Account` int(11) DEFAULT NULL,
  `Has_AliExpress_Account` int(11) DEFAULT NULL,
  `Has_DBC_Account` int(11) DEFAULT NULL,
  `Gmail_Email_Address` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Gmail_Password` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Number_To_Receive_Videocalls_And_Messages` int(11) DEFAULT NULL,
  `Business_Name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Employee_Count` bigint(20) DEFAULT NULL,
  `Country_Of_Business` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `First_Name` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Last_Name` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Street_Address` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Street_Address_Line_2` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `State` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `City` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `ZIP_Code` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Business_Phone_Number` bigint(20) DEFAULT NULL,
  `Business_Email_Address` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `GSuite_Password` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Product_Return_Address` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Shopify_Email` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Shopify_Password` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Administrative_Url` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Homepage_Title` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Homepage_Meta_Description` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Metric_System` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Pixel_ID` bigint(20) DEFAULT NULL,
  `Checkout_With_Account` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Checkout_With_Identifier` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Receive_Shipping_Updates` int(11) DEFAULT NULL,
  `Checkout_With_Name` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Checkout_With_Company` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Checkout_With_Address_Line_2` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Checkout_With_Phone_Number` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Checkout_With_Shipping_As_Billing` int(11) DEFAULT NULL,
  `Checkout_Enable_Address_Autocomplete` int(11) DEFAULT NULL,
  `Show_Email_Signup_Option_At_Checkout` int(11) DEFAULT NULL,
  `Preselect_Email_Signup_Option_At_Checkout` int(11) DEFAULT NULL,
  `Use_Free_Plus_Shipping` int(11) DEFAULT NULL,
  `Shopify_App_API_Key` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Shopify_App_API_Password` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Shopify_App_API_Secret` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Shopify_App_API_Url` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Fields_To_Update` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Pages` int(11) DEFAULT NULL,
  `Default_Product_Description` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Domain_Name_To_Transfer` int(11) DEFAULT NULL,
  `Domain_Name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Renew_Domain_Every_Year` int(11) DEFAULT NULL,
  `Facebook_Email` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Password` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Date_Of_Birth` bigint(20) DEFAULT NULL,
  `Gender` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Business_Account_Business_Name` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Business_Manager_ID` bigint(20) DEFAULT NULL,
  `Facebook_Business_Ad_Account_Name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Business_Ad_Account_Time_Zone` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Business_Ad_Account_ID` bigint(20) DEFAULT NULL,
  `Facebook_Business_App_ID` bigint(20) DEFAULT NULL,
  `Facebook_Business_App_Secret` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Facebook_Business_App_Token` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `AliExpress_Email` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `AliExpress_Password` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `AliExpress_Most_Recent_Date` datetime(6) DEFAULT NULL,
  `AliExpress_Account_Order_Scan_Earliest_Date` date DEFAULT NULL,
  `AliPay_Pin` varchar(16) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `AliPay_Security_Question_1` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `AliPay_Security_Question_2` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `AliPay_Security_Question_3` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `AliExpress_Card_Transaction_Tags` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Adspend_Card_Transaction_Tags` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Payment_Gateway_Payout_Card_Transaction_Tags` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Lineitem_Most_Recent_Date` datetime(6) DEFAULT NULL,
  `Allow_Additional_Shipping_Methods` int(11) DEFAULT NULL,
  `Send_Tracking_Number` int(11) DEFAULT NULL,
  `DBC_Username` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `DBC_Password` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `GSuite_Financial_Card_Information` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Shopify_Financial_Card_Information` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Facebook_Financial_Card_Information` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `AliExpress_Financial_Card_Information` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `Price_Change_Round_Type` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Price_Change_Ending_Amount` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `Active` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_shop`
--

LOCK TABLES `soda_shop` WRITE;
/*!40000 ALTER TABLE `soda_shop` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_shop` ENABLE KEYS */;
UNLOCK TABLES;
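
--
-- Illustrative example (not part of the original dump): `soda_shop` holds one
-- wide configuration row per store, with `Active` as an int flag, so runtime
-- code would typically read only the active stores, e.g.:
--
--   SELECT `shop_abbreviation`, `Domain_Name`, `Facebook_Pixel_ID`
--   FROM `soda_shop` WHERE `Active` = 1;
--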

--
-- Table structure for table `soda_sku`
--

DROP TABLE IF EXISTS `soda_sku`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_sku` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `product_id` int(11) DEFAULT NULL,
  `sku` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `nsku` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `product_id_sku` (`product_id`,`sku`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_sku`
--

LOCK TABLES `soda_sku` WRITE;
/*!40000 ALTER TABLE `soda_sku` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_sku` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_soup`
--

DROP TABLE IF EXISTS `soda_soup`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `soda_soup` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `page_source` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
  `url` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
  `last_check` datetime(6) DEFAULT NULL,
  `is_available` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `url` (`url`),
  UNIQUE KEY `url_2` (`url`),
  UNIQUE KEY `url_3` (`url`)
) ENGINE=InnoDB AUTO_INCREMENT=606 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `soda_soup`
--

LOCK TABLES `soda_soup` WRITE;
/*!40000 ALTER TABLE `soda_soup` DISABLE KEYS */;
/*!40000 ALTER TABLE `soda_soup` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `soda_statusverification`
--

DROP TABLE IF EXISTS `soda_statusverification`;
@saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_statusverification` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `last_check` datetime(6) DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=161 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_statusverification`\n--\n\nLOCK TABLES `soda_statusverification` WRITE;\n/*!40000 ALTER TABLE `soda_statusverification` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_statusverification` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_subscription`\n--\n\nDROP TABLE IF EXISTS `soda_subscription`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_subscription` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `price_per_month` double DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `name` (`name`),\n UNIQUE KEY `name_2` (`name`),\n UNIQUE KEY `name_3` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_subscription`\n--\n\nLOCK TABLES `soda_subscription` WRITE;\n/*!40000 ALTER TABLE `soda_subscription` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_subscription` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_tertiaryaction`\n--\n\nDROP TABLE IF EXISTS `soda_tertiaryaction`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_tertiaryaction` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `payment_gateway_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `invoice_id` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `invoice_id_instance_number` int(11) DEFAULT NULL,\n `shop` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `order_id` bigint(20) DEFAULT NULL,\n `date` datetime(6) DEFAULT NULL,\n `requires_response_by` datetime(6) DEFAULT NULL,\n `status` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `tracking_numbers` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `message` varchar(4096) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `order_problem` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `email` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `customer_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `dispute_amount` double DEFAULT NULL,\n `order_amount` double DEFAULT NULL,\n `case_close_ending_amount` double DEFAULT NULL,\n `case_close_dispute_fee` double DEFAULT NULL,\n `responded` int(11) DEFAULT NULL,\n `dispute_timeout` int(11) DEFAULT NULL,\n `is_complete` int(11) DEFAULT NULL,\n `actions_taken` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `keep_exchange_id` bigint(20) DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `invoice_id_invoice_id_instance_number` (`invoice_id`,`invoice_id_instance_number`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 
COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_tertiaryaction`\n--\n\nLOCK TABLES `soda_tertiaryaction` WRITE;\n/*!40000 ALTER TABLE `soda_tertiaryaction` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_tertiaryaction` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_tertiaryaction_template`\n--\n\nDROP TABLE IF EXISTS `soda_tertiaryaction_template`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_tertiaryaction_template` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `template` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,\n `shop` varchar(16) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `subject` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `matter` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `shop_subject_matter` (`shop`,`subject`,`matter`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_tertiaryaction_template`\n--\n\nLOCK TABLES `soda_tertiaryaction_template` WRITE;\n/*!40000 ALTER TABLE `soda_tertiaryaction_template` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_tertiaryaction_template` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_testpastedata`\n--\n\nDROP TABLE IF EXISTS `soda_testpastedata`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_testpastedata` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `w` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `x` longblob,\n PRIMARY KEY (`id`),\n UNIQUE KEY `w` (`w`),\n UNIQUE KEY `w_2` (`w`),\n UNIQUE KEY `w_3` (`w`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_testpastedata`\n--\n\nLOCK TABLES `soda_testpastedata` WRITE;\n/*!40000 ALTER TABLE `soda_testpastedata` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_testpastedata` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_testpickledata`\n--\n\nDROP TABLE IF EXISTS `soda_testpickledata`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_testpickledata` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `w` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `x` longblob,\n PRIMARY KEY (`id`),\n UNIQUE KEY `w` (`w`),\n UNIQUE KEY `w_2` (`w`),\n UNIQUE KEY `w_3` (`w`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_testpickledata`\n--\n\nLOCK TABLES `soda_testpickledata` WRITE;\n/*!40000 ALTER TABLE `soda_testpickledata` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_testpickledata` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_timedtask`\n--\n\nDROP TABLE IF EXISTS `soda_timedtask`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_timedtask` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `function_name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `start` datetime(6) DEFAULT NULL,\n 
`end` datetime(6) DEFAULT NULL,\n `elapsed_time` double DEFAULT NULL,\n `my_time_elapsed` double DEFAULT NULL,\n `my_time_start` double DEFAULT NULL,\n `my_time_end` double DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_timedtask`\n--\n\nLOCK TABLES `soda_timedtask` WRITE;\n/*!40000 ALTER TABLE `soda_timedtask` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_timedtask` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_tipsingeneral`\n--\n\nDROP TABLE IF EXISTS `soda_tipsingeneral`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_tipsingeneral` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `tipsingeneral` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_tipsingeneral`\n--\n\nLOCK TABLES `soda_tipsingeneral` WRITE;\n/*!40000 ALTER TABLE `soda_tipsingeneral` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_tipsingeneral` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_todo`\n--\n\nDROP TABLE IF EXISTS `soda_todo`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_todo` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `todo` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `is_completed` int(11) DEFAULT NULL,\n `pyperclips` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_todo`\n--\n\nLOCK TABLES `soda_todo` WRITE;\n/*!40000 ALTER TABLE `soda_todo` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_todo` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_transaction`\n--\n\nDROP TABLE IF EXISTS `soda_transaction`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_transaction` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `date` int(11) DEFAULT NULL,\n `amount` int(11) DEFAULT NULL,\n `bank` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `text` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_transaction`\n--\n\nLOCK TABLES `soda_transaction` WRITE;\n/*!40000 ALTER TABLE `soda_transaction` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_transaction` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_transactionverification`\n--\n\nDROP TABLE IF EXISTS `soda_transactionverification`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_transactionverification` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `last_check` datetime(6) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for 
table `soda_transactionverification`\n--\n\nLOCK TABLES `soda_transactionverification` WRITE;\n/*!40000 ALTER TABLE `soda_transactionverification` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_transactionverification` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_typing_format`\n--\n\nDROP TABLE IF EXISTS `soda_typing_format`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_typing_format` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `variable` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `save_lines` int(11) DEFAULT NULL,\n `verbal` int(11) DEFAULT NULL,\n `printful` int(11) DEFAULT NULL,\n `naming` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `naming_collision` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `rudeness` int(11) DEFAULT NULL,\n `pseudocode` int(11) DEFAULT NULL,\n `depth_of_recursion` int(11) DEFAULT NULL,\n `ugliness` int(11) DEFAULT NULL,\n `error_method` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_typing_format`\n--\n\nLOCK TABLES `soda_typing_format` WRITE;\n/*!40000 ALTER TABLE `soda_typing_format` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_typing_format` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_uniqueproductalgorithm`\n--\n\nDROP TABLE IF EXISTS `soda_uniqueproductalgorithm`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_uniqueproductalgorithm` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `shop` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `x` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `shop` (`shop`),\n UNIQUE KEY `shop_2` (`shop`),\n UNIQUE KEY `shop_3` (`shop`)\n) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_uniqueproductalgorithm`\n--\n\nLOCK TABLES `soda_uniqueproductalgorithm` WRITE;\n/*!40000 ALTER TABLE `soda_uniqueproductalgorithm` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_uniqueproductalgorithm` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_uniqueproductidentifier`\n--\n\nDROP TABLE IF EXISTS `soda_uniqueproductidentifier`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_uniqueproductidentifier` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `shop` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `product_id` bigint(20) DEFAULT NULL,\n `x` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `y` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `shop_product_id_x` (`shop`,`product_id`,`x`)\n) ENGINE=InnoDB AUTO_INCREMENT=9549 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_uniqueproductidentifier`\n--\n\nLOCK TABLES `soda_uniqueproductidentifier` WRITE;\n/*!40000 ALTER TABLE `soda_uniqueproductidentifier` DISABLE 
KEYS */;\n/*!40000 ALTER TABLE `soda_uniqueproductidentifier` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_unsolvableproblem`\n--\n\nDROP TABLE IF EXISTS `soda_unsolvableproblem`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_unsolvableproblem` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `unsolvableproblem` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_unsolvableproblem`\n--\n\nLOCK TABLES `soda_unsolvableproblem` WRITE;\n/*!40000 ALTER TABLE `soda_unsolvableproblem` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_unsolvableproblem` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_updates`\n--\n\nDROP TABLE IF EXISTS `soda_updates`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_updates` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `name` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,\n `base_hours` int(11) DEFAULT NULL,\n `time` datetime(6) DEFAULT NULL,\n PRIMARY KEY (`id`)\n) ENGINE=InnoDB AUTO_INCREMENT=962 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_updates`\n--\n\nLOCK TABLES `soda_updates` WRITE;\n/*!40000 ALTER TABLE `soda_updates` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_updates` ENABLE KEYS */;\nUNLOCK TABLES;\n\n--\n-- Table structure for table `soda_video`\n--\n\nDROP TABLE IF EXISTS `soda_video`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n SET character_set_client = utf8mb4 ;\nCREATE TABLE `soda_video` (\n `id` int(11) NOT NULL AUTO_INCREMENT,\n `product_id` bigint(20) DEFAULT NULL,\n `video` longblob,\n `video_id` bigint(20) DEFAULT NULL,\n `downloaded` int(11) DEFAULT NULL,\n PRIMARY KEY (`id`),\n UNIQUE KEY `product_id` (`product_id`),\n UNIQUE KEY `product_id_2` (`product_id`),\n UNIQUE KEY `product_id_3` (`product_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n--\n-- Dumping data for table `soda_video`\n--\n\nLOCK TABLES `soda_video` WRITE;\n/*!40000 ALTER TABLE `soda_video` DISABLE KEYS */;\n/*!40000 ALTER TABLE `soda_video` ENABLE KEYS */;\nUNLOCK TABLES;\n/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;\n\n/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;\n/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;\n/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;\n/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;\n/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;\n/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;\n/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;\n\n-- Dump completed on 2019-12-14 22:55:04\n".encode()) SQL().restore_db_1("soda",homepath("~/tavern/tavern/soda/soda.sql")) def m31_create_initial_data(self): os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Muta,fig_on=True,sciencevessels_on=False,interest_box=[],interest_results=[],interest_suggestions=[],store_abbre='-',niche='-',is_free_plus_ship=False,page='-')" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c 
"from soda.can import *; Save(Updates,name='GhostProductUpdate',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='ProductsFeed',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='LineitemsFeed',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='AdsetUpdates',base_hours=2,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='LineitemUpdates',base_hours=3,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='Aliexpressorder_update',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='ProductUpdates',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='New_EmailUpdates',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) os.system("""cd /Users/%s/tavern/tavern && /Users/%s/tavern/bin/python3.5 -c "from soda.can import *; Save(Updates,name='Aliexpressorder_event_update',base_hours=24,time=(Time()-10)())" """%(getpass.getuser(),getpass.getuser())) def m31_change_default_file_extensions(self): import os, subprocess, shutil, sys, time, random, re if developer_mode == True: def duti(ext = "informative", application_address = "/Applications/Sublime Text.app"): bundle_identifier = subprocess.check_output("/usr/libexec/PlistBuddy -c 'Print CFBundleIdentifier' '%s/Contents/Info.plist'" % application_address, shell=True) os.system("brew install duti")if("duti" not in brewlist())else() redprint("duti -s %s %s all" % (bundle_identifier, ext)) os.system("duti -s %s %s all" % (bundle_identifier, ext)) return try: duti("doc","/Applications/LibreOffice.app") except Exception as e: print(e) print("25.. Complete") time.sleep(3) def m32_print_complete__and__sys_exit(self): import os, subprocess, shutil, sys, time, random, re print("25.. Complete") time.sleep(3) sys.exit() def notes(self): """ Notes: sudo installer -pkg /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg -target / # zlib fix export LDFLAGS="-L/usr/local/opt/zlib/lib" # zlib fix export CPPFLAGS="-I/usr/local/opt/zlib/include" # zlib fix pyenv install 3.5.0 (Versus CPPFLAGS="-I$(brew --prefix zlib)/include" pyenv install -v 3.5.0) export LDFLAGS="-L/usr/local/opt/sqlite/lib" # same error export CPPFLAGS="-I/usr/local/opt/sqlite/include" # same error pyenv install 3.5.0 # It's OK now. 
python3.5 -m venv cave # works but uses /usr/local/bin/pip and needs pip3 install ipython virtualenv -p python3 pear # works virtualenv -p python3.5 pear # error: The path python3.5 (from --python=python3.5) does not exist # ipython error: /Users/USERNAME/tavern/lib/python3.5/site-packages/IPython/core/history.py:226: UserWarning: IPython History requires SQLite, your history will not be saved warn("IPython History requires SQLite, your history will not be saved") # ipython error: ImportError: No module named '_sqlite3' # import sqlite3 -> ImportError: No module named '_sqlite3' """ def uninstall(self): import shutil os.system('ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/uninstall)"') shutil.rmtree("/Users/%s/.matplotlib"%getpass.getuser()) shutil.rmtree("/Users/%s/.pyenv"%(getpass.getuser())) os.system("/usr/local/opt/python/libexec/bin/pip uninstall virtualenv") shutil.rmtree("/Users/%s/tavern"%(getpass.getuser())) shutil.rmtree("/Users/%s/.ipython"%(getpass.getuser())) if __name__ == "__main__": setter_upper = Setter_Upper() setter_upper()
257.371488
150,103
0.737444
37,133
247,334
4.757547
0.047397
0.026123
0.061881
0.10841
0.858883
0.803687
0.726002
0.661387
0.608592
0.582972
0
0.047274
0.119523
247,334
960
150,104
257.639583
0.76395
0.107227
0
0.244848
0
0.070303
0.855248
0.172371
0
0
0
0.001042
0.019394
1
0.067879
false
0.092121
0.059394
0.001212
0.13697
0.082424
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
22305d06ca22b53f284760af2a9a5d3b9abc38e0
6,944
py
Python
test_project/test_app/tests.py
bjuretko/django-admin-object-actions
2c80ffcbb53b3d585f191d1fe662daf36fa7e204
[ "BSD-3-Clause" ]
null
null
null
test_project/test_app/tests.py
bjuretko/django-admin-object-actions
2c80ffcbb53b3d585f191d1fe662daf36fa7e204
[ "BSD-3-Clause" ]
null
null
null
test_project/test_app/tests.py
bjuretko/django-admin-object-actions
2c80ffcbb53b3d585f191d1fe662daf36fa7e204
[ "BSD-3-Clause" ]
null
null
null
# Python
from __future__ import with_statement
from __future__ import unicode_literals

# Django
from django.contrib.admin.models import LogEntry
from django.contrib import messages

try:
    from django.urls import reverse
except ImportError:
    from django.core.urlresolvers import reverse


def test_object_actions_on_changelist(admin_client, test_model_instance):
    changelist_url = reverse('admin:test_app_testmodel_changelist')
    enable_url = reverse('admin:test_app_testmodel_enable', args=[test_model_instance.pk])
    disable_url = reverse('admin:test_app_testmodel_disable', args=[test_model_instance.pk])
    refresh_url = reverse('admin:test_app_testmodel_refresh', args=[test_model_instance.pk])
    response = admin_client.get(changelist_url)
    assert response.status_code == 200
    assert enable_url not in response.content.decode('utf-8')
    assert disable_url not in response.content.decode('utf-8')
    assert refresh_url in response.content.decode('utf-8')


def test_object_actions_on_changeform(admin_client, test_model_instance):
    change_url = reverse('admin:test_app_testmodel_change', args=[test_model_instance.pk])
    enable_url = reverse('admin:test_app_testmodel_enable', args=[test_model_instance.pk])
    disable_url = reverse('admin:test_app_testmodel_disable', args=[test_model_instance.pk])
    refresh_url = reverse('admin:test_app_testmodel_refresh', args=[test_model_instance.pk])
    response = admin_client.get(change_url)
    assert response.status_code == 200
    assert enable_url in response.content.decode('utf-8')
    assert disable_url in response.content.decode('utf-8')
    assert refresh_url not in response.content.decode('utf-8')


def test_object_actions_on_addform(admin_client, test_model_instance):
    add_url = reverse('admin:test_app_testmodel_add')
    response = admin_client.get(add_url)
    assert response.status_code == 200


def test_object_action_does_not_exist(admin_client):
    admin_url = reverse('admin:index')
    enable_url = reverse('admin:test_app_testmodel_enable', args=[0])
    response = admin_client.get(enable_url, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == admin_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.WARNING
    assert 'doesn\'t exist' in message_list[0].message


def test_object_action_enable(admin_client, test_model_instance):
    assert LogEntry.objects.count() == 0
    assert not test_model_instance.enabled
    changelist_url = reverse('admin:test_app_testmodel_changelist')
    enable_url = reverse('admin:test_app_testmodel_enable', args=[test_model_instance.pk])
    response = admin_client.get(enable_url, follow=True)
    assert response.status_code == 200
    assert not response.redirect_chain
    response = admin_client.post(enable_url, {'confirm': 'on'}, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == changelist_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.SUCCESS
    assert 'enabled' in message_list[0].message
    test_model_instance.refresh_from_db()
    assert test_model_instance.enabled
    assert LogEntry.objects.count() == 1
    log_entry = LogEntry.objects.first()
    assert 'enabled' in log_entry.change_message.lower()


def test_object_action_disable(admin_client, test_model_instance):
    assert LogEntry.objects.count() == 0
    test_model_instance.enabled = True
    test_model_instance.save(update_fields=['enabled'])
    assert test_model_instance.enabled
    change_url = reverse('admin:test_app_testmodel_change', args=[test_model_instance.pk])
    disable_url = reverse('admin:test_app_testmodel_disable', args=[test_model_instance.pk])
    disable_url = '{}?next={}'.format(disable_url, change_url)
    response = admin_client.get(disable_url, follow=True)
    assert response.status_code == 200
    assert not response.redirect_chain
    response = admin_client.post(disable_url, {}, follow=True)
    assert response.status_code == 200
    assert not response.redirect_chain
    test_model_instance.refresh_from_db()
    assert test_model_instance.enabled
    response = admin_client.post(disable_url, {'confirm': 'on'}, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == change_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.SUCCESS
    assert 'disabled' in message_list[0].message
    test_model_instance.refresh_from_db()
    assert not test_model_instance.enabled
    assert LogEntry.objects.count() == 1
    log_entry = LogEntry.objects.first()
    assert 'disabled' in log_entry.change_message.lower()


def test_object_action_refresh(admin_client, test_model_instance):
    assert LogEntry.objects.count() == 0
    assert not test_model_instance.refreshed
    changelist_url = reverse('admin:test_app_testmodel_changelist')
    refresh_url = reverse('admin:test_app_testmodel_refresh', args=[test_model_instance.pk])
    response = admin_client.get(refresh_url, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == changelist_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.SUCCESS
    assert 'refreshed' in message_list[0].message
    test_model_instance.refresh_from_db()
    assert test_model_instance.refreshed
    assert LogEntry.objects.count() == 1
    log_entry = LogEntry.objects.first()
    assert 'refreshed' in log_entry.change_message.lower()


def test_object_action_check(admin_client, test_model_instance):
    assert LogEntry.objects.count() == 0
    assert not test_model_instance.refreshed
    changelist_url = reverse('admin:test_app_testmodel_changelist')
    check_url = reverse('admin:test_app_testmodel_check', args=[test_model_instance.pk])
    response = admin_client.get(check_url, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == changelist_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.ERROR
    assert 'not checked' in message_list[0].message
    assert LogEntry.objects.count() == 0


def test_object_action_fail(admin_client, test_model_instance):
    assert LogEntry.objects.count() == 0
    assert not test_model_instance.refreshed
    changelist_url = reverse('admin:test_app_testmodel_changelist')
    fail_url = reverse('admin:test_app_testmodel_fail', args=[test_model_instance.pk])
    response = admin_client.get(fail_url, follow=True)
    assert response.status_code == 200
    assert response.redirect_chain[0][0] == changelist_url
    message_list = list(response.context['messages'])
    assert message_list[0].level == messages.ERROR
    assert 'not failed' in message_list[0].message
    assert LogEntry.objects.count() == 0
46.604027
92
0.765841
943
6,944
5.321315
0.099682
0.064568
0.121961
0.075727
0.859705
0.828418
0.795935
0.793942
0.793344
0.712635
0
0.012791
0.133065
6,944
148
93
46.918919
0.820764
0.001872
0
0.574803
0
0
0.121103
0.092379
0
0
0
0
0.480315
1
0.070866
false
0
0.055118
0
0.125984
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
223e593079119dfd4a8162a461e01c1d4e5513ec
165
py
Python
tests/test_helper.py
bigbag/starlette-request-id
7abae8d65fd38c2837a77cffd3b357a9ce2efc09
[ "Apache-2.0" ]
1
2021-07-01T12:12:41.000Z
2021-07-01T12:12:41.000Z
tests/test_helper.py
bigbag/starlette-request-id
7abae8d65fd38c2837a77cffd3b357a9ce2efc09
[ "Apache-2.0" ]
3
2021-06-24T00:13:20.000Z
2021-07-26T09:32:54.000Z
tests/test_helper.py
bigbag/starlette-request-id
7abae8d65fd38c2837a77cffd3b357a9ce2efc09
[ "Apache-2.0" ]
null
null
null
from starlette_request_id import request_id_ctx


def test_request_id_ctx():
    assert request_id_ctx.set("TEST") is None
    assert request_id_ctx.get() == "TEST"
23.571429
47
0.763636
27
165
4.259259
0.481481
0.391304
0.417391
0.313043
0
0
0
0
0
0
0
0
0.145455
165
6
48
27.5
0.815603
0
0
0
0
0
0.048485
0
0
0
0
0
0.5
1
0.25
true
0
0.25
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
8
226630f3307dc64459de55d5afca6c92b8238a02
21,606
py
Python
strategies/Treway.py
Mai-Te-Pora/Demex-Trading-Bot
4b9a034ec00e7aca8cdc9e4ddd0221ebb2820513
[ "MIT" ]
6
2021-09-17T01:14:23.000Z
2022-01-16T15:14:30.000Z
strategies/Treway.py
Mai-Te-Pora/Demex-Trading-Bot
4b9a034ec00e7aca8cdc9e4ddd0221ebb2820513
[ "MIT" ]
null
null
null
strategies/Treway.py
Mai-Te-Pora/Demex-Trading-Bot
4b9a034ec00e7aca8cdc9e4ddd0221ebb2820513
[ "MIT" ]
2
2021-11-25T22:23:45.000Z
2022-01-16T15:14:33.000Z
import pandas as pd import json import itertools import asyncio import time import os, sys, logging sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from authenticated_client import demex_auth from data_processing import SavingRecords loop = 0 balances = [] swth_usdc_orderbook = [] swth_eth_orderbook = [] eth_usdc_orderbook = [] eth_wbtc_orderbook = [] wbtc_usdc_orderbook = [] usdc_max_quantity = 400 wbtc_max_quantity = 0.01 swth_max_quantity = 50000 eth_max_quantity = 0.125 swth_min_quantity_extra = 180 eth_min_quantity_extra = 0.00025 dem_client = demex_auth.auth_client() logger = logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger() p = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def analyze_wbtc(wbtc_max_quantity, over): wbtc_max_quantity = wbtc_max_quantity over = over with open(p + r"/data_processing/storage/orderbooks/eth_usdc_orderbook.json", "r") as read_file: eth_usdc_orderbook = pd.read_json(read_file) with open(p + r"/data_processing/storage/orderbooks/eth_wbtc_orderbook.json", "r") as read_file: eth_wbtc_orderbook = pd.read_json(read_file) with open(p + r"/data_processing/storage/orderbooks/wbtc_usdc_orderbook.json", "r") as read_file: wbtc_usdc_orderbook = pd.read_json(read_file) eth_usdc_orderbook['total'] = eth_usdc_orderbook['quantity'] * eth_usdc_orderbook['price'] eth_wbtc_orderbook['total'] = eth_wbtc_orderbook['quantity'] * eth_wbtc_orderbook['price'] wbtc_usdc_orderbook['total'] = wbtc_usdc_orderbook['quantity'] * wbtc_usdc_orderbook['price'] #Checking WBTC-USDC (sell), ETH-USDC (Buy), ETH-WBTC (Sell) balance #WBTC-USDC logger.info("Starting WBTC-USDC, ETH-USDC, ETH-WBTC Imbalance Check") logger.info("Starting WBTC Qty: " + str(wbtc_max_quantity)) hold_qty = wbtc_max_quantity hold_price = 0 paid_percentage = .0025 df = wbtc_usdc_orderbook.loc[(wbtc_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] hold_price += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: #Document prices for next order hold_price += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 usdc_paid = hold_price*paid_percentage total = hold_price-usdc_paid wbtc_usdc_received = total logger.info("Received USDC Qty: " + str(wbtc_usdc_received)) #ETH-USDC hold_qty = wbtc_usdc_received new_hold_qty = 0 hold_price = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] hold_price += df.iloc[position]['total'] elif df.iloc[position]['total'] > hold_qty: #Document prices for next order new_hold_qty += hold_qty/df.iloc[position]['price'] hold_price += hold_qty hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid eth_usdc_received = total logger.info("Received ETH Qty: " + str(eth_usdc_received)) #ETH-WBTC hold_qty = eth_usdc_received hold_price = 0 df = eth_wbtc_orderbook.loc[(eth_wbtc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] hold_price += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: #Document prices for next order 
hold_price += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 wbtc_paid = hold_price*paid_percentage total = hold_price-wbtc_paid eth_wbtc_received = total logger.info("End Result WBTC Qty: " + str(eth_wbtc_received)) if (eth_wbtc_received-wbtc_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_sell(pair='wbtc1_usdc1', quantity=str(wbtc_max_quantity)) dem_client.market_buy(pair='eth1_usdc1', quantity=str(wbtc_usdc_received)) dem_client.market_sell(pair='eth1_wbtc1', quantity=str(eth_usdc_received)) else: logger.info("No Trades Recommended") #Checking ETH-WBTC (sell), ETH-USDC (Sell), WBTC-USDC (Buy) balance #WBTC-USDC logger.info("Starting ETH-WBTC, ETH-USDC, WBTC-USDC Imbalance Check") logger.info("Starting WBTC Qty: " + str(wbtc_max_quantity)) hold_qty = wbtc_max_quantity new_hold_qty = 0 hold_price = 0 df = eth_wbtc_orderbook.loc[(eth_wbtc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] #hold_price += df.iloc[position]['total'] elif df.iloc[position]['total'] > hold_qty: #Document prices for next order new_hold_qty += hold_qty/df.iloc[position]['price'] #hold_price += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid eth_wbtc_received = total logger.info("Received ETH Qty: " + str(eth_wbtc_received)) #ETH-USDC hold_qty = eth_wbtc_received new_hold_qty = 0 hold_price = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: #Document prices for next order new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid eth_usdc_received = total logger.info("Received USDC Qty: " + str(eth_usdc_received)) #WBTC-USDC hold_qty = eth_usdc_received new_hold_qty = 0 hold_price = 0 df = wbtc_usdc_orderbook.loc[(wbtc_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: #Document prices for next order new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 wbtc_paid = new_hold_qty*paid_percentage total = new_hold_qty-wbtc_paid wbtc_usdc_received = total logger.info("Received WBTC Qty: " + str(wbtc_usdc_received)) if (wbtc_usdc_received - wbtc_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_buy(pair='eth1_wbtc1', quantity=str(wbtc_max_quantity)) dem_client.market_sell(pair='eth1_usdc1', quantity=str(eth_wbtc_received)) dem_client.market_buy(pair='wbtc1_usdc1', quantity=str(eth_usdc_received)) else: logger.info("No Trades Recommended") def analyze_swth(swth_max_quantity, over): swth_max_quantity = swth_max_quantity over = over with open( p + r"/data_processing/storage/orderbooks/swth_usdc_orderbook.json", "r") as read_file: swth_usdc_orderbook = pd.read_json(read_file) with 
open(p + r"/data_processing/storage/orderbooks/swth_eth_orderbook.json", "r") as read_file: swth_eth_orderbook = pd.read_json(read_file) with open(p + r"/data_processing/storage/orderbooks/eth_usdc_orderbook.json", "r") as read_file: eth_usdc_orderbook = pd.read_json(read_file) swth_usdc_orderbook['total'] = swth_usdc_orderbook['quantity'] * swth_usdc_orderbook['price'] swth_eth_orderbook['total'] = swth_eth_orderbook['quantity'] * swth_eth_orderbook['price'] eth_usdc_orderbook['total'] = eth_usdc_orderbook['quantity'] * eth_usdc_orderbook['price'] #Checking SWTH-USDC (Sell), ETH-USDC (Buy), SWTH-ETH (Buy) #SWTH-USDC logger.info("Starting SWTH-USDC, ETH-USDC, SWTH-ETH Imbalance Check") logger.info("Starting SWTH Qty: " + str(swth_max_quantity)) hold_qty = swth_max_quantity new_hold_qty = 0 paid_percentage = 0.0025 paid_qty = 0 df = swth_usdc_orderbook.loc[(swth_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 usdc_paid = new_hold_qty*paid_percentage total = new_hold_qty-usdc_paid swth_usdc_received = total logger.info("Received USDC Qty: " + str(swth_usdc_received)) #ETH-USDC hold_qty = swth_usdc_received new_hold_qty = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid eth_usdc_received = total logger.info("Received ETH Qty: " + str(eth_usdc_received)) #SWTH-ETH hold_qty = eth_usdc_received new_hold_qty = 0 df = swth_eth_orderbook.loc[(swth_eth_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 swth_paid = new_hold_qty*paid_percentage total = new_hold_qty-swth_paid swth_eth_received = total logger.info("Received SWTH Qty: " + str(swth_eth_received)) if (swth_eth_received - swth_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_sell(pair='swth_usdc1', quantity=str(swth_max_quantity)) dem_client.market_buy(pair='eth1_usdc1', quantity=str(swth_usdc_received)) dem_client.market_buy(pair='swth_eth1', quantity=str(eth_usdc_received)) else: logger.info("No Trades Recommended") #Checking SWTH-ETH, ETH-USDC, SWTH-USDC #SWTH-ETH logger.info("Starting SWTH-ETH, ETH-USDC, SWTH-USDC Imbalance Check") logger.info("Starting SWTH Qty: " + str(swth_max_quantity)) hold_qty = swth_max_quantity new_hold_qty = 0 df = swth_eth_orderbook.loc[(swth_eth_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif 
df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid swth_eth_received = total logger.info("Received ETH Qty: " + str(swth_eth_received)) #ETH-USDC hold_qty = swth_eth_received new_hold_qty = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 usdc_paid = new_hold_qty*paid_percentage total = new_hold_qty-usdc_paid eth_usdc_received = total logger.info("Received USDC Qty: " + str(eth_usdc_received)) #SWTH-USDC hold_qty = eth_usdc_received new_hold_qty = 0 df = swth_usdc_orderbook.loc[(swth_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 swth_paid = new_hold_qty*paid_percentage total = new_hold_qty-swth_paid swth_usdc_received = total logger.info("Received USDC Qty: " + str(swth_usdc_received)) if (swth_usdc_received - swth_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_sell(pair='swth_eth1', quantity=str(swth_max_quantity)) dem_client.market_sell(pair='eth1_usdc1', quantity=str(swth_eth_received)) dem_client.market_buy(pair='swth_usdc1', quantity=str(eth_usdc_received)) else: logger.info("No Trades Recommended") def analyze_eth(eth_max_quantity, over): eth_max_quantity = eth_max_quantity over = over with open( p + r"/data_processing/storage/orderbooks/wbtc_usdc_orderbook.json", "r") as read_file: wbtc_usdc_orderbook = pd.read_json(read_file) with open(p + r"/data_processing/storage/orderbooks/eth_usdc_orderbook.json", "r") as read_file: eth_usdc_orderbook = pd.read_json(read_file) with open(p + r"/data_processing/storage/orderbooks/eth_wbtc_orderbook.json", "r") as read_file: eth_wbtc_orderbook = pd.read_json(read_file) wbtc_usdc_orderbook['total'] = wbtc_usdc_orderbook['quantity'] * wbtc_usdc_orderbook['price'] eth_wbtc_orderbook['total'] = eth_wbtc_orderbook['quantity'] * eth_wbtc_orderbook['price'] eth_usdc_orderbook['total'] = eth_usdc_orderbook['quantity'] * eth_usdc_orderbook['price'] #Checking ETH-WBTC (Sell), WBTC-USDC(Sell), ETH-USDC(Buy) #ETH-WBTC logger.info("Starting ETH-WBTC, WBTC-USDC, ETH-USDC Imbalance Check") logger.info("Starting ETH Qty: " + str(eth_max_quantity)) hold_qty = eth_max_quantity new_hold_qty = 0 paid_percentage = 0.0025 df = eth_wbtc_orderbook.loc[(eth_wbtc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 wbtc_paid = new_hold_qty*paid_percentage total = new_hold_qty-wbtc_paid eth_wbtc_received = total logger.info("Received WBTC Qty: " + str(eth_wbtc_received)) hold_qty = 
eth_wbtc_received new_hold_qty = 0 df = wbtc_usdc_orderbook.loc[(wbtc_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 usdc_paid = new_hold_qty*paid_percentage total = new_hold_qty-usdc_paid wbtc_usdc_received = total logger.info("Received WBTC Qty: " + str(wbtc_usdc_received)) hold_qty = wbtc_usdc_received new_hold_qty = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid eth_usdc_received = total logger.info("Received ETH Qty: " + str(eth_usdc_received)) if (eth_usdc_received - eth_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_sell(pair='eth1_wbtc1', quantity=str(eth_max_quantity)) dem_client.market_sell(pair='wbtc1_usdc1', quantity=str(eth_wbtc_received)) dem_client.market_buy(pair='eth1_usdc1', quantity=str(wbtc_usdc_received)) else: logger.info("No Trades Recommended") #Checking ETH-USDC (Sell), WBTC-USDC(Buy), ETH-WBTC(Buy) #ETH-USDC logger.info("Starting ETH-USDC, WBTC-USDC, ETH-WBTC Imbalance Check") logger.info("Starting ETH Qty: " + str(eth_max_quantity)) hold_qty = eth_max_quantity new_hold_qty = 0 df = eth_usdc_orderbook.loc[(eth_usdc_orderbook['side'] == 'buy')] df = df.sort_values(by='price', ascending=False) position = 0 while hold_qty > 0: if df.iloc[position]['quantity'] <= hold_qty: hold_qty -= df.iloc[position]['quantity'] new_hold_qty += df.iloc[position]['total'] elif df.iloc[position]['quantity'] > hold_qty: new_hold_qty += hold_qty*df.iloc[position]['price'] hold_qty = 0 position += 1 usdc_paid = new_hold_qty*paid_percentage total = new_hold_qty-usdc_paid eth_usdc_received = total logger.info("Received USDC Qty: " + str(eth_usdc_received)) #WBTC-USDC hold_qty = eth_usdc_received new_hold_qty = 0 df = wbtc_usdc_orderbook.loc[(wbtc_usdc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 wbtc_paid = new_hold_qty*paid_percentage total = new_hold_qty-wbtc_paid wbtc_usdc_received = total logger.info("Received WBTC Qty: " + str(wbtc_usdc_received)) #ETH-WBTC hold_qty = wbtc_usdc_received new_hold_qty = 0 df = eth_wbtc_orderbook.loc[(eth_wbtc_orderbook['side'] == 'sell')] df = df.sort_values(by='price', ascending=True) position = 0 while hold_qty > 0: if df.iloc[position]['total'] <= hold_qty: hold_qty -= df.iloc[position]['total'] new_hold_qty += df.iloc[position]['quantity'] elif df.iloc[position]['total'] > hold_qty: new_hold_qty += hold_qty/df.iloc[position]['price'] hold_qty = 0 position += 1 eth_paid = new_hold_qty*paid_percentage total = new_hold_qty-eth_paid 
eth_wbtc_received = total logger.info("Received ETH Qty: " + str(eth_wbtc_received)) if (eth_wbtc_received - eth_max_quantity) > over: logger.info("Trades Recommended") logger.info("Performing Recommended Trades") dem_client.market_sell(pair='eth1_usdc1', quantity=str(eth_max_quantity)) dem_client.market_buy(pair='wbtc1_usdc1', quantity=str(eth_usdc_received)) dem_client.market_buy(pair='eth1_wbtc1', quantity=str(wbtc_usdc_received)) else: logger.info("No Trades Recommended")
41.390805
103
0.64732
2,918
21,606
4.493831
0.037012
0.111035
0.099291
0.052543
0.924884
0.913063
0.891558
0.881492
0.852742
0.837261
0
0.010007
0.23225
21,606
521
104
41.47025
0.780504
0.034296
0
0.812785
0
0
0.140642
0.026287
0
0
0
0
0
1
0.006849
false
0
0.018265
0
0.025114
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
3f0707b8a66c002757752276176a57727fbf9179
190
py
Python
pygame_breakout/breakout/entities/entities.py
nhaney/breakoutexamples
9593581df081765cb68f61aca56b641b670a8ab5
[ "Apache-2.0" ]
null
null
null
pygame_breakout/breakout/entities/entities.py
nhaney/breakoutexamples
9593581df081765cb68f61aca56b641b670a8ab5
[ "Apache-2.0" ]
null
null
null
pygame_breakout/breakout/entities/entities.py
nhaney/breakoutexamples
9593581df081765cb68f61aca56b641b670a8ab5
[ "Apache-2.0" ]
null
null
null
class Player:
    def __init__(self, id):
        self.id = id


class Block:
    def __init__(self, id):
        self.id = id


class Ball:
    def __init__(self, id):
        self.id = id
13.571429
27
0.552632
27
190
3.444444
0.296296
0.387097
0.354839
0.419355
0.784946
0.784946
0.784946
0.55914
0
0
0
0
0.336842
190
13
28
14.615385
0.738095
0
0
0.666667
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
1
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
8
3f487ea28d1197da511242d06089b45750d9c81b
2,894
bzl
Python
tests/macros.bzl
jgavert/Higanbana
380ec914a3a1540464e1f2fc5140349fb35ccb71
[ "MIT" ]
15
2020-01-15T13:04:36.000Z
2022-02-18T17:08:25.000Z
tests/macros.bzl
jgavert/Higanbana
380ec914a3a1540464e1f2fc5140349fb35ccb71
[ "MIT" ]
null
null
null
tests/macros.bzl
jgavert/Higanbana
380ec914a3a1540464e1f2fc5140349fb35ccb71
[ "MIT" ]
1
2021-12-06T07:19:05.000Z
2021-12-06T07:19:05.000Z
def src_graphics_test(target_name):
    native.cc_test(
        name = "test_graphics_" + target_name,
        srcs = ["graphics/test_" + target_name + ".cpp", "graphics/graphics_config.hpp", "graphics/graphics_config.cpp"],
        deps = ["//graphics:graphics", "//ext/Catch2:catch2_main"],
        copts = select({
            "@bazel_tools//src/conditions:windows": ["/std:c++latest", "/arch:AVX2", "/Z7", "/permissive-"],
            "//conditions:default": ["-std=c++2a", "-msse4.2", "-m64"],
        }),
        data = ["//tests/data:mapping"],
        defines = ["_ENABLE_EXTENDED_ALIGNED_STORAGE"],
        linkopts = select({
            "@bazel_tools//src/conditions:windows": ["/subsystem:CONSOLE", "/DEBUG"],
            "//conditions:default": ["-pthread", "-ltbb", "-ldl"],
        }),
    )

def src_graphics_test_with_header(target_name):
    native.cc_test(
        name = "test_graphics_" + target_name,
        srcs = ["graphics/test_" + target_name + ".cpp", "graphics/test_" + target_name + ".hpp", "graphics/graphics_config.hpp", "graphics/graphics_config.cpp"],
        deps = ["//graphics:graphics", "//ext/Catch2:catch2_main"],
        copts = select({
            "@bazel_tools//src/conditions:windows": ["/std:c++latest", "/arch:AVX2", "/Z7", "/permissive-"],
            "//conditions:default": ["-std=c++2a", "-msse4.2", "-m64"],
        }),
        data = ["//tests/data:mapping"],
        defines = ["_ENABLE_EXTENDED_ALIGNED_STORAGE"],
        linkopts = select({
            "@bazel_tools//src/conditions:windows": ["/subsystem:CONSOLE", "/DEBUG"],
            "//conditions:default": ["-pthread", "-ltbb", "-ldl"],
        }),
    )

def src_core_test(target_name):
    native.cc_test(
        name = "test_core_" + target_name,
        srcs = ["core/test_" + target_name + ".cpp"],
        deps = ["//core:core", "//ext/Catch2:catch2_main"],
        copts = select({
            "@bazel_tools//src/conditions:windows": ["/std:c++latest", "/arch:AVX2", "/permissive-", "/Z7"],
            "//conditions:default": ["-std=c++2a", "-msse4.2", "-m64", "-pthread"],
        }),
        data = ["//tests/data:mapping"],
        defines = ["_ENABLE_EXTENDED_ALIGNED_STORAGE"],
        linkopts = select({
            "@bazel_tools//src/conditions:windows": ["/subsystem:CONSOLE", "/DEBUG"],
            "//conditions:default": ["-pthread"],
        }),
    )

def src_core_test_with_header(target_name):
    native.cc_test(
        name = "test_core_" + target_name,
        srcs = ["core/test_" + target_name + ".cpp", "core/test_" + target_name + ".hpp"],
        deps = ["//core:core", "//ext/Catch2:catch2_main"],
        copts = select({
            "@bazel_tools//src/conditions:windows": ["/std:c++latest", "/arch:AVX2", "/permissive-", "/Z7"],
            "//conditions:default": ["-std=c++2a", "-msse4.2", "-m64"],
        }),
        data = ["//tests/data:mapping"],
        defines = ["_ENABLE_EXTENDED_ALIGNED_STORAGE"],
        linkopts = select({
            "@bazel_tools//src/conditions:windows": ["/subsystem:CONSOLE", "/DEBUG"],
            "//conditions:default": ["-pthread"],
        }),
    )
43.19403
158
0.603663
319
2,894
5.238245
0.163009
0.083782
0.067026
0.090963
0.951526
0.951526
0.951526
0.951526
0.946738
0.946738
0
0.014895
0.164824
2,894
67
159
43.19403
0.676458
0
0
0.796875
0
0
0.50639
0.215544
0
0
0
0
0
1
0.0625
false
0
0
0
0.0625
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
453ad34398bcd54868f380187bc3caa902ae466c
84,017
py
Python
silver/tests/commands/test_generate_docs.py
DocTocToc/silver
f1b4a8871fc4a37c8813d3c010bc70dc59c0a6e5
[ "Apache-2.0" ]
222
2017-01-15T10:30:57.000Z
2022-03-08T20:34:46.000Z
silver/tests/commands/test_generate_docs.py
DocTocToc/silver
f1b4a8871fc4a37c8813d3c010bc70dc59c0a6e5
[ "Apache-2.0" ]
141
2017-01-11T10:56:49.000Z
2021-10-12T11:51:00.000Z
silver/tests/commands/test_generate_docs.py
DocTocToc/silver
f1b4a8871fc4a37c8813d3c010bc70dc59c0a6e5
[ "Apache-2.0" ]
76
2017-01-10T13:50:27.000Z
2022-03-25T21:37:00.000Z
# Copyright (c) 2015 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import datetime as dt
from decimal import Decimal
from io import StringIO

from mock import patch, PropertyMock, MagicMock

from django.core.management import call_command
from django.test import TestCase

from silver.management.commands.generate_docs import date as generate_docs_date
from silver.models import (Proforma, DocumentEntry, Invoice, Subscription,
                           Customer, Plan, BillingLog)
from silver.fixtures.factories import (SubscriptionFactory, PlanFactory,
                                       MeteredFeatureFactory,
                                       MeteredFeatureUnitsLogFactory,
                                       CustomerFactory, ProviderFactory)
from silver.utils.dates import ONE_DAY


class TestInvoiceGenerationCommand(TestCase):
    """
    Tests:
        * non-canceled
            * consolidated billing w/ included units --
            * consolidated billing w/a included units --
            * prorated subscriptions w/ consumed mfs underflow --
            * prorated subscriptions w/ consumed mfs overflow --
            * consolidated -> subscriptions full as well as full trial
            * non-consolidated billing w/ included units --
            * non-consolidated billing w/a included units --
            * non-consolidated billing w/ prorated subscriptions
            * Generate with different default states
                * draft --
                * issued --
            * trial over multiple months --
            * variations for non-canceled subscriptions. Check the cases paper --
        * canceled
            * canceled subscription w/ trial --
            * canceled subscription w/a trial --
            * canceled subscription w trial underflow --
            * canceled subscription w trial overflow --
        * dates -- with the current tests we only test value. The tests
          should include the dates for the items too.
        * sales tax percent
        * generate_after

    TODO: add missing test descriptions
    """

    def __init__(self, *args, **kwargs):
        super(TestInvoiceGenerationCommand, self).__init__(*args, **kwargs)
        self.output = StringIO()

    ###########################################################################
    # Non-Canceled
    ###########################################################################

    def test_trial_spanning_over_multiple_months(self):
        ## SETUP ##
        customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'))

        mf_price = Decimal('2.5')
        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=Decimal('0.00'),
            price_per_unit=mf_price)

        currency = 'EUR'
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, amount=Decimal('200.00'),
                                  trial_period_days=24,
                                  metered_features=[metered_feature],
                                  currency=currency)
        start_date = dt.date(2015, 5, 20)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        consumed_1 = Decimal('5.00')
        consumed_2 = Decimal('5.00')
        mf_log = MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 6, 1), end_date=subscription.trial_end,
            consumed_units=consumed_1)

        prev_billing_date = generate_docs_date('2015-06-04')  # During trial period
        curr_billing_date = subscription.trial_end + ONE_DAY

        ## TEST ##
        call_command('generate_docs', billing_date=prev_billing_date,
                     stdout=self.output)

        proforma = Proforma.objects.first()

        # Expect 4 entries:
        # - prorated plan trial (+-) first month
        # - prorated plan trial (+-) next month
        assert proforma.proforma_entries.count() == 4
        assert Invoice.objects.all().count() == 0
        assert Proforma.objects.all()[0].total == Decimal('0.00')

        mf_log.consumed_units += consumed_2
        mf_log.save()

        call_command('generate_docs', billing_date=curr_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        # Expect 2 entries:
        # - consumed mfs from trial (as included_during_trial=0)
        # - prorated subscription for the remaining period
        assert proforma.proforma_entries.count() == 2
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])

        prorated_plan_value = Decimal(18 / 30.0).quantize(Decimal('0.0000')) * plan.amount
        consumed_mfs_value = (consumed_1 + consumed_2) * mf_price
        assert proforma.total == prorated_plan_value + consumed_mfs_value
        assert proforma.currency == currency

    def test_gen_for_non_consolidated_billing_with_consumed_units(self):
        """
        A customer has 3 subscriptions for which we use the normal case:
            * add consumed mfs for the previous month
            * add the value of the plan for the next month
            => 3 different proformas
        """
        billing_date = generate_docs_date('2015-03-01')

        customer = CustomerFactory.create(consolidated_billing=False)
        metered_feature = MeteredFeatureFactory(included_units=Decimal('0.00'))

        plan_price = Decimal('200.00')
        currency = 'RON'
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=plan_price,
                                  metered_features=[metered_feature],
                                  currency=currency)
        start_date = dt.date(2014, 1, 1)

        # Create 3 subscriptions for the same customer
        SubscriptionFactory.create_batch(size=3, plan=plan,
                                         start_date=start_date,
                                         customer=customer)

        consumed_mfs = Decimal('50.00')
        for subscription in Subscription.objects.all():
            subscription.activate()
            subscription.save()

            # For each subscription, add consumed units
            MeteredFeatureUnitsLogFactory.create(
                subscription=subscription,
                metered_feature=metered_feature,
                start_date=dt.date(2015, 2, 1),
                end_date=dt.date(2015, 2, 28),
                consumed_units=consumed_mfs)

            # Add a BillingLog to declare when the subscription was last billed
            BillingLog.objects.create(subscription=subscription,
                                      billing_date=dt.date(2015, 1, 1),
                                      plan_billed_up_to=dt.date(2015, 2, 28),
                                      metered_features_billed_up_to=dt.date(2015, 1, 31))

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 3
        assert Invoice.objects.all().count() == 0

        for proforma in Proforma.objects.all():
            entries = proforma.proforma_entries.all()
            # Plan for current month, Metered features for last month
            assert entries.count() == 2
            assert proforma.currency == currency

            for entry in entries:
                if entry.product_code == plan.product_code:
                    assert entry.quantity == 1
                    assert entry.unit_price == plan_price
                else:
                    assert entry.quantity == consumed_mfs
                    assert entry.unit_price == metered_feature.price_per_unit

    def test_gen_for_non_consolidated_billing_without_consumed_units(self):
        """
        A customer has 3 subscriptions for which he does not have any
        consumed units => 3 different proformas, each containing only the
        plan's value.
        """
        billing_date = generate_docs_date('2015-03-01')

        customer = CustomerFactory.create(consolidated_billing=False)
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'))
        start_date = dt.date(2015, 1, 1)

        # Create 3 subscriptions for the same customer
        SubscriptionFactory.create_batch(
            size=3, plan=plan, start_date=start_date, customer=customer)

        for subscription in Subscription.objects.all():
            subscription.activate()
            subscription.save()

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 3
        assert Invoice.objects.all().count() == 0

        for proforma in Proforma.objects.all():
            entries = proforma.proforma_entries.all()
            # plan for january
            # plan for february
            # plan for march
            assert entries.count() == 3
            assert proforma.currency == 'USD'

            for entry in entries:
                assert entry.quantity == 1
                assert entry.unit_price == plan.amount

    def test_gen_consolidated_billing_with_consumed_mfs(self):
        """
        A customer has 3 subscriptions for which we use the normal case:
            * add consumed mfs for the previous month for each subscription
            * add the value of the plan for the next month for each
              subscription
            => 1 proforma with all the aforementioned data
        """
        billing_date = generate_docs_date('2015-03-01')
        subscriptions_cnt = 3

        customer = CustomerFactory.create(
            consolidated_billing=True,
            sales_tax_percent=Decimal('0.00'))

        mf_price = Decimal('2.5')
        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            price_per_unit=mf_price)

        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2014, 1, 3)

        subscriptions = SubscriptionFactory.create_batch(
            size=subscriptions_cnt, plan=plan, start_date=start_date,
            customer=customer)

        consumed_mfs = Decimal('50.00')
        for subscription in subscriptions:
            subscription.activate()
            subscription.save()

            # For each subscription, add consumed units
            MeteredFeatureUnitsLogFactory.create(
                subscription=subscription,
                metered_feature=metered_feature,
                start_date=dt.date(2015, 2, 1),
                end_date=dt.date(2015, 2, 28),
                consumed_units=consumed_mfs)

            BillingLog.objects.create(subscription=subscription,
                                      billing_date=dt.date(2015, 2, 1),
                                      plan_billed_up_to=dt.date(2015, 2, 28),
                                      metered_features_billed_up_to=dt.date(2015, 1, 31))

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        # For each doc, expect 2 entries: the plan value and the mfs
        assert proforma.proforma_entries.all().count() == subscriptions_cnt * 2

        expected_total = (subscriptions_cnt * plan.amount +
                          subscriptions_cnt * (mf_price * consumed_mfs))
        assert proforma.total == expected_total

    def test_gen_consolidated_billing_without_mfs(self):
        """
        A customer has 3 subscriptions for which it does not have any
        consumed metered features.
        """
        billing_date = generate_docs_date('2015-03-01')
        subscriptions_cnt = 3

        customer = CustomerFactory.create(
            consolidated_billing=True,
            sales_tax_percent=Decimal('0.00'))

        mf_price = Decimal('2.5')
        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            price_per_unit=mf_price)

        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 1, 3)

        subscriptions = SubscriptionFactory.create_batch(
            size=subscriptions_cnt, plan=plan, start_date=start_date,
            customer=customer
        )

        for subscription in subscriptions:
            subscription.activate()
            subscription.save()

            BillingLog.objects.create(subscription=subscription,
                                      billing_date=dt.date(2015, 2, 1),
                                      plan_billed_up_to=dt.date(2015, 2, 28),
                                      metered_features_billed_up_to=dt.date(2015, 1, 31))

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        # For each doc, expect 2 entries: the plan's value + the 'extra'
        # mfs with 0 value
        assert proforma.proforma_entries.all().count() == 2 * subscriptions_cnt

        expected_total = subscriptions_cnt * plan.amount
        assert proforma.total == expected_total

    def test_prorated_subscription_with_consumed_mfs_underflow(self):
        """
        The subscription started last month and it does not have a trial
        => prorated value for the plan; the consumed_mfs < included_mfs
        => 1 proforma with 1 single value, corresponding to the plan
        for the next month
        """
        prev_billing_date = generate_docs_date('2015-02-14')
        curr_billing_date = generate_docs_date('2015-03-02')

        customer = CustomerFactory.create(
            consolidated_billing=False,
            sales_tax_percent=Decimal('0.00'))
        metered_feature = MeteredFeatureFactory(included_units=Decimal('20.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 14)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer,
            trial_end=None)
        subscription.activate()
        subscription.save()

        call_command('generate_docs', date=prev_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 2, 14), end_date=dt.date(2015, 2, 28),
            consumed_units=Decimal('10.00'))

        call_command('generate_docs', date=curr_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        # Expect 2 entries: the plan for the next month and the consumed mfs
        # with 0 total.
        assert proforma.proforma_entries.all().count() == 2
        assert proforma.total == plan.amount

    def test_prorated_subscription_with_consumed_mfs_overflow(self):
        prev_billing_date = generate_docs_date('2015-02-15')
        curr_billing_date = generate_docs_date('2015-03-02')

        customer = CustomerFactory.create(consolidated_billing=False,
                                          sales_tax_percent=Decimal('0.00'))

        mf_price = Decimal('2.5')
        metered_feature = MeteredFeatureFactory(included_units=Decimal('20.00'),
                                                price_per_unit=mf_price)
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 15)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        call_command('generate_docs', date=prev_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        assert proforma.total == Decimal(14 / 28.0) * plan.amount
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])

        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 2, 15), end_date=dt.date(2015, 2, 28),
            consumed_units=Decimal('12.00'))

        call_command('generate_docs', date=curr_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        # Expect 2 entries: the plan for the next month + the extra consumed
        # units. extra_mfs = 2, since included_mfs=20 but the plan is
        # 50% prorated => only 50% of the total included_mfs are included.
        # The mfs will not be added as the consumed_mfs < included_mfs
        assert proforma.proforma_entries.all().count() == 2
        assert proforma.total == plan.amount + mf_price * 2

        # mfs for last month
        assert proforma.proforma_entries.all()[0].prorated is True
        # plan for upcoming month
        assert proforma.proforma_entries.all()[1].prorated is False

    def test_subscription_with_trial_without_metered_features_to_draft(self):
        billing_date = generate_docs_date('2015-03-02')

        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=14,
                                  amount=Decimal('200.00'))
        start_date = dt.date(2015, 2, 4)

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=start_date)
        subscription.activate()
        subscription.save()

        Customer.objects.all()[0].sales_tax_percent = Decimal('0.00')

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        assert Proforma.objects.all()[0].state == Proforma.STATES.DRAFT

        document_entries = DocumentEntry.objects.all()
        # Expect 4 entries:
        # Plan Trial (+-), Plan Prorated (+), Plan for next month (+)
        assert len(document_entries) == 4

        doc = document_entries[0]
        assert doc.unit_price == Decimal(14.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[1]
        assert doc.unit_price == - Decimal(14.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[2]
        assert doc.unit_price == Decimal(11.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[3]
        assert doc.unit_price == plan.amount

        # And quantity 1
        assert doc.quantity == 1

    def test_subscription_with_trial_with_metered_features_underflow_to_draft(self):
        included_units_during_trial = Decimal('5.00')
        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            included_units_during_trial=included_units_during_trial
        )
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 1)

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=start_date)
        subscription.activate()
        subscription.save()

        consumed_mfs_during_trial = Decimal('3.00')
        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=subscription.trial_end,
            consumed_units=consumed_mfs_during_trial
        )

        mf_units_log_after_trial = MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.trial_end + dt.timedelta(days=1),
            end_date=dt.datetime(2015, 2, 28)
        )

        call_command('generate_docs', billing_date=dt.date(2015, 3, 1),
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        proforma = Proforma.objects.all()[0]
        assert proforma.state == Proforma.STATES.DRAFT

        document_entries = proforma.proforma_entries.all()
        # Expect 7 entries:
        # Plan Trial (+-), Plan Trial Metered Feature (+-), Plan After Trial (+)
        # Metered Features After Trial (+), Plan for next month (+)
        assert len(document_entries) == 7

        doc = document_entries[0]
        assert doc.unit_price == Decimal('50.00')  # 7 / 28 * 200

        doc = document_entries[1]
        assert doc.unit_price == Decimal('-50.00')

        doc = document_entries[2]
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == consumed_mfs_during_trial

        doc = document_entries[3]
        assert doc.unit_price == - metered_feature.price_per_unit
        assert doc.quantity == consumed_mfs_during_trial

        doc = document_entries[4]
        assert doc.unit_price == Decimal('150.00')  # 21 / 28 * 200

        doc = document_entries[5]
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_after_trial.consumed_units

        doc = document_entries[6]
        assert doc.unit_price == plan.amount

        # And quantity 1
        assert doc.quantity == 1

    def test_subscription_with_trial_with_metered_features_overflow_to_draft(self):
        billing_date = generate_docs_date('2015-03-01')

        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            included_units_during_trial=Decimal('5.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 1)
        trial_end = start_date + dt.timedelta(days=plan.trial_period_days)

        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, trial_end=trial_end)
        subscription.activate()
        subscription.save()

        units_consumed_during_trial = Decimal('7.00')
        MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=trial_end,
            consumed_units=units_consumed_during_trial
        )

        mf_units_log_after_trial = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=trial_end + dt.timedelta(days=1),
            end_date=dt.datetime(2015, 2, 28)
        )

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        assert Proforma.objects.all()[0].state == Proforma.STATES.DRAFT

        document_entries = DocumentEntry.objects.all()
        # Expect 8 entries:
        # Plan Trial (+-), Plan Trial Metered Feature (+-),
        # Extra units consumed during trial (+)
        # Plan After Trial (+)
        # Metered Features After Trial (+), Plan for next month (+)
        assert len(document_entries) == 8

        doc = document_entries[0]
        assert doc.unit_price == Decimal('57.14')

        doc = document_entries[1]
        assert doc.unit_price == Decimal('-57.14')

        doc = document_entries[2]
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == metered_feature.included_units_during_trial

        doc = document_entries[3]
        assert doc.unit_price == - metered_feature.price_per_unit
        assert doc.quantity == metered_feature.included_units_during_trial

        doc = document_entries[4]
        assert doc.unit_price == metered_feature.price_per_unit
        included_trial_units = metered_feature.included_units_during_trial
        assert doc.quantity == units_consumed_during_trial - included_trial_units

        doc = document_entries[5]
        assert doc.unit_price == Decimal('142.8600')  # 20 / 28 * 200

        doc = document_entries[6]
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_after_trial.consumed_units

        doc = document_entries[7]
        assert doc.unit_price == Decimal('200.00')

        # And quantity 1
        assert doc.quantity == 1

    def test_on_trial_with_consumed_units_underflow(self):
        customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'))

        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=Decimal('10.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  trial_period_days=14,
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 20)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 2, 20), end_date=dt.date(2015, 2, 28),
            consumed_units=Decimal('8.00'))

        billing_date = generate_docs_date('2015-03-02')
        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        # Expect 6 entries:
        # - plan trial february (+-)
        # - mfs trial february (+-)
        # - plan trial march (+-)
        assert proforma.proforma_entries.count() == 6
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])
        assert proforma.total == Decimal('0.0000')

        billing_date = generate_docs_date('2015-03-07')
        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        # Expect 3 entries:
        # - mfs trial march (+-)
        # - remaining plan march (+)
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])
        assert proforma.total != Decimal('0.0000')

    def test_on_trial_with_consumed_units_overflow(self):
        billing_date = generate_docs_date('2015-03-07')

        customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'))

        mf_price = Decimal('2.5')
        included_during_trial = Decimal('10.00')
        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=included_during_trial,
            price_per_unit=mf_price)
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  trial_period_days=14,
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 20)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        consumed_during_trial = Decimal('12.00')
        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 2, 20), end_date=dt.date(2015, 2, 28),
            consumed_units=consumed_during_trial)

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        # Expect 8 entries:
        # - plan trial february (+-)
        # - mfs trial february (+-)
        # - extra consumed mfs february (+)
        # - plan trial march (+-)
        # - remaining plan march (+)
        assert proforma.proforma_entries.count() == 8
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])

        extra_during_trial = consumed_during_trial - included_during_trial
        prorated_plan_value = Decimal(26 / 31.0).quantize(Decimal('0.0000')) * plan.amount
        assert proforma.total == extra_during_trial * mf_price + prorated_plan_value

    def test_2nd_sub_after_trial_with_consumed_units_underflow(self):
        """
        The subscription:
            * start_date=2015-05-20
            * trial_end=2015-06-03
            * first billing_date=2015-06-01
            * second billing_date=2015-06-04 (right after the trial_end)
        The consumed_during_trial < included_during_trial
        """
        ## SETUP ##
        prev_billing_date = generate_docs_date('2015-06-01')
        curr_billing_date = generate_docs_date('2015-06-04')  # First day after trial_end

        customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'))

        included_during_trial = Decimal('10.00')
        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=included_during_trial)
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  trial_period_days=14,
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 5, 20)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        consumed_during_first_trial_part = Decimal('5.00')
        consumed_during_second_trial_part = Decimal('5.00')
        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 5, 20), end_date=dt.date(2015, 5, 31),
            consumed_units=consumed_during_first_trial_part)
        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.date(2015, 6, 1), end_date=dt.date(2015, 6, 2),
            consumed_units=consumed_during_second_trial_part)

        ## TEST ##
        call_command('generate_docs', billing_date=prev_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.first()
        # Expect 6 entries:
        # - plan trial may (+-)
        # - mfs trial may (+-)
        # - plan trial june (+-)
        assert proforma.proforma_entries.count() == 6
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])

        call_command('generate_docs', billing_date=curr_billing_date,
                     stdout=self.output)

        assert proforma.total == Decimal('0.0000')

        call_command('generate_docs', billing_date=curr_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        # Expect 3 entries:
        # - mfs trial june (+-)
        # - remaining plan june (+)
        assert proforma.proforma_entries.count() == 3

        for entry in proforma.proforma_entries.all():
            assert entry.prorated

            if entry.product_code == plan.product_code:
                assert entry.start_date == subscription.trial_end + ONE_DAY
                assert entry.end_date == dt.date(2015, 6, 30)
            else:
                assert entry.start_date == dt.date(2015, 6, 1)
                assert entry.end_date == subscription.trial_end

        assert proforma.total == Decimal(28 / 30.0).quantize(Decimal('0.0000')) * plan.amount

    def test_2nd_sub_billing_after_trial_with_all_consumed_units_overflow(self):
        """
        The subscription:
            * start_date=2015-05-20
            * trial_end=2015-06-03
            * first billing_date=2015-06-01
            * second billing_date=2015-06-04 (right after the trial_end)
        During 2015-05-20->2015-06-03 all the included_during_trial units
        have been consumed.
""" # # SETUP ## prev_billing_date = generate_docs_date('2015-06-01') curr_billing_date = generate_docs_date('2015-06-04') # First day after trial_end customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00')) included_during_trial = Decimal('10.00') mf_price = Decimal('2.5') metered_feature = MeteredFeatureFactory( included_units_during_trial=included_during_trial, price_per_unit=mf_price) plan = PlanFactory.create(interval='month', interval_count=1, generate_after=120, enabled=True, amount=Decimal('200.00'), trial_period_days=14, metered_features=[metered_feature]) start_date = dt.date(2015, 5, 20) # Create the prorated subscription subscription = SubscriptionFactory.create( plan=plan, start_date=start_date, customer=customer) subscription.activate() subscription.save() consumed_during_first_trial_part = Decimal('10.00') consumed_during_second_trial_part = Decimal('12.00') MeteredFeatureUnitsLogFactory.create( subscription=subscription, metered_feature=metered_feature, start_date=dt.date(2015, 5, 20), end_date=dt.date(2015, 5, 31), consumed_units=consumed_during_first_trial_part) MeteredFeatureUnitsLogFactory.create( subscription=subscription, metered_feature=metered_feature, start_date=dt.date(2015, 6, 1), end_date=dt.date(2015, 6, 2), consumed_units=consumed_during_second_trial_part) # # TEST ## call_command('generate_docs', billing_date=prev_billing_date, stdout=self.output) assert Proforma.objects.all().count() == 1 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.all()[0] assert proforma.total == Decimal('0.0000') # Expect 6 entries: # - plan trial may (+-) # - mfs trial may (+-) # - plan trial june (+-) assert proforma.proforma_entries.count() == 6 assert all([entry.prorated for entry in proforma.proforma_entries.all()]) assert all([entry.total != Decimal('0.0000') for entry in proforma.proforma_entries.all()]) call_command('generate_docs', billing_date=curr_billing_date, stdout=self.output) assert Proforma.objects.all().count() == 2 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.all()[1] # Expect 2 entries: # - mfs trial extra june (+) # - remaining plan for june (+) assert proforma.proforma_entries.count() == 2 first_entry = proforma.proforma_entries.first() assert first_entry.start_date == dt.date(2015, 6, 1) assert first_entry.end_date == subscription.trial_end second_entry = proforma.proforma_entries.last() assert second_entry.start_date == subscription.trial_end + ONE_DAY assert second_entry.end_date == dt.date(2015, 6, 30) prorated_plan_value = Decimal(28 / 30.0).quantize(Decimal('0.0000')) * plan.amount extra_mfs_during_trial = consumed_during_second_trial_part * mf_price assert proforma.total == prorated_plan_value + extra_mfs_during_trial def test_2nd_sub_billing_after_trial_with_some_consumed_units_overflow(self): """ The subscription: * start_date=2015-05-20 * trial_end=2015-06-03 * first billing_date=2015-06-01 * second billing_date=2015-06-04 (right after the trial_end) During 2015-05-20->2015-06-03 only a part of the included units have been consumed => a part remain for the 2015-06-01->2015-06-03 """ # # SETUP ## prev_billing_date = generate_docs_date('2015-06-01') curr_billing_date = generate_docs_date('2015-06-03') # First day after trial_end customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00')) included_during_trial = Decimal('12.00') mf_price = Decimal('2.5') metered_feature = MeteredFeatureFactory( included_units_during_trial=included_during_trial, price_per_unit=mf_price) plan = 
PlanFactory.create(interval='month', interval_count=1, generate_after=120, enabled=True, amount=Decimal('200.00'), trial_period_days=14, metered_features=[metered_feature]) start_date = dt.date(2015, 5, 20) # Create the prorated subscription subscription = SubscriptionFactory.create( plan=plan, start_date=start_date, customer=customer) subscription.activate() subscription.save() consumed_during_first_trial_part = Decimal('10.00') consumed_during_second_trial_part = Decimal('12.00') MeteredFeatureUnitsLogFactory.create( subscription=subscription, metered_feature=metered_feature, start_date=dt.date(2015, 5, 20), end_date=dt.date(2015, 5, 31), consumed_units=consumed_during_first_trial_part ) MeteredFeatureUnitsLogFactory.create( subscription=subscription, metered_feature=metered_feature, start_date=dt.date(2015, 6, 1), end_date=dt.date(2015, 6, 2), consumed_units=consumed_during_second_trial_part ) # # TEST ## call_command('generate_docs', billing_date=prev_billing_date, stdout=self.output) assert Proforma.objects.all().count() == 1 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.first() # Expect 2 entries: # - plan trial may (+-) # - mfs trial may (+-) # - plan trial june (+-) assert all([entry.prorated for entry in proforma.proforma_entries.all()]) assert proforma.proforma_entries.count() == 6 assert proforma.total == Decimal('0.0000') call_command('generate_docs', billing_date=curr_billing_date, stdout=self.output) assert Proforma.objects.all().count() == 2 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.all()[1] # Expect 4 entries: # - mfs trial june (+-) # - mfs trial extra june (+) # - prorated subscription for the remaining period assert proforma.proforma_entries.count() == 4 assert all([entry.prorated for entry in proforma.proforma_entries.all()]) assert all([entry.total != Decimal('0.0000') for entry in proforma.proforma_entries.all()]) prorated_plan_value = Decimal(28 / 30.0).quantize(Decimal('0.0000')) * plan.amount extra_mfs_during_trial = 10 * mf_price assert proforma.total == prorated_plan_value + extra_mfs_during_trial def test_2nd_sub_after_prorated_month_without_trial_without_consumed_units(self): """ The subscription: * start_date=2015-05-20, no trial * first billing_date=2015-05-20 (right after activating the subscription) * second billing_date=2015-06-01 It has 0 consumed units during 2015-05-20 -> 2015-06-01. 
""" # # SETUP ## prev_billing_date = generate_docs_date('2015-05-20') curr_billing_date = generate_docs_date('2015-06-01') customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00')) metered_feature = MeteredFeatureFactory() plan = PlanFactory.create(interval='month', interval_count=1, generate_after=120, enabled=True, amount=Decimal('200.00'), metered_features=[metered_feature]) start_date = dt.date(2015, 5, 20) # Create the prorated subscription subscription = SubscriptionFactory.create( plan=plan, start_date=start_date, customer=customer) subscription.activate() subscription.save() # # TEST ## call_command('generate_docs', date=prev_billing_date, subscription=subscription.id, stdout=self.output) assert Proforma.objects.all().count() == 1 assert Invoice.objects.all().count() == 0 percent = Decimal(12 / 31.0).quantize(Decimal('0.0000')) assert Proforma.objects.all()[0].total == percent * plan.amount call_command('generate_docs', date=curr_billing_date, subscription=subscription.id, stdout=self.output) assert Proforma.objects.all().count() == 2 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.all()[1] # Expect 2 entries: the subscription for the next month # One entry for the 0 consumed mfs assert proforma.proforma_entries.count() == 2 assert proforma.total == plan.amount def test_full_month_with_consumed_units(self): billing_date = generate_docs_date('2015-07-01') customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00')) mf_price = Decimal('2.5') included_units = Decimal('20.00') metered_feature = MeteredFeatureFactory( price_per_unit=mf_price, included_units=Decimal('20.00')) provider = ProviderFactory.create() plan = PlanFactory.create(interval='month', interval_count=1, generate_after=120, enabled=True, amount=Decimal('200.00'), provider=provider, metered_features=[metered_feature]) start_date = dt.date(2015, 2, 14) subscription = SubscriptionFactory.create( plan=plan, start_date=start_date, customer=customer) subscription.activate() subscription.save() BillingLog.objects.create(subscription=subscription, billing_date=dt.date(2015, 6, 1), metered_features_billed_up_to=dt.date(2015, 5, 31), plan_billed_up_to=dt.date(2015, 6, 30)) consumed_units = Decimal('40.0000') MeteredFeatureUnitsLogFactory.create( subscription=subscription, metered_feature=metered_feature, start_date=dt.date(2015, 6, 1), end_date=dt.date(2015, 6, 30), consumed_units=consumed_units) call_command('generate_docs', date=billing_date, stdout=self.output) assert Proforma.objects.all().count() == 1 assert Invoice.objects.all().count() == 0 proforma = Proforma.objects.all()[0] assert proforma.proforma_entries.all().count() == 2 assert all([not entry.prorated for entry in proforma.proforma_entries.all()]) consumed_mfs_value = (consumed_units - included_units) * mf_price assert proforma.total == plan.amount + consumed_mfs_value def test_full_month_without_consumed_units(self): billing_date = generate_docs_date('2015-07-01') customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00')) metered_feature = MeteredFeatureFactory() provider = ProviderFactory.create() plan = PlanFactory.create(interval='month', interval_count=1, generate_after=120, enabled=True, amount=Decimal('200.00'), provider=provider, metered_features=[metered_feature]) start_date = dt.date(2015, 2, 14) subscription = SubscriptionFactory.create( plan=plan, start_date=start_date, customer=customer) subscription.activate() subscription.save() BillingLog.objects.create(subscription=subscription, 
                                  billing_date=dt.date(2015, 6, 1),
                                  metered_features_billed_up_to=dt.date(2015, 5, 31),
                                  plan_billed_up_to=dt.date(2015, 6, 30))

        call_command('generate_docs', date=billing_date, stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        assert proforma.proforma_entries.all().count() == 2
        assert proforma.total == plan.amount

    def test_gen_proforma_to_issued_state_for_one_provider(self):
        billing_date = generate_docs_date('2015-03-02')

        customer = CustomerFactory.create(
            consolidated_billing=False,
            sales_tax_percent=Decimal('0.00'))
        metered_feature = MeteredFeatureFactory(included_units=Decimal('20.00'))
        provider = ProviderFactory.create(default_document_state='issued')
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  amount=Decimal('200.00'),
                                  provider=provider,
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 14)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer)
        subscription.activate()
        subscription.save()

        mocked_should_be_billed = MagicMock(return_value=True)
        with patch.multiple('silver.models.Subscription',
                            should_be_billed=mocked_should_be_billed):
            call_command('generate_docs', billing_date=billing_date,
                         stdout=self.output)

            assert Proforma.objects.all().count() == 1
            assert Invoice.objects.all().count() == 0

            assert Proforma.objects.all().first().state == Proforma.STATES.ISSUED

    def test_gen_mixed_states_for_multiple_providers(self):
        billing_date = generate_docs_date('2015-03-02')

        customer = CustomerFactory.create(
            consolidated_billing=False,
            sales_tax_percent=Decimal('0.00'))
        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('20.00'))
        provider_draft = ProviderFactory.create(
            default_document_state='draft')
        provider_issued = ProviderFactory.create(
            default_document_state='issued')
        plan_price = Decimal('200.00')
        plan1 = PlanFactory.create(interval='month', interval_count=1,
                                   generate_after=120, enabled=True,
                                   amount=plan_price,
                                   provider=provider_draft,
                                   metered_features=[metered_feature])
        plan2 = PlanFactory.create(interval='month', interval_count=1,
                                   generate_after=120, enabled=True,
                                   amount=plan_price,
                                   provider=provider_issued,
                                   metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 14)

        # Create the prorated subscription
        subscription1 = SubscriptionFactory.create(
            plan=plan1, start_date=start_date, customer=customer)
        subscription1.activate()
        subscription1.save()

        subscription2 = SubscriptionFactory.create(
            plan=plan2, start_date=start_date, customer=customer)
        subscription2.activate()
        subscription2.save()

        mocked_last_billing_date = PropertyMock(
            return_value=dt.date(2015, 2, 14))
        mocked_is_billed_first_time = PropertyMock(return_value=False)
        mocked_get_consumed_units_during_trial = MagicMock(return_value=(0, 0))
        with patch.multiple(
            'silver.models.Subscription',
            last_billing_date=mocked_last_billing_date,
            is_billed_first_time=mocked_is_billed_first_time,
            _get_extra_consumed_units_during_trial=mocked_get_consumed_units_during_trial
        ):
            call_command('generate_docs', billing_date=billing_date,
                         stdout=self.output)

            assert Proforma.objects.all().count() == 2
            assert Invoice.objects.all().count() == 0

            assert Proforma.objects.all()[0].state == Proforma.STATES.ISSUED
            assert Proforma.objects.all()[1].state == Proforma.STATES.DRAFT

    def test_cancel_sub_without_trial_at_end_of_billing_cycle(self):
        """
        It has consumed mfs between start_date -> end_of_month
        """
        ## SETUP ##
        prev_billing_date = generate_docs_date('2015-05-20')
        random_billing_date = generate_docs_date('2015-05-27')
        curr_billing_date = generate_docs_date('2015-06-01')

        customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'))

        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.0000'),
            price_per_unit=Decimal('2.5'))
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 5, 20)
        end_of_month = dt.date(2015, 5, 31)

        # Create the prorated subscription
        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, customer=customer
        )
        subscription.activate()
        subscription.save()

        ## TEST ##

        # RUN 1
        call_command('generate_docs', billing_date=prev_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # It should add the prorated value of the plan for the rest of the
        # month
        prorated_days = (end_of_month - start_date).days + 1
        prorated_plan_value = Decimal(prorated_days / 31.0).quantize(
            Decimal('0.0000')) * plan.amount
        assert Proforma.objects.all()[0].total == prorated_plan_value

        # RUN 2
        call_command('generate_docs', billing_date=random_billing_date,
                     stdout=self.output)

        # It should be ignored
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # Move it to `canceled` state
        subscription.cancel(when=Subscription.CANCEL_OPTIONS.END_OF_BILLING_CYCLE)
        subscription.cancel_date = dt.date(2015, 5, 31)
        subscription.save()

        # Consume some mfs
        consumed_mfs = Decimal('5.00')
        MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=end_of_month,
            consumed_units=consumed_mfs)

        # RUN 3
        call_command('generate_docs', billing_date=curr_billing_date,
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[1]
        assert proforma.proforma_entries.count() == 1
        assert all([entry.prorated for entry in proforma.proforma_entries.all()])
        assert all([entry.total != Decimal('0.0000')
                    for entry in proforma.proforma_entries.all()])
        consumed_mfs_value = consumed_mfs * metered_feature.price_per_unit
        assert proforma.total == consumed_mfs_value

    ###########################################################################
    # Canceled
    ###########################################################################

    def test_canceled_subscription_with_trial_and_consumed_metered_features_draft(self):
        """
        Subscription with consumed mfs both during trial and afterwards,
        canceled in the same month it started.
        start_date = 2015-02-01
        trial_end = 2015-02-08 -- has consumed units during trial period
        end_date = 2015-02-28 -- has consumed units between trial and end_date
        """
        billing_date = generate_docs_date('2015-03-01')

        metered_feature = MeteredFeatureFactory(included_units=Decimal('0.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 1)
        trial_end = start_date + dt.timedelta(days=plan.trial_period_days)

        subscription = SubscriptionFactory.create(
            plan=plan, start_date=start_date, trial_end=trial_end)
        subscription.activate()
        subscription.save()

        mf_units_log_during_trial = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=trial_end)

        mf_units_log_after_trial = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=trial_end + dt.timedelta(days=1),
            # canceled 4 days before the end of the month
            end_date=dt.datetime(2015, 2, 28))

        subscription.cancel(when=Subscription.CANCEL_OPTIONS.END_OF_BILLING_CYCLE)
        subscription.cancel_date = dt.date(2015, 2, 28)
        subscription.save()

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        assert Proforma.objects.all()[0].state == Proforma.STATES.DRAFT

        # Expect 6 entries:
        # Plan Trial (+-), Plan Trial Metered Feature (+-),
        # Plan After Trial (+), Metered Features After Trial (+)
        document_entries = DocumentEntry.objects.all()
        assert len(document_entries) == 6

        doc = document_entries[0]  # Plan trial (+)
        assert doc.unit_price == Decimal('57.14')

        doc = document_entries[1]  # Plan trial (-)
        assert doc.unit_price == Decimal('-57.14')

        doc = document_entries[2]  # Consumed mf (+)
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_during_trial.consumed_units

        doc = document_entries[3]  # Consumed mf (-)
        assert doc.unit_price == - metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_during_trial.consumed_units

        doc = document_entries[4]  # Plan after trial end
        assert doc.unit_price == Decimal(20.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[5]  # Consumed mf after trial
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_after_trial.consumed_units

    def test_canceled_subscription_with_metered_features_to_draft(self):
        """
        start_date = 2015-01-01
        trial_end = 2015-01-08
        last_billing_date = 2015-02-01
        """
        billing_date = generate_docs_date('2015-03-01')

        metered_feature = MeteredFeatureFactory(included_units=Decimal('0.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 1, 1)

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=start_date)
        subscription.activate()
        subscription.save()

        mf_units_log = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=dt.datetime(2015, 2, 1),
            end_date=dt.datetime(2015, 2, 28)
        )

        subscription.cancel(when=Subscription.CANCEL_OPTIONS.END_OF_BILLING_CYCLE)
        subscription.cancel_date = dt.date(2015, 2, 28)
        subscription.save()

        BillingLog.objects.create(subscription=subscription,
                                  billing_date=dt.date(2015, 2, 1),
                                  plan_billed_up_to=dt.date(2015, 2, 28),
                                  metered_features_billed_up_to=dt.date(2015, 1, 31))

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # Expect 1 entry:
        # Extra Metered Features (+)
        assert DocumentEntry.objects.all().count() == 1

        doc = DocumentEntry.objects.all()[0]
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log.consumed_units

    def test_canceled_subscription_with_trial_and_trial_underflow(self):
        """
        A subscription that was canceled in the same month as it started,
        the customer consuming less metered features than
        included_units_during_trial.
        """
        billing_date = generate_docs_date('2015-03-01')

        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            included_units_during_trial=Decimal('5.00'))
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 1)

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=start_date)
        subscription.activate()
        subscription.save()

        trial_quantity = Decimal('3.00')
        MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=subscription.trial_end,
            consumed_units=trial_quantity)

        mf_units_log_after_trial = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.trial_end + dt.timedelta(days=1),
            end_date=dt.datetime(2015, 2, 28)
        )

        subscription.cancel(when=Subscription.CANCEL_OPTIONS.END_OF_BILLING_CYCLE)
        subscription.cancel_date = dt.date(2015, 2, 28)
        subscription.save()

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        assert Proforma.objects.all()[0].state == Proforma.STATES.DRAFT

        document_entries = DocumentEntry.objects.all()
        # Expect 6 entries:
        # Plan Trial (+-), Plan Trial Metered Feature (+-),
        # Plan After Trial (+), Metered Features After Trial (+)
        assert len(document_entries) == 6

        doc = document_entries[0]  # Plan trial (+)
        assert doc.unit_price == Decimal(7.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[1]  # Plan trial (-)
        assert doc.unit_price == Decimal(-7.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[2]  # Consumed mf (+)
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == trial_quantity

        doc = document_entries[3]  # Consumed mf (-)
        assert doc.unit_price == - metered_feature.price_per_unit
        assert doc.quantity == trial_quantity

        doc = document_entries[4]  # Plan after trial end
        assert doc.unit_price == Decimal(21.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[5]  # Consumed mf after trial
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_after_trial.consumed_units

    def test_canceled_subscription_with_trial_and_trial_overflow(self):
        billing_date = generate_docs_date('2015-03-01')

        units_included_during_trial = Decimal('5.00')
        metered_feature = MeteredFeatureFactory(
            included_units=Decimal('0.00'),
            included_units_during_trial=units_included_during_trial)
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'),
                                  metered_features=[metered_feature])
        start_date = dt.date(2015, 2, 1)

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=start_date)
        subscription.activate()
        subscription.save()

        units_consumed_during_trial = Decimal('7.00')
        MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=start_date, end_date=subscription.trial_end,
            consumed_units=units_consumed_during_trial)

        mf_units_log_after_trial = MeteredFeatureUnitsLogFactory(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.trial_end + dt.timedelta(days=1),
            end_date=dt.datetime(2015, 2, 28)
        )

        subscription.cancel(
            when=Subscription.CANCEL_OPTIONS.END_OF_BILLING_CYCLE
        )
        subscription.cancel_date = dt.date(2015, 2, 28)
        subscription.save()

        call_command('generate_docs', billing_date=billing_date,
                     stdout=self.output)

        # Expect one Proforma
        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        # In draft state
        assert Proforma.objects.all()[0].state == Proforma.STATES.DRAFT

        document_entries = DocumentEntry.objects.all()
        # Expect 7 entries:
        # Plan Trial (+-), Plan Trial Metered Feature (+-),
        # Extra consumed mf
        # Plan After Trial (+), Metered Features After Trial (+)
        assert len(document_entries) == 7

        doc = document_entries[0]  # Plan trial (+)
        assert doc.unit_price == Decimal(7.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[1]  # Plan trial (-)
        assert doc.unit_price == Decimal(-7.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[2]  # Consumed mf (+)
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == units_included_during_trial

        doc = document_entries[3]  # Consumed mf (-)
        assert doc.unit_price == - metered_feature.price_per_unit
        assert doc.quantity == units_included_during_trial

        doc = document_entries[4]  # Extra consumed mf (+)
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == units_consumed_during_trial - units_included_during_trial

        doc = document_entries[5]  # Plan after trial end
        assert doc.unit_price == Decimal(21.0 / 28).quantize(Decimal('0.0000')) * plan.amount

        doc = document_entries[6]  # Consumed mf after trial
        assert doc.unit_price == metered_feature.price_per_unit
        assert doc.quantity == mf_units_log_after_trial.consumed_units

    def test_gen_for_single_canceled_subscription_during_trial(self):
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, trial_period_days=7,
                                  amount=Decimal('200.00'))

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=dt.date(2015, 1, 3))
        subscription.activate()
        subscription.cancel(when=Subscription.CANCEL_OPTIONS.NOW)
        subscription.cancel_date = dt.date(2015, 1, 6)
        subscription.save()

        call_command('generate_docs', date=generate_docs_date('2015-01-06'),
                     subscription=subscription.pk, stdout=self.output)

        assert Subscription.objects.filter(state='ended').count() == 0

        # the date after the cancel date
        call_command('generate_docs', date=generate_docs_date('2015-01-07'),
                     subscription=subscription.pk, stdout=self.output)

        assert Subscription.objects.filter(state='ended').count() == 1

        assert Proforma.objects.all().count() == 1
        assert Invoice.objects.all().count() == 0

        proforma = Proforma.objects.all()[0]
        assert proforma.proforma_entries.count() == 2
        for entry in proforma.proforma_entries.all():
            assert entry.prorated
            assert entry.start_date == subscription.start_date
            assert entry.end_date == subscription.cancel_date
        assert proforma.total == Decimal('0.0000')

    def test_gen_active_and_canceled_selection(self):
        plan = PlanFactory.create(interval='month', interval_count=1,
                                  generate_after=120, enabled=True,
                                  trial_period_days=7,
                                  amount=Decimal('200.00'))
        start_date = dt.date(2015, 1, 29)

        SubscriptionFactory.create_batch(size=5, plan=plan,
                                         start_date=start_date)
        for subscription in Subscription.objects.all():
            subscription.activate()
            subscription.save()

        cancel_date = dt.date(2015, 1, 29)
        for subscription in Subscription.objects.all()[2:5]:
            subscription.cancel(when=Subscription.CANCEL_OPTIONS.NOW)
            subscription.cancel_date = cancel_date
            subscription.save()

        call_command('generate_docs', billing_date=cancel_date,
                     stdout=self.output)

        # Expect 2 Proformas from the active subs
        assert Proforma.objects.all().count() == 2
        assert Subscription.objects.filter(state='ended').count() == 0

        call_command('generate_docs', billing_date=cancel_date + ONE_DAY,
                     stdout=self.output)

        # Expect 5 Proformas (2 active Subs, 3 canceled)
        assert Proforma.objects.all().count() == 5
        assert Invoice.objects.all().count() == 0
        assert Subscription.objects.filter(state='ended').count() == 3

    def test_subscription_with_separate_cycles_during_trial(self):
        separate_cycles_during_trial = True
        prebill_plan = False
        generate_documents_on_trial_end = False

        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=Decimal('5.00'),
            price_per_unit=Decimal('1.00')
        )
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, trial_period_days=15,
                                  amount=Decimal('200.00'),
                                  separate_cycles_during_trial=separate_cycles_during_trial,
                                  generate_documents_on_trial_end=generate_documents_on_trial_end,
                                  prebill_plan=prebill_plan,
                                  metered_features=[metered_feature])

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=dt.date(2015, 1, 25))
        subscription.activate()
        subscription.save()

        subscription.customer.sales_tax_percent = None
        subscription.customer.save()

        mf_log = MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.start_date, end_date=dt.date(2015, 1, 31),
            consumed_units=Decimal('5.00')
        )

        call_command('generate_docs', date=generate_docs_date('2015-01-25'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 0

        call_command('generate_docs', date=generate_docs_date('2015-02-01'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1

        proforma = Proforma.objects.all()[0]
        assert proforma.total == Decimal('0.00')
        assert proforma.proforma_entries.count() == 4  # plan trial and consumed mfs

        for entry in proforma.proforma_entries.all():
            if entry.product_code == plan.product_code:
                unit_price = Decimal(7 / 31.0).quantize(Decimal('0.0000')) * plan.amount
                assert entry.quantity == 1
            else:
                assert entry.quantity == mf_log.consumed_units
                unit_price = metered_feature.price_per_unit

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.unit_price == unit_price
            assert entry.prorated
            assert entry.start_date == subscription.start_date
            assert entry.end_date == dt.date(2015, 1, 31)

        call_command('generate_docs', date=generate_docs_date('2015-03-01'),
                     subscription=subscription.pk, stdout=self.output)

        assert Proforma.objects.all().count() == 2

        proforma = Proforma.objects.all()[1]
        billed_plan_amount = Decimal(20 / 28.0).quantize(Decimal('0.0000')) * plan.amount
        assert proforma.total == billed_plan_amount
        assert proforma.proforma_entries.count() == 4  # plan trial (+-), plan (+) and mfs (0)

        for entry in proforma.proforma_entries.all():
            if entry.product_code == plan.product_code:
                assert entry.quantity == 1

                if entry.start_date == dt.date(2015, 2, 1):  # trial
                    unit_price = plan.amount - billed_plan_amount
                    assert entry.end_date == dt.date(2015, 2, 8)
                else:
                    assert entry.start_date == dt.date(2015, 2, 9)
                    assert entry.end_date == dt.date(2015, 2, 28)
                    unit_price = billed_plan_amount
            else:
                assert entry.quantity == Decimal('0.00')
                assert entry.start_date == subscription.trial_end + ONE_DAY
                assert entry.end_date == dt.date(2015, 2, 28)
                unit_price = entry.unit_price

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.unit_price == unit_price
            assert entry.prorated

        call_command('generate_docs', date=generate_docs_date('2015-02-10'),
                     subscription=subscription.pk, stdout=self.output)

        assert Proforma.objects.all().count() == 2

    def test_subscription_with_documents_generation_on_trial_end(self):
        separate_cycles_during_trial = False
        generate_documents_on_trial_end = True

        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=Decimal('5.00'),
            price_per_unit=Decimal('1.00')
        )
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, trial_period_days=15,
                                  amount=Decimal('200.00'),
                                  separate_cycles_during_trial=separate_cycles_during_trial,
                                  generate_documents_on_trial_end=generate_documents_on_trial_end,
                                  metered_features=[metered_feature])

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=dt.date(2015, 1, 25))
        subscription.activate()
        subscription.save()

        subscription.customer.sales_tax_percent = None
        subscription.customer.save()

        mf_log = MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.start_date, end_date=dt.date(2015, 1, 31),
            consumed_units=Decimal('5.00')
        )

        call_command('generate_docs', date=generate_docs_date('2015-01-25'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1

        proforma = Proforma.objects.all()[0]
        assert proforma.total == Decimal('0.00')
        assert proforma.proforma_entries.count() == 4  # plan trial (+-) split by months (*2)

        for entry in proforma.proforma_entries.all():
            if entry.start_date == subscription.start_date:
                assert entry.end_date == dt.date(2015, 1, 31)
                unit_price = Decimal(7 / 31.0).quantize(Decimal('0.0000')) * plan.amount
            else:
                unit_price = Decimal(8 / 28.0).quantize(Decimal('0.0000')) * plan.amount
                assert entry.start_date == dt.date(2015, 2, 1)
                assert entry.end_date == subscription.trial_end

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.quantity == 1
            assert entry.unit_price == unit_price
            assert entry.prorated

        call_command('generate_docs', date=generate_docs_date('2015-02-01'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1

        call_command('generate_docs', date=generate_docs_date('2015-02-09'),
                     stdout=self.output)

        proforma = Proforma.objects.all()[1]
        plan_amount = Decimal(20 / 28.0).quantize(Decimal('0.0000')) * plan.amount
        assert proforma.proforma_entries.count() == 3  # mfs during trial (+-) and remaining plan

        for entry in proforma.proforma_entries.all():
            if entry.product_code == plan.product_code:
                assert entry.quantity == 1
                unit_price = plan_amount
            else:
                assert entry.quantity == mf_log.consumed_units
                unit_price = metered_feature.price_per_unit

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.unit_price == unit_price
            assert entry.prorated

        assert proforma.total == plan_amount

        call_command('generate_docs', date=generate_docs_date('2015-02-10'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2

    def test_subscription_with_documents_generation_during_and_after_trial(self):
        separate_cycles_during_trial = True
        generate_documents_on_trial_end = True

        metered_feature = MeteredFeatureFactory(
            included_units_during_trial=Decimal('5.00'),
            price_per_unit=Decimal('1.00')
        )
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, trial_period_days=15,
                                  amount=Decimal('200.00'),
                                  separate_cycles_during_trial=separate_cycles_during_trial,
                                  generate_documents_on_trial_end=generate_documents_on_trial_end,
                                  metered_features=[metered_feature])

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=dt.date(2015, 1, 25))
        subscription.activate()
        subscription.save()

        subscription.customer.sales_tax_percent = None
        subscription.customer.save()

        mf_log = MeteredFeatureUnitsLogFactory.create(
            subscription=subscription, metered_feature=metered_feature,
            start_date=subscription.start_date, end_date=dt.date(2015, 1, 31),
            consumed_units=Decimal('5.00')
        )

        call_command('generate_docs', date=generate_docs_date('2015-01-25'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 1

        proforma = Proforma.objects.all()[0]
        assert proforma.total == Decimal('0.00')
        assert proforma.proforma_entries.count() == 2  # plan trial for january (+-)

        for entry in proforma.proforma_entries.all():
            assert entry.start_date == subscription.start_date
            assert entry.end_date == dt.date(2015, 1, 31)

            unit_price = Decimal(7 / 31.0).quantize(Decimal('0.0000')) * plan.amount
            if entry.unit_price < 0:
                unit_price *= -1

            assert entry.quantity == 1
            assert entry.unit_price == unit_price
            assert entry.prorated

        call_command('generate_docs', date=generate_docs_date('2015-02-01'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 2

        proforma = Proforma.objects.all()[1]
        assert proforma.total == Decimal('0.00')
        # mfs for january (+-)
        # plan trial for february (+-)
        assert proforma.proforma_entries.count() == 4

        for entry in proforma.proforma_entries.all():
            if entry.product_code == plan.product_code:
                assert entry.quantity == 1
                unit_price = Decimal(8 / 28.0).quantize(Decimal('0.0000')) * plan.amount
            else:
                assert entry.quantity == mf_log.consumed_units
                unit_price = metered_feature.price_per_unit

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.unit_price == unit_price
            assert entry.prorated

        call_command('generate_docs', date=generate_docs_date('2015-02-09'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 3

        proforma = Proforma.objects.all()[2]
        plan_amount = Decimal(20 / 28.0).quantize(Decimal('0.0000')) * plan.amount
        assert proforma.total == plan_amount
        assert proforma.proforma_entries.count() == 1  # remaining plan (+)

        for entry in proforma.proforma_entries.all():
            if entry.product_code == plan.product_code:
                assert entry.quantity == 1
                unit_price = plan_amount
            else:
                assert entry.quantity == mf_log.consumed_units
                unit_price = metered_feature.price_per_unit

            if entry.unit_price < 0:  # discount
                unit_price *= -1

            assert entry.unit_price == unit_price
            assert entry.prorated

        call_command('generate_docs', date=generate_docs_date('2015-02-10'),
                     stdout=self.output)

        assert Proforma.objects.all().count() == 3

    def test_subscription_cycle_billing_duration(self):
        plan = PlanFactory.create(interval=Plan.INTERVALS.MONTH,
                                  interval_count=1, generate_after=120,
                                  enabled=True, trial_period_days=15,
                                  amount=Decimal('200.00'),
                                  cycle_billing_duration=dt.timedelta(days=5))

        subscription = SubscriptionFactory.create(plan=plan,
                                                  start_date=dt.date(2015, 1, 25))
        subscription.activate()
        subscription.save()
subscription.customer.sales_tax_percent = None subscription.customer.save() call_command('generate_docs', date=generate_docs_date('2015-01-25'), stdout=self.output) assert Proforma.objects.all().count() == 0 billing_date = dt.date(2015, 2, 1) call_command('generate_docs', date=billing_date, stdout=self.output) assert Proforma.objects.all().count() == 1 billing_log = BillingLog.objects.filter(subscription=subscription).last() assert billing_log.billing_date == billing_date
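# The proration assertions in the tests above all apply one rule: scale the
# plan amount by billed_days / days_in_month, with the fraction quantized to
# four decimal places before multiplying. A minimal standalone sketch of that
# arithmetic (the `prorate` helper is illustrative, not part of the package):
from decimal import Decimal


def prorate(amount, billed_days, days_in_month):
    # Quantize the day fraction first, exactly as the assertions do.
    fraction = Decimal(billed_days / float(days_in_month)).quantize(Decimal('0.0000'))
    return fraction * amount


# E.g. the 7-day slice of a 200.00/month plan over January (31 days):
assert prorate(Decimal('200.00'), 7, 31) == Decimal('45.16')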
43.554692
98
0.618958
9,379
84,017
5.32093
0.03817
0.02645
0.020238
0.024406
0.873339
0.850235
0.8208
0.79499
0.770143
0.750626
0
0.043855
0.281062
84,017
1,928
99
43.577282
0.782345
0.105574
0
0.767424
0
0
0.030349
0.000704
0
0
0
0.000519
0.239624
1
0.025059
false
0
0.008614
0
0.034456
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
18da536a6b1061633d60eeb2a4de153fc0910603
172
py
Python
RobotSimulation/PlanningCore/billiard/__init__.py
benbenlijie/BilliardRobot
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
[ "Apache-2.0" ]
null
null
null
RobotSimulation/PlanningCore/billiard/__init__.py
benbenlijie/BilliardRobot
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
[ "Apache-2.0" ]
null
null
null
RobotSimulation/PlanningCore/billiard/__init__.py
benbenlijie/BilliardRobot
13f72c045c69a0a5ffb8a3cfc3f90897519dabb1
[ "Apache-2.0" ]
null
null
null
from PlanningCore.billiard.billiard import Ball, init_table, Pocket, Table

try:
    from PlanningCore.billiard.visualize import animate
except ModuleNotFoundError:
    pass
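# `animate` is an optional import: when the visualization dependency is
# missing, the ModuleNotFoundError is swallowed and the name is simply absent
# from this module. A minimal sketch of guarding for that (the `maybe_animate`
# wrapper is illustrative, not part of PlanningCore):

def maybe_animate(table):
    # Fall back to a no-op when the optional visualizer was never imported.
    if 'animate' in globals():
        return animate(table)
    return None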
34.4
74
0.819767
20
172
7
0.7
0.228571
0.342857
0
0
0
0
0
0
0
0
0
0.127907
172
5
75
34.4
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
7
7a1cdee5cfada9e6d2934d64b0f738dc9d9e5cca
1,174
py
Python
clean_architecture_helper/interactors.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
7
2019-11-08T20:08:24.000Z
2022-03-27T17:43:04.000Z
clean_architecture_helper/interactors.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
null
null
null
clean_architecture_helper/interactors.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
1
2019-10-31T20:45:32.000Z
2019-10-31T20:45:32.000Z
class BaseGetInteractor:
    def __init__(self, repo):
        self.repo = repo

    def set_params(self, id):
        self.id = id
        return self

    def execute(self):
        return self.repo.get(id=self.id)


class BaseCreateInteractor:
    def __init__(self, repo):
        self.repo = repo

    def set_params(self, **kwargs):
        self.kwargs = kwargs
        return self

    def execute(self):
        return self.repo.create(**self.kwargs)


class BaseUpdateInteractor:
    def __init__(self, repo):
        self.repo = repo

    def set_params(self, **kwargs):
        self.kwargs = kwargs
        return self

    def execute(self):
        return self.repo.update(**self.kwargs)


class BaseFilterInteractor:
    def __init__(self, repo):
        self.repo = repo

    def set_params(self, **kwargs):
        self.kwargs = kwargs
        return self

    def execute(self):
        return self.repo.all(**self.kwargs)


class BaseDeleteInteractor:
    def __init__(self, repo):
        self.repo = repo

    def set_params(self, id):
        self.id = id
        return self

    def execute(self):
        return self.repo.delete(id=self.id)
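# Each interactor wraps a single repository call behind the same two-step
# protocol: set_params() stores the arguments and returns self, so execute()
# can be chained fluently. A minimal usage sketch with a toy in-memory repo
# (the DictRepo class below is illustrative, not part of the package):

class DictRepo:
    def __init__(self):
        self._items = {}

    def create(self, **kwargs):
        self._items[kwargs['id']] = kwargs
        return kwargs

    def get(self, id):
        return self._items[id]


repo = DictRepo()
BaseCreateInteractor(repo).set_params(id=1, name='first').execute()
assert BaseGetInteractor(repo).set_params(id=1).execute()['name'] == 'first'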
19.566667
46
0.603066
142
1,174
4.809859
0.147887
0.175695
0.080527
0.10981
0.718887
0.718887
0.718887
0.718887
0.718887
0.718887
0
0
0.295571
1,174
59
47
19.898305
0.825877
0
0
0.75
0
0
0
0
0
0
0
0
0
1
0.375
false
0
0
0.125
0.75
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
9
e166239183d8c9bef4d492a925624c9e0d1be9dd
41,231
py
Python
tests/test_tms_views.py
PADAS/django-raster
68b2d181c70827dffad3c07f4f38d3490872a3eb
[ "BSD-3-Clause" ]
null
null
null
tests/test_tms_views.py
PADAS/django-raster
68b2d181c70827dffad3c07f4f38d3490872a3eb
[ "BSD-3-Clause" ]
null
null
null
tests/test_tms_views.py
PADAS/django-raster
68b2d181c70827dffad3c07f4f38d3490872a3eb
[ "BSD-3-Clause" ]
null
null
null
from unittest import skipIf

from django.core.urlresolvers import reverse
from django.test.utils import override_settings

from .raster_testcase import RasterTestCase

EMPTY_TILE = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\\r\xa8f\x00\x00\x01\x15IDATx\x9c\xed\xc11\x01\x00\x00\x00\xc2\xa0\xf5O\xedk\x08\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x03\x01<\x00\x01<\xedS\t\x00\x00\x00\x00IEND\xaeB`\x82'

# Expected PNG payloads for rendered tiles; the same bytes are asserted in
# several tests below, so they are bound to module-level names once.
TILE_WITH_LEGEND = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\\r\xa8f\x00\x00\x06{IDATx\x9c\xed\xdd\xe1m\xdb:\x10\x00`\xe7\xa1\x13t\x96\xce\xd2!3Kf\xc9\ny\xbf\x0c\xa8\x06e\x91\x12%\xf1x\xdf\x07\x14H\x93\x98\x96\x8b\xeaxGR\xe4\xe3\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x000\x94\x8f\xbb/\x00z\xfa\xfd\xe7\xef\xcf\xda\xcf\xbe\xbf>/\xf9\xff\xfe\xee\x1a\xf68\xf3\xba\xff;\xaba`|\xbf\xee\xbe\x00\xb8\xc2\xd1^\xf4\xd9\xab/\xdb\xf9\xfd\xe7\xef\xcf\x15YE\xaf\x8c\xe2y\xad\xcb\xf6d\x00L\xaf\xe7M\xfa\xeef\xfc\xfd\xe7\xefO\xef\xf4\xffl\x02\x00$\xa6\x04`\x1aW\xf4\xbe\xdf_\x9f\x1f\xcb\xf7)\x95\x06\x91\x08\x00L\xab\xd7M\xb9V\xeb\xbf\x8e\x07\xf4x\xaf\x16\xa5kZ^\xc7k\xb0*Q\x02@b\x02\x004xf\x03\xcb\x11\xf5\xabf\x03j\xb4f%\x02\x00lX\xbb\xb9Kc\x01W\xea1\xeb \x00@b\x06\x01\x99V\xcf\x11\xfae[\xd1\xe6\xfa\xdf\x11\x00\x08\xaftC\x96n\xd4\xd7\x11\xf2\xb5\xef\x95\xda\x1f\xa5\xc6\xafQ\xfal\xa5\xbf?\x1eJ\x00HM\x06\xc0\xb4\x96=\xe1Z\x0f\xfenm\xff\xda\xfa\xff\xf3\xae\xb8\xcd\xde\xacd\xf9\xba0i\r\xacY\xbb)\xd7R\xe1\xd2\xef\xb4\xb4;\x8a\x9a\xd9\x89\xad\xd7)\x01 1%\x00\xa1\xbd\xeb\xfd_\x7f\xb6\xd5c\xd6\x94\x03\xb3Q\x02\x10\xdaV\xfa_\xfa\xbd\x9a\xb4\xff\xe8t_M\xf9q\xa4\xdd5\xad\xe5\x8e\x12\x00\x12S\x020\x95\xb5Q\xfce\x8f^\x9b\r\x8c\xe6\x8ck\x15\x00H\xa3%}>\x9a\xbaG\x193P\x02@b\x02\x00!\xb5<\tW\xfb{\xcfRaO\xaa\x1d\xa9\x94XR\x020\x95\xadM;\x9f_o\xed\xa6Scm\\!\x12\x19\x00$&\x03`*\xefR\xf1\xadu\xfe5\xed\xde\xbd\t\xc8R\x8f\xf7\x97\x01\x10^\xa9n\xafY\x0f\xdfz\xf3\xefy\xed\xe8\x04\x00Hl\x8a(F>-O\xbc\x95^[\xd3\x83\x8f\xb2\x0bP\x8f\x01\xcb\xb5vd\x00L\xe9\xe8)\xc1#\xd5\xfag\x12\x00 1\xb3\x00L\xafe\xc4\x7f\xb4\xde\xfe\xec\x83M\x05\x00\xc2\xa9\xb9IG\x1f\xa5\xdf\xbb\x13Q\xef\x00\xa5\x04\x80\xc4d\x00LiOO9Z\xfa\x7f\x05\x01\x80\xa9\xd4\xec\x04|\xb4\xfd#\x07\x8e\xb4\x06\x99\xb3\xcf$P\x02@bC\x0f\x94@I\xefA\xc0\xbd\xa9\xff\x15\x19@\xcd\xd6\xdf{\xb7\x07\x7f<\x94\x00\x04\xd5+-\xbe\xab\xee\xefy\x1e\xc1\x912A\t@H\xcf\rA\x96\x7f\xee\xb8\x86;^\xfbx\xf4\x0b\x80\x02\x00$\xa6\x04 \xb4\xda\xd5}w.\x0c\xaa\xe9\xed\x8f^\xe3\xde\x9d\x8e\r\x02\x12N\xed\x89?{\xdbkq\xe4\xf4\xa0w\x87\x87\\\xb5lY\t\x00\x89\xc9\x00\x08\xa3e\x0f\x80\x96\x94\xf8\xae\x0c\xa0\xa6\xcd\x1aG\xdeW\x00 \x8c\xdas\x00{\xb7\x7f\xb5+\x03\x80\x12\x00\x123\x0b@x\xad\x99\xc1Y=}\xcb\xaa\xbdQ\xb2\r\x01\x80)E\xd8\x0f`-\x08\xb4N\xe9\xd5\x06:{\x02\x02\xff\x90\x01\x90\xc6\x95iwM\xef>\x02\x01\x80\xd0z\xa4\xfa\xa3\xd5\xe5\xad\xd6V\x11\xd6LQ*\x01 1\x19\x00\xa1\xed]\x03\xbf\xd6\xc6\xd9j\x96\xff^y=\x02\x00\xe1\x8d>\xe2_k\xcf>\x00{_\xfb\xa4\x04\x80\xc4\xa6\x88\x9c\xe4\xd0\xba\xbe\xff\x8cu\xfa\xef\xecM\xe1\xef|*P\t\xc04^o\x84#\xbb\xf7^\xa16`\x9c\xf99\x94\x00\x90\x98\x0c\x80\xa9\x9cu\x16\xc0\xf3\xeb\x9e\xc7v\xd5\xee\x14\xb4\xfc\xba\xf7\xe7\x13\x00\x98\xc6\xeb\xcdq\xd6\xc6\x9bg\x8d-\x9c}\xa8I\x89\x12\x00\x12\x93\x01\x10\xce\xa8\x83z\xad^3\x89;\x06-\x05\x00\xa6\xd23%_\x9bv\xdc\xda\x7f`\xcf5\xdc\x15\xd4\x94\x00\x90\x98\x0c\x80\x90\xae~r\xafu\xc4~\xcdh\xe5\x8b\x00@xWo\xfd5\x13%\x00$&\x00\x10\xd2\xf7\xd7\xe7G\xed\t\xbbG\xd3\xee\x9ei\xfb\x9d\x87\x99\x96(\x01\x08ck\xd7\x9b\xb3\x8cr\xb3\x9eA\x06\x00\x89\xc9\x00\x98\xdeH=\xb8Y\x008\xe8\xee\xe3\xbeG\xd0\xeb<C%\x00$&\x00\xc0\xc2\xd6\xec\xc2l\x94\x000\xb03\xa63\x97m\xca\x00 1\x19\x00\xe1\x9d\xf5\x04\xe0lJ\x9fM\x00 \xa4\xd17\xfc<\xa2\xe6T\xdf^\x94\x00\x90\x98\x0c\x80p\xbe\xbf>?fN\xd5_\x9d\xf9Ye\x00\x84\x97m\xea\xae\'\x01\x80\xd0"g\x02#<\x15(\x00@b\xc6\x00\x98F\xefs\x01\xaer\xe6u\x96J#\x0b\x81\x98^\x94\x9b\xffn\x02\x00$\xa6\x04 \xa4\xad=\xf8\xaf>\x1a<*\x01\x80\xf0\x8e>0s\xe4@\x8f\x91\xd5\xfc[(\x01 1\x19\x00\xd3{\xd7\xc3/{\xc9\xabg\x11F8\xcf@\x00 <\xab\x00\xf7S\x02@b"\'\xe9\xd4n*ze\tP\xfb^-\x03\x965%\x8d\x12\x80\xf0jg\x01j\xf6\x10\x18}&`\xef\x01\xa4k\xafS\x02@b2\x00\xd2\x18y\xb0\xb0\xe7\xc2\xa5\x96\xd7\x0b\x00L\xa3\xf6\x06\xbfb\xcb\xad\xad\x1bz\x94`\xa4\x04\x80\xc4\x86\x88Bp\xc4\x9e\xa5\xc0[\xaf\xd9\x93\x86\x1fY\xd8SS\x02\xd4\xb4\xbf\xb6]\xdaZ\xfbJ\x00\xc2[N\x8d\xf5\xda-x\xcf\xbe\x83\xa3\xcf \x94(\x01 1\x19\x00Sj9A8\xca\x19\x035\x19Fk\x16"\x000\x8d\x96\xb4\xbd\xb4\xa2.b\n\x7f\x94\x12\x00\x12\x93\x010\xa5\xadt>[oo)0\xa9<g\x04J\xff\xf1G\xbc\xf9\xef\xba&\x01\x00\x12S\x020\x95\xa8\xfb\xfb\xddu\xde\xa1\x0c\x80tF\x9f\xee[s\xc6u\x0b\x00\x90X\xc8H\x08[j\x9f\xf8\x8bV*\xf4f\x0c\x80)\xddUSG\xb0\x0c\x86J\x00HL\x00 \x05\xd9\xc0\xbf\x9ek$\x04\x00\xa6\xf5\xfd\xf5\xf9\xf1\xfa\x1c\xbc@\xf0/\x01\x00\x12\x13\x00H\xe35#x~\xef\xae\xeb\x19\x81Y\x00\xa6\xb75#pG\x10\x18\xa5\x14\x91\x01@b2\x00(\xb8\xebd\xe0\x1e\x9c\x0b\x00/\x96e\xc0\x08u\x7fi\'\xdf;\x1edR\x02@pG\x02\x9a\x00\x00\x89\xdd\x9e\n\xc1\xd5F9\x1e\xbc\x97\xd6s\x05\x97\xe5\x90\x0c\x80\x94jV\x05\x96\xd6\r\xccF\x00\x80\xc4\xa6\x8en\xb0f\xcfy\x82-m\xb6zM\xe3{\x1e\x17^z\xafg\x9b\xff\x03"C\xe6\x90$C\xcfq\x00\x00\x00\x00IEND\xaeB`\x82'

TILE_OTHER_LEGEND = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\\r\xa8f\x00\x00\x06{IDATx\x9c\xed\xdd\xe1m\xdb:\x10\x00`\xe7\xa1\x8bt\x96\xce\xd2\xa92Kf\xc9(y\xbf\x0c\xa8\x06e\x91\x12%\xf1x\xdf\x07\x14H\x93\x98\x96\x8b\xeaxGR\xe4\xe3\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x000\x94\x8f\xbb/\x00z\xfa\xfb\xe7\xf7\xcf\xda\xcf>\xbf\xbe/\xf9\xff\xfe\xee\x1a\xf68\xf3\xba\xff;\xaba`|\xbf\xee\xbe\x00\xb8\xc2\xd1^\xf4\xd9\xab/\xdb\xf9\xfb\xe7\xf7\xcf\x15YE\xaf\x8c\xe2y\xad\xcb\xf6d\x00L\xaf\xe7M\xfa\xeef\xfc\xfb\xe7\xf7O\xef\xf4\xffl\x02\x00$\xa6\x04`\x1aW\xf4\xbe\x9f_\xdf\x1f\xcb\xf7)\x95\x06\x91\x08\x00L\xab\xd7M\xb9V\xeb\xbf\x8e\x07\xf4x\xaf\x16\xa5kZ^\xc7k\xb0*Q\x02@b\x02\x004xf\x03\xcb\x11\xf5\xabf\x03j\xb4f%\x02\x00lX\xbb\xb9Kc\x01W\xea1\xeb \x00@b\x06\x01\x99V\xcf\x11\xfae[\xd1\xe6\xfa\xdf\x11\x00\x08\xaftC\x96n\xd4\xd7\x11\xf2\xb5\xef\x95\xda\x1f\xa5\xc6\xafQ\xfal\xa5\xbf?\x1eJ\x00HM\x06\xc0\xb4\x96=\xe1Z\x0f\xfenm\xff\xda\xfa\xff\xf3\xae\xb8\xcd\xde\xacd\xf9\xba0i\r\xacY\xbb)\xd7R\xe1\xd2\xef\xb4\xb4;\x8a\x9a\xd9\x89\xad\xd7)\x01 1%\x00\xa1\xbd\xeb\xfd_\x7f\xb6\xd5c\xd6\x94\x03\xb3Q\x02\x10\xdaV\xfa_\xfa\xbd\x9a\xb4\xff\xe8t_M\xf9q\xa4\xdd5\xad\xe5\x8e\x12\x00\x12S\x020\x95\xb5Q\xfce\x8f^\x9b\r\x8c\xe6\x8ck\x15\x00H\xa3%}>\x9a\xbaG\x193P\x02@b\x02\x00!\xb5<\tW\xfb{\xcfRaO\xaa\x1d\xa9\x94XR\x020\x95\xadM;\x9f_o\xed\xa6Scm\\!\x12\x19\x00$&\x03`*\xefR\xf1\xadu\xfe5\xed\xde\xbd\t\xc8R\x8f\xf7\x97\x01\x10^\xa9n\xafY\x0f\xdfz\xf3\xefy\xed\xe8\x04\x00Hl\x8a(F>-O\xbc\x95^[\xd3\x83\x8f\xb2\x0bP\x8f\x01\xcb\xb5vd\x00L\xe9\xe8)\xc1#\xd5\xfag\x12\x00 1\xb3\x00L\xafe\xc4\x7f\xb4\xde\xfe\xec\x83M\x05\x00\xc2\xa9\xb9IG\x1f\xa5\xdf\xbb\x13Q\xef\x00\xa5\x04\x80\xc4d\x00LiOO9Z\xfa\x7f\x05\x01\x80\xa9\xd4\xec\x04|\xb4\xfd#\x07\x8e\xb4\x06\x99\xb3\xcf$P\x02@bC\x0f\x94@I\xefA\xc0\xbd\xa9\xff\x15\x19@\xcd\xd6\xdf{\xb7\x07\x7f<\x94\x00\x04\xd5+-\xbe\xab\xee\xefy\x1e\xc1\x912A\t@H\xcf\rA\x96\x7f\xee\xb8\x86;^\xfbx\xf4\x0b\x80\x02\x00$\xa6\x04 \xb4\xda\xd5}w.\x0c\xaa\xe9\xed\x8f^\xe3\xde\x9d\x8e\r\x02\x12N\xed\x89?{\xdbkq\xe4\xf4\xa0w\x87\x87\\\xb5lY\t\x00\x89\xc9\x00\x08\xa3e\x0f\x80\x96\x94\xf8\xae\x0c\xa0\xa6\xcd\x1aG\xdeW\x00 \x8c\xdas\x00{\xb7\x7f\xb5+\x03\x80\x12\x00\x123\x0b@x\xad\x99\xc1Y=}\xcb\xaa\xbdQ\xb2\r\x01\x80)E\xd8\x0f`-\x08\xb4N\xe9\xd5\x06:{\x02\x02\xff\x90\x01\x90\xc6\x95iwM\xef>\x02\x01\x80\xd0z\xa4\xfa\xa3\xd5\xe5\xad\xd6V\x11\xd6LQ*\x01 1\x19\x00\xa1\xed]\x03\xbf\xd6\xc6\xd9j\x96\xff^y=\x02\x00\xe1\x8d>\xe2_k\xcf>\x00{_\xfb\xa4\x04\x80\xc4\xa6\x88\x9c\xe4\xd0\xba\xbe\xff\x8cu\xfa\xef\xecM\xe1\xef|*P\t\xc04^o\x84#\xbb\xf7^\xa16`\x9c\xf99\x94\x00\x90\x98\x0c\x80\xa9\x9cu\x16\xc0\xf3\xeb\x9e\xc7v\xd5\xee\x14\xb4\xfc\xba\xf7\xe7\x13\x00\x98\xc6\xeb\xcdq\xd6\xc6\x9bg\x8d-\x9c}\xa8I\x89\x12\x00\x12\x93\x01\x10\xce\xa8\x83z\xad^3\x89;\x06-\x05\x00\xa6\xd23%_\x9bv\xdc\xda\x7f`\xcf5\xdc\x15\xd4\x94\x00\x90\x98\x0c\x80\x90\xae~r\xafu\xc4~\xcdh\xe5\x8b\x00@xWo\xfd5\x13%\x00$&\x00\x10\xd2\xe7\xd7\xf7G\xed\t\xbbG\xd3\xee\x9ei\xfb\x9d\x87\x99\x96(\x01\x08ck\xd7\x9b\xb3\x8cr\xb3\x9eA\x06\x00\x89\xc9\x00\x98\xdeH=\xb8Y\x008\xe8\xee\xe3\xbeG\xd0\xeb<C%\x00$&\x00\xc0\xc2\xd6\xec\xc2l\x94\x000\xb03\xa63\x97m\xca\x00 1\x19\x00\xe1\x9d\xf5\x04\xe0lJ\x9fM\x00 \xa4\xd17\xfc<\xa2\xe6T\xdf^\x94\x00\x90\x98\x0c\x80p>\xbf\xbe?fN\xd5_\x9d\xf9Ye\x00\x84\x97m\xea\xae\'\x01\x80\xd0"g\x02#<\x15(\x00@b\xc6\x00\x98F\xefs\x01\xaer\xe6u\x96J#\x0b\x81\x98^\x94\x9b\xffn\x02\x00$\xa6\x04 \xa4\xad=\xf8\xaf>\x1a<*\x01\x80\xf0\x8e>0s\xe4@\x8f\x91\xd5\xfc[(\x01 1\x19\x00\xd3{\xd7\xc3/{\xc9\xabg\x11F8\xcf@\x00 <\xab\x00\xf7S\x02@b"\'\xe9\xd4n*ze\tP\xfb^-\x03\x965%\x8d\x12\x80\xf0jg\x01j\xf6\x10\x18}&`\xef\x01\xa4k\xafS\x02@b2\x00\xd2\x18y\xb0\xb0\xe7\xc2\xa5\x96\xd7\x0b\x00L\xa3\xf6\x06\xbfb\xcb\xad\xad\x1bz\x94`\xa4\x04\x80\xc4\x86\x88Bp\xc4\x9e\xa5\xc0[\xaf\xd9\x93\x86\x1fY\xd8SS\x02\xd4\xb4\xbf\xb6]\xdaZ\xfbJ\x00\xc2[N\x8d\xf5\xda-x\xcf\xbe\x83\xa3\xcf \x94(\x01 1\x19\x00Sj9A8\xca\x19\x035\x19Fk\x16"\x000\x8d\x96\xb4\xbd\xb4\xa2.b\n\x7f\x94\x12\x00\x12\x93\x010\xa5\xadt>[oo)0\xa9<g\x04J\xff\xf1G\xbc\xf9\xef\xba&\x01\x00\x12S\x020\x95\xa8\xfb\xfb\xddu\xde\xa1\x0c\x80tF\x9f\xee[s\xc6u\x0b\x00\x90X\xc8H\x08[j\x9f\xf8\x8bV*\xf4f\x0c\x80)\xddUSG\xb0\x0c\x86J\x00HL\x00 \x05\xd9\xc0\xbf\x9ek$\x04\x00\xa6\xf5\xf9\xf5\xfd\xf1\xfa\x1c\xbc@\xf0/\x01\x00\x12\x13\x00H\xe35#x~\xef\xae\xeb\x19\x81Y\x00\xa6\xb75#pG\x10\x18\xa5\x14\x91\x01@b2\x00(\xb8\xebd\xe0\x1e\x9c\x0b\x00/\x96e\xc0\x08u\x7fi\'\xdf;\x1edR\x02@pG\x02\x9a\x00\x00\x89\xdd\x9e\n\xc1\xd5F9\x1e\xbc\x97\xd6s\x05\x97\xe5\x90\x0c\x80\x94jV\x05\x96\xd6\r\xccF\x00\x80\xc4\xa6\x8en\xb0f\xcfy\x82-m\xb6zM\xe3{\x1e\x17^z\xafg\x9b\xff\x03\x13\xcb\xed>\xfd\xcd\'\xd6\x00\x00\x00\x00IEND\xaeB`\x82'


@override_settings(RASTER_TILE_CACHE_TIMEOUT=0)
class RasterTmsTests(RasterTestCase):

    def test_tms_nonexisting_layer(self):
        url = reverse('tms', kwargs={
            'z': self.tile.tilez,
            'y': self.tile.tiley,
            'x': self.tile.tilex,
            'layer': 'raster_nonexistent.tif',
            'format': '.png'
        })
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_tms_nonexisting_tile(self):
        url = reverse('tms', kwargs={'z': 100, 'y': 0, 'x': 0,
                                     'layer': self.rasterlayer.id, 'format': '.png'})
        response = self.client.get(url)
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, EMPTY_TILE)
        self.assertEqual(response.status_code, 200)

    def test_tms_duplicated_layer_filename(self):
        url = reverse('tms', kwargs={'z': 100, 'y': 0, 'x': 0,
                                     'layer': 'raster.tif', 'format': '.png'})
        DUPL_MSG = 'get() returned more than one RasterLayer -- it returned 2!'
        with self.assertRaisesMessage(Exception, DUPL_MSG):
            self.client.get(url)

    def test_tms_existing_tile(self):
        # Get tms tile rendered with legend
        response = self.client.get(self.tile_url)
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_WITH_LEGEND)
        self.assertEqual(response.status_code, 200)

    @skipIf(True, 'Fails on CI environment.')
    def test_tms_existing_tile_without_legend(self):
        # Get tms tile for layer without legend
        self.rasterlayer.legend = None
        self.rasterlayer.save()
        response = self.client.get(self.tile_url)
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_WITH_LEGEND)
        self.assertEqual(response.status_code, 200)

    def test_tms_existing_tile_using_rasterlayer_id_in_url(self):
        url = reverse('tms', kwargs={
            'z': self.tile.tilez,
            'y': self.tile.tiley,
            'x': self.tile.tilex,
            'layer': self.rasterlayer.id,
            'format': '.png'
        })
        # Get tms tile rendered with legend
        response = self.client.get(url)
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_WITH_LEGEND)
        self.assertEqual(response.status_code, 200)

    def test_tms_legend_query_arg(self):
        response = self.client.get(self.tile_url + '?legend=other')
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_OTHER_LEGEND)
        self.assertEqual(response.status_code, 200)

    def test_tms_manual_colormap_query_arg(self):
        response = self.client.get(self.tile_url + '?colormap={"4": [101, 67, 33, 255]}')
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_OTHER_LEGEND)
        self.assertEqual(response.status_code, 200)

    def test_tms_manual_colormap_query_arg_hex(self):
        response = self.client.get(self.tile_url + '?colormap={"4": "654321"}')
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_OTHER_LEGEND)
        self.assertEqual(response.status_code, 200)

    def test_tms_entries_query_arg(self):
        response = self.client.get(self.tile_url + '?entries=4&legend=dual')
        self.assertEqual(response['Content-type'], 'PNG')
        self.assertEqual(response.content, TILE_OTHER_LEGEND)
        self.assertEqual(response.status_code, 200)
371.45045
5,174
0.724334
9,234
41,231
3.219731
0.052523
0.2454
0.344489
0.44277
0.975077
0.972857
0.970637
0.970637
0.970637
0.96909
0
0.265811
0.026703
41,231
110
5,175
374.827273
0.475056
0.002547
0
0.549451
0
0.241758
0.874982
0.858689
0
0
0
0
0.285714
1
0.10989
false
0
0.043956
0
0.164835
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
1
0
0
0
1
1
1
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
16
bee4224b6c1290d6e161a92235308503b7dfe244
349
py
Python
tests/internal/instance_type/test_instance_type_u__auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
tests/internal/instance_type/test_instance_type_u__auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
tests/internal/instance_type/test_instance_type_u__auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
# Testing module instance_type.u_
import pytest

import ec2_compare.internal.instance_type.u_


def test_get_internal_data_instance_type_u__get_instances_list():
    assert len(ec2_compare.internal.instance_type.u_.get_instances_list()) > 0


def test_get_internal_data_instance_type_u__get():
    assert len(ec2_compare.internal.instance_type.u_.get) > 0
34.9
76
0.848138
56
349
4.732143
0.339286
0.271698
0.29434
0.241509
0.826415
0.826415
0.611321
0.611321
0.611321
0
0
0.01548
0.074499
349
9
77
38.777778
0.804954
0.088825
0
0
0
0
0
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
10
beea6f66cf087a467f63985f9cebabbe0d2b52b0
13,142
py
Python
src/py/analysis_lib/behaviour/analyze_behaviour_test.py
LandonFuhr/aseen
9a7d6b0a66930872cf1da7d3a5493326285f3bd1
[ "MIT" ]
null
null
null
src/py/analysis_lib/behaviour/analyze_behaviour_test.py
LandonFuhr/aseen
9a7d6b0a66930872cf1da7d3a5493326285f3bd1
[ "MIT" ]
null
null
null
src/py/analysis_lib/behaviour/analyze_behaviour_test.py
LandonFuhr/aseen
9a7d6b0a66930872cf1da7d3a5493326285f3bd1
[ "MIT" ]
null
null
null
import numpy as np
import pytest

from analysis_lib.dlc_results_adapter import DlcResults, get_labels
from analysis_lib.behaviour.analyze_behaviour import get_region_stats, basic_behavioural_assay_algorithm
from analysis_lib.behaviour.arena_setup_adapter import ArenaSetup, Point, RectangleGeometry, Region


def test_it_counts_entries():
    point_inside = [2, 2, 1.0]
    point_outside = [0, 0, 1.0]
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left']),
        np.array([
            point_outside,
            point_inside,   # entry 1
            point_outside,
            point_inside,   # entry 2
            point_outside,
            point_outside,
            point_inside,   # entry 3
            point_inside,
            point_inside,
            point_outside
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_per_region[0].n_entries == 3


def test_it_counts_frames_fully_inside_areas():
    point_in_both = [2.0, 2.0, 1.0]
    point_only_in_1 = [3.5, 3.5, 1.0]
    point_only_in_2 = [0.5, 0.5, 1.0]
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            point_in_both + point_in_both,      # fully inside both
            point_in_both + point_only_in_1,    # fully inside 1
            point_only_in_2 + point_only_in_1,  # fully inside neither
            point_in_both + point_in_both,      # fully inside both
            point_only_in_1 + point_only_in_1,  # fully inside 1
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id="1", geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None),
            Region(_id="2", geometry=RectangleGeometry(
                top_left=Point(x=0, y=0), width=3, height=3, rotation=0),
                color_palette=None)],
        interaction_zones=[])
    results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert get_region_stats(
        'Mouse 1', '1', results).frames_fully_inside == 4
    assert get_region_stats(
        'Mouse 1', '2', results).frames_fully_inside == 2


def test_it_counts_frames_partly_inside_areas():
    point_in_both = [2.0, 2.0, 1.0]
    point_only_in_1 = [3.5, 3.5, 1.0]
    point_only_in_2 = [0.5, 0.5, 1.0]
    point_in_neither = [10.0, 10.0, 1.0]
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            point_in_both + point_in_neither,    # partly inside both
            point_in_neither + point_only_in_1,  # partly inside 1
            point_only_in_1 + point_only_in_1,   # partly inside 1
            point_only_in_2 + point_in_neither,  # partly inside 2
            point_in_both + point_in_both,       # partly inside both
            point_only_in_1 + point_only_in_2,   # partly inside both
            point_in_neither + point_in_neither  # partly inside neither
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id="1", geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None),
            Region(_id="2", geometry=RectangleGeometry(
                top_left=Point(x=0, y=0), width=3, height=3, rotation=0),
                color_palette=None)],
        interaction_zones=[])
    results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert get_region_stats(
        'Mouse 1', '1', results).frames_partly_inside == 5
    assert get_region_stats(
        'Mouse 1', '2', results).frames_partly_inside == 4


def test_it_counts_interaction_frames():
    inside_region_1 = [3.0, 3.0, 1.0]
    inside_both_regions = [1.5, 1.5, 1.0]
    outside_regions = [5.0, 5.0, 1.0]
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['nose', 'ear_left', 'ear_right']),
        np.array([
            inside_region_1 + outside_regions + outside_regions,      # 1
            outside_regions + outside_regions + outside_regions,
            inside_region_1 + outside_regions + outside_regions,      # 1
            inside_both_regions + outside_regions + outside_regions,  # 1 & 2
            outside_regions + outside_regions + outside_regions,
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id="1", geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None),
            Region(_id="2", geometry=RectangleGeometry(
                top_left=Point(x=0, y=0), width=2, height=2, rotation=0),
                color_palette=None)],
        interaction_zones=[])
    results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert get_region_stats(
        'Mouse 1', '1', results).frames_of_interaction == 3
    assert get_region_stats(
        'Mouse 1', '2', results).frames_of_interaction == 1


def test_it_tracks_total_distance_using_center():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [0, 0, 1.0] + [0, 0, 1.0],
            [0, 0, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [2, 2, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [12, 2, 1.0] + [8, 2, 1.0],       # 8 N
            [-5, -18, 1.0] + [-5, -18, 1.0],  # 25 SW
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_overall.total_distance_travelled_in_pixels == 35.82842712474619


def test_it_tracks_total_distance_ignoring_nan_frames():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [0, 0, 1.0] + [0, 0, 1.0],
            [0, 0, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [2, 2, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [12, 2, 1.0] + [8, 2, 1.0],       # 8 N
            [-5, -18, 1.0] + [-5, -18, 1.0],  # 25 SW
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_overall.total_distance_travelled_in_pixels == 35.82842712474619


def test_it_tracks_distance_by_frame():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [0, 0, 1.0] + [0, 0, 1.0],
            [0, 0, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [2, 2, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [12, 2, 1.0] + [8, 2, 1.0],       # 8 N
            [-5, -18, 1.0] + [-5, -18, 1.0],  # 25 SW
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].source_data.distance_travelled_between_each_frame_in_pixels == [
        1.4142135623730951, 1.4142135623730951, 8.0, 25.0]


def test_it_tracks_distance_by_frame_with_nan_as_None():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [0, 0, 1.0] + [0, 0, 1.0],
            [0, 0, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [2, 2, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [12, 2, 1.0] + [8, 2, 1.0],       # 8 N
            [-5, -18, 1.0] + [-5, -18, 1.0],  # 25 SW
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].source_data.distance_travelled_between_each_frame_in_pixels == [
        None, 0, 1.4142135623730951, None, 1.4142135623730951, 8.0, 25.0, None, None]


def test_it_tracks_average_speed():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [0, 0, 1.0] + [0, 0, 1.0],
            [0, 0, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [2, 2, 1.0] + [2, 2, 1.0],        # sqrt(2) NE
            [12, 2, 1.0] + [8, 2, 1.0],       # 8 N
            [-5, -18, 1.0] + [-5, -18, 1.0],  # 25 SW
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_overall.average_speed_in_pixels_per_frame == 35.82842712474619 / 5


def test_it_tracks_fraction_of_frames_fully_detected():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],  # not detected
            [0, 0, 1.0] + [np.nan, np.nan, np.nan],               # partly detected
            [2, 2, 1.0] + [2, 2, 1.0],                            # fully detected
            [np.nan, np.nan, np.nan] + [-5, -18, 1.0],            # partly detected
            [12, 2, 1.0] + [8, 2, 1.0],                           # fully detected
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_overall.fraction_of_frames_with_animal_fully_detected == 0.4


def test_it_tracks_fraction_of_frames_partly_detected():
    dlc_results = DlcResults(
        get_labels(['Mouse 1'], ['ear_left', 'ear_right']),
        np.array([
            [np.nan, np.nan, np.nan] + [np.nan, np.nan, np.nan],  # not detected
            [0, 0, 1.0] + [np.nan, np.nan, np.nan],               # partly detected
            [2, 2, 1.0] + [2, 2, 1.0],                            # fully detected
            [np.nan, np.nan, np.nan] + [-5, -18, 1.0],            # partly detected
            [12, 2, 1.0] + [8, 2, 1.0],                           # fully detected
        ], dtype=float))
    arena_setup = ArenaSetup(
        areas=[Region(_id=None, geometry=RectangleGeometry(
            top_left=Point(x=1, y=1), width=3, height=3, rotation=0),
            color_palette=None)],
        interaction_zones=[])
    behaviour_results = basic_behavioural_assay_algorithm(
        arena_setup=arena_setup, dlc_results=dlc_results)
    assert behaviour_results[0].stats_overall.fraction_of_frames_with_animal_partly_detected == 0.8
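The expected values in the distance tests follow a simple convention: the animal's position per frame is the centre (mean) of its body-part coordinates, and distance travelled is the sum of Euclidean steps between consecutive frames, with undetected (NaN) frames skipped. A minimal sketch verifying the arithmetic; `center_distances` is a hypothetical helper, not part of analysis_lib:

import numpy as np

def center_distances(xy):
    """xy: array of shape (frames, bodyparts, 2)."""
    center = np.nanmean(xy, axis=1)       # per-frame centre of the animal
    steps = np.diff(center, axis=0)       # displacement between frames
    return np.linalg.norm(steps, axis=1)  # Euclidean step lengths

# Centres for the frames above: (0,0), (1,1), (2,2), (10,2), (-5,-18)
xy = np.array([[[0, 0], [0, 0]],
               [[0, 0], [2, 2]],
               [[2, 2], [2, 2]],
               [[12, 2], [8, 2]],
               [[-5, -18], [-5, -18]]], dtype=float)
d = center_distances(xy)
print(d)        # [1.4142... 1.4142... 8. 25.]
print(d.sum())  # 35.82842712474619, the total asserted above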
42.121795
104
0.565972
1,777
13,142
3.92628
0.061339
0.060198
0.072237
0.103196
0.880321
0.849362
0.816253
0.783145
0.770818
0.746309
0
0.069351
0.298889
13,142
311
105
42.257235
0.687866
0.042383
0
0.827068
0
0
0.024968
0
0
0
0
0
0.052632
1
0.041353
false
0
0.018797
0
0.06015
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
55cfdbe7c47156aab43da24381c3b2726dd82b18
2,366
py
Python
social/apps/django_app/tests.py
Nikolay232/python-social-auth
bb2f08127005091544a69208e9adf0aeccce9479
[ "BSD-3-Clause" ]
1
2015-04-19T21:38:46.000Z
2015-04-19T21:38:46.000Z
social/apps/django_app/tests.py
nvbn/python-social-auth
3e0e99404f20e7b6847ca069e0844ba8c090415f
[ "BSD-3-Clause" ]
null
null
null
social/apps/django_app/tests.py
nvbn/python-social-auth
3e0e99404f20e7b6847ca069e0844ba8c090415f
[ "BSD-3-Clause" ]
1
2016-04-01T06:03:51.000Z
2016-04-01T06:03:51.000Z
from social.tests.test_exceptions import *
from social.tests.test_pipeline import *
from social.tests.test_storage import *
from social.tests.test_utils import *
from social.tests.actions.test_associate import *
from social.tests.actions.test_disconnect import *
from social.tests.actions.test_login import *
from social.tests.backends.test_amazon import *
from social.tests.backends.test_angel import *
from social.tests.backends.test_behance import *
from social.tests.backends.test_bitbucket import *
from social.tests.backends.test_box import *
from social.tests.backends.test_broken import *
from social.tests.backends.test_dailymotion import *
from social.tests.backends.test_disqus import *
from social.tests.backends.test_dropbox import *
from social.tests.backends.test_dummy import *
from social.tests.backends.test_email import *
from social.tests.backends.test_evernote import *
from social.tests.backends.test_facebook import *
from social.tests.backends.test_fitbit import *
from social.tests.backends.test_flickr import *
from social.tests.backends.test_foursquare import *
from social.tests.backends.test_google import *
from social.tests.backends.test_instagram import *
from social.tests.backends.test_linkedin import *
from social.tests.backends.test_live import *
from social.tests.backends.test_livejournal import *
from social.tests.backends.test_mixcloud import *
from social.tests.backends.test_podio import *
from social.tests.backends.test_readability import *
from social.tests.backends.test_reddit import *
from social.tests.backends.test_skyrock import *
from social.tests.backends.test_soundcloud import *
from social.tests.backends.test_stackoverflow import *
from social.tests.backends.test_steam import *
from social.tests.backends.test_stocktwits import *
from social.tests.backends.test_stripe import *
from social.tests.backends.test_thisismyjam import *
from social.tests.backends.test_tripit import *
from social.tests.backends.test_tumblr import *
from social.tests.backends.test_twitter import *
from social.tests.backends.test_username import *
from social.tests.backends.test_utils import *
from social.tests.backends.test_vk import *
from social.tests.backends.test_xing import *
from social.tests.backends.test_yahoo import *
from social.tests.backends.test_yammer import *
from social.tests.backends.test_yandex import *
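These wildcard imports appear to exist solely to re-export the framework-agnostic test suite inside the Django app, where Django's test runner will collect it. The same effect, sketched explicitly with unittest discovery (module names as above; running this requires the `social` package and its test settings to be importable):

import unittest

# Hypothetical explicit equivalent of the wildcard re-exports above:
# load a few of the shared test modules into one suite and run them.
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([
    'social.tests.test_exceptions',
    'social.tests.test_pipeline',
    'social.tests.test_storage',
    'social.tests.test_utils',
])
unittest.TextTestRunner(verbosity=2).run(suite)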
47.32
54
0.83601
339
2,366
5.690265
0.162242
0.254018
0.381026
0.522551
0.812338
0.775531
0
0
0
0
0
0
0.08284
2,366
49
55
48.285714
0.88894
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
3624b0ea62c97c3195c6b232ba8c26de8ff104da
10,582
py
Python
envs/robosuite/sawyer.py
vincentlui/megae
16b8d29377e3180447b03cb8f5120e9e086ad56d
[ "MIT" ]
82
2020-07-06T16:53:39.000Z
2022-03-25T18:12:35.000Z
envs/robosuite/sawyer.py
vincentlui/megae
16b8d29377e3180447b03cb8f5120e9e086ad56d
[ "MIT" ]
7
2020-10-21T21:25:12.000Z
2022-01-13T02:58:46.000Z
envs/robosuite/sawyer.py
vincentlui/megae
16b8d29377e3180447b03cb8f5120e9e086ad56d
[ "MIT" ]
19
2020-06-05T23:11:32.000Z
2022-03-28T08:43:27.000Z
import numpy as np
import robosuite as rs
import gym
import time

from robosuite.environments.sawyer_lift import SawyerLift
from robosuite.utils import MujocoPyRenderer


class GoalBasedSawyerLift(SawyerLift, gym.GoalEnv):
    """
    Wraps SawyerLift in a GoalEnv.
    """

    def __init__(self, test=False, use_dense=True, objgrip=False):
        super().__init__(
            use_camera_obs=False,
            has_renderer=False,
            use_indicator_object=True,
            has_offscreen_renderer=False,
            horizon=50,
            ignore_done=True
        )
        self._viewer = None
        self.action_space = gym.spaces.Box(*self.action_spec)
        self._init_eef = np.array([0.58218024, -0.01407946, 0.90169981])
        o = self._get_observation()
        self._obs_keys = ['cube_pos', 'cube_quat', 'eef_pos', 'joint_pos',
                          'joint_vel', 'gripper_qpos', 'gripper_qvel']
        observation_space = gym.spaces.Box(
            -np.inf, np.inf, (sum([len(o[k]) for k in self._obs_keys]),))
        if objgrip:
            self.ag = lambda obs: np.concatenate((obs[:3], obs[7:10]))
        else:
            self.ag = lambda obs: obs[:3]
        if objgrip:
            goal_space = gym.spaces.Box(-np.inf, np.inf, (6,))
        else:
            goal_space = gym.spaces.Box(-np.inf, np.inf, (3,))
        self.observation_space = gym.spaces.Dict({
            'observation': observation_space,
            'desired_goal': goal_space,
            'achieved_goal': goal_space
        })
        self.max_steps = 50
        self.num_steps = 0
        self.dist_threshold = 0.05
        if objgrip:
            self.dist_threshold = 0.07
        self.test = test
        self.use_dense = use_dense
        self.objgrip = objgrip
        self.goal = None
        self._prev_state = None

    def reset(self):
        """
        Need to override reset because the original is awfully inefficient
        (it reloads Mujoco Sim entirely)
        """
        self.num_steps = 0

        # From base.py
        self.sim.set_state(self.sim_state_initial)
        self.initialize_time(self.control_freq)
        self._get_reference()
        self.cur_time = 0
        self.timestep = 0
        self.done = False

        # From sawyer.py
        self.sim.data.qpos[self._ref_joint_pos_indexes] = self.mujoco_robot.init_qpos
        if self.has_gripper:
            self.sim.data.qpos[
                self._ref_gripper_joint_pos_indexes
            ] = self.gripper.init_qpos

        # From sawyer_lift.py
        # reset positions of objects
        # self.model.place_objects() <- doesn't actually work, because this
        # operates on the model and not the sim...
        init_pos = self.sim.data.get_joint_qpos('cube')
        init_pos[:2] += np.random.uniform(-0.1, 0.1, 2)
        self.sim.data.set_joint_qpos('cube', init_pos)

        # reset goal position
        self.goal = init_pos[:3] + np.array([0., 0., 0.12])
        self.move_indicator(self.goal)
        if self.objgrip:
            self.goal = np.concatenate((self.goal, self.goal))

        # reset joint positions
        init_pos = np.array([-0.5538, -0.8208, 0.4155, 1.8409, -0.4955, 0.6482, 1.9628])
        init_pos += np.random.randn(init_pos.shape[0]) * 0.02
        self.sim.data.qpos[self._ref_joint_pos_indexes] = np.array(init_pos)

        # And again from base.py
        self.sim.forward()

        obs = self._get_observation()
        obs = np.concatenate([obs[k] for k in self._obs_keys])
        ag = self.ag(obs)
        self._prev_state = obs
        obs = {
            'observation': obs,
            'achieved_goal': ag,
            'desired_goal': self.goal,
        }
        return obs

    def seed(self, seed=None):
        np.random.seed(seed)

    def compute_reward(self, ag, dg, info):
        d = np.linalg.norm(ag - dg, axis=-1)
        reward = -(d >= self.dist_threshold).astype(np.float32)
        success = 1. + reward
        failure = -1 * reward
        reward -= d * success
        ns = info['ns']
        if len(ns.shape) == 1:
            reward -= (0.2 * np.tanh(np.mean(np.abs(info['s'][7:17] - ns[7:17]), axis=-1)) * success)  # Penalize successes that move
        else:
            reward -= (0.2 * np.tanh(np.mean(np.abs(info['s'][:, 7:17] - ns[:, 7:17]), axis=-1)) * success)  # Penalize successes that move
        # add dense reward discourages agent from being far away from the cube
        if self.use_dense:
            if len(ns.shape) == 1:
                reward -= np.linalg.norm(ns[:3] - ns[7:10]) * failure
            else:
                reward -= np.linalg.norm(ns[:, :3] - ns[:, 7:10], axis=-1) * failure
        return reward

    def is_success(self, ag, dg):
        d = np.linalg.norm(ag[:3] - dg[:3], axis=-1)
        return d <= self.dist_threshold

    def step(self, action):
        obs, _, _, _ = super().step(action)
        obs = np.concatenate([obs[k] for k in self._obs_keys])
        ag = self.ag(obs)
        eef = obs[7:10]
        reward = self.compute_reward(ag, self.goal, {'s': obs, 'ns': self._prev_state})
        self._prev_state = obs
        obs = {
            'observation': obs,
            'achieved_goal': ag,
            'desired_goal': self.goal,
        }
        if not self.test:
            info = {'is_success': self.is_success(ag, self.goal)}
        elif self.test:
            info = {'is_success': self._check_success()}
        self.num_steps += 1
        done = True if self.num_steps >= self.max_steps else False
        if done:
            info['TimeLimit.truncated'] = True
        return obs, reward, done, info

    def render(self):
        """
        Fix Robosuite render method, so that Viewer is only spawned on render
        """
        if self._viewer is None:
            self._viewer = MujocoPyRenderer(self.sim)
            self._viewer.viewer.vopt.geomgroup[0] = (
                1 if self.render_collision_mesh else 0
            )
            self._viewer.viewer.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
            # hiding the overlay speeds up rendering significantly
            self._viewer.viewer._hide_overlay = True
            self._viewer.viewer._render_every_frame = True
        time.sleep(1 / self.control_freq)
        self._viewer.render()


class SawyerReach(SawyerLift, gym.GoalEnv):
    """
    Wraps SawyerLift with a toy Reaching task.
    """

    def __init__(self):
        super().__init__(
            use_camera_obs=False,
            has_renderer=False,
            use_indicator_object=True,
            has_offscreen_renderer=False,
            horizon=50,
            ignore_done=True
        )
        self._viewer = None
        self.action_space = gym.spaces.Box(*self.action_spec)
        self._init_eef = np.array([0.58218024, -0.01407946, 0.90169981])
        o = self._get_observation()
        self._obs_keys = ['eef_pos', 'joint_pos', 'joint_vel',
                          'gripper_qpos', 'gripper_qvel']  # , 'cube_pos', 'cube_quat']
        observation_space = gym.spaces.Box(
            -np.inf, np.inf, (sum([len(o[k]) for k in self._obs_keys]),))
        self.ag = lambda obs: obs[:3]
        goal_space = gym.spaces.Box(-np.inf, np.inf, o['eef_pos'].shape)
        self.observation_space = gym.spaces.Dict({
            'observation': observation_space,
            'desired_goal': goal_space,
            'achieved_goal': goal_space
        })
        self.max_steps = 50
        self.num_steps = 0
        self.dist_threshold = 0.05
        self._prev_state = None

    def reset(self):
        """
        Need to override reset because the original is awfully inefficient
        (it reloads Mujoco Sim entirely)
        """
        self.num_steps = 0

        # From base.py
        self.sim.set_state(self.sim_state_initial)
        self.initialize_time(self.control_freq)
        self._get_reference()
        self.cur_time = 0
        self.timestep = 0
        self.done = False

        # From sawyer.py
        self.sim.data.qpos[self._ref_joint_pos_indexes] = self.mujoco_robot.init_qpos
        if self.has_gripper:
            self.sim.data.qpos[
                self._ref_gripper_joint_pos_indexes
            ] = self.gripper.init_qpos

        # From sawyer_lift.py
        # reset positions of objects
        # self.model.place_objects() <- doesn't actually work, because this
        # operates on the model and not the sim...
        init_pos = self.sim.data.get_joint_qpos('cube')
        init_pos[:2] += np.random.uniform(-0.05, 0.05, 2)
        self.sim.data.set_joint_qpos('cube', init_pos)

        # reset joint positions
        init_pos = np.array([-0.5538, -0.8208, 0.4155, 1.8409, -0.4955, 0.6482, 1.9628])
        init_pos += np.random.randn(init_pos.shape[0]) * 0.02
        self.sim.data.qpos[self._ref_joint_pos_indexes] = np.array(init_pos)

        # reset goal position
        init_pos = self._init_eef
        proposal = init_pos + np.random.uniform(-0.2, 0.2, 3) + np.array([0., 0., 0.15])
        while np.linalg.norm(init_pos - proposal) < 0.05:
            proposal = init_pos + np.random.uniform(-0.2, 0.2, 3) + np.array([0., 0., 0.15])
        self.goal = proposal
        self.move_indicator(self.goal)

        # And again from base.py
        self.sim.forward()

        obs = self._get_observation()
        obs = np.concatenate([obs[k] for k in self._obs_keys])
        ag = self.ag(obs)
        self._prev_state = obs
        obs = {
            'observation': obs,
            'achieved_goal': ag,
            'desired_goal': self.goal,
        }
        return obs

    def seed(self, seed=None):
        np.random.seed(seed)

    def compute_reward(self, achieved_goal, desired_goal, info):
        d = np.linalg.norm(achieved_goal - desired_goal, axis=-1)
        reward = -(d >= self.dist_threshold).astype(np.float32)
        success = 1. + reward
        reward -= d * success  # penalize distance when successful
        if len(achieved_goal.shape) == 1:
            reward -= (0.2 * np.tanh(np.mean(np.abs(info['s'][0:10] - info['ns'][0:10]), axis=-1)) * success)  # Penalize successes that move
        else:
            reward -= (0.2 * np.tanh(np.mean(np.abs(info['s'][:, 0:10] - info['ns'][:, 0:10]), axis=-1)) * success)  # Penalize successes that move
        return reward

    def is_success(self, ag, dg):
        d = np.linalg.norm(ag[:3] - dg[:3], axis=-1)
        return d <= self.dist_threshold

    def step(self, action):
        obs, _, _, _ = super().step(action)
        obs = np.concatenate([obs[k] for k in self._obs_keys])
        ag = self.ag(obs)
        reward = self.compute_reward(ag, self.goal, {'s': self._prev_state, 'ns': obs})
        self._prev_state = obs
        obs = {
            'observation': obs,
            'achieved_goal': ag,
            'desired_goal': self.goal,
        }
        info = {'is_success': self.is_success(ag, self.goal)}
        self.num_steps += 1
        done = True if self.num_steps >= self.max_steps else False
        if done:
            info['TimeLimit.truncated'] = True
        return obs, reward, done, info

    def render(self):
        """
        Fix Robosuite render method, so that Viewer is only spawned on render
        """
        if self._viewer is None:
            self._viewer = MujocoPyRenderer(self.sim)
            self._viewer.viewer.vopt.geomgroup[0] = (
                1 if self.render_collision_mesh else 0
            )
            self._viewer.viewer.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
            # hiding the overlay speeds up rendering significantly
            self._viewer.viewer._hide_overlay = True
            self._viewer._render_every_frame = True
        time.sleep(1 / self.control_freq)
        self._viewer.render()
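Both environments follow the gym.GoalEnv contract: dict observations with observation/achieved_goal/desired_goal keys, and a compute_reward that can be re-evaluated on substituted goals, which is what makes hindsight relabelling possible. A hypothetical driver loop, assuming robosuite and MuJoCo are installed and the classes above are importable:

import numpy as np

env = GoalBasedSawyerLift()
obs = env.reset()
for _ in range(env.max_steps):
    action = env.action_space.sample()
    obs, reward, done, info = env.step(action)
    # HER-style relabelling: score the transition as if the achieved goal
    # had been desired all along (compute_reward expects s/ns in info).
    relabelled = env.compute_reward(
        obs['achieved_goal'], obs['achieved_goal'],
        {'s': obs['observation'], 'ns': obs['observation']})
    if done:
        break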
30.851312
136
0.64175
1,566
10,582
4.15198
0.141762
0.020455
0.016918
0.018302
0.864196
0.83036
0.814672
0.814672
0.804214
0.77084
0
0.035143
0.220185
10,582
343
137
30.851312
0.752787
0.118976
0
0.751055
0
0
0.047521
0
0
0
0
0
0
1
0.059072
false
0
0.025316
0
0.126582
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
36bb777efe76c70ae9583366a0fabafdc13564c3
10,484
py
Python
tests/test_create_generators_bulk.py
craft-ai/craft-ai-client-python
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
[ "BSD-3-Clause" ]
14
2016-08-26T07:06:57.000Z
2020-09-22T07:41:21.000Z
tests/test_create_generators_bulk.py
craft-ai/craft-ai-client-python
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
[ "BSD-3-Clause" ]
94
2016-08-02T14:07:59.000Z
2021-10-06T11:50:52.000Z
tests/test_create_generators_bulk.py
craft-ai/craft-ai-client-python
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
[ "BSD-3-Clause" ]
8
2017-02-07T12:05:57.000Z
2021-10-14T09:45:30.000Z
import unittest

from craft_ai import Client, errors as craft_err

from . import settings
from .utils import generate_entity_id
from .data import valid_data


class TestCreateGeneratorsBulkSuccess(unittest.TestCase):
    """Checks that the client succeeds when creating an/multiple generator(s)
    with OK input"""

    @classmethod
    def setUpClass(cls):
        cls.client = Client(settings.CRAFT_CFG)
        cls.agent_id1 = generate_entity_id("bulk_agent")
        cls.agent_id2 = generate_entity_id("bulk_agent")
        cls.generator_id1 = generate_entity_id("bulk_generator")
        cls.generator_id2 = generate_entity_id("bulk_generator")
        cls.filter = [cls.agent_id1, cls.agent_id2]

    def setUp(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)
        payload = [
            {"configuration": valid_data.VALID_CONFIGURATION, "id": self.agent_id1},
            {"configuration": valid_data.VALID_CONFIGURATION, "id": self.agent_id2},
        ]
        self.client.create_agents_bulk(payload)

    def tearDown(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def clean_up_generators(self):
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def test_create_one_generator(self):
        """create_generators_bulk should succeed when given a string ID in a set

        It should give a proper JSON response with `id` and `configuration`
        fields being strings and `id` being the same as the one given as
        a parameter."""
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration["filter"] = self.filter
        payload = [{"id": self.generator_id1, "configuration": generator_configuration}]
        resp = self.client.create_generators_bulk(payload)
        self.assertEqual(resp[0].get("id"), self.generator_id1)
        self.addCleanup(self.clean_up_generators)

    def test_create_multiple_generators(self):
        """create_generators_bulk should succeed when given a set of string IDs

        It should give a proper JSON response with `id` and `configuration`
        fields being strings and `id` being the same as the one given as
        a parameter."""
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration["filter"] = self.filter
        payload = [
            {"id": self.generator_id1, "configuration": generator_configuration},
            {"id": self.generator_id2, "configuration": generator_configuration},
        ]
        resp = self.client.create_generators_bulk(payload)
        self.assertEqual(resp[0].get("id"), self.generator_id1)
        self.assertEqual(resp[1].get("id"), self.generator_id2)
        self.addCleanup(self.clean_up_generators)


class TestCreateGeneratorsBulkFailure(unittest.TestCase):
    """Checks that the client fails when creating an/multiple generator(s)
    with bad input"""

    @classmethod
    def setUpClass(cls):
        cls.client = Client(settings.CRAFT_CFG)
        cls.agent_id1 = generate_entity_id("test_create_generators_bulk_agent")
        cls.agent_id2 = generate_entity_id("test_create_generators_bulk_agent")
        cls.generator_id1 = generate_entity_id("test_create_generators_bulk_generator")
        cls.generator_id2 = generate_entity_id("test_create_generators_bulk_generator")
        cls.filter = [cls.agent_id1, cls.agent_id2]

    def setUp(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)
        payload = [
            {"configuration": valid_data.VALID_CONFIGURATION},
            {"configuration": valid_data.VALID_CONFIGURATION},
        ]
        self.client.create_agents_bulk(payload)

    def tearDown(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def clean_up_generators(self):
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def test_create_multiple_generators_with_invalid_ids(self):
        """create_generators_bulk should fail when given a set of invalid IDs

        It should raise an error upon request for creation of all generators
        with invalid id."""
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration["filter"] = self.filter
        payload = [
            {"id": 123, "configuration": generator_configuration},
            {"id": 345, "configuration": generator_configuration},
        ]
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.create_generators_bulk,
            payload,
        )

    def test_create_multiple_generators_with_undefined_configurations(self):
        """create_generators_bulk should fail when given no configurations

        It should raise an error upon request for creation of all generators
        with undefined configurations."""
        payload = [{"id": self.generator_id1}, {"id": self.generator_id2}]
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.create_generators_bulk,
            payload,
        )

    def test_create_multiple_generators_with_no_filter(self):
        """create_generators_bulk should fail when given no filter

        It should raise an error upon request for creation of all generators
        with no filter."""
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration.pop("filter", None)
        payload = [
            {"id": self.generator_id1, "configuration": generator_configuration},
            {"id": self.generator_id2, "configuration": generator_configuration},
        ]
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.create_generators_bulk,
            payload,
        )


class TestCreateGeneratorsBulkSomeFailure(unittest.TestCase):
    """Checks that the client succeeds when creating an/multiple generator(s)
    with bad input and an/multiple generator(s) with valid input"""

    @classmethod
    def setUpClass(cls):
        add = "3"
        cls.client = Client(settings.CRAFT_CFG)
        cls.agent_id1 = generate_entity_id("test_create_gen_bulk_ag" + add)
        cls.agent_id2 = generate_entity_id("test_create_gen_bulk_ag" + add)
        cls.generator_id1 = generate_entity_id("test_create_gen_bulk_gen" + add)
        cls.generator_id2 = generate_entity_id("test_create_gen_bulk_gen" + add)
        cls.filter = [cls.agent_id1, cls.agent_id2]

    def setUp(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)
        self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id1)
        self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id2)

    def tearDown(self):
        self.client.delete_agent(self.agent_id1)
        self.client.delete_agent(self.agent_id2)
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def clean_up_generators(self):
        self.client.delete_generator(self.generator_id1)
        self.client.delete_generator(self.generator_id2)

    def test_create_some_generators_with_invalid_generator_id(self):
        """create_generators_bulk should succeed when some of the IDs given
        are invalid and the others are valid.

        It should give a proper JSON response with a list containing dicts.
        The ones having invalid IDs have the `error` field being a
        CraftAiBadRequestError. The ones having valid IDs have the
        `configuration` field being strings. In either case they should have
        'id' being the same as the one given as a parameter.
        """
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration["filter"] = self.filter
        payload = [
            {"id": self.generator_id1, "configuration": generator_configuration},
            {"id": 123, "configuration": generator_configuration},
        ]
        resp = self.client.create_generators_bulk(payload)
        self.assertEqual(resp[0].get("id"), self.generator_id1)
        self.assertTrue("configuration" in resp[0])
        self.assertEqual(resp[1].get("id"), 123)
        self.assertIsInstance(resp[1].get("error"), craft_err.CraftAiBadRequestError)
        self.assertFalse("configuration" in resp[1])
        self.addCleanup(self.tearDown)

    def test_create_repeated_generator_id(self):
        """create_generators_bulk should succeed when generators in a bulk
        have the same ID given.

        It should give a proper JSON response with a list containing two
        dicts. The first one should have 'id' being the same as the one given
        as a parameter, and the `configuration` field being strings. The
        second one should have `id` being the same as the one given as a
        parameter and the 'error' field being a CraftAiBadRequestError.
        """
        generator_configuration = valid_data.VALID_GENERATOR_CONFIGURATION.copy()
        generator_configuration["filter"] = self.filter
        payload = [
            {"id": self.generator_id1, "configuration": generator_configuration},
            {"id": self.generator_id1, "configuration": generator_configuration},
        ]
        resp = self.client.create_generators_bulk(payload)
        self.assertEqual(resp[0].get("id"), self.generator_id1)
        self.assertTrue("configuration" in resp[0])
        self.assertIsInstance(resp[1].get("error"), craft_err.CraftAiBadRequestError)
        self.addCleanup(self.clean_up_generators)
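For reference, the bulk convention these tests exercise is simply a list of {id, configuration} dicts, where each generator configuration carries a `filter` naming the agents it aggregates, and each response item either echoes a configuration or carries an error. A minimal sketch, assuming a configured client and two existing agents (the ids, token, and the minimal configuration are placeholders, not real values):

from craft_ai import Client, errors as craft_err

client = Client({"token": "{{YOUR_TOKEN}}"})  # placeholder credentials
generator_configuration = {
    # Assumed minimal configuration; the real fields come from
    # valid_data.VALID_GENERATOR_CONFIGURATION in the tests above.
    "filter": ["agent-1", "agent-2"],  # agents the generator aggregates
}
payload = [{"id": "generator-1", "configuration": generator_configuration}]
resp = client.create_generators_bulk(payload)
for item in resp:
    # Per the tests above: successes carry `configuration`,
    # failures carry an `error` such as CraftAiBadRequestError.
    print(item.get("id"), "error" in item)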
42.967213
98
0.696299
1,257
10,484
5.565632
0.10183
0.058605
0.068611
0.064322
0.886364
0.840623
0.802601
0.797456
0.756146
0.680532
0
0.011076
0.21633
10,484
243
99
43.144033
0.840433
0.191053
0
0.685535
1
0
0.074106
0.028662
0
0
0
0
0.08805
1
0.119497
false
0
0.031447
0
0.169811
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
36f2c38d51e9e6d22fc64d8611224399c8d8fe4b
20,592
py
Python
angr/procedures/definitions/win32_oleacc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_oleacc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
angr/procedures/definitions/win32_oleacc.py
r4b3rt/angr
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
[ "BSD-2-Clause" ]
null
null
null
# pylint:disable=line-too-long
import logging

from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary

_l = logging.getLogger(name=__name__)

lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("oleacc.dll")
prototypes = \
    {
        #
        'LresultFromObject': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeBottom(label="IUnknown")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["riid", "wParam", "punk"]),
        #
        'ObjectFromLresult': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["lResult", "riid", "wParam", "ppvObject"]),
        #
        'WindowFromAccessibleObject': SimTypeFunction([SimTypeBottom(label="IAccessible"), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["param0", "phwnd"]),
        #
        'AccessibleObjectFromWindow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwnd", "dwId", "riid", "ppvObject"]),
        #
        'AccessibleObjectFromEvent': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IAccessible"), offset=0), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"llVal": SimTypeLongLong(signed=True, label="Int64"), "lVal": SimTypeInt(signed=True, label="Int32"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "parray": SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pllVal": SimTypePointer(SimTypeLongLong(signed=True, label="Int64"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "__OBSOLETE__VARIANT_PBOOL": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="VARIANT"), offset=0), "byref": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "cVal": SimTypeBottom(label="CHAR"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "ullVal": SimTypeLongLong(signed=False, label="UInt64"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pullVal": SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "Anonymous": SimStruct({"pvRecord": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "pRecInfo": SimTypeBottom(label="IRecordInfo")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="VARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwnd", "dwId", "dwChildId", "ppacc", "pvarChild"]),
        #
        'AccessibleObjectFromPoint': SimTypeFunction([SimStruct({"x": SimTypeInt(signed=True, label="Int32"), "y": SimTypeInt(signed=True, label="Int32")}, name="POINT", pack=False, align=None), SimTypePointer(SimTypeBottom(label="IAccessible"), offset=0), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"llVal": SimTypeLongLong(signed=True, label="Int64"), "lVal": SimTypeInt(signed=True, label="Int32"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "parray": SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pllVal": SimTypePointer(SimTypeLongLong(signed=True, label="Int64"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "__OBSOLETE__VARIANT_PBOOL": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="VARIANT"), offset=0), "byref": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "cVal": SimTypeBottom(label="CHAR"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "ullVal": SimTypeLongLong(signed=False, label="UInt64"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pullVal": SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "Anonymous": SimStruct({"pvRecord": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "pRecInfo": SimTypeBottom(label="IRecordInfo")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="VARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ptScreen", "ppacc", "pvarChild"]),
        #
        'AccessibleChildren': SimTypeFunction([SimTypeBottom(label="IAccessible"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"llVal": SimTypeLongLong(signed=True, label="Int64"), "lVal": SimTypeInt(signed=True, label="Int32"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "parray": SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pllVal": SimTypePointer(SimTypeLongLong(signed=True, label="Int64"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "__OBSOLETE__VARIANT_PBOOL": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimStruct({"cDims": SimTypeShort(signed=False, label="UInt16"), "fFeatures": SimTypeShort(signed=False, label="UInt16"), "cbElements": SimTypeInt(signed=False, label="UInt32"), "cLocks": SimTypeInt(signed=False, label="UInt32"), "pvData": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "rgsabound": SimTypePointer(SimStruct({"cElements": SimTypeInt(signed=False, label="UInt32"), "lLbound": SimTypeInt(signed=True, label="Int32")}, name="SAFEARRAYBOUND", pack=False, align=None), offset=0)}, name="SAFEARRAY", pack=False, align=None), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="VARIANT"), offset=0), "byref": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "cVal": SimTypeBottom(label="CHAR"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "ullVal": SimTypeLongLong(signed=False, label="UInt64"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pullVal": SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "Anonymous": SimStruct({"pvRecord": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "pRecInfo": SimTypeBottom(label="IRecordInfo")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="VARIANT", pack=False, align=None), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["paccContainer", "iChildStart", "cChildren", "rgvarChildren", "pcObtained"]),
        #
        'GetRoleTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lRole", "lpszRole", "cchRoleMax"]),
        #
        'GetRoleTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lRole", "lpszRole", "cchRoleMax"]),
        #
        'GetStateTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lStateBit", "lpszState", "cchState"]),
        #
        'GetStateTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["lStateBit", "lpszState", "cchState"]),
        #
        'GetOleaccVersionInfo': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pVer", "pBuild"]),
        #
        'CreateStdAccessibleObject': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwnd", "idObject", "riid", "ppvObject"]),
        #
        'CreateStdAccessibleProxyA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwnd", "pClassName", "idObject", "riid", "ppvObject"]),
        #
        'CreateStdAccessibleProxyW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwnd", "pClassName", "idObject", "riid", "ppvObject"]),
        #
        'AccSetRunningUtilityState': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="ACC_UTILITY_STATE_FLAGS")], SimTypeInt(signed=True, label="Int32"), arg_names=["hwndApp", "dwUtilityStateMask", "dwUtilityState"]),
        #
        'AccNotifyTouchInteraction': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimStruct({"x": SimTypeInt(signed=True, label="Int32"), "y": SimTypeInt(signed=True, label="Int32")}, name="POINT", pack=False, align=None)], SimTypeInt(signed=True, label="Int32"), arg_names=["hwndApp", "hwndTarget", "ptTarget"]),
    }

lib.set_prototypes(prototypes)
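Each prototype entry gives angr the typed signature it needs when simulating a call into oleacc.dll. A small sketch of looking one up after this module has registered the library; the attribute and dict names here reflect one angr snapshot and should be treated as assumptions:

from angr.procedures.definitions import SIM_LIBRARIES

oleacc = SIM_LIBRARIES["oleacc.dll"]       # key registered by set_library_names above
proto = oleacc.prototypes["GetRoleTextW"]  # a SimTypeFunction
print(proto.arg_names)                     # ['lRole', 'lpszRole', 'cchRoleMax']
print(proto.returnty)                      # UInt32, per the definition above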
367.714286
4,929
0.738879
2,239
20,592
6.755695
0.091559
0.066177
0.092027
0.092556
0.911279
0.909295
0.906783
0.90632
0.890454
0.887479
0
0.02618
0.066968
20,592
55
4,930
374.4
0.761099
0.00136
0
0
0
0
0.205378
0.019339
0
0
0
0
0
1
0
false
0
0.16129
0
0.16129
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
3d44dceed074cc73684b46b47ce8ba15dad54970
30,714
py
Python
lms_app/tests/test_api.py
sashanight/LMS
8290805d9c3fd2b7ae50896ba75ee112b50619e3
[ "MIT" ]
null
null
null
lms_app/tests/test_api.py
sashanight/LMS
8290805d9c3fd2b7ae50896ba75ee112b50619e3
[ "MIT" ]
null
null
null
lms_app/tests/test_api.py
sashanight/LMS
8290805d9c3fd2b7ae50896ba75ee112b50619e3
[ "MIT" ]
null
null
null
from django.test import TestCase
from django.urls import reverse

from lms_app.urls import urlpatterns
from .. import views
from ..models import *


class AuthTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        user = User(e_mail='foo@example.com')
        user.set_password("password1")
        user.save()

    def test_success_auth(self):
        data = {'email': 'foo@example.com', 'password': 'password1'}
        resp = self.client.post(reverse('lms_app:authentication'), data)
        self.assertEqual(resp.status_code, 200)

    def test_bad_password(self):
        data = {'email': 'foo@example.com', 'password': 'password2'}
        resp = self.client.post(reverse('lms_app:authentication'), data)
        self.assertEqual(resp.status_code, 406)

    def test_no_user(self):
        data = {'email': 'foo1@example.com', 'password': 'password2'}
        resp = self.client.post(reverse('lms_app:authentication'), data)
        self.assertEqual(resp.status_code, 404)

    def test_bad_request(self):
        data = {'EMAIL': 'foo@example.com', 'password': 'password1'}
        resp = self.client.post(reverse('lms_app:authentication'), data)
        self.assertEqual(resp.status_code, 400)


class RegistrationTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        user = User(FIO="Knyazev Alexander")
        user.save()

    def test_success_registration(self):
        user = User.objects.get(id=1)
        code = user.verification_code
        data = {'verification_code': code, 'email': 'foo@example.com', 'password': 'password1'}
        resp = self.client.post(reverse('lms_app:registration'), data)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(User.objects.get(id=1).e_mail, data['email'])

    def test_bad_verification_code(self):
        user = User.objects.get(id=1)
        code = user.verification_code
        data = {'verification_code': "----", 'email': 'foo@example.com', 'password': 'password1'}
        resp = self.client.post(reverse('lms_app:registration'), data)
        self.assertEqual(resp.status_code, 404)

    def test_bad_request(self):
        user = User.objects.get(id=1)
        code = user.verification_code
        data = {'verification_code': code, 'email': 'foo@example.com'}
        resp = self.client.post(reverse('lms_app:registration'), data)
        self.assertEqual(resp.status_code, 400)

    def test_bad_email(self):
        user = User.objects.get(id=1)
        code = user.verification_code
        data = {'verification_code': code, 'email': 'fooexample', 'password': 'password1'}
        resp = self.client.post(reverse('lms_app:registration'), data)
        self.assertEqual(resp.status_code, 403)
        self.assertTrue('answer' in resp.json())
        self.assertEqual(resp.json()['answer'], "Bad e-mail")

    def test_bad_password(self):
        user = User.objects.get(id=1)
        code = user.verification_code
        data = {'verification_code': code, 'email': 'foo@example.com', 'password': '1234'}
        resp = self.client.post(reverse('lms_app:registration'), data)
        self.assertEqual(resp.status_code, 403)


class MyProfileWatchTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        user = User(FIO="Knyazev Alexander", e_mail='foo@example.com')
        user.set_password('password1')
        user.save()
        access_token = AccessToken()
        access_token.user = user
        access_token.token = '123456'
        access_token.save()

    def test_success_case(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('FIO' in resp.json())
        self.assertEqual(resp.json()['FIO'], 'Knyazev Alexander')

    def test_invalid_token(self):
        data = {'token': '12345678'}
        resp = self.client.get(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 401)


class MyProfileEditTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        user = User(FIO="Knyazev Alexander", e_mail='foo@example.com')
        user.set_password('password1')
        user.save()
        access_token = AccessToken()
        access_token.user = user
        access_token.token = '123456'
        access_token.save()

    def test_success_case(self):
        data = {'token': '123456', 'vk_link': "https://vk.com/example",
                'phone_number': '+79671219023', 'person_info': 'I am student',
                'hometown': 'Moscow', 'password': 'password2', 'old_password': 'password1'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(User.objects.get(id=1).hometown, data['hometown'])

    def test_invalid_token(self):
        data = {'token': '12345678', 'vk_link': "https://vk.com/example",
                'phone_number': '+79671219023', 'person_info': 'I am student',
                'hometown': 'Moscow'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 401)

    def test_invalid_profile_link(self):
        data = {'token': '123456', 'vk_link': "vk.com/example",
                'phone_number': '+79671219023', 'person_info': 'I am student',
                'hometown': 'Moscow'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 400)

    def test_invalid_phone_number(self):
        data = {'token': '123456', 'vk_link': "vk.com/example",
                'phone_number': '89671219023', 'person_info': 'I am student',
                'hometown': 'Moscow'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 400)

    def test_not_editable_field(self):
        data = {'token': '123456', 'fio': 'Knyazev Aleksandr', 'vk_link': "vk.com/example",
                'phone_number': '89671219023', 'person_info': 'I am student',
                'hometown': 'Moscow'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 400)

    def test_failed_password_change(self):
        data = {'token': '123456', 'password': 'password2'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 400)

    def test_failed_password_change2(self):
        data = {'token': '123456', 'password': 'password2', 'old_password': 'password0'}
        resp = self.client.post(reverse('lms_app:my_profile'), data)
        self.assertEqual(resp.status_code, 400)


class GetUserProfileTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        user = User(FIO="Knyazev Alexander", e_mail='foo@example.com')
        user.set_password("password1")
        user.save()
        access_token = AccessToken()
        access_token.user = user
        access_token.token = '123456'
        access_token.save()
        user2 = User(FIO="Kotov Sergey", e_mail='foo2@example.com', vk_link='https://vk.com/KS')
        user2.set_password('password2')
        user2.save()

    def test_success_access_by_id(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:user_profile', kwargs={"user_id": 2}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json()["FIO"], "Kotov Sergey")

    def test_success_access_by_link(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:user_profile',
                                       kwargs={"link_type": "vk", "link_text": "KS"}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json()["FIO"], "Kotov Sergey")

    def test_bad_token(self):
        data = {'token': '1234567'}
        resp = self.client.get(reverse('lms_app:user_profile',
                                       kwargs={"link_type": "vk", "link_text": "KS"}), data)
        self.assertEqual(resp.status_code, 401)

    def test_bad_link(self):
        data = {'token': '123456', 'link_to_user': 'https://vk.com/KSergey'}
        resp = self.client.get(reverse('lms_app:user_profile',
                                       kwargs={"link_type": "vk", "link_text": "KSerg"}), data)
        self.assertEqual(resp.status_code, 404)

    def test_bad_uid(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:user_profile', kwargs={"user_id": 10}), data)
        self.assertEqual(resp.status_code, 404)

    def test_bad_request(self):
        data = {'token': '123456', 'fio': 'Kotov Sergey'}
        resp = self.client.get(reverse('lms_app:user_profile',
                                       kwargs={"link_type": "twitter", "link_text": "KS"}), data)
        self.assertEqual(resp.status_code, 400)


class GetClassmatesTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        group = Group(group_name="492", department_name="diht", course_number=4)
        group.save()
        group2 = Group(group_name="493", department_name="diht", course_number=4)
        group2.save()
        student1 = Student(FIO="Knyazev Alexander", e_mail='foo@example.com',
                           degree='Bachelor', form_of_study='Full-time',
                           learning_base='Budget', group=group)
        student1.set_password('password1')
        student1.save()
        access_token = AccessToken()
        access_token.user = student1
        access_token.token = '123456'
        access_token.save()
        student2 = Student(FIO="Alexey", e_mail='foo2@example.com',
                           degree='Bachelor', form_of_study='Full-time',
                           learning_base='Budget', group=group)
        student2.set_password('password2')
        student2.save()
        student3 = Student(FIO="Vladimir", e_mail='foo3@example.com',
                           degree='Bachelor', form_of_study='Full-time',
                           learning_base='Budget', group=group)
        student3.set_password('password3')
        student3.save()
        student4 = Student(FIO="Boris", e_mail='foo4@example.com',
                           degree='Bachelor', form_of_study='Full-time',
                           learning_base='Budget', group=group2)
        student4.set_password('password4')
        student4.save()

    def test_success_case(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:my_classmates'), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(resp.json()), 2)
        classmates_FIO = [classmate['FIO'] for classmate in resp.json()]
        self.assertTrue("Alexey" in classmates_FIO)
        self.assertTrue("Vladimir" in classmates_FIO)
        self.assertTrue("Boris" not in classmates_FIO)

    def test_bad_token(self):
        data = {'token': '1234567'}
        resp = self.client.get(reverse('lms_app:my_classmates'), data)
        self.assertEqual(resp.status_code, 401)


class GetCoursesListTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        group = Group(group_name="492", department_name="diht", course_number=4)
        group.save()
        student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com',
                          degree='Bachelor', form_of_study='Full-time',
                          learning_base='Budget', group=group)
        student.set_password('password1')
        student.save()
        access_token = AccessToken()
        access_token.user = student
        access_token.token = '123456'
        access_token.save()
        teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com')
        teacher.set_password('password2')
        teacher.save()
        access_token2 = AccessToken()
        access_token2.user = teacher
        access_token2.token = '123457'
        access_token2.save()
        course1 = Course(course_name="Matan")
        course1.save()
        course1.groups_of_course.add(group)
        course1.course_instructors.add(teacher)
        course1.save()
        course2 = Course(course_name="English")
        course2.save()
        course2.groups_of_course.add(group)
        course2.course_instructors.add(teacher)
        course2.save()

    def test_success_student_case(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:courses_list'), data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('list_of_courses' in resp.json())
        self.assertEqual(len(resp.json()['list_of_courses']), 2)

    def test_success_teacher_case(self):
        data = {'token': '123457'}
        resp = self.client.get(reverse('lms_app:courses_list'), data)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('list_of_courses' in resp.json())
        self.assertEqual(len(resp.json()['list_of_courses']), 2)

    def test_bad_token(self):
        data = {'token': '123'}
        resp = self.client.get(reverse('lms_app:courses_list'), data)
        self.assertEqual(resp.status_code, 401)


class GetCourseInfoTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        group = Group(group_name="492", department_name="diht", course_number=4)
        group.save()
        student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com',
                          degree='Bachelor', form_of_study='Full-time',
                          learning_base='Budget', group=group)
        student.set_password('password1')
        student.save()
        access_token = AccessToken()
        access_token.user = student
        access_token.token = '123456'
        access_token.save()
        course = Course(course_name='Matan', description='---')
        course.save()
        course.groups_of_course.add(group)
        course.trusted_individuals.add(student)
        course.save()
        material = CourseMaterial()
        material.material_name = "Lecture 1"
        material.content = "----"
        material.course = course
        material.save()
        task = Task()
        task.task_name = "HW 1"
        task.course = course
        task.description = "Solve smth"
        task.start = datetime.datetime.now()
        task.end = datetime.datetime.now() + datetime.timedelta(days=7)
        task.save()

    def test_success_case(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:course_info', kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 200)

    def test_invalid_course_name(self):
        data = {'token': '123456'}
        resp = self.client.get(reverse('lms_app:course_info', kwargs={"course_name": "Linal"}), data)
        self.assertEqual(resp.status_code, 404)

    def test_bad_token(self):
        data = {'token': '12345678'}
        resp = self.client.get(reverse('lms_app:course_info', kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 401)


class ManageCourseMaterialTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        group = Group(group_name="492", department_name="diht", course_number=4)
        group.save()
        student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com',
                          degree='Bachelor', form_of_study='Full-time',
                          learning_base='Budget', group=group)
        student.set_password('password1')
        student.save()
        access_token = AccessToken()
        access_token.user = student
        access_token.token = '123456'
        access_token.save()
        teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com')
        teacher.set_password('password2')
        teacher.save()
        access_token2 = AccessToken()
        access_token2.user = teacher
        access_token2.token = '123457'
        access_token2.save()
        student2 = Student(FIO="Kotov Sergey", e_mail='foo@example.com',
                           degree='Bachelor', form_of_study='Full-time',
                           learning_base='Budget', group=group)
        student2.set_password('password2')
        student2.save()
        access_token3 = AccessToken()
        access_token3.user = student2
        access_token3.token = '123458'
        access_token3.save()
        course = Course(course_name='Matan', description='---')
        course.save()
        course.groups_of_course.add(group)
        course.trusted_individuals.add(student)
        course.course_instructors.add(teacher)
        course.save()
        course_material = CourseMaterial(material_name="Lecture 1", course=course, content='Intro')
        course_material.save()

    def test_success_add_by_teacher(self):
        data = {'token': '123457', 'course_material_name': 'Lecture 2',
                'course_material_body': 'No info'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(Course.objects.get(course_name="Matan").coursematerial_set.count(), 2)

    def test_success_add_by_student(self):
        data = {'token': '123456', 'course_material_name': 'Lecture 2',
                'course_material_body': 'No info'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(Course.objects.get(course_name="Matan").coursematerial_set.count(), 2)

    def test_success_mod_by_student(self):
        data = {'token': '123456', 'course_material_name': 'Lecture 1',
                'course_material_body': 'No info'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(Course.objects.get(course_name="Matan").coursematerial_set.count(), 1)
        print(Course.objects.get(course_name="Matan").coursematerial_set.all()[0].material_name)
        self.assertEqual(Course.objects.get(course_name="Matan").coursematerial_set.all()[0].content, "No info")

    def test_no_student_access(self):
        data = {'token': '123458', 'course_material_name': 'Lecture 1',
                'course_material_body': 'No info'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 403)

    def test_success_delete_by_teacher(self):
        data = {'token': '123456', 'course_material_name': 'Lecture 1'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(CourseMaterial.objects.filter(course__course_name="Matan",
                                                       material_name="Lecture 1").count(), 0)
        self.assertEqual(resp.status_code, 200)

    def test_bad_course(self):
        data = {'token': '123457', 'course_material_name': 'Lecture 1',
                'course_material_body': 'No info'}
        resp = self.client.post(reverse('lms_app:course_materials',
                                        kwargs={"course_name": "Linal"}), data)
        self.assertEqual(resp.status_code, 404)


class AddTrustedIndividualsTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        group = Group(group_name="492", department_name="diht", course_number=4)
        group.save()
        student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com',
                          degree='Bachelor', form_of_study='Full-time',
                          learning_base='Budget', group=group)
        student.set_password('password1')
        student.save()
        access_token = AccessToken()
        access_token.user = student
        access_token.token = '123456'
        access_token.save()
        teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com')
        teacher.set_password('password2')
        teacher.save()
        access_token2 = AccessToken()
        access_token2.user = teacher
        access_token2.token = '123457'
        access_token2.save()
        course = Course(course_name='Matan', description='---')
        course.save()
        course.groups_of_course.add(group)
        course.course_instructors.add(teacher)
        course.save()
        course2 = Course(course_name='Linal', description='---')
        course2.save()
        course2.groups_of_course.add(group)
        course2.save()

    def test_success_case(self):
        data = {'token': '123457', 'trusted_individual_id': 1}
        self.assertEqual(Course.objects.get(course_name="Matan").trusted_individuals.count(), 0)
        resp = self.client.post(reverse('lms_app:trusted_individuals',
                                        kwargs={"course_name": "Matan"}), data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(Course.objects.get(course_name="Matan").trusted_individuals.count(), 1)

    def test_bad_teacher_access(self):
        data = {'token': '123457', 'trusted_individual_id': 1}
        resp = self.client.post(reverse('lms_app:trusted_individuals',
                                        kwargs={"course_name": "Linal"}), data)
        self.assertEqual(resp.status_code, 403)

    def test_bad_student_access(self):
        data = {'token': '123456', 'course_name': 'Matan', 'trusted_individual_id': 1}
        resp = self.client.post(reverse('lms_app:trusted_individuals', kwargs={"course_name": "Matan"}),
data) self.assertEqual(resp.status_code, 403) def test_bad_request(self): data = {'token': '123456', 'FIO': "Knyazev Alexander"} resp = self.client.post(reverse('lms_app:trusted_individuals', kwargs={"course_name": "Philosophy"}), data) self.assertEqual(resp.status_code, 400) def test_invalid_token(self): data = {'token': '123', 'trusted_individual_id': 1} resp = self.client.post(reverse('lms_app:trusted_individuals', kwargs={'course_name': 'Matan'}), data) self.assertEqual(resp.status_code, 401) class ManageCourseTasksTest(TestCase): @classmethod def setUpTestData(cls): teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com') teacher.set_password('passwors2') teacher.save() access_token = AccessToken() access_token.user = teacher access_token.token = '123457' access_token.save() course = Course(course_name='Matan', description='---') course.save() course.course_instructors.add(teacher) course.save() course2 = Course(course_name='Linal', description='---') course2.save() task = Task(task_name="HW 1", course=course, description='---', start=datetime.datetime.now(), end=datetime.datetime.now() + datetime.timedelta(days=7)) task.save() def test_success_add(self): data = {'token': '123457', 'task_name': 'HW 2', 'task_body': 'Solve smth', 'task_start': datetime.datetime.now(), 'task_end': datetime.datetime.now() + datetime.timedelta(days=7)} self.assertEqual(Task.objects.filter(course__course_name="Matan").count(), 1) resp = self.client.post(reverse('lms_app:course_task', kwargs={"course_name": "Matan"}), data) self.assertEqual(resp.status_code, 200) self.assertEqual(Task.objects.filter(course__course_name="Matan").count(), 2) def test_success_mod(self): data = {'token': '123457', 'task_name': 'HW 1', 'task_body': 'Solve smth', 'task_start': datetime.datetime.now(), 'task_end': datetime.datetime.now() + datetime.timedelta(days=9)} resp = self.client.post(reverse('lms_app:course_task', kwargs={"course_name": "Matan"}), data) self.assertEqual(resp.status_code, 200) self.assertEqual(Task.objects.get(course__course_name="Matan", task_name="HW 1").description, "Solve smth") def test_success_delete(self): data = {'token': '123457', 'task_name': 'HW 1', 'task_start': datetime.datetime.now(), 'task_end': datetime.datetime.now() + datetime.timedelta(days=9)} resp = self.client.post(reverse('lms_app:course_task', kwargs={"course_name": "Matan"}), data) self.assertEqual(resp.status_code, 200) def test_no_teacher_access(self): data = {'token': '123457', 'task_name': 'HW 2', 'task_body': 'Solve smth', 'task_start': datetime.datetime.now(), 'task_end': datetime.datetime.now() + datetime.timedelta(days=7)} resp = self.client.post(reverse('lms_app:course_task', kwargs={'course_name': 'Linal'}), data) self.assertEqual(resp.status_code, 403) def test_bad_request(self): data = {'token': '123457', 'task_name': 'HW1', 'content': 'Solve'} resp = self.client.post(reverse('lms_app:course_task', kwargs={'course_name': 'Matan'}), data) self.assertEqual(resp.status_code, 400) def test_invalid_token(self): data = {'token': '123456', 'task_name': 'HW2', 'task_body': ''} resp = self.client.post(reverse('lms_app:course_task', kwargs={'course_name': 'Matan'}), data) self.assertEqual(resp.status_code, 401) class UploadTaskSolutionTest(TestCase): @classmethod def setUpTestData(cls): group = Group(group_name="492", department_name="diht", course_number=4) group.save() student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com', degree='Bachelor', form_of_study='Full-time', learning_base='Budget', group=group) 
student.set_password('password1') student.save() access_token = AccessToken() access_token.user = student access_token.token = '123456' access_token.save() teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com') teacher.set_password('password2') teacher.save() access_token2 = AccessToken() access_token2.user = teacher access_token2.token = '123457' access_token2.save() course = Course(course_name='Matan', description='---') course.save() course.course_instructors.add(teacher) course.groups_of_course.add(group) course.save() course2 = Course(course_name='Linal', description='---') course2.save() course2.course_instructors.add(teacher) course2.save() task = Task(task_name="HW 1", course=course, description='---', start=datetime.datetime.now(), end=datetime.datetime.now() + datetime.timedelta(days=7)) task.save() task2 = Task(task_name="First homework", course=course2, description='---', start=datetime.datetime.now(), end=datetime.datetime.now() + datetime.timedelta(days=7)) task2.save() def test_success_add(self): data = {'token': '123456', 'solution_body': 'answer 1'} resp = self.client.post(reverse('lms_app:task_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 200) self.assertEqual(Task.objects.get(course__course_name="Matan", id="1").tasksolution_set.all()[0].solution, "answer 1") def test_no_access(self): data = {'token': '123456', 'solution_body': 'answer 1'} resp = self.client.post(reverse('lms_app:task_solutions', kwargs={'course_name': 'Linal', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 403) def test_bad_request(self): data = {'token': '123456'} resp = self.client.post(reverse('lms_app:task_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 400) def test_invalid_token(self): data = {'token': '123', 'course_name': 'Matan', 'task_id': '1', 'solution_body': 'answer 1'} resp = self.client.post(reverse('lms_app:task_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 401) class WatchTaskSolutionTest(TestCase): @classmethod def setUpTestData(cls): group = Group(group_name="492", department_name="diht", course_number=4) group.save() student = Student(FIO="Knyazev Alexander", e_mail='foo@example.com', degree='Bachelor', form_of_study='Full-time', learning_base='Budget', group=group) student.set_password('password1') student.save() group2 = Group(group_name="493", department_name="diht", course_number=4) group2.save() student2 = Student(FIO="Sidorov Ivan", e_mail='foo3@example.com', degree='Bachelor', form_of_study='Full-time', learning_base='Budget', group=group2) student2.set_password('password3') student2.save() access_token = AccessToken() access_token.user = student access_token.token = '123456' access_token.save() teacher = Teacher(FIO="Ivanov Yuri", e_mail='foo2@example.com') teacher.set_password('password2') teacher.save() access_token2 = AccessToken() access_token2.user = teacher access_token2.token = '123457' access_token2.save() course = Course(course_name='Matan', description='---') course.save() course.course_instructors.add(teacher) course.groups_of_course.add(group) course.groups_of_course.add(group2) course.save() task = Task(task_name="HW 1", course=course, description='---', start=datetime.datetime.now(), end=datetime.datetime.now() + datetime.timedelta(days=7)) task.save() task_solution = TaskSolution(task=task, user=student, solution="Answer=1") task_solution.save() def test_success_case(self): 
data = {'token': '123457'} resp = self.client.get(reverse('lms_app:passed_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 200) self.assertEqual(len(resp.json()), 2) self.assertEqual(resp.json()['492']['Knyazev Alexander']['Sent'], 'Yes') def test_no_access(self): data = {'token': '123456'} resp = self.client.get(reverse('lms_app:passed_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 403) def test_bad_request(self): data = {'token': '123456'} resp = self.client.get(reverse('lms_app:passed_solutions', kwargs={'course_name': 'Linal', 'task_id': '2'}), data) self.assertEqual(resp.status_code, 404) def test_invalid_token(self): data = {'token': '123'} resp = self.client.get(reverse('lms_app:passed_solutions', kwargs={'course_name': 'Matan', 'task_id': '1'}), data) self.assertEqual(resp.status_code, 401)
42.131687
121
0.632741
3,607
30,714
5.192404
0.062379
0.063271
0.062897
0.076085
0.889903
0.862299
0.850179
0.828021
0.815046
0.789364
0
0.03565
0.220974
30,714
729
122
42.131687
0.747106
0
0
0.704974
0
0
0.19134
0.022041
0
0
0
0
0.147513
1
0.120069
false
0.073756
0.008576
0
0.150943
0.001715
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
3d59c4c93ba6b5658c57f472618209b0952ca6f8
231
py
Python
tests/whoosh_tests/tests/__init__.py
pbs/django-haystack
433e154f76a450ffc095792c6f2e051ef508fc2d
[ "BSD-3-Clause" ]
2
2015-01-10T09:14:47.000Z
2018-11-19T13:45:55.000Z
tests/whoosh_tests/tests/__init__.py
ericholscher/django-haystack
1fde37afa4921c2121a95a4902f2012bbf837bf1
[ "BSD-3-Clause" ]
1
2017-06-13T02:08:54.000Z
2017-06-13T02:22:15.000Z
tests/whoosh_tests/tests/__init__.py
ericholscher/django-haystack
1fde37afa4921c2121a95a4902f2012bbf837bf1
[ "BSD-3-Clause" ]
2
2015-08-11T17:00:42.000Z
2021-01-04T08:39:33.000Z
import warnings
warnings.simplefilter('ignore', Warning)

from whoosh_tests.tests.forms import *
from whoosh_tests.tests.inputs import *
from whoosh_tests.tests.whoosh_query import *
from whoosh_tests.tests.whoosh_backend import *
28.875
47
0.831169
32
231
5.8125
0.375
0.215054
0.322581
0.430108
0.483871
0.344086
0
0
0
0
0
0
0.090909
231
7
48
33
0.885714
0
0
0
0
0
0.025974
0
0
0
0
0
0
1
0
true
0
0.833333
0
0.833333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
180b34188f664bd915e690a35828c5fc8cc67fe3
18,395
py
Python
tests/test_summaries/test_submissions_summary.py
costas-basdekis/aox
63a90fb722f29d9b2d26041f9035f99b6b21615e
[ "MIT" ]
2
2021-11-10T22:38:49.000Z
2021-12-03T08:09:01.000Z
tests/test_summaries/test_submissions_summary.py
costas-basdekis/aox
63a90fb722f29d9b2d26041f9035f99b6b21615e
[ "MIT" ]
null
null
null
tests/test_summaries/test_submissions_summary.py
costas-basdekis/aox
63a90fb722f29d9b2d26041f9035f99b6b21615e
[ "MIT" ]
null
null
null
# flake8: noqa E501 from unittest import TestCase from aox.model import CombinedInfo, RepoInfo, AccountInfo from aox.summary import SubmissionsSummary from tests.utils import making_combined_info class TestSubmissionsSummary(TestCase): maxDiff = None def get_years_links(self, years): return "".join(map(self.get_year_links, years)) def get_year_links(self, year): return ( "\n" "[ch-{short_year}]: https://adventofcode.com/{year}\n" "[co-{short_year}]: year_{year}\n" + "".join( ( "[ch-{{short_year}}-{day:0>2}]: " "https://adventofcode.com/{{year}}/day/{day}\n" "[co-{{short_year}}-{day:0>2}]: " "year_{{year}}/day_{day:0>2}\n" ).format(day=day) for day in range(1, 26) ) ).format(year=year, short_year=str(year)[-2:]) def test_generate_with_no_info(self): combined_info = CombinedInfo.from_repo_and_account_infos( RepoInfo.from_roots([]), AccountInfo.from_collected_data({ "username": "Test User", "total_stars": 0, "years": {}, }), ) self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| |\n" "| ---: |\n" "| |\n" "| |\n" "| 1 |\n" "| 2 |\n" "| 3 |\n" "| 4 |\n" "| 5 |\n" "| 6 |\n" "| 7 |\n" "| 8 |\n" "| 9 |\n" "| 10 |\n" "| 11 |\n" "| 12 |\n" "| 13 |\n" "| 14 |\n" "| 15 |\n" "| 16 |\n" "| 17 |\n" "| 18 |\n" "| 19 |\n" "| 20 |\n" "| 21 |\n" "| 22 |\n" "| 23 |\n" "| 24 |\n" "| 25 |\n" "\n" "\n" "\n" )) def test_generate_with_one_year_with_no_stars(self): combined_info = CombinedInfo.from_repo_and_account_infos( RepoInfo.from_roots([]), AccountInfo.from_collected_data({ "username": "Test User", "total_stars": 0, "years": { 2020: {"year": 2020, "stars": 0, "days": {}}, }, }), ) self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| |\n" "| ---: |\n" "| |\n" "| |\n" "| 1 |\n" "| 2 |\n" "| 3 |\n" "| 4 |\n" "| 5 |\n" "| 6 |\n" "| 7 |\n" "| 8 |\n" "| 9 |\n" "| 10 |\n" "| 11 |\n" "| 12 |\n" "| 13 |\n" "| 14 |\n" "| 15 |\n" "| 16 |\n" "| 17 |\n" "| 18 |\n" "| 19 |\n" "| 20 |\n" "| 21 |\n" "| 22 |\n" "| 23 |\n" "| 24 |\n" "| 25 |\n" "\n" "\n" "\n" )) def test_generate_with_one_year_with_some_stars(self): combined_info = CombinedInfo.from_repo_and_account_infos( RepoInfo.from_roots([]), AccountInfo.from_collected_data({ "username": "Test User", "total_stars": 3, "years": { 2020: {"year": 2020, "stars": 3, "days": { 1: 2, 2: 1, 3: 0, }}, }, }), ) self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| | 2020 |\n" "| ---: | :---: |\n" "| | Code & [Challenges][ch-20] |\n" "| | 3 :star: / 0 :x: / 0 :grey_exclamation: |\n" "| 1 | Code :star: :star: [Challenge][ch-20-01] |\n" "| 2 | Code :star: [Challenge][ch-20-02] |\n" "| 3 | Code [Challenge][ch-20-03] |\n" "| 4 | Code [Challenge][ch-20-04] |\n" "| 5 | Code [Challenge][ch-20-05] |\n" "| 6 | Code [Challenge][ch-20-06] |\n" "| 7 | Code [Challenge][ch-20-07] |\n" "| 8 | Code [Challenge][ch-20-08] |\n" "| 9 | Code [Challenge][ch-20-09] |\n" "| 10 | Code [Challenge][ch-20-10] |\n" "| 11 | Code [Challenge][ch-20-11] |\n" "| 12 | Code [Challenge][ch-20-12] |\n" "| 13 | Code [Challenge][ch-20-13] |\n" "| 14 | Code [Challenge][ch-20-14] |\n" "| 15 | Code [Challenge][ch-20-15] |\n" "| 16 | Code [Challenge][ch-20-16] |\n" "| 17 | Code [Challenge][ch-20-17] |\n" "| 18 | Code [Challenge][ch-20-18] |\n" "| 19 | Code [Challenge][ch-20-19] |\n" "| 20 | Code [Challenge][ch-20-20] |\n" "| 21 | Code [Challenge][ch-20-21] |\n" "| 22 | Code [Challenge][ch-20-22] |\n" "| 23 | Code [Challenge][ch-20-23] |\n" "| 24 | Code [Challenge][ch-20-24] |\n" "| 25 | Code [Challenge][ch-20-25] |\n" + self.get_years_links([2020]) + "\n" )) def 
test_generate_with_one_year_with_some_stars_and_code(self): with making_combined_info( [ (2020, 1, 'a'), (2020, 1, 'b'), (2020, 2, 'a'), (2020, 2, 'b'), (2020, 3, 'a'), ], { "username": "Test User", "total_stars": 3, "years": { 2020: {"year": 2020, "stars": 3, "days": { 1: 2, 2: 1, 3: 0, }}, }, }, ) as combined_info: self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| | 2020 |\n" "| ---: | :---: |\n" "| | [Code][co-20] & [Challenges][ch-20] |\n" "| | 3 :star: / 2 :x: / 1 :grey_exclamation: |\n" "| 1 | [Code][co-20-01] :star: :star: [Challenge][ch-20-01] |\n" "| 2 | [Code][co-20-02] :star: :x: [Challenge][ch-20-02] |\n" "| 3 | [Code][co-20-03] :x: :grey_exclamation: [Challenge][ch-20-03] |\n" "| 4 | Code [Challenge][ch-20-04] |\n" "| 5 | Code [Challenge][ch-20-05] |\n" "| 6 | Code [Challenge][ch-20-06] |\n" "| 7 | Code [Challenge][ch-20-07] |\n" "| 8 | Code [Challenge][ch-20-08] |\n" "| 9 | Code [Challenge][ch-20-09] |\n" "| 10 | Code [Challenge][ch-20-10] |\n" "| 11 | Code [Challenge][ch-20-11] |\n" "| 12 | Code [Challenge][ch-20-12] |\n" "| 13 | Code [Challenge][ch-20-13] |\n" "| 14 | Code [Challenge][ch-20-14] |\n" "| 15 | Code [Challenge][ch-20-15] |\n" "| 16 | Code [Challenge][ch-20-16] |\n" "| 17 | Code [Challenge][ch-20-17] |\n" "| 18 | Code [Challenge][ch-20-18] |\n" "| 19 | Code [Challenge][ch-20-19] |\n" "| 20 | Code [Challenge][ch-20-20] |\n" "| 21 | Code [Challenge][ch-20-21] |\n" "| 22 | Code [Challenge][ch-20-22] |\n" "| 23 | Code [Challenge][ch-20-23] |\n" "| 24 | Code [Challenge][ch-20-24] |\n" "| 25 | Code [Challenge][ch-20-25] |\n" + self.get_years_links([2020]) + "\n" )) def test_generate_with_one_year_with_all_stars_and_code(self): with making_combined_info( [ (2020, day, part) for part in ['a', 'b'] for day in range(1, 26) ], { "username": "Test User", "total_stars": 50, "years": { 2020: {"year": 2020, "stars": 50, "days": { day: 2 for day in range(1, 26) }}, }, }, ) as combined_info: self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| | 2020 |\n" "| ---: | :---: |\n" "| | [Code][co-20] & [Challenges][ch-20] |\n" "| | 50 :star: :star: |\n" "| 1 | [Code][co-20-01] :star: :star: [Challenge][ch-20-01] |\n" "| 2 | [Code][co-20-02] :star: :star: [Challenge][ch-20-02] |\n" "| 3 | [Code][co-20-03] :star: :star: [Challenge][ch-20-03] |\n" "| 4 | [Code][co-20-04] :star: :star: [Challenge][ch-20-04] |\n" "| 5 | [Code][co-20-05] :star: :star: [Challenge][ch-20-05] |\n" "| 6 | [Code][co-20-06] :star: :star: [Challenge][ch-20-06] |\n" "| 7 | [Code][co-20-07] :star: :star: [Challenge][ch-20-07] |\n" "| 8 | [Code][co-20-08] :star: :star: [Challenge][ch-20-08] |\n" "| 9 | [Code][co-20-09] :star: :star: [Challenge][ch-20-09] |\n" "| 10 | [Code][co-20-10] :star: :star: [Challenge][ch-20-10] |\n" "| 11 | [Code][co-20-11] :star: :star: [Challenge][ch-20-11] |\n" "| 12 | [Code][co-20-12] :star: :star: [Challenge][ch-20-12] |\n" "| 13 | [Code][co-20-13] :star: :star: [Challenge][ch-20-13] |\n" "| 14 | [Code][co-20-14] :star: :star: [Challenge][ch-20-14] |\n" "| 15 | [Code][co-20-15] :star: :star: [Challenge][ch-20-15] |\n" "| 16 | [Code][co-20-16] :star: :star: [Challenge][ch-20-16] |\n" "| 17 | [Code][co-20-17] :star: :star: [Challenge][ch-20-17] |\n" "| 18 | [Code][co-20-18] :star: :star: [Challenge][ch-20-18] |\n" "| 19 | [Code][co-20-19] :star: :star: [Challenge][ch-20-19] |\n" "| 20 | [Code][co-20-20] :star: :star: [Challenge][ch-20-20] |\n" "| 21 | [Code][co-20-21] :star: :star: [Challenge][ch-20-21] |\n" "| 22 | [Code][co-20-22] :star: 
:star: [Challenge][ch-20-22] |\n" "| 23 | [Code][co-20-23] :star: :star: [Challenge][ch-20-23] |\n" "| 24 | [Code][co-20-24] :star: :star: [Challenge][ch-20-24] |\n" "| 25 | [Code][co-20-25] :star: :star: [Challenge][ch-20-25] |\n" + self.get_years_links([2020]) + "\n" )) def test_generate_with_multiple_years_with_different_stars_and_code(self): with making_combined_info( [ (2020, day, part) for part in ['a', 'b'] for day in range(1, 26) ] + [ (2018, 1, 'a'), (2018, 1, 'b'), (2018, 2, 'a'), (2018, 2, 'b'), (2018, 3, 'a'), ], { "username": "Test User", "total_stars": 53, "years": { 2020: {"year": 2020, "stars": 50, "days": { day: 2 for day in range(1, 26) }}, 2018: {"year": 2018, "stars": 3, "days": { 1: 2, 2: 1, 3: 0, }}, }, }, ) as combined_info: self.assertEqual(SubmissionsSummary().generate(combined_info), ( "\n\n" "| | 2020 | 2018 |\n" "| ---: | :---: | :---: |\n" "| | [Code][co-20] & [Challenges][ch-20] | [Code][co-18] & [Challenges][ch-18] |\n" "| | 50 :star: :star: | 3 :star: / 2 :x: / 1 :grey_exclamation: |\n" "| 1 | [Code][co-20-01] :star: :star: [Challenge][ch-20-01] | [Code][co-18-01] :star: :star: [Challenge][ch-18-01] |\n" "| 2 | [Code][co-20-02] :star: :star: [Challenge][ch-20-02] | [Code][co-18-02] :star: :x: [Challenge][ch-18-02] |\n" "| 3 | [Code][co-20-03] :star: :star: [Challenge][ch-20-03] | [Code][co-18-03] :x: :grey_exclamation: [Challenge][ch-18-03] |\n" "| 4 | [Code][co-20-04] :star: :star: [Challenge][ch-20-04] | Code [Challenge][ch-18-04] |\n" "| 5 | [Code][co-20-05] :star: :star: [Challenge][ch-20-05] | Code [Challenge][ch-18-05] |\n" "| 6 | [Code][co-20-06] :star: :star: [Challenge][ch-20-06] | Code [Challenge][ch-18-06] |\n" "| 7 | [Code][co-20-07] :star: :star: [Challenge][ch-20-07] | Code [Challenge][ch-18-07] |\n" "| 8 | [Code][co-20-08] :star: :star: [Challenge][ch-20-08] | Code [Challenge][ch-18-08] |\n" "| 9 | [Code][co-20-09] :star: :star: [Challenge][ch-20-09] | Code [Challenge][ch-18-09] |\n" "| 10 | [Code][co-20-10] :star: :star: [Challenge][ch-20-10] | Code [Challenge][ch-18-10] |\n" "| 11 | [Code][co-20-11] :star: :star: [Challenge][ch-20-11] | Code [Challenge][ch-18-11] |\n" "| 12 | [Code][co-20-12] :star: :star: [Challenge][ch-20-12] | Code [Challenge][ch-18-12] |\n" "| 13 | [Code][co-20-13] :star: :star: [Challenge][ch-20-13] | Code [Challenge][ch-18-13] |\n" "| 14 | [Code][co-20-14] :star: :star: [Challenge][ch-20-14] | Code [Challenge][ch-18-14] |\n" "| 15 | [Code][co-20-15] :star: :star: [Challenge][ch-20-15] | Code [Challenge][ch-18-15] |\n" "| 16 | [Code][co-20-16] :star: :star: [Challenge][ch-20-16] | Code [Challenge][ch-18-16] |\n" "| 17 | [Code][co-20-17] :star: :star: [Challenge][ch-20-17] | Code [Challenge][ch-18-17] |\n" "| 18 | [Code][co-20-18] :star: :star: [Challenge][ch-20-18] | Code [Challenge][ch-18-18] |\n" "| 19 | [Code][co-20-19] :star: :star: [Challenge][ch-20-19] | Code [Challenge][ch-18-19] |\n" "| 20 | [Code][co-20-20] :star: :star: [Challenge][ch-20-20] | Code [Challenge][ch-18-20] |\n" "| 21 | [Code][co-20-21] :star: :star: [Challenge][ch-20-21] | Code [Challenge][ch-18-21] |\n" "| 22 | [Code][co-20-22] :star: :star: [Challenge][ch-20-22] | Code [Challenge][ch-18-22] |\n" "| 23 | [Code][co-20-23] :star: :star: [Challenge][ch-20-23] | Code [Challenge][ch-18-23] |\n" "| 24 | [Code][co-20-24] :star: :star: [Challenge][ch-20-24] | Code [Challenge][ch-18-24] |\n" "| 25 | [Code][co-20-25] :star: :star: [Challenge][ch-20-25] | Code [Challenge][ch-18-25] |\n" + self.get_years_links([2020, 2018]) + "\n" ))
55.911854
151
0.328404
1,783
18,395
3.321368
0.066181
0.232185
0.21952
0.170044
0.825228
0.802938
0.78926
0.778791
0.770517
0.759372
0
0.131199
0.499049
18,395
328
152
56.082317
0.511449
0.000924
0
0.620253
0
0.177215
0.587342
0.147693
0
0
0
0
0.018987
1
0.025316
false
0
0.012658
0.006329
0.050633
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
182c01b590fc77d6700c8cd8ce81434a6e4d9bec
139
py
Python
python/dgl/data/chem/utils/__init__.py
arangoml/dgl
d135058f9986fadcbdf6aa1011a00c3ad45a8ce3
[ "Apache-2.0" ]
3
2020-02-28T07:28:52.000Z
2020-06-03T08:41:55.000Z
python/dgl/data/chem/utils/__init__.py
sherry-1001/dgl
60d2e7d3c928d43bbb18e7ab17c066451c49f649
[ "Apache-2.0" ]
null
null
null
python/dgl/data/chem/utils/__init__.py
sherry-1001/dgl
60d2e7d3c928d43bbb18e7ab17c066451c49f649
[ "Apache-2.0" ]
2
2020-12-07T09:34:01.000Z
2020-12-13T06:18:58.000Z
from .splitters import *
from .featurizers import *
from .mol_to_graph import *
from .complex_to_graph import *
from .rdkit_utils import *
23.166667
31
0.784173
20
139
5.2
0.5
0.384615
0.25
0.326923
0
0
0
0
0
0
0
0
0.143885
139
5
32
27.8
0.87395
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
a12cd141fe75810223f704033eac2bbff2285ad9
7,592
py
Python
src/test.py
lucaspbordignon/goku
d7de5be04c6ed6147a9bbde9f13691dbafe82103
[ "MIT" ]
null
null
null
src/test.py
lucaspbordignon/goku
d7de5be04c6ed6147a9bbde9f13691dbafe82103
[ "MIT" ]
null
null
null
src/test.py
lucaspbordignon/goku
d7de5be04c6ed6147a9bbde9f13691dbafe82103
[ "MIT" ]
null
null
null
import unittest

import goku
from gomoku import Gomoku


class GokuTest(unittest.TestCase):

    def setUp(self):
        self.game = Gomoku()

    def test_find_doubles_col(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 0))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 1)
        game._mark_board(player=0, position=(2, 0))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 2)

    def test_find_doubles_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 1))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 1)
        game._mark_board(player=0, position=(2, 2))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 2)

    def test_find_doubles_edge_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 1))
        game._mark_board(player=0, position=(1, 0))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 1)
        game._mark_board(player=0, position=(13, 14))
        game._mark_board(player=0, position=(14, 13))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 2)
        game._mark_board(player=0, position=(0, 13))
        game._mark_board(player=0, position=(1, 14))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 3)
        game._mark_board(player=0, position=(13, 0))
        game._mark_board(player=0, position=(14, 1))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 4)

    def test_find_doubles_row(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(0, 1))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 1)
        game._mark_board(player=0, position=(0, 2))
        doubles = goku.find_doubles('X', game._board)
        self.assertEqual(doubles, 2)

    def test_find_triples_col(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 0))
        game._mark_board(player=0, position=(2, 0))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 1)
        game._mark_board(player=0, position=(3, 0))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 2)

    def test_find_triples_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 1))
        game._mark_board(player=0, position=(2, 2))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 1)
        game._mark_board(player=0, position=(3, 3))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 2)

    def test_find_triples_edge_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 2))
        game._mark_board(player=0, position=(1, 1))
        game._mark_board(player=0, position=(2, 0))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 1)
        game._mark_board(player=0, position=(12, 14))
        game._mark_board(player=0, position=(13, 13))
        game._mark_board(player=0, position=(14, 12))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 2)
        game._mark_board(player=0, position=(0, 12))
        game._mark_board(player=0, position=(1, 13))
        game._mark_board(player=0, position=(2, 14))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 3)
        game._mark_board(player=0, position=(12, 0))
        game._mark_board(player=0, position=(13, 1))
        game._mark_board(player=0, position=(14, 2))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 4)

    def test_find_triples_row(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(0, 1))
        game._mark_board(player=0, position=(0, 2))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 1)
        game._mark_board(player=0, position=(0, 3))
        triples = goku.find_triples('X', game._board)
        self.assertEqual(triples, 2)

    def test_find_quartets_col(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 0))
        game._mark_board(player=0, position=(2, 0))
        game._mark_board(player=0, position=(3, 0))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 1)
        game._mark_board(player=0, position=(4, 0))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 2)

    def test_find_quartets_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(1, 1))
        game._mark_board(player=0, position=(2, 2))
        game._mark_board(player=0, position=(3, 3))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 1)
        game._mark_board(player=0, position=(4, 4))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 2)

    def test_find_quartets_edge_diag(self):
        game = self.game
        game._mark_board(player=0, position=(0, 3))
        game._mark_board(player=0, position=(1, 2))
        game._mark_board(player=0, position=(2, 1))
        game._mark_board(player=0, position=(3, 0))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 1)
        game._mark_board(player=0, position=(11, 14))
        game._mark_board(player=0, position=(12, 13))
        game._mark_board(player=0, position=(13, 12))
        game._mark_board(player=0, position=(14, 11))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 2)
        game._mark_board(player=0, position=(0, 11))
        game._mark_board(player=0, position=(1, 12))
        game._mark_board(player=0, position=(2, 13))
        game._mark_board(player=0, position=(3, 14))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 3)
        game._mark_board(player=0, position=(11, 0))
        game._mark_board(player=0, position=(12, 1))
        game._mark_board(player=0, position=(13, 2))
        game._mark_board(player=0, position=(14, 3))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 4)

    def test_find_quartets_row(self):
        game = self.game
        game._mark_board(player=0, position=(0, 0))
        game._mark_board(player=0, position=(0, 1))
        game._mark_board(player=0, position=(0, 2))
        game._mark_board(player=0, position=(0, 3))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 1)
        game._mark_board(player=0, position=(0, 4))
        quartets = goku.find_quartets('X', game._board)
        self.assertEqual(quartets, 2)


if __name__ == '__main__':  # pragma: no cover
    unittest.main()
31.371901
55
0.622893
1,043
7,592
4.293384
0.040268
0.128629
0.209022
0.305494
0.949531
0.946181
0.946181
0.792765
0.758598
0.758151
0
0.04847
0.233667
7,592
241
56
31.502075
0.72121
0.002107
0
0.646341
0
0
0.005017
0
0
0
0
0
0.182927
1
0.079268
false
0
0.018293
0
0.103659
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
a137ceffd9b43b766f9a3f049d6b46bd840b17fa
27,517
py
Python
testsuite/test_un_directed_un_weighted_graph.py
trycatchhorn/PyAlgDat
85f8c7550630cf31b5e4472fd593956c9d96c078
[ "MIT" ]
null
null
null
testsuite/test_un_directed_un_weighted_graph.py
trycatchhorn/PyAlgDat
85f8c7550630cf31b5e4472fd593956c9d96c078
[ "MIT" ]
null
null
null
testsuite/test_un_directed_un_weighted_graph.py
trycatchhorn/PyAlgDat
85f8c7550630cf31b5e4472fd593956c9d96c078
[ "MIT" ]
null
null
null
#!/usr/bin/env py.test """ Test UnDirectedUnWeightedGraph class. """ import unittest import copy from py_alg_dat import dfs_edge_classification from py_alg_dat import graph from py_alg_dat import graph_edge from py_alg_dat import graph_vertex class TestUnDirectedUnWeightedGraph(unittest.TestCase): """ Test UnDirectedUnWeightedGraph class. """ def setUp(self): self.g_1 = graph.UnDirectedUnWeightedGraph(7) self.v_1 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'A') self.v_2 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'B') self.v_3 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'C') self.v_4 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'D') self.v_5 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'E') self.v_6 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'F') self.v_7 = graph_vertex.UnWeightedGraphVertex(self.g_1, 'G') self.g_1.add_vertex(self.v_1) self.g_1.add_vertex(self.v_2) self.g_1.add_vertex(self.v_3) self.g_1.add_vertex(self.v_4) self.g_1.add_vertex(self.v_5) self.g_1.add_vertex(self.v_6) self.g_1.add_vertex(self.v_7) self.e12 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_1, self.v_2) self.e21 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_2, self.v_1) self.e14 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_1, self.v_4) self.e41 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_4, self.v_1) self.e23 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_2, self.v_3) self.e32 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_3, self.v_2) self.e24 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_2, self.v_4) self.e42 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_4, self.v_2) self.e25 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_2, self.v_5) self.e52 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_5, self.v_2) self.e35 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_3, self.v_5) self.e53 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_5, self.v_3) self.e45 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_4, self.v_5) self.e54 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_5, self.v_4) self.e46 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_4, self.v_6) self.e64 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_6, self.v_4) self.e56 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_5, self.v_6) self.e65 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_6, self.v_5) self.e57 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_5, self.v_7) self.e75 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_7, self.v_5) self.e67 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_6, self.v_7) self.e76 = graph_edge.UnDirectedUnWeightedGraphEdge( self.g_1, self.v_7, self.v_6) self.g_1.add_edge(self.v_1, self.v_2) self.g_1.add_edge(self.v_1, self.v_4) self.g_1.add_edge(self.v_2, self.v_3) self.g_1.add_edge(self.v_2, self.v_4) self.g_1.add_edge(self.v_2, self.v_5) self.g_1.add_edge(self.v_3, self.v_5) self.g_1.add_edge(self.v_4, self.v_5) self.g_1.add_edge(self.v_4, self.v_6) self.g_1.add_edge(self.v_5, self.v_6) self.g_1.add_edge(self.v_5, self.v_7) self.g_1.add_edge(self.v_6, self.v_7) def test_un_directed_un_weighted_graph_equal(self): """ Test operator "equal". 
""" a_graph = graph.UnDirectedUnWeightedGraph(5) ref = graph.UnDirectedUnWeightedGraph(5) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_5 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_vertex(v_5) ref.add_vertex(v_1) ref.add_vertex(v_2) ref.add_vertex(v_3) ref.add_vertex(v_4) ref.add_vertex(v_5) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_1, v_5) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_2, v_5) a_graph.add_edge(v_3, v_4) a_graph.add_edge(v_3, v_5) a_graph.add_edge(v_4, v_5) ref.add_edge(v_1, v_2) ref.add_edge(v_1, v_3) ref.add_edge(v_1, v_4) ref.add_edge(v_1, v_5) ref.add_edge(v_2, v_3) ref.add_edge(v_2, v_4) ref.add_edge(v_2, v_5) ref.add_edge(v_3, v_4) ref.add_edge(v_3, v_5) ref.add_edge(v_4, v_5) self.assertEqual(ref, a_graph) def test_un_directed_un_weighted_graph_copy(self): """ Test operator "copy". """ a_graph = graph.UnDirectedUnWeightedGraph(5) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_5 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_vertex(v_5) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_1, v_5) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_2, v_5) a_graph.add_edge(v_3, v_4) a_graph.add_edge(v_3, v_5) a_graph.add_edge(v_4, v_5) ref = copy.copy(a_graph) self.assertEqual(a_graph, ref) def test_un_directed_un_weighted_graph_is_weighted(self): """ Test method "is_weighted". """ self.assertFalse(self.g_1.is_weighted()) def test_un_directed_un_weighted_graph_len(self): """ Test operator "len". """ self.assertEqual(7, len(self.g_1)) def test_un_directed_un_weighted_graph_get_item(self): """ Test operator "get_item". """ self.assertEqual(self.g_1.get_vertex_at_index(3), self.g_1[3]) def test_un_directed_un_weighted_graph_get_number_of_vertices(self): """ Test method "get_number_of_vertices". """ self.assertEqual(7, self.g_1.get_number_of_vertices()) def test_un_directed_un_weighted_graph_get_number_of_edges(self): """ Test method "get_number_of_edges". """ self.assertEqual(22, self.g_1.get_number_of_edges()) def test_un_directed_un_weighted_graph_get_vertices(self): """ Test method "get_vertices". """ list1 = [] list1.append(self.v_1) list1.append(self.v_2) list1.append(self.v_3) list1.append(self.v_4) list1.append(self.v_5) list1.append(self.v_6) list1.append(self.v_7) list2 = [] for i in self.g_1.get_vertices(): list2.append(i) s_list1 = sorted(list1, key=lambda vertex: vertex.vertex_number) s_list2 = sorted(list2, key=lambda vertex: vertex.vertex_number) self.assertEqual(s_list1, s_list2) def test_un_directed_un_weighted_graph_get_edges(self): """ Test method "get_edges". 
""" list1 = [] list1.append(self.e12) list1.append(self.e21) list1.append(self.e14) list1.append(self.e41) list1.append(self.e23) list1.append(self.e32) list1.append(self.e24) list1.append(self.e42) list1.append(self.e25) list1.append(self.e52) list1.append(self.e35) list1.append(self.e53) list1.append(self.e45) list1.append(self.e54) list1.append(self.e46) list1.append(self.e64) list1.append(self.e56) list1.append(self.e65) list1.append(self.e57) list1.append(self.e75) list1.append(self.e67) list1.append(self.e76) list2 = [] for i in self.g_1.get_edges(): list2.append(i) s_list1 = sorted(list1, key=lambda edge: ( edge.head_vertex, edge.tail_vertex)) s_list2 = sorted(list2, key=lambda edge: ( edge.head_vertex, edge.tail_vertex)) self.assertEqual(s_list1, s_list2) def test_un_directed_un_weighted_graph_get_edge(self): """ Test method "get_edge". """ self.assertEqual(self.e12, self.g_1.get_edge(self.v_1, self.v_2)) def test_un_directed_un_weighted_graph_is_edge(self): """ Test method "is_edge". """ try: self.assertTrue(self.g_1.is_edge(self.v_1, self.v_2)) except KeyError: print "Exception caught: %s" % str(KeyError) def test_un_directed_un_weighted_graph_remove_vertex_v_0(self): """ Test method "remove_vertex". """ # https://reference.wolfram.com/mathematica/ref/VertexDelete.html # Create a graph from where a vertex should be removed. a_graph = graph.UnDirectedUnWeightedGraph(5) v_0 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') # Add vertices to the graph. a_graph.add_vertex(v_0) a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) # Add edges to the graph. a_graph.add_edge(v_0, v_1) a_graph.add_edge(v_0, v_2) a_graph.add_edge(v_0, v_3) a_graph.add_edge(v_0, v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_3, v_4) # Create a reference graph used to compare the result after a vertex has been removed. g_ref = graph.UnDirectedUnWeightedGraph(4) # Create reference vertices. v_1_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'B') v_2_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'C') v_3_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'D') v_4_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'E') # Add vertices to the reference graph. g_ref.add_vertex(v_1_ref) g_ref.add_vertex(v_2_ref) g_ref.add_vertex(v_3_ref) g_ref.add_vertex(v_4_ref) # Add edges to the reference graph. g_ref.add_edge(v_1_ref, v_2_ref) g_ref.add_edge(v_1_ref, v_3_ref) g_ref.add_edge(v_1_ref, v_4_ref) g_ref.add_edge(v_2_ref, v_3_ref) g_ref.add_edge(v_2_ref, v_4_ref) g_ref.add_edge(v_3_ref, v_4_ref) # Remove vertex form graph. a_graph.remove_vertex(v_0) self.assertEqual(g_ref, a_graph) def test_un_directed_un_weighted_graph_remove_vertex_v_1(self): """ Test method "remove_vertex". """ # Create a graph from where a vertex should be removed. a_graph = graph.UnDirectedUnWeightedGraph(5) v_0 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') # Add vertices to the graph. 
a_graph.add_vertex(v_0) a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) # Add edges to the graph. a_graph.add_edge(v_0, v_1) a_graph.add_edge(v_0, v_2) a_graph.add_edge(v_0, v_3) a_graph.add_edge(v_0, v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_3, v_4) # Create a reference graph used to compare the result after a vertex has been removed. g_ref = graph.UnDirectedUnWeightedGraph(4) # Create reference vertices. v_0_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'A') v_2_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'C') v_3_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'D') v_4_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'E') # Add vertices to the reference graph. g_ref.add_vertex(v_0_ref) g_ref.add_vertex(v_2_ref) g_ref.add_vertex(v_3_ref) g_ref.add_vertex(v_4_ref) # Add edges to the reference graph. g_ref.add_edge(v_0_ref, v_2_ref) g_ref.add_edge(v_0_ref, v_3_ref) g_ref.add_edge(v_0_ref, v_4_ref) g_ref.add_edge(v_2_ref, v_3_ref) g_ref.add_edge(v_2_ref, v_4_ref) g_ref.add_edge(v_3_ref, v_4_ref) # Remove vertex form graph. a_graph.remove_vertex(v_1) self.assertEqual(g_ref, a_graph) def test_un_directed_un_weighted_graph_remove_vertex_v_2(self): """ Test method "remove_vertex". """ # Create a graph from where a vertex should be removed. a_graph = graph.UnDirectedUnWeightedGraph(5) v_0 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') # Add vertices to the graph. a_graph.add_vertex(v_0) a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) # Add edges to the graph. a_graph.add_edge(v_0, v_1) a_graph.add_edge(v_0, v_2) a_graph.add_edge(v_0, v_3) a_graph.add_edge(v_0, v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_3, v_4) # Create a reference graph used to compare the result after a vertex has been removed. g_ref = graph.UnDirectedUnWeightedGraph(4) # Create reference vertices. v_0_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'A') v_1_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'B') v_3_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'D') v_4_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'E') # Add vertices to the reference graph. g_ref.add_vertex(v_0_ref) g_ref.add_vertex(v_1_ref) g_ref.add_vertex(v_3_ref) g_ref.add_vertex(v_4_ref) # Add edges to the reference graph. g_ref.add_edge(v_0_ref, v_1_ref) g_ref.add_edge(v_0_ref, v_3_ref) g_ref.add_edge(v_0_ref, v_4_ref) g_ref.add_edge(v_1_ref, v_3_ref) g_ref.add_edge(v_1_ref, v_4_ref) g_ref.add_edge(v_3_ref, v_4_ref) # Remove vertex form graph. a_graph.remove_vertex(v_2) self.assertEqual(g_ref, a_graph) def test_un_directed_un_weighted_graph_remove_vertex_v_3(self): """ Test method "remove_vertex". """ # Create a graph from where a vertex should be removed. a_graph = graph.UnDirectedUnWeightedGraph(5) v_0 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') # Add vertices to the graph. 
a_graph.add_vertex(v_0) a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) # Add edges to the graph. a_graph.add_edge(v_0, v_1) a_graph.add_edge(v_0, v_2) a_graph.add_edge(v_0, v_3) a_graph.add_edge(v_0, v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_3, v_4) # Create a reference graph used to compare the result after a vertex has been removed. g_ref = graph.UnDirectedUnWeightedGraph(4) # Create reference vertices. v_0_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'A') v_1_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'B') v_2_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'C') v_4_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'E') # Add vertices to the reference graph. g_ref.add_vertex(v_0_ref) g_ref.add_vertex(v_1_ref) g_ref.add_vertex(v_2_ref) g_ref.add_vertex(v_4_ref) # Add edges to the reference graph. g_ref.add_edge(v_0_ref, v_1_ref) g_ref.add_edge(v_0_ref, v_2_ref) g_ref.add_edge(v_0_ref, v_4_ref) g_ref.add_edge(v_1_ref, v_2_ref) g_ref.add_edge(v_1_ref, v_4_ref) g_ref.add_edge(v_2_ref, v_4_ref) # Remove vertex form graph. a_graph.remove_vertex(v_3) self.assertEqual(g_ref, a_graph) def test_un_directed_un_weighted_graph_remove_vertex_v_4(self): """ Test method "remove_vertex". """ # Create a graph from where a vertex should be removed. a_graph = graph.UnDirectedUnWeightedGraph(5) v_0 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') # Add vertices to the graph. a_graph.add_vertex(v_0) a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) # Add edges to the graph. a_graph.add_edge(v_0, v_1) a_graph.add_edge(v_0, v_2) a_graph.add_edge(v_0, v_3) a_graph.add_edge(v_0, v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_3, v_4) # Create a reference graph used to compare the result after a vertex has been removed. g_ref = graph.UnDirectedUnWeightedGraph(4) # Create reference vertices. v_0_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'A') v_1_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'B') v_2_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'C') v_3_ref = graph_vertex.UnWeightedGraphVertex(g_ref, 'D') # Add vertices to the reference graph. g_ref.add_vertex(v_0_ref) g_ref.add_vertex(v_1_ref) g_ref.add_vertex(v_2_ref) g_ref.add_vertex(v_3_ref) # Add edges to the reference graph. g_ref.add_edge(v_0_ref, v_1_ref) g_ref.add_edge(v_0_ref, v_2_ref) g_ref.add_edge(v_0_ref, v_3_ref) g_ref.add_edge(v_1_ref, v_2_ref) g_ref.add_edge(v_1_ref, v_3_ref) g_ref.add_edge(v_2_ref, v_3_ref) # Remove vertex form graph. a_graph.remove_vertex(v_4) self.assertEqual(g_ref, a_graph) def test_un_directed_un_weighted_graph_remove_edge(self): """ Test method "remove_edge". 
""" # https://reference.wolfram.com/mathematica/ref/VertexDelete.html a_graph = graph.UnDirectedUnWeightedGraph(5) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') v_5 = graph_vertex.UnWeightedGraphVertex(a_graph, 'E') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_vertex(v_5) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_1, v_3) a_graph.add_edge(v_1, v_4) a_graph.add_edge(v_1, v_5) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) a_graph.add_edge(v_2, v_5) a_graph.add_edge(v_3, v_4) a_graph.add_edge(v_3, v_5) a_graph.add_edge(v_4, v_5) a_graph.remove_edge(v_1, v_2) a_graph.remove_edge(v_1, v_3) a_graph.remove_edge(v_1, v_4) a_graph.remove_edge(v_1, v_5) a_graph.remove_edge(v_2, v_3) a_graph.remove_edge(v_2, v_4) a_graph.remove_edge(v_2, v_5) a_graph.remove_edge(v_3, v_4) a_graph.remove_edge(v_3, v_5) a_graph.remove_edge(v_4, v_5) ref = [] res = a_graph.get_edges() self.assertEqual(ref, res) def test_un_directed_un_weighted_graph_is_directed(self): """ Test method "is_directed". """ self.assertFalse(self.g_1.is_directed()) def test_un_directed_un_weighted_graph_is_connected(self): """ Test method "is_connected". """ self.assertTrue(self.g_1.is_connected()) def test_un_directed_un_weighted_graph_is_cyclic(self): """ Test method "is_cyclic". """ a_graph = graph.UnDirectedUnWeightedGraph(4) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_1, v_3) self.assertTrue(a_graph.is_cyclic()) def test_un_directed_un_weighted_graph_is_cyclic_not(self): """ Test method "is_cyclic" - inverted. """ a_graph = graph.UnDirectedUnWeightedGraph(4) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) self.assertFalse(a_graph.is_cyclic()) def test_un_directed_un_weighted_graph_get_vertex_at_index(self): """ Test method "get_vertex_at_index". """ self.assertEqual(self.v_4, self.g_1.get_vertex_at_index(3)) def test_un_directed_un_weighted_graph_get_emanating_edges(self): """ Test method "get_emanating_edges". """ ref = [] res = [] ref.append(self.e12) ref.append(self.e14) res = self.g_1.get_emanating_edges(self.v_1.get_vertex_number()) self.assertEqual(ref, res) def test_un_directed_un_weighted_graph_get_incident_edges(self): """ Test method "get_incident_edges". """ ref = [] res = [] ref.append(self.e12) ref.append(self.e14) res = self.g_1.get_incident_edges(self.v_1.get_vertex_number()) self.assertEqual(ref, res) def test_un_directed_un_weighted_graph_classify_edges_cyclic(self): """ Test classify edges - cyclic graph. 
""" # Create an undirected unweighted cyclic graph a_graph = graph.UnDirectedUnWeightedGraph(4) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_1, v_3) res = a_graph.classify_edges().get_edges() ref = dfs_edge_classification.DFSEdgeClassification( a_graph).get_edges() e12 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_1, v_2) e23 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_2, v_3) e13 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_3, v_1) ref[e12] = graph_edge.EdgeClassification.TREE_EDGE ref[e23] = graph_edge.EdgeClassification.TREE_EDGE ref[e13] = graph_edge.EdgeClassification.BACK_EDGE self.assertEqual(res, ref) def test_un_directed_un_weighted_graph_classify_edges_acyclic(self): """ Test classify edges - acyclic graph. """ # Create an undirected unweighted acyclic graph a_graph = graph.UnDirectedUnWeightedGraph(4) v_1 = graph_vertex.UnWeightedGraphVertex(a_graph, 'A') v_2 = graph_vertex.UnWeightedGraphVertex(a_graph, 'B') v_3 = graph_vertex.UnWeightedGraphVertex(a_graph, 'C') v_4 = graph_vertex.UnWeightedGraphVertex(a_graph, 'D') a_graph.add_vertex(v_1) a_graph.add_vertex(v_2) a_graph.add_vertex(v_3) a_graph.add_vertex(v_4) a_graph.add_edge(v_1, v_2) a_graph.add_edge(v_2, v_3) a_graph.add_edge(v_2, v_4) res = a_graph.classify_edges().get_edges() ref = dfs_edge_classification.DFSEdgeClassification( a_graph).get_edges() e12 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_1, v_2) e23 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_2, v_3) e24 = graph_edge.UnDirectedUnWeightedGraphEdge(a_graph, v_2, v_4) ref[e12] = graph_edge.EdgeClassification.TREE_EDGE ref[e23] = graph_edge.EdgeClassification.TREE_EDGE ref[e24] = graph_edge.EdgeClassification.TREE_EDGE self.assertEqual(res, ref)
36.494695
94
0.64611
4,315
27,517
3.723754
0.03314
0.095967
0.082898
0.074434
0.892644
0.865385
0.812858
0.779188
0.752676
0.734503
0
0.039673
0.253443
27,517
753
95
36.543161
0.742491
0.065778
0
0.63138
0
0
0.004255
0
0
0
0
0
0.049149
0
null
null
0
0.011342
null
null
0.00189
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
a1c13d9ba18a41e6a26a28f3a06f3b100bb60640
68,646
py
Python
benchmarks/SimResults/combinations_spec_rr/cmp_bwavesGemsFDTDastaromnetpp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
benchmarks/SimResults/combinations_spec_rr/cmp_bwavesGemsFDTDastaromnetpp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
benchmarks/SimResults/combinations_spec_rr/cmp_bwavesGemsFDTDastaromnetpp/power.py
TugberkArkose/MLScheduler
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
[ "Unlicense" ]
null
null
null
power = {'BUSES': {'Area': 1.33155, 'Bus/Area': 1.33155, 'Bus/Gate Leakage': 0.00662954, 'Bus/Peak Dynamic': 0.0, 'Bus/Runtime Dynamic': 0.0, 'Bus/Subthreshold Leakage': 0.0691322, 'Bus/Subthreshold Leakage with power gating': 0.0259246, 'Gate Leakage': 0.00662954, 'Peak Dynamic': 0.0, 'Runtime Dynamic': 0.0, 'Subthreshold Leakage': 0.0691322, 'Subthreshold Leakage with power gating': 0.0259246}, 'Core': [{'Area': 32.6082, 'Execution Unit/Area': 8.2042, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0754741, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.26197, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.434868, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.122718, 'Execution Unit/Instruction Scheduler/Area': 2.17927, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.207989, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.360161, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.206563, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.774712, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.138917, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 5.89784, 
'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0821558, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00753975, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0816689, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0557611, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.163825, 'Execution Unit/Register Files/Runtime Dynamic': 0.0633008, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.218022, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.602725, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155, 'Execution Unit/Runtime Dynamic': 2.10808, 'Execution Unit/Subthreshold Leakage': 1.83518, 'Execution Unit/Subthreshold Leakage with power gating': 0.709678, 'Gate Leakage': 0.372997, 'Instruction Fetch Unit/Area': 5.86007, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000142853, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000142853, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000123614, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch 
Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.74099e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000801012, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00121033, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00139862, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0590479, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0536046, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.40971, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.130241, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.182065, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 5.79624, 'Instruction Fetch Unit/Runtime Dynamic': 0.36852, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932587, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.145895, 
'L2/Runtime Dynamic': 0.0415471, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80969, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 3.49932, 'Load Store Unit/Data Cache/Runtime Dynamic': 1.14308, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0351387, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0731875, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0731875, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 3.84633, 'Load Store Unit/Runtime Dynamic': 1.5772, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.180468, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.360936, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591622, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283406, 'Memory Management Unit/Area': 0.434579, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0640487, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.066232, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00813591, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.212003, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0213747, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.48123, 'Memory Management Unit/Runtime Dynamic': 0.0876067, 'Memory Management Unit/Subthreshold Leakage': 0.0769113, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462, 'Peak Dynamic': 20.7292, 'Renaming Unit/Area': 0.369768, 'Renaming Unit/FP Front End RAT/Area': 0.168486, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.286622, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925, 'Renaming Unit/Free List/Area': 0.0414755, 'Renaming Unit/Free List/Gate Leakage': 4.15911e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0401324, 'Renaming Unit/Free List/Runtime Dynamic': 0.0140844, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987, 'Renaming Unit/Gate Leakage': 0.00863632, 'Renaming Unit/Int Front End RAT/Area': 0.114751, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.104347, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897, 
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781, 'Renaming Unit/Peak Dynamic': 4.56169, 'Renaming Unit/Runtime Dynamic': 0.405054, 'Renaming Unit/Subthreshold Leakage': 0.070483, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779, 'Runtime Dynamic': 4.58801, 'Subthreshold Leakage': 6.21877, 'Subthreshold Leakage with power gating': 2.58311}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0448161, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.237889, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.240052, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.104519, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.168586, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0850966, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.358202, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.0827372, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 4.40875, 
'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.045351, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00438402, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0485606, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0324225, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.0939116, 'Execution Unit/Register Files/Runtime Dynamic': 0.0368065, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.11351, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.282018, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 1.32029, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000319456, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000319456, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000293162, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch 
Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000121646, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000465752, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00139783, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00252997, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0311686, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.98259, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0722479, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.105862, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 4.29732, 'Instruction Fetch Unit/Runtime Dynamic': 0.213207, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0418493, 'L2/Runtime 
Dynamic': 0.00364728, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 2.45922, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.591432, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0395377, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0395378, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 2.64592, 'Load Store Unit/Runtime Dynamic': 0.825957, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.0974932, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.194987, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0346007, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0352275, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.12327, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0118491, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.338817, 'Memory Management Unit/Runtime Dynamic': 0.0470766, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 15.3221, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.119298, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.00616746, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0513195, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488, 
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.176785, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 2.58696, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 0.0, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.0, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.131742, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.212495, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.10726, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.451497, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.150675, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 4.11765, 'Execution 
Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00552585, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0399589, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0408671, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.0399589, 'Execution Unit/Register Files/Runtime Dynamic': 0.0463929, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0841823, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.240959, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 1.34691, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00128352, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00128352, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00117484, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch Unit/Branch 
Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000485916, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000587059, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00432893, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0102735, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0392865, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.49896, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.101212, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.133435, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 4.83876, 'Instruction Fetch Unit/Runtime Dynamic': 0.288536, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0176446, 'L2/Runtime 
Dynamic': 0.00413011, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 2.43835, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.58224, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0388627, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0388626, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 2.62187, 'Load Store Unit/Runtime Dynamic': 0.81276, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.0958289, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.191657, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.03401, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0342744, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.155376, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.0165937, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.369908, 'Memory Management Unit/Runtime Dynamic': 0.050868, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 15.5553, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.00594384, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0683957, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488, 'Renaming 
Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.0743396, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 2.57755, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}, {'Area': 32.0201, 'Execution Unit/Area': 7.68434, 'Execution Unit/Complex ALUs/Area': 0.235435, 'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646, 'Execution Unit/Complex ALUs/Peak Dynamic': 4.81792e-05, 'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202726, 'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111, 'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163, 'Execution Unit/Floating Point Units/Area': 4.6585, 'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156, 'Execution Unit/Floating Point Units/Peak Dynamic': 0.000652749, 'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033, 'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829, 'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061, 'Execution Unit/Gate Leakage': 0.120359, 'Execution Unit/Instruction Scheduler/Area': 1.66526, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0531955, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453, 'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519, 'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913, 'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223, 'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562, 'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763, 'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0858024, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755, 'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964, 'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262, 'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388, 'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608, 'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451, 'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0433102, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853, 'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446, 'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.182308, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892, 'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346, 'Execution Unit/Integer ALUs/Area': 0.47087, 'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291, 'Execution Unit/Integer ALUs/Peak Dynamic': 0.0607415, 'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344, 'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222, 'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833, 'Execution Unit/Peak Dynamic': 3.94248, 
'Execution Unit/Register Files/Area': 0.570804, 'Execution Unit/Register Files/Floating Point RF/Area': 0.208131, 'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788, 'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000123318, 'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00223126, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698, 'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968, 'Execution Unit/Register Files/Gate Leakage': 0.000622708, 'Execution Unit/Register Files/Integer RF/Area': 0.362673, 'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992, 'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0161372, 'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0165015, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175, 'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675, 'Execution Unit/Register Files/Peak Dynamic': 0.0162606, 'Execution Unit/Register Files/Runtime Dynamic': 0.0187328, 'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387, 'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643, 'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912, 'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402, 'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0340087, 'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.0892756, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478, 'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543, 'Execution Unit/Runtime Dynamic': 0.89842, 'Execution Unit/Subthreshold Leakage': 1.79543, 'Execution Unit/Subthreshold Leakage with power gating': 0.688821, 'Gate Leakage': 0.368936, 'Instruction Fetch Unit/Area': 5.85939, 'Instruction Fetch Unit/Branch Predictor/Area': 0.138516, 'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000773963, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000773963, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719, 'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000695742, 'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344, 'Instruction Fetch 
Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000281159, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347, 'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045, 'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838, 'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732, 'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05, 'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602, 'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000237046, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505, 'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733, 'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00248071, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703, 'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282, 'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954, 'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758, 'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867, 'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0066482, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682, 'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357, 'Instruction Fetch Unit/Gate Leakage': 0.0589979, 'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323, 'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05, 'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827, 'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0158633, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885, 'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682, 'Instruction Fetch Unit/Instruction Cache/Area': 3.14635, 'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931, 'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.00905, 'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0623038, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022, 'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386, 'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799, 'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493, 'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404, 'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.053879, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943, 'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104, 'Instruction Fetch Unit/Peak Dynamic': 3.27653, 'Instruction Fetch Unit/Runtime Dynamic': 0.141175, 'Instruction Fetch Unit/Subthreshold Leakage': 0.932286, 'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843, 'L2/Area': 4.53318, 'L2/Gate Leakage': 0.015464, 'L2/Peak Dynamic': 0.0262964, 
'L2/Runtime Dynamic': 0.00842122, 'L2/Subthreshold Leakage': 0.834142, 'L2/Subthreshold Leakage with power gating': 0.401066, 'Load Store Unit/Area': 8.80901, 'Load Store Unit/Data Cache/Area': 6.84535, 'Load Store Unit/Data Cache/Gate Leakage': 0.0279261, 'Load Store Unit/Data Cache/Peak Dynamic': 1.80237, 'Load Store Unit/Data Cache/Runtime Dynamic': 0.284037, 'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675, 'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085, 'Load Store Unit/Gate Leakage': 0.0350888, 'Load Store Unit/LoadQ/Area': 0.0836782, 'Load Store Unit/LoadQ/Gate Leakage': 0.00059896, 'Load Store Unit/LoadQ/Peak Dynamic': 0.0182873, 'Load Store Unit/LoadQ/Runtime Dynamic': 0.0182872, 'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961, 'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918, 'Load Store Unit/Peak Dynamic': 1.88873, 'Load Store Unit/Runtime Dynamic': 0.392511, 'Load Store Unit/StoreQ/Area': 0.322079, 'Load Store Unit/StoreQ/Gate Leakage': 0.00329971, 'Load Store Unit/StoreQ/Peak Dynamic': 0.0450933, 'Load Store Unit/StoreQ/Runtime Dynamic': 0.0901861, 'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621, 'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004, 'Load Store Unit/Subthreshold Leakage': 0.591321, 'Load Store Unit/Subthreshold Leakage with power gating': 0.283293, 'Memory Management Unit/Area': 0.4339, 'Memory Management Unit/Dtlb/Area': 0.0879726, 'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729, 'Memory Management Unit/Dtlb/Peak Dynamic': 0.0160037, 'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0163861, 'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699, 'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485, 'Memory Management Unit/Gate Leakage': 0.00808595, 'Memory Management Unit/Itlb/Area': 0.301552, 'Memory Management Unit/Itlb/Gate Leakage': 0.00393464, 'Memory Management Unit/Itlb/Peak Dynamic': 0.0627388, 'Memory Management Unit/Itlb/Runtime Dynamic': 0.010251, 'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758, 'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842, 'Memory Management Unit/Peak Dynamic': 0.246339, 'Memory Management Unit/Runtime Dynamic': 0.0266371, 'Memory Management Unit/Subthreshold Leakage': 0.0766103, 'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333, 'Peak Dynamic': 12.9698, 'Renaming Unit/Area': 0.303608, 'Renaming Unit/FP Front End RAT/Area': 0.131045, 'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123, 'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468, 'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000323969, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571, 'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885, 'Renaming Unit/Free List/Area': 0.0340654, 'Renaming Unit/Free List/Gate Leakage': 2.5481e-05, 'Renaming Unit/Free List/Peak Dynamic': 0.0306032, 'Renaming Unit/Free List/Runtime Dynamic': 0.00240398, 'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144, 'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064, 'Renaming Unit/Gate Leakage': 0.00708398, 'Renaming Unit/Int Front End RAT/Area': 0.0941223, 'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242, 'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965, 'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0269515, 'Renaming Unit/Int Front End RAT/Subthreshold 
Leakage': 0.00435488, 'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228, 'Renaming Unit/Peak Dynamic': 3.58947, 'Renaming Unit/Runtime Dynamic': 0.0296794, 'Renaming Unit/Subthreshold Leakage': 0.0552466, 'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461, 'Runtime Dynamic': 1.49684, 'Subthreshold Leakage': 6.16288, 'Subthreshold Leakage with power gating': 2.55328}], 'DRAM': {'Area': 0, 'Gate Leakage': 0, 'Peak Dynamic': 7.4987146375184395, 'Runtime Dynamic': 7.4987146375184395, 'Subthreshold Leakage': 4.252, 'Subthreshold Leakage with power gating': 4.252}, 'L3': [{'Area': 61.9075, 'Gate Leakage': 0.0484137, 'Peak Dynamic': 0.413011, 'Runtime Dynamic': 0.151554, 'Subthreshold Leakage': 6.80085, 'Subthreshold Leakage with power gating': 3.32364}], 'Processor': {'Area': 191.908, 'Gate Leakage': 1.53485, 'Peak Dynamic': 64.9895, 'Peak Power': 98.1018, 'Runtime Dynamic': 11.4009, 'Subthreshold Leakage': 31.5774, 'Subthreshold Leakage with power gating': 13.9484, 'Total Cores/Area': 128.669, 'Total Cores/Gate Leakage': 1.4798, 'Total Cores/Peak Dynamic': 64.5765, 'Total Cores/Runtime Dynamic': 11.2494, 'Total Cores/Subthreshold Leakage': 24.7074, 'Total Cores/Subthreshold Leakage with power gating': 10.2429, 'Total L3s/Area': 61.9075, 'Total L3s/Gate Leakage': 0.0484137, 'Total L3s/Peak Dynamic': 0.413011, 'Total L3s/Runtime Dynamic': 0.151554, 'Total L3s/Subthreshold Leakage': 6.80085, 'Total L3s/Subthreshold Leakage with power gating': 3.32364, 'Total Leakage': 33.1122, 'Total NoCs/Area': 1.33155, 'Total NoCs/Gate Leakage': 0.00662954, 'Total NoCs/Peak Dynamic': 0.0, 'Total NoCs/Runtime Dynamic': 0.0, 'Total NoCs/Subthreshold Leakage': 0.0691322, 'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
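The mapping above (the tail of a McPAT-style power report) flattens a component hierarchy into 'Component/Subcomponent/Metric' keys. A minimal sketch of regrouping such keys into nested dicts; `nest` and `core_stats` are hypothetical names, assuming `core_stats` is a dict shaped like the one above:

def nest(core_stats):
    # Split each 'A/B/Metric' key on '/' and build a tree of dicts,
    # storing the numeric value at the leaf.
    tree = {}
    for path, value in core_stats.items():
        node = tree
        *parents, leaf = path.split('/')
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value
    return tree

# e.g. nest(core_stats)['Instruction Fetch Unit']['Instruction Cache']['Area']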
75.105033
124
0.682225
8,084
68,646
5.787234
0.06717
0.123461
0.112859
0.093365
0.938697
0.930831
0.918391
0.887419
0.862346
0.841634
0
0.132476
0.224208
68,646
914
125
75.105033
0.746014
0
0
0.642232
0
0
0.657057
0.048072
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
62bf5e3d8cab8d306b7ae9e1be86ac2dd11f178d
122
py
Python
python/taichi/lang/simt/grid.py
weiyunfei/taichi
52a7cd8325672bc160e5540e54064c960c78256d
[ "MIT" ]
1
2020-11-10T07:17:01.000Z
2020-11-10T07:17:01.000Z
python/taichi/lang/simt/grid.py
weiyunfei/taichi
52a7cd8325672bc160e5540e54064c960c78256d
[ "MIT" ]
1
2020-08-24T05:18:43.000Z
2020-08-24T05:18:43.000Z
python/taichi/lang/simt/grid.py
weiyunfei/taichi
52a7cd8325672bc160e5540e54064c960c78256d
[ "MIT" ]
null
null
null
from taichi.lang import impl


def memfence():
    return impl.call_internal("grid_memfence", with_runtime_context=False)
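A minimal usage sketch for the helper above, assuming a CUDA-capable Taichi installation and that this module is exposed as `ti.simt.grid` (per the file path python/taichi/lang/simt/grid.py):

import taichi as ti

ti.init(arch=ti.cuda)  # grid-wide memory fences are a GPU/SIMT feature

x = ti.field(ti.i32, shape=16)

@ti.kernel
def fill():
    for i in x:
        x[i] = i
        ti.simt.grid.memfence()  # make the store visible across the grid before proceeding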
20.333333
74
0.786885
17
122
5.411765
0.882353
0
0
0
0
0
0
0
0
0
0
0
0.122951
122
5
75
24.4
0.859813
0
0
0
0
0
0.106557
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
7
62f382270393647e2700d65080453569e09a3be4
13,877
py
Python
src/tasks/dockerDeploy/acs-dcos/test_acsclient.py
Bhaskers-Blu-Org2/vsts-docker
22755267f1fca6a82297c6fc2f6d8d81acb7c3b7
[ "MIT" ]
8
2017-02-22T19:41:13.000Z
2019-02-10T22:34:38.000Z
src/tasks/dockerDeploy/acs-dcos/test_acsclient.py
Bhaskers-Blu-Org2/vsts-docker
22755267f1fca6a82297c6fc2f6d8d81acb7c3b7
[ "MIT" ]
18
2017-02-01T16:00:22.000Z
2019-04-01T14:09:20.000Z
src/tasks/dockerDeploy/acs-dcos/test_acsclient.py
microsoft/vsts-docker
22755267f1fca6a82297c6fc2f6d8d81acb7c3b7
[ "MIT" ]
6
2019-11-04T00:13:36.000Z
2021-11-10T10:11:54.000Z
import time
import unittest

import paramiko
import requests
from mock import Mock, patch

import acsclient
import acsinfo


def mocked_requests_get(*args, **kwargs):
    class MockResponse:
        def __init__(self, json_data, status_code):
            self.json_data = json_data
            self.status_code = status_code

        def json(self):
            return self.json_data

    if args[0].startswith('unsupported_version'):
        return MockResponse({'version': '1.2.3'}, 200)
    elif args[0].startswith('supported_version'):
        return MockResponse({'version': '1.8.4'}, 200)
    elif args[0].startswith('missing_version'):
        return MockResponse({'blah': '123'}, 200)
    elif args[0].startswith('wait_for_test'):
        return MockResponse({}, 200)
    elif args[0].startswith('wait_for_test_404'):
        return MockResponse({}, 404)
    elif args[0].startswith('http://make_request_200'):
        return MockResponse({}, 200)
    elif args[0].startswith('http://make_request_404'):
        return MockResponse({}, 404)
    return MockResponse({}, 404)


class AcsClientTest(unittest.TestCase):
    @patch('requests.get', side_effect=mocked_requests_get)
    def test_ensure_dcos_version_unsupported(self, mock_get):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'unsupported_version')
        acs = acsclient.ACSClient(acs_info)
        self.assertRaises(ValueError, acs.ensure_dcos_version)

    @patch('requests.get', side_effect=mocked_requests_get)
    def test_ensure_dcos_version_supported(self, mock_get):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'supported_version')
        acs = acsclient.ACSClient(acs_info)
        self.assertTrue(acs.ensure_dcos_version())

    @patch('requests.get', side_effect=mocked_requests_get)
    def test_ensure_dcos_version_missing(self, mock_get):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'missing_version')
        acs = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs.ensure_dcos_version)

    def test_using_direct_connection(self):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertTrue(acs_client.is_direct)

    def test_using_ssh_connection(self):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, None)
        acs_client = acsclient.ACSClient(acs_info)
        self.assertFalse(acs_client.is_direct)

    def test_get_private_key_missing(self):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client._get_private_key)

    def test_get_private_key_invalidkey(self):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, 'MYPRIVATEKEY', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(paramiko.SSHException, acs_client._get_private_key)

    @patch('paramiko.RSAKey')
    def test_get_private_key_called(self, mock_rsakey):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, 'mypassword', 'MYPRIVATEKEY', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        key = acs_client._get_private_key()
        self.assertIsNotNone(key)
        self.assertTrue(mock_rsakey.from_private_key.called)

    def test_setup_tunnel_direct(self):
        acs_info = acsinfo.AcsInfo('myhost', 2200, None, None, None, 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertEquals(acs_client._setup_tunnel_server(8080), 8080)

    @patch('acsclient.ACSClient.get_available_local_port')
    @patch('sshtunnel.SSHTunnelForwarder')
    @patch('sshtunnel.SSHTunnelForwarder.start')
    @patch('acsclient.ACSClient._wait_for_tunnel')
    @patch('acsclient.ACSClient._get_private_key')
    def test_setup_tunnel_ssh(self, mock_get_private_key, mock_wait_for_tunnel,
                              mock_tunnel_forwarder, mock_tunnel, mock_available_port):
        mock_available_port.return_value = '1234'
        mock_get_private_key.return_value = Mock()
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', None)
        acs_client = acsclient.ACSClient(acs_info)
        return_value = acs_client._setup_tunnel_server(8080)
        self.assertTrue(mock_available_port.called)
        self.assertIsNotNone(acs_client.current_tunnel[0])
        self.assertEquals(acs_client.current_tunnel[1], 1234)
        self.assertTrue(mock_tunnel_forwarder.called)
        self.assertTrue(mock_wait_for_tunnel.called)
        self.assertEquals(return_value, 1234)

    @patch('requests.get', side_effect=mocked_requests_get)
    def test_wait_for_tunnel(self, mock_get):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', None)
        acs_client = acsclient.ACSClient(acs_info)
        self.assertFalse(acs_client.is_running)
        acs_client._wait_for_tunnel(time.time(), 'wait_for_test')
        self.assertTrue(acs_client.is_running)

    @patch('requests.get', side_effect=mocked_requests_get)
    def test_wait_for_tunnel_fails(self, mock_get):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', None)
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client._wait_for_tunnel, -1, 'wait_for_test_400')

    @patch('acsclient.ACSClient._setup_tunnel_server')
    def test_get_request_url_ssh(self, mock_tunnel_server):
        mock_tunnel_server.return_value = 1234
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', None)
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.create_request_url('mypath', 8080)
        self.assertEquals(actual, 'http://127.0.0.1:1234/mypath')

    @patch('acsclient.ACSClient._setup_tunnel_server')
    def test_get_request_url_direct(self, mock_tunnel_server):
        mock_tunnel_server.return_value = 1234
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.create_request_url('mypath', 8080)
        self.assertEquals(actual, 'http://leader.mesos:1234/mypath')

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.get', side_effect=mocked_requests_get)
    def test_make_request_invalid_method(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client.make_request, '', 'INVALID')

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.get', side_effect=mocked_requests_get)
    def test_make_request_get_200(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.make_request('', 'get')
        self.assertIsNotNone(actual)
        self.assertEquals(actual.status_code, 200)

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.get', side_effect=mocked_requests_get)
    def test_make_request_get_400(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_400'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client.make_request, '', 'get')

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.get', side_effect=mocked_requests_get)
    def test_make_request_get_200_data(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.make_request('', 'get', data='mydata')
        self.assertIsNotNone(actual)
        self.assertEquals(actual.status_code, 200)

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.delete', side_effect=mocked_requests_get)
    def test_make_request_delete_200(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.make_request('', 'delete')
        self.assertIsNotNone(actual)
        self.assertEquals(actual.status_code, 200)

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.delete', side_effect=mocked_requests_get)
    def test_make_request_delete_400(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_400'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client.make_request, '', 'delete')

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.put', side_effect=mocked_requests_get)
    def test_make_request_put_200(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.make_request('', 'put', data='somedata')
        self.assertIsNotNone(actual)
        self.assertEquals(actual.status_code, 200)

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.put', side_effect=mocked_requests_get)
    def test_make_request_put_400(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_400'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client.make_request, '', 'put')

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.post', side_effect=mocked_requests_get)
    def test_make_request_post_200(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_200'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        actual = acs_client.make_request('', 'post', data='somedata')
        self.assertIsNotNone(actual)
        self.assertEquals(actual.status_code, 200)

    @patch('acsclient.ACSClient.create_request_url')
    @patch('requests.post', side_effect=mocked_requests_get)
    def test_make_request_post_400(self, mock_get, mock_request_url):
        mock_request_url.return_value = 'http://make_request_400'
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        self.assertRaises(Exception, acs_client.make_request, '', 'post')

    @patch('acsclient.ACSClient.make_request')
    def test_get_request(self, mock_make_request):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.get_request('mypath')
        mock_make_request.assert_called_with('mypath', 'get')

    @patch('acsclient.ACSClient.make_request')
    def test_delete_request(self, mock_make_request):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.delete_request('mypath')
        mock_make_request.assert_called_with('mypath', 'delete')

    @patch('acsclient.ACSClient.make_request')
    def test_put_request(self, mock_make_request):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.put_request('mypath', put_data='mydata')
        mock_make_request.assert_called_with('mypath', 'put', data='mydata')

    @patch('acsclient.ACSClient.make_request')
    def test_post_request(self, mock_make_request):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.post_request('mypath', post_data='mydata')
        mock_make_request.assert_called_with('mypath', 'post', data='mydata')

    @patch('acsclient.ACSClient.current_tunnel')
    def test_shutdown_not_called(self, mock_current_tunnel):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.shutdown()
        self.assertFalse(acs_client.is_running)
        self.assertFalse(mock_current_tunnel[0].stop.called)

    @patch('acsclient.ACSClient.current_tunnel')
    def test_shutdown(self, mock_current_tunnel):
        acs_info = acsinfo.AcsInfo('myhost', 2200, 'user', 'password', 'pkey', 'http://leader.mesos')
        acs_client = acsclient.ACSClient(acs_info)
        acs_client.is_running = True
        acs_client.shutdown()
        self.assertFalse(acs_client.is_running)
        self.assertTrue(mock_current_tunnel[0].stop.called)
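The suite above leans on one pattern throughout: patching requests.get (or .put/.post/.delete) with a side_effect that dispatches on the URL prefix. A self-contained sketch of that pattern, using the standard-library unittest.mock instead of the external mock package; FakeResponse and fake_get are hypothetical names, and requests is assumed installed:

import requests
from unittest.mock import patch

class FakeResponse:
    def __init__(self, json_data, status_code):
        self._json = json_data
        self.status_code = status_code

    def json(self):
        return self._json

def fake_get(url, **kwargs):
    # Dispatch on the URL prefix, like mocked_requests_get above.
    if url.startswith('supported_version'):
        return FakeResponse({'version': '1.8.4'}, 200)
    return FakeResponse({}, 404)

with patch('requests.get', side_effect=fake_get):
    assert requests.get('supported_version').json()['version'] == '1.8.4'
    assert requests.get('anything_else').status_code == 404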
48.862676
137
0.705628
1,735
13,877
5.319308
0.068588
0.059486
0.045509
0.068263
0.832701
0.792719
0.775707
0.735833
0.705277
0.679597
0
0.026211
0.166967
13,877
283
138
49.035336
0.772145
0
0
0.517094
0
0
0.194639
0.060099
0
0
0
0
0.188034
1
0.141026
false
0.094017
0.029915
0.004274
0.217949
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
1a2a015515a9af8ebf0326fcd57c2922a2208e8f
84
py
Python
Maya_Scripts/Tools/Animation/GPU_Cache/command.py
tadame/TAS_Dev
972f439fe7178a5b6f635930623f5ef70f296f06
[ "MIT" ]
null
null
null
Maya_Scripts/Tools/Animation/GPU_Cache/command.py
tadame/TAS_Dev
972f439fe7178a5b6f635930623f5ef70f296f06
[ "MIT" ]
4
2020-02-12T21:25:43.000Z
2020-02-13T08:55:23.000Z
Maya_Scripts/Tools/Animation/GPU_Cache/command.py
tadame/TAS_Dev
972f439fe7178a5b6f635930623f5ef70f296f06
[ "MIT" ]
1
2020-02-13T08:53:49.000Z
2020-02-13T08:53:49.000Z
import Tools.Animation.GPU_Cache.GPU_Cache as gpu_cache
reload(gpu_cache)
gpu_cache
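Here reload is the Python 2 builtin, and the trailing bare gpu_cache simply echoes the module in Maya's script editor. A sketch of the Python 3 equivalent (assuming the tools root is on sys.path, as the Maya script path above implies):

import importlib
import Tools.Animation.GPU_Cache.GPU_Cache as gpu_cache

importlib.reload(gpu_cache)  # reload() moved to importlib in Python 3
gpu_cache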
21
55
0.869048
15
84
4.533333
0.466667
0.588235
0.323529
0.470588
0
0
0
0
0
0
0
0
0.071429
84
3
56
28
0.871795
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
c50d7185cb4b8f57577f222e4a034b1494a4b5a5
22,176
py
Python
save_data.py
DongDong-123/wanshida
66767e02e507c38375c9361820d8bad1a50c735e
[ "Apache-2.0" ]
null
null
null
save_data.py
DongDong-123/wanshida
66767e02e507c38375c9361820d8bad1a50c735e
[ "Apache-2.0" ]
null
null
null
save_data.py
DongDong-123/wanshida
66767e02e507c38375c9361820d8bad1a50c735e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
# @Time : 2020-06-25 11:54
# @Author : liudongyang
# @FileName: save_data.py
# @Software: PyCharm
# Store data
import pymysql
import os, csv
from readconfig import ReadMySqlConfig
from parm import zip_floder
import time
import copy

conf = ReadMySqlConfig()

# t_stan_org = ("busi_reg_no", "ctnm", "ctsnm", "cten", "ctsen", "busi_name", "appli_country", "sub_company", "former_name", "citp", "citp_nt", "ctid", "ctid_edt", "state", "city", "address", "post_code", "tel", "fax", "m_state", "m_city", "m_address", "m_post_code", "m_tel", "m_fax", "pr_mr_ms", "pr_name", "pr_title", "pr_phone", "pr_fax", "pr_email", "pr_address", "sec_mr_ms", "sec_name", "sec_title", "sec_phone", "sec_fax", "sec_email", "sec_address", "aml_mr_ms", "aml_name", "aml_title", "aml_phone", "aml_fax", "aml_email", "aml_address", "client_tp", "lfa_type", "lfa_type_explain", "fud_date", "assets_size", "country", "other_oper_country", "desc_business", "tin", "busi_type", "ctvc", "indu_code", "indu_code_nt", "crnm", "crit", "crit_nt", "crid", "crid_edt", "crid_country", "reg_cptl", "reg_cptl_code", "remark_ctvc", "eecp", "scale", "rgdt", "cls_dt", "unit_code", "remark", "stat_flag_ori", "stat_flag", "mer_unit", "cmgr", "reals", "complex", "clear", "data_crdt", "data_cruser", "data_updt", "data_upuser")
# t_stan_org = [
#     'csnm', 'custormer_name', 'custormer_sname', 'custormer_ename', 'custormer_sename', 'busi_name', 'appli_country', 'sub_company', 'former_name', 'cert_tp', 'cert_tp_explain', 'cert_num', 'cert_validity', 'state', 'city', 'address', 'post_code', 'tel', 'fax', 'm_state', 'm_city', 'm_address', 'm_post_code', 'm_tel', 'm_fax', 'pr_mr_ms', 'pr_name', 'pr_title', 'pr_phone', 'pr_fax', 'pr_email', 'pr_address', 'sec_mr_ms', 'sec_name', 'sec_title', 'sec_phone', 'sec_fax', 'sec_email', 'sec_address', 'aml_mr_ms', 'aml_name', 'aml_title', 'aml_phone', 'aml_fax', 'aml_email', 'aml_address', 'client_tp', 'lfa_type', 'lfa_type_explain', 'found_date', 'assets_size', 'country', 'other_oper_country', 'desc_business', 'tin', 'busi_type', 'industry_type', 'indu_code', 'indu_code_nt', 'legal_p_name', 'legal_p_ename', 'legal_p_cert_tp', 'legal_p_cert_explain', 'legal_p_cert_num', 'legal_cert_validity', 'crid_country', 'registered_capital', 'registered_capital_currency', 'business_scope', 'enps_ecic_sectors', 'scale', 'establish_busi_date', 'end_busi_date', 'unit_code', 'remark', 'stat_flag_ori', 'stat_flag', 'mer_unit', 'account_manager', 'reals', 'complex', 'clear', 'create_time', 'update_time', 'creator', 'updator'
# ]
# t_stan_relation = [
#     'ctif_id', 'ctnm', 'rel_tp', 'rel_layer', 'rel_cstp', 'fir_name', 'sec_name', 'last_name', 'citp', 'citp_nt', 'ctid', 'ctid_edt', 'rcnt', 'dob', 'cob', 'years_comp', 'years_indu', 'rel_prov', 'rel_city', 'rel_area', 'rear', 'retl', 'ret_mphone', 'rel_fax', 'rel_email', 'gov_owned', 'hold_per', 'hold_amt', 'remark', 'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
# ]
# t_stan_ptxn = [
#     'msg_id', 'msg_type', 'inter_tran_type', 'uuid', 'trace_id', 'tran_group_id', 'tran_init', 'tran_res', 'card_bin', 'card_type', 'card_product', 'card_brand', 'card_media', 'token_pan', 'encrypt_pan', 'hash_pan', 'digsit', 'crdhldr_tran_type', 'crdhldr_acc_tp_from', 'crdhldr_acc_tp_to', 'tran_amount', 'sett_amount', 'bill_amount', 'tran_datetime', 'crdhldr_bill_fee', 'sett_conv_rate', 'bill_conv_rate', 'sys_trace_audit_nbr', 'local_tran_datetime', 'exp_date', 'sett_date', 'conv_date', 'mcc', 'pos_entry_cd', 'card_seq_num', 'pos_pin_cptr_cd', 'tran_fee_indi', 'acq_srchg_amount', 'acq_ins_id_cd', 'fwd_ins_id_cd', 'trk2_prsnt_sw', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id', 'acq_merch_id', 'acq_merch_name', 'acq_merch_city', 'acq_merch_state', 'frmt_resp_data', 'additional_data', 'funding_payment_tti', 'tran_curr_cd', 'sett_curr_cd', 'bill_curr_cd', 'data_integrated', 'paym_account', 'advice_reason_cd', 'advice_reason_dt_cd', 'advice_reason_dt_txt', 'advice_reason_add_txt', 'pos_data', 'pos_crdhldr_present', 'pos_tran_status', 'inf_data', 'ntw_mng_inf_cd', 'org_mti', 'org_stan', 'org_tran_datetime', 'org_acq_ins_id_cd', 'org_fwd_ins_id_cd', 'org_trace_id', 'rcv_ins_id_cd', 'iss_mti_cd', 'iss_pcode', 'iss_ins_id_cd', 'acq_msg_flag', 'iss_msg_flag', 'single_dual_flag', 'tran_buss_st', 'tran_advice_st', 'inter_resp_cd', 'dc_id', 'insert_timestamp', 'insert_by', 'last_update_timestamp', 'last_update_by', 'channel_type', 'cash_back_amount', 'cash_back_indicator', 'mcht_data_srv', 'tcc', 'cvv2', 'pos_cat_level', 'merch_advic_cd', 'src_member_id', 'dest_member_id', 'group_tran_type', 'fee_category', 'fan_ntw_cd', 'int_rate_id', 'net_ref_num', 'bnk_ref_num', 'acq_ref_num', 'gcms_prc_num', 'act_tran_amount', 'act_sett_amount', 'act_bill_amount', 'zero_fill_amount', 'reserve1', 'reserve2', 'reserve3', 'data_transfer_dt'
# ]
# t_stan_dtxn = [
#     'batclr_sngl_dspt_msg_id', 'dspt_sys_id', 'orig_trace_id', 'card_type', 'card_product', 'card_brand', 'token_pan', 'encrypt_pan', 'crdhldr_tran_type', 'crdhldr_acc_tp_from', 'crdhldr_acc_tp_to', 'sett_conv_rate', 'dspt_trace_aud_num', 'orig_local_tran_datetime', 'sett_date', 'mcc', 'pos_entry_cd', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id', 'tran_curr_cd', 'sett_curr_cd', 'dspt_advic_rsn_cd', 'dspt_advic_rsn_dtl_cd', 'org_stan', 'channel_type', 'cash_back_amount', 'orig_tran_type', 'dspt_tran_type', 'send_ica', 'rcvr_ica', 'send_rl', 'rcvr_rl', 'dspt_tran_amt', 'dspt_setl_amt', 'orig_sett_date', 'db_cr_flag', 'tran_amt', 'setl_amt', 'actl_tran_amt', 'setl_tran_amt', 'cash_back_indicator', 'mcht_data_srv', 'dspt_ref_num', 'insert_timestamp', 'last_update_timestamp', 'reserve1', 'reserve2', 'reserve3', 'version', 'case_id', 'msg_rev_ind', 'dspt_tran_dttm', 'data_transfer_dt'
# ]
# t_stan_txn = [
#     'id', 'tran_kd', 'uuid', 'trace_id', 'card_bin', 'card_type', 'card_type_pboc', 'card_product', 'card_brand', 'token_pan', 'encrypt_pan', 'crdhldr_tran_type', 'crdhldr_acc_tp_from', 'crdhldr_acc_tp_to', 'tran_datetime', 'orig_local_tran_datetime', 'tsdr', 'tran_amount', 'sett_amount', 'tran_curr_cd', 'sett_curr_cd', 'sett_conv_rate', 'sett_date', 'crat_u', 'crat_c', 'mcc', 'pos_entry_cd', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id', 'rcv_ins_id_cd', 'iss_mti_cd', 'iss_pcode', 'iss_ins_id_cd', 'acq_merch_id', 'acq_merch_name', 'acq_merch_city', 'acq_merch_state', 'acq_ins_id_cd', 'fwd_ins_id_cd', 'TRCD', 'CBIF', 'channel_type', 'TSTP', 'cash_back_amount', 'cash_back_indicator', 'tran_type', 'dspt_tran_type', 'org_stan', 'tran_buss_st', 'tran_advice_st', 'mcht_data_srv', 'additional_data', 'insert_timestamp', 'insert_by', 'last_update_timestamp', 'last_update_by', 'mer_unit', 'data_transfer_dt'
# ]
# t_stan_stif = [
#     'unit_code', 'warn_dt', 'rule_id', 'rule_type', 'warn_kd', 'susp_value', 'ctif_tp', 'tran_kd', 'card_type', 'MCNO', 'MCNM', 'ACCD', 'fwd_ins_id_cd', 'STCT', 'card_product', 'card_brand', 'STCI', 'IUCD', 'rcv_ins_id_cd', 'tstm', 'tsdr', 'TCPP', 'TCTP', 'TCAT', 'TCMN', 'TCNM', 'CACD', 'c_fwd_ins_id_cd', 'TCCT', 'T_card_product', 'T_card_brand', 'TCCI', 'TCIC', 'c_rcv_ins_id_cd', 'bptc', 'ticd', 'busi_type', 'trans_type', 'trans_stat', 'tran_advice_st', 'acq_merch_city', 'acq_merch_state', 'TRCD', 'CBIF', 'trans_channel', 'PCTP', 'PCAT', 'crat_u', 'crat_c', 'TSTP', 'mcc', 'pos_entry_cd', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id', 'mer_unit', 'run_dt', 'data_transfer_dt'
# ]
# t_stan_info1 = [
#     'ctif_id', 'ctnm', 'info_a_bool', 'laws_name', 'info_a_bool2', 'info_a_bool3', 'supervisor_name', 'inspection_time', 'info_a_explain', 'info_a_explain2', 'info_b_bool', 'info_b_bool2', 'info_b_bool3', 'info_b_explain', 'info_c_bool', 'info_c_explain', 'info_d_bool', 'info_d_bool2', 'info_d_explain', 'payment_card_org ', 'compliance_org', 'chartered_institution', 'info_e_bool', 'info_e_bool2', 'info_e_bool3', 'supervision_trace_doc', 'info_f_bool', 'list_type', 'other_list_type', 'info_f_explain', 'info_g_bool', 'info_g_explain', 'info_h_bool', 'info_h_explain', 'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
# ]
# t_stan_info2 = [
#     'ctif_id', 'ctnm', 'info2_a_bool', 'info2_a_explain', 'info2_b_bool', 'info2_b_explain', 'agents_num', 'aml_role_explain', 'compliance_name', 'aml_workers', 'aml_position', 'info2_c_bool', 'info2_c_bool2', 'info2_c_explain', 'info2_d_bool', 'info2_d_explain', 'info2_e_bool', 'info2_f_bool', 'info2_g_bool', 'info2_g_explain', 'info2_h_bool', 'info2_h_explain', 'info2_i_bool', 'info2_i_explain', 'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
# ]
# t_stan_info3 = [
#     'ctif_id', 'ctnm', 'fi_mcard_principal', 'fi_mcard_affillate', 'fi_mcard_association', 'fi_mcard_issuing', 'fi_mcard_acquiring_merchants', 'fi_mcard_acquiring_atm', 'fi_mcard_acquiring_mcd', 'fi_mcard_optrpt_msd', 'fi_mcard_optrpt_ms', 'fi_mcard_optrpt_mscb', 'fi_mcard_optrpt_mpqr', 'fi_mstro_principal', 'fi_mstro_affillate', 'fi_mstro_issuing', 'fi_mstro_acquiring_merchants', 'fi_mstro_acquiring_atm', 'fi_mstro_optrpt_msd', 'fi_mstro_optrpt_ms', 'fi_mstro_optrpt_mscb', 'fi_mstro_optrpt_mpqr', 'fi_cirrus_principal', 'fi_cirrus_affillate', 'fi_cirrus_issuing_atm', 'fi_cirrus_acquiring_atm', 'fi_cirrus_optp2p_ms', 'fi_cirrus_optp2p_mscb', 'fi_cirrus_optp2p_mpqr', 'cgi_mcard_principal', 'cgi_mcard_affillate', 'cgi_mcard_issuing_credit', 'cgi_mcard_issuing_debit', 'cgi_mcard_issuing_prepaid', 'cgi_mcard_acquiring_atm', 'cgi_mcard_acquiring_mcd', 'cgi_mcard_acquiring_merchants', 'cgi_mcard_acquiring_poi', 'cgi_mcard_optrpt_msd', 'cgi_mcard_optrpt_ms', 'cgi_mcard_optrpt_mscb', 'cgi_mcard_optrpt_mpqr', 'cgi_mstro_principal', 'cgi_mstro_affillate', 'cgi_mstro_issuing_debit', 'cgi_mstro_issuing_prepaid', 'cgi_mstro_acquiring_atm', 'cgi_mstro_acquiring_merchants', 'cgi_mstro_acquiring_poi', 'cgi_mstro_optrpt_msd', 'cgi_mstro_optrpt_ms', 'cgi_mstro_optrpt_mscb', 'cgi_mstro_optrpt_mpqr', 'cgi_cirrus_principal', 'cgi_cirrus_affillate', 'cgi_cirrus__issuing', 'cgi_cirrus_acquiring_atm', 'cgi_cirrus_optp2p_ms', 'cgi_cirrus_optp2p_mscb', 'cgi_cirrus_optp2p_mpqr', 'info_a_bool', 'info_a_explain', 'additional_services_transfer', 'acquiring_rePower', 'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
# ]

t_stan_org = [
    'csnm', 'custormer_name', 'custormer_ename', 'custormer_sename', 'busi_name', 'appli_country',
    'sub_company', 'former_name', 'cert_tp', 'cert_tp_explain', 'cert_num', 'cert_validity',
    'state', 'city', 'address', 'post_code', 'tel', 'fax', 'pr_mr_ms', 'pr_name', 'pr_title',
    'pr_phone', 'pr_fax', 'pr_email', 'sec_mr_ms', 'sec_name', 'sec_title', 'sec_phone',
    'sec_fax', 'sec_email', 'aml_mr_ms', 'aml_name', 'aml_title', 'aml_phone', 'aml_fax',
    'aml_email', 'client_tp', 'lfa_type', 'found_date', 'assets_size', 'country',
    'other_oper_country', 'desc_business', 'busi_type', 'industry_type', 'legal_p_name',
    'legal_p_ename', 'legal_p_cert_tp', 'legal_p_cert_explain', 'legal_p_cert_num',
    'legal_cert_validity', 'registered_capital', 'registered_capital_currency',
    'business_scope', 'establish_busi_date', 'end_busi_date', 'stat_flag_ori', 'stat_flag',
    'mer_unit', 'account_manager', 'create_time', 'creator', 'update_time', 'updator'
]
t_stan_relation = [
    'ctif_id', 'ctnm', 'rel_tp', 'rel_cstp', 'fir_name', 'sec_name', 'last_name', 'dob', 'cob',
    'years_comp', 'years_indu', 'hold_per', 'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
]
t_stan_ptxn = [
    'msg_id', 'msg_type', 'inter_tran_type', 'uuid', 'trace_id', 'tran_group_id', 'tran_init',
    'tran_res', 'card_bin', 'card_type', 'card_product', 'card_brand', 'card_media', 'token_pan',
    'encrypt_pan', 'hash_pan', 'digsit', 'crdhldr_tran_type', 'crdhldr_acc_tp_from',
    'crdhldr_acc_tp_to', 'tran_amount', 'sett_amount', 'bill_amount', 'tran_datetime',
    'crdhldr_bill_fee', 'sett_conv_rate', 'bill_conv_rate', 'sys_trace_audit_nbr',
    'local_tran_datetime', 'exp_date', 'sett_date', 'conv_date', 'mcc', 'pos_entry_cd',
    'card_seq_num', 'pos_pin_cptr_cd', 'tran_fee_indi', 'acq_srchg_amount', 'acq_ins_id_cd',
    'fwd_ins_id_cd', 'trk2_prsnt_sw', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id',
    'acq_merch_id', 'acq_merch_name', 'acq_merch_city', 'acq_merch_state', 'frmt_resp_data',
    'additional_data', 'funding_payment_tti', 'tran_curr_cd', 'sett_curr_cd', 'bill_curr_cd',
    'data_integrated', 'paym_account', 'advice_reason_cd', 'advice_reason_dt_cd',
    'advice_reason_dt_txt', 'advice_reason_add_txt', 'pos_data', 'pos_crdhldr_present',
    'pos_tran_status', 'inf_data', 'ntw_mng_inf_cd', 'org_mti', 'org_stan', 'org_tran_datetime',
    'org_acq_ins_id_cd', 'org_fwd_ins_id_cd', 'org_trace_id', 'rcv_ins_id_cd', 'iss_mti_cd',
    'iss_pcode', 'iss_ins_id_cd', 'acq_msg_flag', 'iss_msg_flag', 'single_dual_flag',
    'tran_buss_st', 'tran_advice_st', 'inter_resp_cd', 'dc_id', 'insert_timestamp', 'insert_by',
    'last_update_timestamp', 'last_update_by', 'channel_type', 'cash_back_amount',
    'cash_back_indicator', 'mcht_data_srv', 'tcc', 'cvv2', 'pos_cat_level', 'merch_advic_cd',
    'src_member_id', 'dest_member_id', 'group_tran_type', 'fee_category', 'fan_ntw_cd',
    'int_rate_id', 'net_ref_num', 'bnk_ref_num', 'acq_ref_num', 'gcms_prc_num',
    'act_tran_amount', 'act_sett_amount', 'act_bill_amount', 'zero_fill_amount', 'reserve1',
    'reserve2', 'reserve3', 'data_transfer_dt'
]
t_stan_dtxn = [
    'batclr_sngl_dspt_msg_id', 'dspt_sys_id', 'orig_trace_id', 'card_type', 'card_product',
    'card_brand', 'token_pan', 'encrypt_pan', 'crdhldr_tran_type', 'crdhldr_acc_tp_from',
    'crdhldr_acc_tp_to', 'sett_conv_rate', 'dspt_trace_aud_num', 'orig_local_tran_datetime',
    'sett_date', 'mcc', 'pos_entry_cd', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id',
    'tran_curr_cd', 'sett_curr_cd', 'dspt_advic_rsn_cd', 'dspt_advic_rsn_dtl_cd', 'org_stan',
    'channel_type', 'cash_back_amount', 'orig_tran_type', 'dspt_tran_type', 'send_ica',
    'rcvr_ica', 'send_rl', 'rcvr_rl', 'dspt_tran_amt', 'dspt_setl_amt', 'orig_sett_date',
    'db_cr_flag', 'tran_amt', 'setl_amt', 'actl_tran_amt', 'setl_tran_amt',
    'cash_back_indicator', 'mcht_data_srv', 'dspt_ref_num', 'insert_timestamp',
    'last_update_timestamp', 'reserve1', 'reserve2', 'reserve3', 'version', 'case_id',
    'msg_rev_ind', 'dspt_tran_dttm', 'data_transfer_dt'
]
t_stan_txn = [
    'tran_kd', 'uuid', 'trace_id', 'card_bin', 'card_type', 'card_type_pboc', 'card_product',
    'card_brand', 'token_pan', 'encrypt_pan', 'crdhldr_tran_type', 'crdhldr_acc_tp_from',
    'crdhldr_acc_tp_to', 'tran_datetime', 'orig_local_tran_datetime', 'tsdr', 'tran_amount',
    'sett_amount', 'tran_curr_cd', 'sett_curr_cd', 'sett_conv_rate', 'sett_date', 'crat_u',
    'crat_c', 'mcc', 'pos_entry_cd', 'retriv_ref_num', 'auth_cd', 'resp_cd', 'pos_term_id',
    'rcv_ins_id_cd', 'iss_mti_cd', 'iss_pcode', 'iss_ins_id_cd', 'acq_merch_id',
    'acq_merch_name', 'acq_merch_city', 'acq_merch_state', 'acq_ins_id_cd', 'fwd_ins_id_cd',
    'TRCD', 'CBIF', 'channel_type', 'TSTP', 'cash_back_amount', 'cash_back_indicator',
    'tran_type', 'dspt_tran_type', 'org_stan', 'tran_buss_st', 'tran_advice_st',
    'mcht_data_srv', 'additional_data', 'insert_timestamp', 'insert_by',
    'last_update_timestamp', 'last_update_by', 'mer_unit', 'data_transfer_dt'
]
t_stan_stif = [
    'unit_code', 'warn_dt', 'rule_id', 'rule_type', 'warn_kd', 'susp_value', 'ctif_tp',
    'tran_kd', 'card_type', 'MCNO', 'MCNM', 'ACCD', 'fwd_ins_id_cd', 'STCT', 'card_product',
    'card_brand', 'STCI', 'IUCD', 'rcv_ins_id_cd', 'tstm', 'tsdr', 'TCPP', 'TCTP', 'TCAT',
    'TCMN', 'TCNM', 'CACD', 'c_fwd_ins_id_cd', 'TCCT', 'T_card_product', 'T_card_brand',
    'TCCI', 'TCIC', 'c_rcv_ins_id_cd', 'bptc', 'ticd', 'busi_type', 'trans_type', 'trans_stat',
    'tran_advice_st', 'acq_merch_city', 'acq_merch_state', 'TRCD', 'CBIF', 'trans_channel',
    'PCTP', 'PCAT', 'crat_u', 'crat_c', 'TSTP', 'mcc', 'pos_entry_cd', 'retriv_ref_num',
    'auth_cd', 'resp_cd', 'pos_term_id', 'mer_unit', 'run_dt', 'data_transfer_dt'
]
t_stan_info1 = [
    'ctif_id', 'ctnm', 'info_a_bool', 'laws_name', 'info_a_bool2', 'info_a_bool3',
    'supervisor_name', 'info_a_explain', 'info_a_explain2', 'info_b_bool', 'info_b_bool2',
    'info_b_bool3', 'info_b_explain', 'info_d_bool', 'info_d_bool2', 'info_d_explain',
    'payment_card_org ', 'compliance_org', 'chartered_institution', 'info_e_bool',
    'info_e_bool2', 'info_e_bool3', 'info_f_bool', 'list_type', 'other_list_type',
    'info_g_bool', 'info_h_bool', 'info_h_explain', 'data_crdt', 'data_cruser', 'data_updt',
    'data_upuser'
]
t_stan_info2 = [
    'ctif_id', 'ctnm', 'info2_a_bool', 'info2_a_explain', 'info2_b_bool', 'info2_b_explain',
    'agents_num', 'compliance_name', 'aml_position', 'info2_c_bool', 'info2_e_bool',
    'info2_f_bool', 'info2_h_bool', 'info2_h_explain', 'info2_i_bool', 'info2_i_explain',
    'data_crdt', 'data_cruser', 'data_updt', 'data_upuser'
]
t_stan_info3 = [
    'ctif_id', 'ctnm', 'fi_mcard_issuing', 'fi_mcard_acquiring_merchants',
    'fi_mcard_acquiring_atm', 'fi_mcard_acquiring_mcd', 'data_crdt', 'data_cruser',
    'data_updt', 'data_upuser'
]
t_stan_mapping = ["cid", "ica", "status", "create_time"]
t_stan_dic = ["type", "code", "name", "ename", "insert_timestamp", "last_update_timestamp", "data_transfer_dt"]
# t_stan_dic = ["type", "code", "name", "ename"]


class ConnectMysql:
    def __init__(self):
        self.host = conf.host()
        self.user = conf.user()
        self.passwd = conf.passwd()
        self.db = conf.db()
        self.port = int(conf.port())

    def save_to_mysql(self, datas, table_name):
        conn = pymysql.connect(host=self.host, user=self.user, password=self.passwd,
                               db=self.db, port=self.port, charset="utf8")
        curs = conn.cursor()
        for data_t in datas:
            if table_name == 't_stan_txn':
                data_tmp = copy.deepcopy(data_t)
                data_tmp.insert(0, 0)
            else:
                data_tmp = data_t
            sql = "insert into {} VALUES {}".format(table_name, tuple(data_tmp))
            print("sql", sql)
            curs.execute(sql)
        try:
            conn.commit()
        except Exception as e:
            print(e)
        curs.close()
        conn.close()


class SaveFile:
    def __init__(self):
        self.file_path = zip_floder
        self.currt_time = time.strftime('%Y%m%d', time.localtime())
        self.t_stan_org = t_stan_org
        self.t_stan_relation = t_stan_relation
        self.t_stan_ptxn = t_stan_ptxn
        self.t_stan_dtxn = t_stan_dtxn
        self.t_stan_txn = t_stan_txn
        self.t_stan_stif = t_stan_stif
        self.t_stan_info1 = t_stan_info1
        self.t_stan_info2 = t_stan_info2
        self.t_stan_info3 = t_stan_info3
        self.t_stan_mapping = t_stan_mapping
        self.t_stan_dic = t_stan_dic
        currt_time = round(time.time() * 1000)

    def write_to_csv(self, datas, file_name, date_time, num, total_num, control_file_time, delimiter=','):
        """
        :param datas: data to write
        :param file_name: file name
        :param date_time: date used in the file name
        :param num: file number
        :param total_num: number of records in the control file
        :return:
        """
        if delimiter == ',':
            file_path = os.path.join(self.file_path, 'custom', date_time)
        elif delimiter == 'map':
            file_path = os.path.join(self.file_path, 'mapping', date_time)
        elif delimiter == 'dic':
            file_path = os.path.join(self.file_path, 'dic', date_time)
        elif delimiter == '||':
            file_path = os.path.join(self.file_path, 'txn', date_time)
        else:
            file_path = os.path.join(self.file_path, 'stif', date_time)
        if num < 10:
            file_full = os.path.join(file_path, '{}-D{}-T{}-000{}.csv'.format(file_name.upper(), date_time, control_file_time, num))
        else:
            file_full = os.path.join(file_path, '{}-D{}-T{}-00{}.csv'.format(file_name.upper(), date_time, control_file_time, num))
        # ============ transactions are written separately ==================
        # Transaction data is delimited by ||, which the csv module cannot write directly, so plain-text writing is used instead
        if delimiter == '||':
            if not os.path.exists(file_full):
                title = eval('self.' + 't_stan_' + file_name)
                with open(file_full, 'a', encoding="utf-8-sig") as f:
                    f.write("||".join(title) + '\n')
            with open(file_full, 'a', encoding="utf-8-sig") as f:
                for da in datas:
                    f.write("||".join([str(tt) for tt in da]) + '\n')
        # ============================================
        elif delimiter == 'map':
            if not os.path.exists(file_full):
                title = eval('self.' + 't_stan_' + file_name)
                with open(file_full, 'a', encoding="utf-8-sig") as f:
                    f.write("||".join(title) + '\n')
            with open(file_full, 'a', encoding="utf-8-sig") as f:
                for dat in datas:
                    f.write(dat + '\n')
        elif delimiter == 'dic':
            if not os.path.exists(file_full):
                title = eval('self.' + 't_stan_' + file_name)
                with open(file_full, 'a', encoding="utf-8-sig") as f:
                    f.write("||".join(title) + '\n')
            with open(file_full, 'a', encoding="utf-8-sig") as f:
                for da in datas:
                    f.write("||".join([str(tt) for tt in da]) + '\n')
        else:
            # if not os.path.exists(file_full):
            #     title = eval('self.' + 't_stan_' + file_name)
            #     csvfile = open(file_full, 'a', encoding="utf-8-sig", newline='')
            #     writer = csv.writer(csvfile, delimiter=delimiter)
            #     writer.writerow(title)
            #     csvfile.close()
            #
            # csvfile = open(file_full, 'a', encoding="utf-8-sig", newline='')
            # writer = csv.writer(csvfile, delimiter=delimiter)
            # writer.writerows(datas)
            # csvfile.close()
            if not os.path.exists(file_full):
                title = eval('self.' + 't_stan_' + file_name)
                with open(file_full, 'a', encoding="utf-8-sig") as f:
                    f.write(",".join(['"' + tit + '"' for tit in title]) + '\n')
            with open(file_full, 'a', encoding="utf-8-sig") as f:
                for da in datas:
                    res = [str(tt) for tt in da]
                    f.write(",".join(['"' + dd + '"' for dd in res]) + '\n')
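ConnectMysql.save_to_mysql above interpolates row values into the SQL string with str.format, which breaks on embedded quotes and is injection-prone. A hedged sketch of the parameterized alternative with pymysql; save_rows and conn_kwargs are hypothetical names, and the table name still has to be formatted in because identifiers cannot be bound as parameters:

import pymysql

def save_rows(conn_kwargs, table_name, rows):
    # One %s placeholder per column; values are passed separately so the
    # driver handles quoting and escaping instead of str.format().
    conn = pymysql.connect(charset="utf8", **conn_kwargs)
    try:
        with conn.cursor() as curs:
            for row in rows:
                placeholders = ", ".join(["%s"] * len(row))
                sql = "insert into {} VALUES ({})".format(table_name, placeholders)
                curs.execute(sql, tuple(row))
        conn.commit()
    finally:
        conn.close()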
107.130435
1,854
0.681277
3,332
22,176
4.027911
0.141657
0.018628
0.014604
0.007451
0.764772
0.758513
0.746219
0.727815
0.71403
0.694956
0
0.006035
0.133297
22,176
206
1,855
107.650485
0.692248
0.48088
0
0.272
0
0
0.451004
0.027809
0
0
0
0
0
1
0.032
false
0.016
0.048
0
0.096
0.016
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c539122607b71b5b877c9f2756d7f143d51e151e
2,931
py
Python
vpc_hyp2/hyp2_veth1.py
dhanraj-vedanth/IaaS_VPC_CDN
262dbc7db63d5e76398dadc8015256fb37986e36
[ "MIT" ]
null
null
null
vpc_hyp2/hyp2_veth1.py
dhanraj-vedanth/IaaS_VPC_CDN
262dbc7db63d5e76398dadc8015256fb37986e36
[ "MIT" ]
null
null
null
vpc_hyp2/hyp2_veth1.py
dhanraj-vedanth/IaaS_VPC_CDN
262dbc7db63d5e76398dadc8015256fb37986e36
[ "MIT" ]
null
null
null
import sys
import os

tenant_check = sys.argv[1]
each = sys.argv[2]

def create_veths(tenant_check, each):
    # LINK BETWEEN THE BRIDGE AND VXLAN NAMESPACE
    # L3 veth
    print("VETH FUNCTION ON THEIR END??????")
    # sudo ip link set t" + str(tenant_check) + "_" + str(each) + " up"
    # sudo ip link set t" + str(tenant_check) + "_" + str(each) + "vx up
    # op1 = os.system("sudo ip link add t" + str(tenant_check) + "_" + str(each) + " type veth peer name t" + str(tenant_check) + "r")
    # print(op1)
    # op2 = os.system("sudo ip link set t" + str(tenant_check) + "_" + str(each) + " up")
    # print(op2)

    # Vxlan veth
    print(os.system("sudo ip link add t" + str(tenant_check) + "_" + str(each) + "vx type veth peer name t" + str(tenant_check) + "vxr"))
    print(os.system("sudo ip link set t" + str(tenant_check) + "_" + str(each) + "vx up"))

    # Adding one end to Ovs bridge
    # "sudo ovs-vsctl add-port tenant" + str(tenant_check) + "br_" + str(each) + " t" + str(tenant_check) + "_" + str(each)
    # "sudo ovs-vsctl add-port tenant" + str(tenant_check) + "br_" + str(each) + " t" + str(tenant_check) + "_" + str(each) + "vx"
    # print(os.system("sudo ovs-vsctl add-port tenant" + str(tenant_check) + "br_" + str(each) + " t" + str(tenant_check) + "_" + str(each)))
    print(os.system("sudo ovs-vsctl add-port tenant" + str(tenant_check) + "br_" + str(each) + " t" + str(tenant_check) + "_" + str(each) + "vx"))

    # Adding other end of L3 veth to namespace
    # print(os.system("sudo ip link set t" + str(tenant_check) + "r netns NS_tenant" + str(tenant_check) + "_" + str(each)))
    pid = os.popen('docker inspect -f \'{{.State.Pid}}\' ' + "NS_tenant" + str(tenant_check) + "_" + str(each)).read()
    print(pid)
    pid = pid.split('\n')[0]
    print("ip link set netns " + pid + " dev t" + str(tenant_check) + "vxr")
    os.system("ip link set netns " + pid + " dev t" + str(tenant_check) + "vxr")
    '''
    print(os.system("sudo docker exec -it" + str(tenant_check) + "_" + str(each) + " ip link set t" + str(tenant_check) + "r up"))

    # Adding other end of vxlan veth to namespace and then to bridge
    # print(os.system("sudo ip link set t" + str(tenant_check) + "vxr netns NS_tenant" + str(tenant_check) + "_" + str(each)))
    pid = os.popen('docker inspect -f \'{{.State.Pid}}\' ' + "NS_tenant" + str(tenant_check) + "_" + str(each)).read()
    print(pid)
    pid = pid.split('\n')[0]
    print("ip link set netns " + pid + " dev t" + str(tenant_check) + "r")
    print(os.system("ip link set netns " + pid + " dev t" + str(tenant_check) + "r"))
    '''
    print(os.system("sudo docker exec -it NS_tenant" + str(tenant_check) + "_" + str(each) + " ip link set t" + str(tenant_check) + "vxr up"))
    print(os.system("sudo docker exec -it NS_tenant" + str(tenant_check) + "_" + str(each) + " brctl addif BR_NS t" + str(tenant_check) + "vxr"))
    print("out of the stupid veth")

create_veths(tenant_check, each)
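The script above builds every shell command by string concatenation and runs it through os.system. A sketch of the same vxlan veth step using subprocess argument lists, which sidesteps the quoting pitfalls; run is a hypothetical helper:

import subprocess

def run(*cmd):
    # Print then execute the command as an argument list; check=True raises on failure.
    print(" ".join(cmd))
    subprocess.run(cmd, check=True)

# The vxlan veth pair from create_veths, e.g. for tenant_check=1, each=2
# (requires root, so the calls are left commented out here):
# run("sudo", "ip", "link", "add", "t1_2vx", "type", "veth", "peer", "name", "t1vxr")
# run("sudo", "ip", "link", "set", "t1_2vx", "up")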
55.301887
146
0.614125
458
2,931
3.78821
0.155022
0.221902
0.258213
0.181556
0.832853
0.802882
0.783285
0.775216
0.743516
0.723343
0
0.004254
0.197885
2,931
52
147
56.365385
0.73373
0.33811
0
0
0
0
0.276863
0
0
0
0
0
0
1
0.055556
false
0
0.111111
0
0.166667
0.5
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
10
c562d6bb9ae16a7562617a86c6c111f4db3ca386
123
py
Python
src/sage/symbolic/series.py
UCD4IDS/sage
43474c96d533fd396fe29fe0782d44dc7f5164f7
[ "BSL-1.0" ]
1,742
2015-01-04T07:06:13.000Z
2022-03-30T11:32:52.000Z
src/sage/symbolic/series.py
UCD4IDS/sage
43474c96d533fd396fe29fe0782d44dc7f5164f7
[ "BSL-1.0" ]
66
2015-03-19T19:17:24.000Z
2022-03-16T11:59:30.000Z
src/sage/symbolic/series.py
UCD4IDS/sage
43474c96d533fd396fe29fe0782d44dc7f5164f7
[ "BSL-1.0" ]
495
2015-01-10T10:23:18.000Z
2022-03-24T22:06:11.000Z
from sage.misc.lazy_import import lazy_import
lazy_import('sage.symbolic.expression', 'SymbolicSeries', deprecation=32386)
41
76
0.837398
16
123
6.25
0.625
0.3
0.32
0
0
0
0
0
0
0
0
0.043103
0.056911
123
2
77
61.5
0.818966
0
0
0
0
0
0.308943
0.195122
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
3d782dd62f7593e05bba1a0406d360c3d9506cdd
129
py
Python
dataset/__init__.py
ndb796/DARP
069941c6cf414099658bce73bbdb427f39af0a1f
[ "MIT" ]
43
2020-07-23T01:26:15.000Z
2022-03-29T14:03:09.000Z
dataset/__init__.py
ndb796/DARP
069941c6cf414099658bce73bbdb427f39af0a1f
[ "MIT" ]
9
2020-07-24T13:28:18.000Z
2021-11-08T07:05:30.000Z
dataset/__init__.py
ndb796/DARP
069941c6cf414099658bce73bbdb427f39af0a1f
[ "MIT" ]
9
2020-11-05T15:13:20.000Z
2022-01-19T08:03:53.000Z
from dataset.all_cifar10 import get_cifar10
from dataset.all_cifar100 import get_cifar100
from dataset.all_stl10 import get_stl10
43
45
0.891473
21
129
5.190476
0.380952
0.302752
0.385321
0
0
0
0
0
0
0
0
0.118644
0.085271
129
3
46
43
0.805085
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
3de7fe44df5a8ed7ba58f1247938879d1f2a7faf
109
py
Python
uiza/api_resources/base/__init__.py
uizaio/api-wrapper-python
e67c162e711857341f7ef5752178219e94f604d3
[ "MIT" ]
2
2019-04-22T11:39:36.000Z
2020-05-26T04:01:43.000Z
uiza/api_resources/base/__init__.py
uizaio/api-wrapper-python
e67c162e711857341f7ef5752178219e94f604d3
[ "MIT" ]
null
null
null
uiza/api_resources/base/__init__.py
uizaio/api-wrapper-python
e67c162e711857341f7ef5752178219e94f604d3
[ "MIT" ]
2
2019-02-11T09:34:03.000Z
2019-02-12T10:31:41.000Z
from uiza.api_resources.base.connections import Connection
from uiza.api_resources.base.base import UizaBase
36.333333
58
0.87156
16
109
5.8125
0.5625
0.172043
0.236559
0.430108
0.516129
0
0
0
0
0
0
0
0.073395
109
2
59
54.5
0.920792
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
3df21252c1adc6310e2c5cef4e23d650b252dd62
44,208
py
Python
test/common/dbserver.py
Hawkheart/Astron
3a15606ab15b63b666fdff1e0145417470232dbc
[ "BSD-3-Clause" ]
null
null
null
test/common/dbserver.py
Hawkheart/Astron
3a15606ab15b63b666fdff1e0145417470232dbc
[ "BSD-3-Clause" ]
null
null
null
test/common/dbserver.py
Hawkheart/Astron
3a15606ab15b63b666fdff1e0145417470232dbc
[ "BSD-3-Clause" ]
null
null
null
from astron import *
from dcfile import *

CREATE_DOID_OFFSET = 1 + (CHANNEL_SIZE_BYTES * 2) + 2 + 4

class DBServerTestsuite(object):
    def createTypeGetId(self, sender, context, type):
        # Create object of type
        dg = Datagram.create([75757], sender, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(context)
        dg.add_uint16(type)
        dg.add_uint16(0)  # Field count
        self.conn.send(dg)
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        dgi.seek(CREATE_DOID_OFFSET)
        return dgi.read_doid()

    def createGenericGetId(self, sender, context):
        return self.createTypeGetId(sender, context, DistributedTestObject1)

    def deleteObject(self, sender, doid, check=False):
        dg = Datagram.create([75757], sender, DBSERVER_OBJECT_DELETE)
        dg.add_doid(doid)
        self.conn.send(dg)
        if check:
            dg = Datagram.create([DATABASE_PREFIX|doid], sender, DBSERVER_OBJECT_DELETE)
            dg.add_doid(doid)
            self.expect(self.objects, dg)
        else:
            self.objects.flush()

    def test_create_getall(self):
        self.objects.flush()
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(20))

        doids = []

        ### Test for CreateObject and GetAll with no fields ###
        # Create a stored DistributedTestObject1 with no initial values...
        dg = Datagram.create([75757], 20, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(1)  # Context
        dg.add_uint16(DistributedTestObject1)
        dg.add_uint16(0)  # Field count
        self.conn.send(dg)

        # The Database should return the context and do_id...
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        self.assertTrue(*dgi.matches_header([20], 75757, DBSERVER_CREATE_OBJECT_RESP,
                                            remaining=4 + DOID_SIZE_BYTES))
        self.assertEquals(dgi.read_uint32(), 1)  # Check context
        doids.append(dgi.read_doid())
        self.assertGreaterEqual(doids[0], 1000000)  # do_id in valid range
        self.assertLessEqual(doids[0], 1000010)  # do_id in valid range

        # Select all fields from the stored object
        dg = Datagram.create([75757], 20, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(2)  # Context
        dg.add_doid(doids[0])
        self.conn.send(dg)

        # Retrieve object from the database, we stored no DB values, so get none back
        dg = Datagram.create([20], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(2)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject1)
        dg.add_uint16(0)  # Field count
        self.expect(self.conn, dg)  # Expecting SELECT_RESP with no values

        # Create a stored DistributedTestObject3 with actual values...
        dg = Datagram.create([75757], 20, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(4)  # Context
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(91849)
        dg.add_uint16(setDb3)
        dg.add_string("You monster...")
        self.conn.send(dg)

        # The Database should return a new do_id...
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        self.assertTrue(*dgi.matches_header([20], 75757, DBSERVER_CREATE_OBJECT_RESP,
                                            remaining=4 + DOID_SIZE_BYTES))
        self.assertEquals(dgi.read_uint32(), 4)  # Check context
        doids.append(dgi.read_doid())
        self.assertGreaterEqual(doids[1], 1000000)  # do_id in valid range
        self.assertLessEqual(doids[1], 1000010)  # do_id in valid range
        self.assertTrue(doids[0] != doids[1])  # do_ids should be different

        # Retrieve object from the database...
        dg = Datagram.create([75757], 20, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(5)  # Context
        dg.add_doid(doids[1])
        self.conn.send(dg)

        # Get values back from server
        dg = Datagram.create([20], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(5)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setDb3)
        dg.add_string("You monster...")
        dg.add_uint16(setRDB3)
        dg.add_uint32(91849)
        self.expect(self.conn, dg)

        # Try selecting an ID that doesn't exist
        dg = Datagram.create([75757], 20, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(6)  # Context
        dg.add_doid(78787)  # Non-existent ID
        self.conn.send(dg)

        # Get failure from server
        dg = Datagram.create([20], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(6)  # Context
        dg.add_uint8(FAILURE)
        self.expect(self.conn, dg)

        ### Test for CreateObject and GetAll with default field values ###
        # Create a stored DistributedTestObject5 with no initial values...
        dg = Datagram.create([75757], 20, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(7)  # Context
        dg.add_uint16(DistributedTestObject5)
        dg.add_uint16(0)  # Field count
        self.conn.send(dg)

        # The Database should return the context and do_id...
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        self.assertTrue(*dgi.matches_header([20], 75757, DBSERVER_CREATE_OBJECT_RESP,
                                            remaining=4 + DOID_SIZE_BYTES))
        self.assertEquals(dgi.read_uint32(), 7)  # Check context
        doids.append(dgi.read_doid())
        self.assertGreaterEqual(doids[-1], 1000000)  # do_id in valid range
        self.assertLessEqual(doids[-1], 1000010)  # do_id in valid range

        # Select all fields from the stored object
        dg = Datagram.create([75757], 20, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(8)  # Context
        dg.add_doid(doids[-1])
        self.conn.send(dg)

        # Retrieve object from the database, we stored no DB values, so get none back
        dg = Datagram.create([20], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(8)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject5)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDbD5)
        dg.add_uint8(setRDbD5DefaultValue)
        self.expect(self.conn, dg)

        # Cleanup
        for doid in doids:
            self.deleteObject(20, doid)
        self.conn.send(Datagram.create_remove_channel(20))

    def test_delete(self):
        self.objects.flush()
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(30))

        # Create an object, get its doid
        doid = self.createGenericGetId(30, 1)

        # Delete the object
        self.deleteObject(30, doid, True)

        # Check to make sure the object is deleted
        dg = Datagram.create([75757], 30, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(2)  # Context
        dg.add_doid(doid)
        self.conn.send(dg)

        # Get failure from database
        dg = Datagram.create([30], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(2)  # Context
        dg.add_uint8(FAILURE)
        self.expect(self.conn, dg)  # object deleted

        # Create some other objects
        doidA = self.createGenericGetId(30, 3)
        doidB = self.createGenericGetId(30, 4)

        # Delete object "A"
        self.deleteObject(30, doidA, True)

        # Check to make sure object "B" isn't affected
        dg = Datagram.create([75757], 30, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(5)  # Context
        dg.add_doid(doidB)
        self.conn.send(dg)

        # Response for object "B"
        dg = Datagram.create([30], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(5)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject1)
        dg.add_uint16(0)  # Field count
        self.expect(self.conn, dg)  # object "B" not deleted

        # Cleanup
        self.deleteObject(30, doidB)
        self.conn.send(Datagram.create_remove_channel(30))

    def test_create_collisions(self):
        self.objects.flush()
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(40))

        doids = []

        # Create the maximum number of objects we can assign
        doid = self.createGenericGetId(40, len(doids))
        while doid != 0 and len(doids) < 15:
            doids.append(doid)
            doid = self.createGenericGetId(40, len(doids))

        self.assertEquals(len(set(doids)), len(doids))  # Check if duplicate do_ids exist
        self.assertEquals(len(doids), 11)  # Check we received the max do_ids we requested
        self.assertEquals(doid, INVALID_DO_ID)  # Check the last object returned was BAD_DO_ID (0x0)

        # Delete an object
        self.deleteObject(40, doids[6])

        # Get new object with the last remaining id
        newdoid = self.createGenericGetId(40, 16)
        self.assertEquals(newdoid, doids[6])

        # Delete multiple objects
        self.deleteObject(40, doids[0])
        self.deleteObject(40, doids[1])
        self.deleteObject(40, doids[2])
        doids = doids[3:]

        # Create an object, it shouldn't collide
        doid = self.createGenericGetId(40, 17)
        for do in doids:
            self.assertNotEqual(do, doid)

        # Cleanup
        self.deleteObject(40, doid)
        self.deleteObject(40, doids[0])
        for do in doids:
            self.deleteObject(40, do)
        self.conn.send(Datagram.create_remove_channel(40))

    def test_ram(self):
        self.objects.flush()
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(50))

        # Create a stored DistributedTestObject3 with actual values and non-db/ram values we don't care about...
        dg = Datagram.create([75757], 50, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(1)  # Context
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(5)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(91849)
        dg.add_uint16(setBA1)
        dg.add_uint16(239)
        dg.add_uint16(setDb3)
        dg.add_string("You monster...")
        dg.add_uint16(setB1)
        dg.add_uint8(17)
        dg.add_uint16(setBR1)
        dg.add_string("Fiddlesticks!!!")
        self.conn.send(dg)

        # The Database should return a new do_id...
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        self.assertTrue(*dgi.matches_header([50], 75757, DBSERVER_CREATE_OBJECT_RESP))
        self.assertEquals(dgi.read_uint32(), 1)  # Check context
        doid = dgi.read_doid()

        def assert_no_change(context):
            # Retrieve object from the database...
            dg = Datagram.create([75757], 50, DBSERVER_OBJECT_GET_ALL)
            dg.add_uint32(context)  # Context
            dg.add_doid(doid)
            self.conn.send(dg)

            # Get values back from server
            dg = Datagram.create([50], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
            dg.add_uint32(context)  # Context
            dg.add_uint8(SUCCESS)
            dg.add_uint16(DistributedTestObject3)
            dg.add_uint16(2)  # Field count
            dg.add_uint16(setDb3)
            dg.add_string("You monster...")
            dg.add_uint16(setRDB3)
            dg.add_uint32(91849)
            self.expect(self.conn, dg)

        # Create shouldn't store ram fields
        assert_no_change(2)

        # Update object with single ram field
        dg = Datagram.create([75757], 50, DBSERVER_OBJECT_SET_FIELD)
        dg.add_doid(doid)
        dg.add_uint16(setBR1)
        dg.add_string("(deep breath...) 'Yay...'")
        self.conn.send(dg)
        self.objects.flush()

        # Update shouldn't store ram fields
        assert_no_change(3)

        # Update object with multiple ram fields
        dg = Datagram.create([75757], 50, DBSERVER_OBJECT_SET_FIELDS)
        dg.add_doid(doid)
        dg.add_uint16(2)
        dg.add_uint16(setBR1)
        dg.add_string("(deep breath...) 'Yay...'")
        dg.add_uint16(setB1)
        dg.add_uint8(100)
        self.conn.send(dg)
        self.objects.flush()

        # Update shouldn't store ram fields
        assert_no_change(4)

        # Update if equals with a ram field
        dg = Datagram.create([75757], 50, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS)
        dg.add_uint32(5)  # Context
        dg.add_doid(doid)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(91849)  # Old value
        dg.add_uint32(44444)  # New value
        dg.add_uint16(setBA1)
        dg.add_uint16(0)  # Old value (null: 0)
        dg.add_uint16(239)  # New value
        self.conn.send(dg)

        # Get update failure
        dg = Datagram.create([50], 75757, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS_RESP)
        dg.add_uint32(5)  # Context
        dg.add_uint8(FAILURE)
        self.expect(self.conn, dg)
        self.objects.flush()

        # Update shouldn't store ram fields, nor update non-ram fields
        assert_no_change(6)

        # Update if equals with ram fields
        dg = Datagram.create([75757], 50, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS)
        dg.add_uint32(7)  # Context
        dg.add_doid(doid)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setB1)
        dg.add_uint8(100)
        dg.add_uint16(setRDB3)
        dg.add_uint32(91849)  # Old value
        dg.add_uint32(44444)  # New value
        dg.add_uint16(setBA1)
        dg.add_uint16(0)  # Old value (null: 0)
        dg.add_uint16(239)  # New value
        self.conn.send(dg)

        # Get update failure
        dg = Datagram.create([50], 75757, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS_RESP)
        dg.add_uint32(7)  # Context
        dg.add_uint8(FAILURE)
        self.expect(self.conn, dg)
        self.objects.flush()

        # Update shouldn't store ram fields, nor update non-ram fields
        assert_no_change(8)

        # Cleanup
        self.deleteObject(50, doid)
        self.conn.send(Datagram.create_remove_channel(50))

    def test_set(self):
        self.objects.flush()
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(60))

        # Create db object
        dg = Datagram.create([75757], 60, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(1)
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(54231)
        self.conn.send(dg)
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        dgi.seek(CREATE_DOID_OFFSET)
        doid = dgi.read_doid()

        # Select all fields from the stored object
        dg = Datagram.create([75757], 60, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(2)  # Context
        dg.add_doid(doid)
        self.conn.send(dg)

        # Retrieve object from the database
        # Should get only RDB3 back
        dg = Datagram.create([60], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(2)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(54231)
        self.expect(self.conn, dg)  # Expecting SELECT_RESP with RDB3

        # Update single value
        dg = Datagram.create([75757], 60, DBSERVER_OBJECT_SET_FIELD)
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("Oh my gosh! Oh my gosh!! OMG! OMG!!!")
        self.conn.send(dg)

        # Expect SET_FIELD broadcast
        dg = Datagram.create([DATABASE_PREFIX|doid], 60, DBSERVER_OBJECT_SET_FIELD)
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("Oh my gosh! Oh my gosh!! OMG! OMG!!!")
        self.expect(self.objects, dg)

        # Select all fields from the stored object
        dg = Datagram.create([75757], 60, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(3)  # Context
        dg.add_doid(doid)
        self.conn.send(dg)

        # Retrieve object from the database
        # The values should be updated
        dg = Datagram.create([60], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(3)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setDb3)
        dg.add_string("Oh my gosh! Oh my gosh!! OMG! OMG!!!")
        dg.add_uint16(setRDB3)
        dg.add_uint32(54231)
        self.expect(self.conn, dg)

        # Update multiple values
        dg = Datagram.create([75757], 60, DBSERVER_OBJECT_SET_FIELDS)
        dg.add_doid(doid)
        dg.add_uint16(3)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(9999)
        dg.add_uint16(setDb3)
        dg.add_string("... can you make me a sandwich?")
        dg.add_uint16(setADb3)
        dg.add_string("sudo make me a sandwich")
        self.conn.send(dg)

        # Expect SET_FIELDs broadcast
        dg = Datagram.create([DATABASE_PREFIX|doid], 60, DBSERVER_OBJECT_SET_FIELDS)
        dg.add_doid(doid)
        dg.add_uint16(3)  # Field count
        dg.add_uint16(setDb3)
        dg.add_string("... can you make me a sandwich?")
        dg.add_uint16(setRDB3)
        dg.add_uint32(9999)
        dg.add_uint16(setADb3)
        dg.add_string("sudo make me a sandwich")
        self.expect(self.objects, dg)

        # Select all fields from the stored object
        dg = Datagram.create([75757], 60, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(4)  # Context
        dg.add_doid(doid)
        self.conn.send(dg)

        # Retrieve object from the database
        # The values should be updated
        dg = Datagram.create([60], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(4)  # Context
        dg.add_uint8(SUCCESS)  # Status
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(3)  # Field count
        dg.add_uint16(setDb3)
        dg.add_string("... can you make me a sandwich?")
        dg.add_uint16(setRDB3)
        dg.add_uint32(9999)
        dg.add_uint16(setADb3)
        dg.add_string("sudo make me a sandwich")
        self.expect(self.conn, dg)

        # Cleanup
        self.deleteObject(60, doid)
        self.conn.send(Datagram.create_remove_channel(60))

    def test_set_if_empty(self):
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(100))

        # Create db object
        dg = Datagram.create([75757], 100, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(1)  # Context
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(55)
        self.conn.send(dg)
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        dgi.seek(CREATE_DOID_OFFSET)
        doid = dgi.read_doid()

        # Update field with empty value
        dg = Datagram.create([75757], 100, DBSERVER_OBJECT_SET_FIELD_IF_EMPTY)
        dg.add_uint32(2)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("Beware... beware!!!")  # Field value
        self.conn.send(dg)

        # Get update response
        dg = Datagram.create([100], 75757, DBSERVER_OBJECT_SET_FIELD_IF_EMPTY_RESP)
        dg.add_uint32(2)  # Context
        dg.add_uint8(SUCCESS)
        self.expect(self.conn, dg)

        # Expect SET_FIELD broadcast
        dg = Datagram.create([DATABASE_PREFIX|doid], 100, DBSERVER_OBJECT_SET_FIELD)
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("Beware... beware!!!")  # Field value
        self.expect(self.objects, dg)

        # Select object with new value
        dg = Datagram.create([75757], 100, DBSERVER_OBJECT_GET_FIELD)
        dg.add_uint32(3)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        self.conn.send(dg)

        # Receive updated value
        dg = Datagram.create([100], 75757, DBSERVER_OBJECT_GET_FIELD_RESP)
        dg.add_uint32(3)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(setDb3)
        dg.add_string("Beware... beware!!!")
        self.expect(self.conn, dg)

        # Update field with existing value
        dg = Datagram.create([75757], 100, DBSERVER_OBJECT_SET_FIELD_IF_EMPTY)
        dg.add_uint32(4)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("It's raining chocolate!")  # New value
        self.conn.send(dg)

        # Get update failure
        dg = Datagram.create([100], 75757, DBSERVER_OBJECT_SET_FIELD_IF_EMPTY_RESP)
        dg.add_uint32(4)  # Context
        dg.add_uint8(FAILURE)
        dg.add_uint16(setDb3)
        dg.add_string("Beware... beware!!!")
        self.expect(self.conn, dg)

        # Expect no broadcast
        self.expectNone(self.objects)

        # Select object
        dg = Datagram.create([75757], 100, DBSERVER_OBJECT_GET_FIELD)
        dg.add_uint32(3)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        self.conn.send(dg)

        # Ensure value not updated
        dg = Datagram.create([100], 75757, DBSERVER_OBJECT_GET_FIELD_RESP)
        dg.add_uint32(3)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(setDb3)
        dg.add_string("Beware... beware!!!")
        self.expect(self.conn, dg)

        # Cleanup
        self.deleteObject(100, doid)
        self.conn.send(Datagram.create_remove_channel(100))

    def test_set_if_equals(self):
        self.conn.flush()
        self.conn.send(Datagram.create_add_channel(70))

        # Create db object
        dg = Datagram.create([75757], 70, DBSERVER_CREATE_OBJECT)
        dg.add_uint32(1)  # Context
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(767676)
        self.conn.send(dg)
        dg = self.conn.recv_maybe()
        self.assertTrue(dg is not None, "Did not receive CreateObjectResp.")
        dgi = DatagramIterator(dg)
        dgi.seek(CREATE_DOID_OFFSET)
        doid = dgi.read_doid()

        # Update field with correct old value
        dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS)
        dg.add_uint32(2)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setRDB3)
        dg.add_uint32(767676)  # Old value
        dg.add_uint32(787878)  # New value
        self.conn.send(dg)

        # Get update response
        dg = Datagram.create([70], 75757, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS_RESP)
        dg.add_uint32(2)  # Context
        dg.add_uint8(SUCCESS)
        self.expect(self.conn, dg)

        # Expect SET_FIELD broadcast
        dg = Datagram.create([DATABASE_PREFIX|doid], 70, DBSERVER_OBJECT_SET_FIELD)
        dg.add_doid(doid)
        dg.add_uint16(setRDB3)
        dg.add_uint32(787878)
        self.expect(self.objects, dg)

        # Select object with new value
        dg = Datagram.create([75757], 70, DBSERVER_OBJECT_GET_ALL)
        dg.add_uint32(3)  # Context
        dg.add_doid(doid)
        self.conn.send(dg)

        # Receive updated value
        dg = Datagram.create([70], 75757, DBSERVER_OBJECT_GET_ALL_RESP)
        dg.add_uint32(3)  # Context
        dg.add_uint8(SUCCESS)
        dg.add_uint16(DistributedTestObject3)
        dg.add_uint16(1)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(787878)
        self.expect(self.conn, dg)

        # Update field with incorrect old value
        dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS)
        dg.add_uint32(4)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setRDB3)
        dg.add_uint32(767676)  # Old value (incorrect)
        dg.add_uint32(383838)  # New value
        self.conn.send(dg)

        # Get update failure
        dg = Datagram.create([70], 75757, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS_RESP)
        dg.add_uint32(4)  # Context
        dg.add_uint8(FAILURE)
        dg.add_uint16(setRDB3)
        dg.add_uint32(787878)  # Correct value
        self.expect(self.conn, dg)
        self.conn.flush()

        # Expect no broadcast
        self.expectNone(self.objects)

        # Compare an existing value to a non-existing value in the update
        dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS)
        dg.add_uint32(5)  # Context
        dg.add_doid(doid)
        dg.add_uint16(setDb3)
        dg.add_string("That was a TERRIBLE surprise!")  # Old value
        dg.add_string("Wish upon a twinkle...")  # New value
        self.conn.send(dg)

        # Get update failure (old value doesn't exist)
        dg = Datagram.create([70], 75757, DBSERVER_OBJECT_SET_FIELD_IF_EQUALS_RESP)
        dg.add_uint32(5)  # Context
        dg.add_uint8(FAILURE)
        self.expect(self.conn, dg)

        # Expect no broadcast
        self.expectNone(self.objects)

        # Update object with partially empty values
        dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS)
        dg.add_uint32(8)  # Context
        dg.add_doid(doid)
        dg.add_uint16(2)  # Field count
        dg.add_uint16(setRDB3)
        dg.add_uint32(787878)  #
Old value dg.add_uint32(919191) # New value dg.add_uint16(setDb3) dg.add_string("I can clear the sky in 10 seconds flat.") dg.add_string("Jesse!! We have to code!") self.conn.send(dg) # Get update failure dg = Datagram.create([70], 75757, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS_RESP) dg.add_uint32(8) # Context dg.add_uint8(FAILURE) dg.add_uint16(1) # Field count dg.add_uint16(setRDB3) dg.add_uint32(787878) self.expect(self.conn, dg) # Expect no broadcast self.expectNone(self.objects) # Set the empty value to an actual value dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELD) dg.add_doid(doid) dg.add_uint16(setDb3) dg.add_string("Daddy... why did you eat my fries? I bought them... and they were mine.") self.conn.send(dg) # Ignore set broadcast self.objects.flush() # Sanity check on set field dg = Datagram.create([75757], 70, DBSERVER_OBJECT_GET_ALL) dg.add_uint32(10) # Context dg.add_doid(doid) self.conn.send(dg) # Recieve updated value dg = Datagram.create([70], 75757, DBSERVER_OBJECT_GET_ALL_RESP) dg.add_uint32(10) # Context dg.add_uint8(SUCCESS) # Status dg.add_uint16(DistributedTestObject3) dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_string("Daddy... why did you eat my fries? I bought them... and they were mine.") dg.add_uint16(setRDB3) dg.add_uint32(787878) self.expect(self.conn, dg) # Update multiple with correct old values dg = Datagram.create([75757], 70, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS) dg.add_uint32(9) # Context dg.add_doid(doid) dg.add_uint16(2) # Field count dg.add_uint16(setRDB3) dg.add_uint32(787878) # Old value dg.add_uint32(919191) # New value dg.add_uint16(setDb3) dg.add_string("Daddy... why did you eat my fries? I bought them... and they were mine.") dg.add_string("Mind if I... take a look inside the barn?!") # New value self.conn.send(dg) # Recieve update success dg = Datagram.create([70], 75757, DBSERVER_OBJECT_SET_FIELDS_IF_EQUALS_RESP) dg.add_uint32(9) # Context dg.add_uint8(SUCCESS) self.expect(self.conn, dg) # Expect SET_FIELDS broadcast dg = Datagram.create([DATABASE_PREFIX|doid], 70, DBSERVER_OBJECT_SET_FIELDS) dg.add_doid(doid) dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_string("Mind if I... take a look inside the barn?!") dg.add_uint16(setRDB3) dg.add_uint32(919191) self.expect(self.objects, dg) # Select object with new value dg = Datagram.create([75757], 70, DBSERVER_OBJECT_GET_ALL) dg.add_uint32(10) # Context dg.add_doid(doid) self.conn.send(dg) # Recieve updated value dg = Datagram.create([70], 75757, DBSERVER_OBJECT_GET_ALL_RESP) dg.add_uint32(10) # Context dg.add_uint8(SUCCESS) # Resp status dg.add_uint16(DistributedTestObject3) # dclass dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_string("Mind if I... take a look inside the barn?!") dg.add_uint16(setRDB3) dg.add_uint32(919191) self.expect(self.conn, dg) # Cleanup self.deleteObject(70, doid) self.conn.send(Datagram.create_remove_channel(70)) def test_get(self): self.conn.flush() self.conn.send(Datagram.create_add_channel(80)) # Create object dg = Datagram.create([75757], 80, DBSERVER_CREATE_OBJECT) dg.add_uint32(1) # Context dg.add_uint16(DistributedTestObject3) dg.add_uint16(2) # Field count dg.add_uint16(setRDB3) dg.add_uint32(1337) dg.add_uint16(setDb3) dg.add_string("Uppercut! Downercut! Fireball! 
Bowl of Punch!") self.conn.send(dg) dg = self.conn.recv_maybe() self.assertTrue(dg is not None, "Did not receive CreateObjectResp.") dgi = DatagramIterator(dg) dgi.seek(CREATE_DOID_OFFSET) doid = dgi.read_doid() # Select the field dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(2) # Context dg.add_doid(doid) dg.add_uint16(setDb3) self.conn.send(dg) # Get value in reply dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(2) # Context dg.add_uint8(SUCCESS) dg.add_uint16(setDb3) dg.add_string("Uppercut! Downercut! Fireball! Bowl of Punch!") self.expect(self.conn, dg) # Select multiple fields dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELDS) dg.add_uint32(3) # Context dg.add_doid(doid) dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) self.conn.send(dg) # Get values in reply dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELDS_RESP) dg.add_uint32(3) # Context dg.add_uint8(SUCCESS) # Resp status dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_string("Uppercut! Downercut! Fireball! Bowl of Punch!") dg.add_uint16(setRDB3) dg.add_uint32(1337) self.expect(self.conn, dg) # Select invalid object dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(4) # Context dg.add_doid(doid+1) dg.add_uint16(setDb3) self.conn.send(dg) # Get failure dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(4) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) # Select invalid object, multiple fields dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELDS) dg.add_uint32(5) # Context dg.add_doid(doid+1) dg.add_uint16(2) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) self.conn.send(dg) # Get failure dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELDS_RESP) dg.add_uint32(5) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) # Clear one field dg = Datagram.create([75757], 80, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doid) dg.add_uint16(setDb3) self.conn.send(dg) # Select the cleared field dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(6) # Context dg.add_doid(doid) dg.add_uint16(setDb3) self.conn.send(dg) # Get failure dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(6) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) # Select the cleared field, with multiple message dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELDS) dg.add_uint32(7) # Context dg.add_doid(doid) dg.add_uint16(1) # Field count dg.add_uint16(setDb3) self.conn.send(dg) # Get success dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELDS_RESP) dg.add_uint32(7) # Context dg.add_uint8(SUCCESS) dg.add_uint16(0) # Field count self.expect(self.conn, dg) # Select a cleared and non-cleared field dg = Datagram.create([75757], 80, DBSERVER_OBJECT_GET_FIELDS) dg.add_uint32(8) # Context dg.add_doid(doid) dg.add_uint16(2) # Field count dg.add_uint16(setRDB3) dg.add_uint16(setDb3) self.conn.send(dg) # Get success dg = Datagram.create([80], 75757, DBSERVER_OBJECT_GET_FIELDS_RESP) dg.add_uint32(8) # Context dg.add_uint8(SUCCESS) dg.add_uint16(1) # Field count dg.add_uint16(setRDB3) dg.add_uint32(1337) self.expect(self.conn, dg) # Cleanup self.deleteObject(80, doid) self.conn.send(Datagram.create_remove_channel(80)) def test_delete_fields(self): self.conn.flush() self.conn.send(Datagram.create_add_channel(90)) # Create objects def generic_db_obj(): dg = Datagram.create([75757], 90, 
DBSERVER_CREATE_OBJECT) dg.add_uint32(1) # Context dg.add_uint16(DistributedTestObject5) dg.add_uint16(4) # Field count dg.add_uint16(setDb3) dg.add_string("Not enough vespian gas.") dg.add_uint16(setRDB3) dg.add_uint32(5337) dg.add_uint16(setRDbD5) dg.add_uint8(9) dg.add_uint16(setFoo) dg.add_uint16(123) self.conn.send(dg) dg = self.conn.recv_maybe() self.assertTrue(dg is not None, "Did not receive CreateObjectResp.") dgi = DatagramIterator(dg) dgi.seek(CREATE_DOID_OFFSET) return dgi.read_doid() doidA = generic_db_obj() # Clear a single field dg = Datagram.create([75757], 90, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doidA) dg.add_uint16(setDb3) self.conn.send(dg) self.expectNone(self.conn); # Expect DELETE_FIELD broadcast dg = Datagram.create([DATABASE_PREFIX|doidA], 90, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doidA) dg.add_uint16(setDb3) self.expect(self.objects, dg) # Get cleared field dg = Datagram.create([75757], 90, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(2) # Context dg.add_doid(doidA) dg.add_uint16(setDb3) self.conn.send(dg) # Cleared field shouldn't be returned dg = Datagram.create([90], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(2) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) # Clear a required field with a default dg = Datagram.create([75757], 90, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doidA) dg.add_uint16(setRDbD5) self.conn.send(dg) # Expect SET_FIELD broadcast dg = Datagram.create([DATABASE_PREFIX|doidA], 90, DBSERVER_OBJECT_SET_FIELD) dg.add_doid(doidA) dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) self.expect(self.objects, dg) # Get cleared fields dg = Datagram.create([75757], 90, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(3) # Context dg.add_doid(doidA) dg.add_uint16(setRDbD5) self.conn.send(dg) # Cleared required default field should be reset dg = Datagram.create([90], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(3) # Context dg.add_uint8(SUCCESS) dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) self.expect(self.conn, dg) #Field setRDbD5 should be default # Clearing multiple fields should behave as expected per field doidB = generic_db_obj() dg = Datagram.create([75757], 90, DBSERVER_OBJECT_DELETE_FIELDS) dg.add_doid(doidB) dg.add_uint16(4) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) dg.add_uint16(setRDbD5) dg.add_uint16(setFoo) self.conn.send(dg) # Expect DELETE_FIELDS... expected = [] dg = Datagram.create([DATABASE_PREFIX|doidB], 90, DBSERVER_OBJECT_DELETE_FIELDS) dg.add_doid(doidB) dg.add_uint16(3) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) dg.add_uint16(setFoo) expected.append(dg) # ... and SET_FIELDS broadcasts. 
dg = Datagram.create([DATABASE_PREFIX|doidB], 90, DBSERVER_OBJECT_SET_FIELD) dg.add_doid(doidB) dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) expected.append(dg) self.expectMany(self.objects, expected) # Get all object fields dg = Datagram.create([75757], 90, DBSERVER_OBJECT_GET_ALL) dg.add_uint32(5) # Context dg.add_doid(doidB) self.conn.send(dg) # Fields should be cleared dg = Datagram.create([90], 75757, DBSERVER_OBJECT_GET_ALL_RESP) dg.add_uint32(5) dg.add_uint8(SUCCESS) dg.add_uint16(DistributedTestObject5) dg.add_uint16(1) # Field count dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) self.expect(self.conn, dg) # Clear one field then attempt to clear multiple fields, some of which are already cleared doidC = generic_db_obj() dg = Datagram.create([75757], 90, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doidC) dg.add_uint16(setDb3) self.conn.send(dg) self.objects.flush() # Ignore broadcast dg = Datagram.create([75757], 90, DBSERVER_OBJECT_DELETE_FIELDS) dg.add_doid(doidC) dg.add_uint16(4) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) dg.add_uint16(setRDbD5) dg.add_uint16(setFoo) self.conn.send(dg) # Expect DELETE_FIELDS... expected = [] dg = Datagram.create([DATABASE_PREFIX|doidC], 90, DBSERVER_OBJECT_DELETE_FIELDS) dg.add_doid(doidC) dg.add_uint16(3) # Field count dg.add_uint16(setDb3) dg.add_uint16(setRDB3) dg.add_uint16(setFoo) expected.append(dg) # ... and SET_FIELDS broadcasts. dg = Datagram.create([DATABASE_PREFIX|doidC], 90, DBSERVER_OBJECT_SET_FIELD) dg.add_doid(doidC) dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) expected.append(dg) self.expectMany(self.objects, expected) # Get all object fields dg = Datagram.create([75757], 90, DBSERVER_OBJECT_GET_ALL) dg.add_uint32(6) # Context dg.add_doid(doidC) self.conn.send(dg) # Fields should be cleared dg = Datagram.create([90], 75757, DBSERVER_OBJECT_GET_ALL_RESP) dg.add_uint32(6) dg.add_uint8(SUCCESS) dg.add_uint16(DistributedTestObject5) dg.add_uint16(1) # Field count dg.add_uint16(setRDbD5) dg.add_uint8(setRDbD5DefaultValue) self.expect(self.conn, dg) # Cleanup self.deleteObject(90, doidA) self.deleteObject(90, doidB) self.deleteObject(90, doidC) self.conn.send(Datagram.create_remove_channel(90)) def test_wrong_fields(self): # This test tests what happens when you try to put (otherwise valid) # db fields on objects whose dclasses do not actually have those fields. self.conn.flush() self.conn.send(Datagram.create_add_channel(100)) # Create a (valid) object. 
dg = Datagram.create([75757], 100, DBSERVER_CREATE_OBJECT) dg.add_uint32(1) # Context dg.add_uint16(DistributedTestObject3) dg.add_uint16(1) # Field count dg.add_uint16(setRDB3) dg.add_uint32(1337) self.conn.send(dg) dg = self.conn.recv_maybe() self.assertTrue(dg is not None, "Did not receive CreateObjectResp.") dgi = DatagramIterator(dg) dgi.seek(CREATE_DOID_OFFSET) doid = dgi.read_doid() ### TEST SETTING NON-BELONGING FIELDS ### # Set an invalid field on our object: dg = Datagram.create([75757], 100, DBSERVER_OBJECT_SET_FIELD) dg.add_doid(doid) dg.add_uint16(setFoo) dg.add_uint16(32112) self.conn.send(dg) self.objects.flush() # Select the field dg = Datagram.create([75757], 100, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(2) # Context dg.add_doid(doid) dg.add_uint16(setFoo) self.conn.send(dg) # Get value in reply dg = Datagram.create([100], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(2) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) ### TEST REQUESTING NON-BELONGING FIELDS ### # Select two fields, one of which does not belong: dg = Datagram.create([75757], 100, DBSERVER_OBJECT_GET_FIELDS) dg.add_uint32(3) # Context dg.add_doid(doid) dg.add_uint16(2) # Field count dg.add_uint16(setRDB3) dg.add_uint16(setRDbD5) self.conn.send(dg) # Get value in reply dg = Datagram.create([100], 75757, DBSERVER_OBJECT_GET_FIELDS_RESP) dg.add_uint32(3) # Context dg.add_uint8(SUCCESS) dg.add_uint16(1) # Field count dg.add_uint16(setRDB3) dg.add_uint32(1337) self.expect(self.conn, dg) ### TEST DELETE ON NON-BELONGING FIELDS ### # Delete a field that does not belong, but has a default: dg = Datagram.create([75757], 100, DBSERVER_OBJECT_DELETE_FIELD) dg.add_doid(doid) dg.add_uint16(setRDbD5) self.conn.send(dg) self.objects.flush() # Select the field dg = Datagram.create([75757], 100, DBSERVER_OBJECT_GET_FIELD) dg.add_uint32(4) # Context dg.add_doid(doid) dg.add_uint16(setRDbD5) self.conn.send(dg) # Get value in reply dg = Datagram.create([100], 75757, DBSERVER_OBJECT_GET_FIELD_RESP) dg.add_uint32(4) # Context dg.add_uint8(FAILURE) self.expect(self.conn, dg) ### TEST CREATING AN OBJECT WITH NON-BELONGING FIELDS ### # Create a DTO3 with field setFoo (which doesn't belong): dg = Datagram.create([75757], 100, DBSERVER_CREATE_OBJECT) dg.add_uint32(5) # Context dg.add_uint16(DistributedTestObject3) dg.add_uint16(2) # Field count dg.add_uint16(setRDB3) dg.add_uint32(1337) dg.add_uint16(setFoo) dg.add_uint16(32112) self.conn.send(dg) # The database must refuse to create our object: dg = Datagram.create([100], 75757, DBSERVER_CREATE_OBJECT_RESP) dg.add_uint32(5) # Context dg.add_doid(INVALID_DO_ID) self.expect(self.conn, dg) # Cleanup self.deleteObject(100, doid) self.conn.send(Datagram.create_remove_channel(100))
35.912266
112
0.624321
5,812
44,208
4.564694
0.057983
0.089898
0.082925
0.05066
0.879947
0.866114
0.848813
0.824124
0.792763
0.769619
0
0.069005
0.27391
44,208
1,230
113
35.941463
0.7575
0.157257
0
0.845556
0
0
0.040202
0
0
0
0
0
0.041111
1
0.016667
false
0
0.002222
0.001111
0.023333
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
9a9984588f558a4fa3d73533820555ea2106dd4c
17,614
py
Python
tests/unit/controllers/test_deploy.py
senstb/aws-elastic-beanstalk-cli
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
[ "Apache-2.0" ]
null
null
null
tests/unit/controllers/test_deploy.py
senstb/aws-elastic-beanstalk-cli
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
[ "Apache-2.0" ]
null
null
null
tests/unit/controllers/test_deploy.py
senstb/aws-elastic-beanstalk-cli
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil

import mock
import unittest

from ebcli.controllers import deploy
from ebcli.core.ebcore import EB
from ebcli.core import fileoperations
from ebcli.objects.platform import PlatformVersion


class TestDeploy(unittest.TestCase):
    platform = PlatformVersion(
        'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5'
    )

    def setUp(self):
        self.root_dir = os.getcwd()
        if not os.path.exists('testDir'):
            os.mkdir('testDir')
        os.chdir('testDir')

        fileoperations.create_config_file(
            'my-application',
            'us-west-2',
            self.platform.name
        )

    def tearDown(self):
        os.chdir(self.root_dir)
        shutil.rmtree('testDir')


class TestErrorConditions(TestDeploy):
    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    def test_deploy__version_and_message_specified_together(self, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--version', 'my-app-version', '--message', 'my-message'])
        app.setup()

        with self.assertRaises(deploy.InvalidOptionsError) as context_manager:
            app.run()

        self.assertEqual(
            'You cannot use the "--version" option with either the "--message" or "--label" option.',
            str(context_manager.exception)
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    def test_deploy__version_and_label_specified_together(self, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--version', 'my-app-version', '--label', 'my-label'])
        app.setup()

        with self.assertRaises(deploy.InvalidOptionsError) as context_manager:
            app.run()

        self.assertEqual(
            'You cannot use the "--version" option with either the "--message" or "--label" option.',
            str(context_manager.exception)
        )


class TestDeployNormal(TestDeploy):
    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name=None, process_app_versions=False,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__nohang_sets_timeout_to_zero(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--nohang', '--timeout', '5'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name=None, process_app_versions=False,
            source=None, staged=False, timeout=0
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__with_version_label(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--version', 'my-version'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', 'my-version', None, None,
            group_name=None, process_app_versions=False,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__with_label_and_message(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--label', 'my-label', '--message', 'This is my message'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, 'my-label', 'This is my message',
            group_name=None, process_app_versions=False,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__process_app_version_because_env_yaml_exists(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        open('env.yaml', 'w').close()
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--label', 'my-label', '--message', 'This is my message'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, 'my-label', 'This is my message',
            group_name=None, process_app_versions=True,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__process_app_version_because_process_flag_is_specified(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--label', 'my-label', '--message', 'This is my message', '--process'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, 'my-label', 'This is my message',
            group_name=None, process_app_versions=True,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__pass_group_name(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--env-group-suffix', 'group-name'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name='group-name', process_app_versions=False,
            source=None, staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__specify_codecommit_source(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--source', 'codecommit/my-repository/my-branch'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name=None, process_app_versions=False,
            source='codecommit/my-repository/my-branch', staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__specify_codecommit_source_with_forward_slash_in_branch_name(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--source', 'codecommit/my-repository/my-branch/feature'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name=None, process_app_versions=False,
            source='codecommit/my-repository/my-branch/feature', staged=False, timeout=None
        )

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.DeployController.get_env_name')
    @mock.patch('ebcli.controllers.deploy.deployops.deploy')
    def test_deploy__indicate_staged_changes_must_be_used(self, deploy_mock, get_env_name_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        get_env_name_mock.return_value = 'environment-1'

        app = EB(argv=['deploy', '--process', '--staged'])
        app.setup()
        app.run()

        deploy_mock.assert_called_with(
            'my-application', 'environment-1', None, None, None,
            group_name=None, process_app_versions=True,
            source=None, staged=True, timeout=None
        )


class TestMultipleAppDeploy(unittest.TestCase):
    platform = PlatformVersion(
        'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5'
    )

    def setUp(self):
        self.root_dir = os.getcwd()
        if not os.path.exists('testDir'):
            os.mkdir('testDir')
        os.chdir('testDir')

    def tearDown(self):
        os.chdir(self.root_dir)
        shutil.rmtree('testDir')

    @mock.patch('ebcli.controllers.deploy.io.log_error')
    def test_multiple_modules__none_of_the_specified_modules_actually_exists(self, log_error_mock):
        app = EB(argv=['deploy', '--modules', 'module-1', 'module-2'])
        app.setup()
        app.run()

        log_error_mock.assert_has_calls(
            [
                mock.call('The directory module-1 does not exist.'),
                mock.call('The directory module-2 does not exist.')
            ]
        )

    @mock.patch('ebcli.controllers.deploy.io.echo')
    def test_multiple_modules__one_or_more_of_the_specified_modules_lacks_an_env_yaml(self, echo_mock):
        os.mkdir('module-1')
        os.mkdir('module-2')
        os.mkdir('module-3')
        open(os.path.join('module-1', 'env.yaml'), 'w').close()

        app = EB(argv=['deploy', '--modules', 'module-1', 'module-2', 'module-3'])
        app.setup()
        app.run()

        echo_mock.assert_called_once_with(
            'All specified modules require an env.yaml file.\n'
            'The following modules are missing this file: module-2, module-3'
        )

    def create_config_file_in(self, path):
        original_dir = os.getcwd()
        os.chdir(path)

        fileoperations.create_config_file(
            'my-application',
            'us-west-2',
            self.platform.name
        )

        os.chdir(original_dir)

    @mock.patch('ebcli.controllers.deploy.DeployController.get_app_name')
    @mock.patch('ebcli.controllers.deploy.io.echo')
    @mock.patch('ebcli.controllers.deploy.commonops.create_app_version')
    @mock.patch('ebcli.controllers.deploy.composeops.compose_no_events')
    @mock.patch('ebcli.controllers.deploy.commonops.wait_for_compose_events')
    def test_multiple_modules(self, wait_for_compose_events_mock, compose_no_events_mock, create_app_version_mock, echo_mock, get_app_name_mock):
        get_app_name_mock.return_value = 'my-application'
        create_app_version_mock.side_effect = ['app-version-1', 'app-version-2', 'app-version-3']
        compose_no_events_mock.return_value = 'request-id'

        os.mkdir('module-1')
        os.mkdir('module-2')
        os.mkdir('module-3')
        with open(os.path.join('module-1', 'env.yaml'), 'w') as file:
            file.write("""AWSConfigurationTemplateVersion: 1.1.0.0
EnvironmentName: front+
""")
        with open(os.path.join('module-2', 'env.yaml'), 'w') as file:
            file.write("""AWSConfigurationTemplateVersion: 1.1.0.0
EnvironmentName: back+
""")
        with open(os.path.join('module-3', 'env.yaml'), 'w') as file:
            file.write("""AWSConfigurationTemplateVersion: 1.1.0.0
""")
        self.create_config_file_in('module-1')
        self.create_config_file_in('module-2')
        self.create_config_file_in('module-3')

        app = EB(argv=['deploy', '--modules', 'module-1', 'module-2', 'module-3', '--env-group-suffix', 'group-name'])
        app.setup()
        app.run()

        echo_mock.assert_has_calls(
            [
                mock.call('--- Creating application version for module: module-1 ---'),
                mock.call('--- Creating application version for module: module-2 ---'),
                mock.call('--- Creating application version for module: module-3 ---'),
                mock.call('No environment name was specified in env.yaml for module module-3. Unable to deploy.')
            ]
        )
        compose_no_events_mock.assert_called_once_with(
            'my-application',
            ['app-version-1', 'app-version-2'],
            group_name='group-name'
        )
        wait_for_compose_events_mock.assert_called_once_with(
            'request-id',
            'my-application',
            ['front-group-name', 'back-group-name'],
            None
        )
30.633043
113
0.573408
1,920
17,614
5.008854
0.113021
0.060726
0.059686
0.106582
0.817719
0.804513
0.764272
0.753977
0.738692
0.72351
0
0.007987
0.317588
17,614
574
114
30.686411
0.792097
0.03043
0
0.708502
0
0.004049
0.279578
0.138822
0
0
0
0
0.038462
1
0.040486
false
0.002024
0.016194
0
0.068826
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
9aa381c35795031cb6794afe90c768ae724268ad
20,788
py
Python
Config-Scripts/service.autoexec/resources/en/remote-control_en.py
joy-it/MultimediaCase-for-Raspberry-Pi
909522521b2d53175b8460d9ef5bb1a51cfaec44
[ "MIT" ]
9
2020-12-29T22:17:13.000Z
2021-12-14T09:12:47.000Z
Config-Scripts/service.autoexec/resources/en/remote-control_en.py
joy-it/MultimediaCase-for-Raspberry-Pi
909522521b2d53175b8460d9ef5bb1a51cfaec44
[ "MIT" ]
null
null
null
Config-Scripts/service.autoexec/resources/en/remote-control_en.py
joy-it/MultimediaCase-for-Raspberry-Pi
909522521b2d53175b8460d9ef5bb1a51cfaec44
[ "MIT" ]
2
2021-12-07T15:55:35.000Z
2022-03-21T11:43:43.000Z
import xbmcaddon
import xbmcgui
import xbmc  # needed for xbmc.Monitor() below; was missing from the imports
import subprocess
import time
import os

win = xbmcgui.Window()
width = win.getWidth()
monitor = xbmc.Monitor()
Skipall = False
Cancel = False
y = 100

while not monitor.abortRequested():
    # reset old configuration
    remotefile = open("/storage/.kodi/temp/my_custom_remote", "w+")
    remotefile.write("")
    remotefile.close()
    logfile = open("/storage/IRlog.txt", "w+")
    logfile.close()
    process = subprocess.Popen("ir-keytable -p all -v", shell=True)
    time.sleep(1)
    process.terminate()
    remotefile = open("/storage/.kodi/temp/my_custom_remote", "a")

    #################### Protocol #############################
    win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press any button on your remote control several times'))
    win.show()
    y += 20
    process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
    pDialog = xbmcgui.DialogProgressBG()
    pDialog.create('Add Key', 'Press any Button on your IR-Remote several times')
    logfile = open("/storage/IRlog.txt", "r")
    log = logfile.readline()
    while log == "" and not monitor.abortRequested():
        log = logfile.readline()
    pDialog.close()
    process.terminate()
    lines = logfile.readlines()
    i = 1
    logfile.close()
    while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
        log = lines[i]
        i += 1
    protocol = (log[log.find('(')+1 : log.find(')')])  # get protocol from log
    if protocol == "necx":
        protocol = "nec"
    elif protocol == "sony12":
        protocol = "sony"
    remotefile.write("# table justboom, type: " + protocol + '\n')  # add header to remotefile
    # reset logfile
    logfile = open("/storage/IRlog.txt", "w")
    logfile.write("")
    logfile.close()

    #################### KEY_OK #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button OK", ["Ok", "Skip this Key", "Skip all"])
        #cID = win.getControl()
        #win.removeControl(cID)
        #win.setLabel('Status')
        #xbmcgui.ControlFadeLabel.reset(ctrl)
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button OK on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: OK on your IR-Remote several times')
            pDialog.update(9, message='Press Button: OK on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "" and not monitor.abortRequested():
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            protocol = (log[log.find('(')+1 : log.find(')')])  # get protocol from log
            if protocol == "necx":
                protocol = "nec"
            elif protocol == "sony12":
                protocol = "sony"
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            #print (code)
            #print(protocol)
            remotefile.write(code + " KEY_OK" + '\n')  # add KEY_OK to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_EXIT #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button EXIT", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button EXIT on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: EXIT on your IR-Remote several times')
            pDialog.update(18, message='Press Button: EXIT on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            #print (code)
            remotefile.write(code + " KEY_EXIT" + '\n')  # add KEY_EXIT to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_LEFT #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button LEFT", ["Ok", "Skip this Key", "Skip all"])
        ctrl = win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        ctrl = win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button LEFT (<) on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: LEFT on your IR-Remote several times')
            pDialog.update(27, message='Press Button: LEFT on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            #print (code)
            remotefile.write(code + " KEY_LEFT" + '\n')  # add KEY_LEFT to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_RIGHT #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button RIGHT", ["Ok", "Skip this Key", "Skip all"])
        ctrl = win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        ctrl = win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button RIGHT (>) on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: RIGHT on your IR-Remote several times')
            pDialog.update(36, message='Press Button: RIGHT on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            #print (code)
            remotefile.write(code + " KEY_RIGHT" + '\n')  # add KEY_RIGHT to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_UP #############################
    if Skipall == False:
        pDialog.update(45, message='Press Button: UP on your IR-Remote several times')
        ret = xbmcgui.Dialog().select("Configure Button UP", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button UP (^) on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: UP on your IR-Remote several times')
            pDialog.update(45, message='Press Button: UP on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            #print (code)
            remotefile.write(code + " KEY_UP" + '\n')  # add KEY_UP to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_DOWN #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button DOWN", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button DOWN (v) on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: DOWN on your IR-Remote several times')
            pDialog.update(54, message='Press Button: DOWN on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            print (code)
            remotefile.write(code + " KEY_DOWN" + '\n')  # add KEY_DOWN to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()
            process.terminate()

    #################### KEY_VOLUMEDOWN #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button VOLUMEDOWN", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button VOLUMEDOWN on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: VOLUMEDOWN on your IR-Remote several times')
            pDialog.update(63, message='Press Button: VOLUMEDOWN on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Press Button: Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            print (code)
            remotefile.write(code + " KEY_VOLUMEDOWN" + '\n')  # add KEY_VOLUMEDOWN to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_VOLUMEUP #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button VOLUMEUP", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button VOLUMEUP on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: VOLUMEUP on your IR-Remote several times')
            pDialog.update(72, message='Press Button: VOLUMEUP on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            print (code)
            remotefile.write(code + " KEY_VOLUMEUP" + '\n')  # add KEY_VOLUMEUP to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_MUTE #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button MUTE", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button MUTE on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: MUTE on your IR-Remote several times')
            pDialog.update(81, message='Press Button: MUTE on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            print (code)
            remotefile.write(code + " KEY_MUTE" + '\n')  # add KEY_MUTE to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################### KEY_PLAYPAUSE #############################
    if Skipall == False:
        ret = xbmcgui.Dialog().select("Configure Button PLAY/PAUSE", ["Ok", "Skip this Key", "Skip all"])
        win.addControl(xbmcgui.ControlLabel(x=187, y=y-20, width=700, height=25, label='-------------------------------------------------------------------------------------------------------'))
        win.addControl(xbmcgui.ControlLabel(x=190, y=y, width=700, height=25, label='Press button PLAY/PAUSE on your remote control several times'))
        win.show()
        y += 20
        if ret == 2 or ret == -1:
            Skipall = True
        if ret == -1:
            Cancel = True
        if ret == 0:
            process = subprocess.Popen("ir-keytable -t >> /storage/IRlog.txt", shell=True)
            pDialog = xbmcgui.DialogProgressBG()
            pDialog.create('Add Key', 'Press Button: PLAY/PAUSE on your IR-Remote several times')
            pDialog.update(90, message='Press Button: PLAY/PAUSE on your IR-Remote several times')
            logfile = open("/storage/IRlog.txt", "r")
            log = logfile.readline()
            while log == "":
                log = logfile.readline()
            pDialog.close()
            process.terminate()
            #xbmcgui.Dialog().ok(addonname, "Key detected")
            #time.sleep(1)
            lines = logfile.readlines()
            i = 1
            logfile.close()
            while log.find("scancode") == -1 or log.find("protocol") == -1 or log.find("repeat") != -1 or log.find("toggle") != -1:  # search for the correct line
                log = lines[i]
                i += 1
            code = (log[log.find('= ')+2 : log.find('\n')])  # get scancode from log
            print (code)
            remotefile.write(code + " KEY_PLAYPAUSE" + '\n')  # add KEY_PLAYPAUSE to remotefile
            # reset logfile
            logfile = open("/storage/IRlog.txt", "w")
            logfile.write("")
            logfile.close()

    #################
    remotefile.close()
    os.system("rm /storage/IRlog.txt")
    if Cancel == False:
        os.system("rm /storage/.config/rc_keymaps/my_custom_remote")
        os.system("cp /storage/.kodi/temp/my_custom_remote /storage/.config/rc_keymaps/my_custom_remote")
        #os.system("rm /storage/.kodi/temp/my_custom_remote")
        conffile = open("/storage/.config/rc_maps.cfg", "w")
        conffile.write("* * my_custom_remote")
        conffile.close()
        xbmcgui.Dialog().ok("Configuration of remote control", "Your remote has been successfully configured!")
        os.system("kodi-send --action='RunScript(\"/storage/.kodi/addons/service.autoexec/resources/en/end_en.py\")'")
    else:
        conffile = open("/storage/.config/rc_maps.cfg", "w")
        conffile.write("")
        conffile.close()
        remotefile = open("/storage/.config/rc_keymaps/my_custom_remote", "w")
        remotefile.write("")
        remotefile.close()
        os.system("rm /storage/.kodi/temp/my_custom_remote")
        xbmcgui.Dialog().ok("Configuration of remote control", "Cancelled! Configuration aborted!")
        os.system("kodi-send --action='RunScript(\"/storage/.kodi/addons/service.autoexec/resources/en/end_en.py\")'")
    break
37.121429
218
0.578363
2,594
20,788
4.613724
0.069005
0.039773
0.043867
0.027574
0.918199
0.910762
0.90015
0.888285
0.864973
0.779495
0
0.021804
0.192515
20,788
559
219
37.187835
0.691171
0.092746
0
0.798005
0
0
0.290087
0.07702
0
0
0
0
0
1
0
false
0
0.012469
0
0.012469
0.012469
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
b10078d301b0630b8d6de13081f345c2195285d9
4,117
py
Python
wd/wdapp/migrations/0005_auto_20181127_1523.py
LiYangCom1994/companylair
e8d085e3357b08f178b089c4a52e5dc2f9eb103f
[ "MIT" ]
null
null
null
wd/wdapp/migrations/0005_auto_20181127_1523.py
LiYangCom1994/companylair
e8d085e3357b08f178b089c4a52e5dc2f9eb103f
[ "MIT" ]
null
null
null
wd/wdapp/migrations/0005_auto_20181127_1523.py
LiYangCom1994/companylair
e8d085e3357b08f178b089c4a52e5dc2f9eb103f
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-11-27 15:23
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('wdapp', '0004_auto_20181127_0500'),
    ]

    operations = [
        migrations.AddField(
            model_name='business',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='businessorder',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='cargo',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='cargomanifest',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='company',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='driver',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='driverexpense',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='stop',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='trip',
            name='id',
            field=models.AutoField(auto_created=True, default=0, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='business',
            name='business_id',
            field=models.CharField(max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='businessorder',
            name='order_id',
            field=models.CharField(max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='cargo',
            name='cargo_id',
            field=models.CharField(blank=True, max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='cargomanifest',
            name='manifest_id',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='company',
            name='company_id',
            field=models.CharField(max_length=9, null=True),
        ),
        migrations.AlterField(
            model_name='driver',
            name='driver_id',
            field=models.CharField(max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='driverexpense',
            name='expenses_id',
            field=models.CharField(max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='stop',
            name='stop_id',
            field=models.CharField(blank=True, max_length=25, null=True),
        ),
        migrations.AlterField(
            model_name='trip',
            name='trip_id',
            field=models.IntegerField(null=True),
        ),
    ]
35.8
119
0.575176
412
4,117
5.558252
0.165049
0.070742
0.102183
0.106114
0.79345
0.79345
0.722271
0.722271
0.722271
0.722271
0
0.018842
0.303862
4,117
114
120
36.114035
0.780181
0.016031
0
0.831776
1
0
0.079051
0.005682
0
0
0
0
0
1
0
false
0
0.018692
0
0.046729
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
b107db55ba24e49aca969d4e57f49f4f7306a949
2,434
py
Python
schedule/migrations/0009_auto_20200609_1648.py
Tanmoy-Sarkar/Varsity-Management-System
11bf506d78cf15b11553bd2a971efef9d8272225
[ "MIT" ]
null
null
null
schedule/migrations/0009_auto_20200609_1648.py
Tanmoy-Sarkar/Varsity-Management-System
11bf506d78cf15b11553bd2a971efef9d8272225
[ "MIT" ]
1
2020-06-14T16:51:52.000Z
2020-06-14T16:51:52.000Z
schedule/migrations/0009_auto_20200609_1648.py
Tanmoy-Sarkar/Varsity-Management-System
11bf506d78cf15b11553bd2a971efef9d8272225
[ "MIT" ]
null
null
null
# Generated by Django 3.0.6 on 2020-06-09 10:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('schedule', '0008_auto_20200609_1610'),
    ]

    operations = [
        migrations.AlterField(
            model_name='routine',
            name='day',
            field=models.CharField(choices=[('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D'), ('E', 'E')], max_length=1),
        ),
        migrations.AlterField(
            model_name='routine',
            name='eigth_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='fifth_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='first_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='fourth_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='ninth_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='second_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='seventh_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='sixth_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='routine',
            name='third_period',
            field=models.CharField(blank=True, default='-', max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='schedule',
            name='day',
            field=models.CharField(choices=[('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D'), ('E', 'E')], max_length=1),
        ),
    ]
35.275362
119
0.52917
239
2,434
5.246862
0.238494
0.175439
0.219298
0.254386
0.834131
0.834131
0.773525
0.773525
0.773525
0.773525
0
0.030053
0.302794
2,434
68
120
35.794118
0.708898
0.018488
0
0.725806
1
0
0.111018
0.009636
0
0
0
0
0
1
0
false
0
0.016129
0
0.064516
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
b11e638896199f0f9fab84b4d45478bd93dd690e
155
py
Python
deprovisioning/settings.py
UniversitaDellaCalabria/IdM
0c80bc1a192e8f3075c941ca2d89773bca25e892
[ "Apache-2.0" ]
2
2020-03-02T23:03:16.000Z
2020-07-13T06:04:04.000Z
deprovisioning/settings.py
UniversitaDellaCalabria/IdM
0c80bc1a192e8f3075c941ca2d89773bca25e892
[ "Apache-2.0" ]
7
2020-01-28T15:57:35.000Z
2020-07-01T15:16:46.000Z
deprovisioning/settings.py
UniversitaDellaCalabria/IdM
0c80bc1a192e8f3075c941ca2d89773bca25e892
[ "Apache-2.0" ]
null
null
null
EMPLOYEE_CODE_PREFIX = 'urn:schac:personalUniqueCode:it:unical.it:dipendente:'
STUDENT_CODE_PREFIX = 'urn:schac:personalUniqueCode:it:unical.it:studente:'
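These constants follow the SCHAC personalUniqueCode URN scheme, so a consumer builds the full identifier by appending the person's code to the matching prefix. A minimal sketch (build_unique_code and the sample code '12345' are hypothetical, not part of this settings file):

EMPLOYEE_CODE_PREFIX = 'urn:schac:personalUniqueCode:it:unical.it:dipendente:'

def build_unique_code(prefix, personal_code):
    # Append the personal code to the URN prefix.
    return '{}{}'.format(prefix, personal_code)

print(build_unique_code(EMPLOYEE_CODE_PREFIX, '12345'))
# urn:schac:personalUniqueCode:it:unical.it:dipendente:12345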
51.666667
78
0.832258
20
155
6.25
0.55
0.16
0.208
0.288
0.736
0.736
0.736
0.736
0
0
0
0
0.03871
155
2
79
77.5
0.838926
0
0
0
0
0
0.670968
0.670968
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
1
1
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
b121af9f5829ac48c0f692bb1b4d3f950281703e
163
py
Python
ngshare/__init__.py
yumoL/ngshare
c15a52b377144a60751e6a3b6614b9feb651d4f9
[ "BSD-3-Clause" ]
6
2020-06-12T18:49:49.000Z
2021-05-06T08:41:30.000Z
ngshare/__init__.py
yumoL/ngshare
c15a52b377144a60751e6a3b6614b9feb651d4f9
[ "BSD-3-Clause" ]
86
2020-02-22T08:51:27.000Z
2020-06-05T21:43:14.000Z
ngshare/__init__.py
yumoL/ngshare
c15a52b377144a60751e6a3b6614b9feb651d4f9
[ "BSD-3-Clause" ]
10
2020-06-18T10:58:29.000Z
2022-01-03T13:24:44.000Z
try:
    from .version import __version__
    from . import ngshare
except ImportError:  # pragma: no cover
    from version import __version__
    import ngshare
23.285714
39
0.730061
19
163
5.842105
0.526316
0.351351
0.306306
0.432432
0
0
0
0
0
0
0
0
0.233129
163
6
40
27.166667
0.888
0.09816
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.833333
0
0.833333
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
b12bf1e300a8d5e0c5974c6d1f1a305dace7d6d6
44
py
Python
instance/config.py
changawa-antony/newsapp
c5f2240686fbdaa4e313d8f589a2e633d388d59c
[ "MIT" ]
null
null
null
instance/config.py
changawa-antony/newsapp
c5f2240686fbdaa4e313d8f589a2e633d388d59c
[ "MIT" ]
null
null
null
instance/config.py
changawa-antony/newsapp
c5f2240686fbdaa4e313d8f589a2e633d388d59c
[ "MIT" ]
null
null
null
api_key = 'efe747fcab0c4a039abf0eb00b2b68ec'
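A Flask instance/config.py like this keeps the key out of the main package only as long as the instance folder stays untracked; reading the key from the environment avoids committing it at all. A minimal sketch, assuming a NEWS_API_KEY environment variable (the variable name is illustrative):

import os

# None when the variable is unset, so a missing key fails where it is first used.
api_key = os.environ.get('NEWS_API_KEY')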
44
44
0.886364
3
44
12.666667
1
0
0
0
0
0
0
0
0
0
0
0.333333
0.045455
44
1
44
44
0.571429
0
0
0
0
0
0.711111
0.711111
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
495281721f88b0b21a1a32b2c1d3a300e5fcf751
155
py
Python
django-rgd-imagery/rgd_imagery/admin/__init__.py
ResonantGeoData/ResonantGeoData
72b3d4085cc5700d0ad5556f31b7eb96ed2d3b8a
[ "Apache-2.0" ]
40
2020-05-07T17:15:26.000Z
2022-02-27T14:45:04.000Z
django-rgd-imagery/rgd_imagery/admin/__init__.py
ResonantGeoData/ResonantGeoData
72b3d4085cc5700d0ad5556f31b7eb96ed2d3b8a
[ "Apache-2.0" ]
408
2020-05-07T15:10:35.000Z
2022-03-30T03:08:47.000Z
django-rgd-imagery/rgd_imagery/admin/__init__.py
ResonantGeoData/ResonantGeoData
72b3d4085cc5700d0ad5556f31b7eb96ed2d3b8a
[ "Apache-2.0" ]
3
2021-04-12T20:16:22.000Z
2021-06-22T14:03:46.000Z
from .annotation import *  # noqa
from .base import *  # noqa
from .kwcoco import *  # noqa
from .processed import *  # noqa
from .raster import *  # noqa
25.833333
33
0.677419
20
155
5.25
0.4
0.47619
0.533333
0
0
0
0
0
0
0
0
0
0.225806
155
5
34
31
0.875
0.154839
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
49814c49dd086fd495842391e45d573ca4bb87e2
152
py
Python
HotGauge/HotGauge/thermal/utils.py
TuftsCompArchLab/HotGauge
4e99e5c7aca0d4a481af331631dc236b172e63d3
[ "BSD-3-Clause" ]
null
null
null
HotGauge/HotGauge/thermal/utils.py
TuftsCompArchLab/HotGauge
4e99e5c7aca0d4a481af331631dc236b172e63d3
[ "BSD-3-Clause" ]
null
null
null
HotGauge/HotGauge/thermal/utils.py
TuftsCompArchLab/HotGauge
4e99e5c7aca0d4a481af331631dc236b172e63d3
[ "BSD-3-Clause" ]
null
null
null
KELVIN_CELCIUS_OFFSET = 273.15


def K_to_C(value):
    return value - KELVIN_CELCIUS_OFFSET


def C_to_K(value):
    return value + KELVIN_CELCIUS_OFFSET
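The two helpers are exact inverses around the 273.15 offset, so round-tripping a value returns it unchanged. A quick usage sketch:

# Freezing point of water, boiling point, and a lossless round trip.
print(K_to_C(273.15))        # 0.0
print(C_to_K(100.0))         # 373.15
print(K_to_C(C_to_K(25.0)))  # 25.0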
21.714286
40
0.776316
25
152
4.32
0.44
0.361111
0.527778
0.407407
0.648148
0.648148
0
0
0
0
0
0.039063
0.157895
152
6
41
25.333333
0.804688
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.4
0.8
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
49a054031a661ae45ad78e349b02a9a4c7dfe3b6
7,432
py
Python
python/display_boundaries/sample_map_server/utils.py
Niccodedev/python_prototype
726f957f88cd7e84281220bdafb4e4b47b8815f6
[ "Apache-2.0" ]
32
2017-06-06T23:47:18.000Z
2022-03-10T02:54:57.000Z
python/display_boundaries/sample_map_server/utils.py
Niccodedev/python_prototype
726f957f88cd7e84281220bdafb4e4b47b8815f6
[ "Apache-2.0" ]
8
2017-06-28T04:32:19.000Z
2020-03-06T05:10:13.000Z
python/display_boundaries/sample_map_server/utils.py
Niccodedev/python_prototype
726f957f88cd7e84281220bdafb4e4b47b8815f6
[ "Apache-2.0" ]
13
2017-06-28T23:42:48.000Z
2021-11-21T06:45:43.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from psycopg2.extensions import AsIs


def get_min_max(data_table, boundary_table, stat_field, num_classes, min_val, map_type, pg_cur, settings):
    # query to get min and max values (filter small populations that overly influence the map visualisation)
    try:
        # if map_type == "values":
        sql = "SELECT MIN(%s) AS min, MAX(%s) AS max FROM %s AS tab " \
              "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
              "WHERE %s > 0 " \
              "AND bdy.population > {1}" \
            .format(settings['region_id_field'], float(min_val))

        sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                          AsIs(boundary_table), AsIs(stat_field)))
        pg_cur.execute(sql_string)
        row = pg_cur.fetchone()
    except Exception as ex:
        print("{0} - {1} Failed: {2}".format(data_table, stat_field, ex))
        return list()

    output_dict = {
        "min": row["min"],
        "max": row["max"]
    }

    return output_dict


def get_kmeans_bins(data_table, boundary_table, stat_field, num_classes, min_val, map_type, pg_cur, settings):
    # query to get min and max values (filter small populations that overly influence the map visualisation)
    try:
        if map_type == "values":
            sql = "WITH sub AS (" \
                  "WITH points AS (" \
                  "SELECT %s as val, ST_MakePoint(%s, 0) AS pnt " \
                  "FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0.0 " \
                  "AND bdy.population > {1}" \
                  ") " \
                  "SELECT val, ST_ClusterKMeans(pnt, %s) OVER () AS cluster_id FROM points" \
                  ") " \
                  "SELECT MAX(val) AS val FROM sub GROUP BY cluster_id ORDER BY val" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                              AsIs(boundary_table), AsIs(stat_field), AsIs(num_classes)))
        else:  # map_type == "percent"
            sql = "WITH sub AS (" \
                  "WITH points AS (" \
                  "SELECT %s as val, ST_MakePoint(%s, 0) AS pnt " \
                  "FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0.0 AND %s < 100.0 " \
                  "AND bdy.population > {1}" \
                  ") " \
                  "SELECT val, ST_ClusterKMeans(pnt, %s) OVER () AS cluster_id FROM points" \
                  ") " \
                  "SELECT MAX(val) AS val FROM sub GROUP BY cluster_id ORDER BY val" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                              AsIs(boundary_table), AsIs(stat_field), AsIs(stat_field),
                                              AsIs(num_classes)))

        pg_cur.execute(sql_string)
        rows = pg_cur.fetchall()
    except Exception as ex:
        print("{0} - {1} Failed: {2}".format(data_table, stat_field, ex))
        return list()

    # census_2011_data.ced_b23a - b4318

    output_list = list()
    for row in rows:
        output_list.append(row["val"])

    return output_list


def get_equal_interval_bins(data_table, boundary_table, stat_field, num_classes, min_val, map_type, pg_cur, settings):
    # query to get min and max values (filter small populations that overly influence the map visualisation)
    try:
        if map_type == "values":
            sql = "SELECT MIN(%s) AS min, MAX(%s) AS max FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0 " \
                  "AND bdy.population > {1}" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                              AsIs(boundary_table), AsIs(stat_field)))
        else:  # map_type == "percent"
            sql = "SELECT MIN(%s) AS min, MAX(%s) AS max FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0 AND %s < 100.0 " \
                  "AND bdy.population > {1}" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                              AsIs(boundary_table), AsIs(stat_field), AsIs(stat_field)))

        pg_cur.execute(sql_string)
        row = pg_cur.fetchone()
    except Exception as ex:
        print("{0} - {1} Failed: {2}".format(data_table, stat_field, ex))
        return list()

    output_list = list()
    min_val = row["min"]
    max_val = row["max"]
    delta = (max_val - min_val) / float(num_classes)
    curr_val = min_val

    # print("{0} : from {1} to {2}".format(boundary_table, min, max))

    for i in range(0, num_classes):
        output_list.append(curr_val)
        curr_val += delta

    return output_list


def get_equal_count_bins(data_table, boundary_table, stat_field, num_classes, min_val, map_type, pg_cur, settings):
    # query to get min and max values (filter small populations that overly influence the map visualisation)
    try:
        if map_type == "values":
            sql = "WITH classes AS (" \
                  "SELECT %s as val, ntile(%s) OVER (ORDER BY %s) AS class_id " \
                  "FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0.0 " \
                  "AND bdy.population > {1}" \
                  ") " \
                  "SELECT MAX(val) AS val, class_id FROM classes GROUP BY class_id ORDER BY class_id" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(num_classes), AsIs(stat_field),
                                              AsIs(data_table), AsIs(boundary_table), AsIs(stat_field)))
        else:  # map_type == "percent"
            sql = "WITH classes AS (" \
                  "SELECT %s as val, ntile(7) OVER (ORDER BY %s) AS class_id " \
                  "FROM %s AS tab " \
                  "INNER JOIN %s AS bdy ON tab.{0} = bdy.id " \
                  "WHERE %s > 0.0 AND %s < 100.0 " \
                  "AND bdy.population > {1}" \
                  ") " \
                  "SELECT MAX(val) AS val, class_id FROM classes GROUP BY class_id ORDER BY class_id" \
                .format(settings['region_id_field'], float(min_val))

            sql_string = pg_cur.mogrify(sql, (AsIs(stat_field), AsIs(stat_field), AsIs(data_table),
                                              AsIs(boundary_table), AsIs(stat_field), AsIs(stat_field)))

        # print(sql_string)
        pg_cur.execute(sql_string)
        rows = pg_cur.fetchall()
    except Exception as ex:
        print("{0} - {1} Failed: {2}".format(data_table, stat_field, ex))
        return list()

    output_list = list()
    for row in rows:
        output_list.append(row["val"])

    return output_list
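All four functions lean on psycopg2's AsIs: a plain %s parameter is rendered as a quoted, escaped literal, while AsIs splices its argument into the SQL verbatim, which is how table and column names get past parameter escaping here. A minimal sketch of the difference (cur is assumed to be a live psycopg2 cursor):

from psycopg2.extensions import AsIs

# cur.mogrify("SELECT %s FROM %s", ("pop", "tab"))
#   -> b"SELECT 'pop' FROM 'tab'"   (quoted literals, invalid as identifiers)
# cur.mogrify("SELECT %s FROM %s", (AsIs("pop"), AsIs("tab")))
#   -> b"SELECT pop FROM tab"       (raw identifiers, as the queries above need)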
39.956989
118
0.532158
960
7,432
3.929167
0.116667
0.076352
0.082715
0.085631
0.910127
0.910127
0.887328
0.886267
0.886267
0.870361
0
0.014032
0.347955
7,432
185
119
40.172973
0.764342
0.08894
0
0.813953
0
0
0.265167
0.006215
0
0
0
0
0
1
0.031008
false
0
0.007752
0
0.100775
0.031008
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
771aaf2aca885d3956e6774e21b78aa5eecd15c7
111
py
Python
tests/test_noop.py
dmontemayor/RLmini-course
66362b1c1b296403832301999edea612775e7203
[ "MIT" ]
2
2020-04-25T14:50:45.000Z
2022-03-28T02:14:12.000Z
tests/test_noop.py
dmontemayor/RLmini-course
66362b1c1b296403832301999edea612775e7203
[ "MIT" ]
null
null
null
tests/test_noop.py
dmontemayor/RLmini-course
66362b1c1b296403832301999edea612775e7203
[ "MIT" ]
null
null
null
"""NOOP test """ from .context import rlmini def test_noop(): """This test does nothing. """ pass
12.333333
30
0.594595
14
111
4.642857
0.785714
0
0
0
0
0
0
0
0
0
0
0
0.252252
111
8
31
13.875
0.783133
0.342342
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
1
0
1
0
0
7
6218fac8bc137f039fb0c30e42e1c4bea9b6eca8
14,170
py
Python
tests/test_basic.py
YuichiNotoya/jpholiday
3dffce410ed232d43f6ac2224e3eebdb0f7379ce
[ "MIT" ]
null
null
null
tests/test_basic.py
YuichiNotoya/jpholiday
3dffce410ed232d43f6ac2224e3eebdb0f7379ce
[ "MIT" ]
null
null
null
tests/test_basic.py
YuichiNotoya/jpholiday
3dffce410ed232d43f6ac2224e3eebdb0f7379ce
[ "MIT" ]
null
null
null
# coding: utf-8
import unittest
import datetime

from jpholiday import jpholiday


class TestBasic(unittest.TestCase):
    # Init
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def _assert_holiday_names(self, expected):
        # Check the holiday name returned for each date.
        for date, name in expected.items():
            self.assertEqual(jpholiday.is_holiday_name(date), name)

    def _assert_month_counts(self, year, expected):
        # Check the number of holidays in each month (January..December).
        for month, count in enumerate(expected, start=1):
            self.assertEqual(len(jpholiday.month_holidays(year, month)), count)

    def test_vernal_equinox_day(self):
        """
        Vernal Equinox Day (春分の日)
        """
        # Expected day in March, keyed by year.
        expected = {
            2000: 20, 2001: 20, 2002: 21, 2003: 21, 2004: 20, 2005: 20, 2006: 21, 2007: 21,
            2008: 20, 2009: 20, 2010: 21, 2011: 21, 2012: 20, 2013: 20, 2014: 21, 2015: 21,
            2016: 20, 2017: 20, 2018: 21, 2019: 21, 2020: 20, 2021: 20, 2022: 21, 2023: 21,
            2024: 20, 2025: 20, 2026: 20, 2027: 21, 2028: 20, 2029: 20, 2030: 20,
        }
        for year, day in expected.items():
            self.assertEqual(jpholiday._vernal_equinox_day(year), day)

    def test_autumn_equinox_day(self):
        """
        Autumnal Equinox Day (秋分の日)
        """
        # Expected day in September, keyed by year.
        expected = {
            2000: 23, 2001: 23, 2002: 23, 2003: 23, 2004: 23, 2005: 23, 2006: 23, 2007: 23,
            2008: 23, 2009: 23, 2010: 23, 2011: 23, 2012: 22, 2013: 23, 2014: 23, 2015: 23,
            2016: 22, 2017: 23, 2018: 23, 2019: 23, 2020: 22, 2021: 23, 2022: 23, 2023: 23,
            2024: 22, 2025: 23, 2026: 23, 2027: 23, 2028: 22, 2029: 23, 2030: 23,
        }
        for year, day in expected.items():
            self.assertEqual(jpholiday._autumn_equinox_day(year), day)

    def test_other_holiday(self):
        self._assert_holiday_names({
            datetime.date(1959, 4, 10): '皇太子・明仁親王の結婚の儀',
            datetime.date(1989, 2, 24): '昭和天皇の大喪の礼',
            datetime.date(1990, 11, 12): '即位の礼正殿の儀',
            datetime.date(1993, 6, 9): '皇太子・皇太子徳仁親王の結婚の儀',
        })

    def test_2015(self):
        """
        2015 holidays (2015年祝日)
        """
        self._assert_holiday_names({
            datetime.date(2015, 1, 1): '元日',
            datetime.date(2015, 1, 12): '成人の日',
            datetime.date(2015, 2, 11): '建国記念の日',
            datetime.date(2015, 3, 21): '春分の日',
            datetime.date(2015, 4, 29): '昭和の日',
            datetime.date(2015, 5, 3): '憲法記念日',
            datetime.date(2015, 5, 4): 'みどりの日',
            datetime.date(2015, 5, 5): 'こどもの日',
            datetime.date(2015, 5, 6): '憲法記念日 振替休日',
            datetime.date(2015, 7, 20): '海の日',
            datetime.date(2015, 9, 21): '敬老の日',
            datetime.date(2015, 9, 22): '国民の休日',
            datetime.date(2015, 9, 23): '秋分の日',
            datetime.date(2015, 10, 12): '体育の日',
            datetime.date(2015, 11, 3): '文化の日',
            datetime.date(2015, 11, 23): '勤労感謝の日',
            datetime.date(2015, 12, 23): '天皇誕生日',
        })

    def test_2015_month(self):
        """
        Number of holidays per month in 2015 (2015年月祝日数)
        """
        self._assert_month_counts(2015, [2, 1, 1, 1, 4, 0, 1, 0, 3, 1, 2, 1])

    def test_2015_year(self):
        """
        Number of holidays in 2015 (2015年祝日数)
        """
        self.assertEqual(len(jpholiday.year_holidays(2015)), 17)

    def test_2016(self):
        """
        2016 holidays (2016年祝日)
        """
        self._assert_holiday_names({
            datetime.date(2016, 1, 1): '元日',
            datetime.date(2016, 1, 11): '成人の日',
            datetime.date(2016, 2, 11): '建国記念の日',
            datetime.date(2016, 3, 20): '春分の日',
            datetime.date(2016, 3, 21): '春分の日 振替休日',
            datetime.date(2016, 4, 29): '昭和の日',
            datetime.date(2016, 5, 3): '憲法記念日',
            datetime.date(2016, 5, 4): 'みどりの日',
            datetime.date(2016, 5, 5): 'こどもの日',
            datetime.date(2016, 7, 18): '海の日',
            datetime.date(2016, 8, 11): '山の日',
            datetime.date(2016, 9, 19): '敬老の日',
            datetime.date(2016, 9, 22): '秋分の日',
            datetime.date(2016, 10, 10): '体育の日',
            datetime.date(2016, 11, 3): '文化の日',
            datetime.date(2016, 11, 23): '勤労感謝の日',
            datetime.date(2016, 12, 23): '天皇誕生日',
        })
        self.assertEqual(len(jpholiday.year_holidays(2016)), 17)

    def test_2016_month(self):
        """
        Number of holidays per month in 2016 (2016年月祝日数)
        """
        self._assert_month_counts(2016, [2, 1, 2, 1, 3, 0, 1, 1, 2, 1, 2, 1])

    def test_2016_year(self):
        """
        Number of holidays in 2016 (2016年祝日数)
        """
        self.assertEqual(len(jpholiday.year_holidays(2016)), 17)

    def test_2017(self):
        """
        2017 holidays (2017年祝日)
        """
        self._assert_holiday_names({
            datetime.date(2017, 1, 1): '元日',
            datetime.date(2017, 1, 2): '元日 振替休日',
            datetime.date(2017, 1, 9): '成人の日',
            datetime.date(2017, 2, 11): '建国記念の日',
            datetime.date(2017, 3, 20): '春分の日',
            datetime.date(2017, 4, 29): '昭和の日',
            datetime.date(2017, 5, 3): '憲法記念日',
            datetime.date(2017, 5, 4): 'みどりの日',
            datetime.date(2017, 5, 5): 'こどもの日',
            datetime.date(2017, 7, 17): '海の日',
            datetime.date(2017, 8, 11): '山の日',
            datetime.date(2017, 9, 18): '敬老の日',
            datetime.date(2017, 9, 23): '秋分の日',
            datetime.date(2017, 10, 9): '体育の日',
            datetime.date(2017, 11, 3): '文化の日',
            datetime.date(2017, 11, 23): '勤労感謝の日',
            datetime.date(2017, 12, 23): '天皇誕生日',
        })

    def test_2017_month(self):
        """
        Number of holidays per month in 2017 (2017年月祝日数)
        """
        self._assert_month_counts(2017, [3, 1, 1, 1, 3, 0, 1, 1, 2, 1, 2, 1])

    def test_2017_year(self):
        """
        Number of holidays in 2017 (2017年祝日数)
        """
        self.assertEqual(len(jpholiday.year_holidays(2017)), 17)

    def test_2018(self):
        """
        2018 holidays (2018年祝日)
        """
        self._assert_holiday_names({
            datetime.date(2018, 1, 1): '元日',
            datetime.date(2018, 1, 8): '成人の日',
            datetime.date(2018, 2, 11): '建国記念の日',
            datetime.date(2018, 2, 12): '建国記念の日 振替休日',
            datetime.date(2018, 3, 21): '春分の日',
            datetime.date(2018, 4, 29): '昭和の日',
            datetime.date(2018, 4, 30): '昭和の日 振替休日',
            datetime.date(2018, 5, 3): '憲法記念日',
            datetime.date(2018, 5, 4): 'みどりの日',
            datetime.date(2018, 5, 5): 'こどもの日',
            datetime.date(2018, 7, 16): '海の日',
            datetime.date(2018, 8, 11): '山の日',
            datetime.date(2018, 9, 17): '敬老の日',
            datetime.date(2018, 9, 23): '秋分の日',
            datetime.date(2018, 9, 24): '秋分の日 振替休日',
            datetime.date(2018, 10, 8): '体育の日',
            datetime.date(2018, 11, 3): '文化の日',
            datetime.date(2018, 11, 23): '勤労感謝の日',
            datetime.date(2018, 12, 23): '天皇誕生日',
            datetime.date(2018, 12, 24): '天皇誕生日 振替休日',
        })

    def test_2018_month(self):
        """
        Number of holidays per month in 2018 (2018年月祝日数)
        """
        self._assert_month_counts(2018, [2, 2, 1, 2, 3, 0, 1, 1, 3, 1, 2, 2])

    def test_2018_year(self):
        """
        Number of holidays in 2018 (2018年祝日数)
        """
        self.assertEqual(len(jpholiday.year_holidays(2018)), 20)
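The suite exercises three public entry points of jpholiday; a short usage sketch with values taken directly from the assertions above:

import datetime
from jpholiday import jpholiday

print(jpholiday.is_holiday_name(datetime.date(2015, 1, 1)))  # 元日 (New Year's Day)
print(len(jpholiday.month_holidays(2015, 5)))                # 4 holidays in May 2015
print(len(jpholiday.year_holidays(2015)))                    # 17 holidays in 2015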
51.715328
92
0.764008
1,991
14,170
5.225013
0.063285
0.273959
0.316063
0.187446
0.907046
0.902432
0.883495
0.618187
0.410843
0.353071
0
0.101224
0.083204
14,170
273
93
51.904762
0.699407
0.009668
0
0.009479
0
0
0.027744
0
0
0
0
0
0.900474
1
0.075829
false
0
0.014218
0
0.094787
0
0
0
0
null
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
62213187d896e0f3d50154cfab469166032cea26
9,284
py
Python
clarity_epp/export/workflow.py
UMCUGenetics/clarity_epp
66a255fa27a735faf084201a69c48ea3916ed36a
[ "MIT" ]
1
2018-06-18T16:15:13.000Z
2018-06-18T16:15:13.000Z
clarity_epp/export/workflow.py
UMCUGenetics/clarity_epp
66a255fa27a735faf084201a69c48ea3916ed36a
[ "MIT" ]
16
2019-02-12T12:04:07.000Z
2021-08-16T08:46:37.000Z
clarity_epp/export/workflow.py
UMCUGenetics/clarity_epp
66a255fa27a735faf084201a69c48ea3916ed36a
[ "MIT" ]
null
null
null
"""Workflow export functions.""" from genologics.entities import Process import config def determin_meetw(meetw_processes, sample_processes, repeat_cutoff=2): """Determine meetwaarde and meetwaarde herhaling (reapeat) based on list of processes and repeat cutoff.""" meetw = 'N' meetw_herh = 'N' for process in meetw_processes: if process in sample_processes: if len(sample_processes[process]) >= repeat_cutoff: meetw = 'N' meetw_herh = 'J' break else: meetw = 'J' return meetw, meetw_herh def helix_lab(lims, process_id, output_file): """Export lab workflow information in helix table format.""" output_file.write("meet_id\twerklijst_nummer\tonderzoeknr\tmonsternummer\tZuivering OK?\tZuivering herh?\tLibprep OK?\tLibprep herh?\tEnrichment OK?\tEnrichment herh?\tSequencen OK?\tSequencen herh?\n") process = Process(lims, id=process_id) for artifact in process.all_inputs(): for sample in artifact.samples: if 'Dx Werklijstnummer' in sample.udf: # Only check samples with a 'Werklijstnummer' sample_artifacts = lims.get_artifacts(samplelimsid=sample.id, type='Analyte') sample_artifacts = [sample_artifact for sample_artifact in sample_artifacts if sample_artifact.parent_process] # Filter artifacts without parent_process sample_artifacts = sorted(sample_artifacts, key=lambda artifact: int(artifact.parent_process.id.split('-')[-1])) # Sort artifact by parent process id sample_all_processes = {} sample_filter_processes = {} # reset after Dx Sample registratie zuivering for artifact in sample_artifacts: if 'Dx Sample registratie zuivering' in artifact.parent_process.type.name: sample_filter_processes = {} # reset after new helix import process_id = artifact.parent_process.id process_name = artifact.parent_process.type.name if process_name in sample_all_processes: sample_all_processes[process_name].add(process_id) else: sample_all_processes[process_name] = set([process_id]) if process_name in sample_filter_processes: sample_filter_processes[process_name].add(process_id) else: sample_filter_processes[process_name] = set([process_id]) # Determine meetw repeat_cutoff = len(sample.udf['Dx Werklijstnummer'].split(';')) * 2 meetw_zui, meetw_zui_herh = determin_meetw(config.meetw_zui_processes, sample_all_processes, repeat_cutoff) meetw_libprep, meetw_libprep_herh = determin_meetw(config.meetw_libprep_processes, sample_filter_processes, 2) meetw_enrich, meetw_enrich_herh = determin_meetw(config.meetw_enrich_processes, sample_filter_processes, 2) meetw_seq, meetw_seq_herh = determin_meetw(config.meetw_seq_processes, sample_filter_processes, 2) output_file.write( "{meet_id}\t{werklijst}\t{onderzoeksnummer}\t{monsternummer}\t{meetw_zui}\t{meetw_zui_herh}\t{meetw_libprep}\t{meetw_libprep_herh}\t{meetw_enrich}\t{meetw_enrich_herh}\t{meetw_seq}\t{meetw_seq_herh}\n".format( meet_id=sample.udf['Dx Meet ID'].split(';')[0], werklijst=sample.udf['Dx Werklijstnummer'].split(';')[0], onderzoeksnummer=sample.udf['Dx Onderzoeknummer'].split(';')[0], monsternummer=sample.udf['Dx Monsternummer'], meetw_zui=meetw_zui, meetw_zui_herh=meetw_zui_herh, meetw_libprep=meetw_libprep, meetw_libprep_herh=meetw_libprep_herh, meetw_enrich=meetw_enrich, meetw_enrich_herh=meetw_enrich_herh, meetw_seq=meetw_seq, meetw_seq_herh=meetw_seq_herh, ) ) def helix_data_analysis(lims, process_id, output_file): """Export data analysis workflow information in helix table format.""" output_file.write("meet_id\twerklijst_nummer\tonderzoeknr\tmonsternummer\tBfx analyse OK?\tSNP match OK?\n") process = Process(lims, id=process_id) for artifact in 
process.analytes()[0]: # Set SNP match meetw meetw_snp_match = 'N' if 'Dx SNPmatch' in list(artifact.udf) and artifact.udf['Dx SNPmatch']: meetw_snp_match = 'J' # Print meetw row for sample in artifact.samples: output_file.write( "{meet_id}\t{werklijst}\t{onderzoeksnummer}\t{monsternummer}\t{meetw_bfx}\t{meetw_snp_match}\n".format( meet_id=sample.udf['Dx Meet ID'].split(';')[0], werklijst=sample.udf['Dx Werklijstnummer'].split(';')[0], onderzoeksnummer=sample.udf['Dx Onderzoeknummer'].split(';')[0], monsternummer=sample.udf['Dx Monsternummer'], meetw_bfx='J', meetw_snp_match=meetw_snp_match, ) ) def helix_all(lims, process_id, output_file): """Export workflow information in helix table format.""" output_file.write("meet_id\twerklijst_nummer\tonderzoeknr\tmonsternummer\tZuivering OK?\tZuivering herh?\tLibprep OK?\tLibprep herh?\tEnrichment OK?\tEnrichment herh?\tSequencen OK?\tSequencen herh?\tBfx analyse OK?\tSNP match OK?\n") process = Process(lims, id=process_id) for artifact in process.analytes()[0]: for sample in artifact.samples: if 'Dx Werklijstnummer' in sample.udf: # Only check samples with a 'Werklijstnummer' sample_artifacts = lims.get_artifacts(samplelimsid=sample.id, type='Analyte') sample_artifacts = [sample_artifact for sample_artifact in sample_artifacts if sample_artifact.parent_process] # Filter artifacts without parent_process sample_artifacts = sorted(sample_artifacts, key=lambda artifact: int(artifact.parent_process.id.split('-')[-1])) # Sort artifact by parent process id sample_all_processes = {} sample_filter_processes = {} # reset after 'Dx Sample registratie zuivering' process -> this is a new import from helix, should not be counted as a repeat for sample_artifact in sample_artifacts: if 'Dx Sample registratie zuivering' in sample_artifact.parent_process.type.name: sample_filter_processes = {} # reset after new helix import process_id = sample_artifact.parent_process.id process_name = sample_artifact.parent_process.type.name if process_name in sample_all_processes: sample_all_processes[process_name].add(process_id) else: sample_all_processes[process_name] = set([process_id]) if process_name in sample_filter_processes: sample_filter_processes[process_name].add(process_id) else: sample_filter_processes[process_name] = set([process_id]) # Determine meetw repeat_cutoff = len(sample.udf['Dx Werklijstnummer'].split(';')) * 2 meetw_zui, meetw_zui_herh = determin_meetw(config.meetw_zui_processes, sample_all_processes, repeat_cutoff) meetw_libprep, meetw_libprep_herh = determin_meetw(config.meetw_libprep_processes, sample_filter_processes, 2) meetw_enrich, meetw_enrich_herh = determin_meetw(config.meetw_enrich_processes, sample_filter_processes, 2) meetw_seq, meetw_seq_herh = determin_meetw(config.meetw_seq_processes, sample_filter_processes, 2) meetw_snp_match = 'N' if 'Dx SNPmatch' in list(artifact.udf) and artifact.udf['Dx SNPmatch']: meetw_snp_match = 'J' output_file.write( "{meet_id}\t{werklijst}\t{onderzoeksnummer}\t{monsternummer}\t{meetw_zui}\t{meetw_zui_herh}\t{meetw_libprep}\t{meetw_libprep_herh}\t{meetw_enrich}\t{meetw_enrich_herh}\t{meetw_seq}\t{meetw_seq_herh}\t{meetw_bfx}\t{meetw_snp_match}\n".format( meet_id=sample.udf['Dx Meet ID'].split(';')[0], werklijst=sample.udf['Dx Werklijstnummer'].split(';')[0], onderzoeksnummer=sample.udf['Dx Onderzoeknummer'].split(';')[0], monsternummer=sample.udf['Dx Monsternummer'], meetw_zui=meetw_zui, meetw_zui_herh=meetw_zui_herh, meetw_libprep=meetw_libprep, meetw_libprep_herh=meetw_libprep_herh, meetw_enrich=meetw_enrich, 
meetw_enrich_herh=meetw_enrich_herh, meetw_seq=meetw_seq, meetw_seq_herh=meetw_seq_herh, meetw_bfx='J', meetw_snp_match=meetw_snp_match, ) )
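To make determin_meetw concrete: a listed process yields 'J' once it appears in the sample's process map, and flips to a repeat ('N' with herhaling 'J') when it ran at least repeat_cutoff times. A small worked example (the process names and ids are illustrative, not real config values):

sample_processes = {'Dx Zuivering': {'24-1', '24-2'}}

print(determin_meetw(['Dx Zuivering'], sample_processes))                   # ('N', 'J'): two runs reach the default cutoff of 2
print(determin_meetw(['Dx Zuivering'], sample_processes, repeat_cutoff=3))  # ('J', 'N'): two runs stay below a cutoff of 3
print(determin_meetw(['Dx Sequencen'], sample_processes))                   # ('N', 'N'): the process never ran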
59.512821
261
0.632163
1,064
9,284
5.231203
0.112782
0.035573
0.060367
0.053899
0.913762
0.909091
0.869386
0.867769
0.867769
0.856091
0
0.003285
0.278651
9,284
155
262
59.896774
0.827833
0.089832
0
0.705882
0
0.042017
0.167162
0.084294
0
0
0
0
0
1
0.033613
false
0
0.016807
0
0.058824
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
6560465f6fa97301304493ac2619005f0887c9d3
144
py
Python
.mutt/offlineimap.py
audy/docker-homely-mutt
539d99cd9b5a1adb7fa5550993a4e7b9b4f12857
[ "Unlicense", "MIT" ]
null
null
null
.mutt/offlineimap.py
audy/docker-homely-mutt
539d99cd9b5a1adb7fa5550993a4e7b9b4f12857
[ "Unlicense", "MIT" ]
null
null
null
.mutt/offlineimap.py
audy/docker-homely-mutt
539d99cd9b5a1adb7fa5550993a4e7b9b4f12857
[ "Unlicense", "MIT" ]
null
null
null
#!/usr/bin/python

import os


def get_password():
    '''Return the password for offlineimap from the environment.'''
    return os.getenv('GMAIL_PASSWORD')
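offlineimap loads a helper like this through the pythonfile setting and calls it from remotepasseval in .offlineimaprc, so the password stays out of the config file. A sketch of the behaviour (the environment value is illustrative):

import os

os.environ['GMAIL_PASSWORD'] = 'example-secret'  # normally exported by the shell
print(get_password())                            # example-secret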
18
54
0.694444
19
144
5.157895
0.842105
0
0
0
0
0
0
0
0
0
0
0
0.173611
144
7
55
20.571429
0.823529
0.416667
0
0
0
0
0.181818
0
0
0
0
0
0
1
0.333333
true
0.666667
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
1
0
1
0
0
7
6576e4dd08d127855572a8ca700aefa702922cc7
18,525
py
Python
tests/scim/test_scim_users.py
namelivia/travelperk-http-python
c6cbd88c999a49f7d61ae040029ca3e91ce72cae
[ "MIT" ]
2
2021-08-30T12:34:26.000Z
2021-08-31T07:56:12.000Z
tests/scim/test_scim_users.py
namelivia/travelperk-http-python
c6cbd88c999a49f7d61ae040029ca3e91ce72cae
[ "MIT" ]
6
2021-07-15T16:13:06.000Z
2022-03-03T09:45:07.000Z
tests/scim/test_scim_users.py
namelivia/travelperk-http-python
c6cbd88c999a49f7d61ae040029ca3e91ce72cae
[ "MIT" ]
null
null
null
import os
import json

from mock import Mock

from travelperk_http_python.api.travelperk import TravelPerk
from travelperk_http_python.scim.users import Users
from travelperk_http_python.scim.users_input_params import UsersInputParams


class TestSCIMUsers:
    def setup(self):
        self.travelperk = Mock(spec=TravelPerk)
        self.users = Users(self.travelperk)

    def get_stub_contents(self, stub_name):
        path = os.path.join(os.path.dirname(__file__), "../stubs/")
        with open(path + stub_name) as stub_data:
            return json.load(stub_data)

    def assert_stub_user(self, user, check_emergency_contact=True):
        # Assertions shared by every test that loads the stub user.
        assert [
            "urn:ietf:params:scim:schemas:core:2.0:User",
            "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User",
            "urn:ietf:params:scim:schemas:extension:travelperk:2.0:User",
        ] == user.schemas
        assert user.name.given_name == "Marlen"
        assert user.name.family_name == "Col"
        assert user.name.middle_name == ""
        assert user.name.honorific_prefix == ""
        assert user.locale == "en"
        assert user.preferred_language == "en"
        assert user.title == "Manager"
        assert user.external_id == "123455667"
        assert user.id == "29"
        assert user.groups == []
        assert user.active is True
        assert user.user_name == "marlen.col@mycompany.com"
        assert len(user.phone_numbers) == 1
        assert user.phone_numbers[0].value == "+34 1234567"
        assert user.phone_numbers[0].type == "work"
        assert user.meta.resource_type == "User"
        assert user.meta.created == "2020-04-01T22:24:44.137082+00:00"
        assert user.meta.last_modified == "2020-04-01T22:24:44.137082+00:00"
        assert user.meta.location == "http://app.travelperk.com/api/v2/scim/Users/29"
        assert user.enterprise_extension.cost_center == "Marketing"
        assert user.enterprise_extension.manager.value == "123"
        assert (
            "https://app.travelperk.com/api/v2/scim/Users/123/"
            == user.enterprise_extension.manager.ref
        )
        assert user.enterprise_extension.manager.display_name == "Jack Jackson"
        assert user.travelperk_extension.gender == "M"
        assert user.travelperk_extension.date_of_birth == "1980-02-02"
        assert user.travelperk_extension.travel_policy == "Marketing travel policy"
        assert len(user.travelperk_extension.invoice_profiles) == 1
        assert user.travelperk_extension.invoice_profiles[0].value == "My Company Ltd"
        if check_emergency_contact:
            assert user.travelperk_extension.emergency_contact.name == "Jane Goodie"
            assert user.travelperk_extension.emergency_contact.phone == "+34 9874637"

    def test_getting_all_users_with_params(self):
        self.travelperk.get.return_value = self.get_stub_contents("scim_users.json")
        users = self.users.query().set_count(5).set_start_index(3).get()
        self.travelperk.get.assert_called_once_with("scim/Users?count=5&startIndex=3")
        assert users.total_results == 2
        assert users.items_per_page == 2
        assert users.start_index == 1
        self.assert_stub_user(users.resources[0], check_emergency_contact=False)

    def test_getting_all_users_non_params(self):
        self.travelperk.get.return_value = self.get_stub_contents("scim_users.json")
        users = self.users.query().get()
        self.travelperk.get.assert_called_once_with("scim/Users?")
        assert users.total_results == 2
        assert users.items_per_page == 2
        assert users.start_index == 1
        self.assert_stub_user(users.resources[0])

    def test_getting_a_user_detail(self):
        self.travelperk.get.return_value = self.get_stub_contents("scim_user.json")
        user = self.users.get(1)
        self.travelperk.get.assert_called_once_with("scim/Users/1")
        self.assert_stub_user(user)

    def test_deleting_a_user(self):
        self.travelperk.delete.return_value = "userDeleted"
        assert self.users.delete(1) == "userDeleted"
        self.travelperk.delete.assert_called_once_with("scim/Users/1")

    def test_making_and_saving_a_user(self):
        self.travelperk.post.return_value = self.get_stub_contents("scim_user.json")
        user = (
            self.users.make(
                "testuser@test.com",
                True,
                "Test",
                "User",
            )
            .set_honorific_prefix("Dr")
            .set_locale("en")
            .set_title("manager")
            .save()
        )
        self.travelperk.post.assert_called_once_with(
            "scim/Users",
            {
                "userName": "testuser@test.com",
                "name": {
                    "givenName": "Test",
                    "familyName": "User",
                    "honorificPrefix": "Dr",
                },
                "active": True,
                "locale": "en",
                "title": "manager",
            },
        )
        self.assert_stub_user(user)

    def test_creating_a_user(self):
        self.travelperk.post.return_value = self.get_stub_contents("scim_user.json")
        user = self.users.create(
            "testuser@test.com",
            True,
            "Test",
            "User",
        )
        self.travelperk.post.assert_called_once_with(
            "scim/Users",
            {
                "userName": "testuser@test.com",
                "name": {
                    "givenName": "Test",
                    "familyName": "User",
                },
                "active": True,
            },
        )
        self.assert_stub_user(user)

    def test_updating_a_user(self):
        pass
        # TODO: Rewrite this test
        # params = Mock(spec=UpdateUserInputParams)
        # user_id = 1
        # $this.expectException(NotImplementedException::class)
        # $this.expectExceptionMessage('https://github.com/namelivia/travelperk-http-php/issues/7')
        # self.users.update(user_id, params)

    def test_replacing_a_user(self):
        self.travelperk.put.return_value = self.get_stub_contents("scim_user.json")
        user_id = 1
        user = (
            self.users.modify(
                user_id,
                "testuser@test.com",
                True,
                "Test",
                "User",
            )
            .set_honorific_prefix("Dr")
            .set_title("manager")
            .save()
        )
        self.travelperk.put.assert_called_once_with(
            "scim/Users/1",
            {
                "userName": "testuser@test.com",
                "name": {
                    "givenName": "Test",
                    "familyName": "User",
                    "honorificPrefix": "Dr",
                },
                "active": True,
                "title": "manager",
            },
        )
        self.assert_stub_user(user)

    def test_getting_all_genders(self):
        assert self.users.genders() == ["M", "F"]

    def test_getting_all_languages(self):
        assert self.users.languages() == ["en", "fr", "de", "es"]
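The tests show the client's two creation styles: a one-shot create call and a fluent make(...).set_...().save() builder, with modify(...) following the same builder pattern for replacement. A condensed usage sketch based only on calls exercised above (construction of the travelperk client is assumed):

users = Users(travelperk)  # travelperk: a configured API client

user = users.create("testuser@test.com", True, "Test", "User")

user = (
    users.make("testuser@test.com", True, "Test", "User")
    .set_honorific_prefix("Dr")
    .set_locale("en")
    .set_title("manager")
    .save()
)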
45.740741
99
0.616464
2,163
18,525
5.134073
0.084605
0.097253
0.083746
0.102116
0.915624
0.907519
0.894732
0.889329
0.889329
0.885187
0
0.052024
0.257058
18,525
404
100
45.85396
0.75485
0.013819
0
0.781818
0
0
0.190669
0.082521
0
0
0
0.002475
0.524675
1
0.031169
false
0.002597
0.015584
0
0.051948
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8