hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b5cee9e57a11b879428f17f00cdd9b45d7b5908b
| 82,004
|
py
|
Python
|
typings/bl_ui/properties_physics_fluid.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2
|
2021-12-12T18:51:52.000Z
|
2022-02-23T09:49:16.000Z
|
src/blender/blender_autocomplete-master/2.92/bl_ui/properties_physics_fluid.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | 2
|
2021-11-08T12:09:02.000Z
|
2021-12-12T23:01:12.000Z
|
src/blender/blender_autocomplete-master/2.92/bl_ui/properties_physics_fluid.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | null | null | null |
import sys
import typing
import bl_ui.utils
import bpy_types
class FLUID_PT_presets(bl_ui.utils.PresetPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
preset_add_operator = None
''' '''
preset_operator = None
''' '''
preset_subdir = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_menu(self, layout, text):
'''
'''
pass
def draw_panel_header(self, layout):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PhysicButtonsPanel:
bl_context = None
''' '''
bl_region_type = None
''' '''
bl_space_type = None
''' '''
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
class PHYSICS_PT_adaptive_domain(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_borders(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_cache(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_collections(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_diffusion(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def draw_header_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_export(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_field_weights(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_fire(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_flow_initial_velocity(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_flow_source(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_flow_texture(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_fluid(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_guide(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_liquid(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_mesh(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_noise(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_particles(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_settings(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_smoke(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_smoke_dissolve(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viewport_display(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viewport_display_advanced(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viewport_display_color(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viewport_display_debug(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viewport_display_slicing(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class PHYSICS_PT_viscosity(PhysicButtonsPanel, bpy_types.Panel,
bpy_types._GenericUI):
COMPAT_ENGINES = None
''' '''
bl_context = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def check_domain_has_unbaked_guide(self, domain):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def poll_fluid(self, context):
'''
'''
pass
def poll_fluid_domain(self, context):
'''
'''
pass
def poll_fluid_flow(self, context):
'''
'''
pass
def poll_fluid_flow_liquid(self, context):
'''
'''
pass
def poll_fluid_flow_outflow(self, context):
'''
'''
pass
def poll_gas_domain(self, context):
'''
'''
pass
def poll_liquid_domain(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
| 12.517784
| 79
| 0.390176
| 6,755
| 82,004
| 4.455662
| 0.017172
| 0.216991
| 0.207223
| 0.152502
| 0.983089
| 0.982657
| 0.981593
| 0.979866
| 0.979002
| 0.979002
| 0
| 0
| 0.477623
| 82,004
| 6,550
| 80
| 12.519695
| 0.702617
| 0
| 0
| 0.981933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.434056
| false
| 0.434056
| 0.001807
| 0
| 0.553297
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 12
|
b5dd73f8f4c510de5772c5c8474d4e8fa4f49bd1
| 73
|
py
|
Python
|
app/sample-flask/src/blueprints/__init__.py
|
beerjoa/docker-nginx-flask-gunicorn-skeleton
|
a3c5841f0b3a62e499d2d446b00bdda9079bc39e
|
[
"Apache-2.0"
] | null | null | null |
app/sample-flask/src/blueprints/__init__.py
|
beerjoa/docker-nginx-flask-gunicorn-skeleton
|
a3c5841f0b3a62e499d2d446b00bdda9079bc39e
|
[
"Apache-2.0"
] | null | null | null |
app/sample-flask/src/blueprints/__init__.py
|
beerjoa/docker-nginx-flask-gunicorn-skeleton
|
a3c5841f0b3a62e499d2d446b00bdda9079bc39e
|
[
"Apache-2.0"
] | null | null | null |
from .root import blueprint as root
from .v1013 import blueprint as v1013
| 36.5
| 37
| 0.821918
| 12
| 73
| 5
| 0.5
| 0.5
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 0.150685
| 73
| 2
| 37
| 36.5
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
bd0476e661589a3cfb4e9ef98d0bec8bf20baad9
| 112
|
py
|
Python
|
moderator/sql/__init__.py
|
janeuzil/moderator
|
4e58c7798c210da3cb8e0e411e51ed9fc059cc91
|
[
"MIT"
] | null | null | null |
moderator/sql/__init__.py
|
janeuzil/moderator
|
4e58c7798c210da3cb8e0e411e51ed9fc059cc91
|
[
"MIT"
] | null | null | null |
moderator/sql/__init__.py
|
janeuzil/moderator
|
4e58c7798c210da3cb8e0e411e51ed9fc059cc91
|
[
"MIT"
] | null | null | null |
from sql import Room
from sql import User
from sql import Question
from sql import Faq
from sql import Database
| 18.666667
| 24
| 0.821429
| 20
| 112
| 4.6
| 0.4
| 0.380435
| 0.706522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 112
| 5
| 25
| 22.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd08a7aab366ea60d10e8813cc9a01ec9d00763e
| 2,369
|
py
|
Python
|
alshamelah_api/apps/categories/models.py
|
devna-dev/durar-backend
|
36ea29bafd4cb95098e4057eb71df211dc923008
|
[
"MIT"
] | null | null | null |
alshamelah_api/apps/categories/models.py
|
devna-dev/durar-backend
|
36ea29bafd4cb95098e4057eb71df211dc923008
|
[
"MIT"
] | null | null | null |
alshamelah_api/apps/categories/models.py
|
devna-dev/durar-backend
|
36ea29bafd4cb95098e4057eb71df211dc923008
|
[
"MIT"
] | null | null | null |
import os
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ..core.models import BaseModel
class Category(BaseModel):
def get_path(self, filename):
return os.path.join(
self.path,
'image',
filename
)
name = models.CharField(max_length=100, verbose_name=_(u'name'), null=False, blank=False)
image = models.ImageField(
upload_to=get_path,
blank=True,
null=True
)
class Meta:
verbose_name_plural = "Categories"
ordering = ['name']
def __str__(self):
return self.name
@property
def path(self):
if not self.pk:
return None
return os.path.join('categories', str(self.pk))
def save(self, *args, **kwargs):
if self.pk is None:
saved_image = self.image
self.image = None
super(Category, self).save(*args, **kwargs)
self.image = saved_image
if 'force_insert' in kwargs:
kwargs.pop('force_insert')
super(Category, self).save(*args, **kwargs)
class SubCategory(BaseModel):
def get_path(self, filename):
return os.path.join(
self.path,
'image',
filename
)
name = models.CharField(max_length=100, verbose_name=_(u'name'), null=False, blank=False)
image = models.ImageField(
upload_to=get_path,
blank=True,
null=True
)
category = models.ForeignKey(Category, related_name='sub_categories', verbose_name=_(u'Category'), null=True,
on_delete=models.SET_NULL)
class Meta:
verbose_name_plural = "Sub Categories"
ordering = ['name']
def __str__(self):
return self.name
@property
def path(self):
if not self.pk:
return None
return os.path.join('categories', str(self.category_id), 'sub-categories', str(self.pk))
def save(self, *args, **kwargs):
if self.pk is None:
saved_image = self.image
self.image = None
super(SubCategory, self).save(*args, **kwargs)
self.image = saved_image
if 'force_insert' in kwargs:
kwargs.pop('force_insert')
super(SubCategory, self).save(*args, **kwargs)
| 26.617978
| 113
| 0.577037
| 276
| 2,369
| 4.804348
| 0.235507
| 0.027149
| 0.036199
| 0.048265
| 0.806184
| 0.766968
| 0.711161
| 0.711161
| 0.711161
| 0.711161
| 0
| 0.00369
| 0.313634
| 2,369
| 88
| 114
| 26.920455
| 0.811808
| 0
| 0
| 0.771429
| 0
| 0
| 0.065006
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114286
| false
| 0
| 0.057143
| 0.057143
| 0.414286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20fbf046272f2ae231dababd00ef3fe616c9e706
| 18,841
|
py
|
Python
|
PokemonTypes/Matchups.py
|
Pedro29152/poke-types
|
021647e0bd3008d70c412af92b56be7d46f08428
|
[
"MIT"
] | 1
|
2021-10-17T22:50:17.000Z
|
2021-10-17T22:50:17.000Z
|
PokemonTypes/Matchups.py
|
Pedro29152/poke-types
|
021647e0bd3008d70c412af92b56be7d46f08428
|
[
"MIT"
] | null | null | null |
PokemonTypes/Matchups.py
|
Pedro29152/poke-types
|
021647e0bd3008d70c412af92b56be7d46f08428
|
[
"MIT"
] | null | null | null |
from .PokemonTypes import Types
from .Matchup import Matchup
from .MatchupList import MatchupList
ZERO = 0
HALF = 1/2
DOUBLE = 2
def baseMatchup(matchupsList = None):
ret = MatchupList()
if matchupsList:
for item in matchupsList:
ret[item['type']] = item['multiplier']
return ret
class Matchups:
#region TypeMatchups
#NORMAL
normalAttack = [
{'type': Types.Ghost, 'multiplier': ZERO},
{'type': Types.Steel, 'multiplier': HALF},
{'type': Types.Rock, 'multiplier': HALF}
]
normalDefence = [
{'type': Types.Ghost, 'multiplier': ZERO},
{'type': Types.Fighting, 'multiplier': DOUBLE}
]
#FIGHTING
fightingAttack = [
{'type': Types.Normal, 'multiplier': DOUBLE},
{'type': Types.Flying, 'multiplier': HALF},
{'type': Types.Poison, 'multiplier': HALF},
{'type': Types.Rock, 'multiplier': DOUBLE},
{'type': Types.Bug, 'multiplier': HALF},
{'type': Types.Ghost, 'multiplier': ZERO},
{'type': Types.Steel, 'multiplier': DOUBLE},
{'type': Types.Psychic, 'multiplier': HALF},
{'type': Types.Ice, 'multiplier': DOUBLE},
{'type': Types.Dark, 'multiplier': DOUBLE},
{'type': Types.Fairy, 'multiplier': HALF}
]
fightingDefence = [
{'type': Types.Flying, 'multiplier': DOUBLE},
{'type': Types.Rock, 'multiplier': HALF},
{'type': Types.Bug, 'multiplier': HALF},
{'type': Types.Psychic, 'multiplier': DOUBLE},
{'type': Types.Dark, 'multiplier': HALF},
{'type': Types.Fairy, 'multiplier': DOUBLE}
]
# Type-effectiveness tables (standard Gen VI+ chart). For each type T:
#   <t>Attack  — non-neutral multipliers applied when T attacks the listed type
#   <t>Defence — non-neutral multipliers for damage T takes FROM the listed type
# Types absent from a list take the implicit neutral 1x multiplier.
# Multiplier constants (HALF / DOUBLE / ZERO) are defined elsewhere in this file.
#FLYING
flyingAttack = [
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Electric, 'multiplier': HALF}
]
flyingDefence = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': ZERO},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': DOUBLE}
]
#POISON
poisonAttack = [
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': HALF},
    {'type': Types.Rock, 'multiplier': HALF},
    {'type': Types.Ghost, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': ZERO},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': DOUBLE},
]
poisonDefence = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Psychic, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': HALF},
]
#GROUND
groundAttack = [
    {'type': Types.Flying, 'multiplier': ZERO},
    {'type': Types.Poison, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': DOUBLE}
]
groundDefence = [
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Rock, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Electric, 'multiplier': ZERO},
    {'type': Types.Ice, 'multiplier': DOUBLE}
]
#ROCK
rockAttack = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Flying, 'multiplier': DOUBLE},
    {'type': Types.Ground, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': DOUBLE}
]
rockDefence = [
    {'type': Types.Normal, 'multiplier': HALF},
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Flying, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': DOUBLE}
]
#BUG
bugAttack = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Flying, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Ghost, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Psychic, 'multiplier': DOUBLE},
    {'type': Types.Dark, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': HALF}
]
bugDefence = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Flying, 'multiplier': DOUBLE},
    {'type': Types.Ground, 'multiplier': HALF},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF}
]
#GHOST
ghostAttack = [
    {'type': Types.Normal, 'multiplier': ZERO},
    {'type': Types.Ghost, 'multiplier': DOUBLE},
    {'type': Types.Psychic, 'multiplier': DOUBLE},
    {'type': Types.Dark, 'multiplier': HALF}
]
ghostDefence = [
    {'type': Types.Normal, 'multiplier': ZERO},
    {'type': Types.Fighting, 'multiplier': ZERO},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Ghost, 'multiplier': DOUBLE},
    {'type': Types.Dark, 'multiplier': DOUBLE}
]
#STEEL
steelAttack = [
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': HALF},
    {'type': Types.Ice, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': DOUBLE}
]
steelDefence = [
    {'type': Types.Normal, 'multiplier': HALF},
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Flying, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': ZERO},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Psychic, 'multiplier': HALF},
    {'type': Types.Ice, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': HALF},
    {'type': Types.Fairy, 'multiplier': HALF},
]
#FIRE
fireAttack = [
    {'type': Types.Rock, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': DOUBLE},
    {'type': Types.Dragon, 'multiplier': HALF}
]
fireDefence = [
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Ice, 'multiplier': HALF},
    {'type': Types.Fairy, 'multiplier': HALF}
]
#WATER
waterAttack = [
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': HALF}
]
waterDefence = [
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Electric, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': HALF}
]
#GRASS
grassAttack = [
    {'type': Types.Flying, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': HALF}
]
grassDefence = [
    {'type': Types.Flying, 'multiplier': DOUBLE},
    {'type': Types.Poison, 'multiplier': DOUBLE},
    {'type': Types.Ground, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': HALF},
    {'type': Types.Ice, 'multiplier': DOUBLE}
]
#ELECTRIC
electricAttack = [
    {'type': Types.Flying, 'multiplier': DOUBLE},
    {'type': Types.Ground, 'multiplier': ZERO},
    {'type': Types.Water, 'multiplier': DOUBLE},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': HALF}
]
electricDefence = [
    {'type': Types.Flying, 'multiplier': HALF},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': HALF}
]
#PSYCHIC
psychicAttack = [
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Poison, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Psychic, 'multiplier': HALF},
    {'type': Types.Dark, 'multiplier': ZERO}
]
# Defensive matchups for Psychic. Fixed: this table previously listed
# Types.Electric at HALF, but per the standard type chart Psychic resists
# Psychic (not Electric); Electric deals neutral damage to Psychic.
psychicDefence = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Ghost, 'multiplier': DOUBLE},
    {'type': Types.Psychic, 'multiplier': HALF},
    {'type': Types.Dark, 'multiplier': DOUBLE}
]
# Remaining type-effectiveness tables (same format as above: non-neutral
# multipliers only; unlisted types are implicitly 1x).
#ICE
iceAttack = [
    {'type': Types.Flying, 'multiplier': DOUBLE},
    {'type': Types.Ground, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': DOUBLE}
]
iceDefence = [
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Rock, 'multiplier': DOUBLE},
    {'type': Types.Steel, 'multiplier': DOUBLE},
    {'type': Types.Fire, 'multiplier': DOUBLE},
    {'type': Types.Ice, 'multiplier': HALF}
]
#DRAGON
dragonAttack = [
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': ZERO}
]
dragonDefence = [
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Water, 'multiplier': HALF},
    {'type': Types.Grass, 'multiplier': HALF},
    {'type': Types.Electric, 'multiplier': HALF},
    {'type': Types.Ice, 'multiplier': DOUBLE},
    {'type': Types.Dragon, 'multiplier': DOUBLE},
    {'type': Types.Fairy, 'multiplier': DOUBLE}
]
#DARK
darkAttack = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Ghost, 'multiplier': DOUBLE},
    {'type': Types.Psychic, 'multiplier': DOUBLE},
    {'type': Types.Dark, 'multiplier': HALF},
    {'type': Types.Fairy, 'multiplier': HALF}
]
darkDefence = [
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': DOUBLE},
    {'type': Types.Ghost, 'multiplier': HALF},
    {'type': Types.Psychic, 'multiplier': ZERO},
    {'type': Types.Dark, 'multiplier': HALF},
    {'type': Types.Fairy, 'multiplier': DOUBLE}
]
#FAIRY
fairyAttack = [
    {'type': Types.Fighting, 'multiplier': DOUBLE},
    {'type': Types.Poison, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': HALF},
    {'type': Types.Fire, 'multiplier': HALF},
    {'type': Types.Dragon, 'multiplier': DOUBLE},
    {'type': Types.Dark, 'multiplier': DOUBLE}
]
fairyDefence = [
    {'type': Types.Fighting, 'multiplier': HALF},
    {'type': Types.Poison, 'multiplier': DOUBLE},
    {'type': Types.Bug, 'multiplier': HALF},
    {'type': Types.Steel, 'multiplier': DOUBLE},
    {'type': Types.Dragon, 'multiplier': ZERO},
    {'type': Types.Dark, 'multiplier': HALF}
]
#endregion
def getMatchups(self):
    """Build the complete matchup table.

    Returns a list indexed by ``Types.<T>.value``; each populated slot holds
    a ``Matchup`` of that type's defensive and offensive tables, each run
    through ``baseMatchup``. One entry per type in the same order as the
    raw tables above.
    """
    # Data-driven form of the original 18 copy-pasted assignments:
    # (type, defence table, attack table) triples in enum order.
    table = [
        (Types.Normal, self.normalDefence, self.normalAttack),
        (Types.Fighting, self.fightingDefence, self.fightingAttack),
        (Types.Flying, self.flyingDefence, self.flyingAttack),
        (Types.Poison, self.poisonDefence, self.poisonAttack),
        (Types.Ground, self.groundDefence, self.groundAttack),
        (Types.Rock, self.rockDefence, self.rockAttack),
        (Types.Bug, self.bugDefence, self.bugAttack),
        (Types.Ghost, self.ghostDefence, self.ghostAttack),
        (Types.Steel, self.steelDefence, self.steelAttack),
        (Types.Fire, self.fireDefence, self.fireAttack),
        (Types.Water, self.waterDefence, self.waterAttack),
        (Types.Grass, self.grassDefence, self.grassAttack),
        (Types.Electric, self.electricDefence, self.electricAttack),
        (Types.Psychic, self.psychicDefence, self.psychicAttack),
        (Types.Ice, self.iceDefence, self.iceAttack),
        (Types.Dragon, self.dragonDefence, self.dragonAttack),
        (Types.Dark, self.darkDefence, self.darkAttack),
        (Types.Fairy, self.fairyDefence, self.fairyAttack),
    ]
    ret = (Types.max() + 1) * [None]
    for pokeType, defence, attack in table:
        ret[pokeType.value] = Matchup(
            pokeType,
            baseMatchup(defence),
            baseMatchup(attack)
        )
    return ret
def getMatchupsObj(self):
    """Build the matchup table from the raw attack/defence lists.

    Same layout as getMatchups() — a list indexed by ``Types.<T>.value`` —
    but the tables are passed to ``Matchup`` as-is, without the
    ``baseMatchup`` conversion. Every entry is ``Matchup(type, defence,
    attack)``.
    """
    ret = (Types.max() + 1) * [None]
    ret[Types.Normal.value] = Matchup(Types.Normal, self.normalDefence, self.normalAttack)
    ret[Types.Fighting.value] = Matchup(Types.Fighting, self.fightingDefence, self.fightingAttack)
    ret[Types.Flying.value] = Matchup(Types.Flying, self.flyingDefence, self.flyingAttack)
    ret[Types.Poison.value] = Matchup(Types.Poison, self.poisonDefence, self.poisonAttack)
    ret[Types.Ground.value] = Matchup(Types.Ground, self.groundDefence, self.groundAttack)
    ret[Types.Rock.value] = Matchup(Types.Rock, self.rockDefence, self.rockAttack)
    ret[Types.Bug.value] = Matchup(Types.Bug, self.bugDefence, self.bugAttack)
    ret[Types.Ghost.value] = Matchup(Types.Ghost, self.ghostDefence, self.ghostAttack)
    ret[Types.Steel.value] = Matchup(Types.Steel, self.steelDefence, self.steelAttack)
    ret[Types.Fire.value] = Matchup(Types.Fire, self.fireDefence, self.fireAttack)
    ret[Types.Water.value] = Matchup(Types.Water, self.waterDefence, self.waterAttack)
    ret[Types.Grass.value] = Matchup(Types.Grass, self.grassDefence, self.grassAttack)
    ret[Types.Electric.value] = Matchup(Types.Electric, self.electricDefence, self.electricAttack)
    ret[Types.Psychic.value] = Matchup(Types.Psychic, self.psychicDefence, self.psychicAttack)
    # Bug fix: Ice previously passed (attack, defence) — swapped relative to
    # every other type in this method and to getMatchups().
    ret[Types.Ice.value] = Matchup(Types.Ice, self.iceDefence, self.iceAttack)
    ret[Types.Dragon.value] = Matchup(Types.Dragon, self.dragonDefence, self.dragonAttack)
    ret[Types.Dark.value] = Matchup(Types.Dark, self.darkDefence, self.darkAttack)
    ret[Types.Fairy.value] = Matchup(Types.Fairy, self.fairyDefence, self.fairyAttack)
    return ret
| 38.45102
| 102
| 0.566053
| 1,749
| 18,841
| 6.09777
| 0.050314
| 0.202532
| 0.175527
| 0.224285
| 0.820441
| 0.816315
| 0.807407
| 0.730333
| 0.730333
| 0.447257
| 0
| 0.00043
| 0.259169
| 18,841
| 489
| 103
| 38.529652
| 0.763648
| 0.006581
| 0
| 0.517084
| 0
| 0
| 0.180457
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006834
| false
| 0
| 0.006834
| 0
| 0.104784
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f4f766818502b1d009bbf2a6b7edd57a08be9b5
| 16,109
|
py
|
Python
|
scripts/vrouter/test_fat_flow_aggr.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 5
|
2020-09-29T00:36:57.000Z
|
2022-02-16T06:51:32.000Z
|
scripts/vrouter/test_fat_flow_aggr.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 27
|
2019-11-02T02:18:34.000Z
|
2022-02-24T18:49:08.000Z
|
scripts/vrouter/test_fat_flow_aggr.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 20
|
2019-11-28T16:02:25.000Z
|
2022-01-06T05:56:58.000Z
|
from common.vrouter.base import BaseVrouterTest
from tcutils.wrappers import preposttest_wrapper
import test
from tcutils.util import get_random_name, is_v6
import random
from common.neutron.lbaasv2.base import BaseLBaaSTest
from common.servicechain.config import ConfigSvcChain
from common.servicechain.verify import VerifySvcChain
AF_TEST = 'v6'
class FatFlowAggr(BaseVrouterTest, BaseLBaaSTest):
    """Regression tests for vRouter fat-flow prefix aggregation (IPv4).

    Each test sets up a fat-flow rule with destination-prefix aggregation on
    a server VM, drives traffic from two client VMs, and verifies the flow
    aggregation on the compute nodes. The actual orchestration lives in
    ``fat_flow_with_prefix_aggr`` (inherited from a base class outside this
    file).
    """

    @classmethod
    def setUpClass(cls):
        super(FatFlowAggr, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(FatFlowAggr, cls).tearDownClass()

    #This is required just to override the method in BaseLBaaSTest, else tests
    #run only in openstack liberty and up
    def is_test_applicable(self):
        return (True, None)

    @test.attr(type=['sanity','dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_icmp_intra_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv4) for intra-vn inter-node
        Steps:
            1. Create 1 VN and launch 3 VMs.2 client VMs on same node and server VM on different node.
                Client 1 in subnet 1, Client 2 in the next subnet.
            2. On server VM, config fat flow aggr prefix dest len 25 for ICMP port 0.
            3. From both the client VMs, send ICMP traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat flows with prefix aggregated for the src IPs
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of flows and on server compute,
               expect 2 pairs of flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        prefix_length = 27
        ipv6 = False
        only_v6 = False
        # IPv6 prefix length is passed unconditionally below, so it is set
        # regardless of the configured address family.
        prefix_length6 = 123
        if self.inputs.get_af() == 'dual':
            ipv6 = True
            only_v6 = True
        inter_node = True
        inter_vn = False
        proto = 'icmp'
        if proto == 'icmp':
            port = 0
        policy_deny = False
        vn_policy = False
        self.fat_flow_with_prefix_aggr(prefix_length=prefix_length,
            inter_node=inter_node,inter_vn=inter_vn,
            proto=proto, port=port, policy_deny=policy_deny, vn_policy=vn_policy,
            dual=ipv6, prefix_length6=prefix_length6, only_v6=only_v6)
        return True
    # end test_fat_flow_aggr_dest_len_icmp_intra_vn_inter_node

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_udp_inter_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv4) for intra-vn inter-node
        Steps:
            1. Create 2 VNs and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on different node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow udp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest len 29 for UDP port 55.
            3. From both the client VMs, send ICMP traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat flows with prefix aggregated for the src IPs
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of flows and on server compute,
               expect 2 pairs of flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        prefix_length = 29
        ipv6 = False
        only_v6 = False
        if self.inputs.get_af() == 'dual':
            ipv6 = True
            only_v6 = True
        # Assigned unconditionally: prefix_length6 is passed to the helper
        # below in every address-family mode.
        prefix_length6 = 125
        inter_node = True
        inter_vn = True
        proto = 'udp'
        port = 55
        policy_deny = False
        vn_policy = True
        self.fat_flow_with_prefix_aggr(prefix_length=prefix_length,
            inter_node=inter_node,inter_vn=inter_vn, proto=proto,
            port=port, vn_policy=vn_policy, policy_deny=policy_deny,
            dual=ipv6, prefix_length6=prefix_length6, only_v6=only_v6)
        return True

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_ignore_src_udp_inter_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest for intra-vn inter-node
        Steps:
            1. Create 2 VNs and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on different node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow udp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest len 29
               with ignore src for UDP port 55.
            3. From both the client VMs, send udp traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat flows with prefix aggregated for the src IPs and with dest ip 0.0.0.0/0
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of flows and on server compute,
               expect 2 pairs of flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        prefix_length = 28
        inter_node = True
        inter_vn = True
        # 'src' means the fat-flow rule ignores the source address entirely
        # (dest shows as 0.0.0.0/0 in the aggregated flow).
        ignore_address = 'src'
        proto = 'udp'
        port = 55
        policy_deny = False
        vn_policy = True
        ipv6 = False
        only_v6 = False
        if self.inputs.get_af() == 'dual':
            ipv6 = True
            only_v6 = True
        # Assigned unconditionally: passed to the helper in every mode.
        prefix_length6 = 124
        self.fat_flow_with_prefix_aggr(prefix_length=prefix_length,
            inter_node=inter_node,inter_vn=inter_vn, proto=proto,
            port=port, vn_policy=vn_policy, policy_deny=policy_deny,
            ignore_address=ignore_address, dual=ipv6,
            prefix_length6=prefix_length6, only_v6=only_v6)
        return True

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_ignore_src_icmp_inter_vn_intra_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv4) for intra-vn inter-node
        Steps:
            1. Create 2 VNs and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on the same node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow icmp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest len 29
               with ignore src for icmp port 0.
            3. From both the client VMs, send icmp traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat flows with prefix aggregated for the src IPs and with dest ip 0.0.0.0/0
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On the CN, also expect 4 pairs of flows.
            3. On the compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        prefix_length = 27
        inter_node = False
        inter_vn = True
        ignore_address = 'src'
        proto = 'icmp'
        port = 0
        policy_deny = False
        vn_policy = True
        ipv6 = False
        only_v6 = False
        if self.inputs.get_af() == 'dual':
            ipv6 = True
            only_v6 = True
        # Assigned unconditionally: passed to the helper in every mode.
        prefix_length6 = 125
        self.fat_flow_with_prefix_aggr(prefix_length=prefix_length,
            inter_node=inter_node,inter_vn=inter_vn, proto=proto,
            port=port, vn_policy=vn_policy, policy_deny=policy_deny, ignore_address=ignore_address,
            dual=ipv6, prefix_length6=prefix_length6, only_v6=only_v6)
        return True

    # NOTE: leading 'i' in the name keeps this scale test out of test
    # discovery (disabled); IPv4-only, no dual-af handling.
    @preposttest_wrapper
    def itest_fat_flow_aggr_scaling(self):
        """
        Description: Verify fat flow prefix aggr dest for intra-vn inter-node
        Steps:
            1. Create 2 VNs and launch n clients VMs.1 server VM in VN2 on remote node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow icmp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest len 25
               with ignore src for icmp port 0.
            3. From all the client VMs, send icmp traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the server CN, expect 2 pairs( 1 for client 1, 1 for client 2)
               of fat flows with prefix aggregated for the src IPs and with dest ip 0.0.0.0/0
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On the CN, also expect n pairs of flows.
            3. On the compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        prefix_length = 25
        inter_node = True
        inter_vn = True
        ignore_address = 'src'
        proto = 'icmp'
        port = 0
        policy_deny = False
        vn_policy = True
        self.fat_flow_with_prefix_aggr(prefix_length=prefix_length,
            inter_node=inter_node,inter_vn=inter_vn, proto=proto,
            port=port, vn_policy=vn_policy, policy_deny=policy_deny,
            ignore_address=ignore_address, scale=4)
        return True
class FatFlowAggrIpv6(FatFlowAggr):
    """IPv6 variant of FatFlowAggr.

    Forces the address family to dual/v6 and re-runs the parent's fat-flow
    aggregation tests; LBaaS is skipped since it is not supported for IPv6.
    """

    @classmethod
    def setUpClass(cls):
        # Bug fix: this previously called super(FatFlowAggr, cls).setUpClass(),
        # which skipped FatFlowAggr in the MRO (copy-paste from the parent).
        # Name the class itself so the full chain runs.
        super(FatFlowAggrIpv6, cls).setUpClass()
        cls.inputs.set_af(AF_TEST)

    def is_test_applicable(self):
        """Skip the suite on setups without IPv6 support."""
        if self.inputs.orchestrator == 'vcenter' and not self.orch.is_feature_supported('ipv6'):
            return(False, 'Skipping IPv6 Test on vcenter setup')
        if not self.connections.orch.is_feature_supported('ipv6'):
            return(False, 'IPv6 tests not supported in this environment ')
        return (True, None)

    @preposttest_wrapper
    def test_fat_flow_lbaasv2(self):
        raise self.skipTest("Skipping Test. LBaas is NOT supported for IPv6")

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_icmp_intra_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv6) for intra-vn inter-node
        Steps:
            1. Create 1 VN with IPv6 subnet and launch 3 VMs.
               2 client VMs on same node and server VM on different node.
               Client 1 in subnet 1, Client 2 in the next subnet.
            2. On server VM, config fat flow aggr prefix dest IPv6 len 123 for ICMP port 0.
            3. From both the client VMs, send ICMP6 traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of IPv6 fat flows with prefix aggregated for the src IPs
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of IPv6 flows and on server compute,
               expect 2 pairs of IPv6 flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        self.inputs.set_af('dual')
        super(FatFlowAggrIpv6, self).test_fat_flow_aggr_dest_icmp_intra_vn_inter_node()

    @test.attr(type=['sanity','dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_udp_inter_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv6) for intra-vn inter-node
        Steps:
            1. Create 2 VNs with IPv6 subnets and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on different node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow udp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest IPv6 len 125 for UDP port 55.
            3. From both the client VMs, send ICMP6 traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of IPv6 fat flows with prefix aggregated for the src IPs
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of IPv6 flows and on server compute,
               expect 2 pairs of IPv6 flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        self.inputs.set_af('dual')
        super(FatFlowAggrIpv6, self).test_fat_flow_aggr_dest_udp_inter_vn_inter_node()

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_ignore_src_udp_inter_vn_inter_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv6) for intra-vn inter-node
        Steps:
            1. Create 2 VNs with IPv6 subnets and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on different node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow udp traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest IPv6 len 100
               with ignore src for UDP port 55.
            3. From both the client VMs, send udp traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the remote CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat IPv6 flows with prefix aggregated for the src IPs and with dest IPv6 :::0
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On client VM compute nodes, expect 4 pairs of flows and on server compute,
               expect 2 pairs of flows
            3. On server compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        self.inputs.set_af('dual')
        super(FatFlowAggrIpv6, self).test_fat_flow_aggr_dest_ignore_src_udp_inter_vn_inter_node()

    @test.attr(type=['dev_reg'])
    @preposttest_wrapper
    def test_fat_flow_aggr_dest_ignore_src_icmp_inter_vn_intra_node(self):
        """
        Description: Verify fat flow prefix aggr dest (IPv6) for intra-vn inter-node
        Steps:
            1. Create 2 VNs with IPv6 subnets and launch 3 VMs.2 client VMs in VN1 on same node
               and server VM in VN2 on the same node.
               Client 1 in subnet 1, Client 2 in the next subnet.
               Policy p1 configured to allow icmp6 traffic between VN1 and VN2.
            2. On server VM, config fat flow aggr prefix dest IPv6 len 100
               with ignore src for icmp6 port 0.
            3. From both the client VMs, send icmp6 traffic to the server VM twice with diff. src ports
        Pass criteria:
            1. On the CN, expect 2 pairs ( 1 for client 1, 1 for client 2)
               of fat IPv6 flows with prefix aggregated for the src IPs and with dest ip :::0
               (VM to fabric, Prefix Aggr Dest: Aggregation happens for SRC IPs)
            2. On the CN, also expect 4 pairs of IPv6 flows.
            3. On the compute node, flow's source port should be 0 for fat flows
        Maintainer: Ankitja@juniper.net
        """
        self.inputs.set_af('dual')
        super(FatFlowAggrIpv6, self).test_fat_flow_aggr_dest_ignore_src_icmp_inter_vn_intra_node()
| 45.50565
| 103
| 0.635607
| 2,389
| 16,109
| 4.156132
| 0.074508
| 0.02679
| 0.025481
| 0.019639
| 0.900191
| 0.888106
| 0.882063
| 0.87461
| 0.862423
| 0.855675
| 0
| 0.033638
| 0.307964
| 16,109
| 353
| 104
| 45.634561
| 0.857015
| 0.535539
| 0
| 0.741935
| 0
| 0
| 0.044122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.051613
| 0.006452
| 0.206452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85ce171435cfdbd8cb660a34b4937d472f8b0116
| 18,192
|
py
|
Python
|
maddpg/models/graph_net.py
|
zixianma/PIC
|
bbfe8985121e3ffb693c047ed3fe85d0c8256737
|
[
"MIT"
] | 28
|
2019-10-31T00:38:10.000Z
|
2022-03-21T12:33:03.000Z
|
maddpg/models/graph_net.py
|
zixianma/PIC
|
bbfe8985121e3ffb693c047ed3fe85d0c8256737
|
[
"MIT"
] | 10
|
2019-11-27T12:37:25.000Z
|
2021-06-07T11:52:34.000Z
|
maddpg/models/graph_net.py
|
baicenxiao/AREL
|
2168508138fde62150bc5e8b47b1aa3bcef09785
|
[
"MIT"
] | 13
|
2019-10-31T00:38:17.000Z
|
2022-03-06T04:24:09.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.graph_layers import GraphConvLayer, MessageFunc, UpdateFunc
class GraphNetHetro(nn.Module):
    # A graph net that supports different edge attributes.
    """Graph network over a fully connected agent graph with heterogeneous
    agent groups, producing a scalar value V per batch element.

    Agents within a group share a learned group embedding that is
    concatenated onto each agent's state-action features before the two
    GraphConv + Linear layers; the result is pooled over agents and mapped
    to a scalar by a final linear head.
    """

    def __init__(self, sa_dim, n_agents, hidden_size, agent_groups, agent_id=0,
                 pool_type='avg', use_agent_id=False):
        """
        :param sa_dim: integer, per-agent state-action feature size
        :param n_agents: integer, total number of agents
        :param hidden_size: integer, width of the hidden layers
        :param agent_groups: list, represents number of agents in each group, agents in the same
            group are homogeneous. Agents in different groups are heterogeneous
            ex. agent_groups = [4] --> Group three has has agent 0, agent 1, agent 2, agent 3
                agent_groups =[2, 3] --> Group one has agent 0, agent 1.
                                         Group two has agent 2, agent 3, agent 4
                agent_groups =[2,3,4] --> Group one has agent 0, agent 1.
                                          Group two has agent 2, agent 3, agent 4.
                                          Group three has has agent 5, agent 6, agent 7
        :param agent_id: index of "this" agent (only used when use_agent_id)
        :param pool_type: 'avg' or 'max' pooling over the agent dimension
        :param use_agent_id: append a learned attribute distinguishing
            agent_id from the other agents
        """
        super(GraphNetHetro, self).__init__()
        assert n_agents == sum(agent_groups)
        self.sa_dim = sa_dim
        self.n_agents = n_agents
        self.pool_type = pool_type
        self.agent_groups = agent_groups
        group_emb_dim = 2  # Dimension for the group embedding.
        if use_agent_id:
            agent_id_attr_dim = 2
            # NOTE(review): forward() concatenates BOTH the agent-id attribute
            # and the group embedding, but this input width only accounts for
            # the agent-id attribute — looks like a latent shape mismatch when
            # use_agent_id=True; confirm before enabling that path.
            self.gc1 = GraphConvLayer(sa_dim + agent_id_attr_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim + agent_id_attr_dim, hidden_size)
        else:
            self.gc1 = GraphConvLayer(sa_dim + group_emb_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim + group_emb_dim, hidden_size)
        self.gc2 = GraphConvLayer(hidden_size, hidden_size)
        self.nn_gc2 = nn.Linear(hidden_size, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        self.V.weight.data.mul_(0.1)
        self.V.bias.data.mul_(0.1)
        # Create group embeddings, one learnable (1, 1, group_emb_dim) vector per group.
        num_groups = len(agent_groups)
        self.group_emb = nn.ParameterList([nn.Parameter(torch.randn(1, 1, group_emb_dim), requires_grad=True) for k in range(num_groups)])
        # Assumes a fully connected graph (uniform off-diagonal adjacency, no self-loops).
        self.register_buffer('adj', (torch.ones(n_agents, n_agents) - torch.eye(n_agents)) / self.n_agents)
        self.use_agent_id = use_agent_id
        self.agent_id = agent_id
        if use_agent_id:
            self.curr_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim, 1), requires_grad=True)
            self.other_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim, 1), requires_grad=True)
            agent_att = []
            for k in range(self.n_agents):
                if k == self.agent_id:
                    agent_att.append(self.curr_agent_attr.unsqueeze(-1))
                else:
                    agent_att.append(self.other_agent_attr.unsqueeze(-1))
            agent_att = torch.cat(agent_att, -1)
            self.agent_att = agent_att.unsqueeze(0)

    def forward(self, x):
        """
        :param x: [batch_size, self.sa_dim, self.n_agent] tensor
        :return: [batch_size, self.output_dim] tensor
        :raises ValueError: if pool_type is neither 'avg' nor 'max'
        """
        if self.use_agent_id:
            agent_att = torch.cat([self.agent_att] * x.shape[0], 0)
            x = torch.cat([x, agent_att], 1)
        # Concat group embeddings, concat to input layer.
        group_emb_list = []
        for k_idx, k in enumerate(self.agent_groups):
            group_emb_list += [self.group_emb[k_idx]]*k
        group_emb = torch.cat(group_emb_list, 1)
        group_emb_batch = torch.cat([group_emb]*x.shape[0], 0)
        x = torch.cat([x, group_emb_batch], -1)
        feat = F.relu(self.gc1(x, self.adj))
        feat += F.relu(self.nn_gc1(x))
        feat /= (1. * self.n_agents)
        out = F.relu(self.gc2(feat, self.adj))
        out += F.relu(self.nn_gc2(feat))
        out /= (1. * self.n_agents)
        # Pooling over the agent dimension.
        if self.pool_type == 'avg':
            ret = out.mean(1)
        elif self.pool_type == 'max':
            ret, _ = out.max(1)
        else:
            # Fix: an unknown pool_type previously fell through and raised an
            # opaque UnboundLocalError on `ret`; fail fast with a clear error.
            raise ValueError("Unsupported pool_type: {!r}".format(self.pool_type))
        # Compute V
        V = self.V(ret)
        return V
class GraphNetV(nn.Module):
# A graph net that supports different edge attributes and outputs an vector
def __init__(self, sa_dim, n_agents, hidden_size, agent_groups, agent_id=0,
pool_type='avg', use_agent_id=False):
"""
:param sa_dim: integer
:param n_agents: integer
:param hidden_size: integer
:param agent_groups: list, represents number of agents in each group, agents in the same
group are homogeneous. Agents in different groups are heterogeneous
ex. agent_groups = [4] --> Group three has has agent 0, agent 1, agent 2, agent 3
agent_groups =[2, 3] --> Group one has agent 0, agent 1.
Group two has agent 2, agent 3, agent 4
agent_groups =[2,3,4] --> Group one has agent 0, agent 1.
Group two has agent 2, agent 3, agent 4.
Group three has has agent 5, agent 6, agent 7
"""
super(GraphNetV, self).__init__()
assert n_agents == sum(agent_groups)
self.sa_dim = sa_dim
self.n_agents = n_agents
self.pool_type = pool_type
self.agent_groups = agent_groups
group_emb_dim = 2 # Dimension for the group embedding.
if use_agent_id:
agent_id_attr_dim = 2
self.gc1 = GraphConvLayer(sa_dim + agent_id_attr_dim, hidden_size)
self.nn_gc1 = nn.Linear(sa_dim + agent_id_attr_dim, hidden_size)
else:
self.gc1 = GraphConvLayer(sa_dim + group_emb_dim, hidden_size)
self.nn_gc1 = nn.Linear(sa_dim + group_emb_dim, hidden_size)
self.gc2 = GraphConvLayer(hidden_size, hidden_size)
self.nn_gc2 = nn.Linear(hidden_size, hidden_size)
# Create group embeddings.
num_groups = len(agent_groups)
self.group_emb = nn.ParameterList([nn.Parameter(torch.randn(1, 1, group_emb_dim), requires_grad=True) for k in range(num_groups)])
# Assumes a fully connected graph.
self.register_buffer('adj', (torch.ones(n_agents, n_agents) - torch.eye(n_agents)) / self.n_agents)
self.use_agent_id = use_agent_id
self.agent_id = agent_id
if use_agent_id:
self.curr_agent_attr = nn.Parameter(
torch.randn(agent_id_attr_dim, 1), requires_grad=True)
self.other_agent_attr = nn.Parameter(
torch.randn(agent_id_attr_dim, 1), requires_grad=True)
agent_att = []
for k in range(self.n_agents):
if k == self.agent_id:
agent_att.append(self.curr_agent_attr.unsqueeze(-1))
else:
agent_att.append(self.other_agent_attr.unsqueeze(-1))
agent_att = torch.cat(agent_att, -1)
self.agent_att = agent_att.unsqueeze(0)
def forward(self, x):
    """Compute the pooled graph feature for a batch of agent inputs.

    :param x: [batch_size, n_agents, sa_dim] tensor of per-agent features.
        NOTE(review): the original docstring claimed
        [batch_size, sa_dim, n_agents], but the group-embedding concat below
        only lines up if the agent axis is dim 1 — confirm with callers.
    :return: [batch_size, hidden_size] tensor pooled over the agent dimension.
    :raises ValueError: if ``self.pool_type`` is neither 'avg' nor 'max'
        (previously this fell through and crashed with UnboundLocalError).
    """
    if self.use_agent_id:
        agent_att = torch.cat([self.agent_att] * x.shape[0], 0)
        x = torch.cat([x, agent_att], 1)
    # Build one group-embedding row per agent (agents in the same group share
    # an embedding) and append it to each agent's input features.
    group_emb_list = []
    for k_idx, k in enumerate(self.agent_groups):
        group_emb_list += [self.group_emb[k_idx]] * k
    group_emb = torch.cat(group_emb_list, 1)
    group_emb_batch = torch.cat([group_emb] * x.shape[0], 0)
    x = torch.cat([x, group_emb_batch], -1)
    # Two rounds of graph conv plus a parallel linear "skip" branch,
    # each normalized by the number of agents.
    feat = F.relu(self.gc1(x, self.adj))
    feat += F.relu(self.nn_gc1(x))
    feat /= (1. * self.n_agents)
    out = F.relu(self.gc2(feat, self.adj))
    out += F.relu(self.nn_gc2(feat))
    out /= (1. * self.n_agents)
    # Pooling over the agent dimension.
    if self.pool_type == 'avg':
        ret = out.mean(1)
    elif self.pool_type == 'max':
        ret, _ = out.max(1)
    else:
        raise ValueError("Unknown pool_type: %r (expected 'avg' or 'max')"
                         % self.pool_type)
    return ret
class GraphNet(nn.Module):
    """
    A graph net that is used to pre-process actions and states, and solve the order issue.

    Two graph-convolution layers (each with a parallel linear "skip" branch)
    are applied over a fully connected agent graph; the result is pooled over
    the agent dimension and mapped to a scalar value V.
    """

    def __init__(self, sa_dim, n_agents, hidden_size, agent_id=0,
                 pool_type='avg', use_agent_id=False):
        """
        :param sa_dim: per-agent input feature size.
        :param n_agents: number of agents (graph nodes).
        :param hidden_size: hidden feature size of both conv layers.
        :param agent_id: index of the "current" agent (used only when
            use_agent_id is True).
        :param pool_type: 'avg' or 'max' pooling over the agent dimension.
        :param use_agent_id: if True, append a learned 2-d attribute that
            distinguishes the current agent from all others.
        """
        super(GraphNet, self).__init__()
        self.sa_dim = sa_dim
        self.n_agents = n_agents
        self.pool_type = pool_type
        if use_agent_id:
            agent_id_attr_dim = 2
            self.gc1 = GraphConvLayer(sa_dim + agent_id_attr_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim + agent_id_attr_dim, hidden_size)
        else:
            self.gc1 = GraphConvLayer(sa_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim, hidden_size)
        self.gc2 = GraphConvLayer(hidden_size, hidden_size)
        self.nn_gc2 = nn.Linear(hidden_size, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        # Small init keeps the initial value estimates near zero.
        self.V.weight.data.mul_(0.1)
        self.V.bias.data.mul_(0.1)
        # Assumes a fully connected graph (uniform weights, no self-loops).
        self.register_buffer('adj', (torch.ones(n_agents, n_agents)
                                     - torch.eye(n_agents)) / self.n_agents)
        self.use_agent_id = use_agent_id
        self.agent_id = agent_id
        if use_agent_id:
            self.curr_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim), requires_grad=True)
            self.other_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim), requires_grad=True)
            agent_att = []
            for k in range(self.n_agents):
                if k == self.agent_id:
                    agent_att.append(self.curr_agent_attr.unsqueeze(-1))
                else:
                    agent_att.append(self.other_agent_attr.unsqueeze(-1))
            agent_att = torch.cat(agent_att, -1)
            self.agent_att = agent_att.unsqueeze(0)

    def forward(self, x):
        """
        :param x: [batch_size, self.sa_dim, self.n_agent] tensor
        :return: [batch_size, 1] value tensor
        :raises ValueError: if ``self.pool_type`` is neither 'avg' nor 'max'
            (previously this fell through and crashed with UnboundLocalError).
        """
        if self.use_agent_id:
            agent_att = torch.cat([self.agent_att] * x.shape[0], 0)
            x = torch.cat([x, agent_att], 1)
        feat = F.relu(self.gc1(x, self.adj))
        feat += F.relu(self.nn_gc1(x))
        feat /= (1. * self.n_agents)
        out = F.relu(self.gc2(feat, self.adj))
        out += F.relu(self.nn_gc2(feat))
        out /= (1. * self.n_agents)
        # Pooling over the agent dimension.
        if self.pool_type == 'avg':
            ret = out.mean(1)
        elif self.pool_type == 'max':
            ret, _ = out.max(1)
        else:
            raise ValueError("Unknown pool_type: %r (expected 'avg' or 'max')"
                             % self.pool_type)
        # Compute V
        V = self.V(ret)
        return V
class MsgGraphNet(nn.Module):
    """
    A message-passing GNN.

    Two message/update rounds are run over a fully connected graph without
    self-loops; the resulting node features are max-pooled and mapped to a
    scalar value V.
    """

    def __init__(self, sa_dim, n_agents, hidden_size):
        super(MsgGraphNet, self).__init__()
        self.sa_dim = sa_dim
        self.n_agents = n_agents
        self.msg1 = MessageFunc(sa_dim * 2, hidden_size)
        self.msg2 = MessageFunc(hidden_size * 2, hidden_size)
        self.update1 = UpdateFunc(sa_dim, n_agents, hidden_size)
        self.update2 = UpdateFunc(sa_dim, n_agents, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        self.non_linear = F.relu  # alternatives: tanh, sigmoid
        # Fully connected adjacency without self-loops.
        self.adj = torch.ones(n_agents, n_agents) - torch.eye(n_agents)
        self.register_buffer('extended_adj', self.extend_adj())

    def extend_adj(self):
        """Expand self.adj into an [n, n*n] edge-gather matrix.

        Row ``receiver`` selects, from the sender-major flattened edge list,
        every edge sender -> receiver with adj[receiver, sender] == 1.
        """
        n = self.n_agents
        expanded = torch.zeros(n, n * n)
        for receiver in range(n):
            for sender in range(n):
                if self.adj[receiver, sender] == 1:
                    expanded[receiver, sender * n + receiver] = 1
        return expanded

    def forward(self, x):
        """
        :param x: [batch_size, self.n_agent, self.sa_dim, ] tensor
        :return: [batch_size, 1] value tensor
        """
        messages = self.non_linear(self.msg1(x))
        hidden = self.non_linear(self.update1(messages, x, self.extended_adj))
        messages = self.non_linear(self.msg2(hidden))
        hidden = self.non_linear(self.update2(messages, x, self.extended_adj))
        pooled = torch.max(hidden, dim=1)[0]
        # Compute V
        return self.V(pooled)
class MsgGraphNetHard(nn.Module):
    """
    A message-passing GNN with 3-clique graph, will extend to general case.

    Two edge-message / node-update rounds are run over the complete 3-node
    graph; the node features are element-wise max-pooled and mapped to a
    scalar value V.
    """

    def __init__(self, sa_dim, n_agents, hidden_size):
        super(MsgGraphNetHard, self).__init__()
        self.sa_dim = sa_dim
        self.n_agents = n_agents
        # Edge networks (fe*) and node-update networks (fv*) per round.
        self.fe1 = nn.Linear(sa_dim * 2, hidden_size)
        self.fe2 = nn.Linear(hidden_size * 2, hidden_size)
        self.fv1 = nn.Linear(hidden_size + sa_dim, hidden_size)
        self.fv2 = nn.Linear(hidden_size + sa_dim, hidden_size)
        # NOTE(review): msg*/update* are constructed but never used by
        # forward(); kept so the module's parameter set stays unchanged
        # (e.g. for existing checkpoints/optimizers).
        self.msg1 = MessageFunc(sa_dim * 2, hidden_size)
        self.msg2 = MessageFunc(hidden_size * 2, hidden_size)
        self.update1 = UpdateFunc(sa_dim, n_agents, hidden_size)
        self.update2 = UpdateFunc(sa_dim, n_agents, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        self.non_linear = F.relu  # alternatives: tanh, sigmoid
        self.adj = torch.ones(n_agents, n_agents) - torch.eye(n_agents)
        self.extended_adj = self.extend_adj()

    def extend_adj(self):
        """Expand self.adj into an [n, n*n] edge-gather matrix."""
        n = self.n_agents
        expanded = torch.zeros(n, n * n)
        for receiver in range(n):
            for sender in range(n):
                if self.adj[receiver, sender] == 1:
                    expanded[receiver, sender * n + receiver] = 1
        return expanded

    def forward(self, x):
        """
        :param x: [batch_size, self.n_agent, self.sa_dim, ] tensor
        :return: [batch_size, 1] value tensor
        """
        x = torch.transpose(x, 1, 2)
        nodes = [x[:, :, k] for k in range(3)]

        def propagate(edge_fn, node_fn, feats):
            # msgs[(i, j)] is the message sent from node i to node j.
            msgs = {}
            for i in range(3):
                for j in range(3):
                    if i != j:
                        msgs[i, j] = self.non_linear(
                            edge_fn(torch.cat((feats[i], feats[j]), dim=1)))
            updated = []
            for j in range(3):
                senders = [k for k in range(3) if k != j]
                # Average the two incoming messages (same summand order as
                # the original unrolled code).
                mean_msg = (msgs[senders[0], j] + msgs[senders[1], j]) / 2
                # The node update always re-injects node j's raw input.
                updated.append(self.non_linear(
                    node_fn(torch.cat((mean_msg, nodes[j]), dim=1))))
            return updated

        h1 = propagate(self.fe1, self.fv1, nodes)
        h2 = propagate(self.fe2, self.fv2, h1)
        out = torch.max(torch.max(h2[0], h2[1]), h2[2])
        # Compute V
        return self.V(out)
class GraphNetNN(nn.Module):
    """
    A graph net that is used to pre-process actions and states, and solve the order issue.

    Unlike GraphNet, the adjacency matrix is supplied per call to forward()
    rather than fixed at construction time.
    """

    def __init__(self, sa_dim, n_agents, hidden_size, agent_id=0,
                 pool_type='avg', use_agent_id=False):
        """
        :param sa_dim: per-agent input feature size.
        :param n_agents: number of agents (graph nodes).
        :param hidden_size: hidden feature size of both conv layers.
        :param agent_id: index of the "current" agent (used only when
            use_agent_id is True).
        :param pool_type: 'avg' or 'max' pooling over the agent dimension.
        :param use_agent_id: if True, append a learned 2-d attribute that
            distinguishes the current agent from all others.
        """
        super(GraphNetNN, self).__init__()
        self.sa_dim = sa_dim
        self.n_agents = n_agents
        self.pool_type = pool_type
        if use_agent_id:
            agent_id_attr_dim = 2
            self.gc1 = GraphConvLayer(sa_dim + agent_id_attr_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim + agent_id_attr_dim, hidden_size)
        else:
            self.gc1 = GraphConvLayer(sa_dim, hidden_size)
            self.nn_gc1 = nn.Linear(sa_dim, hidden_size)
        self.gc2 = GraphConvLayer(hidden_size, hidden_size)
        self.nn_gc2 = nn.Linear(hidden_size, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        # Small init keeps the initial value estimates near zero.
        self.V.weight.data.mul_(0.1)
        self.V.bias.data.mul_(0.1)
        # Assumes a fully connected graph.
        self.use_agent_id = use_agent_id
        self.agent_id = agent_id
        if use_agent_id:
            self.curr_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim), requires_grad=True)
            self.other_agent_attr = nn.Parameter(
                torch.randn(agent_id_attr_dim), requires_grad=True)
            agent_att = []
            for k in range(self.n_agents):
                if k == self.agent_id:
                    agent_att.append(self.curr_agent_attr.unsqueeze(-1))
                else:
                    agent_att.append(self.other_agent_attr.unsqueeze(-1))
            agent_att = torch.cat(agent_att, -1)
            self.agent_att = agent_att.unsqueeze(0)

    def forward(self, x, adj):
        """
        :param x: [batch_size, self.sa_dim, self.n_agent] tensor
        :param adj: adjacency matrix passed through to both conv layers.
        :return: [batch_size, 1] value tensor
        :raises ValueError: if ``self.pool_type`` is neither 'avg' nor 'max'
            (previously this fell through and crashed with UnboundLocalError).
        """
        if self.use_agent_id:
            agent_att = torch.cat([self.agent_att] * x.shape[0], 0)
            x = torch.cat([x, agent_att], 1)
        feat = F.relu(self.gc1(x, adj))
        feat += F.relu(self.nn_gc1(x))
        feat /= (1. * self.n_agents)
        out = F.relu(self.gc2(feat, adj))
        out += F.relu(self.nn_gc2(feat))
        out /= (1. * self.n_agents)
        # Pooling over the agent dimension.
        if self.pool_type == 'avg':
            ret = out.mean(1)
        elif self.pool_type == 'max':
            ret, _ = out.max(1)
        else:
            raise ValueError("Unknown pool_type: %r (expected 'avg' or 'max')"
                             % self.pool_type)
        # Compute V
        V = self.V(ret)
        return V
| 38.542373
| 138
| 0.584653
| 2,651
| 18,192
| 3.761977
| 0.071671
| 0.047729
| 0.036398
| 0.037501
| 0.93693
| 0.926903
| 0.912664
| 0.90725
| 0.889201
| 0.882182
| 0
| 0.030051
| 0.29392
| 18,192
| 471
| 139
| 38.624204
| 0.74636
| 0.160455
| 0
| 0.833333
| 0
| 0
| 0.003854
| 0
| 0
| 0
| 0
| 0
| 0.006667
| 1
| 0.046667
| false
| 0
| 0.013333
| 0
| 0.106667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85e68c36430995e79ac3587d9846f48e64c539cc
| 395
|
py
|
Python
|
scratch/shawn/setup.py
|
sasgc6/pysmurf
|
a370b515ab717c982781223da147bea3c8fb3a9c
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2019-10-17T02:37:59.000Z
|
2022-03-09T16:42:34.000Z
|
scratch/shawn/setup.py
|
sasgc6/pysmurf
|
a370b515ab717c982781223da147bea3c8fb3a9c
|
[
"BSD-3-Clause-LBNL"
] | 446
|
2019-04-10T04:46:20.000Z
|
2022-03-15T20:27:57.000Z
|
scratch/shawn/setup.py
|
sasgc6/pysmurf
|
a370b515ab717c982781223da147bea3c8fb3a9c
|
[
"BSD-3-Clause-LBNL"
] | 13
|
2019-02-05T18:02:05.000Z
|
2021-03-02T18:41:49.000Z
|
# Bring-up script: construct a SmurfControl with setup=True so the
# initialization sequence runs at construction time.
# NOTE(review): this presumably performs hardware/EPICS I/O on import —
# confirm before running outside the lab environment.
import pysmurf
# Alternate carrier (epics_root='test_epics5', fp28 config) kept for reference.
#S5 = pysmurf.SmurfControl(make_logfile=False,setup=True,epics_root='test_epics5',cfg_file='/usr/local/controls/Applications/smurf/pysmurf/pysmurf/cfg_files/experiment_fp28_smurfsrv04.cfg')
S = pysmurf.SmurfControl(make_logfile=False,setup=True,epics_root='test_epics2',cfg_file='/usr/local/controls/Applications/smurf/pysmurf/pysmurf/cfg_files/experiment_fp29_smurfsrv04.cfg')
| 65.833333
| 189
| 0.84557
| 56
| 395
| 5.714286
| 0.482143
| 0.11875
| 0.14375
| 0.1875
| 0.80625
| 0.80625
| 0.80625
| 0.80625
| 0.80625
| 0.80625
| 0
| 0.028571
| 0.025316
| 395
| 5
| 190
| 79
| 0.802597
| 0.475949
| 0
| 0
| 0
| 0
| 0.517073
| 0.463415
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
c080f76d9073cf032cb7f22c17dc3bcb55da0958
| 187
|
py
|
Python
|
rnachipintegrator/__init__.py
|
fls-bioinformatics-core/RnaChipIntegrator
|
d02aa34459a7dc4ab0c7955d295af1c939545526
|
[
"Artistic-2.0"
] | null | null | null |
rnachipintegrator/__init__.py
|
fls-bioinformatics-core/RnaChipIntegrator
|
d02aa34459a7dc4ab0c7955d295af1c939545526
|
[
"Artistic-2.0"
] | 42
|
2015-09-02T15:42:27.000Z
|
2020-01-06T13:30:09.000Z
|
rnachipintegrator/__init__.py
|
fls-bioinformatics-core/RnaChipIntegrator
|
d02aa34459a7dc4ab0c7955d295af1c939545526
|
[
"Artistic-2.0"
] | 1
|
2019-07-29T02:50:47.000Z
|
2019-07-29T02:50:47.000Z
|
# Current version of the library
__version__ = '2.0.0'
def get_version():
"""Returns a string with the current version of the library (e.g., "0.2.0")
"""
return __version__
| 20.777778
| 79
| 0.657754
| 29
| 187
| 3.931034
| 0.551724
| 0.245614
| 0.280702
| 0.333333
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 0.213904
| 187
| 8
| 80
| 23.375
| 0.734694
| 0.588235
| 0
| 0
| 0
| 0
| 0.070423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
c0ba68e45abd4955932216da2c85557063d12c5e
| 43,648
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/power_scheduler/v1/schedule_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/power_scheduler/v1/schedule_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/power_scheduler/v1/schedule_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spaceone/api/power_scheduler/v1/schedule.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from spaceone.api.core.v1 import query_pb2 as spaceone_dot_api_dot_core_dot_v1_dot_query__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='spaceone/api/power_scheduler/v1/schedule.proto',
package='spaceone.api.power_scheduler.v1',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n.spaceone/api/power_scheduler/v1/schedule.proto\x12\x1fspaceone.api.power_scheduler.v1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1cgoogle/api/annotations.proto\x1a spaceone/api/core/v1/query.proto\"\x84\x01\n\x15\x43reateScheduleRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12%\n\x04tags\x18\x0b \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x0f\n\x07user_id\x18\x17 \x01(\t\x12\x12\n\nproject_id\x18\x15 \x01(\t\x12\x11\n\tdomain_id\x18\x16 \x01(\t\"\xb3\x01\n\x15UpdateScheduleRequest\x12\x13\n\x0bschedule_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12=\n\x05state\x18\x03 \x01(\x0e\x32..spaceone.api.power_scheduler.v1.ScheduleState\x12%\n\x04tags\x18\x0b \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x11\n\tdomain_id\x18\x16 \x01(\t\"9\n\x0fScheduleRequest\x12\x13\n\x0bschedule_id\x18\x01 \x01(\t\x12\x11\n\tdomain_id\x18\x02 \x01(\t\"J\n\x12GetScheduleRequest\x12\x13\n\x0bschedule_id\x18\x01 \x01(\t\x12\x11\n\tdomain_id\x18\x02 \x01(\t\x12\x0c\n\x04only\x18\x03 \x03(\t\"\xdf\x01\n\rScheduleQuery\x12*\n\x05query\x18\x01 \x01(\x0b\x32\x1b.spaceone.api.core.v1.Query\x12\x13\n\x0bschedule_id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12=\n\x05state\x18\x04 \x01(\x0e\x32..spaceone.api.power_scheduler.v1.ScheduleState\x12\x19\n\x11resource_group_id\x18\x05 \x01(\t\x12\x12\n\nproject_id\x18\x06 \x01(\t\x12\x11\n\tdomain_id\x18\x07 \x01(\t\"<\n\rResourceGroup\x12\x19\n\x11resource_group_id\x18\x01 \x01(\t\x12\x10\n\x08priority\x18\x02 \x01(\x05\"q\n\x1a\x43reateResourceGroupRequest\x12\x13\n\x0bschedule_id\x18\x01 \x01(\t\x12\x19\n\x11resource_group_id\x18\x02 \x01(\t\x12\x10\n\x08priority\x18\x03 \x01(\x05\x12\x11\n\tdomain_id\x18\x16 \x01(\t\"k\n\x14ResourceGroupRequest\x12\x13\n\x0bschedule_id\x18\x01 \x01(\t\x12\x19\n\x11resource_group_id\x18\x02 \x01(\t\x12\x10\n\x08priority\x18\x03 \x01(\x05\x12\x11\n\tdomain_id\x18\x16 \x01(\t\"\xaf\x02\n\x0cScheduleInfo\x12\x13\n\x0bschedule_id\x18\x01 
\x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12=\n\x05state\x18\x03 \x01(\x0e\x32..spaceone.api.power_scheduler.v1.ScheduleState\x12G\n\x0fresource_groups\x18\x0b \x03(\x0b\x32..spaceone.api.power_scheduler.v1.ResourceGroup\x12%\n\x04tags\x18\x0c \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x12\n\nproject_id\x18\x15 \x01(\t\x12\x11\n\tdomain_id\x18\x16 \x01(\t\x12\x12\n\ncreated_by\x18\x17 \x01(\t\x12\x12\n\ncreated_at\x18\x1f \x01(\t\"d\n\rSchedulesInfo\x12>\n\x07results\x18\x01 \x03(\x0b\x32-.spaceone.api.power_scheduler.v1.ScheduleInfo\x12\x13\n\x0btotal_count\x18\x02 \x01(\x05\"\\\n\x11ScheduleStatQuery\x12\x34\n\x05query\x18\x01 \x01(\x0b\x32%.spaceone.api.core.v1.StatisticsQuery\x12\x11\n\tdomain_id\x18\x02 \x01(\t*4\n\rScheduleState\x12\x08\n\x04NONE\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\x0c\n\x08\x44ISABLED\x10\x02\x32\x94\x0f\n\x08Schedule\x12\x96\x01\n\x06\x63reate\x12\x36.spaceone.api.power_scheduler.v1.CreateScheduleRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"%\x82\xd3\xe4\x93\x02\x1f\"\x1d/power-scheduler/v1/schedules\x12\xa3\x01\n\x06update\x12\x36.spaceone.api.power_scheduler.v1.UpdateScheduleRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"2\x82\xd3\xe4\x93\x02,\x1a*/power-scheduler/v1/schedule/{schedule_id}\x12\xa4\x01\n\x06\x65nable\x12\x30.spaceone.api.power_scheduler.v1.ScheduleRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"9\x82\xd3\xe4\x93\x02\x33\x1a\x31/power-scheduler/v1/schedule/{schedule_id}/enable\x12\xa6\x01\n\x07\x64isable\x12\x30.spaceone.api.power_scheduler.v1.ScheduleRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\":\x82\xd3\xe4\x93\x02\x34\x1a\x32/power-scheduler/v1/schedule/{schedule_id}/disable\x12\xe1\x01\n\x15\x61ppend_resource_group\x12;.spaceone.api.power_scheduler.v1.CreateResourceGroupRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"\\\x82\xd3\xe4\x93\x02V\x1aT/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}/append\x12\xd4\x01\n\x15upda
te_resource_group\x12\x35.spaceone.api.power_scheduler.v1.ResourceGroupRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"U\x82\xd3\xe4\x93\x02O\x1aM/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}\x12\xdb\x01\n\x15remove_resource_group\x12\x35.spaceone.api.power_scheduler.v1.ResourceGroupRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"\\\x82\xd3\xe4\x93\x02V\x1aT/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}/remove\x12\x86\x01\n\x06\x64\x65lete\x12\x30.spaceone.api.power_scheduler.v1.ScheduleRequest\x1a\x16.google.protobuf.Empty\"2\x82\xd3\xe4\x93\x02,**/power-scheduler/v1/schedule/{schedule_id}\x12\x9d\x01\n\x03get\x12\x33.spaceone.api.power_scheduler.v1.GetScheduleRequest\x1a-.spaceone.api.power_scheduler.v1.ScheduleInfo\"2\x82\xd3\xe4\x93\x02,\x12*/power-scheduler/v1/schedule/{schedule_id}\x12\xb5\x01\n\x04list\x12..spaceone.api.power_scheduler.v1.ScheduleQuery\x1a..spaceone.api.power_scheduler.v1.SchedulesInfo\"M\x82\xd3\xe4\x93\x02G\x12\x1d/power-scheduler/v1/schedulesZ&\"$/power-scheduler/v1/schedules/search\x12\x7f\n\x04stat\x12\x32.spaceone.api.power_scheduler.v1.ScheduleStatQuery\x1a\x17.google.protobuf.Struct\"*\x82\xd3\xe4\x93\x02$\"\"/power-scheduler/v1/schedules/statb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,spaceone_dot_api_dot_core_dot_v1_dot_query__pb2.DESCRIPTOR,])
_SCHEDULESTATE = _descriptor.EnumDescriptor(
name='ScheduleState',
full_name='spaceone.api.power_scheduler.v1.ScheduleState',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENABLED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISABLED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1672,
serialized_end=1724,
)
_sym_db.RegisterEnumDescriptor(_SCHEDULESTATE)
ScheduleState = enum_type_wrapper.EnumTypeWrapper(_SCHEDULESTATE)
NONE = 0
ENABLED = 1
DISABLED = 2
_CREATESCHEDULEREQUEST = _descriptor.Descriptor(
name='CreateScheduleRequest',
full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest.tags', index=1,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_id', full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest.user_id', index=2,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest.project_id', index=3,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.CreateScheduleRequest.domain_id', index=4,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=207,
serialized_end=339,
)
_UPDATESCHEDULEREQUEST = _descriptor.Descriptor(
name='UpdateScheduleRequest',
full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest.state', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest.tags', index=3,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.UpdateScheduleRequest.domain_id', index=4,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=342,
serialized_end=521,
)
_SCHEDULEREQUEST = _descriptor.Descriptor(
name='ScheduleRequest',
full_name='spaceone.api.power_scheduler.v1.ScheduleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.ScheduleRequest.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.ScheduleRequest.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=523,
serialized_end=580,
)
_GETSCHEDULEREQUEST = _descriptor.Descriptor(
name='GetScheduleRequest',
full_name='spaceone.api.power_scheduler.v1.GetScheduleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.GetScheduleRequest.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.GetScheduleRequest.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='only', full_name='spaceone.api.power_scheduler.v1.GetScheduleRequest.only', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=582,
serialized_end=656,
)
_SCHEDULEQUERY = _descriptor.Descriptor(
name='ScheduleQuery',
full_name='spaceone.api.power_scheduler.v1.ScheduleQuery',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.query', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.schedule_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.state', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.resource_group_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.project_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.ScheduleQuery.domain_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=659,
serialized_end=882,
)
_RESOURCEGROUP = _descriptor.Descriptor(
name='ResourceGroup',
full_name='spaceone.api.power_scheduler.v1.ResourceGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.power_scheduler.v1.ResourceGroup.resource_group_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='priority', full_name='spaceone.api.power_scheduler.v1.ResourceGroup.priority', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=884,
serialized_end=944,
)
_CREATERESOURCEGROUPREQUEST = _descriptor.Descriptor(
name='CreateResourceGroupRequest',
full_name='spaceone.api.power_scheduler.v1.CreateResourceGroupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.CreateResourceGroupRequest.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.power_scheduler.v1.CreateResourceGroupRequest.resource_group_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='priority', full_name='spaceone.api.power_scheduler.v1.CreateResourceGroupRequest.priority', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.CreateResourceGroupRequest.domain_id', index=3,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=946,
serialized_end=1059,
)
_RESOURCEGROUPREQUEST = _descriptor.Descriptor(
name='ResourceGroupRequest',
full_name='spaceone.api.power_scheduler.v1.ResourceGroupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.ResourceGroupRequest.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.power_scheduler.v1.ResourceGroupRequest.resource_group_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='priority', full_name='spaceone.api.power_scheduler.v1.ResourceGroupRequest.priority', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.ResourceGroupRequest.domain_id', index=3,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1061,
serialized_end=1168,
)
_SCHEDULEINFO = _descriptor.Descriptor(
name='ScheduleInfo',
full_name='spaceone.api.power_scheduler.v1.ScheduleInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='schedule_id', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.schedule_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.state', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_groups', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.resource_groups', index=3,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.tags', index=4,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.project_id', index=5,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.domain_id', index=6,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_by', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.created_by', index=7,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='spaceone.api.power_scheduler.v1.ScheduleInfo.created_at', index=8,
number=31, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1171,
serialized_end=1474,
)
_SCHEDULESINFO = _descriptor.Descriptor(
name='SchedulesInfo',
full_name='spaceone.api.power_scheduler.v1.SchedulesInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='results', full_name='spaceone.api.power_scheduler.v1.SchedulesInfo.results', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_count', full_name='spaceone.api.power_scheduler.v1.SchedulesInfo.total_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1476,
serialized_end=1576,
)
_SCHEDULESTATQUERY = _descriptor.Descriptor(
name='ScheduleStatQuery',
full_name='spaceone.api.power_scheduler.v1.ScheduleStatQuery',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='spaceone.api.power_scheduler.v1.ScheduleStatQuery.query', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.power_scheduler.v1.ScheduleStatQuery.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1578,
serialized_end=1670,
)
_CREATESCHEDULEREQUEST.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATESCHEDULEREQUEST.fields_by_name['state'].enum_type = _SCHEDULESTATE
_UPDATESCHEDULEREQUEST.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_SCHEDULEQUERY.fields_by_name['query'].message_type = spaceone_dot_api_dot_core_dot_v1_dot_query__pb2._QUERY
_SCHEDULEQUERY.fields_by_name['state'].enum_type = _SCHEDULESTATE
_SCHEDULEINFO.fields_by_name['state'].enum_type = _SCHEDULESTATE
_SCHEDULEINFO.fields_by_name['resource_groups'].message_type = _RESOURCEGROUP
_SCHEDULEINFO.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_SCHEDULESINFO.fields_by_name['results'].message_type = _SCHEDULEINFO
_SCHEDULESTATQUERY.fields_by_name['query'].message_type = spaceone_dot_api_dot_core_dot_v1_dot_query__pb2._STATISTICSQUERY
DESCRIPTOR.message_types_by_name['CreateScheduleRequest'] = _CREATESCHEDULEREQUEST
DESCRIPTOR.message_types_by_name['UpdateScheduleRequest'] = _UPDATESCHEDULEREQUEST
DESCRIPTOR.message_types_by_name['ScheduleRequest'] = _SCHEDULEREQUEST
DESCRIPTOR.message_types_by_name['GetScheduleRequest'] = _GETSCHEDULEREQUEST
DESCRIPTOR.message_types_by_name['ScheduleQuery'] = _SCHEDULEQUERY
DESCRIPTOR.message_types_by_name['ResourceGroup'] = _RESOURCEGROUP
DESCRIPTOR.message_types_by_name['CreateResourceGroupRequest'] = _CREATERESOURCEGROUPREQUEST
DESCRIPTOR.message_types_by_name['ResourceGroupRequest'] = _RESOURCEGROUPREQUEST
DESCRIPTOR.message_types_by_name['ScheduleInfo'] = _SCHEDULEINFO
DESCRIPTOR.message_types_by_name['SchedulesInfo'] = _SCHEDULESINFO
DESCRIPTOR.message_types_by_name['ScheduleStatQuery'] = _SCHEDULESTATQUERY
DESCRIPTOR.enum_types_by_name['ScheduleState'] = _SCHEDULESTATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CreateScheduleRequest = _reflection.GeneratedProtocolMessageType('CreateScheduleRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATESCHEDULEREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.CreateScheduleRequest)
})
_sym_db.RegisterMessage(CreateScheduleRequest)
UpdateScheduleRequest = _reflection.GeneratedProtocolMessageType('UpdateScheduleRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATESCHEDULEREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.UpdateScheduleRequest)
})
_sym_db.RegisterMessage(UpdateScheduleRequest)
ScheduleRequest = _reflection.GeneratedProtocolMessageType('ScheduleRequest', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULEREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ScheduleRequest)
})
_sym_db.RegisterMessage(ScheduleRequest)
GetScheduleRequest = _reflection.GeneratedProtocolMessageType('GetScheduleRequest', (_message.Message,), {
'DESCRIPTOR' : _GETSCHEDULEREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.GetScheduleRequest)
})
_sym_db.RegisterMessage(GetScheduleRequest)
ScheduleQuery = _reflection.GeneratedProtocolMessageType('ScheduleQuery', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULEQUERY,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ScheduleQuery)
})
_sym_db.RegisterMessage(ScheduleQuery)
ResourceGroup = _reflection.GeneratedProtocolMessageType('ResourceGroup', (_message.Message,), {
'DESCRIPTOR' : _RESOURCEGROUP,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ResourceGroup)
})
_sym_db.RegisterMessage(ResourceGroup)
CreateResourceGroupRequest = _reflection.GeneratedProtocolMessageType('CreateResourceGroupRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATERESOURCEGROUPREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.CreateResourceGroupRequest)
})
_sym_db.RegisterMessage(CreateResourceGroupRequest)
ResourceGroupRequest = _reflection.GeneratedProtocolMessageType('ResourceGroupRequest', (_message.Message,), {
'DESCRIPTOR' : _RESOURCEGROUPREQUEST,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ResourceGroupRequest)
})
_sym_db.RegisterMessage(ResourceGroupRequest)
ScheduleInfo = _reflection.GeneratedProtocolMessageType('ScheduleInfo', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULEINFO,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ScheduleInfo)
})
_sym_db.RegisterMessage(ScheduleInfo)
SchedulesInfo = _reflection.GeneratedProtocolMessageType('SchedulesInfo', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULESINFO,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.SchedulesInfo)
})
_sym_db.RegisterMessage(SchedulesInfo)
ScheduleStatQuery = _reflection.GeneratedProtocolMessageType('ScheduleStatQuery', (_message.Message,), {
'DESCRIPTOR' : _SCHEDULESTATQUERY,
'__module__' : 'spaceone.api.power_scheduler.v1.schedule_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.power_scheduler.v1.ScheduleStatQuery)
})
_sym_db.RegisterMessage(ScheduleStatQuery)
_SCHEDULE = _descriptor.ServiceDescriptor(
name='Schedule',
full_name='spaceone.api.power_scheduler.v1.Schedule',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1727,
serialized_end=3667,
methods=[
_descriptor.MethodDescriptor(
name='create',
full_name='spaceone.api.power_scheduler.v1.Schedule.create',
index=0,
containing_service=None,
input_type=_CREATESCHEDULEREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002\037\"\035/power-scheduler/v1/schedules',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='update',
full_name='spaceone.api.power_scheduler.v1.Schedule.update',
index=1,
containing_service=None,
input_type=_UPDATESCHEDULEREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002,\032*/power-scheduler/v1/schedule/{schedule_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='enable',
full_name='spaceone.api.power_scheduler.v1.Schedule.enable',
index=2,
containing_service=None,
input_type=_SCHEDULEREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\0023\0321/power-scheduler/v1/schedule/{schedule_id}/enable',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='disable',
full_name='spaceone.api.power_scheduler.v1.Schedule.disable',
index=3,
containing_service=None,
input_type=_SCHEDULEREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\0024\0322/power-scheduler/v1/schedule/{schedule_id}/disable',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='append_resource_group',
full_name='spaceone.api.power_scheduler.v1.Schedule.append_resource_group',
index=4,
containing_service=None,
input_type=_CREATERESOURCEGROUPREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002V\032T/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}/append',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='update_resource_group',
full_name='spaceone.api.power_scheduler.v1.Schedule.update_resource_group',
index=5,
containing_service=None,
input_type=_RESOURCEGROUPREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002O\032M/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='remove_resource_group',
full_name='spaceone.api.power_scheduler.v1.Schedule.remove_resource_group',
index=6,
containing_service=None,
input_type=_RESOURCEGROUPREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002V\032T/power-scheduler/v1/schedule/{schedule_id}/resource_group/{resource_group_id}/remove',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='delete',
full_name='spaceone.api.power_scheduler.v1.Schedule.delete',
index=7,
containing_service=None,
input_type=_SCHEDULEREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002,**/power-scheduler/v1/schedule/{schedule_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='get',
full_name='spaceone.api.power_scheduler.v1.Schedule.get',
index=8,
containing_service=None,
input_type=_GETSCHEDULEREQUEST,
output_type=_SCHEDULEINFO,
serialized_options=b'\202\323\344\223\002,\022*/power-scheduler/v1/schedule/{schedule_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='list',
full_name='spaceone.api.power_scheduler.v1.Schedule.list',
index=9,
containing_service=None,
input_type=_SCHEDULEQUERY,
output_type=_SCHEDULESINFO,
serialized_options=b'\202\323\344\223\002G\022\035/power-scheduler/v1/schedulesZ&\"$/power-scheduler/v1/schedules/search',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='stat',
full_name='spaceone.api.power_scheduler.v1.Schedule.stat',
index=10,
containing_service=None,
input_type=_SCHEDULESTATQUERY,
output_type=google_dot_protobuf_dot_struct__pb2._STRUCT,
serialized_options=b'\202\323\344\223\002$\"\"/power-scheduler/v1/schedules/stat',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_SCHEDULE)
DESCRIPTOR.services_by_name['Schedule'] = _SCHEDULE
# @@protoc_insertion_point(module_scope)
| 49.264108
| 5,273
| 0.770894
| 5,625
| 43,648
| 5.664
| 0.0576
| 0.037665
| 0.072819
| 0.072159
| 0.806089
| 0.785122
| 0.759416
| 0.737163
| 0.682643
| 0.647207
| 0
| 0.042548
| 0.105618
| 43,648
| 885
| 5,274
| 49.319774
| 0.77358
| 0.027699
| 0
| 0.672351
| 1
| 0.013398
| 0.286789
| 0.253548
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010962
| 0
| 0.010962
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23a8002e949023a15adea90683ea65bf9bc7f01b
| 161
|
py
|
Python
|
bioimageio/spec/utils/maybe_convert.py
|
k-dominik/python-bioimage-io
|
aecaa3412c31672ce159335db083ee9fb4fca519
|
[
"MIT"
] | null | null | null |
bioimageio/spec/utils/maybe_convert.py
|
k-dominik/python-bioimage-io
|
aecaa3412c31672ce159335db083ee9fb4fca519
|
[
"MIT"
] | null | null | null |
bioimageio/spec/utils/maybe_convert.py
|
k-dominik/python-bioimage-io
|
aecaa3412c31672ce159335db083ee9fb4fca519
|
[
"MIT"
] | null | null | null |
from bioimageio.spec.utils.maybe_convert_to_v0_3 import maybe_convert_to_v0_3
def maybe_convert(data):
data = maybe_convert_to_v0_3(data)
return data
| 20.125
| 77
| 0.807453
| 28
| 161
| 4.178571
| 0.464286
| 0.410256
| 0.358974
| 0.410256
| 0.435897
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043165
| 0.136646
| 161
| 7
| 78
| 23
| 0.798561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
23af69e57a44e30765f8220c1f827bd21fbc3872
| 15,930
|
py
|
Python
|
src/abaqus/Odb/FieldValue.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/Odb/FieldValue.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/Odb/FieldValue.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .OdbInstance import OdbInstance
from .OdbPart import OdbPart
from .SectionPoint import SectionPoint
class FieldValue:
"""The FieldValue object represents the field data at a point. The FieldValue object has no
constructor; it is created by the Odb object when data are added to the FieldOutput
object using the addData method. For faster, bulk-data access, see Using bulk data
access to an output database.
Attributes
----------
position: SymbolicConstant
A SymbolicConstant specifying the position of the output in the element. Possible values
are:
- NODAL, specifying the values calculated at the nodes.
- INTEGRATION_POINT, specifying the values calculated at the integration points.
- ELEMENT_NODAL, specifying the values obtained by extrapolating results calculated at
the integration points.
- ELEMENT_FACE, specifying the results obtained for surface variables such as cavity
radiation that are defined for the surface facets of an element.
- CENTROID, specifying the value at the centroid obtained by extrapolating results
calculated at the integration points.
precision: SymbolicConstant
A SymbolicConstant specifying the precision of the output in the element. Possible
values are:
- SINGLE_PRECISION, specifying that the output values are in single precision.
- DOUBLE_PRECISION, specifying that the output values are in double precision.
elementLabel: int
An Int specifying the element label of the element containing the location.
**elementLabel** is available only if **position=INTEGRATION_POINT**, CENTROID,
ELEMENT_NODAL, or ELEMENT_FACE.
nodeLabel: int
An Int specifying the node label of the node containing the location. **nodelabel** is
available only if **position=ELEMENT_NODAL** or NODAL.
integrationPoint: int
An Int specifying the integration point in the element. **integrationPoint** is available
only if **position=INTEGRATION_POINT**.
face: SymbolicConstant
A SymbolicConstant specifying the face of the element. **face** is available only if
**position=ELEMENT_FACE**.
type: SymbolicConstant
A SymbolicConstant specifying the output type. Possible values are SCALAR, VECTOR,
TENSOR_3D_FULL, TENSOR_3D_PLANAR, TENSOR_3D_SURFACE, TENSOR_2D_PLANAR, and
TENSOR_2D_SURFACE.
magnitude: float
A Float specifying the length or magnitude of the vector. **magnitude** is valid only when
**type=VECTOR**.
mises: float
A Float specifying the calculated von Mises stress. The value is valid only when the
**validInvariants** member includes MISES; otherwise, the value is indeterminate.
Conjugate data will be ignored in invariant calculation.
tresca: float
A Float specifying the calculated Tresca stress. The value is valid only when the
**validInvariants** member includes TRESCA; otherwise, the value is indeterminate.
Conjugate data will be ignored in invariant calculation.
press: float
A Float specifying the calculated pressure stress. The value is valid only when the
**validInvariants** member includes PRESS; otherwise, the value is indeterminate.
Conjugate data will be ignored in invariant calculation.
inv3: float
A Float specifying the calculated third stress invariant. The value is valid only when
the **validInvariants** member includes INV3; otherwise, the value is indeterminate.
Conjugate data will be ignored in invariant calculation.
maxPrincipal: float
A Float specifying the calculated maximum principal stress. The value is valid only when
the **validInvariants** member includes MAX_PRINCIPAL; otherwise, the value is
indeterminate. Conjugate data will be ignored in invariant calculation.
midPrincipal: float
A Float specifying the calculated intermediate principal stress. The value is valid only
when the **validInvariants** member includes MID_PRINCIPAL; otherwise, the value is
indeterminate. Conjugate data will be ignored in invariant calculation.
minPrincipal: float
A Float specifying the minimum principal stress. The value is valid only when the
**validInvariants** member includes MIN_PRINCIPAL; otherwise, the value is indeterminate.
Conjugate data will be ignored in invariant calculation.
maxInPlanePrincipal: float
A Float specifying the maximum principal in-plane stress. The value is valid only when
the **validInvariants** member includes MAX_INPLANE_PRINCIPAL; otherwise, the value is
indeterminate. Conjugate data will be ignored in invariant calculation.
minInPlanePrincipal: float
A Float specifying the calculated minimum principal in-plane stress. The value is valid
only when the **validInvariants** member includes MIN_INPLANE_PRINCIPAL; otherwise, the
value is indeterminate. Conjugate data will be ignored in invariant calculation.
outOfPlanePrincipal: float
A Float specifying the calculated principal out-of-plane stress. The value is valid only
when the **validInvariants** member includes OUTOFPLANE_PRINCIPAL; otherwise, the value is
indeterminate. Conjugate data will be ignored in invariant calculation.
instance: OdbInstance
An :py:class:`~abaqus.Odb.OdbInstance.OdbInstance` object specifying the part to which the labels belong.
sectionPoint: SectionPoint
A :py:class:`~abaqus.Odb.SectionPoint.SectionPoint` object.
localCoordSystem: tuple
A tuple of tuples of Floats specifying the 3 × 3 matrix of Floats specifying the
direction cosines of the local coordinate system (the rotation from global to local).
Each sequence represents a row in the direction cosine matrix. **localCoordSystem** is
available for TENSOR data written in a local coordinate system. It is also available for
VECTOR data for connector element outputs. For connector element outputs the rotation is
from local to global. If the underlying data are in double precision, an exception will
be thrown.
localCoordSystemDouble: tuple
A tuple of tuples of Floats specifying the 3 × 3 matrix of Doubles specifying the
direction cosines of the local coordinate system (the rotation from global to local).
Each sequence represents a row in the direction cosine matrix. **localCoordSystemDouble**
is available for TENSOR data written in a local coordinate system. It is also available
for VECTOR data for connector element outputs. For connector element outputs the
rotation is from local to global. If the underlying data are in single precision, an
exception will be thrown.
data: tuple
A tuple of Floats specifying data in the form described by **type**. If **type=TENSOR** or
VECTOR, **data** is a sequence containing the components. If the underlying data are in
double precision an exception will be thrown.
dataDouble: tuple
A tuple of Floats specifying data in the form described by **type**. If **type=TENSOR** or
VECTOR, **data** is a sequence containing the components. If the underlying data are in
single precision, an exception will be thrown.
conjugateData: tuple
A tuple of Floats specifying data in the form described by **type**. If **type=TENSOR** or
VECTOR, **conjugateData** is a sequence containing the components. If the underlying data
are in double precision, an exception will be thrown.
conjugateDataDouble: tuple
A tuple of Floats specifying data in the form described by **type**. If **type=TENSOR** or
VECTOR, **conjugateData** is a sequence containing the components. If the underlying data
are in single precision, an exception will be thrown.
Notes
-----
This object can be accessed by:
.. code-block:: python
import odbAccess
session.odbs[name].steps[name].frames[i].fieldOutputs[name].values[i]
"""
# A SymbolicConstant specifying the position of the output in the element. Possible values
# are:
# - NODAL, specifying the values calculated at the nodes.
# - INTEGRATION_POINT, specifying the values calculated at the integration points.
# - ELEMENT_NODAL, specifying the values obtained by extrapolating results calculated at
# the integration points.
# - ELEMENT_FACE, specifying the results obtained for surface variables such as cavity
# radiation that are defined for the surface facets of an element.
# - CENTROID, specifying the value at the centroid obtained by extrapolating results
# calculated at the integration points.
position: SymbolicConstant = None
# A SymbolicConstant specifying the precision of the output in the element. Possible
# values are:
# - SINGLE_PRECISION, specifying that the output values are in single precision.
# - DOUBLE_PRECISION, specifying that the output values are in double precision.
precision: SymbolicConstant = None
# An Int specifying the element label of the element containing the location.
# *elementLabel* is available only if *position*=INTEGRATION_POINT, CENTROID,
# ELEMENT_NODAL, or ELEMENT_FACE.
elementLabel: int = None
# An Int specifying the node label of the node containing the location. *nodelabel* is
# available only if *position*=ELEMENT_NODAL or NODAL.
nodeLabel: int = None
# An Int specifying the integration point in the element. *integrationPoint* is available
# only if *position*=INTEGRATION_POINT.
integrationPoint: int = None
# A SymbolicConstant specifying the face of the element. *face* is available only if
# *position*=ELEMENT_FACE.
face: SymbolicConstant = None
# A SymbolicConstant specifying the output type. Possible values are SCALAR, VECTOR,
# TENSOR_3D_FULL, TENSOR_3D_PLANAR, TENSOR_3D_SURFACE, TENSOR_2D_PLANAR, and
# TENSOR_2D_SURFACE.
type: SymbolicConstant = None
# A Float specifying the length or magnitude of the vector. *magnitude* is valid only when
# *type*=VECTOR.
magnitude: float = None
# A Float specifying the calculated von Mises stress. The value is valid only when the
# *validInvariants* member includes MISES; otherwise, the value is indeterminate.
# Conjugate data will be ignored in invariant calculation.
mises: float = None
# A Float specifying the calculated Tresca stress. The value is valid only when the
# *validInvariants* member includes TRESCA; otherwise, the value is indeterminate.
# Conjugate data will be ignored in invariant calculation.
tresca: float = None
# A Float specifying the calculated pressure stress. The value is valid only when the
# *validInvariants* member includes PRESS; otherwise, the value is indeterminate.
# Conjugate data will be ignored in invariant calculation.
press: float = None
# A Float specifying the calculated third stress invariant. The value is valid only when
# the *validInvariants* member includes INV3; otherwise, the value is indeterminate.
# Conjugate data will be ignored in invariant calculation.
inv3: float = None
# A Float specifying the calculated maximum principal stress. The value is valid only when
# the *validInvariants* member includes MAX_PRINCIPAL; otherwise, the value is
# indeterminate. Conjugate data will be ignored in invariant calculation.
maxPrincipal: float = None
# A Float specifying the calculated intermediate principal stress. The value is valid only
# when the *validInvariants* member includes MID_PRINCIPAL; otherwise, the value is
# indeterminate. Conjugate data will be ignored in invariant calculation.
midPrincipal: float = None
# A Float specifying the minimum principal stress. The value is valid only when the
# *validInvariants* member includes MIN_PRINCIPAL; otherwise, the value is indeterminate.
# Conjugate data will be ignored in invariant calculation.
minPrincipal: float = None
# A Float specifying the maximum principal in-plane stress. The value is valid only when
# the *validInvariants* member includes MAX_INPLANE_PRINCIPAL; otherwise, the value is
# indeterminate. Conjugate data will be ignored in invariant calculation.
maxInPlanePrincipal: float = None
# A Float specifying the calculated minimum principal in-plane stress. The value is valid
# only when the *validInvariants* member includes MIN_INPLANE_PRINCIPAL; otherwise, the
# value is indeterminate. Conjugate data will be ignored in invariant calculation.
minInPlanePrincipal: float = None
# A Float specifying the calculated principal out-of-plane stress. The value is valid only
# when the *validInvariants* member includes OUTOFPLANE_PRINCIPAL; otherwise, the value is
# indeterminate. Conjugate data will be ignored in invariant calculation.
outOfPlanePrincipal: float = None
# An OdbInstance object specifying the part to which the labels belong.
instance: OdbInstance = OdbInstance('instance', OdbPart('part', THREE_D, DEFORMABLE_BODY))
# A SectionPoint object.
sectionPoint: SectionPoint = None
# A tuple of tuples of Floats specifying the 3 × 3 matrix of Floats specifying the
# direction cosines of the local coordinate system (the rotation from global to local).
# Each sequence represents a row in the direction cosine matrix. *localCoordSystem* is
# available for TENSOR data written in a local coordinate system. It is also available for
# VECTOR data for connector element outputs. For connector element outputs the rotation is
# from local to global. If the underlying data are in double precision, an exception will
# be thrown.
localCoordSystem: tuple = ()
# A tuple of tuples of Floats specifying the 3 × 3 matrix of Doubles specifying the
# direction cosines of the local coordinate system (the rotation from global to local).
# Each sequence represents a row in the direction cosine matrix. *localCoordSystemDouble*
# is available for TENSOR data written in a local coordinate system. It is also available
# for VECTOR data for connector element outputs. For connector element outputs the
# rotation is from local to global. If the underlying data are in single precision, an
# exception will be thrown.
localCoordSystemDouble: tuple = ()
# A tuple of Floats specifying data in the form described by *type*. If *type*=TENSOR or
# VECTOR, *data* is a sequence containing the components. If the underlying data are in
# double precision an exception will be thrown.
data: tuple = ()
# A tuple of Floats specifying data in the form described by *type*. If *type*=TENSOR or
# VECTOR, *data* is a sequence containing the components. If the underlying data are in
# single precision, an exception will be thrown.
dataDouble: tuple = ()
# A tuple of Floats specifying data in the form described by *type*. If *type*=TENSOR or
# VECTOR, *conjugateData* is a sequence containing the components. If the underlying data
# are in double precision, an exception will be thrown.
conjugateData: tuple = ()
# A tuple of Floats specifying data in the form described by *type*. If *type*=TENSOR or
# VECTOR, *conjugateData* is a sequence containing the components. If the underlying data
# are in single precision, an exception will be thrown.
conjugateDataDouble: tuple = ()
| 57.717391
| 113
| 0.730069
| 2,051
| 15,930
| 5.643101
| 0.0941
| 0.0629
| 0.03456
| 0.036115
| 0.921116
| 0.91334
| 0.907551
| 0.903923
| 0.900207
| 0.900207
| 0
| 0.001775
| 0.222034
| 15,930
| 275
| 114
| 57.927273
| 0.931816
| 0.868362
| 0
| 0
| 0
| 0
| 0.008005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.129032
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
9b241c952027d09c4c67b31b93937a303f549872
| 5,002
|
py
|
Python
|
test/wecall_acceptance/regions_specification/test_region_padding.py
|
dylex/wecall
|
35d24cefa4fba549e737cd99329ae1b17dd0156b
|
[
"MIT"
] | 8
|
2018-10-08T15:47:21.000Z
|
2021-11-09T07:13:05.000Z
|
test/wecall_acceptance/regions_specification/test_region_padding.py
|
dylex/wecall
|
35d24cefa4fba549e737cd99329ae1b17dd0156b
|
[
"MIT"
] | 4
|
2018-11-05T09:16:27.000Z
|
2020-04-09T12:32:56.000Z
|
test/wecall_acceptance/regions_specification/test_region_padding.py
|
dylex/wecall
|
35d24cefa4fba549e737cd99329ae1b17dd0156b
|
[
"MIT"
] | 4
|
2019-09-03T15:46:39.000Z
|
2021-06-04T07:28:33.000Z
|
# All content Copyright (C) 2018 Genomics plc
from wecall.genomics.variant import Variant
from wecall_test_drivers.base_test import BaseTest
from wecall_test_drivers.svc_driver import SVCDriver
class TestRegionPadding(BaseTest):
    """Acceptance tests covering how region specification and region padding
    influence which variants the caller reports."""

    def test_should_call_snp_with_minimal_covering_region_using_default_padding(self):
        # A 1-base region over the SNP site is enough with default padding.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................T........... ", chrom='1', n_fwd=10, n_rev=10)
        driver.with_region_string('1:40-41')

        expectation = driver.call()
        expectation.with_output_vcf().record_count(1).has_record_for_variant(Variant('1', 40, "C", "T"))

    def test_should_call_del_with_minimal_covering_region_using_default_padding(self):
        # A 1-base region over the deleted base is enough with default padding.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................*........... ", chrom='1', n_fwd=10, n_rev=10)
        driver.with_region_string('1:40-41')

        expectation = driver.call()
        expectation.with_output_vcf().record_count(1).has_record_for_variant(Variant('1', 39, "GC", "G"))

    def test_should_call_del_with_minimal_covering_region_using_default_padding_with_region_before(self):
        # The region just before the deleted base also picks up the deletion.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................*........... ", chrom='1', n_fwd=10, n_rev=10)
        driver.with_region_string('1:39-40')

        expectation = driver.call()
        expectation.with_output_vcf().record_count(1).has_record_for_variant(Variant('1', 39, "GC", "G"))

    def test_should_not_call_del_if_region_doesnt_overlap_deleted_part(self):
        # Regions flanking — but not overlapping — the deleted base yield no call.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................*........... ", chrom='1', n_fwd=10, n_rev=10)
        driver.with_region_string('1:38-39,1:41-42')

        expectation = driver.call()
        expectation.with_output_vcf().record_count(0)

    def test_should_call_first_snp_if_region_padding_is_zero(self):
        # With zero padding the region boundary is exact, so the SNP at 40 is in.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ...............................G.......... ", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................T........... ", chrom='1', n_fwd=6, n_rev=6)
        driver.with_region_string('1:0-41')
        driver.with_region_padding(0)

        expectation = driver.call()
        expectation.with_output_vcf().record_count(1).has_record_for_variant(Variant('1', 40, "C", "T"))

    def test_should_not_call_first_snp_if_region_padding_is_one(self):
        # One base of padding pulls in the neighbouring evidence and suppresses the call.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ...............................G.......... ", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................T........... ", chrom='1', n_fwd=6, n_rev=6)
        driver.with_region_string('1:0-41')
        driver.with_region_padding(1)

        expectation = driver.call()
        expectation.with_output_vcf().record_count(0)

    def test_should_cope_with_region_padding_which_pads_to_negative_index_into_reference(self):
        # Padding 20 on a region starting at 20 would reach index 0 (or below);
        # the run must still succeed rather than crash.
        driver = SVCDriver(self)
        driver.with_ref_sequence(
            "AAAGCGTACAACCGGGTTAGTCACAAACCCGTTACGTATGCCCCCCCCCCCATG", chrom='1'
        ).with_read(
            "......................................................", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ...............................G.......... ", chrom='1', n_fwd=10, n_rev=10
        ).with_read(
            " ..............................T........... ", chrom='1', n_fwd=6, n_rev=6)
        driver.with_region_string('1:20-41')
        driver.with_region_padding(20)

        driver.call(expected_success=True)
| 44.660714
| 105
| 0.518792
| 542
| 5,002
| 4.426199
| 0.154982
| 0.060025
| 0.049604
| 0.070863
| 0.844935
| 0.831597
| 0.831597
| 0.80742
| 0.80742
| 0.80742
| 0
| 0.038432
| 0.219712
| 5,002
| 111
| 106
| 45.063063
| 0.576223
| 0.008597
| 0
| 0.786517
| 0
| 0
| 0.28021
| 0.23724
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078652
| false
| 0
| 0.033708
| 0
| 0.123596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1a9ac48c1b778e297d87c4a133f910c7bb8327f
| 11,336
|
py
|
Python
|
exavault/api/email_api.py
|
ExaVault/evapi-python
|
769bfa9fbb683f2b4653ca2564029ffb72445c8c
|
[
"MIT"
] | null | null | null |
exavault/api/email_api.py
|
ExaVault/evapi-python
|
769bfa9fbb683f2b4653ca2564029ffb72445c8c
|
[
"MIT"
] | 3
|
2017-07-13T20:58:05.000Z
|
2019-08-02T19:08:37.000Z
|
exavault/api/email_api.py
|
ExaVault/evapi-python
|
769bfa9fbb683f2b4653ca2564029ffb72445c8c
|
[
"MIT"
] | 4
|
2016-11-16T00:14:23.000Z
|
2020-09-24T14:50:46.000Z
|
# coding: utf-8
"""
ExaVault API
See our API reference documentation at https://www.exavault.com/developer/api-docs/ # noqa: E501
OpenAPI spec version: 2.0
Contact: support@exavault.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from exavault.api_client import ApiClient
class EmailApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when the caller does not
        # supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def send_referral_email(self, ev_api_key, ev_access_token, **kwargs):  # noqa: E501
        """Send referral email to a given address  # noqa: E501

        Invite a friend to sign up for a free trial of ExaVault. Send a [referral](/lp/referafriend/) email to an email address. If the recipient signs up for ExaVault, we'll apply a credit to your account for the referral.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_referral_email(ev_api_key, ev_access_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ev_api_key: API Key required to make the API call. (required)
        :param str ev_access_token: Access token required to make the API call. (required)
        :param SendReferralEmailRequestBody body:
        :return: EmptyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: always request data-only responses, then dispatch to the
        # *_with_http_info variant either asynchronously or synchronously.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.send_referral_email_with_http_info(ev_api_key, ev_access_token, **kwargs)  # noqa: E501
        else:
            (data) = self.send_referral_email_with_http_info(ev_api_key, ev_access_token, **kwargs)  # noqa: E501
            return data

    def send_referral_email_with_http_info(self, ev_api_key, ev_access_token, **kwargs):  # noqa: E501
        """Send referral email to a given address  # noqa: E501

        Invite a friend to sign up for a free trial of ExaVault. Send a [referral](/lp/referafriend/) email to an email address. If the recipient signs up for ExaVault, we'll apply a credit to your account for the referral.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_referral_email_with_http_info(ev_api_key, ev_access_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ev_api_key: API Key required to make the API call. (required)
        :param str ev_access_token: Access token required to make the API call. (required)
        :param SendReferralEmailRequestBody body:
        :return: EmptyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument that is not a declared API parameter or
        # one of the framework pass-through options.
        all_params = ['ev_api_key', 'ev_access_token', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method send_referral_email" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ev_api_key' is set
        if ('ev_api_key' not in params or
                params['ev_api_key'] is None):
            raise ValueError("Missing the required parameter `ev_api_key` when calling `send_referral_email`")  # noqa: E501
        # verify the required parameter 'ev_access_token' is set
        if ('ev_access_token' not in params or
                params['ev_access_token'] is None):
            raise ValueError("Missing the required parameter `ev_access_token` when calling `send_referral_email`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        # Credentials travel as request headers, not query parameters.
        header_params = {}
        if 'ev_api_key' in params:
            header_params['ev-api-key'] = params['ev_api_key']  # noqa: E501
        if 'ev_access_token' in params:
            header_params['ev-access-token'] = params['ev_access_token']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/email/referral', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EmptyResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def send_welcome_email(self, ev_api_key, ev_access_token, username, **kwargs):  # noqa: E501
        """Resend welcome email to specific user  # noqa: E501

        Send a welcome email to a user. The contents of the welcome email can be set by [PATCH /accounts](#operation/updateAccount).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_welcome_email(ev_api_key, ev_access_token, username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ev_api_key: API Key required to make the API call. (required)
        :param str ev_access_token: Access token required to make the API call. (required)
        :param str username: A username to send the welcome email to. (required)
        :return: EmptyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper mirroring send_referral_email: delegate to the
        # *_with_http_info variant, sync or async.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.send_welcome_email_with_http_info(ev_api_key, ev_access_token, username, **kwargs)  # noqa: E501
        else:
            (data) = self.send_welcome_email_with_http_info(ev_api_key, ev_access_token, username, **kwargs)  # noqa: E501
            return data

    def send_welcome_email_with_http_info(self, ev_api_key, ev_access_token, username, **kwargs):  # noqa: E501
        """Resend welcome email to specific user  # noqa: E501

        Send a welcome email to a user. The contents of the welcome email can be set by [PATCH /accounts](#operation/updateAccount).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_welcome_email_with_http_info(ev_api_key, ev_access_token, username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ev_api_key: API Key required to make the API call. (required)
        :param str ev_access_token: Access token required to make the API call. (required)
        :param str username: A username to send the welcome email to. (required)
        :return: EmptyResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument that is not a declared API parameter or
        # one of the framework pass-through options.
        all_params = ['ev_api_key', 'ev_access_token', 'username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method send_welcome_email" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ev_api_key' is set
        if ('ev_api_key' not in params or
                params['ev_api_key'] is None):
            raise ValueError("Missing the required parameter `ev_api_key` when calling `send_welcome_email`")  # noqa: E501
        # verify the required parameter 'ev_access_token' is set
        if ('ev_access_token' not in params or
                params['ev_access_token'] is None):
            raise ValueError("Missing the required parameter `ev_access_token` when calling `send_welcome_email`")  # noqa: E501
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `send_welcome_email`")  # noqa: E501

        collection_formats = {}

        # The username is interpolated into the URL path template below.
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501

        query_params = []

        # Credentials travel as request headers, not query parameters.
        header_params = {}
        if 'ev_api_key' in params:
            header_params['ev-api-key'] = params['ev_api_key']  # noqa: E501
        if 'ev_access_token' in params:
            header_params['ev-access-token'] = params['ev_access_token']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/email/welcome/{username}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EmptyResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 44.28125
| 238
| 0.639732
| 1,445
| 11,336
| 4.775087
| 0.129412
| 0.042899
| 0.037101
| 0.02029
| 0.893188
| 0.886812
| 0.884638
| 0.848116
| 0.83971
| 0.83971
| 0
| 0.014468
| 0.274435
| 11,336
| 255
| 239
| 44.454902
| 0.824438
| 0.39229
| 0
| 0.719697
| 0
| 0
| 0.21625
| 0.031275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037879
| false
| 0
| 0.030303
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1ffcfdb29d9b2445de89a2fcd52a428c8126bcf
| 78
|
py
|
Python
|
Sample-repository/main/mymath/addition.py
|
MKupperman/Intro-to-Python
|
9cbc121a07164d1f40fe6cd1102913ba7056e4be
|
[
"MIT"
] | null | null | null |
Sample-repository/main/mymath/addition.py
|
MKupperman/Intro-to-Python
|
9cbc121a07164d1f40fe6cd1102913ba7056e4be
|
[
"MIT"
] | null | null | null |
Sample-repository/main/mymath/addition.py
|
MKupperman/Intro-to-Python
|
9cbc121a07164d1f40fe6cd1102913ba7056e4be
|
[
"MIT"
] | null | null | null |
def addtwo(x, y):
    """Return the sum of *x* and *y*."""
    total = x + y
    return total
def subtracttwo(x, y):
    """Return *x* minus *y*."""
    difference = x - y
    return difference
| 9.75
| 22
| 0.551282
| 14
| 78
| 3.071429
| 0.428571
| 0.186047
| 0.372093
| 0.418605
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.320513
| 78
| 7
| 23
| 11.142857
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7b10c43a0f9712f9d99e71e7885c0ac6deef0948
| 290
|
py
|
Python
|
kafkaSchemaManager/implementation/localSchema/LocalSchemaHolderFactory.py
|
YendiyarovSV/kafka-avro-producer-topkrabbensteam
|
d7a318b465ff38897150a4a4db267309793373bc
|
[
"Apache-2.0"
] | null | null | null |
kafkaSchemaManager/implementation/localSchema/LocalSchemaHolderFactory.py
|
YendiyarovSV/kafka-avro-producer-topkrabbensteam
|
d7a318b465ff38897150a4a4db267309793373bc
|
[
"Apache-2.0"
] | null | null | null |
kafkaSchemaManager/implementation/localSchema/LocalSchemaHolderFactory.py
|
YendiyarovSV/kafka-avro-producer-topkrabbensteam
|
d7a318b465ff38897150a4a4db267309793373bc
|
[
"Apache-2.0"
] | null | null | null |
class LocalSchemaHolderFactory:
    """Factory for obtaining LocalSchemaHolder instances, either freshly built
    from schema data or loaded from local storage via a schema loader."""

    def createLocalSchemaHolder(self, schemaHasBeenModified, schemaName, schema):
        # Wrap the given schema data in a new holder.
        # NOTE(review): LocalSchemaHolder is not imported in this module's
        # visible scope — presumably imported elsewhere; verify.
        holder = LocalSchemaHolder(schemaHasBeenModified, schemaName, schema)
        return holder

    def getLocalSchemaHolder(self, schemaLoader):
        # Delegate entirely to the loader's own loading logic.
        return schemaLoader.loadLocalSchema()
| 36.25
| 78
| 0.786207
| 20
| 290
| 11.4
| 0.65
| 0.27193
| 0.324561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151724
| 290
| 7
| 79
| 41.428571
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
9e27c02b17a92ad742b2bbd3f59c4b06d0b2de52
| 5,516
|
py
|
Python
|
postgres_backend/data/migrations/0003_auto_20200825_1913.py
|
scripted-adventurer/Custom-Fantasy-Football
|
334419d46d2142ceec7630d4582bf61e06a4de1a
|
[
"Unlicense"
] | 1
|
2020-09-12T04:25:19.000Z
|
2020-09-12T04:25:19.000Z
|
postgres_backend/data/migrations/0003_auto_20200825_1913.py
|
scripted-adventurer/Custom-Fantasy-Football
|
334419d46d2142ceec7630d4582bf61e06a4de1a
|
[
"Unlicense"
] | null | null | null |
postgres_backend/data/migrations/0003_auto_20200825_1913.py
|
scripted-adventurer/Custom-Fantasy-Football
|
334419d46d2142ceec7630d4582bf61e06a4de1a
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1 on 2020-08-25 19:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (makemigrations); normally not edited by hand.
    # Relaxes a batch of fields on the 'drive', 'game', and 'play' models:
    # text/count fields become nullable, and game score fields become
    # nullable with a default of 0.

    dependencies = [
        ('data', '0002_auto_20200825_1855'),
    ]

    operations = [
        # 'drive' model: drive summary text and stat-count fields -> null=True.
        migrations.AlterField(
            model_name='drive',
            name='end_field',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='end_time',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='end_transition',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='first_downs',
            field=models.SmallIntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='penalty_yards',
            field=models.SmallIntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='play_count',
            field=models.SmallIntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='pos_time',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='start_field',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='start_time',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='start_transition',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='drive',
            name='yards_gained',
            field=models.SmallIntegerField(null=True),
        ),
        # 'game' model: per-quarter/OT/total scores -> default=0, null=True.
        migrations.AlterField(
            model_name='game',
            name='away_score',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='away_score_ot',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='away_score_q1',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='away_score_q2',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='away_score_q3',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='away_score_q4',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score_ot',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score_q1',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score_q2',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score_q3',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='home_score_q4',
            field=models.SmallIntegerField(default=0, null=True),
        ),
        # 'game' model: descriptive text fields -> null=True.
        migrations.AlterField(
            model_name='game',
            name='phase',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='stadium',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='weather',
            field=models.TextField(null=True),
        ),
        # 'play' model: play description/flag fields -> null=True.
        migrations.AlterField(
            model_name='play',
            name='description',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='end_yardline',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='first_down',
            field=models.BooleanField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='penalty',
            field=models.BooleanField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='play_type',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='start_yardline',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='play',
            name='time',
            field=models.TextField(null=True),
        ),
    ]
| 30.815642
| 65
| 0.530457
| 485
| 5,516
| 5.882474
| 0.134021
| 0.231335
| 0.289169
| 0.335436
| 0.911321
| 0.911321
| 0.900105
| 0.885734
| 0.885734
| 0.87592
| 0
| 0.01401
| 0.352973
| 5,516
| 178
| 66
| 30.988764
| 0.785374
| 0.007796
| 0
| 0.767442
| 1
| 0
| 0.096509
| 0.004204
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005814
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
9e759a4f4e3327b41e4ed24fca7ea6c0f211b620
| 29,079
|
py
|
Python
|
disentanglement_lib/evaluation/metrics/recall.py
|
petrapoklukar/disentanglement_sample_efficiency
|
27b4bac6ee61973a728e48230c6eb449b167c46e
|
[
"Apache-2.0"
] | 1
|
2021-11-28T09:08:03.000Z
|
2021-11-28T09:08:03.000Z
|
disentanglement_lib/evaluation/metrics/recall.py
|
petrapoklukar/disentanglement_sample_efficiency
|
27b4bac6ee61973a728e48230c6eb449b167c46e
|
[
"Apache-2.0"
] | null | null | null |
disentanglement_lib/evaluation/metrics/recall.py
|
petrapoklukar/disentanglement_sample_efficiency
|
27b4bac6ee61973a728e48230c6eb449b167c46e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 24 15:50:24 2020
@author: petrapoklukar
Computes recall metric.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.evaluation.metrics import utils
from disentanglement_lib.evaluation.metrics import iprd_score as iprd
import numpy as np
from six.moves import range
import gin.tf
from disentanglement_lib.data.ground_truth import named_data
import tensorflow as tf
from sklearn.decomposition import PCA
import pickle
@gin.configurable(
    "recall_on_holdout",
    blacklist=["holdout_ground_truth_data", "encoder_fn", "repr_transform_fn",
               "decoder_fn", "random_state", "artifact_dir"])
def compute_recall_on_holdout(holdout_ground_truth_data,
                              encoder_fn,
                              repr_transform_fn,
                              decoder_fn,
                              random_state,
                              artifact_dir=None,
                              nhood_sizes=gin.REQUIRED,
                              num_interventions_per_latent_dim=gin.REQUIRED,
                              pca_components=gin.REQUIRED):
    """Computes k-NN precision/recall metrics on a holdout set.

    For every entry in ``pca_components`` a pretrained ground-truth PCA is
    loaded from disk and used to reduce (a) decoded holdout representations,
    (b) samples generated from the standard normal prior and (c) samples
    generated from a Gaussian fitted to the holdout representations ("trained
    prior"). k-NN precision/recall is computed between those sets, both
    globally and under per-latent-dimension interventions (fixing one latent
    while varying the rest, and vice versa).

    Args:
        holdout_ground_truth_data: GroundTruthData to be sampled from.
        encoder_fn: Function mapping observations to encoder outputs
            (mean, var).
        repr_transform_fn: Function turning encoder outputs into
            representations.
        decoder_fn: Function mapping latent codes back to observations.
        random_state: Numpy random state used for randomness.
        artifact_dir: Optional path to directory where artifacts can be saved.
        nhood_sizes: Neighbourhood sizes for the k-NN precision/recall metric.
        num_interventions_per_latent_dim: Number of interventions performed
            per latent dimension.
        pca_components: Iterable of PCA component counts to evaluate.

    Returns:
        List with one result dict per entry of ``pca_components``.
    """
    del artifact_dir
    num_recall_samples = holdout_ground_truth_data.data_size
    dummy_input = holdout_ground_truth_data.sample_observations(1, random_state)
    dummy_mean, dummy_var = encoder_fn(dummy_input)
    # Infer the latent dimensionality from one encoded dummy observation.
    latent_dim = repr_transform_fn(*encoder_fn(dummy_input)).shape[-1]
    latent_shape = [num_recall_samples, latent_dim]
    # Samples from the standard normal prior.
    latent_prior_samples_np = np.random.normal(size=latent_shape)
    # Holdout ground-truth samples and statistics of their representations.
    gt_samples = holdout_ground_truth_data.images
    gt_repr = repr_transform_fn(*encoder_fn(gt_samples))
    gt_repr_mean = np.mean(gt_repr, axis=0)
    gt_repr_std = np.std(gt_repr, axis=0)
    gt_repr_min = np.min(gt_repr, axis=0)
    gt_repr_max = np.max(gt_repr, axis=0)
    # The predetermined set of interventions from the estimated training prior.
    fixed_trained_prior_samples_np = np.random.normal(loc=gt_repr_mean,
                                                      scale=gt_repr_std,
                                                      size=latent_shape)
    pca_results_per_comp = []
    result_d_gen = {'nhoods': nhood_sizes,
                    'gt_repr_mean': list(gt_repr_mean),
                    'gt_repr_std': list(gt_repr_std),
                    'gt_repr_min': list(gt_repr_min),
                    'gt_repr_max': list(gt_repr_max)}
    # Choose the intervention rows once; shared by all PCA settings.
    subset_interventions = np.random.choice(
        np.arange(num_recall_samples), size=num_interventions_per_latent_dim,
        replace=False)
    result_d_gen['subset_interventions'] = list(subset_interventions)
    for num_comp in pca_components:
        # Load the pretrained ground-truth PCA for this component count.
        pca_path = "backbone/pca/pca_3dshapes_model_all_{0}comp.pkl".format(num_comp)
        print(pca_path)
        with open(pca_path, 'rb') as f:
            gt_pca = pickle.load(f)
        result_d = result_d_gen.copy()
        result_d['pca_components'] = num_comp
        sess = tf.Session()
        with sess.as_default():
            print('\n\n\n Computing the total recall...')
            # Decode the holdout representations and PCA-reduce them.
            decoded_gt_samples = decoder_fn(gt_repr)
            decoded_gt_samples = decoded_gt_samples.reshape(num_recall_samples, -1)
            reduced_decoded_gt_samples = gt_pca.transform(decoded_gt_samples)
            # Generated samples from the normal prior, processed with gt PCA.
            generated_prior_samples = decoder_fn(latent_prior_samples_np)
            generated_prior_samples = generated_prior_samples.reshape(num_recall_samples, -1)
            reduced_generated_prior_samples = gt_pca.transform(generated_prior_samples)
            assert reduced_generated_prior_samples.shape == reduced_decoded_gt_samples.shape
            # Generated samples from the estimated training prior, same PCA.
            generated_trained_prior_samples = decoder_fn(fixed_trained_prior_samples_np)
            generated_trained_prior_samples = generated_trained_prior_samples.reshape(num_recall_samples, -1)
            reduced_generated_trained_prior_samples = gt_pca.transform(generated_trained_prior_samples)
            assert reduced_generated_prior_samples.shape == reduced_generated_trained_prior_samples.shape
            # Model recall: model(gt) vs normal prior generated.
            decoded_gt_prior_generated_result = iprd.knn_precision_recall_features(
                reduced_decoded_gt_samples,
                reduced_generated_prior_samples,
                nhood_sizes=nhood_sizes,
                row_batch_size=500, col_batch_size=100, num_gpus=1)
            update_result_dict(result_d, ['decoded_gt_prior_generated_',
                                          decoded_gt_prior_generated_result])
            # Model recall: model(gt) vs estimated training prior generated.
            decoded_gt_trained_prior_generated_result = iprd.knn_precision_recall_features(
                reduced_decoded_gt_samples,
                reduced_generated_trained_prior_samples,
                nhood_sizes=nhood_sizes,
                row_batch_size=500, col_batch_size=100, num_gpus=1)
            update_result_dict(result_d, ['decoded_gt_trained_prior_generated_',
                                          decoded_gt_trained_prior_generated_result])
            # Model recall: normal prior generated vs training prior generated.
            prior_generated_trained_prior_generated_result = iprd.knn_precision_recall_features(
                reduced_generated_prior_samples,
                reduced_generated_trained_prior_samples,
                nhood_sizes=nhood_sizes,
                row_batch_size=500, col_batch_size=100, num_gpus=1)
            update_result_dict(result_d, ['prior_generated_trained_prior_generated_',
                                          prior_generated_trained_prior_generated_result])
            # Per-latent-dimension interventions.
            for dim in range(latent_dim):
                print('\n\n\n Computing the recall for latent dim ', dim)
                agg_fix_one_vs_prior_generated_result = {'precision': [], 'recall': []}
                agg_fix_one_vs_trained_prior_generated_result = {'precision': [], 'recall': []}
                agg_fix_one_vs_decoded_gt_result = {'precision': [], 'recall': []}
                agg_vary_one_vs_prior_generated_result = {'precision': [], 'recall': []}
                agg_vary_one_vs_trained_prior_generated_result = {'precision': [], 'recall': []}
                agg_vary_one_vs_decoded_gt_result = {'precision': [], 'recall': []}
                # Intervene several times on this latent dimension.
                for intervention in range(num_interventions_per_latent_dim):
                    inter_id = subset_interventions[intervention]
                    print('\n\n\n Intervention num', intervention)
                    print(' Intervention row', inter_id)
                    # --- Fix one latent, vary the rest.
                    latent_intervention = float(fixed_trained_prior_samples_np[inter_id, dim])
                    fix_one_latent_from_trained_prior_samples = np.copy(fixed_trained_prior_samples_np)
                    fix_one_latent_from_trained_prior_samples[:, dim] = latent_intervention
                    # Decode the samples and transform them with the PCA.
                    gen_fix_one_latent_from_trained_prior_samples = decoder_fn(
                        fix_one_latent_from_trained_prior_samples).reshape(num_recall_samples, -1)
                    reduced_gen_fix_one_latent_from_trained_prior_samples = gt_pca.transform(
                        gen_fix_one_latent_from_trained_prior_samples)
                    assert (reduced_gen_fix_one_latent_from_trained_prior_samples.shape ==
                            reduced_generated_prior_samples.shape)
                    # Relative recall vs the generated normal prior samples.
                    fix_one_vs_prior_generated_result = iprd.knn_precision_recall_features(
                        reduced_generated_prior_samples,
                        reduced_gen_fix_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_fix_one_vs_prior_generated_result = agg_recall_dict(
                        agg_fix_one_vs_prior_generated_result,
                        fix_one_vs_prior_generated_result,
                        intervention)
                    # Relative recall vs the generated trained prior samples.
                    fix_one_vs_trained_prior_generated_result = iprd.knn_precision_recall_features(
                        reduced_generated_trained_prior_samples,
                        reduced_gen_fix_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_fix_one_vs_trained_prior_generated_result = agg_recall_dict(
                        agg_fix_one_vs_trained_prior_generated_result,
                        fix_one_vs_trained_prior_generated_result,
                        intervention)
                    # Relative recall vs the decoded ground truth.
                    fix_one_vs_decoded_gt_result = iprd.knn_precision_recall_features(
                        reduced_decoded_gt_samples,
                        reduced_gen_fix_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_fix_one_vs_decoded_gt_result = agg_recall_dict(
                        agg_fix_one_vs_decoded_gt_result,
                        fix_one_vs_decoded_gt_result,
                        intervention)
                    # --- Vary one latent, fix the rest.
                    latent_variation = np.copy(fixed_trained_prior_samples_np[:, dim])
                    vary_one_latent_from_trained_prior_samples = np.copy(
                        fixed_trained_prior_samples_np[inter_id]).reshape(1, latent_dim)
                    vary_one_latent_from_trained_prior_samples = np.full(
                        latent_shape, vary_one_latent_from_trained_prior_samples)
                    vary_one_latent_from_trained_prior_samples[:, dim] = latent_variation
                    # Decode the samples and transform them with the PCA.
                    gen_vary_one_latent_from_trained_prior_samples = decoder_fn(
                        vary_one_latent_from_trained_prior_samples).reshape(num_recall_samples, -1)
                    reduced_gen_vary_one_latent_from_trained_prior_samples = gt_pca.transform(
                        gen_vary_one_latent_from_trained_prior_samples)
                    assert (reduced_gen_vary_one_latent_from_trained_prior_samples.shape ==
                            reduced_generated_prior_samples.shape)
                    # Recall vs the generated normal prior samples.
                    vary_one_vs_prior_generated_result = iprd.knn_precision_recall_features(
                        reduced_generated_prior_samples,
                        reduced_gen_vary_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_vary_one_vs_prior_generated_result = agg_recall_dict(
                        agg_vary_one_vs_prior_generated_result,
                        vary_one_vs_prior_generated_result,
                        intervention)
                    # Recall vs the generated trained prior samples.
                    vary_one_vs_trained_prior_generated_result = iprd.knn_precision_recall_features(
                        reduced_generated_trained_prior_samples,
                        reduced_gen_vary_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_vary_one_vs_trained_prior_generated_result = agg_recall_dict(
                        agg_vary_one_vs_trained_prior_generated_result,
                        vary_one_vs_trained_prior_generated_result,
                        intervention)
                    # Recall vs the decoded ground truth.
                    vary_one_vs_decoded_gt_result = iprd.knn_precision_recall_features(
                        reduced_decoded_gt_samples,
                        reduced_gen_vary_one_latent_from_trained_prior_samples,
                        nhood_sizes=nhood_sizes,
                        row_batch_size=500, col_batch_size=100, num_gpus=1)
                    agg_vary_one_vs_decoded_gt_result = agg_recall_dict(
                        agg_vary_one_vs_decoded_gt_result,
                        vary_one_vs_decoded_gt_result,
                        intervention)
                update_result_dict_with_agg(
                    result_d,
                    [str(dim) + '_fix_one_vs_prior_generated_', agg_fix_one_vs_prior_generated_result],
                    [str(dim) + '_fix_one_vs_trained_prior_generated_', agg_fix_one_vs_trained_prior_generated_result],
                    [str(dim) + '_fix_one_vs_decoded_gt_', agg_fix_one_vs_decoded_gt_result],
                    [str(dim) + '_vary_one_vs_prior_generated_', agg_vary_one_vs_prior_generated_result],
                    [str(dim) + '_vary_one_vs_trained_prior_generated_', agg_vary_one_vs_trained_prior_generated_result],
                    [str(dim) + '_vary_one_vs_decoded_gt_', agg_vary_one_vs_decoded_gt_result])
        pca_results_per_comp.append(result_d)
    return pca_results_per_comp
@gin.configurable(
    "recall",
    blacklist=["ground_truth_data", "encoder_fn", "repr_transform_fn",
               "decoder_fn", "random_state", "artifact_dir"])
def compute_recall(ground_truth_data,
                   encoder_fn,
                   repr_transform_fn,
                   decoder_fn,
                   random_state,
                   artifact_dir=None,
                   nhood_sizes=gin.REQUIRED,
                   num_interventions_per_latent_dim=gin.REQUIRED,
                   num_pca_components=gin.REQUIRED):
    """Computes k-NN precision/recall metrics on the training split.

    A PCA is fitted on the decoded (reconstructed) training observations and
    used to reduce (a) those reconstructions, (b) samples generated from the
    standard normal prior and (c) samples generated from a Gaussian fitted to
    the training representations ("trained prior"). k-NN precision/recall is
    computed between those sets, both globally and under per-latent-dimension
    interventions (fixing one latent while varying the rest, and vice versa).

    Args:
        ground_truth_data: Tuple (train, test) of GroundTruthData; only the
            train split is used here.
        encoder_fn: Function mapping observations to encoder outputs
            (mean, var).
        repr_transform_fn: Function turning encoder outputs into
            representations.
        decoder_fn: Function mapping latent codes back to observations.
        random_state: Numpy random state used for randomness.
        artifact_dir: Optional path to directory where artifacts can be saved.
        nhood_sizes: Neighbourhood sizes for the k-NN precision/recall metric.
        num_interventions_per_latent_dim: Number of interventions performed
            per latent dimension.
        num_pca_components: Upper bound on the number of PCA components
            (capped at the number of samples).

    Returns:
        Single-element list with the result dict.
    """
    del artifact_dir
    train_ground_truth_data, test_ground_truth_data = ground_truth_data
    ground_truth_data = train_ground_truth_data
    num_recall_samples = train_ground_truth_data.data_size
    dummy_input = ground_truth_data.sample_observations(1, random_state)
    dummy_mean, dummy_var = encoder_fn(dummy_input)
    # Infer the latent dimensionality from one encoded dummy observation.
    latent_dim = repr_transform_fn(*encoder_fn(dummy_input)).shape[-1]
    latent_shape = [num_recall_samples, latent_dim]
    # Samples from the standard normal prior.
    latent_prior_samples_np = np.random.normal(size=latent_shape)
    # Training ground-truth samples and statistics of their representations.
    gt_train_samples = ground_truth_data.sample_observations(num_recall_samples, random_state)
    gt_train_repr = repr_transform_fn(*encoder_fn(gt_train_samples))
    gt_train_repr_mean = np.mean(gt_train_repr, axis=0)
    gt_train_repr_std = np.std(gt_train_repr, axis=0)
    gt_train_repr_min = np.min(gt_train_repr, axis=0)
    gt_train_repr_max = np.max(gt_train_repr, axis=0)
    # The predetermined set of interventions from the estimated training prior.
    fixed_trained_prior_samples_np = np.random.normal(loc=gt_train_repr_mean,
                                                      scale=gt_train_repr_std,
                                                      size=latent_shape)
    result_d = {'nhoods': nhood_sizes,
                'gt_train_repr_mean': list(gt_train_repr_mean),
                'gt_train_repr_std': list(gt_train_repr_std),
                'gt_train_repr_min': list(gt_train_repr_min),
                'gt_train_repr_max': list(gt_train_repr_max)}
    sess = tf.Session()
    with sess.as_default():
        # PCA components cannot exceed the number of samples.
        n_comp = min(num_recall_samples, num_pca_components)
        print('\n\n\n Computing the total recall...')
        # Reconstruct the training data and fit the PCA on the reconstructions.
        decoded_gt_samples = decoder_fn(repr_transform_fn(*encoder_fn(gt_train_samples)))
        decoded_gt_samples = decoded_gt_samples.reshape(num_recall_samples, -1)
        decoded_gt_pca = PCA(n_components=n_comp)
        reduced_decoded_gt_samples = decoded_gt_pca.fit_transform(decoded_gt_samples)
        # Generated samples from the normal prior, processed with the PCA.
        generated_prior_samples = decoder_fn(latent_prior_samples_np)
        generated_prior_samples = generated_prior_samples.reshape(num_recall_samples, -1)
        reduced_generated_prior_samples = decoded_gt_pca.transform(generated_prior_samples)
        assert reduced_generated_prior_samples.shape == reduced_decoded_gt_samples.shape
        # Generated samples from the estimated training prior, same PCA.
        generated_trained_prior_samples = decoder_fn(fixed_trained_prior_samples_np)
        generated_trained_prior_samples = generated_trained_prior_samples.reshape(num_recall_samples, -1)
        reduced_generated_trained_prior_samples = decoded_gt_pca.transform(generated_trained_prior_samples)
        assert reduced_generated_prior_samples.shape == reduced_generated_trained_prior_samples.shape
        # Model recall: model(gt) vs normal prior generated.
        decoded_gt_prior_generated_result = iprd.knn_precision_recall_features(
            reduced_decoded_gt_samples,
            reduced_generated_prior_samples,
            nhood_sizes=nhood_sizes,
            row_batch_size=500, col_batch_size=100, num_gpus=1)
        update_result_dict(result_d, ['decoded_gt_prior_generated_',
                                      decoded_gt_prior_generated_result])
        # Model recall: model(gt) vs estimated training prior generated.
        decoded_gt_trained_prior_generated_result = iprd.knn_precision_recall_features(
            reduced_decoded_gt_samples,
            reduced_generated_trained_prior_samples,
            nhood_sizes=nhood_sizes,
            row_batch_size=500, col_batch_size=100, num_gpus=1)
        update_result_dict(result_d, ['decoded_gt_trained_prior_generated_',
                                      decoded_gt_trained_prior_generated_result])
        # Model recall: normal prior generated vs training prior generated.
        prior_generated_trained_prior_generated_result = iprd.knn_precision_recall_features(
            reduced_generated_prior_samples,
            reduced_generated_trained_prior_samples,
            nhood_sizes=nhood_sizes,
            row_batch_size=500, col_batch_size=100, num_gpus=1)
        update_result_dict(result_d, ['prior_generated_trained_prior_generated_',
                                      prior_generated_trained_prior_generated_result])
        # Choose a subset of intervention rows.
        subset_interventions = np.random.choice(
            np.arange(num_recall_samples), size=num_interventions_per_latent_dim,
            replace=False)
        result_d['subset_interventions'] = list(subset_interventions)
        result_d['num_pca_comp'] = n_comp
        # Per-latent-dimension interventions.
        for dim in range(latent_dim):
            print('\n\n\n Computing the recall for latent dim ', dim)
            agg_fix_one_vs_prior_generated_result = {'precision': [], 'recall': []}
            agg_fix_one_vs_trained_prior_generated_result = {'precision': [], 'recall': []}
            agg_fix_one_vs_decoded_gt_result = {'precision': [], 'recall': []}
            agg_vary_one_vs_prior_generated_result = {'precision': [], 'recall': []}
            agg_vary_one_vs_trained_prior_generated_result = {'precision': [], 'recall': []}
            agg_vary_one_vs_decoded_gt_result = {'precision': [], 'recall': []}
            # Intervene several times on this latent dimension.
            for intervention in range(num_interventions_per_latent_dim):
                inter_id = subset_interventions[intervention]
                print('\n\n\n Intervention num', intervention)
                print(' Intervention row', inter_id)
                # --- Fix one latent, vary the rest.
                latent_intervention = float(fixed_trained_prior_samples_np[inter_id, dim])
                fix_one_latent_from_trained_prior_samples = np.copy(fixed_trained_prior_samples_np)
                fix_one_latent_from_trained_prior_samples[:, dim] = latent_intervention
                # Decode the samples and transform them with the PCA.
                gen_fix_one_latent_from_trained_prior_samples = decoder_fn(
                    fix_one_latent_from_trained_prior_samples).reshape(num_recall_samples, -1)
                reduced_gen_fix_one_latent_from_trained_prior_samples = decoded_gt_pca.transform(
                    gen_fix_one_latent_from_trained_prior_samples)
                assert (reduced_gen_fix_one_latent_from_trained_prior_samples.shape ==
                        reduced_generated_prior_samples.shape)
                # Relative recall vs the generated normal prior samples.
                fix_one_vs_prior_generated_result = iprd.knn_precision_recall_features(
                    reduced_generated_prior_samples,
                    reduced_gen_fix_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_fix_one_vs_prior_generated_result = agg_recall_dict(
                    agg_fix_one_vs_prior_generated_result,
                    fix_one_vs_prior_generated_result,
                    intervention)
                # Relative recall vs the generated trained prior samples.
                fix_one_vs_trained_prior_generated_result = iprd.knn_precision_recall_features(
                    reduced_generated_trained_prior_samples,
                    reduced_gen_fix_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_fix_one_vs_trained_prior_generated_result = agg_recall_dict(
                    agg_fix_one_vs_trained_prior_generated_result,
                    fix_one_vs_trained_prior_generated_result,
                    intervention)
                # Relative recall vs the decoded ground truth.
                fix_one_vs_decoded_gt_result = iprd.knn_precision_recall_features(
                    reduced_decoded_gt_samples,
                    reduced_gen_fix_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_fix_one_vs_decoded_gt_result = agg_recall_dict(
                    agg_fix_one_vs_decoded_gt_result,
                    fix_one_vs_decoded_gt_result,
                    intervention)
                # --- Vary one latent, fix the rest.
                latent_variation = np.copy(fixed_trained_prior_samples_np[:, dim])
                vary_one_latent_from_trained_prior_samples = np.copy(
                    fixed_trained_prior_samples_np[inter_id]).reshape(1, latent_dim)
                vary_one_latent_from_trained_prior_samples = np.full(
                    latent_shape, vary_one_latent_from_trained_prior_samples)
                vary_one_latent_from_trained_prior_samples[:, dim] = latent_variation
                # Decode the samples and transform them with the PCA.
                gen_vary_one_latent_from_trained_prior_samples = decoder_fn(
                    vary_one_latent_from_trained_prior_samples).reshape(num_recall_samples, -1)
                reduced_gen_vary_one_latent_from_trained_prior_samples = decoded_gt_pca.transform(
                    gen_vary_one_latent_from_trained_prior_samples)
                assert (reduced_gen_vary_one_latent_from_trained_prior_samples.shape ==
                        reduced_generated_prior_samples.shape)
                # Recall vs the generated normal prior samples.
                vary_one_vs_prior_generated_result = iprd.knn_precision_recall_features(
                    reduced_generated_prior_samples,
                    reduced_gen_vary_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_vary_one_vs_prior_generated_result = agg_recall_dict(
                    agg_vary_one_vs_prior_generated_result,
                    vary_one_vs_prior_generated_result,
                    intervention)
                # Recall vs the generated trained prior samples.
                vary_one_vs_trained_prior_generated_result = iprd.knn_precision_recall_features(
                    reduced_generated_trained_prior_samples,
                    reduced_gen_vary_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_vary_one_vs_trained_prior_generated_result = agg_recall_dict(
                    agg_vary_one_vs_trained_prior_generated_result,
                    vary_one_vs_trained_prior_generated_result,
                    intervention)
                # Recall vs the decoded ground truth.
                vary_one_vs_decoded_gt_result = iprd.knn_precision_recall_features(
                    reduced_decoded_gt_samples,
                    reduced_gen_vary_one_latent_from_trained_prior_samples,
                    nhood_sizes=nhood_sizes,
                    row_batch_size=500, col_batch_size=100, num_gpus=1)
                agg_vary_one_vs_decoded_gt_result = agg_recall_dict(
                    agg_vary_one_vs_decoded_gt_result,
                    vary_one_vs_decoded_gt_result,
                    intervention)
            update_result_dict_with_agg(
                result_d,
                [str(dim) + '_fix_one_vs_prior_generated_', agg_fix_one_vs_prior_generated_result],
                [str(dim) + '_fix_one_vs_trained_prior_generated_', agg_fix_one_vs_trained_prior_generated_result],
                [str(dim) + '_fix_one_vs_decoded_gt_', agg_fix_one_vs_decoded_gt_result],
                [str(dim) + '_vary_one_vs_prior_generated_', agg_vary_one_vs_prior_generated_result],
                [str(dim) + '_vary_one_vs_trained_prior_generated_', agg_vary_one_vs_trained_prior_generated_result],
                [str(dim) + '_vary_one_vs_decoded_gt_', agg_vary_one_vs_decoded_gt_result])
    return [result_d]
def update_result_dict(result_d, *args):
    """Merge prefixed copies of metric dicts into ``result_d`` in place.

    Each extra argument is a pair ``[prefix, metrics]``; every ``metrics``
    entry is stored under ``prefix + key`` with its value converted to a
    plain list (so the result stays JSON-serializable).

    Returns:
        The same ``result_d`` dict, for convenience.
    """
    for prefix, metrics in args:
        for key, value in metrics.items():
            result_d[prefix + key] = list(value)
    return result_d
def update_result_dict_with_agg(result_d, *args):
    """Store mean/std/sum summaries of aggregated recall values in ``result_d``.

    Each extra argument is a pair ``[prefix, agg]`` where ``agg['recall']``
    stacks one row of recall values per intervention. The per-column mean,
    std and sum are stored under ``prefix + 'recall_mean'`` etc.

    Args:
        result_d: Dict updated in place.
        *args: Pairs ``[prefix, agg_dict]`` as produced by ``agg_recall_dict``.

    Returns:
        The same ``result_d`` dict, for convenience.
    """
    # Removed a leftover debug print of the full aggregate and dead
    # commented-out code from the original implementation.
    for arg in args:
        update_key = arg[0]
        update_recall = arg[1]['recall']
        update_d = {update_key + 'recall_mean': list(np.mean(update_recall, axis=0)),
                    update_key + 'recall_std': list(np.std(update_recall, axis=0)),
                    update_key + 'recall_sum': list(np.sum(update_recall, axis=0))}
        result_d.update(update_d)
    return result_d
def agg_recall_dict(agg_d, new_d, inter_id):
    """Accumulate per-intervention metric rows into ``agg_d``.

    On the first intervention (``inter_id == 0``) a shallow copy of ``new_d``
    seeds the aggregate; afterwards each metric array in ``new_d`` is stacked
    under the existing rows with ``np.vstack``.

    Args:
        agg_d: Aggregate dict (ignored when ``inter_id == 0``).
        new_d: Dict of metric arrays for the current intervention.
        inter_id: 0-based index of the current intervention.

    Returns:
        The aggregate dict holding one stacked row per intervention so far.
    """
    if inter_id == 0:
        # Copy instead of returning new_d itself: the original returned the
        # caller's dict by reference, so later iterations mutated the first
        # intervention's result dict as a side effect.
        return dict(new_d)
    for key, value in new_d.items():
        agg_d[key] = np.vstack([agg_d[key], value])
    return agg_d
| 49.203046
| 113
| 0.708071
| 3,790
| 29,079
| 4.887863
| 0.058047
| 0.085506
| 0.094359
| 0.05614
| 0.915304
| 0.896788
| 0.881943
| 0.865965
| 0.854629
| 0.846856
| 0
| 0.008552
| 0.227965
| 29,079
| 591
| 114
| 49.203046
| 0.816615
| 0.137831
| 0
| 0.764423
| 0
| 0
| 0.058072
| 0.025248
| 0
| 0
| 0
| 0
| 0.019231
| 1
| 0.012019
| false
| 0
| 0.028846
| 0
| 0.055288
| 0.086538
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9e9441a93cd63b68d67527d894c0caf840acf8e9
| 282
|
py
|
Python
|
pyoat/preprocessing/__init__.py
|
berkanlafci/pyoat
|
b0cee99adde3c14c5d94f26f6e893a0b2e1fcae2
|
[
"MIT"
] | 5
|
2022-03-07T16:30:58.000Z
|
2022-03-28T13:42:01.000Z
|
pyoat/preprocessing/__init__.py
|
berkanlafci/pyoat
|
b0cee99adde3c14c5d94f26f6e893a0b2e1fcae2
|
[
"MIT"
] | 1
|
2022-03-28T13:41:42.000Z
|
2022-03-28T13:41:42.000Z
|
pyoat/preprocessing/__init__.py
|
berkanlafci/pyoat
|
b0cee99adde3c14c5d94f26f6e893a0b2e1fcae2
|
[
"MIT"
] | null | null | null |
#-----
# Description : init for preprocessing tools
# Date : February 2021
# Author : Berkan Lafci
# E-mail : lafciberkan@gmail.com
#-----
from pyoat.preprocessing.filterBandPass import sigMatFilter
from pyoat.preprocessing.normalize import sigMatNormalize
| 31.333333
| 59
| 0.705674
| 28
| 282
| 7.107143
| 0.821429
| 0.090452
| 0.221106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017699
| 0.198582
| 282
| 9
| 60
| 31.333333
| 0.862832
| 0.539007
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
7b34eb98573163e17aa7ed47d71784f7ee127ab4
| 120
|
py
|
Python
|
torchnet/__init__.py
|
HarshTrivedi/tnt
|
4bd49afaa936e888ea1020a4f9ef54613beea559
|
[
"BSD-3-Clause"
] | 1,463
|
2017-01-18T22:59:37.000Z
|
2022-03-31T01:58:02.000Z
|
torchnet/__init__.py
|
HarshTrivedi/tnt
|
4bd49afaa936e888ea1020a4f9ef54613beea559
|
[
"BSD-3-Clause"
] | 105
|
2017-01-18T20:30:01.000Z
|
2021-12-31T15:08:18.000Z
|
torchnet/__init__.py
|
HarshTrivedi/tnt
|
4bd49afaa936e888ea1020a4f9ef54613beea559
|
[
"BSD-3-Clause"
] | 236
|
2017-01-18T20:17:32.000Z
|
2022-02-16T06:41:40.000Z
|
from . import dataset, meter, engine, transform, logger
__all__ = ['dataset', 'meter', 'engine', 'transform', 'logger']
| 40
| 63
| 0.691667
| 13
| 120
| 6.076923
| 0.615385
| 0.303797
| 0.455696
| 0.683544
| 0.835443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 2
| 64
| 60
| 0.752381
| 0
| 0
| 0
| 0
| 0
| 0.275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
7b4f2cb696994930d69240332213b8dcf0c6a07d
| 128
|
py
|
Python
|
tests/test_demo.py
|
nikolim/gitlab-vs-github
|
71bf02399c925b6216e66e9c6023409e54f29761
|
[
"MIT"
] | null | null | null |
tests/test_demo.py
|
nikolim/gitlab-vs-github
|
71bf02399c925b6216e66e9c6023409e54f29761
|
[
"MIT"
] | null | null | null |
tests/test_demo.py
|
nikolim/gitlab-vs-github
|
71bf02399c925b6216e66e9c6023409e54f29761
|
[
"MIT"
] | null | null | null |
from uselesspackage.demo import calc_meaning_of_life
def test_calc_meaning_of_life():
    """The demo function must return the answer to everything."""
    expected = 42
    assert calc_meaning_of_life() == expected
| 21.333333
| 52
| 0.8125
| 20
| 128
| 4.7
| 0.6
| 0.351064
| 0.414894
| 0.542553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.125
| 128
| 5
| 53
| 25.6
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
7bb2c1eedce3ed5c2c27ed8ce957393889f7ab39
| 40,597
|
py
|
Python
|
SDKs/Aspose.Storage_Cloud_SDK_For_Python/asposestoragecloud/StorageApi.py
|
imranwar/AsposeStoragePHP
|
3795bbfd1ca6d2aa58974bd9133ab4794b3f23d0
|
[
"MIT"
] | null | null | null |
SDKs/Aspose.Storage_Cloud_SDK_For_Python/asposestoragecloud/StorageApi.py
|
imranwar/AsposeStoragePHP
|
3795bbfd1ca6d2aa58974bd9133ab4794b3f23d0
|
[
"MIT"
] | null | null | null |
SDKs/Aspose.Storage_Cloud_SDK_For_Python/asposestoragecloud/StorageApi.py
|
imranwar/AsposeStoragePHP
|
3795bbfd1ca6d2aa58974bd9133ab4794b3f23d0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import os
import urllib
import json
import re
from models import *
from ApiClient import ApiException
class StorageApi(object):
    def __init__(self, apiClient):
        """Create a StorageApi bound to the given API client.

        Args:
            apiClient: Client object used for all HTTP calls (must provide
                callAPI and pre_deserialize, as used by the methods below).
        """
        self.apiClient = apiClient
    def DeleteFile(self, Path, **kwargs):
        """Remove a specific file. Parameters: path - file path e.g. /file.ext, versionID - file's version, storage - user's storage name.
        Args:
            Path (str): (required)
            versionId (str): (optional)
            storage (str): (optional)
        Returns: RemoveFileResponse
        """
        # NOTE(review): uses dict.iteritems(), so this module targets Python 2.
        allParams = dict.fromkeys(['Path', 'versionId', 'storage'])
        params = locals()
        # Reject any keyword argument that is not a known parameter.
        for (key, val) in params['kwargs'].iteritems():
            if key not in allParams:
                raise TypeError("Got an unexpected keyword argument '%s' to method DeleteFile" % key)
            params[key] = val
        # Collect the recognized parameters (positional and keyword) by name.
        for (key, val) in params.iteritems():
            if key in allParams:
                allParams[key] = val
        # URI template; placeholders are substituted or stripped below.
        resourcePath = '/storage/file/{path}/?appSid={appSid}&versionId={versionId}&storage={storage}'
        # NOTE(review): .replace('&','&') is a no-op as written — presumably
        # this was ".replace('&amp;','&')" before an HTML-unescape artifact;
        # confirm against the generator template.
        resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
        # Substitute each supplied parameter, or strip its query fragment.
        if 'Path' in allParams and allParams['Path'] is not None:
            resourcePath = resourcePath.replace("{" + "Path" + "}" , str(allParams['Path']))
        else:
            resourcePath = re.sub("[&?]Path.*?(?=&|\\?|$)", "", resourcePath)
        if 'versionId' in allParams and allParams['versionId'] is not None:
            resourcePath = resourcePath.replace("{" + "versionId" + "}" , str(allParams['versionId']))
        else:
            resourcePath = re.sub("[&?]versionId.*?(?=&|\\?|$)", "", resourcePath)
        if 'storage' in allParams and allParams['storage'] is not None:
            resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
        else:
            resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
        method = 'DELETE'
        queryParams = {}
        headerParams = {}
        formParams = {}
        files = { }
        bodyParam = None
        headerParams['Accept'] = 'application/xml,application/json'
        headerParams['Content-Type'] = 'application/json'
        # With no form params the request body is None for this endpoint.
        postData = (formParams if formParams else bodyParam)
        response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
        try:
            # 2xx responses deserialize into a RemoveFileResponse.
            if response.status_code in [200,201,202]:
                responseObject = self.apiClient.pre_deserialize(response.content, 'RemoveFileResponse', response.headers['content-type'])
                return responseObject
            else:
                raise ApiException(response.status_code,response.content)
        except Exception:
            raise ApiException(response.status_code,response.content)
    def GetDiscUsage(self, **kwargs):
        """Check the disk usage of the current account. Parameters: storage - user's storage name.
        Args:
            storage (str): (optional)
        Returns: DiscUsageResponse
        """
        # NOTE(review): uses dict.iteritems(), so this module targets Python 2.
        allParams = dict.fromkeys(['storage'])
        params = locals()
        # Reject any keyword argument that is not a known parameter.
        for (key, val) in params['kwargs'].iteritems():
            if key not in allParams:
                raise TypeError("Got an unexpected keyword argument '%s' to method GetDiscUsage" % key)
            params[key] = val
        # Collect the recognized parameters by name.
        for (key, val) in params.iteritems():
            if key in allParams:
                allParams[key] = val
        # URI template; placeholders are substituted or stripped below.
        resourcePath = '/storage/disc/?appSid={appSid}&storage={storage}'
        # NOTE(review): .replace('&','&') is a no-op as written — presumably an
        # HTML-unescape artifact of ".replace('&amp;','&')"; confirm upstream.
        resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
        if 'storage' in allParams and allParams['storage'] is not None:
            resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
        else:
            resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
        method = 'GET'
        queryParams = {}
        headerParams = {}
        formParams = {}
        files = { }
        bodyParam = None
        headerParams['Accept'] = 'application/xml,application/json'
        headerParams['Content-Type'] = 'application/json'
        # With no form params the request body is None for this endpoint.
        postData = (formParams if formParams else bodyParam)
        response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
        try:
            # 2xx responses deserialize into a DiscUsageResponse.
            if response.status_code in [200,201,202]:
                responseObject = self.apiClient.pre_deserialize(response.content, 'DiscUsageResponse', response.headers['content-type'])
                return responseObject
            else:
                raise ApiException(response.status_code,response.content)
        except Exception:
            raise ApiException(response.status_code,response.content)
def GetDownload(self, Path, **kwargs):
    """Download a specific file.

    Args:
        Path (str): file path e.g. /file.ext (required)
        versionId (str): file's version (optional)
        storage (str): user's storage name (optional)

    Returns:
        ResponseMessage

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'versionId', 'storage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method GetDownload" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/file/{path}/?appSid={appSid}&versionId={versionId}&storage={storage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'versionId', 'storage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'GET'
    queryParams = {}
    # Octet-stream is accepted because the payload is the raw file content.
    headerParams = {'Accept': 'application/xml,application/octet-stream', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'ResponseMessage', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def GetIsExist(self, Path, **kwargs):
    """Check if a specific file or folder exists.

    Args:
        Path (str): file or folder path e.g. /file.ext or /Folder1 (required)
        versionId (str): file's version (optional)
        storage (str): user's storage name (optional)

    Returns:
        FileExistResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'versionId', 'storage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method GetIsExist" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/exist/{path}/?appSid={appSid}&versionId={versionId}&storage={storage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'versionId', 'storage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'GET'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'FileExistResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def GetListFileVersions(self, Path, **kwargs):
    """Get the file's versions list.

    Args:
        Path (str): file path e.g. /file.ext or /Folder1/file.ext (required)
        storage (str): user's storage name (optional)

    Returns:
        FileVersionsResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'storage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method GetListFileVersions" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/version/{path}/?appSid={appSid}&storage={storage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'storage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'GET'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'FileVersionsResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PostMoveFile(self, src, dest, **kwargs):
    """Move a specific file.

    Args:
        src (str): source file path e.g. /file.ext (required)
        dest (str): destination file path (required)
        versionId (str): source file's version (optional)
        storage (str): user's source storage name (optional)
        destStorage (str): user's destination storage name (optional)

    Returns:
        MoveFileResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['src', 'dest', 'versionId', 'storage', 'destStorage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PostMoveFile" % key)
        allParams[key] = val
    allParams['src'] = src
    allParams['dest'] = dest
    resourcePath = '/storage/file/{src}/?dest={dest}&appSid={appSid}&versionId={versionId}&storage={storage}&destStorage={destStorage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('src', 'dest', 'versionId', 'storage', 'destStorage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'POST'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'MoveFileResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PutCopy(self, Path, newdest, file, **kwargs):
    """Copy a specific file.

    Args:
        Path (str): source file path e.g. /file.ext (required)
        newdest (str): destination file path (required)
        versionId (str): source file's version (optional)
        storage (str): user's source storage name (optional)
        destStorage (str): user's destination storage name (optional)
        file (str): local path of the file to upload (required)

    Returns:
        ResponseMessage

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'newdest', 'versionId', 'storage', 'destStorage', 'file'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PutCopy" % key)
        allParams[key] = val
    allParams['Path'] = Path
    allParams['newdest'] = newdest
    resourcePath = '/storage/file/{path}/?appSid={appSid}&newdest={newdest}&versionId={versionId}&storage={storage}&destStorage={destStorage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.  ('file' goes in the multipart body,
    # not the URL.)
    for paramName in ('Path', 'newdest', 'versionId', 'storage', 'destStorage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'PUT'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'multipart/form-data'}
    formParams = {}
    # NOTE(review): the handle is handed to callAPI and never closed here;
    # presumably callAPI consumes/closes it — confirm against the client.
    files = {'file': open(file, 'rb')}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'ResponseMessage', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PutCreate(self, Path, file, **kwargs):
    """Upload a specific file.

    Args:
        Path (str): destination file path e.g. /file.ext (required)
        versionId (str): file's version (optional)
        storage (str): user's storage name (optional)
        file (str): local path of the file to upload (required)

    Returns:
        ResponseMessage

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'versionId', 'storage', 'file'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PutCreate" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/file/{path}/?appSid={appSid}&versionId={versionId}&storage={storage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.  ('file' goes in the multipart body,
    # not the URL.)
    for paramName in ('Path', 'versionId', 'storage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'PUT'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'multipart/form-data'}
    formParams = {}
    # NOTE(review): the handle is handed to callAPI and never closed here;
    # presumably callAPI consumes/closes it — confirm against the client.
    files = {'file': open(file, 'rb')}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'ResponseMessage', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def DeleteFolder(self, Path, **kwargs):
    """Remove a specific folder.

    Args:
        Path (str): folder path e.g. /Folder1 (required)
        storage (str): user's storage name (optional)
        recursive (bool): whether subfolders and files must also be deleted (optional)

    Returns:
        RemoveFolderResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'storage', 'recursive'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method DeleteFolder" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/folder/{path}/?appSid={appSid}&storage={storage}&recursive={recursive}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'storage', 'recursive'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'DELETE'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'RemoveFolderResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def GetListFiles(self, **kwargs):
    """Get the file listing of a specific folder.

    Args:
        Path (str): folder path, e.g. root '/' or '/folder1/..' (optional)
        storage (str): user's storage name (optional)

    Returns:
        FilesResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'storage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method GetListFiles" % key)
        allParams[key] = val
    resourcePath = '/storage/folder/{path}/?appSid={appSid}&storage={storage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'storage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'GET'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'FilesResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PostMoveFolder(self, src, dest, **kwargs):
    """Move a specific folder.

    Args:
        src (str): source folder path e.g. /Folder1 (required)
        dest (str): destination folder path e.g. /Folder2 (required)
        storage (str): user's source storage name (optional)
        destStorage (str): user's destination storage name (optional)

    Returns:
        MoveFolderResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['src', 'dest', 'storage', 'destStorage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PostMoveFolder" % key)
        allParams[key] = val
    allParams['src'] = src
    allParams['dest'] = dest
    resourcePath = '/storage/folder/{src}/?dest={dest}&appSid={appSid}&storage={storage}&destStorage={destStorage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('src', 'dest', 'storage', 'destStorage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'POST'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'MoveFolderResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PutCopyFolder(self, Path, newdest, **kwargs):
    """Copy a folder.

    Args:
        Path (str): source folder path e.g. /Folder1 (required)
        newdest (str): destination folder path e.g. /Folder2 (required)
        storage (str): user's source storage name (optional)
        destStorage (str): user's destination storage name (optional)

    Returns:
        ResponseMessage

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'newdest', 'storage', 'destStorage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PutCopyFolder" % key)
        allParams[key] = val
    allParams['Path'] = Path
    allParams['newdest'] = newdest
    resourcePath = '/storage/folder/{path}/?appSid={appSid}&newdest={newdest}&storage={storage}&destStorage={destStorage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'newdest', 'storage', 'destStorage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'PUT'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'ResponseMessage', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def PutCreateFolder(self, Path, **kwargs):
    """Create the folder.

    Args:
        Path (str): folder path to create e.g. /Folder1 (required)
        storage (str): user's source storage name (optional)
        destStorage (str): user's destination storage name (optional)

    Returns:
        ResponseMessage

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['Path', 'storage', 'destStorage'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method PutCreateFolder" % key)
        allParams[key] = val
    allParams['Path'] = Path
    resourcePath = '/storage/folder/{path}/?appSid={appSid}&storage={storage}&destStorage={destStorage}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('Path', 'storage', 'destStorage'):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'PUT'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'ResponseMessage', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
def GetIsStorageExist(self, name, **kwargs):
    """Check if a specific storage exists.

    Args:
        name (str): storage name (required)

    Returns:
        StorageExistResponse

    Raises:
        TypeError: on an unexpected keyword argument.
        ApiException: on a non-2xx HTTP status or a deserialization failure.
    """
    allParams = dict.fromkeys(['name'])
    # .items() replaces the Python-2-only .iteritems(); no locals() mutation.
    for (key, val) in kwargs.items():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s' to method GetIsStorageExist" % key)
        allParams[key] = val
    allParams['name'] = name
    resourcePath = '/storage/{name}/exist/?appSid={appSid}'
    resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
    # Fill each supplied template parameter; strip the query fragment of any
    # parameter that was not supplied.
    for paramName in ('name',):
        if allParams.get(paramName) is not None:
            resourcePath = resourcePath.replace("{" + paramName + "}", str(allParams[paramName]))
        else:
            resourcePath = re.sub("[&?]" + paramName + ".*?(?=&|\\?|$)", "", resourcePath)
    method = 'GET'
    queryParams = {}
    headerParams = {'Accept': 'application/xml,application/json', 'Content-Type': 'application/json'}
    formParams = {}
    files = {}
    bodyParam = None
    postData = (formParams if formParams else bodyParam)
    response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
    try:
        if response.status_code in [200, 201, 202]:
            return self.apiClient.pre_deserialize(response.content, 'StorageExistResponse', response.headers['content-type'])
        raise ApiException(response.status_code, response.content)
    except Exception:
        raise ApiException(response.status_code, response.content)
| 37.17674
| 239
| 0.567111
| 3,701
| 40,597
| 6.204539
| 0.047014
| 0.033532
| 0.0756
| 0.042068
| 0.916736
| 0.908853
| 0.89936
| 0.889736
| 0.882028
| 0.876889
| 0
| 0.004692
| 0.286006
| 40,597
| 1,091
| 240
| 37.210816
| 0.787518
| 0.103259
| 0
| 0.876254
| 0
| 0.016722
| 0.193744
| 0.076332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025084
| false
| 0
| 0.011706
| 0
| 0.061873
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c8e12d9afd46bc75cf1af486a68eae5306296fc5
| 267
|
py
|
Python
|
mellon/events.py
|
LaudateCorpus1/mellon
|
a7a9f6d8abf1dd03b63a94ddb4439c6cc6c2e272
|
[
"MIT"
] | 5
|
2016-12-20T19:39:01.000Z
|
2021-01-08T16:19:17.000Z
|
mellon/events.py
|
CrowdStrike/mellon
|
7216f255d397a41b1c2777a1b02f1c085d07ddfe
|
[
"MIT"
] | 1
|
2018-03-21T17:05:13.000Z
|
2018-03-21T17:05:13.000Z
|
mellon/events.py
|
LaudateCorpus1/mellon
|
a7a9f6d8abf1dd03b63a94ddb4439c6cc6c2e272
|
[
"MIT"
] | 2
|
2017-11-01T15:03:27.000Z
|
2018-11-13T03:04:44.000Z
|
from zope import interface
from zope.interface.interfaces import ObjectEvent
from .interfaces import ISnippetAvailableForSecretsSniffEvent
@interface.implementer(ISnippetAvailableForSecretsSniffEvent)
class SnippetAvailableForSecretsSniffEvent(ObjectEvent):
    """ObjectEvent implementation of ISnippetAvailableForSecretsSniffEvent.

    Adds no behavior of its own; the marker interface identifies the event
    and ``ObjectEvent`` carries the subject object.
    """
    pass
| 38.142857
| 61
| 0.88764
| 21
| 267
| 11.285714
| 0.52381
| 0.067511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074906
| 267
| 7
| 62
| 38.142857
| 0.959514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
cdb05bfddae779da2f457a9d5e52aafcf42092e5
| 9,498
|
py
|
Python
|
tests/test_metadata.py
|
repo-helper/dist-meta
|
8925fdfb624d3a17c68463b74c7f00738b43c82b
|
[
"MIT"
] | 1
|
2021-11-18T11:16:02.000Z
|
2021-11-18T11:16:02.000Z
|
tests/test_metadata.py
|
repo-helper/dist-meta
|
8925fdfb624d3a17c68463b74c7f00738b43c82b
|
[
"MIT"
] | 7
|
2021-07-12T15:31:41.000Z
|
2022-03-18T23:02:43.000Z
|
tests/test_metadata.py
|
repo-helper/dist-meta
|
8925fdfb624d3a17c68463b74c7f00738b43c82b
|
[
"MIT"
] | null | null | null |
# stdlib
from typing import List
# 3rd party
import pytest
from coincidence import AdvancedDataRegressionFixture, AdvancedFileRegressionFixture
from domdf_python_tools.paths import PathPlus
# this package
from dist_meta import metadata
from dist_meta.metadata import MissingFieldError
from dist_meta.metadata_mapping import MetadataMapping
@pytest.fixture()
def example_metadata():
    """Return the text of the ``example_metadata`` file next to this test module."""
    return (PathPlus(__file__).parent / "example_metadata").read_text()
def test_loads(
    example_metadata,
    advanced_data_regression: AdvancedDataRegressionFixture,
    advanced_file_regression: AdvancedFileRegressionFixture,
):
    """Parse the example METADATA text with ``metadata.loads`` and check each field."""
    fields = metadata.loads(example_metadata)
    # Regression-check the full set of header names first.
    advanced_data_regression.check(fields.keys())
    assert fields["Metadata-Version"] == "2.1"
    assert fields["Name"] == "cawdrey"
    assert fields["Version"] == "0.4.2"
    assert fields["Summary"] == "Several useful custom dictionaries for Python 📖 🐍"
    assert fields["Home-page"] == "https://github.com/domdfcoding/cawdrey"
    assert fields["Author"] == "Dominic Davis-Foster"
    assert fields["Author-email"] == "dominic@davis-foster.co.uk"
    assert fields["License"] == "GNU Lesser General Public License v3 or later (LGPLv3+)"
    # Multi-valued headers are retrieved with get_all().
    assert fields.get_all("Project-URL") == [
        "Documentation, https://cawdrey.readthedocs.io/en/latest",
        "Issue Tracker, https://github.com/domdfcoding/cawdrey/issues",
        "Source Code, https://github.com/domdfcoding/cawdrey",
    ]
    assert fields[
        "Keywords"
    ] == "frozenordereddict,orderedfrozendict,frozen,immutable,frozendict,dict,dictionary,map,Mapping,MappingProxyType,Counter"
    assert fields.get_all("Platform") == ["Windows", "macOS", "Linux"]
    assert fields["Requires-Python"] == ">=3.6.1"
    assert fields["Description-Content-Type"] == "text/x-rst"
    assert fields.get_all("Requires-Dist") == ["domdf-python-tools (>=1.1.0)", "typing-extensions (>=3.7.4.3)"]
    assert fields["Provides-Extra"] == "all"
    # The long Description body is checked against a file-regression fixture.
    advanced_file_regression.check(fields["Description"])
def test_load(
    example_metadata,
    tmp_pathplus: PathPlus,
    advanced_data_regression: AdvancedDataRegressionFixture,
    advanced_file_regression: AdvancedFileRegressionFixture,
):
    """Writing METADATA to disk and parsing it back with ``metadata.load`` gives the same fields as ``loads``."""

    metadata_file = tmp_pathplus / "METADATA"
    metadata_file.write_text(example_metadata)

    fields = metadata.load(metadata_file)

    advanced_data_regression.check(fields.keys())

    # Single-valued headers.
    expected_single = {
        "Metadata-Version": "2.1",
        "Name": "cawdrey",
        "Version": "0.4.2",
        "Summary": "Several useful custom dictionaries for Python 📖 🐍",
        "Home-page": "https://github.com/domdfcoding/cawdrey",
        "Author": "Dominic Davis-Foster",
        "Author-email": "dominic@davis-foster.co.uk",
        "License": "GNU Lesser General Public License v3 or later (LGPLv3+)",
        "Keywords": (
            "frozenordereddict,orderedfrozendict,frozen,immutable,frozendict,"
            "dict,dictionary,map,Mapping,MappingProxyType,Counter"
        ),
        "Requires-Python": ">=3.6.1",
        "Description-Content-Type": "text/x-rst",
        "Provides-Extra": "all",
    }
    for field_name, expected_value in expected_single.items():
        assert fields[field_name] == expected_value

    # Multi-valued headers.
    assert fields.get_all("Project-URL") == [
        "Documentation, https://cawdrey.readthedocs.io/en/latest",
        "Issue Tracker, https://github.com/domdfcoding/cawdrey/issues",
        "Source Code, https://github.com/domdfcoding/cawdrey",
    ]
    assert fields.get_all("Platform") == ["Windows", "macOS", "Linux"]
    assert fields.get_all("Requires-Dist") == [
        "domdf-python-tools (>=1.1.0)",
        "typing-extensions (>=3.7.4.3)",
    ]

    advanced_file_regression.check(fields["Description"])
def test_load_no_version(tmp_pathplus):
    """``metadata.load`` raises :exc:`MissingFieldError` when the
    'Metadata-Version' field is absent from the file."""

    (tmp_pathplus / "METADATA").write_lines([
        "Generator: bdist_wheel (0.36.2)",
        "Name: cawdrey",
        "Version: 0.4.2",
        "Home-page: https://github.com/domdfcoding/cawdrey",
    ])

    # Fixed: the match pattern was an f-string with no placeholders (F541);
    # a plain string literal is equivalent and intentional.
    with pytest.raises(MissingFieldError, match="No 'Metadata-Version' field was provided."):
        metadata.load(tmp_pathplus / "METADATA")
def test_dumps(advanced_file_regression: AdvancedFileRegressionFixture):
    """Serialising a MetadataMapping with ``metadata.dumps`` matches the regression file."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "2.1"
    fields["Name"] = "cawdrey"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    # "Platform" is assigned three times — MetadataMapping presumably retains
    # each value as a repeated header (cf. get_all() elsewhere in this file).
    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    advanced_file_regression.check(metadata.dumps(fields), extension='')
def test_dumps_description(advanced_file_regression: AdvancedFileRegressionFixture):
    """``metadata.dumps`` with a multi-line 'Description' body."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "2.1"
    fields["Name"] = "cawdrey"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    # Body containing blank lines and tab indents, to exercise continuation handling.
    fields["Description"] = "This is the body\n\nIt can have multiple lines\n\t\tand indents"

    advanced_file_regression.check(metadata.dumps(fields), extension='')
def test_dump(
    tmp_pathplus: PathPlus,
    advanced_file_regression: AdvancedFileRegressionFixture,
):
    """``metadata.dump`` writes the serialised metadata to the given file."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "2.1"
    fields["Name"] = "cawdrey"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    target = tmp_pathplus / "METADATA"
    metadata.dump(fields, target)
    advanced_file_regression.check_file(target)
def test_dumps_version_too_low():
    """``metadata.dumps`` rejects metadata versions older than 2.1."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "1.1"  # below the supported minimum
    fields["Name"] = "cawdrey"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    expected_msg = "'dump_metadata' only supports metadata version 2.1 and above."
    with pytest.raises(ValueError, match=expected_msg):
        metadata.dumps(fields)
def test_dump_no_meta_version(tmp_pathplus: PathPlus):
    """``metadata.dump`` raises :exc:`MissingFieldError` when 'Metadata-Version'
    is absent, and must not leave a partial file behind."""

    fields = MetadataMapping()
    fields["Name"] = "cawdrey"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"
    fields["Platform"] = "Windows"
    fields["Platform"] = "macOS"
    fields["Platform"] = "Linux"

    # Fixed: the match pattern was an f-string with no placeholders (F541).
    with pytest.raises(MissingFieldError, match="No 'Metadata-Version' field was provided."):
        metadata.dump(fields, tmp_pathplus / "METADATA")

    # A failed dump must not create the target file.
    assert not (tmp_pathplus / "METADATA").exists()
def test_dumps_no_name():
    """``metadata.dumps`` raises :exc:`MissingFieldError` when 'Name' is absent."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "2.1"
    fields["Version"] = "0.4.2"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    with pytest.raises(MissingFieldError, match="No 'Name' field was provided."):
        metadata.dumps(fields)
def test_dumps_no_version():
    """``metadata.dumps`` raises :exc:`MissingFieldError` when 'Version' is absent."""

    fields = MetadataMapping()
    fields["Metadata-Version"] = "2.1"
    fields["Name"] = "cawdrey"
    fields["Home-page"] = "https://github.com/domdfcoding/cawdrey"

    for platform in ("Windows", "macOS", "Linux"):
        fields["Platform"] = platform

    with pytest.raises(MissingFieldError, match="No 'Version' field was provided."):
        metadata.dumps(fields)
def test_loads_description_as_key_pipe():
    """'Description' supplied as a header field, with '|'-prefixed continuation lines."""

    source_lines = [
        "Metadata-Version: 2.1",
        "Name: BeagleVote",
        "version: 1.0a2",
        "Description: This project provides powerful math functions",
        " |For example, you can use `sum()` to sum numbers:",
        " |",
        " |Example::",
        " |",
        " | >>> sum(1, 2)",
        " | 3",
        " |",
    ]
    fields = metadata.loads('\n'.join(source_lines))

    assert fields["Metadata-Version"] == "2.1"
    assert fields["Name"] == "BeagleVote"
    assert fields["Version"] == "1.0a2"

    # The '|' continuation markers must be stripped from the parsed body.
    expected_description = [
        "This project provides powerful math functions",
        "For example, you can use `sum()` to sum numbers:",
        '',
        "Example::",
        '',
        " >>> sum(1, 2)",
        " 3",
        '',
    ]
    assert fields["Description"] == '\n'.join(expected_description)
def test_loads_description_as_key_spaces():
    """'Description' as a header with whitespace-indented continuation lines.

    Not to spec, but this is how setuptools and distutils emit it.
    """

    source_lines = [
        "Metadata-Version: 2.1",
        "Name: BeagleVote",
        "version: 1.0a2",
        "Description: This project provides powerful math functions",
        " For example, you can use `sum()` to sum numbers:",
        " ",
        " Example::",
        " ",
        " >>> sum(1, 2)",
        " 3",
        " ",
    ]
    fields = metadata.loads('\n'.join(source_lines))

    assert fields["Metadata-Version"] == "2.1"
    assert fields["Name"] == "BeagleVote"
    assert fields["Version"] == "1.0a2"

    # The leading continuation whitespace must be stripped from the body.
    expected_description = [
        "This project provides powerful math functions",
        "For example, you can use `sum()` to sum numbers:",
        '',
        "Example::",
        '',
        " >>> sum(1, 2)",
        " 3",
        '',
    ]
    assert fields["Description"] == '\n'.join(expected_description)
@pytest.mark.parametrize(
    "lines, wsp, expected",
    [
        # tab as the continuation marker
        (["hello", "world"], '\t', ["hello", "world"]),
        (["\thello", "\tworld"], '\t', ["hello", "world"]),
        (["hello", "\tworld"], '\t', ["hello", "world"]),
        ([" hello", " world"], '\t', [" hello", " world"]),
        (["|hello", "|world"], '\t', ["|hello", "|world"]),
        # space as the continuation marker
        ([" hello", " world"], ' ', ["hello", "world"]),
        (["hello", " world"], ' ', ["hello", "world"]),
        ([" hello", "world"], ' ', [" hello", "world"]),
        # pipe as the continuation marker
        (["|hello", "|world"], '|', ["hello", "world"]),
        (["hello", "|world"], '|', ["hello", "world"]),
        (["|hello", "world"], '|', ["|hello", "world"]),
    ],
)
def test_clean_desc(lines: List[str], wsp: str, expected: List[str]):
    """``_clean_desc`` strips the continuation marker ``wsp`` from description lines.

    NOTE(review): judging from the cases above, the marker is only stripped when
    every line after the first carries it — confirm against the implementation.
    """
    assert metadata._clean_desc(lines, wsp) == expected
| 33.20979
| 126
| 0.665614
| 1,098
| 9,498
| 5.665756
| 0.173953
| 0.0733
| 0.036168
| 0.056261
| 0.825591
| 0.800354
| 0.766758
| 0.760328
| 0.70696
| 0.70696
| 0
| 0.01313
| 0.150032
| 9,498
| 285
| 127
| 33.326316
| 0.756968
| 0.008528
| 0
| 0.74569
| 0
| 0.008621
| 0.423608
| 0.035274
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.060345
| false
| 0
| 0.030172
| 0.00431
| 0.094828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b51d852c07030f9b3d4bc9b0d793ca026045ffae
| 96
|
py
|
Python
|
pyspherical/__init__.py
|
aelanman/pyspherical
|
36e362f1579630fa53b30e3f81de8ce9075f919b
|
[
"MIT"
] | null | null | null |
pyspherical/__init__.py
|
aelanman/pyspherical
|
36e362f1579630fa53b30e3f81de8ce9075f919b
|
[
"MIT"
] | 11
|
2020-05-10T20:41:46.000Z
|
2020-06-13T18:15:45.000Z
|
pyspherical/__init__.py
|
aelanman/pyspherical
|
36e362f1579630fa53b30e3f81de8ce9075f919b
|
[
"MIT"
] | null | null | null |
from .transforms import * # noqa
from .wigner import * # noqa
from .utils import * # noqa
| 24
| 34
| 0.65625
| 12
| 96
| 5.25
| 0.5
| 0.47619
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 96
| 3
| 35
| 32
| 0.875
| 0.145833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b582ee2d97a5295a9a69ee76597b023d8e99a3d4
| 32,223
|
py
|
Python
|
tests/test_gamessus/test_DictionaryFile.py
|
lmmentel/chemtools
|
3f8f870da52b5d8a76a4be4f99e18cd865adaa7c
|
[
"MIT"
] | 7
|
2020-03-07T18:09:34.000Z
|
2021-07-18T15:02:24.000Z
|
tests/test_gamessus/test_DictionaryFile.py
|
lmmentel/chemtools
|
3f8f870da52b5d8a76a4be4f99e18cd865adaa7c
|
[
"MIT"
] | 3
|
2020-11-24T23:34:14.000Z
|
2022-03-28T13:39:00.000Z
|
tests/test_gamessus/test_DictionaryFile.py
|
lmmentel/chemtools
|
3f8f870da52b5d8a76a4be4f99e18cd865adaa7c
|
[
"MIT"
] | 6
|
2020-09-18T10:29:24.000Z
|
2022-01-14T13:19:15.000Z
|
import numpy as np
import os
import unittest
from chemtools.calculators.gamessreader import DictionaryFile
class TestDictionaryFileHeMini(unittest.TestCase):
    """Tests for :class:`DictionaryFile` reading a GAMESS-US dictionary (.F10) file.

    Fixture: ``data/he/he_mini_hf.F10`` — presumably an HF/MINI calculation on
    the He atom (inferred from the file name; verify against the data file).
    """

    def setUp(self):
        # Resolve the fixture path relative to this test module so the suite
        # works regardless of the current working directory.
        dictfile = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data/he/he_mini_hf.F10')
        self.daf = DictionaryFile(dictfile)

    def tearDown(self):
        # DictionaryFile keeps an open file handle; close it after each test.
        self.daf.file.close()

    def test_read_irect_he(self):
        # ``irecst`` as parsed from this fixture's dictionary header.
        self.assertEqual(self.daf.irecst, 48)

    def test_read_ioda_he(self):
        # Expected ``ioda`` record-directory table for this fixture;
        # -1 entries presumably mark absent records — confirm in gamessreader.
        ioda = [26, 25, -1, -1, -1, -1, 2, 3, 4, 6, 28, 29, 30, 39, 35, 37, 38,
            -1, -1, -1, -1, -1, 40, 41, 42, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, 27, 34, -1, -1, -1, -1, -1, 43,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            44, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, 33, -1, -1, -1, -1, -1, -1, -1, 45, 46, 47, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 36,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            31, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, 32, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, 9, 15, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
        self.assertListEqual(list(self.daf.ioda), ioda)

    def test_read_ifilen_he(self):
        # Expected ``ifilen`` table (per-record lengths) for this fixture.
        ifilen = [ 3, 115, 0, 0, 0, 0, 432, 1728, 4800,
            10800, 1, 1, 1, 1, 1, 1, 1, 0,
            0, 0, 0, 0, 1, 1, 1, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 1, 1,
            0, 0, 0, 0, 0, 1, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 1, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 1, 0, 0, 0,
            0, 0, 0, 0, 1, 1, 1, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 1, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 1, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            1, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 21168, 37632, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0]
        self.assertListEqual(list(self.daf.ifilen), ifilen)

    def test_read_iss_he(self):
        # Header field ``iss`` for this fixture.
        self.assertEqual(self.daf.iss, 8)

    def test_read_ipk_he(self):
        # Header field ``ipk`` for this fixture.
        self.assertEqual(self.daf.ipk, 9)

    def test_geometry_he(self):
        # Record 1: a single He atom at the origin.
        xyz = np.array([0.0, 0.0, 0.0], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(1), xyz))

    def test_bare_nucleus_integrals_he(self):
        # Record 11: bare-nucleus Hamiltonian integrals (one basis function).
        bn = np.array([-1.932108], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(11), bn))

    def test_overlap_integrals_he(self):
        # Record 12: overlap matrix; a single normalised function gives [1.0].
        overlap = np.array([1.0], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(12), overlap))

    def test_kinetic_energy_integrals_he(self):
        # Record 13: kinetic-energy integrals.
        kinetic = np.array([1.417863], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(13), kinetic))
class TestDictionaryFileNeDZ(unittest.TestCase):
    """Tests for :class:`DictionaryFile` on a larger dictionary file.

    Fixture: ``data/ne/ne_dz_guga.F10`` — presumably a GUGA/DZ calculation on
    the Ne atom (inferred from the file name; verify against the data file).
    """

    def setUp(self):
        # Resolve the fixture path relative to this test module so the suite
        # works regardless of the current working directory.
        dictfile = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data/ne/ne_dz_guga.F10')
        self.daf = DictionaryFile(dictfile)

    def tearDown(self):
        # DictionaryFile keeps an open file handle; close it after each test.
        self.daf.file.close()

    def test_read_irect_ne(self):
        # ``irecst`` as parsed from this fixture's dictionary header.
        self.assertEqual(self.daf.irecst, 41)

    def test_read_ioda_ne(self):
        # Expected ``ioda`` record-directory table for this fixture;
        # -1 entries presumably mark absent records — confirm in gamessreader.
        ioda = [10, 9, -1, -1, -1, -1, 2, 3, 4, 6, 12, 13, 14, 23, 19, 21, 22,
            -1, 36, -1, 37, -1, 24, 25, 26, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            33, -1, -1, -1, -1, -1, -1, -1, -1, 11, 18, -1, -1, -1, -1, -1, 27,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 40,
            28, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, 17, -1, -1, -1, -1, -1, -1, -1, 29, 30, 31, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 20,
            38, -1, -1, -1, -1, -1, 32, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, 35, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 39, -1, -1, -1,
            34, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, 16, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]
        self.assertListEqual(list(self.daf.ioda), ioda)

    def test_read_ifilen_ne(self):
        # Expected ``ifilen`` table (per-record lengths) for this fixture.
        ifilen = [ 3, 115, 0, 0, 0, 0, 432, 1728, 4800,
            10800, 120, 120, 120, 120, 225, 120, 15, 0,
            225, 0, 15, 0, 120, 120, 120, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 120, 0,
            0, 0, 0, 0, 0, 0, 0, 225, 225,
            0, 0, 0, 0, 0, 120, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 105, 15, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 120, 0, 0, 0,
            0, 0, 0, 0, 120, 120, 120, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 15, 15, 0, 0, 0, 0, 0,
            15, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 120, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 120, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 105, 0, 0, 0, 15,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            120, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0]
        self.assertListEqual(list(self.daf.ifilen), ifilen)

    def test_read_iss_ne(self):
        # Header field ``iss`` for this fixture.
        self.assertEqual(self.daf.iss, 8)

    def test_read_ipk_ne(self):
        # Header field ``ipk`` for this fixture.
        self.assertEqual(self.daf.ipk, 9)

    def test_geometry_ne(self):
        # Record 1: a single Ne atom at the origin.
        xyz = np.array([0.0, 0.0, 0.0], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(1), xyz))

    def test_bare_nucleus_integrals_ne(self):
        # Record 11: bare-nucleus Hamiltonian integrals (triangular packing
        # presumed from the length — verify against gamessreader).
        bn = np.array([-49.90784816, 11.63615917, -14.31984373, -9.08384634,
            -6.74448739, -10.4046429 , 0. , 0. ,
            0. , -11.72324536, 0. , 0. ,
            0. , 0. , -11.72324536, 0. ,
            0. , 0. , 0. , 0. ,
            -11.72324536, 0. , 0. , 0. ,
            -5.01216927, 0. , 0. , -5.91063165,
            0. , 0. , 0. , 0. ,
            -5.01216927, 0. , 0. , -5.91063165,
            0. , 0. , 0. , 0. ,
            0. , -5.01216927, 0. , 0. ,
            -5.91063165, -3.73701919, -6.84321569, -7.23203653,
            0. , 0. , 0. , 0. ,
            0. , 0. , -7.85824657, -3.73701919,
            -6.84321569, -7.23203653, 0. , 0. ,
            0. , 0. , 0. , 0. ,
            -4.57674886, -7.85824657, -3.73701919, -6.84321569,
            -7.23203653, 0. , 0. , 0. ,
            0. , 0. , 0. , -4.57674886,
            -4.57674886, -7.85824657, 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , -4.92224657, 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            -4.92224657, 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , -4.92224657], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(11), bn))

    def test_overlap_integrals_ne(self):
        # Record 12: overlap integrals.
        overlap = np.array([ 1. , -0.21014965, 1. , 0.19598765, 0.70445015,
            1. , 0. , 0. , 0. , 1. ,
            0. , 0. , 0. , 0. , 1. ,
            0. , 0. , 0. , 0. , 0. ,
            1. , 0. , 0. , 0. , 0.48415533,
            0. , 0. , 1. , 0. , 0. ,
            0. , 0. , 0.48415533, 0. , 0. ,
            1. , 0. , 0. , 0. , 0. ,
            0. , 0.48415533, 0. , 0. , 1. ,
            0.08526732, 0.652726 , 0.63912803, 0. , 0. ,
            0. , 0. , 0. , 0. , 1. ,
            0.08526732, 0.652726 , 0.63912803, 0. , 0. ,
            0. , 0. , 0. , 0. , 0.33333333,
            1. , 0.08526732, 0.652726 , 0.63912803, 0. ,
            0. , 0. , 0. , 0. , 0. ,
            0.33333333, 0.33333333, 1. , 0. , 0. ,
            0. , 0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. , 0. ,
            1. , 0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. , 1. ,
            0. , 0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. , 1. ], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(12), overlap))

    def test_kinetic_energy_integrals_ne(self):
        # Record 13: kinetic-energy integrals.
        kinetic = np.array([ 46.23545099, -25.61950625, 16.90342191, 0.2724849 ,
            0.75733017, 0.73035 , 0. , 0. ,
            0. , 7.1947275 , 0. , 0. ,
            0. , 0. , 7.1947275 , 0. ,
            0. , 0. , 0. , 0. ,
            7.1947275 , 0. , 0. , 0. ,
            0.85111908, 0. , 0. , 1.07925 ,
            0. , 0. , 0. , 0. ,
            0.85111908, 0. , 0. , 1.07925 ,
            0. , 0. , 0. , 0. ,
            0. , 0.85111908, 0. , 0. ,
            1.07925 , -1.40744752, 2.0040608 , 0.65182507,
            0. , 0. , 0. , 0. ,
            0. , 0. , 4.771 , -1.40744752,
            2.0040608 , 0.65182507, 0. , 0. ,
            0. , 0. , 0. , 0. ,
            -0.367 , 4.771 , -1.40744752, 2.0040608 ,
            0.65182507, 0. , 0. , 0. ,
            0. , 0. , 0. , -0.367 ,
            -0.367 , 4.771 , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 7.707 , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            7.707 , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 0. ,
            0. , 0. , 0. , 7.707 ], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(13), kinetic))

    def test_orbital_energies_ne(self):
        # Record 17: orbital energies.
        oe = np.array([-32.76563542, -1.91879823, -0.83209725, -0.83209725,
            -0.83209725, 1.69455773, 1.69455773, 1.69455773,
            2.15942495, 5.1967114 , 5.1967114 , 5.1967114 ,
            5.1967114 , 5.1967114 , 0. ], dtype=float)
        self.assertTrue(np.allclose(self.daf.read_record(17), oe))

    def test_orbital_symmetry_labels_ne(self):
        # Record 255: orbital symmetry labels, stored as bytes in the file and
        # decoded to str for comparison.
        labs = ['AG ', 'AG ', 'B1U ', 'B3U ', 'B2U ',
            'B3U ', 'B2U ', 'B1U ', 'AG ', 'B1G ',
            'B3G ', 'AG ', 'B2G ', 'AG ', '']
        self.assertListEqual([s.decode('utf-8') for s in self.daf.read_record(255)], labs)
# Allow running this test module directly (``python test_DictionaryFile.py``).
if __name__ == "__main__":
    unittest.main()
| 63.431102
| 101
| 0.223722
| 4,736
| 32,223
| 1.502534
| 0.039907
| 0.573637
| 0.829258
| 1.06914
| 0.873384
| 0.873384
| 0.852024
| 0.850478
| 0.841484
| 0.834036
| 0
| 0.36207
| 0.562269
| 32,223
| 507
| 102
| 63.556213
| 0.142432
| 0
| 0
| 0.742678
| 0
| 0
| 0.005245
| 0.001365
| 0
| 0
| 0
| 0
| 0.041841
| 1
| 0.050209
| false
| 0
| 0.008368
| 0
| 0.062762
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
b5ae50fa4b05c6c16e42c472829979ea8d8d35b4
| 17,462
|
py
|
Python
|
tests/test_rec2.py
|
mediatemple/ReC2
|
c4074050cad99ddc7b0190cbbc278e0d37c361c7
|
[
"MIT"
] | 3
|
2016-12-17T02:19:36.000Z
|
2017-03-20T05:22:37.000Z
|
tests/test_rec2.py
|
mediatemple/ReC2
|
c4074050cad99ddc7b0190cbbc278e0d37c361c7
|
[
"MIT"
] | null | null | null |
tests/test_rec2.py
|
mediatemple/ReC2
|
c4074050cad99ddc7b0190cbbc278e0d37c361c7
|
[
"MIT"
] | 2
|
2016-07-29T22:40:11.000Z
|
2019-09-09T17:46:29.000Z
|
import imp
import yaml
import datetime
import pytz
from freezegun import freeze_time
# Load the module under test from its file path.  NOTE(review): `imp` is
# deprecated; importlib.machinery would be the modern equivalent.
rec2 = imp.load_source('rec2', './rec2/rec2.py')
# Single shared Rec2 instance reused (and mutated) by every test below.
a = rec2.Rec2()
# Baseline AutoScaling-group description fixture (shape mirrors the AWS
# describe_auto_scaling_groups response): one healthy instance, room to
# grow (MaxSize 4), no rec2 cooldown tag.
asg_details = {
    "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-2:761425999210:autoScalingGroup:f604b0bf-e970-45a2-87c0-9cb336dcaeda:autoScalingGroupName/WebAppASG",
    "HealthCheckGracePeriod": 300,
    "SuspendedProcesses": [],
    "DesiredCapacity": 1,
    "Tags": [
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "Production",
            "Key": "Environment"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "WebAppASG",
            "Key": "Name"
        }
    ],
    "EnabledMetrics": [],
    "LoadBalancerNames": [
        "PROD-ELB"
    ],
    "AutoScalingGroupName": "WebAppASG",
    "DefaultCooldown": 300,
    "MinSize": 1,
    "Instances": [
        {
            "ProtectedFromScaleIn": False,
            "AvailabilityZone": "us-west-2a",
            "InstanceId": "i-d031f714",
            "HealthStatus": "Healthy",
            "LifecycleState": "InService",
            "LaunchConfigurationName": "WebAppASGLaunchConfigC"
        }
    ],
    "MaxSize": 4,
    "VPCZoneIdentifier": "subnet-03b96f66",
    "TerminationPolicies": [
        "Default"
    ],
    "LaunchConfigurationName": "WebAppASGLaunchConfigC",
    "CreatedTime": "2015-06-03T23:34:14.159Z",
    "AvailabilityZones": [
        "us-west-2a"
    ],
    "HealthCheckType": "EC2",
    "NewInstancesProtectedFromScaleIn": False
}
# Like asg_details, but carries a "rec2-modify-to-standard" tag whose
# Value is a datetime OBJECT instead of a formatted string — presumably
# what drives the "Cooldown threshold invalidation" path; confirm in
# rec2's cooldown parsing.
asg_details_cooldown_invalid = {
    "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-2:761425999210:autoScalingGroup:f604b0bf-e970-45a2-87c0-9cb336dcaeda:autoScalingGroupName/WebAppASG",
    "HealthCheckGracePeriod": 300,
    "SuspendedProcesses": [],
    "DesiredCapacity": 1,
    "Tags": [
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "Production",
            "Key": "Environment"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "WebAppASG",
            "Key": "Name"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "Key": "rec2-modify-to-standard",
            # datetime object rather than the expected string format
            "Value": datetime.datetime(2016, 1, 1, 19, 0, 0, 0, tzinfo=pytz.utc),
            "PropagateAtLaunch": False
        }
    ],
    "EnabledMetrics": [],
    "LoadBalancerNames": [
        "PROD-ELB"
    ],
    "AutoScalingGroupName": "WebAppASG",
    "DefaultCooldown": 300,
    "MinSize": 1,
    "Instances": [
        {
            "ProtectedFromScaleIn": False,
            "AvailabilityZone": "us-west-2a",
            "InstanceId": "i-d031f714",
            "HealthStatus": "Healthy",
            "LifecycleState": "InService",
            "LaunchConfigurationName": "WebAppASGLaunchConfigC"
        }
    ],
    "MaxSize": 4,
    "VPCZoneIdentifier": "subnet-03b96f66",
    "TerminationPolicies": [
        "Default"
    ],
    "LaunchConfigurationName": "WebAppASGLaunchConfigC",
    "CreatedTime": "2015-06-03T23:34:14.159Z",
    "AvailabilityZones": [
        "us-west-2a"
    ],
    "HealthCheckType": "EC2",
    "NewInstancesProtectedFromScaleIn": False
}
# Like asg_details, but with a "rec2-modify-to-standard" tag stamped
# 19:15 UTC — only 15 minutes before the tests' frozen clock of 19:30,
# so the cooldown window is presumably not yet satisfied (confirm the
# threshold in the vars fixture).
asg_details_cooldown_not_met = {
    "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-2:761425999210:autoScalingGroup:f604b0bf-e970-45a2-87c0-9cb336dcaeda:autoScalingGroupName/WebAppASG",
    "HealthCheckGracePeriod": 300,
    "SuspendedProcesses": [],
    "DesiredCapacity": 1,
    "Tags": [
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "Production",
            "Key": "Environment"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "WebAppASG",
            "Key": "Name"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "Key": "rec2-modify-to-standard",
            "Value": "Fri Jan 01 19:15:00 UTC 2016",
            "PropagateAtLaunch": False
        }
    ],
    "EnabledMetrics": [],
    "LoadBalancerNames": [
        "PROD-ELB"
    ],
    "AutoScalingGroupName": "WebAppASG",
    "DefaultCooldown": 300,
    "MinSize": 1,
    "Instances": [
        {
            "ProtectedFromScaleIn": False,
            "AvailabilityZone": "us-west-2a",
            "InstanceId": "i-d031f714",
            "HealthStatus": "Healthy",
            "LifecycleState": "InService",
            "LaunchConfigurationName": "WebAppASGLaunchConfigC"
        }
    ],
    "MaxSize": 4,
    "VPCZoneIdentifier": "subnet-03b96f66",
    "TerminationPolicies": [
        "Default"
    ],
    "LaunchConfigurationName": "WebAppASGLaunchConfigC",
    "CreatedTime": "2015-06-03T23:34:14.159Z",
    "AvailabilityZones": [
        "us-west-2a"
    ],
    "HealthCheckType": "EC2",
    "NewInstancesProtectedFromScaleIn": False
}
# Like asg_details, but MaxSize == DesiredCapacity == 1: the group is
# already at capacity, exercising the "size bump" path when an upscale
# is requested.
asg_details_at_capacity = {
    "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-2:761425999210:autoScalingGroup:f604b0bf-e970-45a2-87c0-9cb336dcaeda:autoScalingGroupName/WebAppASG",
    "HealthCheckGracePeriod": 300,
    "SuspendedProcesses": [],
    "DesiredCapacity": 1,
    "Tags": [
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "Production",
            "Key": "Environment"
        },
        {
            "ResourceType": "auto-scaling-group",
            "ResourceId": "WebAppASG",
            "PropagateAtLaunch": True,
            "Value": "WebAppASG",
            "Key": "Name"
        }
    ],
    "EnabledMetrics": [],
    "LoadBalancerNames": [
        "PROD-ELB"
    ],
    "AutoScalingGroupName": "WebAppASG",
    "DefaultCooldown": 300,
    "MinSize": 1,
    "Instances": [
        {
            "ProtectedFromScaleIn": False,
            "AvailabilityZone": "us-west-2a",
            "InstanceId": "i-d031f714",
            "HealthStatus": "Healthy",
            "LifecycleState": "InService",
            "LaunchConfigurationName": "WebAppASGLaunchConfigC"
        }
    ],
    # Already at max capacity (compare asg_details, where MaxSize is 4).
    "MaxSize": 1,
    "VPCZoneIdentifier": "subnet-03b96f66",
    "TerminationPolicies": [
        "Default"
    ],
    "LaunchConfigurationName": "WebAppASGLaunchConfigC",
    "CreatedTime": "2015-06-03T23:34:14.159Z",
    "AvailabilityZones": [
        "us-west-2a"
    ],
    "HealthCheckType": "EC2",
    "NewInstancesProtectedFromScaleIn": False
}
# Launch-configuration fixture for the credit (burstable t2) instance
# class, shaped like describe_launch_configurations output.
launch_configs_credit = [
    {
        "UserData": "#!/bin/bash",
        "IamInstanceProfile": "LambdaWorker",
        "EbsOptimized": False,
        "LaunchConfigurationARN": "arn:aws:autoscaling:us-west-2:761425999210:launchConfiguration:d9521163-b772-4922-8c62-a814cc4aedf3:launchConfigurationName/WebAppASGLaunchConfigC",
        "InstanceMonitoring": {
            "Enabled": False
        },
        "ClassicLinkVPCSecurityGroups": [],
        "CreatedTime": "2015-10-14T17:16:37.491Z",
        "BlockDeviceMappings": [
            {
                "DeviceName": "/dev/xvda",
                "Ebs": {
                    "DeleteOnTermination": True,
                    "VolumeSize": 8,
                    "VolumeType": "gp2"
                }
            }
        ],
        "KeyName": "DANNO1",
        "SecurityGroups": [
            "sg-5de76738",
            "sg-9ae262ff",
            "sg-afe262ca"
        ],
        "LaunchConfigurationName": "WebAppASGLaunchConfigC",
        "KernelId": "",
        "RamdiskId": "",
        "ImageId": "ami-8ce302bf",
        # t2 family = CPU-credit (burstable) instance class.
        "InstanceType": "t2.medium",
        "AssociatePublicIpAddress": True
    }
]
# Launch-configuration fixture for the standard (c4) instance class.
# UserData here is base64 — appears to decode to "#!/bin/bash\r\n";
# compare the plain-text UserData in launch_configs_credit.
launch_configs_standard = [
    {
        "UserData": "IyEvYmluL2Jhc2gNCg==",
        "IamInstanceProfile": "LambdaWorker",
        "EbsOptimized": False,
        "LaunchConfigurationARN": "arn:aws:autoscaling:us-west-2:761425999210:launchConfiguration:d9521163-b772-4922-8c62-a814cc4aedf3:launchConfigurationName/WebAppASGLaunchConfigC",
        "InstanceMonitoring": {
            "Enabled": False
        },
        "ClassicLinkVPCSecurityGroups": [],
        "CreatedTime": "2015-10-14T17:16:37.491Z",
        "BlockDeviceMappings": [
            {
                "DeviceName": "/dev/xvda",
                "Ebs": {
                    "DeleteOnTermination": True,
                    "VolumeSize": 8,
                    "VolumeType": "gp2"
                }
            }
        ],
        "KeyName": "DANNO1",
        "SecurityGroups": [
            "sg-5de76738",
            "sg-9ae262ff",
            "sg-afe262ca"
        ],
        "LaunchConfigurationName": "WebAppASGLaunchConfigC",
        "KernelId": "",
        "RamdiskId": "",
        "ImageId": "ami-8ce302bf",
        "InstanceType": "c4.large",
        "AssociatePublicIpAddress": True
    }
]
# Empty result set: exercises the "Config not found!" path.
launch_configs_missing = []
# CloudWatch alarm-status fixtures (describe_alarms shape).
# All three rec2 alarms in OK state: no scaling action expected.
no_alarm = {
    "MetricAlarms": [{
        "AlarmName": "ReC2LowCpu",
        "AlarmDescription": "ASG CPU Low alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2LowCredits",
        "AlarmDescription": "ASG CPU Credits Low Alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2DragCredits",
        "AlarmDescription": "CPU Credits Drag Alarm",
        "StateValue": "OK",
    }]
}
# Only the low-CPU alarm is firing: the group should downscale to the
# credit instance class.
low_asg_cpu = {
    "MetricAlarms": [{
        "AlarmName": "ReC2LowCpu",
        "AlarmDescription": "ASG CPU Low alarm",
        "StateValue": "ALARM",
    }, {
        "AlarmName": "ReC2LowCredits",
        "AlarmDescription": "ASG CPU Credits Low Alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2DragCredits",
        "AlarmDescription": "CPU Credits Drag Alarm",
        "StateValue": "OK",
    }]
}
# Only the low-credits alarm is firing: the group should upscale to the
# standard instance class.
credit_low = {
    "MetricAlarms": [{
        "AlarmName": "ReC2LowCpu",
        "AlarmDescription": "ASG CPU Low alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2LowCredits",
        "AlarmDescription": "ASG CPU Credits Low Alarm",
        "StateValue": "ALARM",
    }, {
        "AlarmName": "ReC2DragCredits",
        "AlarmDescription": "CPU Credits Drag Alarm",
        "StateValue": "OK",
    }]
}
# Only the credit-drag alarm is firing: upscale unless drag handling is
# disabled in the vars fixture.
drag_alarm = {
    "MetricAlarms": [{
        "AlarmName": "ReC2LowCpu",
        "AlarmDescription": "ASG CPU Low alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2LowCredits",
        "AlarmDescription": "ASG CPU Credits Low Alarm",
        "StateValue": "OK",
    }, {
        "AlarmName": "ReC2DragCredits",
        "AlarmDescription": "CPU Credits Drag Alarm",
        "StateValue": "ALARM",
    }]
}
def get_vars(extra=None):
    """Load the [vars, alarms] YAML fixtures for a test.

    Args:
        extra: Optional suffix selecting a variant vars file, e.g.
            "_drag_disabled" -> ./tests/test_vars_drag_disabled.yaml.

    Returns:
        A two-element list: [parsed vars fixture, parsed alarms fixture].
    """
    suffix = extra if extra else ""
    # open() + context managers replace the Python-2-only file() builtin
    # and guarantee the handles are closed.  safe_load avoids arbitrary
    # object construction; the fixtures are plain-data YAML.
    with open("./tests/test_vars{}.yaml".format(suffix)) as vars_fh:
        test_vars = yaml.safe_load(vars_fh)
    with open('./tests/test_alarms.yaml') as alarms_fh:
        alarms = yaml.safe_load(alarms_fh)
    return [test_vars, alarms]
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_noop():
    """Credit config with no alarms firing: nothing happens."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, no_alarm, launch_configs_credit)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Nothing to do'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_invalid_index():
    """Current instance type missing from the allowed sizes: no action."""
    env_vars, alarm_cfg = get_vars("_invalid_index")
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, no_alarm, launch_configs_credit)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Current instance type not in allowed sizes! Current: t2.medium, Credit: t2.large, Standard: c4.large'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_missing_config():
    """Empty launch-config list: rec2 reports the config as missing."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, no_alarm, launch_configs_missing)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Config not found!'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_decrease():
    """Low-CPU alarm on a standard config: modify down to credit class."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, low_asg_cpu, launch_configs_standard)
    assert a.result['Action'] == 'MODIFY'
    assert a.result['Message'] == 'to_credit'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_standard_nominal_no_change():
    """Standard config with no alarms firing: nothing happens."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, no_alarm, launch_configs_standard)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Nothing to do'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_increase():
    """Low-credits alarm on a credit config: modify up to standard class."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, credit_low, launch_configs_credit)
    assert a.result['Action'] == 'MODIFY'
    assert a.result['Message'] == 'to_standard'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_increase_drag():
    """Credit-drag alarm on a credit config: modify up to standard class."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, drag_alarm, launch_configs_credit)
    assert a.result['Action'] == 'MODIFY'
    assert a.result['Message'] == 'to_standard'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_increase_drag_skipped_drag_disabled():
    """Drag alarm firing but drag handling disabled in vars: no action."""
    env_vars, alarm_cfg = get_vars('_drag_disabled')
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, drag_alarm, launch_configs_credit)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Nothing to do'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_system_disabled():
    """Whole system disabled in vars: modification is refused."""
    env_vars, alarm_cfg = get_vars('_system_disabled')
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details, drag_alarm, launch_configs_credit)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Launch Config Modification disabled'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_decrease_cooldown_invalid():
    """Malformed cooldown tag value blocks the downscale."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details_cooldown_invalid, low_asg_cpu, launch_configs_standard)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Cooldown threshold invalidation'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_decrease_cooldown_not_met():
    """Cooldown window not yet elapsed: the downscale is blocked."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details_cooldown_not_met, low_asg_cpu, launch_configs_standard)
    assert a.result['Action'] == 'NO_ACTION'
    assert a.result['Message'] == 'Cooldown threshold invalidation'
@freeze_time("2016-01-01 19:30:00", tz_offset=0)
def test_increase_with_asg_size_bump():
    """Upscale of a group already at MaxSize records the planned AWS call."""
    env_vars, alarm_cfg = get_vars()
    a.testing_startup(env_vars, alarm_cfg,
                      asg_details_at_capacity, credit_low, launch_configs_credit)
    assert a.result['Action'] == 'MODIFY'
    assert a.result['Message'] == 'to_standard'
    assert a.result['AWS'][-1] == 'AWS: EXECUTE disabled - apply launch config 3/WebAppASGLaunchConfigC-ReC2-Fri-Jan-01-19.30.00-UTC-2016'
| 37.391863
| 187
| 0.519757
| 1,396
| 17,462
| 6.343123
| 0.163324
| 0.032524
| 0.036702
| 0.011745
| 0.897685
| 0.885263
| 0.885263
| 0.885263
| 0.885263
| 0.869226
| 0
| 0.06114
| 0.347154
| 17,462
| 466
| 188
| 37.472103
| 0.715614
| 0.002405
| 0
| 0.763514
| 0
| 0.018018
| 0.387517
| 0.109669
| 0
| 0
| 0
| 0
| 0.056306
| 1
| 0.029279
| false
| 0
| 0.011261
| 0
| 0.042793
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d2b0ae2ca8d14bd8409a713410b5dd0de6860d6
| 15,699
|
py
|
Python
|
pyjak/convert.py
|
Miniwa/bink
|
12c7c3e8b25f5f5b594b8ed317d589644585418b
|
[
"MIT"
] | 4
|
2019-01-16T09:31:59.000Z
|
2020-02-06T20:49:14.000Z
|
pyjak/convert.py
|
miniwa/pyjak
|
12c7c3e8b25f5f5b594b8ed317d589644585418b
|
[
"MIT"
] | null | null | null |
pyjak/convert.py
|
miniwa/pyjak
|
12c7c3e8b25f5f5b594b8ed317d589644585418b
|
[
"MIT"
] | null | null | null |
import struct
import re
from pyjak.order import ByteOrder
class BinaryError(Exception):
    """Root of the exception hierarchy for binary-conversion failures."""
class BinarySizeMismatch(BinaryError):
    """Size disagreement between expected input and output.

    Example: attempting to pack the number 1000 into a 1-byte integer.
    """
def parse_int8(_bytes):
    """
    Parses a given byte array as a signed 1 byte integer.
    Args:
        _bytes: The byte array to be parsed.
    Raises:
        TypeError: If byte array is not of type 'bytes' or 'bytearray'.
        BinarySizeMismatch: If length of byte array is not equal to 1.
        BinaryError: If an unexpected conversion error occurs.
    Returns:
        The integer that was parsed.
    """
    # Fix: docstring previously said "conversation error" (typo) —
    # now consistent with every sibling parser.
    return _parse_from_format(_INT8_FORMAT, _bytes)
def parse_uint8(_bytes):
    """Decode an unsigned 1-byte integer from *_bytes*.

    Raises TypeError for a non-bytes argument, BinarySizeMismatch when
    len(_bytes) != 1, and BinaryError for any other conversion failure.
    Returns the decoded integer.
    """
    return _parse_from_format(_UINT8_FORMAT, _bytes)
def parse_int16(_bytes, order=None):
    """Decode a signed 2-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 2,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_INT16_FORMAT, _bytes, order)
def parse_uint16(_bytes, order=None):
    """Decode an unsigned 2-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 2,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_UINT16_FORMAT, _bytes, order)
def parse_int32(_bytes, order=None):
    """Decode a signed 4-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 4,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_INT32_FORMAT, _bytes, order)
def parse_uint32(_bytes, order=None):
    """Decode an unsigned 4-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 4,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_UINT32_FORMAT, _bytes, order)
def parse_int64(_bytes, order=None):
    """Decode a signed 8-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 8,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_INT64_FORMAT, _bytes, order)
def parse_uint64(_bytes, order=None):
    """Decode an unsigned 8-byte integer from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 8,
    and BinaryError for any other conversion failure.  Returns the
    decoded integer.
    """
    return _parse_from_format(_UINT64_FORMAT, _bytes, order)
def parse_float32(_bytes, order=None):
    """Decode a 4-byte float from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 4,
    and BinaryError for any other conversion failure.  Returns the
    decoded float.
    """
    return _parse_from_format(_FLOAT32_FORMAT, _bytes, order)
def parse_float64(_bytes, order=None):
    """Decode an 8-byte float from *_bytes*.

    *order* selects the byte order (native when None).  Raises TypeError
    for a non-bytes argument, BinarySizeMismatch when len(_bytes) != 8,
    and BinaryError for any other conversion failure.  Returns the
    decoded float.
    """
    return _parse_from_format(_FLOAT64_FORMAT, _bytes, order)
def parse_bool(_bytes):
    """
    Parses a given byte array as a uint8, then converts that value to a bool.
    Args:
        _bytes: The byte array to be parsed.
    Raises:
        TypeError: If byte array is not of type 'bytes' or 'bytearray'.
        BinarySizeMismatch: If length of byte array is not equal to 1.
        BinaryError: If an unexpected conversion error occurs.
    Returns:
        False if the parsed uint8 equals 0.
        True otherwise.
    """
    # Fixes two docstring defects: it documented an 'order' parameter this
    # function does not accept, and claimed the expected length was 2 —
    # a uint8 occupies exactly 1 byte.
    return parse_uint8(_bytes) != 0
def dump_int8(_int):
    """Encode *_int* as a signed 1-byte integer.

    Raises TypeError when *_int* is not an int, BinarySizeMismatch when
    the value does not fit in a signed byte, and BinaryError for any
    other conversion failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_INT8_FORMAT, _int)
def dump_uint8(_int):
    """Encode *_int* as an unsigned 1-byte integer.

    Raises TypeError when *_int* is not an int, BinarySizeMismatch when
    the value does not fit in an unsigned byte, and BinaryError for any
    other conversion failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_UINT8_FORMAT, _int)
def dump_int16(_int, order=None):
    """Encode *_int* as a signed 2-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_INT16_FORMAT, _int, order)
def dump_uint16(_int, order=None):
    """Encode *_int* as an unsigned 2-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_UINT16_FORMAT, _int, order)
def dump_int32(_int, order=None):
    """Encode *_int* as a signed 4-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_INT32_FORMAT, _int, order)
def dump_uint32(_int, order=None):
    """Encode *_int* as an unsigned 4-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_UINT32_FORMAT, _int, order)
def dump_int64(_int, order=None):
    """Encode *_int* as a signed 8-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_INT64_FORMAT, _int, order)
def dump_uint64(_int, order=None):
    """Encode *_int* as an unsigned 8-byte integer.

    *order* selects the output byte order (native when None).  Raises
    TypeError when *_int* is not an int, BinarySizeMismatch when the
    value does not fit, and BinaryError for any other conversion
    failure.  Returns the encoded byte array.
    """
    return _dump_from_format(_UINT64_FORMAT, _int, order)
def dump_float32(_float, order=None):
    """
    Serializes a given float as a 4 byte float in binary form.
    Args:
        _float: The float to be serialized.
        order: The byte order of the returned byte array. Defaults to native.
    Raises:
        TypeError: If _float is not of type 'float'.
        BinarySizeMismatch: If value is too small or too big to be held by
            a 4 byte float.
        BinaryError: If an unexpected conversion error occurs.
    Returns:
        A byte array containing the serialized float.
    """
    # Fix: the docstring described _float as "The integer to be
    # serialized"; this function serializes a float.
    return _dump_from_format(_FLOAT32_FORMAT, _float, order)
def dump_float64(_float, order=None):
    """
    Serializes a given float as an 8 byte float in binary form.
    Args:
        _float: The float to be serialized.
        order: The byte order of the returned byte array. Defaults to native.
    Raises:
        TypeError: If _float is not of type 'float'.
        BinarySizeMismatch: If value is too small or too big to be held by
            an 8 byte float.
        BinaryError: If an unexpected conversion error occurs.
    Returns:
        A byte array containing the serialized float.
    """
    # Fix: the docstring described _float as "The integer to be
    # serialized"; this function serializes a float.
    return _dump_from_format(_FLOAT64_FORMAT, _float, order)
def dump_bool(_bool):
    """Encode *_bool* as an unsigned 1-byte integer: 1 for True, 0 for False.

    Raises TypeError when *_bool* is not a bool and BinaryError for any
    other conversion failure.  Returns the encoded byte array.
    """
    if not isinstance(_bool, bool):
        raise TypeError(
            "Expected object of bool-like type, not '{0}'."
            .format(type(_bool).__name__))
    return dump_uint8(1 if _bool else 0)
# struct format characters for each supported fixed-width type.
_INT8_FORMAT = "b"
_UINT8_FORMAT = "B"
_INT16_FORMAT = "h"
_UINT16_FORMAT = "H"
_INT32_FORMAT = "i"
_UINT32_FORMAT = "I"
_INT64_FORMAT = "q"
_UINT64_FORMAT = "Q"
_FLOAT32_FORMAT = "f"
_FLOAT64_FORMAT = "d"
# Fragments of struct.error messages used below to classify failures.
# NOTE: fragile by design — tied to CPython's exact error wording.
_STRUCT_ARG_OOR1 = "argument out of range"
_STRUCT_ARG_OOR2 = re.compile(
    "<= number <=")
_STRUCT_ARG_TOO_LARGE = "int too large to convert"
_STRUCT_TYPE_MISMATCH_REGEX = re.compile(
    "required argument is not (an|a) (integer|float)")
def _parse_from_format(_format, _bytes, order=None):
    """Unpack a single value from *_bytes* using struct format *_format*.

    Args:
        _format: A one-character struct format (e.g. _INT16_FORMAT).
        _bytes: The byte array to decode.
        order: Optional ByteOrder; native order when None.

    Raises:
        TypeError: *_bytes* is not bytes-like.
        BinarySizeMismatch: len(*_bytes*) does not match the format size.
        BinaryError: Any other struct failure.

    Returns:
        The single unpacked value.
    """
    fixed_format = _format_with_order(_format, order)
    try:
        _values = struct.unpack(fixed_format, _bytes)
        return _values[0]
    except TypeError:
        # Re-raise with a friendlier message; suppress the noisy struct
        # context (the original bound the exception but never used it).
        raise TypeError(
            "Expected object of bytes-like type, not '{0}'."
            .format(type(_bytes).__name__)) from None
    except struct.error as e:
        calced_size = struct.calcsize(fixed_format)
        if calced_size != len(_bytes):
            # Chain the cause for consistency with the BinaryError branch.
            raise BinarySizeMismatch(
                "Length of byte array is {0}, expected {1}."
                .format(len(_bytes), calced_size)) from e
        else:
            raise BinaryError(
                "Could not parse bytes {0}.".format(_bytes)) from e
def _dump_from_format(_format, value, order=None):
    """Pack *value* with struct format *_format*, translating struct's
    errors into this package's exception types (TypeError for wrong
    argument type, BinarySizeMismatch for out-of-range values,
    BinaryError otherwise).
    """
    fixed_format = _format_with_order(_format, order)
    try:
        return struct.pack(fixed_format, value)
    except struct.error as e:
        # struct.error carries no error code, so classification has to
        # match CPython's message text (see the _STRUCT_* constants).
        msg = str(e)
        # Hack to check if error was caused by a type mismatch.
        if _STRUCT_TYPE_MISMATCH_REGEX.match(msg):
            raise TypeError(
                "Expected object of number-like type, not '{0}'."
                .format(type(value).__name__))
        # Hack to check if error was caused by argument out of bounds.
        elif (msg == _STRUCT_ARG_OOR1 or _STRUCT_ARG_OOR2.search(msg) or
                msg == _STRUCT_ARG_TOO_LARGE):
            raise _mismatch(fixed_format, value)
        else:
            raise BinaryError(
                "Could not dump number {0}.".format(value)) from e
    except OverflowError as e:
        # Some out-of-range values raise OverflowError rather than
        # struct.error; treat them as a size mismatch as well.
        raise _mismatch(fixed_format, value)
def _format_with_order(_format, order=None):
    """Prefix *_format* with a struct byte-order character ("<" or ">").

    NOTE(review): any order other than LITTLE — including NATIVE, the
    default — falls through to ">" (big-endian).  That is only correct
    if ByteOrder.NATIVE resolves to LITTLE/BIG per the host; confirm
    against pyjak.order (struct's own native prefixes would be "@"/"=").
    """
    if order is None:
        order = ByteOrder.NATIVE
    order_char = "<" if order == ByteOrder.LITTLE else ">"
    return order_char + _format
def _mismatch(_format, value):
    """Build (not raise) a BinarySizeMismatch saying *value* cannot be
    represented by struct format *_format*."""
    needed = struct.calcsize(_format)
    return BinarySizeMismatch(
        "Number {0} requires a different sign or more than "
        "{1} bytes to store.".format(value, needed))
| 34.128261
| 78
| 0.663227
| 2,169
| 15,699
| 4.660212
| 0.077916
| 0.064998
| 0.02503
| 0.030471
| 0.807677
| 0.742778
| 0.730214
| 0.723091
| 0.700435
| 0.700435
| 0
| 0.013918
| 0.27231
| 15,699
| 459
| 79
| 34.202614
| 0.870886
| 0.616855
| 0
| 0.145299
| 0
| 0
| 0.086282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.017094
| 0.025641
| 0
| 0.487179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d3e9ac396da646da74273df6700609191512484
| 532
|
py
|
Python
|
gui_elements/cell_patterns.py
|
wrenzenzen/game-of-life-simulation
|
85da6515ea173ca7e1014020da581a8a8a220efe
|
[
"MIT"
] | null | null | null |
gui_elements/cell_patterns.py
|
wrenzenzen/game-of-life-simulation
|
85da6515ea173ca7e1014020da581a8a8a220efe
|
[
"MIT"
] | null | null | null |
gui_elements/cell_patterns.py
|
wrenzenzen/game-of-life-simulation
|
85da6515ea173ca7e1014020da581a8a8a220efe
|
[
"MIT"
] | null | null | null |
# Pattern name -> 2-D grid of cell states (1 = alive, 0 = dead) used to
# stamp named patterns onto the board.  Names match the classic Conway
# Game-of-Life patterns (Block/Beehive/Loaf/Boat/Tub are still lifes,
# Glider is a spaceship).
cell_patterns = {
    "Single" : [[1]],
    "Block" : [
        [1, 1],
        [1, 1],
    ],
    "Beehive" : [
        [0, 1, 1, 0],
        [1, 0, 0, 1],
        [0, 1, 1, 0],
    ],
    "Loaf" : [
        [0, 1, 1, 0],
        [1, 0, 0, 1],
        [0, 1, 0, 1],
        [0, 0, 1, 0],
    ],
    "Boat": [
        [1, 1, 0],
        [1, 0, 1],
        [0, 1, 0],
    ],
    "Tub": [
        [0, 1, 0],
        [1, 0, 1],
        [0, 1, 0],
    ],
    "Glider": [
        [0, 0, 1],
        [1, 0, 1],
        [0, 1, 1],
    ],
}
| 16.121212
| 21
| 0.203008
| 69
| 532
| 1.550725
| 0.15942
| 0.373832
| 0.420561
| 0.411215
| 0.495327
| 0.476636
| 0.401869
| 0.336449
| 0.186916
| 0.186916
| 0
| 0.24
| 0.530075
| 532
| 33
| 22
| 16.121212
| 0.188
| 0
| 0
| 0.575758
| 0
| 0
| 0.065666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d5301e7f2465267ff1517fee77e9caaf2dc11e1
| 2,574
|
py
|
Python
|
tests/structures/test_stack.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/structures/test_stack.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/structures/test_stack.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
"""Test for stack implementation."""
import pytest
from utils.structures.stack import Stack, StackEmptyError
# Single Stack instance shared by ALL tests in this module: the tests
# are order-dependent — each relies on the state the previous one left.
stack = Stack()
def test_create_empty_stack():
    """A new stack renders as empty, has length 0, and raises on access."""
    assert str(stack) == '[]'
    assert repr(stack) == 'Stack([])'
    assert stack.length == 0
    assert stack.isEmpty is True
    with pytest.raises(StackEmptyError):
        stack.pop()
    # head/tail are attribute accesses — presumably properties that
    # raise on an empty stack; confirm in utils.structures.stack.
    with pytest.raises(StackEmptyError):
        stack.head
    with pytest.raises(StackEmptyError):
        stack.tail
def test_push_to_empty_stack():
    """First push: head and tail are both the single item."""
    stack.push(-2321.21)
    assert str(stack) == '[-2321.21]'
    assert repr(stack) == 'Stack([-2321.21])'
    assert stack.length == 1
    assert stack.isEmpty is False
    assert stack.head == -2321.21
    assert stack.tail == -2321.21
def test_push_second_item_to_stack():
    """Second push (depends on previous test): head moves, tail stays."""
    stack.push(True)
    assert str(stack) == '[-2321.21, True]'
    assert repr(stack) == 'Stack([-2321.21, True])'
    assert stack.length == 2
    assert stack.isEmpty is False
    assert stack.head is True
    assert stack.tail == -2321.21
def test_push_third_item_to_stack():
    """Third push (depends on previous test): an empty tuple as item."""
    stack.push(())
    assert str(stack) == '[-2321.21, True, ()]'
    assert repr(stack) == 'Stack([-2321.21, True, ()])'
    assert stack.length == 3
    assert stack.isEmpty is False
    assert stack.head == ()
    assert stack.tail == -2321.21
def test_push_fourth_item_to_stack():
    """Fourth push (depends on previous test): an empty dict as item."""
    stack.push({})
    assert str(stack) == '[-2321.21, True, (), {}]'
    assert repr(stack) == 'Stack([-2321.21, True, (), {}])'
    assert stack.length == 4
    assert stack.isEmpty is False
    assert stack.head == {}
    assert stack.tail == -2321.21
def test_pop_item_from_stack():
    """Pop returns the most recently pushed item (LIFO)."""
    assert stack.pop() == {}
    assert str(stack) == '[-2321.21, True, ()]'
    assert repr(stack) == 'Stack([-2321.21, True, ()])'
    assert stack.length == 3
    assert stack.isEmpty is False
    assert stack.head == ()
    assert stack.tail == -2321.21
def test_pop_item_again_from_stack():
    """A second pop keeps unwinding in LIFO order."""
    assert stack.pop() == ()
    assert str(stack) == '[-2321.21, True]'
    assert repr(stack) == 'Stack([-2321.21, True])'
    assert stack.length == 2
    assert stack.isEmpty is False
    assert stack.head is True
    assert stack.tail == -2321.21
def test_clear_stack():
    """clear() restores the freshly-created (empty) behavior."""
    stack.clear()
    assert str(stack) == '[]'
    assert repr(stack) == 'Stack([])'
    assert stack.length == 0
    assert stack.isEmpty is True
    with pytest.raises(StackEmptyError):
        stack.pop()
    with pytest.raises(StackEmptyError):
        stack.head
    with pytest.raises(StackEmptyError):
        stack.tail
| 22
| 59
| 0.625486
| 335
| 2,574
| 4.707463
| 0.122388
| 0.209258
| 0.083703
| 0.095117
| 0.854153
| 0.828789
| 0.812302
| 0.812302
| 0.761573
| 0.761573
| 0
| 0.063936
| 0.222222
| 2,574
| 116
| 60
| 22.189655
| 0.723776
| 0.011655
| 0
| 0.648649
| 0
| 0
| 0.108747
| 0
| 0
| 0
| 0
| 0
| 0.621622
| 1
| 0.108108
| false
| 0
| 0.027027
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8d80235e71ba131859b9c73237c5a228c5242c09
| 158
|
py
|
Python
|
logic/analyzer.py
|
makeitraina/RapPin
|
45385373dc11c99404b3128b0251d1794e748705
|
[
"MIT"
] | null | null | null |
logic/analyzer.py
|
makeitraina/RapPin
|
45385373dc11c99404b3128b0251d1794e748705
|
[
"MIT"
] | null | null | null |
logic/analyzer.py
|
makeitraina/RapPin
|
45385373dc11c99404b3128b0251d1794e748705
|
[
"MIT"
] | null | null | null |
def process(songs):
    """Filter *songs* down to the popular entries.

    Keeps every ``(song_url, song_data)`` pair whose data shows strictly
    more than 200 likes or strictly more than 150000 views.

    :param songs: dict mapping song URL -> dict with at least the numeric
        keys ``'likes'`` and ``'views'``.
    :return: a new dict containing only the popular songs.
    """
    # Bug fix: .iteritems() is Python-2-only (AttributeError on Python 3);
    # .items() behaves identically here on both versions. The implicit
    # continuation inside the braces also replaces the fragile trailing
    # backslash of the original.
    return {song_url: song_data for song_url, song_data in songs.items()
            if song_data['likes'] > 200 or song_data['views'] > 150000}
| 39.5
| 75
| 0.727848
| 26
| 158
| 4.192308
| 0.615385
| 0.293578
| 0.201835
| 0.275229
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066176
| 0.139241
| 158
| 3
| 76
| 52.666667
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0.063291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
8d830c6f96e21617d600247cee4ede3ad972c12d
| 158
|
py
|
Python
|
uncompress/__init__.py
|
StephanGocht/uncompress
|
af62eb2672cc0854fd430ce325bddfb895d47882
|
[
"MIT"
] | null | null | null |
uncompress/__init__.py
|
StephanGocht/uncompress
|
af62eb2672cc0854fd430ce325bddfb895d47882
|
[
"MIT"
] | null | null | null |
uncompress/__init__.py
|
StephanGocht/uncompress
|
af62eb2672cc0854fd430ce325bddfb895d47882
|
[
"MIT"
] | null | null | null |
from uncompress.archive import UnsupportedArchive
from uncompress.archive import ArchiveOfCompressedFiles as Archive
from uncompress.utils import run_cmd_main
| 52.666667
| 66
| 0.898734
| 19
| 158
| 7.368421
| 0.578947
| 0.3
| 0.3
| 0.385714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082278
| 158
| 3
| 67
| 52.666667
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
573e83bb1d070bde93584c0b181a575b6686e9e3
| 122
|
py
|
Python
|
up_enhsp/__init__.py
|
aiplan4eu/up-enhsp
|
bcb4e95adf409e93f36e9c4035182be76bbd0719
|
[
"Apache-2.0"
] | null | null | null |
up_enhsp/__init__.py
|
aiplan4eu/up-enhsp
|
bcb4e95adf409e93f36e9c4035182be76bbd0719
|
[
"Apache-2.0"
] | null | null | null |
up_enhsp/__init__.py
|
aiplan4eu/up-enhsp
|
bcb4e95adf409e93f36e9c4035182be76bbd0719
|
[
"Apache-2.0"
] | null | null | null |
from .enhsp_planner import ENHSPsolver
from .enhsp_planner import ENHSPSatSolver
from .enhsp_planner import ENHSPOptSolver
| 40.666667
| 41
| 0.885246
| 15
| 122
| 7
| 0.466667
| 0.257143
| 0.457143
| 0.628571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090164
| 122
| 3
| 42
| 40.666667
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9395e559a889f658fa9ae896179cb269313a1ef4
| 20,238
|
py
|
Python
|
checkers_ai/model.py
|
chrislarson1/checkers.ai
|
cbd7014d1d7bedc512026366d821f321b8863db3
|
[
"MIT"
] | 2
|
2019-04-06T00:07:02.000Z
|
2019-10-02T13:56:31.000Z
|
checkers_ai/model.py
|
chrislarson1/checkers.ai
|
cbd7014d1d7bedc512026366d821f321b8863db3
|
[
"MIT"
] | null | null | null |
checkers_ai/model.py
|
chrislarson1/checkers.ai
|
cbd7014d1d7bedc512026366d821f321b8863db3
|
[
"MIT"
] | 2
|
2019-02-10T08:44:07.000Z
|
2019-04-15T07:39:31.000Z
|
import tensorflow as tf
from tensorflow.python.framework.graph_util import convert_variables_to_constants
from checkers_ai.config import *
# Shared weight initializer used by every tf.get_variable call below
# (Xavier/Glorot, from the TF1 contrib namespace).
PARAM_INIT = tf.contrib.layers.xavier_initializer()
# Activation applied to every inception-branch convolution output.
INCEP_ACT = tf.nn.tanh
class Model:
    """Base class for the TF1 graph models in this file.

    Provides variable-scope graph construction (:meth:`_constructor`),
    per-scope variable initialisation and checkpoint restore
    (:meth:`init`), and checkpoint / frozen-graph export helpers.
    Subclasses override :meth:`_build_graph` and append the scopes they
    create to ``self.scopes``.
    """
    # Silence TensorFlow C++ logging at class-definition time.
    # NOTE(review): `os` is not imported in this chunk — presumably
    # re-exported by `from checkers_ai.config import *`; confirm.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    # Template of per-model defaults; not read anywhere in this chunk.
    _MODEL_DEF_TEMPLATE = {'batch_size': 1, 'dropout': False, 'init': None}
    def __init__(self, name, load_dir, trainable, device):
        """Store model identity and paths; no graph is built here.

        :param name: variable-scope name for the whole model.
        :param load_dir: checkpoint directory to restore from (or None).
        :param trainable: whether variables are created trainable.
        :param device: TF device string handed to tf.device().
        """
        self.name = name
        self.trainable = trainable
        self.load_dir = load_dir
        # POLICY_PATH comes from checkers_ai.config (star import).
        self.write_dir = os.path.join(POLICY_PATH, 'tmp/')
        self._vars = []
        self._ops = []
        self.scopes = []
        self._device = device
    def _build_graph(self):
        # Overridden by subclasses; the base builds nothing.
        pass
    def _constructor(self):
        """Build the subclass graph inside this model's variable scope,
        pinned to ``self._device`` when device placement succeeds."""
        with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE):
            try:
                with tf.device(self._device):
                    self._build_graph()
            # NOTE(review): presumably guards against an unusable device
            # spec; falls back to default placement — confirm intent.
            except AttributeError:
                self._build_graph()
    def init(self, session, load_dir=None):
        """Initialise (and optionally restore) variables scope by scope.

        :param session: live tf.Session to run the init/restore ops in.
        :param load_dir: overrides the constructor's load_dir when given.
        """
        load_dir = load_dir or self.load_dir
        for i, layer_scope in enumerate(self.scopes):
            vars_to_load = [v for v in tf.global_variables() if
                            v.name.startswith(layer_scope)]
            self._vars.extend(vars_to_load)
            session.run(tf.variables_initializer(var_list=vars_to_load))
            # Restore overwrites the fresh initialisation just performed.
            if load_dir and vars_to_load:
                saver = tf.train.Saver(vars_to_load)
                saver.restore(session, os.path.join(load_dir, self.name))
    def save_params(self, session, step=None):
        """Checkpoint every variable collected so far to ``write_dir``."""
        assert self.write_dir
        print('Saving {0} to {1}'.format(self.name, self.write_dir))
        # set() de-duplicates variables collected across multiple scopes.
        saver = tf.train.Saver(list(set(self._vars)))
        saver.save(session, os.path.join(self.write_dir, self.name), global_step=step)
    def save_graph(self, session:tf.Session, fname:str, var_names:list):
        """Export a frozen graph (variables folded into constants) as both
        binary ``.pb`` and text ``.txt`` protobufs."""
        frozen_graph = convert_variables_to_constants(session, session.graph_def, var_names)
        tf.train.write_graph(frozen_graph, self.write_dir, fname + '.pb', as_text=False)
        tf.train.write_graph(frozen_graph, self.write_dir, fname + '.txt', as_text=True)
class Policy(Model):
    """Move-selection ("policy") network.

    Inception-style convolutions over an (None, 8, 4) int32 board state,
    a stack of highway layers with a skip connection, and a 128-way
    softmax over actions. The TF1 graph is built during construction via
    Model._constructor and variables are initialised/restored right away.
    """
    def __init__(self,
                 session:tf.Session,
                 load_dir=None,
                 trainable=False,
                 selection='greedy',
                 device='GPU'):
        """Create placeholders, build the graph, and init variables.

        NOTE(review): ``selection`` is stored but never read in this
        class — presumably consumed by callers; confirm before removal.
        """
        super().__init__(name="Policy",
                         load_dir=load_dir,
                         trainable=trainable,
                         device=device)
        self.session = session
        self.state = tf.placeholder(dtype=tf.int32,
                                    shape=(None, 8, 4),
                                    name="state")
        self.action_label = tf.placeholder(dtype=tf.int32,
                                           shape=(None, 128),
                                           name="action")
        self.selection = selection
        self._constructor()
        self.init(self.session)
    @property
    def vars(self):
        # All trainable variables whose name contains this model's name
        # (case-insensitive), i.e. everything created under its scope.
        return [v for v in tf.trainable_variables() if
                v.name.lower().__contains__(self.name.lower())]
    def _build_graph(self):
        """Assemble inputs, inception/highway/output layers, accuracy
        metrics, and (when trainable) losses and optimizer ops."""
        #################### Graph inputs ####################
        self.batch_size = tf.placeholder(shape=(), dtype=tf.float32, name="batch_size")
        # Dropout only when training; inference pins keep_prob to 1.
        self.keep_prob = tf.placeholder(shape=(),
                                        dtype=tf.float32,
                                        name="keep_prob") if self.trainable \
            else tf.constant(value=1,
                             dtype=tf.float32,
                             name="keep_prob")
        self.lr = tf.placeholder(shape=(), dtype=tf.float32, name="learning_rate")
        self.adv = tf.placeholder(shape=(None), dtype=tf.float32, name="advantage")
        ##################### Data layer #####################
        X = tf.expand_dims(tf.cast(self.state, tf.float32), axis=3)
        ###################### Inception #####################
        with tf.variable_scope("INCEPTION", reuse=False) as scope:
            self.scopes.append(scope.name)
            # KERNEL_SIZES / N_KERNELS come from checkers_ai.config.
            for i, (ksizes, nkernels) in enumerate(zip(KERNEL_SIZES, N_KERNELS)):
                conv = []
                for ks, nk in zip(ksizes, nkernels):
                    w = tf.get_variable(shape=[ks[0], ks[1], X.shape[-1], nk],
                                        initializer=PARAM_INIT,
                                        trainable=self.trainable,
                                        name='incep_{0}_w_K{1}{2}'.format(i + 1, ks[0], ks[1]))
                    b = tf.get_variable(shape=[nk],
                                        initializer=PARAM_INIT,
                                        trainable=self.trainable,
                                        name='incep_{0}_b_K{1}{2}'.format(i + 1, ks[0], ks[1]))
                    c = tf.nn.conv2d(X, w, strides=[1, 1, 1, 1], padding='SAME')
                    z = INCEP_ACT(c + b)
                    conv.append(z)
                # Concatenate all parallel branches along channels.
                X = tf.concat(conv, axis=3)
                X = tf.nn.dropout(X, keep_prob=self.keep_prob)
        ####################### Flatten ######################
        conv_out = tf.contrib.layers.flatten(inputs=X)
        X = conv_out
        hwy_size = X.shape[-1]
        ####################### Highway ######################
        with tf.variable_scope("HIGHWAY", reuse=False) as scope:
            self.scopes.append(scope.name)
            for i in range(HWY_LAYERS):
                with tf.variable_scope('HWY_{}'.format(i)):
                    wh = tf.get_variable(shape=[hwy_size, hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="hwy_w_{}".format(i + 1))
                    bh = tf.get_variable(shape=[hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="hwy_b_{}".format(i + 1))
                    wt = tf.get_variable(shape=[hwy_size, hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="T_w_{}".format(i + 1))
                    # Highway mix: gate T blends transform H with input X.
                    T = tf.sigmoid(tf.matmul(X, wt) + HWY_BIAS)
                    H = tf.nn.relu(tf.matmul(X, wh) + bh)
                    X = T * H + (1.0 - T) * X
                    X = tf.nn.dropout(X, keep_prob=self.keep_prob)
        # Skip connection from the flattened conv features.
        X = tf.concat([X, conv_out], axis=1)
        ####################### Output #######################
        with tf.variable_scope("OUTPUT", reuse=False) as scope:
            self.scopes.append(scope.name)
            w = tf.get_variable(shape=[X.shape[-1], 128],
                                initializer=PARAM_INIT,
                                trainable=self.trainable,
                                dtype=tf.float32,
                                name="w_logit")
            b = tf.get_variable(shape=[128],
                                initializer=PARAM_INIT,
                                trainable=self.trainable,
                                dtype=tf.float32,
                                name="b_logit")
            self.logits = tf.add(tf.matmul(X, w), b, name="policy_logits")
            self.softmax = tf.nn.softmax(logits=self.logits, axis=1, name="policy_softmax")
            self.action = tf.argmax(input=self.softmax, axis=1, name="action")
            self.probs, self.actions = tf.nn.top_k(input=self.softmax, k=128, sorted=True)
        ####################### Metrics ######################
        with tf.variable_scope("METRICS", reuse=False) as scope:
            self.scopes.append(scope.name)
            self.top_1_acc = tf.metrics.accuracy(labels=tf.argmax(self.action_label, axis=1),
                                                 predictions=self.action,
                                                 name="accuracy")
            self.top_2_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 2), tf.float32))
            self.top_3_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 3), tf.float32))
            self.top_5_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 5), tf.float32))
            self.top_10_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 10), tf.float32))
        ###################### Optimizer #####################
        if self.trainable:
            with tf.variable_scope("LOSS", reuse=False) as scope:
                self.scopes.append(scope.name)
                self.step = tf.Variable(0, trainable=False)
                # L2 regularisation over every weight variable ("w_").
                self.reg_loss = LAMBDA * tf.add_n(
                    [tf.nn.l2_loss(v) for v in tf.global_variables() if v.name.__contains__("w_")])
                self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
                    labels=self.action_label,
                    logits=self.logits,
                    name="cross_entropy")
                self.loss1 = tf.add(self.reg_loss, self.cross_entropy, name="loss1")
                self.optimizer1 = tf.train.AdamOptimizer(learning_rate=self.lr,
                                                         name="optimizer_pretrain")
                self.grad_update1 = self.optimizer1.minimize(
                    loss=self.loss1,
                    var_list=tf.trainable_variables(),
                    global_step=self.step,
                    name="grad_update")
                # Policy-gradient term: advantage-weighted log-probs.
                self.gradlogprob_adv = self.adv * tf.log(self.softmax)
                self.pg_loss = tf.reduce_mean(input_tensor=-self.gradlogprob_adv,
                                              axis=1,
                                              name="pg_loss")
                self.optimizer2 = tf.train.RMSPropOptimizer(learning_rate=self.lr,
                                                            decay=0.99,
                                                            epsilon=1e-5)
                # NOTE(review): apply_gradients expects a list of
                # (gradient, variable) pairs; [self.pg_loss, self.vars]
                # does not have that shape — confirm against the TF1
                # Optimizer.apply_gradients API.
                self.policy_update = self.optimizer2.apply_gradients(
                    grads_and_vars=[self.pg_loss, self.vars],
                    global_step=self.step)
class Value(Model):
    """Value network — a near line-for-line copy of :class:`Policy`.

    Identical architecture (inception + highway + 128-way softmax); the
    trainable tail differs: instead of an RMSProp policy update it only
    defines ``SFGE``, a score-function gradient estimator tensor.

    NOTE(review): output ops are still named "policy_logits" /
    "policy_softmax" — presumably a copy-paste leftover from Policy;
    confirm nothing looks these ops up by name before renaming.
    """
    def __init__(self,
                 session:tf.Session,
                 load_dir=None,
                 trainable=False,
                 selection='greedy',
                 device='GPU'):
        """Create placeholders, build the graph, and init variables."""
        super().__init__(name="VALUE", load_dir=load_dir, trainable=trainable, device=device)
        self.session = session
        self.state = tf.placeholder(dtype=tf.int32,
                                    shape=(None, 8, 4),
                                    name="state")
        self.action_label = tf.placeholder(dtype=tf.int32,
                                           shape=(None, 128),
                                           name="action")
        self.selection = selection
        self._constructor()
        self.init(self.session)
    @property
    def vars(self):
        # Trainable variables whose name contains "value" (case-folded).
        return [v for v in tf.trainable_variables() if
                v.name.lower().__contains__(self.name.lower())]
    def _build_graph(self):
        """Same layer stack as Policy._build_graph; see NOTEs there —
        only the trainable tail (SFGE) differs."""
        #################### Graph inputs ####################
        self.batch_size = tf.placeholder(shape=(), dtype=tf.float32, name="batch_size")
        # Dropout only when training; inference pins keep_prob to 1.
        self.keep_prob = tf.placeholder(shape=(),
                                        dtype=tf.float32,
                                        name="keep_prob") if self.trainable \
            else tf.constant(value=1,
                             dtype=tf.float32,
                             name="keep_prob")
        self.lr = tf.placeholder(shape=(), dtype=tf.float32, name="learning_rate")
        self.adv = tf.placeholder(shape=(None), dtype=tf.float32, name="advantage")
        ##################### Data layer #####################
        X = tf.expand_dims(tf.cast(self.state, tf.float32), axis=3)
        ###################### Inception #####################
        with tf.variable_scope("INCEPTION", reuse=False) as scope:
            self.scopes.append(scope.name)
            for i, (ksizes, nkernels) in enumerate(zip(KERNEL_SIZES, N_KERNELS)):
                conv = []
                for ks, nk in zip(ksizes, nkernels):
                    w = tf.get_variable(shape=[ks[0], ks[1], X.shape[-1], nk],
                                        initializer=PARAM_INIT,
                                        trainable=self.trainable,
                                        name='incep_{0}_w_K{1}{2}'.format(i + 1, ks[0], ks[1]))
                    b = tf.get_variable(shape=[nk],
                                        initializer=PARAM_INIT,
                                        trainable=self.trainable,
                                        name='incep_{0}_b_K{1}{2}'.format(i + 1, ks[0], ks[1]))
                    c = tf.nn.conv2d(X, w, strides=[1, 1, 1, 1], padding='SAME')
                    z = INCEP_ACT(c + b)
                    conv.append(z)
                X = tf.concat(conv, axis=3)
                X = tf.nn.dropout(X, keep_prob=self.keep_prob)
        ####################### Flatten ######################
        conv_out = tf.contrib.layers.flatten(inputs=X)
        X = conv_out
        hwy_size = X.shape[-1]
        ####################### Highway ######################
        with tf.variable_scope("HIGHWAY", reuse=False) as scope:
            self.scopes.append(scope.name)
            for i in range(HWY_LAYERS):
                with tf.variable_scope('HWY_{}'.format(i)):
                    wh = tf.get_variable(shape=[hwy_size, hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="hwy_w_{}".format(i + 1))
                    bh = tf.get_variable(shape=[hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="hwy_b_{}".format(i + 1))
                    wt = tf.get_variable(shape=[hwy_size, hwy_size],
                                         initializer=PARAM_INIT,
                                         trainable=self.trainable,
                                         dtype=tf.float32,
                                         name="T_w_{}".format(i + 1))
                    # Highway mix: gate T blends transform H with input X.
                    T = tf.sigmoid(tf.matmul(X, wt) + HWY_BIAS)
                    H = tf.nn.relu(tf.matmul(X, wh) + bh)
                    X = T * H + (1.0 - T) * X
                    X = tf.nn.dropout(X, keep_prob=self.keep_prob)
        # Skip connection from the flattened conv features.
        X = tf.concat([X, conv_out], axis=1)
        ####################### Output #######################
        with tf.variable_scope("OUTPUT", reuse=False) as scope:
            self.scopes.append(scope.name)
            w = tf.get_variable(shape=[X.shape[-1], 128],
                                initializer=PARAM_INIT,
                                trainable=self.trainable,
                                dtype=tf.float32,
                                name="w_logit")
            b = tf.get_variable(shape=[128],
                                initializer=PARAM_INIT,
                                trainable=self.trainable,
                                dtype=tf.float32,
                                name="b_logit")
            self.logits = tf.add(tf.matmul(X, w), b, name="policy_logits")
            self.softmax = tf.nn.softmax(logits=self.logits, axis=1, name="policy_softmax")
            self.action = tf.argmax(input=self.softmax, axis=1, name="action")
            self.probs, self.actions = tf.nn.top_k(input=self.softmax, k=128, sorted=True)
        ####################### Metrics ######################
        with tf.variable_scope("METRICS", reuse=False) as scope:
            self.scopes.append(scope.name)
            self.top_1_acc = tf.metrics.accuracy(labels=tf.argmax(self.action_label, axis=1),
                                                 predictions=self.action,
                                                 name="accuracy")
            self.top_2_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 2), tf.float32))
            self.top_3_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 3), tf.float32))
            self.top_5_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 5), tf.float32))
            self.top_10_acc = tf.reduce_mean(
                tf.cast(tf.nn.in_top_k(self.softmax, tf.argmax(self.action_label, axis=1), 10), tf.float32))
        ###################### Optimizer #####################
        if self.trainable:
            with tf.variable_scope("LOSS", reuse=False) as scope:
                self.scopes.append(scope.name)
                self.step = tf.Variable(0, trainable=False)
                # L2 regularisation over every weight variable ("w_").
                self.reg_loss = LAMBDA * tf.add_n(
                    [tf.nn.l2_loss(v) for v in tf.global_variables() if v.name.__contains__("w_")])
                self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
                    labels=self.action_label,
                    logits=self.logits,
                    name="cross_entropy")
                self.loss1 = tf.add(self.reg_loss, self.cross_entropy, name="loss1")
                self.optimizer1 = tf.train.AdamOptimizer(learning_rate=self.lr,
                                                         name="optimizer_pretrain")
                self.grad_update1 = self.optimizer1.minimize(
                    loss=self.loss1,
                    var_list=tf.trainable_variables(),
                    global_step=self.step,
                    name="grad_update")
                # Score-function gradient estimator; no update op here.
                self.neg_grad_log_prob_adv = self.adv * -tf.log(self.softmax)
                self.SFGE = tf.reduce_mean(input_tensor=self.neg_grad_log_prob_adv,
                                           axis=1,
                                           name="score_func_grad_estimator")
class A2CLoss:
    """Joint actor-critic loss ops built over a Policy and Value model.

    NOTE(review): references ``policy.adv``, ``policy.gradlogprob_adv``,
    ``policy.pg_loss`` and ``policy.step`` — the latter three only exist
    when the Policy was built with trainable=True; confirm callers
    guarantee that.
    """
    def __init__(self, policy, value):
        """Bind the two models and build the loss graph immediately."""
        self.policy = policy
        self.value = value
        self.__build_graph()
    def __build_graph(self):
        # Registered in both models' scope lists so their init() picks
        # up the variables created here.
        with tf.variable_scope("AC2_LOSS", reuse=False) as scope:
            self.policy.scopes.append(scope.name)
            self.value.scopes.append(scope.name)
            self.lrate = tf.placeholder(shape=(), dtype=tf.float32, name="lrate")
            self.rewards = tf.placeholder(shape=(None), dtype=tf.float32, name="rewards")
            self.baseline = tf.placeholder(shape=(None), dtype=tf.float32, name="value_estimate")
            # Advantage-weighted log-probabilities of the policy.
            self.gradlogp = self.policy.adv * tf.log(self.policy.softmax)
            self.logprob = tf.reduce_mean(input_tensor=-self.policy.gradlogprob_adv,
                                          axis=1,
                                          name="pg_loss")
            # Entropy bonus term over the policy distribution.
            self.policy_entropy = -tf.reduce_sum(
                self.policy.softmax * tf.log(self.policy.softmax), axis=1
            )
            self.optimizer = tf.train.RMSPropOptimizer(learning_rate=self.policy.lr,
                                                       decay=0.99,
                                                       epsilon=1e-5)
            # NOTE(review): apply_gradients expects (gradient, variable)
            # pairs; [pg_loss, vars] does not match that shape — confirm
            # against the TF1 Optimizer.apply_gradients API.
            self.policy_update = self.optimizer.apply_gradients(
                grads_and_vars=[self.policy.pg_loss, self.policy.vars],
                global_step=self.policy.step)
| 51.106061
| 108
| 0.476381
| 2,170
| 20,238
| 4.25576
| 0.110599
| 0.03216
| 0.034867
| 0.044829
| 0.823606
| 0.808446
| 0.778668
| 0.77477
| 0.752463
| 0.752463
| 0
| 0.019255
| 0.384129
| 20,238
| 395
| 109
| 51.235443
| 0.721678
| 0.008104
| 0
| 0.765579
| 0
| 0
| 0.036544
| 0.00129
| 0
| 0
| 0
| 0
| 0.002967
| 1
| 0.041543
| false
| 0.002967
| 0.008902
| 0.005935
| 0.071217
| 0.002967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
93d12555a8f0eedd6e6b5044e29854455dbd7d00
| 48
|
py
|
Python
|
almetro/__init__.py
|
arnour/almetro
|
7d00b1bb746b49dc9dd464395abdf4fe93f028fe
|
[
"Apache-2.0"
] | null | null | null |
almetro/__init__.py
|
arnour/almetro
|
7d00b1bb746b49dc9dd464395abdf4fe93f028fe
|
[
"Apache-2.0"
] | 1
|
2019-08-21T23:02:49.000Z
|
2019-08-21T23:02:49.000Z
|
almetro/__init__.py
|
arnour/almetro
|
7d00b1bb746b49dc9dd464395abdf4fe93f028fe
|
[
"Apache-2.0"
] | null | null | null |
from .al import Al
def new():
    """Construct and return a fresh :class:`Al` instance."""
    metro = Al()
    return metro
| 8
| 18
| 0.583333
| 8
| 48
| 3.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291667
| 48
| 5
| 19
| 9.6
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
9e0c2ff9a563a2f566575eacfa0b7c4d2ffc4f31
| 123
|
py
|
Python
|
tests/test_package_import.py
|
fluquid/html-to-etree
|
4c69a8900a2336d89dcb07e6b621078e4e294ebe
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_package_import.py
|
fluquid/html-to-etree
|
4c69a8900a2336d89dcb07e6b621078e4e294ebe
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_package_import.py
|
fluquid/html-to-etree
|
4c69a8900a2336d89dcb07e6b621078e4e294ebe
|
[
"BSD-3-Clause"
] | null | null | null |
import html_to_etree
def test_package_metadata():
    """The package must expose non-empty author and email metadata."""
    for attr_name in ('__author__', '__email__'):
        assert getattr(html_to_etree, attr_name)
| 17.571429
| 35
| 0.813008
| 18
| 123
| 4.666667
| 0.611111
| 0.214286
| 0.392857
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 123
| 6
| 36
| 20.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f54a17aa9b28b250570db1602e782c6c0a619cff
| 8,603
|
py
|
Python
|
tests/fixtures/artifact.py
|
mayankcodeops/csv_viewer
|
dc85355182c0cad3b4ec74f725ade9152d09260d
|
[
"MIT"
] | null | null | null |
tests/fixtures/artifact.py
|
mayankcodeops/csv_viewer
|
dc85355182c0cad3b4ec74f725ade9152d09260d
|
[
"MIT"
] | null | null | null |
tests/fixtures/artifact.py
|
mayankcodeops/csv_viewer
|
dc85355182c0cad3b4ec74f725ade9152d09260d
|
[
"MIT"
] | null | null | null |
# Flat tuple fixture: one artifact record (objectID '111'), all values as
# strings — presumably mirroring the CSV column order; confirm against
# the reader that consumes it.
artifact_fixture = ('111', 'False', '1979.486.245', '1973', 'False', '', '', 'The American Wing', 'Coin',
                    'Two-and-a-half-dollar Liberty Head Coin', '', '', '', '', '', 'Maker', '',
                    'Christian Gobrecht', '1785–1844', '',
                    'Gobrecht, Christian', '', '1785', '1844', '', 'https://www.wikidata.org/wiki/Q5109648'
                    , 'http://vocab.getty.edu/page/ulan/500077295', '1907', '1907', '1907', 'Gold',
                    'Dimensions unavailable',
                    "[{'elementName': 'Other', 'elementDescription': 'Object diameter', "
                    "'elementMeasurements': {'Diameter': 1.7}}]", 'Gift of Heinz L. Stoppelmann, 1979', '',
                    '', '', '', '', '', '', '', '', '', '', '', '', '', '2021-04-06T04:41:04.967Z',
                    'Metropolitan Museum of Art, New York, NY',
                    'https://www.metmuseum.org/art/collection/search/11', '', '', 'False', '', '1079',
                    'Maker', 'Christian Gobrecht', 'http://vocab.getty.edu/page/ulan/500077295',
                    'https://www.wikidata.org/wiki/Q5109648', '')
# Identical record to artifact_fixture except for the objectID ('112') —
# useful for multi-row tests.
artifact_fixture1 = ('112', 'False', '1979.486.245', '1973', 'False', '', '', 'The American Wing', 'Coin',
                     'Two-and-a-half-dollar Liberty Head Coin', '', '', '', '', '', 'Maker', '',
                     'Christian Gobrecht', '1785–1844', '',
                     'Gobrecht, Christian', '', '1785', '1844', '', 'https://www.wikidata.org/wiki/Q5109648'
                     , 'http://vocab.getty.edu/page/ulan/500077295', '1907', '1907', '1907', 'Gold',
                     'Dimensions unavailable',
                     "[{'elementName': 'Other', 'elementDescription': 'Object diameter', "
                     "'elementMeasurements': {'Diameter': 1.7}}]", 'Gift of Heinz L. Stoppelmann, 1979', '',
                     '', '', '', '', '', '', '', '', '', '', '', '', '', '2021-04-06T04:41:04.967Z',
                     'Metropolitan Museum of Art, New York, NY',
                     'https://www.metmuseum.org/art/collection/search/11', '', '', 'False', '', '1079',
                     'Maker', 'Christian Gobrecht', 'http://vocab.getty.edu/page/ulan/500077295',
                     'https://www.wikidata.org/wiki/Q5109648', '')
# Dict fixture shaped like a submitted artifact form (objectID '14');
# every value is a string, matching how form data arrives.
form_dict = {'objectID': '14', 'isHighlight': 'False', 'accessionNumber': '1979.486.5', 'accessionYear': '1979',
             'isPublicDomain': 'False', 'primaryImage': '', 'primaryImageSmall': '', 'department': 'The American Wing',
             'objectName': 'Coin', 'title': 'Five-dollar Liberty Head Coin', 'culture': '', 'period': '', 'dynasty'
             : '', 'reign': '', 'portfolio': '', 'artistRole': 'Maker', 'artistPrefix': '', 'artistDisplayName': 'Christian Gobrecht'
             , 'artistDisplayBio': '1785–1844','artistSuffix': '', 'artistAlphaSort': 'Gobrecht, Christian', 'artistNationality': '',
             'artistBeginDate': '1785', 'artistEndDate': '1844', 'artistGender':'',
             'artistWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648',
             'artistULAN_URL': 'http://vocab.getty.edu/page/ulan/500077295', 'objectDate': '1907',
             'objectBeginDate': '1907', 'objectEndDate': '1907', 'medium': 'Gold',
             'dimensions': 'Dimensions unavailable',
             'measurements':
             '[{ "elementName": "Other", "elementDescription": "Object diameter","elementMeasurements": {"Diameter": 1.7}}]',
             'creditLine': 'Gift of Heinz L. Stoppelmann, 1979', 'geographyType': '',
             'city': '', 'state': '', 'county': '', 'country': '', 'region': '', 'subregion': '', 'locale': '', 'locus': '',
             'excavation': '', 'river': '',
             'classification': '', 'rightsAndReproduction': '', 'linkResource': '',
             'metadataDate': '2021-04-06T04:41:04.967Z',
             'repository': 'Metropolitan Museum of Art, New York,NY', 'objectURL': 'https://www.metmuseum.org/art/collection/search/11',
             'tags': '', 'objectDataWikiURL': '', 'isTimeLineWork': 'False', 'galleryNumber': '', 'constituentID': '1079',
             'role': 'Maker', 'name': '', 'constituentULANURL': 'http://vocab.getty.edu/page/ulan/500077295',
             'constituentWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648', 'gender': ''}
# Same record as form_dict with 'isHighlight' flipped to 'True' —
# presumably an update payload for edit tests; confirm with the tests.
updated_dict = {'objectID': '14', 'isHighlight': 'True', 'accessionNumber': '1979.486.5', 'accessionYear': '1979',
                'isPublicDomain': 'False', 'primaryImage': '', 'primaryImageSmall': '', 'department': 'The American Wing',
                'objectName': 'Coin', 'title': 'Five-dollar Liberty Head Coin', 'culture': '', 'period': '', 'dynasty'
                : '', 'reign': '', 'portfolio': '', 'artistRole': 'Maker', 'artistPrefix': '', 'artistDisplayName': 'Christian Gobrecht'
                , 'artistDisplayBio': '1785–1844','artistSuffix': '', 'artistAlphaSort': 'Gobrecht, Christian', 'artistNationality': '',
                'artistBeginDate': '1785', 'artistEndDate': '1844', 'artistGender':'',
                'artistWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648',
                'artistULAN_URL': 'http://vocab.getty.edu/page/ulan/500077295', 'objectDate': '1907',
                'objectBeginDate': '1907', 'objectEndDate': '1907', 'medium': 'Gold',
                'dimensions': 'Dimensions unavailable',
                'measurements':
                '[{ "elementName": "Other", "elementDescription": "Object diameter","elementMeasurements": {"Diameter": 1.7}}]',
                'creditLine': 'Gift of Heinz L. Stoppelmann, 1979', 'geographyType': '',
                'city': '', 'state': '', 'county': '', 'country': '', 'region': '', 'subregion': '', 'locale': '', 'locus': '',
                'excavation': '', 'river': '',
                'classification': '', 'rightsAndReproduction': '', 'linkResource': '',
                'metadataDate': '2021-04-06T04:41:04.967Z',
                'repository': 'Metropolitan Museum of Art, New York,NY', 'objectURL': 'https://www.metmuseum.org/art/collection/search/11',
                'tags': '', 'objectDataWikiURL': '', 'isTimeLineWork': 'False', 'galleryNumber': '', 'constituentID': '1079',
                'role': 'Maker', 'name': '', 'constituentULANURL': 'http://vocab.getty.edu/page/ulan/500077295',
                'constituentWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648', 'gender': ''}
# Same payload keyed to objectID '111' (clashes with artifact_fixture) —
# presumably exercises duplicate-detection; confirm with the tests.
duplicate_dict = {'objectID': '111', 'isHighlight': 'False', 'accessionNumber': '1979.486.5', 'accessionYear': '1979',
                  'isPublicDomain': 'False', 'primaryImage': '', 'primaryImageSmall': '', 'department': 'The American Wing',
                  'objectName': 'Coin', 'title': 'Five-dollar Liberty Head Coin', 'culture': '', 'period': '', 'dynasty'
                  : '', 'reign': '', 'portfolio': '', 'artistRole': 'Maker', 'artistPrefix': '', 'artistDisplayName': 'Christian Gobrecht'
                  , 'artistDisplayBio': '1785–1844','artistSuffix': '', 'artistAlphaSort': 'Gobrecht, Christian', 'artistNationality': '',
                  'artistBeginDate': '1785', 'artistEndDate': '1844', 'artistGender':'',
                  'artistWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648',
                  'artistULAN_URL': 'http://vocab.getty.edu/page/ulan/500077295', 'objectDate': '1907',
                  'objectBeginDate': '1907', 'objectEndDate': '1907', 'medium': 'Gold',
                  'dimensions': 'Dimensions unavailable',
                  'measurements':
                  '[{ "elementName": "Other", "elementDescription": "Object diameter","elementMeasurements": {"Diameter": 1.7}}]',
                  'creditLine': 'Gift of Heinz L. Stoppelmann, 1979', 'geographyType': '',
                  'city': '', 'state': '', 'county': '', 'country': '', 'region': '', 'subregion': '', 'locale': '', 'locus': '',
                  'excavation': '', 'river': '',
                  'classification': '', 'rightsAndReproduction': '', 'linkResource': '',
                  'metadataDate': '2021-04-06T04:41:04.967Z',
                  'repository': 'Metropolitan Museum of Art, New York,NY', 'objectURL': 'https://www.metmuseum.org/art/collection/search/11',
                  'tags': '', 'objectDataWikiURL': '', 'isTimeLineWork': 'False', 'galleryNumber': '', 'constituentID': '1079',
                  'role': 'Maker', 'name': '', 'constituentULANURL': 'http://vocab.getty.edu/page/ulan/500077295',
                  'constituentWikiDataURL': 'https://www.wikidata.org/wiki/Q5109648', 'gender': ''}
| 90.557895
| 136
| 0.536325
| 691
| 8,603
| 6.672938
| 0.205499
| 0.026025
| 0.0347
| 0.041206
| 0.97506
| 0.97506
| 0.97506
| 0.97506
| 0.97506
| 0.97506
| 0
| 0.08061
| 0.245844
| 8,603
| 95
| 137
| 90.557895
| 0.629316
| 0
| 0
| 0.94382
| 0
| 0.033708
| 0.589377
| 0.062297
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f554111c3f65c2766ed098befb4f3aae85265010
| 394
|
py
|
Python
|
terrascript/provider/stackpath.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/provider/stackpath.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/provider/stackpath.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/provider/stackpath.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:27:49 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.provider.stackpath
#
# instead of
#
# >>> import terrascript.provider.stackpath.stackpath
#
# This is only available for 'official' and 'partner' providers.
from terrascript.provider.stackpath.stackpath import *
| 26.266667
| 73
| 0.753807
| 49
| 394
| 6.061224
| 0.693878
| 0.255892
| 0.377104
| 0.228956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034985
| 0.129442
| 394
| 14
| 74
| 28.142857
| 0.830904
| 0.794416
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f559f1fb3191884a124aea7999e3352d4195f6ab
| 106,337
|
py
|
Python
|
qtl/EQTLReport.py
|
longing247/QTL
|
bff03ad815323c9a4e0236f43d1077e19a578323
|
[
"Apache-2.0"
] | null | null | null |
qtl/EQTLReport.py
|
longing247/QTL
|
bff03ad815323c9a4e0236f43d1077e19a578323
|
[
"Apache-2.0"
] | null | null | null |
qtl/EQTLReport.py
|
longing247/QTL
|
bff03ad815323c9a4e0236f43d1077e19a578323
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on Jan 13, 2015
@author: jiao
Genome-wide eQTL mapping report
'''
import json
import sys
import os
import math
import gc
# Make the project root importable and point Django at its settings
# before the qtl.models import below touches the ORM.
sys.path.append('/home/jiao/QTL')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'QTL.settings')
from qtl.models import Gene,Parent,ExperimentMarker,RIL,ExperimentGene
from django.db.models import Q
from TStatistic import tTest,tToP
from FDR import pvalThld,getDicVal,sortDicByVal
from MySQLCorrelation import mysqlCorrelationAll,mysqlCorrelationSingle
def eQTLReport(fi,fo):
    '''
    Generate eQTL report file base on the genome-wide eQTL mapping input file.
    @type fi: json format file object
    @param fi: json format input genome-wide eQTL mapping file. e.g. 'genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt.txt'
    @type fo: file object
    @param fo: path/name of report output file. e.g. 'report_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt.txt'
    @rtype: None
    @return: None
    '''
    with open(fi) as data_file:
        data = json.load(data_file)
    # Per-run counters; cisTransProfile entries are presumably
    # [has_cis, has_trans, n_trans_intervals] — confirm with the mapper.
    nr_overall_cis_only = 0 # nr of genes that only have cis-eQTL
    nr_overall_trans_only = 0 # nr of genes that only have trans-eQTL
    nr_overall_cis = 0 # nr of significant cis-eQTLs in total
    nr_overall_trans = 0 # nr of significant trans-eQTL in total
    nr_overall_both_cis_trans = 0 # nr of genes which have both cis- and trans-eQTL in total
    nr_overall_multi_trans = 0 # nr of genes which have more than one trans-eQTL in total
    multi_trans_list = [] # record the genes which have more than one trans-eQTLs in the report.
    with open(fo,'w') as f:
        #with open('report_eQTL_mapping_Ligterink_2014_gxe1_2.7','w') as f:
        # Tab-separated header; one row per trait follows.
        f.write('Trait\tHas cis-eQTL\tHas trans-eQTL\tHas only cis-eQTL\tHas only trans-eQTL\tHas both cis- and trans-eQTL\tHas multiple trans-eQTL\tNumber of trans-eQTL confidence interval\n')
        j =0
        for qtl in data:
            j+=1
            # Per-trait flags, recomputed every iteration.
            cis = False
            trans = False
            cis_only = False
            trans_only = False
            bothCisTrans = False
            multiTrans = False
            nrOfTrans = 0
            if qtl['cisTransProfile'][0] == True:
                cis = True
                nr_overall_cis+=1
                if qtl['cisTransProfile'][1] == True:
                    trans = True
                    nr_overall_trans += 1
                    bothCisTrans = True
                    nr_overall_both_cis_trans += 1
                    if qtl['cisTransProfile'][2]>1:
                        # Count every extra trans interval beyond the first.
                        added_trans = qtl['cisTransProfile'][2]-1
                        multiTrans = True
                        nr_overall_multi_trans +=1
                        nr_overall_trans += added_trans
                        nrOfTrans = qtl['cisTransProfile'][2]
                        multi_trans_list.append(qtl['trait'])
                    else:
                        nrOfTrans = 1
                else:
                    cis_only = True
                    nr_overall_cis_only+=1
            else:
                trans = True
                nr_overall_trans += 1
                trans_only = True
                nr_overall_trans_only +=1
                # NOTE(review): assigns the whole profile list, but the
                # if/else below always overwrites it — dead statement.
                nrOfTrans = qtl['cisTransProfile']
                if qtl['cisTransProfile'][2]>1:
                    multiTrans = True
                    nr_overall_multi_trans +=1
                    # NOTE(review): unlike the cis branch above, the extra
                    # trans intervals are NOT added to nr_overall_trans
                    # here — confirm whether that asymmetry is intended.
                    nrOfTrans = qtl['cisTransProfile'][2]
                    multi_trans_list.append(qtl['trait'])
                else:
                    nrOfTrans = 1
            f.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%d\n' % (qtl['trait'], cis, trans,cis_only,trans_only,bothCisTrans,multiTrans,nrOfTrans))
    # Python 2 print statements: summary counters, then the multi-trans traits.
    print j,nr_overall_cis_only,nr_overall_trans_only,nr_overall_cis,nr_overall_trans,nr_overall_both_cis_trans,nr_overall_multi_trans
    print multi_trans_list
def dEParent(exp_name,n1,n2,parent1,parent2):
    '''
    differential express analysis between two parents under the same condition.
    @type exp_name: string
    @param exp_name: experiment name e.g. Ligterink_2014
    @type n1: int
    @param n1: number of replicates for parent1.
    @type n2: int
    @param n2: number of replicates for parent2.
    @type parent1: string
    @param parent1: name of parent1 e.g.BayRP
    @type parent2: string
    @param parent2: name of parent2 e.g.ShaRP
    @rtype pval: dictionary
    @return pval: a dictionary keyed by trait (locus identifier) whose mapped value is the p value
    corresponding to the null hypothesis that the trait is not differentially expressed.
    '''
    # All distinct traits (locus identifiers) measured in this experiment.
    trait_list = Parent.objects.filter(experiment_name = exp_name).values_list('locus_identifier',flat=True).distinct()
    pval= {}
    for trait in trait_list:
        # Django returns unicode; collapse to a plain ASCII str for use as dict key.
        trait_decode = trait.encode('ascii','ignore')
        gene = Gene.objects.get(locus_identifier = trait_decode)
        # One expression record per parent for this gene under this experiment.
        p1 = Parent.objects.get(experiment_name = exp_name, parent_type = parent1, locus_identifier = gene)
        p2 = Parent.objects.get(experiment_name = exp_name, parent_type = parent2, locus_identifier = gene)
        print trait_decode,p1.se,p2.se
        p1_se = float(p1.se)           # standard error of parent1 expression
        p1_avg = float(p1.expression)  # mean expression of parent1
        p2_se = float(p2.se)
        p2_avg = float(p2.expression)
        # Two-sample t statistic from means and standard errors (helper defined elsewhere in this file).
        t_score = tTest(n1,n2,p1_avg,p2_avg,p1_se,p2_se)
        df = n1+n2-2  # degrees of freedom for the two-sample test
        p= tToP(t_score,df)
        pval[trait_decode] = p
        print pval[trait_decode]
    return pval
def degReport(exp_name,n1,n2,parent1,parent2,fo):
'''
differential express analysis between different parent under same condition resulted to a file
@type experiment_name: string
@param experiment_name: experiment_name e.g. Ligterink_2014
@type n1: int
@param n1: number of experiment 1 replicates.
@type n2: int
@param n2: number of experiment 2 replicates.
@type parent1: string
@param parent1: name of parent1 e.g.BayRP
@type parent2: string
@param parent2: name of the experiment e.g.ShaRP
'''
pval = dEParent(exp_name,n1,n2,parent1,parent2)
sorted_pval = getDicVal(pval)
pthld = pvalThld(sorted_pval,0.05)#0.00771196827993
apply_cutoff_pval ={k:v for k,v in pval.iteritems() if v <pthld}
sorted_apply_cutoff_pval = sortDicByVal(apply_cutoff_pval)
i = 0
with open(fo,'w') as out:
for gene,pval in sorted_apply_cutoff_pval :
i+=1
out.write('%s\t%.16f\n' % (gene,pval))
print fo,i,pthld
def deg(fi,fo):
    '''
    Read DEG report file and give a summary that contains only gene name.
    @type fi: string
    @param fi: path of the DEG report file (tab-separated, gene name in first column)
    @type fo: string
    @param fo: path of the output file, one gene name per line
    '''
    # Fix: avoid shadowing the builtins `input`/`output`, and stream line by
    # line instead of materializing the whole report in a list first.
    with open(fi) as fin, open(fo, 'w') as fout:
        for line in fin:
            fout.write(line.split('\t')[0] + '\n')
def geneHasEQTL(fi,fo):
    '''
    Read eQTL report file and give a summary that contains only gene names.
    @type fi: string
    @param fi: path of the eQTL report file (a JSON list of objects with a 'trait' key)
    @type fo: string
    @param fo: path of the output file, one gene name per line
    '''
    # Fix: the input handle was opened but never closed; `with` guarantees
    # closure even if json.load raises.
    with open(fi) as report_file:
        report = json.load(report_file)
    gene_list = [entry['trait'] for entry in report]
    with open(fo,'w') as out:
        for gene in gene_list:
            out.write(gene+'\n')
def intersectionGenes(f1,f2,fo):
    '''
    Find out the intersection between genes that are differential expressed between parents and genes that have eQTL mapped.
    @type f1: string
    @param f1: path of DEG summary file, one gene per line
    @type f2: string
    @param f2: path of eQTL gene summary file, one gene per line
    @type fo: string
    @param fo: path of the output file (intersection, one gene per line)
    '''
    # Prints use the parenthesized single-expression form: byte-identical
    # output under Python 2 and forward-compatible with Python 3.
    print('compare overlap genes between %s and %s ' % (f1,f2))
    with open(f1) as input1:
        deg_genes = [line.strip() for line in input1]
    print('%d of genes are differential expressed in %s' % (len(deg_genes),f1))
    with open(f2) as input2:
        eQTL_genes = [line.strip() for line in input2]
    print('%d of genes that have eQTL mapped in %s' % (len(eQTL_genes),f2))
    overlap_genes = set(deg_genes) & set(eQTL_genes)
    print('%d of overlapped genes' % len(overlap_genes))
    with open(fo,'w') as out:
        # Fix: iterating the raw set wrote genes in arbitrary hash order, making
        # the report non-reproducible across runs; sort for deterministic output.
        for gene in sorted(overlap_genes):
            out.write(gene+'\n')
    print('Process completed')
def summary(fi,fo):
    '''
    Write only the locus identifier (first tab-separated column, stripped)
    of each row in fi to fo, one identifier per line.
    '''
    with open(fi) as source, open(fo,'w') as sink:
        sink.writelines(row.split('\t')[0].strip() + '\n' for row in source)
def intersectionGenes2(f1,f2,fo):
    '''
    Find out the intersection genes that are differential expressed over three conditions.
    @type f1: string
    @param f1: path of first DEG report file (tab-separated, gene in first column)
    @type f2: string
    @param f2: path of second DEG report file (tab-separated, gene in first column)
    @type fo: string
    @param fo: path of the output file (intersection, one gene per line)
    '''
    # Prints use the parenthesized single-expression form: byte-identical
    # output under Python 2 and forward-compatible with Python 3.
    print('compare overlap genes between %s and %s ' % (f1,f2))
    with open(f1) as input1:
        deg1 = [line.split('\t')[0].strip() for line in input1]
    print('length %d' % len(deg1))
    print('%d of genes are differential expressed in %s' % (len(deg1),f1))
    with open(f2) as input2:
        deg2 = [line.split('\t')[0].strip() for line in input2]
    print('%d of genes are differential expressed in %s' % (len(deg2),f2))
    overlap_genes = set(deg1) & set(deg2)
    print('%d of overlapped genes' % len(overlap_genes))
    with open(fo,'w') as out:
        # Fix (consistent with intersectionGenes): sort so the output order is
        # deterministic instead of arbitrary set-hash order.
        for gene in sorted(overlap_genes):
            out.write(gene+'\n')
    print('Process completed')
def hasOverlapEQTL(fi,cor_thld,fo):
    '''
    lookup paired (co-expressed) genes that show high correlation over RIL population and have overlap eqtl profile from genome wide eQTL report
    @bug: Out of memory (Needed 8164 bytes) django.db.utils.OperationalError: (2008, 'MySQL client ran out of memory') 67/69 marker.
    @type fi: string
    @param fi: path of eQTL report file. e.g. genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt
    @type cor_thld: float
    @param cor_thld: absolute value of correlation threshold
    @type fo: string
    @param fo: path of output file
    '''
    # NOTE(review): input handle is never closed; left as-is because the
    # function's memory behaviour (see @bug above) is already delicate.
    input = open(fi)
    report = json.load(input)
    # Alternative marker selections kept for reference; the hard-coded 3-marker
    # list below was presumably used to work around the OOM noted above — confirm.
    #eqtl_list = [eqtl for eqtls in report for eqtl in eqtls['eQTL']]
    #eqtl_list = set(eqtl_list)
    #eqtl_list = ['IND6375', 'F5I14', 'MSAT518662']
    #eqtl_list = [u'MSAT514', u'MSAT399', u'K9I9', u'MSAT500027', u'MSAT127088', u'MSAT59', u'MSAT25', u'NGA225', u'MSAT418', u'IND1136', u'MSAT512110', u'MSAT370', u'MSAT522', u'MSAT100008', u'MSAT15', u'MSAT318406', u'JV6162', u'MSAT332', u'MSAT319', u'MSAT305754', u'MSAT519', u'NGA8', u'NGA128', u'MSAT200897', u'MSAT210', u'CIW7', u'MSAT108193', u'MSAT241', u'MSAT3117', u'MSAT110', u'MSAT443', u'MSAT113', u'MSAT27', u'NGA248', u'NGA249', u'ATHCHIB2', u'T1G11', u'MSAT49', u'MSAT48', u'IND628', u'MSAT318', u'IND2188', u'dCAPsAPR2', u'F21M12', u'IND216199', u'NGA172', u'T27K12', u'MSAT365', u'MSAT222', u'MSAT415', u'MSAT142', u'MSAT321', u'NGA151', u'IND4992', u'MSAT468', u'NGA139', u'JV7576', u'MSAT520037', u'MSAT559', u'CZSOD2', u'MSAT439', u'MSAT435', u'MSAT236', u'MSAT238', u'MSAT512', u'MSAT437']
    eqtl_list = [u'IND6375', u'F5I14', u'MSAT518662']
    print eqtl_list
    print 'Number of markers: %d' %len(eqtl_list)
    #create summary of eQTL profile [{gene:eQTL},...]
    eqtl_profile = {}
    for profile in report:
        eqtl_profile[profile['trait']] = profile['eQTL']
    out = {}
    for eqtl in eqtl_list:
        cor_gene = []
        print eqtl
        # All genes whose eQTL profile contains this marker.
        gene_overlap_eqtl = [key for key, value in eqtl_profile.iteritems() if eqtl in value]
        print len(gene_overlap_eqtl)
        for gene in gene_overlap_eqtl:
            paired_gene_dic = {}
            paired_gene_list = []
            # Pairwise correlation of every other gene sharing this marker;
            # O(n^2) database round-trips — source of the memory pressure.
            for target_gene in gene_overlap_eqtl:
                if gene!=target_gene:
                    cor = mysqlCorrelationSingle(gene,target_gene)[0].r
                    if math.fabs(cor)>=cor_thld:
                        paired_gene_list.append(target_gene)
                        print eqtl,gene,target_gene
                    # Explicit cleanup to fight the MySQL-client OOM noted in @bug.
                    del cor
                    gc.collect()
            if len(paired_gene_list)!=0:
                paired_gene_dic[gene]= paired_gene_list
                cor_gene.append(paired_gene_dic)
        if len(cor_gene) !=0:
            out[eqtl] = cor_gene
    # Result shape: {marker: [{gene: [highly-correlated partner genes]}, ...]}
    with open(fo,'w') as output:
        json.dump(out,output,sort_keys = True, indent = 4)
def transgressiveSegregation(exp_name,_gxe,fo):
    '''
    lookup genes whose expression level in the RIL population shows transgressive segregation beyond the parents. It is defined as the expression level of a gene of a RIL which lies beyond the region
    mu+-2SD of its parents, where mu is the average expression level of the parents, and SD is the standard deviation.
    @type exp_name: string
    @param exp_name: name of the experiment
    @type _gxe: int
    @param _gxe: 0 or 1, stands for whether the experiment considers environmental interaction.
    @type fo: string
    @param fo: path of output file
    '''
    # TODO: not implemented yet.
    pass
if __name__=="__main__":
#Experiment_name gxe_condition, LOD_threshold, LOD confidence interval
#nr of mapped genes, number of genes have cis-only eQTL, nr of genes have trans-only eQTL, nr of overall cis-eQTL, nr of overall trans-eQTL
#nr of traits that have both cis- and trans-eQTL, nr of traits that have multiple trans-eQTL.
#Ligterink 2014 gxe 0 3.85 1.5
#fi1 = 'genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt'
#fo1 = 'report_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt'
#eQTLReport(fi1,fo1)
#2633 1137 1423 1210 1496 73 72
#[u'AT1G01310', u'AT1G01810', u'AT1G02870', u'AT1G10840', u'AT1G19968', u'AT1G20030', u'AT1G25310', u'AT1G45010', u'AT1G49150', u'AT1G50290', u'AT1G59980', u'AT1G60360', u'AT1G61730', u'AT1G67635', u'AT1G68010', u'AT1G70860', u'AT1G72990', u'AT1G74430', u'AT1G78180', u'AT2G03770', u'AT2G05380', u'AT2G11430', u'AT2G16290', u'AT2G24285', u'AT2G25450', u'AT2G32080', u'AT2G34040', u'AT2G35040', u'AT2G35720', u'AT2G38630', u'AT2G41220', u'AT2G42340', u'AT2G43680', u'AT3G05937', u'AT3G14690', u'AT3G17890', u'AT3G20930', u'AT3G22910', u'AT3G24480', u'AT3G26510', u'AT3G44110', u'AT3G45620', u'AT3G46980', u'AT3G47070', u'AT3G51890', u'AT3G59140', u'AT3G61360', u'AT3G63350', u'AT4G03320', u'AT4G08106', u'AT4G08940', u'AT4G09750', u'AT4G10250', u'AT4G15490', u'AT4G16690', u'AT4G21890', u'AT4G24450', u'AT4G29770', u'AT4G30310', u'AT4G31840', u'AT5G02370', u'AT5G26000', u'AT5G32592', u'AT5G35020', u'AT5G38990', u'AT5G46750', u'AT5G48150', u'AT5G49210', u'AT5G50120', u'AT5G56910', u'AT5G58070', u'AT5G62020']
#Ligterink 2014 gxe 1 2.7 1.5
#fi2 = 'genome_wide_eQTL_mapping_Ligterink_2014_gxe1_2.7.txt'
#fo2 = 'report_eQTL_mapping_Ligterink_2014_gxe1_2.7.txt'
#eQTLReport(fi2,fo2)
#2489 54 2419 70 2435 16 176
#[u'AT1G02000', u'AT1G02660', u'AT1G03120', u'AT1G05930', u'AT1G06690', u'AT1G07120', u'AT1G07200', u'AT1G10020', u'AT1G12411', u'AT1G19550', u'AT1G19570', u'AT1G21640', u'AT1G23660', u'AT1G26208', u'AT1G26975', u'AT1G28060', u'AT1G28327', u'AT1G29710', u'AT1G30010', u'AT1G30016', u'AT1G31960', u'AT1G33080', u'AT1G49720', u'AT1G53165', u'AT1G56180', u'AT1G56345', u'AT1G60500', u'AT1G62060', u'AT1G62695', u'AT1G65113', u'AT1G65500', u'AT1G66840', u'AT1G67105', u'AT1G68160', u'AT1G69060', u'AT1G69935', u'AT1G70150', u'AT1G70420', u'AT1G70750', u'AT1G70944', u'AT1G71330', u'AT1G74600', u'AT1G75800', u'AT1G75960', u'AT1G77700', u'AT1G78270', u'AT1G80350', u'AT2G02680', u'AT2G03800', u'AT2G04790', u'AT2G04864', u'AT2G06025', u'AT2G06235', u'AT2G10226', u'AT2G11405', u'AT2G11430', u'AT2G15960', u'AT2G18060', u'AT2G19580', u'AT2G24410', u'AT2G25370', u'AT2G27870', u'AT2G29270', u'AT2G29340', u'AT2G34790', u'AT2G39110', u'AT2G41060', u'AT2G41570', u'AT2G42440', u'AT2G44560', u'AT2G47310', u'AT2G47530', u'AT3G01240', u'AT3G03460', u'AT3G03680', u'AT3G05130', u'AT3G05905', u'AT3G07890', u'AT3G08780', u'AT3G20220', u'AT3G22886', u'AT3G23680', u'AT3G24070', u'AT3G26934', u'AT3G27960', u'AT3G30418', u'AT3G30817', u'AT3G45590', u'AT3G45780', u'AT3G45800', u'AT3G46387', u'AT3G46510', u'AT3G46960', u'AT3G46980', u'AT3G48920', u'AT3G49370', u'AT3G50480', u'AT3G50740', u'AT3G50810', u'AT3G51030', u'AT3G51110', u'AT3G53770', u'AT3G54440', u'AT3G54850', u'AT3G57170', u'AT3G57670', u'AT3G58420', u'AT3G59320', u'AT4G01200', u'AT4G05120', u'AT4G07666', u'AT4G08145', u'AT4G09984', u'AT4G10400', u'AT4G10650', u'AT4G11300', u'AT4G11880', u'AT4G12010', u'AT4G12240', u'AT4G13395', u'AT4G13500', u'AT4G14120', u'AT4G14270', u'AT4G15100', u'AT4G16960', u'AT4G17500', u'AT4G22650', u'AT4G23713', u'AT4G25650', u'AT4G26730', u'AT4G27800', u'AT4G30350', u'AT4G30900', u'AT4G31940', u'AT4G32924', u'AT4G37150', u'AT4G37280', u'AT4G38050', u'AT4G39230', u'AT5G01820', u'AT5G02000', u'AT5G02811', 
u'AT5G05750', u'AT5G07500', u'AT5G09711', u'AT5G10570', u'AT5G13890', u'AT5G15254', u'AT5G22240', u'AT5G22510', u'AT5G23405', u'AT5G23520', u'AT5G25000', u'AT5G26345', u'AT5G27885', u'AT5G28030', u'AT5G28180', u'AT5G28667', u'AT5G29337', u'AT5G35020', u'AT5G36228', u'AT5G40220', u'AT5G43660', u'AT5G47000', u'AT5G48140', u'AT5G52580', u'AT5G54330', u'AT5G55150', u'AT5G55505', u'AT5G56061', u'AT5G57820', u'AT5G63500', u'AT5G65420', u'AT5G66360', u'AT5G66490', u'AT5G66600']
#Keurentjes 2007 gxe 0 3.3 1.5
#fi3 = 'genome_wide_eQTL_mapping_Keurentjes_2007_gxe0_3.3.txt'
#fo3 = 'report_eQTL_mapping_Keurentjes_2007_gxe0_3.3.txt'
#eQTLReport(fi3,fo3)
#7017 1558 4659 2358 5459 800 1066
#[u'AT1G01710', u'AT1G01730', u'AT1G02450', u'AT1G02650', u'AT1G02860', u'AT1G02900', u'AT1G03710', u'AT1G04130', u'AT1G04280', u'AT1G05000', u'AT1G05140', u'AT1G05610', u'AT1G05830', u'AT1G06100', u'AT1G06430', u'AT1G06650', u'AT1G06730', u'AT1G06840', u'AT1G06960', u'AT1G08180', u'AT1G08460', u'AT1G08500', u'AT1G08630', u'AT1G08910', u'AT1G09430', u'AT1G09780', u'AT1G09840', u'AT1G10030', u'AT1G10200', u'AT1G10300', u'AT1G10500', u'AT1G10700', u'AT1G11680', u'AT1G12410', u'AT1G13340', u'AT1G13930', u'AT1G14000', u'AT1G14150', u'AT1G14490', u'AT1G15260', u'AT1G15670', u'AT1G15830', u'AT1G16080', u'AT1G16390', u'AT1G16920', u'AT1G17190', u'AT1G17710', u'AT1G17860', u'AT1G17880', u'AT1G17890', u'AT1G17940', u'AT1G18630', u'AT1G19090', u'AT1G19180', u'AT1G19380', u'AT1G19660', u'AT1G19800', u'AT1G19960', u'AT1G20150', u'AT1G20410', u'AT1G20440', u'AT1G20450', u'AT1G21590', u'AT1G21760', u'AT1G22110', u'AT1G22270', u'AT1G22410', u'AT1G22430', u'AT1G22630', u'AT1G22690', u'AT1G22750', u'AT1G22900', u'AT1G23730', u'AT1G24180', u'AT1G24240', u'AT1G24560', u'AT1G24590', u'AT1G24851', u'AT1G26170', u'AT1G26360', u'AT1G26450', u'AT1G26620', u'AT1G26940', u'AT1G27210', u'AT1G28150', u'AT1G28400', u'AT1G29580', u'AT1G29760', u'AT1G29830', u'AT1G30270', u'AT1G30480', u'AT1G30510', u'AT1G30755', u'AT1G30880', u'AT1G31120', u'AT1G31180', u'AT1G31220', u'AT1G31817', u'AT1G31860', u'AT1G32460', u'AT1G32550', u'AT1G32580', u'AT1G33855', u'AT1G34000', u'AT1G34120', u'AT1G34480', u'AT1G34780', u'AT1G35320', u'AT1G35550', u'AT1G41900', u'AT1G42540', u'AT1G42580', u'AT1G43850', u'AT1G43910', u'AT1G44170', u'AT1G44800', u'AT1G45474', u'AT1G45688', u'AT1G47240', u'AT1G47420', u'AT1G47530', u'AT1G47750', u'AT1G47960', u'AT1G47990', u'AT1G48090', u'AT1G48160', u'AT1G48320', u'AT1G48330', u'AT1G48380', u'AT1G48600', u'AT1G48640', u'AT1G48900', u'AT1G49000', u'AT1G49390', u'AT1G49440', u'AT1G49510', u'AT1G49980', u'AT1G50280', u'AT1G51620', u'AT1G51690', u'AT1G51800', u'AT1G52030', 
u'AT1G52200', u'AT1G52410', u'AT1G52730', u'AT1G52760', u'AT1G54060', u'AT1G55030', u'AT1G55680', u'AT1G56120', u'AT1G56260', u'AT1G56650', u'AT1G56660', u'AT1G57540', u'AT1G58025', u'AT1G60130', u'AT1G60350', u'AT1G60460', u'AT1G60610', u'AT1G60680', u'AT1G60990', u'AT1G61380', u'AT1G61540', u'AT1G61800', u'AT1G61820', u'AT1G61890', u'AT1G62300', u'AT1G62660', u'AT1G62910', u'AT1G63050', u'AT1G63090', u'AT1G63180', u'AT1G63630', u'AT1G64355', u'AT1G64700', u'AT1G64770', u'AT1G64850', u'AT1G64920', u'AT1G65030', u'AT1G65140', u'AT1G65290', u'AT1G65365', u'AT1G65400', u'AT1G66160', u'AT1G66230', u'AT1G66270', u'AT1G66430', u'AT1G66490', u'AT1G66530', u'AT1G66700', u'AT1G66740', u'AT1G66980', u'AT1G67190', u'AT1G67330', u'AT1G67360', u'AT1G67600', u'AT1G67810', u'AT1G68050', u'AT1G68440', u'AT1G68540', u'AT1G68590', u'AT1G68820', u'AT1G69430', u'AT1G69510', u'AT1G69530', u'AT1G69730', u'AT1G69890', u'AT1G69900', u'AT1G69970', u'AT1G70550', u'AT1G70680', u'AT1G71230', u'AT1G71410', u'AT1G71790', u'AT1G71880', u'AT1G72300', u'AT1G72330', u'AT1G72370', u'AT1G72520', u'AT1G72950', u'AT1G73010', u'AT1G73230', u'AT1G73540', u'AT1G73700', u'AT1G73840', u'AT1G74070', u'AT1G74590', u'AT1G74650', u'AT1G74790', u'AT1G74910', u'AT1G75040', u'AT1G75530', u'AT1G75600', u'AT1G75680', u'AT1G76030', u'AT1G76520', u'AT1G76530', u'AT1G76600', u'AT1G76680', u'AT1G76690', u'AT1G76790', u'AT1G77710', u'AT1G77850', u'AT1G78700', u'AT1G78820', u'AT1G79020', u'AT1G79520', u'AT1G79550', u'AT1G79940', u'AT1G79970', u'AT1G80290', u'AT1G80360', u'AT1G80380', u'AT1G80560', u'AT2G01320', u'AT2G01760', u'AT2G02100', u'AT2G02130', u'AT2G02220', u'AT2G02930', u'AT2G04400', u'AT2G04460', u'AT2G04900', u'AT2G05590', u'AT2G06005', u'AT2G06050', u'AT2G06490', u'AT2G07330', u'AT2G07510', u'AT2G09970', u'AT2G10940', u'AT2G11690', u'AT2G11830', u'AT2G11890', u'AT2G13720', u'AT2G14460', u'AT2G14750', u'AT2G15490', u'AT2G16060', u'AT2G16860', u'AT2G17240', u'AT2G17450', u'AT2G17720', u'AT2G18200', 
u'AT2G18210', u'AT2G18370', u'AT2G19190', u'AT2G19520', u'AT2G19800', u'AT2G19850', u'AT2G19910', u'AT2G20140', u'AT2G20145', u'AT2G20570', u'AT2G21140', u'AT2G21150', u'AT2G21160', u'AT2G21180', u'AT2G21560', u'AT2G21690', u'AT2G21820', u'AT2G21850', u'AT2G22030', u'AT2G22170', u'AT2G22250', u'AT2G22480', u'AT2G22880', u'AT2G22900', u'AT2G22970', u'AT2G23180', u'AT2G23320', u'AT2G23450', u'AT2G24560', u'AT2G24765', u'AT2G24860', u'AT2G25200', u'AT2G25410', u'AT2G26440', u'AT2G26470', u'AT2G26890', u'AT2G27090', u'AT2G27510', u'AT2G27910', u'AT2G28140', u'AT2G28270', u'AT2G28550', u'AT2G28720', u'AT2G28890', u'AT2G29400', u'AT2G29450', u'AT2G29890', u'AT2G30250', u'AT2G30260', u'AT2G30430', u'AT2G30620', u'AT2G30650', u'AT2G31180', u'AT2G31360', u'AT2G31670', u'AT2G31680', u'AT2G31790', u'AT2G31880', u'AT2G32060', u'AT2G32240', u'AT2G32570', u'AT2G32990', u'AT2G33070', u'AT2G33530', u'AT2G33780', u'AT2G33850', u'AT2G34310', u'AT2G34430', u'AT2G34630', u'AT2G34860', u'AT2G35290', u'AT2G35380', u'AT2G35460', u'AT2G35580', u'AT2G35800', u'AT2G36340', u'AT2G36390', u'AT2G36540', u'AT2G36610', u'AT2G36690', u'AT2G36710', u'AT2G36840', u'AT2G36930', u'AT2G37040', u'AT2G37180', u'AT2G37260', u'AT2G37520', u'AT2G37550', u'AT2G37640', u'AT2G37710', u'AT2G37750', u'AT2G37970', u'AT2G38120', u'AT2G38170', u'AT2G38380', u'AT2G38470', u'AT2G38490', u'AT2G38750', u'AT2G38860', u'AT2G38870', u'AT2G39010', u'AT2G39100', u'AT2G39240', u'AT2G39400', u'AT2G39660', u'AT2G40300', u'AT2G40475', u'AT2G40600', u'AT2G40750', u'AT2G40765', u'AT2G41010', u'AT2G41140', u'AT2G41180', u'AT2G41250', u'AT2G41480', u'AT2G41530', u'AT2G41560', u'AT2G42360', u'AT2G42770', u'AT2G42840', u'AT2G43010', u'AT2G43290', u'AT2G43510', u'AT2G43520', u'AT2G43580', u'AT2G43590', u'AT2G43750', u'AT2G44380', u'AT2G44450', u'AT2G44650', u'AT2G45210', u'AT2G45470', u'AT2G45630', u'AT2G45660', u'AT2G46310', u'AT2G46430', u'AT2G46490', u'AT2G46640', u'AT2G46650', u'AT2G47140', u'AT2G47730', u'AT2G48090', 
u'AT3G01060', u'AT3G01850', u'AT3G01860', u'AT3G01940', u'AT3G02140', u'AT3G02960', u'AT3G03290', u'AT3G03640', u'AT3G03830', u'AT3G04110', u'AT3G04310', u'AT3G04360', u'AT3G04605', u'AT3G04650', u'AT3G04770', u'AT3G04880', u'AT3G04940', u'AT3G05410', u'AT3G05540', u'AT3G05570', u'AT3G05730', u'AT3G06040', u'AT3G06110', u'AT3G06300', u'AT3G06483', u'AT3G06510', u'AT3G06810', u'AT3G06890', u'AT3G07170', u'AT3G07200', u'AT3G07540', u'AT3G08000', u'AT3G08530', u'AT3G09270', u'AT3G09310', u'AT3G09320', u'AT3G09830', u'AT3G10220', u'AT3G10410', u'AT3G10910', u'AT3G11090', u'AT3G11210', u'AT3G11690', u'AT3G11930', u'AT3G12040', u'AT3G12060', u'AT3G12500', u'AT3G12610', u'AT3G12750', u'AT3G13080', u'AT3G13100', u'AT3G13260', u'AT3G13450', u'AT3G13610', u'AT3G13625', u'AT3G13790', u'AT3G13910', u'AT3G14067', u'AT3G14225', u'AT3G14510', u'AT3G14940', u'AT3G14990', u'AT3G15450', u'AT3G15510', u'AT3G15950', u'AT3G16030', u'AT3G16150', u'AT3G16210', u'AT3G16400', u'AT3G16410', u'AT3G16460', u'AT3G16480', u'AT3G16710', u'AT3G16800', u'AT3G16990', u'AT3G17680', u'AT3G18240', u'AT3G18280', u'AT3G18820', u'AT3G18940', u'AT3G19170', u'AT3G19500', u'AT3G19660', u'AT3G20083', u'AT3G20330', u'AT3G20370', u'AT3G20440', u'AT3G20470', u'AT3G21110', u'AT3G21175', u'AT3G21640', u'AT3G21910', u'AT3G22060', u'AT3G22231', u'AT3G22240', u'AT3G22250', u'AT3G22260', u'AT3G22330', u'AT3G22380', u'AT3G22420', u'AT3G22422', u'AT3G22435', u'AT3G22760', u'AT3G22840', u'AT3G22890', u'AT3G23090', u'AT3G23190', u'AT3G23710', u'AT3G24030', u'AT3G24500', u'AT3G25020', u'AT3G25070', u'AT3G25410', u'AT3G25600', u'AT3G25710', u'AT3G25740', u'AT3G26040', u'AT3G26500', u'AT3G27920', u'AT3G28925', u'AT3G29760', u'AT3G30180', u'AT3G30280', u'AT3G30415', u'AT3G31390', u'AT3G33530', u'AT3G42660', u'AT3G42860', u'AT3G43120', u'AT3G43520', u'AT3G43740', u'AT3G43960', u'AT3G43980', u'AT3G44120', u'AT3G44190', u'AT3G44230', u'AT3G44310', u'AT3G44450', u'AT3G44590', u'AT3G44670', u'AT3G44860', u'AT3G45040', 
u'AT3G45140', u'AT3G45470', u'AT3G45710', u'AT3G45970', u'AT3G46030', u'AT3G46430', u'AT3G46490', u'AT3G46930', u'AT3G47080', u'AT3G47520', u'AT3G47550', u'AT3G47640', u'AT3G47800', u'AT3G48000', u'AT3G48100', u'AT3G48130', u'AT3G48140', u'AT3G48220', u'AT3G48360', u'AT3G48410', u'AT3G48740', u'AT3G49320', u'AT3G49510', u'AT3G49780', u'AT3G49820', u'AT3G49890', u'AT3G50060', u'AT3G50090', u'AT3G50260', u'AT3G50270', u'AT3G50930', u'AT3G50970', u'AT3G50980', u'AT3G51000', u'AT3G51130', u'AT3G51660', u'AT3G51790', u'AT3G51860', u'AT3G51895', u'AT3G51940', u'AT3G51960', u'AT3G52155', u'AT3G52360', u'AT3G52400', u'AT3G52430', u'AT3G52450', u'AT3G52540', u'AT3G52960', u'AT3G53110', u'AT3G53120', u'AT3G53620', u'AT3G53670', u'AT3G53980', u'AT3G54080', u'AT3G54380', u'AT3G54510', u'AT3G54530', u'AT3G54620', u'AT3G54850', u'AT3G55005', u'AT3G55050', u'AT3G55120', u'AT3G55130', u'AT3G55980', u'AT3G56060', u'AT3G56170', u'AT3G56210', u'AT3G56360', u'AT3G56400', u'AT3G56480', u'AT3G56620', u'AT3G56790', u'AT3G57230', u'AT3G57920', u'AT3G57990', u'AT3G58200', u'AT3G58600', u'AT3G59070', u'AT3G59080', u'AT3G59340', u'AT3G59880', u'AT3G61010', u'AT3G61140', u'AT3G62040', u'AT3G62420', u'AT3G63350', u'AT3G63440', u'AT3G63470', u'AT4G01220', u'AT4G01480', u'AT4G01700', u'AT4G01870', u'AT4G01950', u'AT4G02530', u'AT4G02750', u'AT4G02860', u'AT4G03020', u'AT4G03070', u'AT4G03260', u'AT4G03460', u'AT4G04020', u'AT4G04220', u'AT4G04840', u'AT4G04960', u'AT4G05010', u'AT4G05020', u'AT4G05400', u'AT4G07730', u'AT4G08290', u'AT4G08570', u'AT4G08685', u'AT4G09040', u'AT4G09670', u'AT4G10300', u'AT4G10450', u'AT4G11190', u'AT4G11280', u'AT4G11290', u'AT4G11320', u'AT4G11720', u'AT4G11790', u'AT4G11800', u'AT4G12030', u'AT4G12710', u'AT4G12730', u'AT4G13470', u'AT4G13630', u'AT4G14130', u'AT4G14430', u'AT4G14650', u'AT4G14750', u'AT4G15530', u'AT4G15560', u'AT4G15620', u'AT4G15640', u'AT4G15650', u'AT4G15760', u'AT4G15940', u'AT4G16140', u'AT4G16310', u'AT4G16370', u'AT4G16390', 
u'AT4G16520', u'AT4G16950', u'AT4G17050', u'AT4G17190', u'AT4G17440', u'AT4G17500', u'AT4G17610', u'AT4G17870', u'AT4G18060', u'AT4G18170', u'AT4G18220', u'AT4G18250', u'AT4G18800', u'AT4G18880', u'AT4G18930', u'AT4G18950', u'AT4G19380', u'AT4G19560', u'AT4G19690', u'AT4G19810', u'AT4G19820', u'AT4G20700', u'AT4G20860', u'AT4G21105', u'AT4G21400', u'AT4G21990', u'AT4G22100', u'AT4G22330', u'AT4G22720', u'AT4G22900', u'AT4G23610', u'AT4G24020', u'AT4G24090', u'AT4G24780', u'AT4G24920', u'AT4G24990', u'AT4G25030', u'AT4G25230', u'AT4G26060', u'AT4G26510', u'AT4G26520', u'AT4G26670', u'AT4G26870', u'AT4G26910', u'AT4G27450', u'AT4G27500', u'AT4G27680', u'AT4G27740', u'AT4G27840', u'AT4G28050', u'AT4G28110', u'AT4G28280', u'AT4G29030', u'AT4G29480', u'AT4G29570', u'AT4G29760', u'AT4G29770', u'AT4G29905', u'AT4G30210', u'AT4G30430', u'AT4G30580', u'AT4G31910', u'AT4G32915', u'AT4G33490', u'AT4G33580', u'AT4G34050', u'AT4G34120', u'AT4G34160', u'AT4G34180', u'AT4G34640', u'AT4G34710', u'AT4G34730', u'AT4G34770', u'AT4G35180', u'AT4G36040', u'AT4G36220', u'AT4G36360', u'AT4G36590', u'AT4G36720', u'AT4G37010', u'AT4G37070', u'AT4G37610', u'AT4G37980', u'AT4G38010', u'AT4G38020', u'AT4G38460', u'AT4G38950', u'AT4G39300', u'AT4G39470', u'AT4G39780', u'AT4G39830', u'AT4G39940', u'AT5G01420', u'AT5G01480', u'AT5G01710', u'AT5G01780', u'AT5G02000', u'AT5G02290', u'AT5G02580', u'AT5G02810', u'AT5G03070', u'AT5G03120', u'AT5G03640', u'AT5G03920', u'AT5G04530', u'AT5G04720', u'AT5G04760', u'AT5G05110', u'AT5G05260', u'AT5G05320', u'AT5G05930', u'AT5G05960', u'AT5G06040', u'AT5G06100', u'AT5G06200', u'AT5G06360', u'AT5G06570', u'AT5G06580', u'AT5G06590', u'AT5G06850', u'AT5G07000', u'AT5G07180', u'AT5G07360', u'AT5G07700', u'AT5G07980', u'AT5G08000', u'AT5G08260', u'AT5G08590', u'AT5G08600', u'AT5G08660', u'AT5G09220', u'AT5G09480', u'AT5G09490', u'AT5G09530', u'AT5G09540', u'AT5G09980', u'AT5G10030', u'AT5G10290', u'AT5G10400', u'AT5G10700', u'AT5G10930', u'AT5G11000', 
u'AT5G11330', u'AT5G11590', u'AT5G11640', u'AT5G11840', u'AT5G12010', u'AT5G12120', u'AT5G12170', u'AT5G12210', u'AT5G12400', u'AT5G12420', u'AT5G13180', u'AT5G13240', u'AT5G13550', u'AT5G13580', u'AT5G13800', u'AT5G14020', u'AT5G14090', u'AT5G14320', u'AT5G15160', u'AT5G15960', u'AT5G15970', u'AT5G16370', u'AT5G16470', u'AT5G16780', u'AT5G17000', u'AT5G17020', u'AT5G17110', u'AT5G17140', u'AT5G17190', u'AT5G17330', u'AT5G17420', u'AT5G17460', u'AT5G17470', u'AT5G17650', u'AT5G17700', u'AT5G18020', u'AT5G18060', u'AT5G18230', u'AT5G19890', u'AT5G20960', u'AT5G21105', u'AT5G22500', u'AT5G22580', u'AT5G22630', u'AT5G22700', u'AT5G23010', u'AT5G23250', u'AT5G23350', u'AT5G23730', u'AT5G23820', u'AT5G23840', u'AT5G23890', u'AT5G24080', u'AT5G24510', u'AT5G24530', u'AT5G24760', u'AT5G25190', u'AT5G25220', u'AT5G25610', u'AT5G26280', u'AT5G26290', u'AT5G35080', u'AT5G35480', u'AT5G36880', u'AT5G36910', u'AT5G36920', u'AT5G37030', u'AT5G37210', u'AT5G37400', u'AT5G37500', u'AT5G37590', u'AT5G38290', u'AT5G38600', u'AT5G39020', u'AT5G39050', u'AT5G39090', u'AT5G39110', u'AT5G39450', u'AT5G39580', u'AT5G39890', u'AT5G40270', u'AT5G40350', u'AT5G40370', u'AT5G40850', u'AT5G40950', u'AT5G41070', u'AT5G41080', u'AT5G41270', u'AT5G41350', u'AT5G41660', u'AT5G41860', u'AT5G41990', u'AT5G42070', u'AT5G42150', u'AT5G42180', u'AT5G42200', u'AT5G42570', u'AT5G42840', u'AT5G42890', u'AT5G43070', u'AT5G43350', u'AT5G43440', u'AT5G43460', u'AT5G43500', u'AT5G44290', u'AT5G44360', u'AT5G44740', u'AT5G44785', u'AT5G45040', u'AT5G45650', u'AT5G45775', u'AT5G46190', u'AT5G46230', u'AT5G46900', u'AT5G46910', u'AT5G47100', u'AT5G47230', u'AT5G47250', u'AT5G47450', u'AT5G47540', u'AT5G47620', u'AT5G47630', u'AT5G47740', u'AT5G47770', u'AT5G47960', u'AT5G48000', u'AT5G48050', u'AT5G48370', u'AT5G48480', u'AT5G48540', u'AT5G48570', u'AT5G48620', u'AT5G48930', u'AT5G49450', u'AT5G49560', u'AT5G49630', u'AT5G49710', u'AT5G49840', u'AT5G50050', u'AT5G50150', u'AT5G50160', u'AT5G50200', 
u'AT5G50210', u'AT5G50380', u'AT5G51070', u'AT5G51820', u'AT5G51900', u'AT5G51950', u'AT5G52070', u'AT5G52120', u'AT5G52140', u'AT5G52310', u'AT5G52320', u'AT5G52370', u'AT5G52730', u'AT5G52760', u'AT5G52900', u'AT5G52950', u'AT5G53330', u'AT5G53730', u'AT5G54100', u'AT5G54110', u'AT5G54160', u'AT5G54220', u'AT5G54490', u'AT5G54500', u'AT5G54760', u'AT5G54800', u'AT5G54870', u'AT5G54940', u'AT5G55150', u'AT5G55260', u'AT5G55450', u'AT5G55530', u'AT5G56870', u'AT5G57110', u'AT5G57220', u'AT5G57300', u'AT5G57410', u'AT5G57510', u'AT5G59130', u'AT5G59550', u'AT5G59590', u'AT5G59613', u'AT5G59720', u'AT5G60150', u'AT5G60200', u'AT5G60540', u'AT5G60950', u'AT5G61080', u'AT5G61160', u'AT5G61320', u'AT5G61650', u'AT5G62140', u'AT5G62570', u'AT5G62640', u'AT5G63600', u'AT5G63620', u'AT5G63760', u'AT5G64100', u'AT5G64120', u'AT5G64130', u'AT5G64200', u'AT5G64510', u'AT5G64640', u'AT5G65110', u'AT5G65640', u'AT5G65730', u'AT5G65860', u'AT5G65870', u'AT5G67260', u'AT5G67420', u'AT5G67430', u'AT5G67500']
#Snoek 2012 gxe 1 3.01 1.5
#fi4 = 'genome_wide_eQTL_mapping_Snoek_2012_gxe1_3.01.txt'
#fo4 = 'report_eQTL_mapping_Snoek_2012_gxe1_3.01.txt'
#eQTLReport(fi4,fo4)
#9137 1952 5850 3287 7185 1335 1730
#[u'AT1G01060', u'AT1G01200', u'AT1G01390', u'AT1G01490', u'AT1G01580', u'AT1G01650', u'AT1G01670', u'AT1G01710', u'AT1G01725', u'AT1G01730', u'AT1G01770', u'AT1G01830', u'AT1G01910', u'AT1G02340', u'AT1G02640', u'AT1G02870', u'AT1G03020', u'AT1G03530', u'AT1G03710', u'AT1G03810', u'AT1G04150', u'AT1G04310', u'AT1G04390', u'AT1G04400', u'AT1G04530', u'AT1G04640', u'AT1G04760', u'AT1G04770', u'AT1G04790', u'AT1G04870', u'AT1G04970', u'AT1G04980', u'AT1G05140', u'AT1G05150', u'AT1G05160', u'AT1G05205', u'AT1G05300', u'AT1G05330', u'AT1G05385', u'AT1G05410', u'AT1G05520', u'AT1G05540', u'AT1G05590', u'AT1G05600', u'AT1G05640', u'AT1G05680', u'AT1G05700', u'AT1G05750', u'AT1G05790', u'AT1G05890', u'AT1G05960', u'AT1G05970', u'AT1G06040', u'AT1G06080', u'AT1G06135', u'AT1G06410', u'AT1G06710', u'AT1G06820', u'AT1G06870', u'AT1G07130', u'AT1G07310', u'AT1G07360', u'AT1G07370', u'AT1G07500', u'AT1G07590', u'AT1G07890', u'AT1G08360', u'AT1G08550', u'AT1G08590', u'AT1G08760', u'AT1G08860', u'AT1G08910', u'AT1G08920', u'AT1G08980', u'AT1G09000', u'AT1G09010', u'AT1G09160', u'AT1G09290', u'AT1G09310', u'AT1G09520', u'AT1G09530', u'AT1G09660', u'AT1G10580', u'AT1G10770', u'AT1G10900', u'AT1G10970', u'AT1G11000', u'AT1G11070', u'AT1G11120', u'AT1G11530', u'AT1G11580', u'AT1G12020', u'AT1G12030', u'AT1G12530', u'AT1G12630', u'AT1G12710', u'AT1G12730', u'AT1G12810', u'AT1G12845', u'AT1G12860', u'AT1G13000', u'AT1G13030', u'AT1G13250', u'AT1G13320', u'AT1G13460', u'AT1G13650', u'AT1G13830', u'AT1G13930', u'AT1G13980', u'AT1G14300', u'AT1G14340', u'AT1G14350', u'AT1G14550', u'AT1G14790', u'AT1G14920', u'AT1G15110', u'AT1G15125', u'AT1G15290', u'AT1G15380', u'AT1G15550', u'AT1G16400', u'AT1G16750', u'AT1G17050', u'AT1G17200', u'AT1G17455', u'AT1G17640', u'AT1G17700', u'AT1G17990', u'AT1G18010', u'AT1G18020', u'AT1G18270', u'AT1G18420', u'AT1G18460', u'AT1G18780', u'AT1G18800', u'AT1G18810', u'AT1G18830', u'AT1G18940', u'AT1G19060', u'AT1G19130', u'AT1G19170', u'AT1G19270', 
u'AT1G19340', u'AT1G19510', u'AT1G19835', u'AT1G20120', u'AT1G20470', u'AT1G20480', u'AT1G20630', u'AT1G20670', u'AT1G21090', u'AT1G21150', u'AT1G21210', u'AT1G21450', u'AT1G21460', u'AT1G21680', u'AT1G21690', u'AT1G21790', u'AT1G22090', u'AT1G22170', u'AT1G22290', u'AT1G22470', u'AT1G22490', u'AT1G22690', u'AT1G22990', u'AT1G23050', u'AT1G23560', u'AT1G23750', u'AT1G23800', u'AT1G23850', u'AT1G23880', u'AT1G24010', u'AT1G24150', u'AT1G24400', u'AT1G24460', u'AT1G25260', u'AT1G26250', u'AT1G26630', u'AT1G26770', u'AT1G26920', u'AT1G27140', u'AT1G27300', u'AT1G27340', u'AT1G27390', u'AT1G27460', u'AT1G27470', u'AT1G27595', u'AT1G27620', u'AT1G27710', u'AT1G27900', u'AT1G28100', u'AT1G28170', u'AT1G28610', u'AT1G29040', u'AT1G29250', u'AT1G29940', u'AT1G29980', u'AT1G30050', u'AT1G30110', u'AT1G30240', u'AT1G30250', u'AT1G30260', u'AT1G30440', u'AT1G30460', u'AT1G30470', u'AT1G30540', u'AT1G30610', u'AT1G30700', u'AT1G31000', u'AT1G31490', u'AT1G31830', u'AT1G32170', u'AT1G32360', u'AT1G32440', u'AT1G32730', u'AT1G32750', u'AT1G32810', u'AT1G32850', u'AT1G32900', u'AT1G33280', u'AT1G33290', u'AT1G33330', u'AT1G33880', u'AT1G33970', u'AT1G34065', u'AT1G34070', u'AT1G34180', u'AT1G34200', u'AT1G34320', u'AT1G34355', u'AT1G34760', u'AT1G34790', u'AT1G35190', u'AT1G35230', u'AT1G35310', u'AT1G35580', u'AT1G35610', u'AT1G35720', u'AT1G42470', u'AT1G43000', u'AT1G43560', u'AT1G43710', u'AT1G43860', u'AT1G44790', u'AT1G44910', u'AT1G44920', u'AT1G44970', u'AT1G45160', u'AT1G45180', u'AT1G47310', u'AT1G47610', u'AT1G47655', u'AT1G47800', u'AT1G47890', u'AT1G48050', u'AT1G48195', u'AT1G48360', u'AT1G48450', u'AT1G48480', u'AT1G48570', u'AT1G48600', u'AT1G48920', u'AT1G49130', u'AT1G49170', u'AT1G49240', u'AT1G49360', u'AT1G49560', u'AT1G49570', u'AT1G49750', u'AT1G49910', u'AT1G49920', u'AT1G49980', u'AT1G50110', u'AT1G50180', u'AT1G50270', u'AT1G50360', u'AT1G50380', u'AT1G50400', u'AT1G50410', u'AT1G50420', u'AT1G50460', u'AT1G50920', u'AT1G51610', u'AT1G51630', 
u'AT1G51650', u'AT1G51740', u'AT1G52590', u'AT1G52770', u'AT1G52870', u'AT1G52880', u'AT1G53000', u'AT1G53060', u'AT1G53070', u'AT1G53240', u'AT1G53250', u'AT1G53260', u'AT1G53380', u'AT1G53570', u'AT1G53730', u'AT1G53830', u'AT1G53840', u'AT1G54120', u'AT1G54200', u'AT1G54290', u'AT1G54310', u'AT1G54400', u'AT1G54410', u'AT1G54510', u'AT1G54530', u'AT1G54575', u'AT1G54650', u'AT1G54710', u'AT1G54740', u'AT1G54770', u'AT1G54850', u'AT1G54890', u'AT1G55000', u'AT1G55030', u'AT1G55070', u'AT1G55150', u'AT1G55220', u'AT1G55260', u'AT1G55310', u'AT1G55480', u'AT1G55490', u'AT1G55500', u'AT1G55510', u'AT1G55640', u'AT1G55960', u'AT1G56010', u'AT1G56150', u'AT1G56210', u'AT1G56220', u'AT1G56230', u'AT1G56270', u'AT1G56300', u'AT1G56330', u'AT1G56570', u'AT1G58080', u'AT1G58120', u'AT1G58180', u'AT1G58460', u'AT1G60000', u'AT1G60010', u'AT1G60140', u'AT1G60220', u'AT1G60800', u'AT1G61010', u'AT1G61030', u'AT1G61170', u'AT1G61210', u'AT1G61670', u'AT1G61740', u'AT1G61870', u'AT1G62225', u'AT1G62290', u'AT1G62440', u'AT1G62510', u'AT1G62570', u'AT1G62840', u'AT1G63245', u'AT1G63270', u'AT1G63460', u'AT1G63650', u'AT1G63700', u'AT1G63740', u'AT1G63780', u'AT1G64105', u'AT1G64350', u'AT1G64360', u'AT1G64430', u'AT1G64490', u'AT1G64550', u'AT1G64625', u'AT1G64670', u'AT1G65040', u'AT1G65070', u'AT1G65220', u'AT1G65240', u'AT1G65610', u'AT1G65680', u'AT1G65700', u'AT1G65710', u'AT1G65730', u'AT1G65840', u'AT1G65980', u'AT1G65985', u'AT1G66080', u'AT1G66260', u'AT1G66480', u'AT1G66730', u'AT1G66890', u'AT1G66910', u'AT1G67280', u'AT1G67325', u'AT1G67370', u'AT1G67510', u'AT1G67590', u'AT1G67690', u'AT1G67700', u'AT1G67870', u'AT1G67890', u'AT1G67900', u'AT1G67920', u'AT1G68110', u'AT1G68400', u'AT1G68440', u'AT1G68510', u'AT1G68520', u'AT1G68650', u'AT1G68660', u'AT1G68840', u'AT1G68940', u'AT1G69295', u'AT1G69570', u'AT1G69710', u'AT1G69740', u'AT1G69920', u'AT1G70410', u'AT1G70420', u'AT1G70570', u'AT1G70820', u'AT1G70900', u'AT1G70940', u'AT1G71697', u'AT1G71930', 
u'AT1G71960', u'AT1G72030', u'AT1G72290', u'AT1G72820', u'AT1G72940', u'AT1G73410', u'AT1G73660', u'AT1G73790', u'AT1G73820', u'AT1G73980', u'AT1G74230', u'AT1G74260', u'AT1G74350', u'AT1G74380', u'AT1G75010', u'AT1G75270', u'AT1G75820', u'AT1G76010', u'AT1G76090', u'AT1G76130', u'AT1G76180', u'AT1G76250', u'AT1G76610', u'AT1G76730', u'AT1G76790', u'AT1G77490', u'AT1G77940', u'AT1G78280', u'AT1G78460', u'AT1G78490', u'AT1G78580', u'AT1G78600', u'AT1G78620', u'AT1G78650', u'AT1G78970', u'AT1G78995', u'AT1G79160', u'AT1G79360', u'AT1G79380', u'AT1G79460', u'AT1G79900', u'AT1G79920', u'AT1G80180', u'AT1G80270', u'AT1G80330', u'AT1G80350', u'AT1G80480', u'AT1G80690', u'AT1G80720', u'AT1G80830', u'AT1G80940', u'AT2G01110', u'AT2G01170', u'AT2G01450', u'AT2G01740', u'AT2G01750', u'AT2G01810', u'AT2G01890', u'AT2G03505', u'AT2G03590', u'AT2G03680', u'AT2G04039', u'AT2G04235', u'AT2G04460', u'AT2G04530', u'AT2G04795', u'AT2G04870', u'AT2G04890', u'AT2G05060', u'AT2G05260', u'AT2G05330', u'AT2G05440', u'AT2G05540', u'AT2G05720', u'AT2G05790', u'AT2G06000', u'AT2G06020', u'AT2G06925', u'AT2G07630', u'AT2G13560', u'AT2G13610', u'AT2G13660', u'AT2G13810', u'AT2G14070', u'AT2G14520', u'AT2G14690', u'AT2G14835', u'AT2G14870', u'AT2G14960', u'AT2G15580', u'AT2G15695', u'AT2G15830', u'AT2G16730', u'AT2G16750', u'AT2G17036', u'AT2G17220', u'AT2G17240', u'AT2G17880', u'AT2G18050', u'AT2G18160', u'AT2G18170', u'AT2G18410', u'AT2G18420', u'AT2G18470', u'AT2G19110', u'AT2G19470', u'AT2G19560', u'AT2G19700', u'AT2G19760', u'AT2G19850', u'AT2G19990', u'AT2G20120', u'AT2G20140', u'AT2G20170', u'AT2G20450', u'AT2G20610', u'AT2G20630', u'AT2G20725', u'AT2G20760', u'AT2G20875', u'AT2G20920', u'AT2G21130', u'AT2G21185', u'AT2G21400', u'AT2G21410', u'AT2G21580', u'AT2G21730', u'AT2G21790', u'AT2G22030', u'AT2G22400', u'AT2G22540', u'AT2G22650', u'AT2G22900', u'AT2G23050', u'AT2G23790', u'AT2G23820', u'AT2G23900', u'AT2G23930', u'AT2G24170', u'AT2G24210', u'AT2G24285', u'AT2G24580', 
u'AT2G24860', u'AT2G25590', u'AT2G25680', u'AT2G25690', u'AT2G25840', u'AT2G25920', u'AT2G26200', u'AT2G26360', u'AT2G26640', u'AT2G26650', u'AT2G26690', u'AT2G26730', u'AT2G27040', u'AT2G27100', u'AT2G27180', u'AT2G27300', u'AT2G27350', u'AT2G27385', u'AT2G27500', u'AT2G27510', u'AT2G27520', u'AT2G27810', u'AT2G27970', u'AT2G28000', u'AT2G28080', u'AT2G28190', u'AT2G28340', u'AT2G28400', u'AT2G28540', u'AT2G28670', u'AT2G28930', u'AT2G29120', u'AT2G29200', u'AT2G29340', u'AT2G29500', u'AT2G30440', u'AT2G30695', u'AT2G30780', u'AT2G31100', u'AT2G31140', u'AT2G31170', u'AT2G31250', u'AT2G31260', u'AT2G31400', u'AT2G31560', u'AT2G31570', u'AT2G31650', u'AT2G31660', u'AT2G32080', u'AT2G32090', u'AT2G32380', u'AT2G32520', u'AT2G32710', u'AT2G32950', u'AT2G33100', u'AT2G33370', u'AT2G33430', u'AT2G33440', u'AT2G33640', u'AT2G33780', u'AT2G33990', u'AT2G34240', u'AT2G34810', u'AT2G35140', u'AT2G35605', u'AT2G36080', u'AT2G36220', u'AT2G36240', u'AT2G36390', u'AT2G37035', u'AT2G37180', u'AT2G37230', u'AT2G37530', u'AT2G37580', u'AT2G37620', u'AT2G37640', u'AT2G38020', u'AT2G38080', u'AT2G38120', u'AT2G38160', u'AT2G38170', u'AT2G38250', u'AT2G38370', u'AT2G38400', u'AT2G38460', u'AT2G38500', u'AT2G38540', u'AT2G38580', u'AT2G38630', u'AT2G38650', u'AT2G39250', u'AT2G39310', u'AT2G39330', u'AT2G39460', u'AT2G39570', u'AT2G39630', u'AT2G39795', u'AT2G39830', u'AT2G39920', u'AT2G40080', u'AT2G40250', u'AT2G40280', u'AT2G40530', u'AT2G40610', u'AT2G40630', u'AT2G40810', u'AT2G41010', u'AT2G41020', u'AT2G41150', u'AT2G41170', u'AT2G41240', u'AT2G41500', u'AT2G41650', u'AT2G42110', u'AT2G42290', u'AT2G42380', u'AT2G42400', u'AT2G42780', u'AT2G42860', u'AT2G42890', u'AT2G42990', u'AT2G43110', u'AT2G43200', u'AT2G43570', u'AT2G43600', u'AT2G44030', u'AT2G44190', u'AT2G44600', u'AT2G44670', u'AT2G44850', u'AT2G45140', u'AT2G45350', u'AT2G45660', u'AT2G46200', u'AT2G46240', u'AT2G46290', u'AT2G46450', u'AT2G46560', u'AT2G46620', u'AT2G46700', u'AT2G46850', u'AT2G47130', 
u'AT2G47180', u'AT2G47220', u'AT2G47260', u'AT2G47270', u'AT2G47370', u'AT2G47490', u'AT2G47710', u'AT2G47840', u'AT2G47860', u'AT2G47890', u'AT3G01180', u'AT3G01300', u'AT3G01490', u'AT3G01790', u'AT3G01860', u'AT3G01990', u'AT3G02020', u'AT3G02150', u'AT3G02200', u'AT3G02210', u'AT3G02280', u'AT3G02380', u'AT3G02610', u'AT3G02650', u'AT3G02750', u'AT3G02870', u'AT3G02970', u'AT3G03160', u'AT3G03190', u'AT3G03210', u'AT3G03220', u'AT3G03270', u'AT3G03300', u'AT3G03740', u'AT3G04010', u'AT3G04060', u'AT3G04210', u'AT3G04340', u'AT3G04520', u'AT3G04950', u'AT3G05100', u'AT3G05220', u'AT3G05520', u'AT3G06080', u'AT3G06145', u'AT3G06210', u'AT3G06483', u'AT3G06530', u'AT3G06730', u'AT3G06810', u'AT3G06840', u'AT3G06850', u'AT3G07320', u'AT3G07590', u'AT3G07640', u'AT3G07760', u'AT3G07800', u'AT3G08000', u'AT3G08640', u'AT3G08760', u'AT3G08920', u'AT3G08980', u'AT3G09030', u'AT3G09085', u'AT3G09260', u'AT3G09550', u'AT3G09630', u'AT3G09930', u'AT3G09950', u'AT3G09980', u'AT3G10450', u'AT3G10525', u'AT3G10985', u'AT3G11020', u'AT3G11230', u'AT3G11240', u'AT3G11560', u'AT3G11590', u'AT3G11620', u'AT3G12320', u'AT3G12580', u'AT3G12610', u'AT3G12640', u'AT3G12750', u'AT3G12770', u'AT3G12930', u'AT3G13230', u'AT3G13310', u'AT3G13445', u'AT3G13530', u'AT3G13690', u'AT3G14610', u'AT3G14740', u'AT3G14770', u'AT3G14900', u'AT3G14930', u'AT3G15000', u'AT3G15030', u'AT3G15095', u'AT3G15115', u'AT3G15160', u'AT3G15430', u'AT3G15510', u'AT3G15560', u'AT3G15570', u'AT3G15630', u'AT3G15790', u'AT3G15920', u'AT3G16000', u'AT3G16010', u'AT3G16280', u'AT3G16400', u'AT3G16450', u'AT3G16470', u'AT3G16650', u'AT3G16990', u'AT3G17000', u'AT3G17130', u'AT3G17330', u'AT3G17420', u'AT3G17570', u'AT3G17609', u'AT3G17611', u'AT3G17800', u'AT3G17810', u'AT3G17860', u'AT3G18000', u'AT3G18800', u'AT3G19090', u'AT3G19350', u'AT3G19553', u'AT3G19640', u'AT3G19720', u'AT3G19820', u'AT3G20000', u'AT3G20240', u'AT3G20290', u'AT3G20310', u'AT3G20490', u'AT3G20580', u'AT3G20660', u'AT3G20740', 
u'AT3G20810', u'AT3G21150', u'AT3G21200', u'AT3G21215', u'AT3G21270', u'AT3G21360', u'AT3G21610', u'AT3G21710', u'AT3G21720', u'AT3G21740', u'AT3G21870', u'AT3G22070', u'AT3G22104', u'AT3G22210', u'AT3G22220', u'AT3G22260', u'AT3G22300', u'AT3G22820', u'AT3G22840', u'AT3G22845', u'AT3G22890', u'AT3G22942', u'AT3G23000', u'AT3G23300', u'AT3G23340', u'AT3G23560', u'AT3G23830', u'AT3G23840', u'AT3G23920', u'AT3G23940', u'AT3G24250', u'AT3G24420', u'AT3G24570', u'AT3G24590', u'AT3G24760', u'AT3G25060', u'AT3G25120', u'AT3G25210', u'AT3G25400', u'AT3G25430', u'AT3G25480', u'AT3G25540', u'AT3G25570', u'AT3G25580', u'AT3G25805', u'AT3G25860', u'AT3G25890', u'AT3G26240', u'AT3G26390', u'AT3G26490', u'AT3G26618', u'AT3G26744', u'AT3G26850', u'AT3G26900', u'AT3G26935', u'AT3G26960', u'AT3G27050', u'AT3G27430', u'AT3G28060', u'AT3G28170', u'AT3G28270', u'AT3G28460', u'AT3G28670', u'AT3G28920', u'AT3G29035', u'AT3G29280', u'AT3G29570', u'AT3G30720', u'AT3G30775', u'AT3G33530', u'AT3G43600', u'AT3G43970', u'AT3G44070', u'AT3G44310', u'AT3G44360', u'AT3G44620', u'AT3G44890', u'AT3G44990', u'AT3G45140', u'AT3G45150', u'AT3G45610', u'AT3G45850', u'AT3G45870', u'AT3G46100', u'AT3G46210', u'AT3G46320', u'AT3G46440', u'AT3G46540', u'AT3G46580', u'AT3G46630', u'AT3G46970', u'AT3G46990', u'AT3G47080', u'AT3G47420', u'AT3G47450', u'AT3G47640', u'AT3G47700', u'AT3G47800', u'AT3G47860', u'AT3G48160', u'AT3G48220', u'AT3G48250', u'AT3G48350', u'AT3G48360', u'AT3G48390', u'AT3G48410', u'AT3G48430', u'AT3G48450', u'AT3G48610', u'AT3G48690', u'AT3G48730', u'AT3G48740', u'AT3G49040', u'AT3G49050', u'AT3G49170', u'AT3G49400', u'AT3G49810', u'AT3G49990', u'AT3G50330', u'AT3G50340', u'AT3G50700', u'AT3G50860', u'AT3G51000', u'AT3G51090', u'AT3G51140', u'AT3G51150', u'AT3G51190', u'AT3G51520', u'AT3G51670', u'AT3G52155', u'AT3G52170', u'AT3G52340', u'AT3G52360', u'AT3G52525', u'AT3G52570', u'AT3G52840', u'AT3G52940', u'AT3G53210', u'AT3G53410', u'AT3G53490', u'AT3G53530', u'AT3G53670', 
u'AT3G53950', u'AT3G53990', u'AT3G54260', u'AT3G54400', u'AT3G54600', u'AT3G54770', u'AT3G54800', u'AT3G54810', u'AT3G55370', u'AT3G55400', u'AT3G55410', u'AT3G55420', u'AT3G55460', u'AT3G55530', u'AT3G55880', u'AT3G56070', u'AT3G56190', u'AT3G56360', u'AT3G56370', u'AT3G56430', u'AT3G56490', u'AT3G56590', u'AT3G56650', u'AT3G56670', u'AT3G56880', u'AT3G56990', u'AT3G57040', u'AT3G57070', u'AT3G57100', u'AT3G57120', u'AT3G57140', u'AT3G57430', u'AT3G57470', u'AT3G57650', u'AT3G57740', u'AT3G57910', u'AT3G57970', u'AT3G58020', u'AT3G58110', u'AT3G58190', u'AT3G58210', u'AT3G58220', u'AT3G58270', u'AT3G58430', u'AT3G58750', u'AT3G59210', u'AT3G59410', u'AT3G59670', u'AT3G59820', u'AT3G59900', u'AT3G60020', u'AT3G60390', u'AT3G60580', u'AT3G60590', u'AT3G60680', u'AT3G60770', u'AT3G61060', u'AT3G61070', u'AT3G61140', u'AT3G61770', u'AT3G61890', u'AT3G61940', u'AT3G61960', u'AT3G62030', u'AT3G62070', u'AT3G62650', u'AT3G62660', u'AT3G62800', u'AT3G62820', u'AT3G62910', u'AT3G62970', u'AT3G63210', u'AT3G63290', u'AT3G63320', u'AT3G63480', u'AT3G63510', u'AT4G00030', u'AT4G00230', u'AT4G00610', u'AT4G01050', u'AT4G01690', u'AT4G02260', u'AT4G02840', u'AT4G03180', u'AT4G03415', u'AT4G04190', u'AT4G04220', u'AT4G04330', u'AT4G04830', u'AT4G04860', u'AT4G04890', u'AT4G05370', u'AT4G05400', u'AT4G07820', u'AT4G07990', u'AT4G08330', u'AT4G08460', u'AT4G08790', u'AT4G09730', u'AT4G09760', u'AT4G10060', u'AT4G10150', u'AT4G10300', u'AT4G10610', u'AT4G10840', u'AT4G11090', u'AT4G11200', u'AT4G11230', u'AT4G11270', u'AT4G11800', u'AT4G11940', u'AT4G12240', u'AT4G12980', u'AT4G13100', u'AT4G13220', u'AT4G13250', u'AT4G13550', u'AT4G13690', u'AT4G13770', u'AT4G13870', u'AT4G14070', u'AT4G14230', u'AT4G14310', u'AT4G14410', u'AT4G14420', u'AT4G14440', u'AT4G14550', u'AT4G14730', u'AT4G14920', u'AT4G15090', u'AT4G15210', u'AT4G15380', u'AT4G15420', u'AT4G16330', u'AT4G16370', u'AT4G16490', u'AT4G16563', u'AT4G16650', u'AT4G16670', u'AT4G16990', u'AT4G17010', u'AT4G17070', 
u'AT4G17090', u'AT4G17110', u'AT4G17245', u'AT4G17530', u'AT4G17810', u'AT4G17870', u'AT4G17880', u'AT4G17950', u'AT4G18130', u'AT4G18170', u'AT4G18240', u'AT4G18530', u'AT4G18600', u'AT4G18610', u'AT4G18810', u'AT4G18975', u'AT4G19003', u'AT4G19020', u'AT4G19040', u'AT4G19160', u'AT4G19190', u'AT4G19230', u'AT4G19390', u'AT4G19410', u'AT4G19460', u'AT4G19470', u'AT4G19510', u'AT4G19720', u'AT4G19830', u'AT4G19840', u'AT4G19860', u'AT4G19900', u'AT4G20130', u'AT4G20760', u'AT4G20830', u'AT4G20840', u'AT4G21180', u'AT4G21210', u'AT4G22130', u'AT4G22756', u'AT4G22820', u'AT4G23010', u'AT4G23220', u'AT4G23430', u'AT4G23600', u'AT4G23610', u'AT4G24040', u'AT4G24230', u'AT4G24270', u'AT4G24460', u'AT4G24470', u'AT4G24510', u'AT4G24670', u'AT4G24700', u'AT4G24810', u'AT4G24970', u'AT4G25030', u'AT4G25500', u'AT4G25680', u'AT4G26555', u'AT4G26610', u'AT4G26850', u'AT4G26870', u'AT4G26910', u'AT4G26940', u'AT4G27250', u'AT4G27340', u'AT4G27510', u'AT4G27520', u'AT4G27740', u'AT4G28200', u'AT4G28290', u'AT4G28510', u'AT4G29110', u'AT4G29190', u'AT4G29250', u'AT4G29740', u'AT4G30190', u'AT4G30530', u'AT4G30650', u'AT4G30680', u'AT4G30690', u'AT4G30720', u'AT4G30760', u'AT4G30800', u'AT4G30810', u'AT4G30950', u'AT4G31060', u'AT4G31120', u'AT4G31240', u'AT4G31290', u'AT4G31460', u'AT4G31710', u'AT4G31720', u'AT4G31730', u'AT4G31790', u'AT4G32150', u'AT4G32340', u'AT4G32350', u'AT4G32610', u'AT4G32860', u'AT4G32960', u'AT4G33560', u'AT4G33670', u'AT4G33980', u'AT4G34120', u'AT4G34250', u'AT4G34540', u'AT4G34630', u'AT4G34750', u'AT4G34760', u'AT4G34790', u'AT4G34820', u'AT4G34910', u'AT4G35300', u'AT4G35350', u'AT4G35420', u'AT4G35720', u'AT4G35830', u'AT4G36250', u'AT4G36420', u'AT4G36870', u'AT4G36970', u'AT4G37460', u'AT4G37510', u'AT4G37800', u'AT4G37900', u'AT4G38050', u'AT4G38170', u'AT4G38380', u'AT4G38470', u'AT4G38510', u'AT4G38620', u'AT4G38660', u'AT4G38700', u'AT4G38810', u'AT4G38830', u'AT4G39050', u'AT4G39360', u'AT4G39410', u'AT4G39460', u'AT4G39570', 
u'AT4G39940', u'AT4G39970', u'AT5G01075', u'AT5G01310', u'AT5G01500', u'AT5G01510', u'AT5G01740', u'AT5G01750', u'AT5G01770', u'AT5G01810', u'AT5G01820', u'AT5G01830', u'AT5G01990', u'AT5G02260', u'AT5G02530', u'AT5G02540', u'AT5G02760', u'AT5G02810', u'AT5G02870', u'AT5G02890', u'AT5G02960', u'AT5G03150', u'AT5G03300', u'AT5G03370', u'AT5G03430', u'AT5G03480', u'AT5G03545', u'AT5G03600', u'AT5G03720', u'AT5G03760', u'AT5G03840', u'AT5G03960', u'AT5G04040', u'AT5G04080', u'AT5G04160', u'AT5G04190', u'AT5G04250', u'AT5G04530', u'AT5G04550', u'AT5G04700', u'AT5G04790', u'AT5G04810', u'AT5G05080', u'AT5G05200', u'AT5G05300', u'AT5G05320', u'AT5G05370', u'AT5G05430', u'AT5G05470', u'AT5G05520', u'AT5G06110', u'AT5G06130', u'AT5G06180', u'AT5G06210', u'AT5G06850', u'AT5G06950', u'AT5G06980', u'AT5G07120', u'AT5G07180', u'AT5G07220', u'AT5G07250', u'AT5G07540', u'AT5G07580', u'AT5G07610', u'AT5G07830', u'AT5G07840', u'AT5G07900', u'AT5G08141', u'AT5G08170', u'AT5G08300', u'AT5G08330', u'AT5G08370', u'AT5G08400', u'AT5G08490', u'AT5G08580', u'AT5G08610', u'AT5G09300', u'AT5G09320', u'AT5G09330', u'AT5G09390', u'AT5G09400', u'AT5G09410', u'AT5G09550', u'AT5G09690', u'AT5G09750', u'AT5G09770', u'AT5G09880', u'AT5G10070', u'AT5G10150', u'AT5G10340', u'AT5G10350', u'AT5G10430', u'AT5G10560', u'AT5G10860', u'AT5G10920', u'AT5G11070', u'AT5G11090', u'AT5G11150', u'AT5G11420', u'AT5G11490', u'AT5G11540', u'AT5G11580', u'AT5G11700', u'AT5G11810', u'AT5G11890', u'AT5G12140', u'AT5G13010', u'AT5G13060', u'AT5G13100', u'AT5G13120', u'AT5G13160', u'AT5G13260', u'AT5G13660', u'AT5G13730', u'AT5G13810', u'AT5G13910', u'AT5G13980', u'AT5G14070', u'AT5G14210', u'AT5G14290', u'AT5G14680', u'AT5G14730', u'AT5G14910', u'AT5G14960', u'AT5G15170', u'AT5G15230', u'AT5G15260', u'AT5G15310', u'AT5G15490', u'AT5G15700', u'AT5G15740', u'AT5G15750', u'AT5G15770', u'AT5G15820', u'AT5G15850', u'AT5G15880', u'AT5G16030', u'AT5G16220', u'AT5G16250', u'AT5G16280', u'AT5G16390', u'AT5G16770', 
u'AT5G16810', u'AT5G16820', u'AT5G17080', u'AT5G17090', u'AT5G17210', u'AT5G17230', u'AT5G17280', u'AT5G17300', u'AT5G17640', u'AT5G17670', u'AT5G17770', u'AT5G18140', u'AT5G18650', u'AT5G18710', u'AT5G18850', u'AT5G19010', u'AT5G19080', u'AT5G19110', u'AT5G19120', u'AT5G19690', u'AT5G19900', u'AT5G20120', u'AT5G20130', u'AT5G20160', u'AT5G20630', u'AT5G20670', u'AT5G20690', u'AT5G20750', u'AT5G20900', u'AT5G21105', u'AT5G22740', u'AT5G22875', u'AT5G22880', u'AT5G22890', u'AT5G23110', u'AT5G23170', u'AT5G23210', u'AT5G23240', u'AT5G23390', u'AT5G23660', u'AT5G23950', u'AT5G24000', u'AT5G24300', u'AT5G24470', u'AT5G24500', u'AT5G24520', u'AT5G24820', u'AT5G24910', u'AT5G25170', u'AT5G25190', u'AT5G25210', u'AT5G25460', u'AT5G25500', u'AT5G25530', u'AT5G25900', u'AT5G26570', u'AT5G26680', u'AT5G26820', u'AT5G26940', u'AT5G26990', u'AT5G27230', u'AT5G27320', u'AT5G27380', u'AT5G27520', u'AT5G27560', u'AT5G27620', u'AT5G27660', u'AT5G27820', u'AT5G28640', u'AT5G28750', u'AT5G28910', u'AT5G33320', u'AT5G33330', u'AT5G35220', u'AT5G35320', u'AT5G35360', u'AT5G35910', u'AT5G35960', u'AT5G37130', u'AT5G37470', u'AT5G37490', u'AT5G37570', u'AT5G37670', u'AT5G37780', u'AT5G38300', u'AT5G38510', u'AT5G38530', u'AT5G38630', u'AT5G38650', u'AT5G38720', u'AT5G39080', u'AT5G39250', u'AT5G39410', u'AT5G39520', u'AT5G39610', u'AT5G39640', u'AT5G39760', u'AT5G39890', u'AT5G39980', u'AT5G40160', u'AT5G40200', u'AT5G40250', u'AT5G40330', u'AT5G40370', u'AT5G40480', u'AT5G40540', u'AT5G40700', u'AT5G40790', u'AT5G41050', u'AT5G41370', u'AT5G41410', u'AT5G41610', u'AT5G41650', u'AT5G41770', u'AT5G41970', u'AT5G42100', u'AT5G42390', u'AT5G42600', u'AT5G42610', u'AT5G42900', u'AT5G43130', u'AT5G43290', u'AT5G43320', u'AT5G43330', u'AT5G43450', u'AT5G43560', u'AT5G43630', u'AT5G43710', u'AT5G43780', u'AT5G43840', u'AT5G43880', u'AT5G44060', u'AT5G44080', u'AT5G44100', u'AT5G44110', u'AT5G44160', u'AT5G44250', u'AT5G44260', u'AT5G44330', u'AT5G44340', u'AT5G44400', u'AT5G44520', 
u'AT5G44580', u'AT5G44720', u'AT5G44850', u'AT5G45290', u'AT5G45330', u'AT5G45350', u'AT5G45410', u'AT5G45660', u'AT5G45800', u'AT5G45830', u'AT5G46510', u'AT5G46580', u'AT5G46590', u'AT5G46780', u'AT5G47010', u'AT5G47100', u'AT5G47360', u'AT5G47560', u'AT5G47640', u'AT5G47830', u'AT5G47870', u'AT5G48080', u'AT5G48150', u'AT5G48170', u'AT5G48370', u'AT5G48540', u'AT5G48570', u'AT5G48590', u'AT5G48680', u'AT5G48880', u'AT5G48900', u'AT5G49170', u'AT5G49280', u'AT5G49300', u'AT5G49450', u'AT5G49700', u'AT5G49910', u'AT5G50000', u'AT5G50375', u'AT5G50570', u'AT5G50660', u'AT5G50730', u'AT5G50790', u'AT5G50860', u'AT5G51040', u'AT5G51190', u'AT5G51480', u'AT5G51750', u'AT5G51790', u'AT5G52110', u'AT5G52150', u'AT5G52190', u'AT5G52290', u'AT5G52300', u'AT5G52430', u'AT5G52440', u'AT5G52460', u'AT5G52540', u'AT5G52600', u'AT5G52890', u'AT5G52950', u'AT5G52980', u'AT5G53030', u'AT5G53080', u'AT5G53090', u'AT5G53680', u'AT5G53850', u'AT5G53860', u'AT5G53950', u'AT5G54090', u'AT5G54190', u'AT5G54360', u'AT5G54540', u'AT5G54940', u'AT5G54960', u'AT5G54970', u'AT5G55050', u'AT5G55160', u'AT5G55390', u'AT5G55530', u'AT5G55700', u'AT5G55860', u'AT5G55900', u'AT5G55960', u'AT5G56010', u'AT5G56020', u'AT5G56110', u'AT5G56600', u'AT5G56630', u'AT5G56870', u'AT5G57160', u'AT5G57280', u'AT5G57520', u'AT5G57630', u'AT5G57740', u'AT5G57850', u'AT5G57940', u'AT5G58160', u'AT5G58230', u'AT5G58250', u'AT5G58310', u'AT5G58370', u'AT5G58650', u'AT5G59080', u'AT5G59170', u'AT5G59850', u'AT5G59870', u'AT5G60340', u'AT5G60660', u'AT5G60710', u'AT5G60930', u'AT5G61140', u'AT5G61170', u'AT5G61250', u'AT5G61270', u'AT5G61340', u'AT5G61380', u'AT5G61410', u'AT5G61420', u'AT5G61440', u'AT5G61480', u'AT5G61580', u'AT5G61600', u'AT5G61710', u'AT5G61790', u'AT5G61910', u'AT5G62130', u'AT5G62170', u'AT5G62360', u'AT5G62370', u'AT5G62380', u'AT5G62720', u'AT5G62800', u'AT5G62840', u'AT5G63050', u'AT5G63120', u'AT5G63290', u'AT5G63320', u'AT5G63550', u'AT5G63570', u'AT5G63600', u'AT5G63610', 
u'AT5G63690', u'AT5G63780', u'AT5G63910', u'AT5G64080', u'AT5G64190', u'AT5G64240', u'AT5G64480', u'AT5G64560', u'AT5G64790', u'AT5G64930', u'AT5G65110', u'AT5G65200', u'AT5G65340', u'AT5G65560', u'AT5G65925', u'AT5G65960', u'AT5G65970', u'AT5G66070', u'AT5G66080', u'AT5G66330', u'AT5G66540', u'AT5G66815', u'AT5G66900', u'AT5G67380', u'AT5G67385', u'AT5G67440']
#Ligterink 2014 gxe 0 3.85 2
#fi5 = 'genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85_2.txt'
#fo5 = 'report_eQTL_mapping_Ligterink_2014_gxe0_3.85_2.txt'
#eQTLReport(fi5,fo5)
#2633 1243 1307 1326 1390 83 63
#[u'AT1G01310', u'AT1G01810', u'AT1G02870', u'AT1G10840', u'AT1G19968', u'AT1G20030', u'AT1G25310', u'AT1G45010', u'AT1G49150', u'AT1G50290', u'AT1G59980', u'AT1G60360', u'AT1G61730', u'AT1G67635', u'AT1G68010', u'AT1G70860', u'AT1G72990', u'AT1G74430', u'AT1G78180', u'AT2G03770', u'AT2G05380', u'AT2G11430', u'AT2G16290', u'AT2G24285', u'AT2G25450', u'AT2G32080', u'AT2G34040', u'AT2G35040', u'AT2G38630', u'AT2G41220', u'AT2G42340', u'AT2G43680', u'AT3G05937', u'AT3G14690', u'AT3G17890', u'AT3G20930', u'AT3G22910', u'AT3G24480', u'AT3G26510', u'AT3G45620', u'AT3G47070', u'AT3G51890', u'AT3G61360', u'AT3G63350', u'AT4G08106', u'AT4G08940', u'AT4G09750', u'AT4G16690', u'AT4G21890', u'AT4G24450', u'AT4G30310', u'AT4G31840', u'AT5G02370', u'AT5G26000', u'AT5G32592', u'AT5G35020', u'AT5G38990', u'AT5G46750', u'AT5G48150', u'AT5G50120', u'AT5G56910', u'AT5G58070', u'AT5G62020']
#Ligterink 2014 gxe 1 2.7 2
#fi6 = 'genome_wide_eQTL_mapping_Ligterink_2014_gxe1_2.7_2.txt'
#fo6 = 'report_eQTL_mapping_Ligterink_2014_gxe1_2.7_2.txt'
#eQTLReport(fi6,fo6)
#2489 54 2419 70 2435 16 176
#[u'AT1G02000', u'AT1G02660', u'AT1G03120', u'AT1G05930', u'AT1G06690', u'AT1G07120', u'AT1G07200', u'AT1G10020', u'AT1G12411', u'AT1G19550', u'AT1G19570', u'AT1G21640', u'AT1G23660', u'AT1G26208', u'AT1G26975', u'AT1G28060', u'AT1G28327', u'AT1G29710', u'AT1G30010', u'AT1G30016', u'AT1G31960', u'AT1G33080', u'AT1G49720', u'AT1G53165', u'AT1G56180', u'AT1G56345', u'AT1G60500', u'AT1G62060', u'AT1G62695', u'AT1G65113', u'AT1G65500', u'AT1G66840', u'AT1G67105', u'AT1G68160', u'AT1G69060', u'AT1G69935', u'AT1G70150', u'AT1G70420', u'AT1G70750', u'AT1G70944', u'AT1G71330', u'AT1G74600', u'AT1G75800', u'AT1G75960', u'AT1G77700', u'AT1G78270', u'AT1G80350', u'AT2G02680', u'AT2G03800', u'AT2G04790', u'AT2G04864', u'AT2G06025', u'AT2G06235', u'AT2G10226', u'AT2G11405', u'AT2G11430', u'AT2G15960', u'AT2G18060', u'AT2G19580', u'AT2G24410', u'AT2G25370', u'AT2G27870', u'AT2G29270', u'AT2G29340', u'AT2G34790', u'AT2G39110', u'AT2G41060', u'AT2G41570', u'AT2G42440', u'AT2G44560', u'AT2G47310', u'AT2G47530', u'AT3G01240', u'AT3G03460', u'AT3G03680', u'AT3G05130', u'AT3G05905', u'AT3G07890', u'AT3G08780', u'AT3G20220', u'AT3G22886', u'AT3G23680', u'AT3G24070', u'AT3G26934', u'AT3G27960', u'AT3G30418', u'AT3G30817', u'AT3G45590', u'AT3G45780', u'AT3G45800', u'AT3G46387', u'AT3G46510', u'AT3G46960', u'AT3G46980', u'AT3G48920', u'AT3G49370', u'AT3G50480', u'AT3G50740', u'AT3G50810', u'AT3G51030', u'AT3G51110', u'AT3G53770', u'AT3G54440', u'AT3G54850', u'AT3G57170', u'AT3G57670', u'AT3G58420', u'AT3G59320', u'AT4G01200', u'AT4G05120', u'AT4G07666', u'AT4G08145', u'AT4G09984', u'AT4G10400', u'AT4G10650', u'AT4G11300', u'AT4G11880', u'AT4G12010', u'AT4G12240', u'AT4G13395', u'AT4G13500', u'AT4G14120', u'AT4G14270', u'AT4G15100', u'AT4G16960', u'AT4G17500', u'AT4G22650', u'AT4G23713', u'AT4G25650', u'AT4G26730', u'AT4G27800', u'AT4G30350', u'AT4G30900', u'AT4G31940', u'AT4G32924', u'AT4G37150', u'AT4G37280', u'AT4G38050', u'AT4G39230', u'AT5G01820', u'AT5G02000', u'AT5G02811', 
u'AT5G05750', u'AT5G07500', u'AT5G09711', u'AT5G10570', u'AT5G13890', u'AT5G15254', u'AT5G22240', u'AT5G22510', u'AT5G23405', u'AT5G23520', u'AT5G25000', u'AT5G26345', u'AT5G27885', u'AT5G28030', u'AT5G28180', u'AT5G28667', u'AT5G29337', u'AT5G35020', u'AT5G36228', u'AT5G40220', u'AT5G43660', u'AT5G47000', u'AT5G48140', u'AT5G52580', u'AT5G54330', u'AT5G55150', u'AT5G55505', u'AT5G56061', u'AT5G57820', u'AT5G63500', u'AT5G65420', u'AT5G66360', u'AT5G66490', u'AT5G66600']
#Keurentjes 2007 gxe 0 3.3 2
#fi7 = 'genome_wide_eQTL_mapping_Keurentjes_2007_gxe0_3.3_2.txt'
#fo7 = 'report_eQTL_mapping_Keurentjes_2007_gxe0_3.3_2.txt'
#eQTLReport(fi7,fo7)
#7017 1704 4467 2550 5313 846 1031
#[u'AT1G01710', u'AT1G01730', u'AT1G02450', u'AT1G02650', u'AT1G02860', u'AT1G02900', u'AT1G04130', u'AT1G04280', u'AT1G05000', u'AT1G05140', u'AT1G05610', u'AT1G05830', u'AT1G06100', u'AT1G06430', u'AT1G06730', u'AT1G06840', u'AT1G06960', u'AT1G08180', u'AT1G08460', u'AT1G08500', u'AT1G08630', u'AT1G08910', u'AT1G09430', u'AT1G09780', u'AT1G09840', u'AT1G10030', u'AT1G10200', u'AT1G10300', u'AT1G10500', u'AT1G10700', u'AT1G12410', u'AT1G13340', u'AT1G13930', u'AT1G14000', u'AT1G14150', u'AT1G14490', u'AT1G15260', u'AT1G15670', u'AT1G15830', u'AT1G16080', u'AT1G16390', u'AT1G16920', u'AT1G17190', u'AT1G17710', u'AT1G17860', u'AT1G17880', u'AT1G17890', u'AT1G17940', u'AT1G18630', u'AT1G19090', u'AT1G19180', u'AT1G19380', u'AT1G19660', u'AT1G19800', u'AT1G19960', u'AT1G20150', u'AT1G20410', u'AT1G20440', u'AT1G20450', u'AT1G21590', u'AT1G21760', u'AT1G22110', u'AT1G22270', u'AT1G22410', u'AT1G22430', u'AT1G22630', u'AT1G22690', u'AT1G22750', u'AT1G22900', u'AT1G24180', u'AT1G24240', u'AT1G24560', u'AT1G24590', u'AT1G24851', u'AT1G26170', u'AT1G26360', u'AT1G26450', u'AT1G26620', u'AT1G26940', u'AT1G27210', u'AT1G28150', u'AT1G28400', u'AT1G29580', u'AT1G29760', u'AT1G29830', u'AT1G30270', u'AT1G30480', u'AT1G30510', u'AT1G30755', u'AT1G30880', u'AT1G31120', u'AT1G31180', u'AT1G31860', u'AT1G32460', u'AT1G32550', u'AT1G32580', u'AT1G33855', u'AT1G34000', u'AT1G34120', u'AT1G34480', u'AT1G34780', u'AT1G35320', u'AT1G35550', u'AT1G41900', u'AT1G42540', u'AT1G42580', u'AT1G43850', u'AT1G43910', u'AT1G44170', u'AT1G44800', u'AT1G45474', u'AT1G45688', u'AT1G47240', u'AT1G47420', u'AT1G47530', u'AT1G47750', u'AT1G47960', u'AT1G47990', u'AT1G48090', u'AT1G48160', u'AT1G48320', u'AT1G48330', u'AT1G48380', u'AT1G48600', u'AT1G48640', u'AT1G48900', u'AT1G49000', u'AT1G49390', u'AT1G49440', u'AT1G49510', u'AT1G49980', u'AT1G50280', u'AT1G51620', u'AT1G51690', u'AT1G51800', u'AT1G52030', u'AT1G52200', u'AT1G52410', u'AT1G52730', u'AT1G52760', u'AT1G54060', u'AT1G55030', 
u'AT1G55680', u'AT1G56260', u'AT1G56650', u'AT1G56660', u'AT1G57540', u'AT1G58025', u'AT1G60130', u'AT1G60350', u'AT1G60460', u'AT1G60610', u'AT1G60680', u'AT1G60990', u'AT1G61380', u'AT1G61540', u'AT1G61800', u'AT1G61820', u'AT1G61890', u'AT1G62300', u'AT1G62660', u'AT1G62910', u'AT1G63050', u'AT1G63090', u'AT1G63180', u'AT1G63630', u'AT1G64355', u'AT1G64700', u'AT1G64770', u'AT1G64850', u'AT1G64920', u'AT1G65030', u'AT1G65140', u'AT1G65290', u'AT1G65365', u'AT1G65400', u'AT1G66160', u'AT1G66230', u'AT1G66270', u'AT1G66430', u'AT1G66490', u'AT1G66530', u'AT1G66700', u'AT1G66740', u'AT1G67190', u'AT1G67330', u'AT1G67360', u'AT1G67600', u'AT1G67810', u'AT1G68050', u'AT1G68440', u'AT1G68540', u'AT1G68590', u'AT1G68820', u'AT1G69430', u'AT1G69510', u'AT1G69530', u'AT1G69730', u'AT1G69890', u'AT1G69900', u'AT1G69970', u'AT1G70550', u'AT1G70680', u'AT1G71230', u'AT1G71410', u'AT1G71790', u'AT1G71880', u'AT1G72300', u'AT1G72330', u'AT1G72370', u'AT1G72520', u'AT1G72950', u'AT1G73010', u'AT1G73230', u'AT1G73540', u'AT1G73700', u'AT1G73840', u'AT1G74070', u'AT1G74590', u'AT1G74650', u'AT1G74790', u'AT1G74910', u'AT1G75040', u'AT1G75530', u'AT1G75600', u'AT1G75680', u'AT1G76030', u'AT1G76520', u'AT1G76530', u'AT1G76600', u'AT1G76680', u'AT1G76690', u'AT1G76790', u'AT1G77710', u'AT1G77850', u'AT1G78700', u'AT1G78820', u'AT1G79020', u'AT1G79520', u'AT1G79550', u'AT1G79940', u'AT1G79970', u'AT1G80290', u'AT1G80360', u'AT1G80380', u'AT1G80560', u'AT2G01320', u'AT2G01760', u'AT2G02100', u'AT2G02130', u'AT2G02220', u'AT2G02930', u'AT2G04400', u'AT2G04460', u'AT2G04900', u'AT2G05590', u'AT2G06005', u'AT2G06050', u'AT2G06490', u'AT2G07330', u'AT2G07510', u'AT2G09970', u'AT2G10940', u'AT2G11690', u'AT2G11830', u'AT2G11890', u'AT2G13720', u'AT2G14460', u'AT2G14750', u'AT2G15490', u'AT2G16060', u'AT2G16860', u'AT2G17240', u'AT2G17450', u'AT2G17720', u'AT2G18200', u'AT2G18210', u'AT2G18370', u'AT2G19190', u'AT2G19800', u'AT2G19850', u'AT2G19910', u'AT2G20140', u'AT2G20570', 
u'AT2G21140', u'AT2G21150', u'AT2G21160', u'AT2G21180', u'AT2G21560', u'AT2G21690', u'AT2G21820', u'AT2G21850', u'AT2G22030', u'AT2G22170', u'AT2G22250', u'AT2G22480', u'AT2G22880', u'AT2G22900', u'AT2G23180', u'AT2G23320', u'AT2G23450', u'AT2G24560', u'AT2G24860', u'AT2G25200', u'AT2G25410', u'AT2G26440', u'AT2G26470', u'AT2G26890', u'AT2G27090', u'AT2G27510', u'AT2G27910', u'AT2G28140', u'AT2G28270', u'AT2G28550', u'AT2G28720', u'AT2G28890', u'AT2G29400', u'AT2G29450', u'AT2G29890', u'AT2G30250', u'AT2G30260', u'AT2G30430', u'AT2G30620', u'AT2G30650', u'AT2G31180', u'AT2G31360', u'AT2G31670', u'AT2G31680', u'AT2G31790', u'AT2G31880', u'AT2G32060', u'AT2G32240', u'AT2G32990', u'AT2G33070', u'AT2G33530', u'AT2G33780', u'AT2G33850', u'AT2G34310', u'AT2G34430', u'AT2G34630', u'AT2G34860', u'AT2G35290', u'AT2G35380', u'AT2G35460', u'AT2G35580', u'AT2G35800', u'AT2G36340', u'AT2G36390', u'AT2G36540', u'AT2G36610', u'AT2G36690', u'AT2G36710', u'AT2G36840', u'AT2G36930', u'AT2G37040', u'AT2G37180', u'AT2G37260', u'AT2G37520', u'AT2G37640', u'AT2G37710', u'AT2G37750', u'AT2G37970', u'AT2G38120', u'AT2G38170', u'AT2G38380', u'AT2G38470', u'AT2G38490', u'AT2G38750', u'AT2G38860', u'AT2G38870', u'AT2G39010', u'AT2G39100', u'AT2G39240', u'AT2G39400', u'AT2G39660', u'AT2G40300', u'AT2G40475', u'AT2G40600', u'AT2G40750', u'AT2G40765', u'AT2G41010', u'AT2G41140', u'AT2G41180', u'AT2G41250', u'AT2G41480', u'AT2G41530', u'AT2G41560', u'AT2G42360', u'AT2G42770', u'AT2G42840', u'AT2G43010', u'AT2G43290', u'AT2G43510', u'AT2G43520', u'AT2G43580', u'AT2G43590', u'AT2G43750', u'AT2G44380', u'AT2G44450', u'AT2G44650', u'AT2G45210', u'AT2G45470', u'AT2G45630', u'AT2G45660', u'AT2G46310', u'AT2G46430', u'AT2G46490', u'AT2G46640', u'AT2G46650', u'AT2G47140', u'AT2G47730', u'AT2G48090', u'AT3G01060', u'AT3G01850', u'AT3G01860', u'AT3G01940', u'AT3G02140', u'AT3G02960', u'AT3G03290', u'AT3G03640', u'AT3G03830', u'AT3G04110', u'AT3G04310', u'AT3G04360', u'AT3G04605', u'AT3G04650', 
u'AT3G04770', u'AT3G04880', u'AT3G04940', u'AT3G05410', u'AT3G05730', u'AT3G06040', u'AT3G06110', u'AT3G06300', u'AT3G06483', u'AT3G06510', u'AT3G06810', u'AT3G06890', u'AT3G07170', u'AT3G07200', u'AT3G07540', u'AT3G08000', u'AT3G08530', u'AT3G09270', u'AT3G09310', u'AT3G09320', u'AT3G09830', u'AT3G10220', u'AT3G10410', u'AT3G10910', u'AT3G11090', u'AT3G11210', u'AT3G11690', u'AT3G11930', u'AT3G12040', u'AT3G12060', u'AT3G12500', u'AT3G12610', u'AT3G12750', u'AT3G13080', u'AT3G13100', u'AT3G13260', u'AT3G13450', u'AT3G13610', u'AT3G13625', u'AT3G13790', u'AT3G13910', u'AT3G14067', u'AT3G14225', u'AT3G14510', u'AT3G14940', u'AT3G14990', u'AT3G15450', u'AT3G15510', u'AT3G16030', u'AT3G16150', u'AT3G16210', u'AT3G16400', u'AT3G16410', u'AT3G16460', u'AT3G16480', u'AT3G16710', u'AT3G16800', u'AT3G16990', u'AT3G17680', u'AT3G18240', u'AT3G18280', u'AT3G18820', u'AT3G18940', u'AT3G19170', u'AT3G19500', u'AT3G19660', u'AT3G20083', u'AT3G20330', u'AT3G20370', u'AT3G20440', u'AT3G20470', u'AT3G21110', u'AT3G21175', u'AT3G21640', u'AT3G21910', u'AT3G22060', u'AT3G22231', u'AT3G22240', u'AT3G22250', u'AT3G22260', u'AT3G22330', u'AT3G22380', u'AT3G22420', u'AT3G22422', u'AT3G22435', u'AT3G22760', u'AT3G22840', u'AT3G22890', u'AT3G23090', u'AT3G23190', u'AT3G23710', u'AT3G24030', u'AT3G24500', u'AT3G25020', u'AT3G25070', u'AT3G25410', u'AT3G25600', u'AT3G25710', u'AT3G25740', u'AT3G26040', u'AT3G26500', u'AT3G27920', u'AT3G28925', u'AT3G30180', u'AT3G30280', u'AT3G30415', u'AT3G31390', u'AT3G33530', u'AT3G42660', u'AT3G42860', u'AT3G43120', u'AT3G43520', u'AT3G43740', u'AT3G43960', u'AT3G43980', u'AT3G44120', u'AT3G44190', u'AT3G44230', u'AT3G44310', u'AT3G44450', u'AT3G44590', u'AT3G44670', u'AT3G44860', u'AT3G45040', u'AT3G45140', u'AT3G45470', u'AT3G45710', u'AT3G45970', u'AT3G46030', u'AT3G46430', u'AT3G46490', u'AT3G46930', u'AT3G47080', u'AT3G47520', u'AT3G47550', u'AT3G47640', u'AT3G47800', u'AT3G48000', u'AT3G48100', u'AT3G48130', u'AT3G48140', u'AT3G48220', 
u'AT3G48360', u'AT3G48410', u'AT3G48740', u'AT3G49320', u'AT3G49510', u'AT3G49780', u'AT3G49820', u'AT3G49890', u'AT3G50060', u'AT3G50090', u'AT3G50260', u'AT3G50270', u'AT3G50970', u'AT3G50980', u'AT3G51000', u'AT3G51130', u'AT3G51660', u'AT3G51790', u'AT3G51860', u'AT3G51895', u'AT3G51940', u'AT3G51960', u'AT3G52155', u'AT3G52360', u'AT3G52400', u'AT3G52430', u'AT3G52450', u'AT3G52540', u'AT3G52960', u'AT3G53110', u'AT3G53120', u'AT3G53620', u'AT3G53670', u'AT3G53980', u'AT3G54080', u'AT3G54380', u'AT3G54510', u'AT3G54530', u'AT3G54620', u'AT3G54850', u'AT3G55005', u'AT3G55050', u'AT3G55120', u'AT3G55130', u'AT3G55980', u'AT3G56060', u'AT3G56170', u'AT3G56210', u'AT3G56360', u'AT3G56400', u'AT3G56480', u'AT3G56620', u'AT3G56790', u'AT3G57230', u'AT3G57920', u'AT3G57990', u'AT3G58200', u'AT3G58600', u'AT3G59070', u'AT3G59080', u'AT3G59340', u'AT3G59880', u'AT3G61010', u'AT3G61140', u'AT3G62040', u'AT3G62420', u'AT3G63350', u'AT3G63440', u'AT3G63470', u'AT4G01480', u'AT4G01700', u'AT4G01870', u'AT4G01950', u'AT4G02530', u'AT4G02750', u'AT4G03020', u'AT4G03260', u'AT4G03460', u'AT4G03510', u'AT4G04020', u'AT4G04220', u'AT4G04840', u'AT4G04960', u'AT4G05010', u'AT4G05020', u'AT4G05400', u'AT4G07730', u'AT4G08290', u'AT4G08570', u'AT4G08685', u'AT4G09040', u'AT4G09670', u'AT4G10300', u'AT4G10450', u'AT4G11190', u'AT4G11280', u'AT4G11290', u'AT4G11320', u'AT4G11720', u'AT4G11800', u'AT4G12030', u'AT4G12710', u'AT4G12730', u'AT4G13470', u'AT4G13630', u'AT4G14130', u'AT4G14430', u'AT4G14650', u'AT4G14750', u'AT4G15530', u'AT4G15560', u'AT4G15620', u'AT4G15640', u'AT4G15650', u'AT4G15760', u'AT4G15940', u'AT4G16140', u'AT4G16310', u'AT4G16370', u'AT4G16390', u'AT4G16520', u'AT4G16950', u'AT4G17440', u'AT4G17500', u'AT4G17610', u'AT4G17870', u'AT4G18060', u'AT4G18170', u'AT4G18220', u'AT4G18250', u'AT4G18800', u'AT4G18880', u'AT4G18930', u'AT4G18950', u'AT4G19380', u'AT4G19560', u'AT4G19690', u'AT4G19810', u'AT4G19820', u'AT4G20700', u'AT4G20860', u'AT4G21105', 
u'AT4G21400', u'AT4G21990', u'AT4G22100', u'AT4G22720', u'AT4G22900', u'AT4G23610', u'AT4G24020', u'AT4G24090', u'AT4G24780', u'AT4G24990', u'AT4G25030', u'AT4G25230', u'AT4G26060', u'AT4G26510', u'AT4G26520', u'AT4G26670', u'AT4G26870', u'AT4G26910', u'AT4G27450', u'AT4G27500', u'AT4G27680', u'AT4G27740', u'AT4G27840', u'AT4G28050', u'AT4G28110', u'AT4G28280', u'AT4G29030', u'AT4G29480', u'AT4G29570', u'AT4G29760', u'AT4G29770', u'AT4G29905', u'AT4G30210', u'AT4G30430', u'AT4G30580', u'AT4G31910', u'AT4G33490', u'AT4G33580', u'AT4G34050', u'AT4G34120', u'AT4G34160', u'AT4G34180', u'AT4G34640', u'AT4G34710', u'AT4G34730', u'AT4G34770', u'AT4G35180', u'AT4G36040', u'AT4G36220', u'AT4G36360', u'AT4G36590', u'AT4G36720', u'AT4G37010', u'AT4G37070', u'AT4G37610', u'AT4G37980', u'AT4G38010', u'AT4G38020', u'AT4G38460', u'AT4G38950', u'AT4G39300', u'AT4G39470', u'AT4G39780', u'AT4G39830', u'AT4G39940', u'AT5G01420', u'AT5G01480', u'AT5G01710', u'AT5G01780', u'AT5G02000', u'AT5G02290', u'AT5G02580', u'AT5G03070', u'AT5G03120', u'AT5G03640', u'AT5G03920', u'AT5G04530', u'AT5G04720', u'AT5G04760', u'AT5G05110', u'AT5G05260', u'AT5G05320', u'AT5G05930', u'AT5G05960', u'AT5G06040', u'AT5G06100', u'AT5G06200', u'AT5G06360', u'AT5G06570', u'AT5G06580', u'AT5G06590', u'AT5G06850', u'AT5G07000', u'AT5G07180', u'AT5G07360', u'AT5G07700', u'AT5G07980', u'AT5G08000', u'AT5G08260', u'AT5G08590', u'AT5G08600', u'AT5G08660', u'AT5G09220', u'AT5G09480', u'AT5G09490', u'AT5G09530', u'AT5G09540', u'AT5G09980', u'AT5G10030', u'AT5G10290', u'AT5G10400', u'AT5G10930', u'AT5G11000', u'AT5G11330', u'AT5G11590', u'AT5G11640', u'AT5G11840', u'AT5G12010', u'AT5G12120', u'AT5G12170', u'AT5G12210', u'AT5G12400', u'AT5G12420', u'AT5G13180', u'AT5G13240', u'AT5G13550', u'AT5G13580', u'AT5G13800', u'AT5G14020', u'AT5G14090', u'AT5G14320', u'AT5G15160', u'AT5G15960', u'AT5G15970', u'AT5G16370', u'AT5G16470', u'AT5G16780', u'AT5G17000', u'AT5G17020', u'AT5G17110', u'AT5G17140', u'AT5G17190', 
u'AT5G17330', u'AT5G17420', u'AT5G17460', u'AT5G17470', u'AT5G17650', u'AT5G17700', u'AT5G18020', u'AT5G18060', u'AT5G18230', u'AT5G19890', u'AT5G20960', u'AT5G21105', u'AT5G22500', u'AT5G22580', u'AT5G22630', u'AT5G22700', u'AT5G23010', u'AT5G23250', u'AT5G23350', u'AT5G23730', u'AT5G23820', u'AT5G23840', u'AT5G23890', u'AT5G24080', u'AT5G24510', u'AT5G24530', u'AT5G24760', u'AT5G25190', u'AT5G25220', u'AT5G25610', u'AT5G26280', u'AT5G26290', u'AT5G35080', u'AT5G35480', u'AT5G36880', u'AT5G36910', u'AT5G36920', u'AT5G37030', u'AT5G37210', u'AT5G37400', u'AT5G37500', u'AT5G37590', u'AT5G38290', u'AT5G38600', u'AT5G39020', u'AT5G39090', u'AT5G39110', u'AT5G39450', u'AT5G39580', u'AT5G39890', u'AT5G40270', u'AT5G40350', u'AT5G40370', u'AT5G40850', u'AT5G40950', u'AT5G41070', u'AT5G41080', u'AT5G41270', u'AT5G41350', u'AT5G41660', u'AT5G41860', u'AT5G41990', u'AT5G42070', u'AT5G42150', u'AT5G42180', u'AT5G42200', u'AT5G42570', u'AT5G42840', u'AT5G42890', u'AT5G43070', u'AT5G43350', u'AT5G43440', u'AT5G43460', u'AT5G43500', u'AT5G44290', u'AT5G44360', u'AT5G44740', u'AT5G44785', u'AT5G45040', u'AT5G45650', u'AT5G45775', u'AT5G46190', u'AT5G46230', u'AT5G46900', u'AT5G46910', u'AT5G47100', u'AT5G47230', u'AT5G47450', u'AT5G47540', u'AT5G47620', u'AT5G47630', u'AT5G47740', u'AT5G47770', u'AT5G47960', u'AT5G48000', u'AT5G48050', u'AT5G48370', u'AT5G48480', u'AT5G48540', u'AT5G48570', u'AT5G48620', u'AT5G48930', u'AT5G49450', u'AT5G49560', u'AT5G49630', u'AT5G49710', u'AT5G49840', u'AT5G50050', u'AT5G50150', u'AT5G50160', u'AT5G50200', u'AT5G50210', u'AT5G50380', u'AT5G51070', u'AT5G51820', u'AT5G51900', u'AT5G51950', u'AT5G52070', u'AT5G52120', u'AT5G52140', u'AT5G52310', u'AT5G52370', u'AT5G52730', u'AT5G52760', u'AT5G52900', u'AT5G53330', u'AT5G53730', u'AT5G54100', u'AT5G54110', u'AT5G54160', u'AT5G54220', u'AT5G54490', u'AT5G54500', u'AT5G54760', u'AT5G54870', u'AT5G54940', u'AT5G55150', u'AT5G55260', u'AT5G55530', u'AT5G56870', u'AT5G57110', u'AT5G57220', 
u'AT5G57300', u'AT5G57410', u'AT5G57510', u'AT5G59130', u'AT5G59550', u'AT5G59590', u'AT5G59613', u'AT5G59720', u'AT5G60150', u'AT5G60200', u'AT5G60540', u'AT5G60950', u'AT5G61080', u'AT5G61160', u'AT5G61320', u'AT5G61650', u'AT5G62140', u'AT5G62570', u'AT5G62640', u'AT5G63600', u'AT5G63620', u'AT5G63760', u'AT5G64100', u'AT5G64120', u'AT5G64130', u'AT5G64200', u'AT5G64510', u'AT5G64640', u'AT5G65110', u'AT5G65640', u'AT5G65730', u'AT5G65860', u'AT5G65870', u'AT5G67260', u'AT5G67420', u'AT5G67430', u'AT5G67500']
#Snoek 2012 gxe 1 3.01 2
#fi8 = 'genome_wide_eQTL_mapping_Snoek_2012_gxe1_3.01_2.txt'
#fo8 = 'report_eQTL_mapping_Snoek_2012_gxe1_3.01_2.txt'
#eQTLReport(fi8,fo8)
#9137 2212 5482 3655 6925 1443 1636
#[u'AT1G01060', u'AT1G01200', u'AT1G01390', u'AT1G01490', u'AT1G01580', u'AT1G01650', u'AT1G01670', u'AT1G01710', u'AT1G01725', u'AT1G01730', u'AT1G01770', u'AT1G01830', u'AT1G01910', u'AT1G02340', u'AT1G02640', u'AT1G02870', u'AT1G03020', u'AT1G03530', u'AT1G03810', u'AT1G04150', u'AT1G04310', u'AT1G04390', u'AT1G04400', u'AT1G04530', u'AT1G04640', u'AT1G04760', u'AT1G04770', u'AT1G04790', u'AT1G04870', u'AT1G04970', u'AT1G04980', u'AT1G05150', u'AT1G05160', u'AT1G05205', u'AT1G05300', u'AT1G05330', u'AT1G05385', u'AT1G05410', u'AT1G05520', u'AT1G05540', u'AT1G05590', u'AT1G05600', u'AT1G05640', u'AT1G05680', u'AT1G05700', u'AT1G05750', u'AT1G05790', u'AT1G05890', u'AT1G05960', u'AT1G05970', u'AT1G06040', u'AT1G06080', u'AT1G06135', u'AT1G06410', u'AT1G06710', u'AT1G06820', u'AT1G06870', u'AT1G07130', u'AT1G07310', u'AT1G07360', u'AT1G07370', u'AT1G07500', u'AT1G07590', u'AT1G07890', u'AT1G08360', u'AT1G08550', u'AT1G08590', u'AT1G08760', u'AT1G08860', u'AT1G08910', u'AT1G08980', u'AT1G09000', u'AT1G09010', u'AT1G09160', u'AT1G09290', u'AT1G09310', u'AT1G09520', u'AT1G09530', u'AT1G09660', u'AT1G10580', u'AT1G10770', u'AT1G10900', u'AT1G10970', u'AT1G11000', u'AT1G11120', u'AT1G11530', u'AT1G12030', u'AT1G12530', u'AT1G12630', u'AT1G12710', u'AT1G12730', u'AT1G12810', u'AT1G12845', u'AT1G12860', u'AT1G13000', u'AT1G13030', u'AT1G13250', u'AT1G13320', u'AT1G13460', u'AT1G13650', u'AT1G13830', u'AT1G13930', u'AT1G14300', u'AT1G14340', u'AT1G14350', u'AT1G14550', u'AT1G14790', u'AT1G14920', u'AT1G15110', u'AT1G15125', u'AT1G15290', u'AT1G15380', u'AT1G15550', u'AT1G16400', u'AT1G16750', u'AT1G17050', u'AT1G17200', u'AT1G17455', u'AT1G17640', u'AT1G17700', u'AT1G18010', u'AT1G18020', u'AT1G18270', u'AT1G18420', u'AT1G18460', u'AT1G18780', u'AT1G18800', u'AT1G18810', u'AT1G18830', u'AT1G18940', u'AT1G19060', u'AT1G19130', u'AT1G19170', u'AT1G19270', u'AT1G19510', u'AT1G19835', u'AT1G20120', u'AT1G20470', u'AT1G20630', u'AT1G20670', u'AT1G21090', u'AT1G21150', 
u'AT1G21210', u'AT1G21450', u'AT1G21460', u'AT1G21680', u'AT1G21690', u'AT1G21790', u'AT1G22090', u'AT1G22170', u'AT1G22290', u'AT1G22470', u'AT1G22490', u'AT1G22690', u'AT1G22990', u'AT1G23050', u'AT1G23560', u'AT1G23750', u'AT1G23800', u'AT1G23850', u'AT1G23880', u'AT1G24010', u'AT1G24150', u'AT1G24400', u'AT1G24460', u'AT1G25260', u'AT1G26250', u'AT1G26630', u'AT1G26770', u'AT1G26920', u'AT1G27300', u'AT1G27340', u'AT1G27460', u'AT1G27470', u'AT1G27595', u'AT1G27620', u'AT1G27710', u'AT1G27900', u'AT1G28100', u'AT1G28170', u'AT1G28610', u'AT1G29040', u'AT1G29250', u'AT1G29940', u'AT1G29980', u'AT1G30050', u'AT1G30110', u'AT1G30240', u'AT1G30250', u'AT1G30260', u'AT1G30460', u'AT1G30470', u'AT1G30540', u'AT1G30610', u'AT1G30700', u'AT1G31000', u'AT1G31490', u'AT1G31830', u'AT1G32170', u'AT1G32360', u'AT1G32440', u'AT1G32730', u'AT1G32750', u'AT1G32810', u'AT1G32850', u'AT1G32900', u'AT1G33280', u'AT1G33880', u'AT1G33970', u'AT1G34070', u'AT1G34180', u'AT1G34200', u'AT1G34320', u'AT1G34355', u'AT1G34760', u'AT1G34790', u'AT1G35190', u'AT1G35310', u'AT1G35580', u'AT1G35610', u'AT1G35720', u'AT1G42470', u'AT1G43000', u'AT1G43710', u'AT1G43860', u'AT1G44790', u'AT1G44910', u'AT1G44920', u'AT1G44970', u'AT1G47610', u'AT1G47655', u'AT1G47800', u'AT1G47890', u'AT1G48050', u'AT1G48195', u'AT1G48360', u'AT1G48480', u'AT1G48570', u'AT1G48920', u'AT1G49130', u'AT1G49170', u'AT1G49240', u'AT1G49360', u'AT1G49560', u'AT1G49750', u'AT1G49910', u'AT1G49920', u'AT1G49980', u'AT1G50180', u'AT1G50270', u'AT1G50380', u'AT1G50400', u'AT1G50410', u'AT1G50420', u'AT1G50460', u'AT1G50920', u'AT1G51630', u'AT1G51650', u'AT1G51740', u'AT1G52590', u'AT1G52770', u'AT1G52870', u'AT1G52880', u'AT1G53000', u'AT1G53060', u'AT1G53070', u'AT1G53240', u'AT1G53250', u'AT1G53260', u'AT1G53380', u'AT1G53570', u'AT1G53730', u'AT1G53830', u'AT1G53840', u'AT1G54120', u'AT1G54200', u'AT1G54290', u'AT1G54310', u'AT1G54400', u'AT1G54410', u'AT1G54510', u'AT1G54530', u'AT1G54575', u'AT1G54650', 
u'AT1G54710', u'AT1G54740', u'AT1G54770', u'AT1G54850', u'AT1G54890', u'AT1G55000', u'AT1G55030', u'AT1G55070', u'AT1G55150', u'AT1G55220', u'AT1G55260', u'AT1G55310', u'AT1G55480', u'AT1G55490', u'AT1G55500', u'AT1G55510', u'AT1G55640', u'AT1G55960', u'AT1G56010', u'AT1G56150', u'AT1G56210', u'AT1G56220', u'AT1G56230', u'AT1G56270', u'AT1G56300', u'AT1G56330', u'AT1G56570', u'AT1G58080', u'AT1G58120', u'AT1G58180', u'AT1G58460', u'AT1G60000', u'AT1G60140', u'AT1G60220', u'AT1G61010', u'AT1G61030', u'AT1G61170', u'AT1G61210', u'AT1G61670', u'AT1G61740', u'AT1G61870', u'AT1G62225', u'AT1G62290', u'AT1G62440', u'AT1G62510', u'AT1G62570', u'AT1G62840', u'AT1G63245', u'AT1G63270', u'AT1G63460', u'AT1G63650', u'AT1G63700', u'AT1G63740', u'AT1G63780', u'AT1G64105', u'AT1G64360', u'AT1G64430', u'AT1G64490', u'AT1G64550', u'AT1G64625', u'AT1G64670', u'AT1G65040', u'AT1G65070', u'AT1G65220', u'AT1G65240', u'AT1G65610', u'AT1G65680', u'AT1G65700', u'AT1G65710', u'AT1G65730', u'AT1G65840', u'AT1G65980', u'AT1G65985', u'AT1G66080', u'AT1G66260', u'AT1G66480', u'AT1G66730', u'AT1G66890', u'AT1G66910', u'AT1G67280', u'AT1G67325', u'AT1G67370', u'AT1G67510', u'AT1G67590', u'AT1G67690', u'AT1G67700', u'AT1G67870', u'AT1G67890', u'AT1G67900', u'AT1G67920', u'AT1G68110', u'AT1G68400', u'AT1G68440', u'AT1G68510', u'AT1G68520', u'AT1G68650', u'AT1G68660', u'AT1G68840', u'AT1G68940', u'AT1G69295', u'AT1G69570', u'AT1G69710', u'AT1G69740', u'AT1G69920', u'AT1G70420', u'AT1G70570', u'AT1G70820', u'AT1G70900', u'AT1G70940', u'AT1G71697', u'AT1G71930', u'AT1G71960', u'AT1G72030', u'AT1G72290', u'AT1G72820', u'AT1G72940', u'AT1G73410', u'AT1G73660', u'AT1G73790', u'AT1G73820', u'AT1G73980', u'AT1G74230', u'AT1G74260', u'AT1G74350', u'AT1G74380', u'AT1G75010', u'AT1G75270', u'AT1G75820', u'AT1G76010', u'AT1G76090', u'AT1G76130', u'AT1G76180', u'AT1G76250', u'AT1G76610', u'AT1G76730', u'AT1G76790', u'AT1G77490', u'AT1G77940', u'AT1G78280', u'AT1G78460', u'AT1G78490', u'AT1G78580', 
u'AT1G78600', u'AT1G78620', u'AT1G78650', u'AT1G78970', u'AT1G78995', u'AT1G79160', u'AT1G79360', u'AT1G79380', u'AT1G79460', u'AT1G79900', u'AT1G79920', u'AT1G80180', u'AT1G80270', u'AT1G80330', u'AT1G80350', u'AT1G80480', u'AT1G80690', u'AT1G80720', u'AT1G80830', u'AT1G80940', u'AT2G01110', u'AT2G01170', u'AT2G01450', u'AT2G01740', u'AT2G01750', u'AT2G01810', u'AT2G01890', u'AT2G03590', u'AT2G03680', u'AT2G04039', u'AT2G04235', u'AT2G04460', u'AT2G04530', u'AT2G04795', u'AT2G04870', u'AT2G04890', u'AT2G05060', u'AT2G05260', u'AT2G05330', u'AT2G05440', u'AT2G05540', u'AT2G05720', u'AT2G05790', u'AT2G06000', u'AT2G06020', u'AT2G06925', u'AT2G07630', u'AT2G13560', u'AT2G13610', u'AT2G13660', u'AT2G13810', u'AT2G14070', u'AT2G14520', u'AT2G14690', u'AT2G14835', u'AT2G14870', u'AT2G14960', u'AT2G15580', u'AT2G15695', u'AT2G15830', u'AT2G16730', u'AT2G16750', u'AT2G17036', u'AT2G17220', u'AT2G17240', u'AT2G17880', u'AT2G18160', u'AT2G18170', u'AT2G18420', u'AT2G18470', u'AT2G19110', u'AT2G19470', u'AT2G19560', u'AT2G19700', u'AT2G19760', u'AT2G19850', u'AT2G19990', u'AT2G20120', u'AT2G20140', u'AT2G20170', u'AT2G20450', u'AT2G20610', u'AT2G20630', u'AT2G20725', u'AT2G20760', u'AT2G20875', u'AT2G20920', u'AT2G21130', u'AT2G21400', u'AT2G21410', u'AT2G21580', u'AT2G21730', u'AT2G21790', u'AT2G22030', u'AT2G22400', u'AT2G22540', u'AT2G22900', u'AT2G23050', u'AT2G23790', u'AT2G23820', u'AT2G23900', u'AT2G23930', u'AT2G24170', u'AT2G24210', u'AT2G24285', u'AT2G24580', u'AT2G24860', u'AT2G25590', u'AT2G25680', u'AT2G25690', u'AT2G25840', u'AT2G25920', u'AT2G26200', u'AT2G26360', u'AT2G26640', u'AT2G26650', u'AT2G26690', u'AT2G26730', u'AT2G27040', u'AT2G27100', u'AT2G27180', u'AT2G27300', u'AT2G27350', u'AT2G27500', u'AT2G27510', u'AT2G27520', u'AT2G27810', u'AT2G27970', u'AT2G28000', u'AT2G28080', u'AT2G28190', u'AT2G28340', u'AT2G28400', u'AT2G28540', u'AT2G28670', u'AT2G28930', u'AT2G29120', u'AT2G29200', u'AT2G29340', u'AT2G29500', u'AT2G30440', u'AT2G30780', 
u'AT2G31100', u'AT2G31140', u'AT2G31170', u'AT2G31250', u'AT2G31260', u'AT2G31400', u'AT2G31560', u'AT2G31570', u'AT2G31650', u'AT2G31660', u'AT2G32080', u'AT2G32090', u'AT2G32380', u'AT2G32520', u'AT2G32710', u'AT2G32950', u'AT2G33100', u'AT2G33370', u'AT2G33430', u'AT2G33440', u'AT2G33640', u'AT2G33780', u'AT2G33990', u'AT2G34240', u'AT2G34810', u'AT2G35140', u'AT2G35605', u'AT2G36080', u'AT2G36240', u'AT2G36390', u'AT2G37035', u'AT2G37180', u'AT2G37230', u'AT2G37530', u'AT2G37580', u'AT2G37620', u'AT2G37640', u'AT2G38020', u'AT2G38080', u'AT2G38120', u'AT2G38160', u'AT2G38170', u'AT2G38250', u'AT2G38370', u'AT2G38400', u'AT2G38460', u'AT2G38500', u'AT2G38540', u'AT2G38580', u'AT2G38630', u'AT2G38650', u'AT2G39250', u'AT2G39310', u'AT2G39330', u'AT2G39460', u'AT2G39570', u'AT2G39630', u'AT2G39795', u'AT2G39830', u'AT2G39920', u'AT2G40080', u'AT2G40250', u'AT2G40280', u'AT2G40530', u'AT2G40610', u'AT2G40630', u'AT2G40810', u'AT2G41010', u'AT2G41020', u'AT2G41150', u'AT2G41170', u'AT2G41240', u'AT2G41500', u'AT2G41650', u'AT2G42110', u'AT2G42290', u'AT2G42380', u'AT2G42400', u'AT2G42780', u'AT2G42860', u'AT2G42890', u'AT2G42990', u'AT2G43110', u'AT2G43200', u'AT2G43570', u'AT2G43600', u'AT2G44030', u'AT2G44190', u'AT2G44600', u'AT2G44670', u'AT2G44850', u'AT2G45140', u'AT2G45350', u'AT2G45660', u'AT2G46200', u'AT2G46240', u'AT2G46290', u'AT2G46450', u'AT2G46560', u'AT2G46620', u'AT2G46700', u'AT2G46850', u'AT2G47130', u'AT2G47220', u'AT2G47260', u'AT2G47270', u'AT2G47370', u'AT2G47490', u'AT2G47710', u'AT2G47840', u'AT2G47860', u'AT2G47890', u'AT3G01180', u'AT3G01300', u'AT3G01490', u'AT3G01790', u'AT3G01860', u'AT3G01990', u'AT3G02020', u'AT3G02150', u'AT3G02200', u'AT3G02210', u'AT3G02280', u'AT3G02380', u'AT3G02610', u'AT3G02650', u'AT3G02750', u'AT3G02870', u'AT3G02970', u'AT3G03160', u'AT3G03190', u'AT3G03210', u'AT3G03220', u'AT3G03270', u'AT3G03300', u'AT3G03740', u'AT3G04010', u'AT3G04060', u'AT3G04210', u'AT3G04340', u'AT3G04520', u'AT3G04950', 
u'AT3G05100', u'AT3G05220', u'AT3G05520', u'AT3G06080', u'AT3G06145', u'AT3G06210', u'AT3G06483', u'AT3G06530', u'AT3G06730', u'AT3G06840', u'AT3G06850', u'AT3G07320', u'AT3G07590', u'AT3G07640', u'AT3G07800', u'AT3G08000', u'AT3G08640', u'AT3G08760', u'AT3G08920', u'AT3G08980', u'AT3G09085', u'AT3G09260', u'AT3G09550', u'AT3G09630', u'AT3G09930', u'AT3G09950', u'AT3G10450', u'AT3G10525', u'AT3G11020', u'AT3G11230', u'AT3G11240', u'AT3G11560', u'AT3G11590', u'AT3G11620', u'AT3G12320', u'AT3G12580', u'AT3G12610', u'AT3G12640', u'AT3G12750', u'AT3G12770', u'AT3G12930', u'AT3G13230', u'AT3G13310', u'AT3G13445', u'AT3G13530', u'AT3G13690', u'AT3G14610', u'AT3G14740', u'AT3G14770', u'AT3G14900', u'AT3G14930', u'AT3G15000', u'AT3G15030', u'AT3G15095', u'AT3G15115', u'AT3G15160', u'AT3G15430', u'AT3G15510', u'AT3G15560', u'AT3G15570', u'AT3G15630', u'AT3G15790', u'AT3G15920', u'AT3G16000', u'AT3G16010', u'AT3G16280', u'AT3G16400', u'AT3G16450', u'AT3G16470', u'AT3G16990', u'AT3G17000', u'AT3G17130', u'AT3G17330', u'AT3G17420', u'AT3G17570', u'AT3G17609', u'AT3G17611', u'AT3G17800', u'AT3G17810', u'AT3G17860', u'AT3G18000', u'AT3G19090', u'AT3G19350', u'AT3G19553', u'AT3G19640', u'AT3G19720', u'AT3G19820', u'AT3G20000', u'AT3G20240', u'AT3G20290', u'AT3G20310', u'AT3G20490', u'AT3G20580', u'AT3G20660', u'AT3G20740', u'AT3G20810', u'AT3G21150', u'AT3G21200', u'AT3G21215', u'AT3G21270', u'AT3G21360', u'AT3G21610', u'AT3G21710', u'AT3G21720', u'AT3G21740', u'AT3G21870', u'AT3G22070', u'AT3G22104', u'AT3G22220', u'AT3G22260', u'AT3G22820', u'AT3G22840', u'AT3G22845', u'AT3G22890', u'AT3G22942', u'AT3G23000', u'AT3G23300', u'AT3G23340', u'AT3G23560', u'AT3G23830', u'AT3G23840', u'AT3G23920', u'AT3G23940', u'AT3G24250', u'AT3G24420', u'AT3G24570', u'AT3G24590', u'AT3G24760', u'AT3G25060', u'AT3G25120', u'AT3G25210', u'AT3G25400', u'AT3G25430', u'AT3G25480', u'AT3G25540', u'AT3G25570', u'AT3G25580', u'AT3G25805', u'AT3G25860', u'AT3G25890', u'AT3G26240', u'AT3G26390', 
u'AT3G26490', u'AT3G26618', u'AT3G26744', u'AT3G26850', u'AT3G26900', u'AT3G26935', u'AT3G27050', u'AT3G27430', u'AT3G28060', u'AT3G28170', u'AT3G28270', u'AT3G28460', u'AT3G28670', u'AT3G28920', u'AT3G29035', u'AT3G29280', u'AT3G29570', u'AT3G30775', u'AT3G33530', u'AT3G43600', u'AT3G43970', u'AT3G44070', u'AT3G44310', u'AT3G44360', u'AT3G44620', u'AT3G44890', u'AT3G44990', u'AT3G45140', u'AT3G45150', u'AT3G45610', u'AT3G45850', u'AT3G45870', u'AT3G46100', u'AT3G46210', u'AT3G46320', u'AT3G46440', u'AT3G46540', u'AT3G46580', u'AT3G46970', u'AT3G46990', u'AT3G47080', u'AT3G47420', u'AT3G47450', u'AT3G47640', u'AT3G47700', u'AT3G47800', u'AT3G47860', u'AT3G48160', u'AT3G48220', u'AT3G48250', u'AT3G48350', u'AT3G48360', u'AT3G48410', u'AT3G48430', u'AT3G48450', u'AT3G48610', u'AT3G48690', u'AT3G48730', u'AT3G48740', u'AT3G49040', u'AT3G49050', u'AT3G49170', u'AT3G49400', u'AT3G49810', u'AT3G49990', u'AT3G50330', u'AT3G50340', u'AT3G50700', u'AT3G50860', u'AT3G51000', u'AT3G51090', u'AT3G51140', u'AT3G51150', u'AT3G51190', u'AT3G51520', u'AT3G51670', u'AT3G52155', u'AT3G52170', u'AT3G52340', u'AT3G52360', u'AT3G52525', u'AT3G52570', u'AT3G52840', u'AT3G52940', u'AT3G53210', u'AT3G53410', u'AT3G53490', u'AT3G53530', u'AT3G53670', u'AT3G53950', u'AT3G53990', u'AT3G54260', u'AT3G54400', u'AT3G54600', u'AT3G54770', u'AT3G54800', u'AT3G54810', u'AT3G55370', u'AT3G55400', u'AT3G55410', u'AT3G55420', u'AT3G55460', u'AT3G55530', u'AT3G55880', u'AT3G56070', u'AT3G56190', u'AT3G56360', u'AT3G56370', u'AT3G56430', u'AT3G56490', u'AT3G56590', u'AT3G56650', u'AT3G56670', u'AT3G56880', u'AT3G56990', u'AT3G57040', u'AT3G57070', u'AT3G57100', u'AT3G57120', u'AT3G57140', u'AT3G57430', u'AT3G57470', u'AT3G57650', u'AT3G57740', u'AT3G57910', u'AT3G57970', u'AT3G58020', u'AT3G58110', u'AT3G58190', u'AT3G58210', u'AT3G58220', u'AT3G58270', u'AT3G58430', u'AT3G58750', u'AT3G59210', u'AT3G59410', u'AT3G59670', u'AT3G59820', u'AT3G59900', u'AT3G60020', u'AT3G60390', u'AT3G60580', 
u'AT3G60590', u'AT3G60680', u'AT3G60770', u'AT3G61060', u'AT3G61070', u'AT3G61140', u'AT3G61770', u'AT3G61890', u'AT3G61940', u'AT3G61960', u'AT3G62030', u'AT3G62070', u'AT3G62650', u'AT3G62660', u'AT3G62800', u'AT3G62820', u'AT3G62910', u'AT3G62970', u'AT3G63210', u'AT3G63290', u'AT3G63320', u'AT3G63480', u'AT3G63510', u'AT4G00030', u'AT4G00230', u'AT4G00610', u'AT4G01050', u'AT4G01690', u'AT4G02260', u'AT4G02840', u'AT4G03180', u'AT4G03415', u'AT4G04190', u'AT4G04220', u'AT4G04330', u'AT4G04830', u'AT4G04860', u'AT4G04890', u'AT4G05370', u'AT4G05400', u'AT4G07820', u'AT4G07990', u'AT4G08330', u'AT4G08460', u'AT4G08790', u'AT4G09730', u'AT4G10060', u'AT4G10150', u'AT4G10300', u'AT4G10610', u'AT4G10840', u'AT4G11090', u'AT4G11200', u'AT4G11230', u'AT4G11270', u'AT4G11940', u'AT4G12240', u'AT4G12980', u'AT4G13100', u'AT4G13220', u'AT4G13250', u'AT4G13550', u'AT4G13690', u'AT4G13770', u'AT4G13870', u'AT4G14070', u'AT4G14230', u'AT4G14310', u'AT4G14420', u'AT4G14440', u'AT4G14550', u'AT4G14730', u'AT4G14920', u'AT4G15090', u'AT4G15210', u'AT4G15380', u'AT4G15420', u'AT4G16330', u'AT4G16370', u'AT4G16490', u'AT4G16563', u'AT4G16650', u'AT4G16670', u'AT4G16990', u'AT4G17070', u'AT4G17090', u'AT4G17110', u'AT4G17245', u'AT4G17530', u'AT4G17810', u'AT4G17870', u'AT4G17950', u'AT4G18130', u'AT4G18170', u'AT4G18240', u'AT4G18600', u'AT4G18610', u'AT4G18810', u'AT4G18975', u'AT4G19003', u'AT4G19190', u'AT4G19230', u'AT4G19390', u'AT4G19410', u'AT4G19460', u'AT4G19470', u'AT4G19720', u'AT4G19840', u'AT4G19860', u'AT4G19900', u'AT4G20130', u'AT4G20760', u'AT4G20830', u'AT4G20840', u'AT4G21180', u'AT4G21210', u'AT4G22130', u'AT4G22756', u'AT4G22820', u'AT4G23010', u'AT4G23220', u'AT4G23430', u'AT4G23600', u'AT4G23610', u'AT4G24040', u'AT4G24230', u'AT4G24270', u'AT4G24460', u'AT4G24470', u'AT4G24510', u'AT4G24670', u'AT4G24700', u'AT4G24810', u'AT4G24970', u'AT4G25030', u'AT4G25500', u'AT4G25680', u'AT4G26610', u'AT4G26850', u'AT4G26940', u'AT4G27250', u'AT4G27340', 
u'AT4G27510', u'AT4G27520', u'AT4G27740', u'AT4G28200', u'AT4G28290', u'AT4G28510', u'AT4G29110', u'AT4G29190', u'AT4G29250', u'AT4G29740', u'AT4G30190', u'AT4G30530', u'AT4G30650', u'AT4G30680', u'AT4G30690', u'AT4G30720', u'AT4G30800', u'AT4G30810', u'AT4G30950', u'AT4G31060', u'AT4G31120', u'AT4G31240', u'AT4G31290', u'AT4G31460', u'AT4G31710', u'AT4G31720', u'AT4G31730', u'AT4G31790', u'AT4G32150', u'AT4G32340', u'AT4G32350', u'AT4G32610', u'AT4G32860', u'AT4G32960', u'AT4G33560', u'AT4G33670', u'AT4G33980', u'AT4G34120', u'AT4G34250', u'AT4G34540', u'AT4G34630', u'AT4G34750', u'AT4G34760', u'AT4G34790', u'AT4G34820', u'AT4G34910', u'AT4G35350', u'AT4G35420', u'AT4G35720', u'AT4G35830', u'AT4G36250', u'AT4G36420', u'AT4G36870', u'AT4G36970', u'AT4G37460', u'AT4G37510', u'AT4G37800', u'AT4G37900', u'AT4G38050', u'AT4G38170', u'AT4G38380', u'AT4G38470', u'AT4G38510', u'AT4G38620', u'AT4G38660', u'AT4G38700', u'AT4G38810', u'AT4G38830', u'AT4G39050', u'AT4G39360', u'AT4G39410', u'AT4G39460', u'AT4G39570', u'AT4G39940', u'AT4G39970', u'AT5G01075', u'AT5G01310', u'AT5G01500', u'AT5G01510', u'AT5G01740', u'AT5G01750', u'AT5G01770', u'AT5G01810', u'AT5G01820', u'AT5G01830', u'AT5G01990', u'AT5G02260', u'AT5G02530', u'AT5G02540', u'AT5G02760', u'AT5G02810', u'AT5G02870', u'AT5G02890', u'AT5G02960', u'AT5G03150', u'AT5G03300', u'AT5G03480', u'AT5G03545', u'AT5G03600', u'AT5G03720', u'AT5G03760', u'AT5G03840', u'AT5G03960', u'AT5G04040', u'AT5G04080', u'AT5G04160', u'AT5G04190', u'AT5G04250', u'AT5G04530', u'AT5G04550', u'AT5G04700', u'AT5G04790', u'AT5G04810', u'AT5G05080', u'AT5G05200', u'AT5G05300', u'AT5G05320', u'AT5G05370', u'AT5G05430', u'AT5G05470', u'AT5G05520', u'AT5G06110', u'AT5G06130', u'AT5G06180', u'AT5G06210', u'AT5G06850', u'AT5G06950', u'AT5G06980', u'AT5G07120', u'AT5G07180', u'AT5G07220', u'AT5G07250', u'AT5G07540', u'AT5G07580', u'AT5G07610', u'AT5G07830', u'AT5G07840', u'AT5G07900', u'AT5G08141', u'AT5G08170', u'AT5G08300', u'AT5G08330', 
u'AT5G08400', u'AT5G08490', u'AT5G08580', u'AT5G08610', u'AT5G09300', u'AT5G09320', u'AT5G09330', u'AT5G09390', u'AT5G09400', u'AT5G09410', u'AT5G09550', u'AT5G09690', u'AT5G09750', u'AT5G09770', u'AT5G09880', u'AT5G10070', u'AT5G10340', u'AT5G10350', u'AT5G10560', u'AT5G10860', u'AT5G10920', u'AT5G11070', u'AT5G11090', u'AT5G11150', u'AT5G11420', u'AT5G11490', u'AT5G11540', u'AT5G11700', u'AT5G11810', u'AT5G11890', u'AT5G13010', u'AT5G13060', u'AT5G13100', u'AT5G13120', u'AT5G13160', u'AT5G13260', u'AT5G13660', u'AT5G13730', u'AT5G13810', u'AT5G13910', u'AT5G13980', u'AT5G14070', u'AT5G14210', u'AT5G14290', u'AT5G14680', u'AT5G14730', u'AT5G14910', u'AT5G14960', u'AT5G15170', u'AT5G15230', u'AT5G15310', u'AT5G15490', u'AT5G15700', u'AT5G15740', u'AT5G15750', u'AT5G15820', u'AT5G15850', u'AT5G15880', u'AT5G16030', u'AT5G16250', u'AT5G16390', u'AT5G16770', u'AT5G16810', u'AT5G16820', u'AT5G17090', u'AT5G17210', u'AT5G17230', u'AT5G17300', u'AT5G17640', u'AT5G17670', u'AT5G18140', u'AT5G18650', u'AT5G18710', u'AT5G18850', u'AT5G19010', u'AT5G19080', u'AT5G19120', u'AT5G19690', u'AT5G19900', u'AT5G20120', u'AT5G20130', u'AT5G20160', u'AT5G20630', u'AT5G20670', u'AT5G20690', u'AT5G20750', u'AT5G20900', u'AT5G21105', u'AT5G22740', u'AT5G22875', u'AT5G22880', u'AT5G22890', u'AT5G23110', u'AT5G23170', u'AT5G23210', u'AT5G23240', u'AT5G23390', u'AT5G23660', u'AT5G23950', u'AT5G24000', u'AT5G24300', u'AT5G24470', u'AT5G24500', u'AT5G24520', u'AT5G24820', u'AT5G24910', u'AT5G25170', u'AT5G25190', u'AT5G25210', u'AT5G25460', u'AT5G25500', u'AT5G25530', u'AT5G25900', u'AT5G26570', u'AT5G26680', u'AT5G26820', u'AT5G26940', u'AT5G26990', u'AT5G27230', u'AT5G27320', u'AT5G27380', u'AT5G27520', u'AT5G27560', u'AT5G27620', u'AT5G27660', u'AT5G27820', u'AT5G28640', u'AT5G28750', u'AT5G28910', u'AT5G33320', u'AT5G33330', u'AT5G35220', u'AT5G35320', u'AT5G35360', u'AT5G35910', u'AT5G35960', u'AT5G37130', u'AT5G37470', u'AT5G37490', u'AT5G37570', u'AT5G37670', u'AT5G37780', 
u'AT5G38300', u'AT5G38510', u'AT5G38530', u'AT5G38630', u'AT5G38650', u'AT5G38720', u'AT5G39080', u'AT5G39250', u'AT5G39410', u'AT5G39520', u'AT5G39610', u'AT5G39640', u'AT5G39760', u'AT5G39890', u'AT5G39980', u'AT5G40160', u'AT5G40200', u'AT5G40330', u'AT5G40370', u'AT5G40480', u'AT5G40540', u'AT5G40700', u'AT5G40790', u'AT5G41050', u'AT5G41410', u'AT5G41610', u'AT5G41650', u'AT5G41770', u'AT5G41970', u'AT5G42100', u'AT5G42390', u'AT5G42600', u'AT5G42610', u'AT5G42900', u'AT5G43130', u'AT5G43290', u'AT5G43320', u'AT5G43330', u'AT5G43450', u'AT5G43560', u'AT5G43630', u'AT5G43710', u'AT5G43780', u'AT5G43840', u'AT5G43880', u'AT5G44060', u'AT5G44080', u'AT5G44160', u'AT5G44250', u'AT5G44260', u'AT5G44330', u'AT5G44340', u'AT5G44400', u'AT5G44520', u'AT5G44580', u'AT5G44720', u'AT5G44850', u'AT5G45290', u'AT5G45330', u'AT5G45350', u'AT5G45410', u'AT5G45660', u'AT5G45800', u'AT5G46510', u'AT5G46580', u'AT5G46590', u'AT5G46780', u'AT5G47010', u'AT5G47100', u'AT5G47360', u'AT5G47560', u'AT5G47640', u'AT5G47830', u'AT5G47870', u'AT5G48080', u'AT5G48150', u'AT5G48170', u'AT5G48370', u'AT5G48540', u'AT5G48570', u'AT5G48590', u'AT5G49170', u'AT5G49280', u'AT5G49300', u'AT5G49450', u'AT5G49700', u'AT5G49910', u'AT5G50000', u'AT5G50375', u'AT5G50570', u'AT5G50660', u'AT5G50730', u'AT5G50790', u'AT5G50860', u'AT5G51040', u'AT5G51190', u'AT5G51480', u'AT5G51750', u'AT5G51790', u'AT5G52110', u'AT5G52150', u'AT5G52190', u'AT5G52290', u'AT5G52300', u'AT5G52440', u'AT5G52460', u'AT5G52540', u'AT5G52600', u'AT5G52890', u'AT5G52950', u'AT5G52980', u'AT5G53030', u'AT5G53080', u'AT5G53090', u'AT5G53680', u'AT5G53850', u'AT5G53860', u'AT5G53950', u'AT5G54090', u'AT5G54190', u'AT5G54360', u'AT5G54540', u'AT5G54940', u'AT5G54960', u'AT5G54970', u'AT5G55050', u'AT5G55160', u'AT5G55390', u'AT5G55530', u'AT5G55700', u'AT5G55860', u'AT5G55900', u'AT5G56010', u'AT5G56020', u'AT5G56110', u'AT5G56600', u'AT5G56630', u'AT5G56870', u'AT5G57160', u'AT5G57280', u'AT5G57520', u'AT5G57630', 
u'AT5G57740', u'AT5G57850', u'AT5G57940', u'AT5G58160', u'AT5G58230', u'AT5G58250', u'AT5G58310', u'AT5G58370', u'AT5G58650', u'AT5G59080', u'AT5G59170', u'AT5G59850', u'AT5G59870', u'AT5G60340', u'AT5G60660', u'AT5G60710', u'AT5G60930', u'AT5G61140', u'AT5G61170', u'AT5G61250', u'AT5G61270', u'AT5G61340', u'AT5G61380', u'AT5G61410', u'AT5G61420', u'AT5G61440', u'AT5G61480', u'AT5G61580', u'AT5G61600', u'AT5G61710', u'AT5G61790', u'AT5G61910', u'AT5G62130', u'AT5G62170', u'AT5G62360', u'AT5G62370', u'AT5G62380', u'AT5G62720', u'AT5G62800', u'AT5G62840', u'AT5G63050', u'AT5G63120', u'AT5G63290', u'AT5G63320', u'AT5G63550', u'AT5G63570', u'AT5G63600', u'AT5G63610', u'AT5G63690', u'AT5G63780', u'AT5G63910', u'AT5G64080', u'AT5G64190', u'AT5G64240', u'AT5G64480', u'AT5G64560', u'AT5G64790', u'AT5G64930', u'AT5G65110', u'AT5G65200', u'AT5G65340', u'AT5G65560', u'AT5G65925', u'AT5G65960', u'AT5G65970', u'AT5G66070', u'AT5G66080', u'AT5G66330', u'AT5G66540', u'AT5G66815', u'AT5G66900', u'AT5G67380', u'AT5G67385', u'AT5G67440']
# overall parent type
# | Bay_gen |
# | Sha_gen |
# | BayFR |
# | ShaFR |
# | BayAR |
# | ShaAR |
# | Bay6H |
# | Sha6H |
# | BayRP |
# | ShaRP |
#degReport('Ligterink_2014',3,3,'BayFR','ShaFR','BayShaFRDE.txt') #BayShaFRDE.txt 1986 0.00338959627825 #
#degReport('Ligterink_2014',3,3,'BayAR','ShaAR','BayShaARDE.txt') #BayShaARDE.txt 1796 0.00306131444034
#degReport('Ligterink_2014',3,3,'Bay6H','Sha6H','BaySha6HDE.txt') #BaySha6HDE.txt 3661 0.00624523271178
#degReport('Ligterink_2014',3,3,'BayRP','ShaRP','BayShaRPDE.txt') #BayShaRPDE.txt 752 0.0012786959715
#degReport('Ligterink_2014',3,3,'BayFR','BayAR','BayFRARDE.txt') #BayFRARDE.txt 241 0.000406249474028 #
#degReport('Ligterink_2014',3,3,'BayFR','Bay6H','BayFR6HDE.txt') #BayFR6HDE.txt 6573 0.0112131048545
#degReport('Ligterink_2014',3,3,'BayFR','BayRP','BayFRRPDE.txt') #BayFRRPDE.txt 10596 0.0180763106305
#degReport('Ligterink_2014',3,3,'BayAR','Bay6H','BayAR6HDE.txt') #BayAR6HDE.txt 5831 0.00994968391212 #
#degReport('Ligterink_2014',3,3,'BayAR','BayRP','BayARRPDE.txt') #BayARRPDE.txt 10315 0.0175939561326
#degReport('Ligterink_2014',3,3,'Bay6H','BayRP','Bay6HRPDE.txt') #Bay6HRPDE.txt 9738 0.0166171111549 #
#degReport('Ligterink_2014',3,3,'ShaFR','ShaAR','ShaFRARDE.txt') #ShaFRARDE.txt 209 0.000358259483388 #
#degReport('Ligterink_2014',3,3,'ShaFR','Sha6H','ShaFR6HDE.txt') #ShaFR6HDE.txt 4516 0.00770543837268
#degReport('Ligterink_2014',3,3,'ShaFR','ShaRP','ShaFRRPDE.txt') #ShaFRRPDE.txt 10534 0.0179724826951
#degReport('Ligterink_2014',3,3,'ShaAR','Sha6H','ShaAR6HDE.txt') #ShaAR6HDE.txt 4726 0.0080650550216 #
#degReport('Ligterink_2014',3,3,'ShaAR','ShaRP','ShaARRPDE.txt') #ShaARRPDE.txt 10330 0.0176247644091
#degReport('Ligterink_2014',3,3,'Sha6H','ShaRP','Sha6HRPDE.txt') #Sha6HRPDE.txt 9800 0.0167214257597 #
#pass
#deg('BayShaFRDE.txt','gene_list_BayShaFRDE.txt')
#deg('BaySha6HDE.txt','gene_list_BaySha6HDE.txt')
#deg('BayShaARDE.txt','gene_list_BayShaARDE.txt')
#deg('BayShaRPDE.txt','gene_list_BayShaRPDE.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt','gene_list_Ligterink_2014_gxe0_3.85.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Ligterink_2014_gxe1_2.7.txt','gene_list_Ligterink_2014_gxe1_2.7.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Keurentjes_2007_gxe0_3.3.txt','gene_list_Keurentjes_2007_gxe0.3.3.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Snoek_2012_gxe1_3.01.txt','gene_list_Snoek_2012_gxe1_3.01.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85_2.txt','gene_list_Ligterink_2014_gxe0_3.85_2.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Ligterink_2014_gxe1_2.7_2.txt','gene_list_Ligterink_2014_gxe1_2.7_2.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Keurentjes_2007_gxe0_3.3_2.txt','gene_list_Keurentjes_2007_gxe0.3.3_2.txt')
#geneHasEQTL('genome_wide_eQTL_mapping_Snoek_2012_gxe1_3.01_2.txt','gene_list_Snoek_2012_gxe1_3.01_2.txt')
#intersectionGenes('gene_list_BayShaFRDE.txt','gene_list_Ligterink_2014_gxe0_3.85.txt','BayShaFR_ligterink_2014_gxe0_3.85.txt') #529
#intersectionGenes('gene_list_BayShaFRDE.txt','gene_list_Ligterink_2014_gxe1_2.7.txt','BayShaFR_ligterink_2014_gxe1_2.7.txt') #197
#intersectionGenes('gene_list_BayShaFRDE.txt','gene_list_Ligterink_2014_gxe0_3.85_2.txt','BayShaFR_ligterink_2014_gxe0_3.85_2.txt') #529
#intersectionGenes('gene_list_BayShaFRDE.txt','gene_list_Ligterink_2014_gxe1_2.7_2.txt','BayShaFR_ligterink_2014_gxe1_2.7_2.txt') #197
#intersectionGenes('gene_list_BaySha6HDE.txt','gene_list_Ligterink_2014_gxe0_3.85.txt','BaySha6H_ligterink_2014_gxe0_3.85.txt') #712
#intersectionGenes('gene_list_BaySha6HDE.txt','gene_list_Ligterink_2014_gxe1_2.7.txt','BaySha6H_ligterink_2014_gxe1_2.7.txt') #257
#intersectionGenes('gene_list_BaySha6HDE.txt','gene_list_Ligterink_2014_gxe0_3.85_2.txt','BaySha6H_ligterink_2014_gxe0_3.85_2.txt') #712
#intersectionGenes('gene_list_BaySha6HDE.txt','gene_list_Ligterink_2014_gxe1_2.7_2.txt','BaySha6H_ligterink_2014_gxe1_2.7_2.txt') #257
#intersectionGenes('gene_list_BayShaARDE.txt','gene_list_Ligterink_2014_gxe0_3.85.txt','BayShaAR_ligterink_2014_gxe0_3.85.txt') #459
#intersectionGenes('gene_list_BayShaARDE.txt','gene_list_Ligterink_2014_gxe1_2.7.txt','BayShaAR_ligterink_2014_gxe1_2.7.txt') #170
#intersectionGenes('gene_list_BayShaARDE.txt','gene_list_Ligterink_2014_gxe0_3.85_2.txt','BayShaAR_ligterink_2014_gxe0_3.85_2.txt') #459
#intersectionGenes('gene_list_BayShaARDE.txt','gene_list_Ligterink_2014_gxe1_2.7_2.txt','BayShaAR_ligterink_2014_gxe1_2.7_2.txt')#170
#intersectionGenes('gene_list_BayShaRPDE.txt','gene_list_Ligterink_2014_gxe0_3.85.txt','BayShaRP_ligterink_2014_gxe0_3.85.txt') #138
#intersectionGenes('gene_list_BayShaRPDE.txt','gene_list_Ligterink_2014_gxe1_2.7.txt','BayShaRP_ligterink_2014_gxe1_2.7.txt') #43
#intersectionGenes('gene_list_BayShaRPDE.txt','gene_list_Ligterink_2014_gxe0_3.85_2.txt','BayShaRP_ligterink_2014_gxe0_3.85_2.txt') #138
#intersectionGenes('gene_list_BayShaRPDE.txt','gene_list_Ligterink_2014_gxe1_2.7_2.txt','BayShaRP_ligterink_2014_gxe1_2.7_2.txt') #43
#summary('report/BayFRARDE.txt','report/summary_BayFRARDE.txt')
#summary('report/BayFR6HDE.txt','report/summary_BayFR6HDE.txt')
#summary('report/BayFRRPDE.txt','report/summary_BayFRRPDE.txt')
#summary('report/BayAR6HDE.txt','report/summary_BayAR6HDE.txt')
#summary('report/BayARRPDE.txt','report/summary_BayARRPDE.txt')
#summary('report/Bay6HRPDE.txt','report/summary_Bay6HRPDE.txt')
# Pairwise differential-expression summaries for the Sha parent across
# the FR/AR/6H/RP stages; each call reads report/<name>.txt and writes
# report/summary_<name>.txt (same six calls, same order, as before).
for _de_name in ('ShaFRARDE', 'ShaFR6HDE', 'ShaFRRPDE',
                 'ShaAR6HDE', 'ShaARRPDE', 'Sha6HRPDE'):
    summary('report/' + _de_name + '.txt',
            'report/summary_' + _de_name + '.txt')
#intersectionGenes('report/summary_BayFRARDE.txt','report/summary_BayAR6HDE.txt','report/Bay012intersection.txt') #97
#intersectionGenes('report/summary_BayFRARDE.txt','report/summary_BayARRPDE.txt','report/Bay013intersection.txt') #114
#intersectionGenes('report/summary_BayFR6HDE.txt','report/summary_Bay6HRPDE.txt','report/Bay023intersection.txt') #3636
#intersectionGenes('report/summary_BayAR6HDE.txt','report/summary_Bay6HRPDE.txt','report/Bay123intersection.txt') #3222
#intersectionGenes('report/Bay012intersection.txt','report/Bay123intersection.txt','report/Bay123intersection.txt') #45
# Intersect the Sha-parent DE summary gene lists pairwise (indices 0-3
# correspond to the FR/AR/6H/RP comparisons); trailing comments record
# the overlap counts observed on a previous run.
intersectionGenes('report/summary_ShaFRARDE.txt','report/summary_ShaAR6HDE.txt','report/Sha012intersection.txt') #83
intersectionGenes('report/summary_ShaFRARDE.txt','report/summary_ShaARRPDE.txt','report/Sha013intersection.txt') #111
intersectionGenes('report/summary_ShaFR6HDE.txt','report/summary_Sha6HRPDE.txt','report/Sha023intersection.txt') #2628
intersectionGenes('report/summary_ShaAR6HDE.txt','report/summary_Sha6HRPDE.txt','report/Sha123intersection.txt') #2715
# NOTE(review): the next call passes Sha123intersection.txt as BOTH the
# second input and the output file (the commented-out Bay run shows the
# same pattern) -- presumably a distinct name such as
# Sha0123intersection.txt was intended; confirm before rerunning, since
# the output may clobber its own input.
intersectionGenes('report/Sha012intersection.txt','report/Sha123intersection.txt','report/Sha123intersection.txt') #43
#hasOverlapEQTL('report/genome_wide_eQTL_mapping_Ligterink_2014_gxe0_3.85.txt',0.8,'report/Overlap_correlation_Ligterink_2014_gxe0_3.85_add.txt')
#hasOverlapEQTL('report/genome_wide_eQTL_mapping_Ligterink_2014_gxe1_2.7.txt',0.8,'report/Overlap_correlation_Ligterink_2014_gxe1_2.7_add.txt')
| 217.014286
| 24,225
| 0.704158
| 15,076
| 106,337
| 4.918745
| 0.229836
| 0.013674
| 0.006648
| 0.007039
| 0.883379
| 0.865538
| 0.854723
| 0.83695
| 0.830099
| 0.81122
| 0
| 0.39404
| 0.103482
| 106,337
| 489
| 24,226
| 217.458078
| 0.383813
| 0.872462
| 0
| 0.265306
| 0
| 0.010204
| 0.155575
| 0.06427
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005102
| 0.05102
| null | null | 0.107143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f5a1320992175531e0997e48fbe574acc19edfeb
| 2,996
|
py
|
Python
|
get_airbnb_to_txt.py
|
mccap079/detourningTheWeb_airBnbExperiences
|
70395f32301f0c584a8639c376e5dd93a86753a5
|
[
"MIT"
] | 1
|
2018-06-01T16:59:37.000Z
|
2018-06-01T16:59:37.000Z
|
get_airbnb_to_txt.py
|
mccap079/detourningTheWeb_airBnbExperiences
|
70395f32301f0c584a8639c376e5dd93a86753a5
|
[
"MIT"
] | null | null | null |
get_airbnb_to_txt.py
|
mccap079/detourningTheWeb_airBnbExperiences
|
70395f32301f0c584a8639c376e5dd93a86753a5
|
[
"MIT"
] | null | null | null |
import requests
def get_page(_offset):
    """Fetch one page of Airbnb "experiences" search results.

    _offset -- value for the ``items_offset`` query parameter (the API
               pages in steps of 40).
    Returns the decoded JSON response body as a dict.

    Fix: the original URL had been run through an HTML-entity decoder,
    which collapsed '&section_offset' into '\xa7ion_offset' (&sect -> §)
    and '&currency' into '\xa4cy' (&curren -> ¤); both query parameters
    are restored here so the request matches the documented endpoint.
    """
    url = "https://www.airbnb.com/api/v2/explore_tabs?version=1.3.3&_format=for_explore_search_web&experiences_per_grid=20&items_per_grid=18&guidebooks_per_grid=20&auto_ib=true&fetch_filters=true&is_guided_search=true&is_new_cards_experiment=true&luxury_pre_launch=false&query_understanding_enabled=false&show_groupings=true&supports_for_you_v3=true&timezone_offset=-300&metadata_only=false&is_standard_search=true&tab_id=experience_tab&section_offset=3&items_offset=" + str(_offset) + "&recommendation_item_cursor=&refinement_paths[]=/experiences&query=&last_search_session_id=&federated_search_session_id=320016fd-d09c-48c0-b7ed-2786432d35fb&screen_size=large&_intents=p1&key=d306zoyjsyarp7ifhu67rjxn52tv0t20&currency=USD&locale=en"
    responses = requests.get(url).json()
    return responses
offset = 0
# Page through the "experiences" listings, one request per 40-item page
# (offsets 0, 40, ..., 280), printing each result as a brace-delimited
# JSON-ish record on stdout (Python 2 print statements).
while offset <= 280: #280
    #add all results from this json pull to results var
    results = get_page(offset)
    # Drill into the response: first explore tab, first section,
    # 'trip_templates' holds the experience entries.
    items = results['explore_tabs'][0]['sections'][0]['trip_templates']
    for item in items:
        print '{'
        print '\"title\":' + '\"' + item['title'].encode('utf-8') + '\",'
        print '\"kicker_text\":' + '\"' + item['kicker_text'].encode('utf-8') + '\",'
        print '\"country\":' + '\"' + item['country'].encode('utf-8') + '\",'
        print '\"picture\":' + '\"' + item['picture']['large_ro'] + '\",'
        print '\"star_rating\":' + str(item['star_rating']) + ','
        print '\"lat\":' + str(item['lat']) + ','
        print '\"lng\":' + str(item['lng'])
        print '},'
    #update offset
    offset = offset + 40
# Example request URLs kept for reference (NOTE(review): these copies
# are HTML-entity mangled — '§ion_offset' / '¤cy' — like the URL above).
#for airbnb:
#https://www.airbnb.com/api/v2/explore_tabs?version=1.3.3&_format=for_explore_search_web&experiences_per_grid=20&items_per_grid=18&guidebooks_per_grid=20&auto_ib=true&fetch_filters=true&is_guided_search=true&is_new_cards_experiment=true&luxury_pre_launch=false&query_understanding_enabled=false&show_groupings=true&supports_for_you_v3=true&timezone_offset=-300&metadata_only=false&is_standard_search=true&tab_id=experience_tab§ion_offset=3&items_offset=40&recommendation_item_cursor=&refinement_paths[]=/experiences&query=&last_search_session_id=&federated_search_session_id=320016fd-d09c-48c0-b7ed-2786432d35fb&screen_size=large&_intents=p1&key=d306zoyjsyarp7ifhu67rjxn52tv0t20¤cy=USD&locale=en
# https://www.airbnb.com/api/v2/explore_tabs?version=1.3.3&_format=for_explore_search_web&experiences_per_grid=20&items_per_grid=18&guidebooks_per_grid=20&auto_ib=true&fetch_filters=true&is_guided_search=true&is_new_cards_experiment=true&luxury_pre_launch=false&query_understanding_enabled=false&show_groupings=true&supports_for_you_v3=true&timezone_offset=-300&metadata_only=false&is_standard_search=true&tab_id=experience_tab§ion_offset=5&items_offset=800&recommendation_item_cursor=&refinement_paths[]=/experiences&query=&last_search_session_id=&federated_search_session_id=320016fd-d09c-48c0-b7ed-2786432d35fb&screen_size=large&_intents=p1&key=d306zoyjsyarp7ifhu67rjxn52tv0t20¤cy=USD&locale=en
# items_offset= increments by 40
| 83.222222
| 731
| 0.798732
| 441
| 2,996
| 5.092971
| 0.278912
| 0.02805
| 0.024043
| 0.022707
| 0.770703
| 0.770703
| 0.770703
| 0.770703
| 0.770703
| 0.770703
| 0
| 0.057956
| 0.049733
| 2,996
| 36
| 732
| 83.222222
| 0.730945
| 0.506676
| 0
| 0
| 0
| 0.1
| 0.620245
| 0.16712
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.05
| null | null | 0.45
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
198e2bba111502fd0553ec2ca7bda3b1fc462a22
| 27,821
|
py
|
Python
|
lib/coginvasion/toon/ToonHeads.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | 1
|
2020-03-12T16:44:10.000Z
|
2020-03-12T16:44:10.000Z
|
lib/coginvasion/toon/ToonHeads.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | null | null | null |
lib/coginvasion/toon/ToonHeads.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | null | null | null |
# uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: lib.coginvasion.toon.ToonHeads
from lib.coginvasion.globals import CIGlobals
import json, random
from direct.directnotify.DirectNotifyGlobal import directNotify
from panda3d.core import Point3
from direct.actor import Actor
# Candidate head orientations for the idle look-around behaviour.
# lookAtSpot() picks one at random and feeds it to hprInterval() as a
# Point3, so each tuple is an (H, P, R) rotation target in degrees.
lookSpots = [
    (50, 0, 0),
    (-50, 0, 0),
    (50, 35, 0),
    (-50, 35, 0),
    (50, -35, 0),
    (-50, -35, 0),
    (0, 35, 0),
    (0, -35, 0),
    (0, 0, 0)]
class ToonHeads(Actor.Actor):
    """Loads toon head models (plus eyelashes for girl toons) and drives
    the idle look-around behaviour.

    The decompiled original repeated the same per-head-type hide lists
    four times in each of generateFromFile() and generate() (~500
    duplicated lines).  Those lists are factored into class-level tables
    and helpers here; behaviour is preserved, including every quirk of
    the original (per-group try/except swallowing, the missing pupil
    hide for head type '4' in generate(), and the unreachable third
    eyelash branch).
    """
    notify = directNotify.newCategory('ToonHeads')

    # Muzzle expression nodes; every head type hides both lengths.
    _MUZZLES_LONG = [['muzzle-long-laugh'], ['muzzle-long-angry'],
                     ['muzzle-long-smile'], ['muzzle-long-sad'],
                     ['muzzle-long-surprise']]
    _MUZZLES_SHORT = [['muzzle-short-laugh'], ['muzzle-short-angry'],
                      ['muzzle-short-smile'], ['muzzle-short-sad'],
                      ['muzzle-short-surprise']]
    # Base geometry hidden per head type.  Each inner list is applied in
    # ONE try/except, matching the original grouping (both pupil joints
    # shared a single try, so a failure on the first skips the second).
    _BASE_LONG = [['eyes-long'], ['ears-long'], ['head-long'],
                  ['joint_pupilR_long', 'joint_pupilL_long'],
                  ['head-front-long']]
    _BASE_SHORT = [['eyes-short'], ['ears-short'], ['head-short'],
                   ['joint_pupilR_short', 'joint_pupilL_short'],
                   ['head-front-short']]
    # Full hide list per head type.  As in the original, types '1'/'2'
    # additionally hide the long neutral muzzle, '3'/'4' the short one.
    _HIDE_GROUPS = {
        '1': _BASE_LONG + _MUZZLES_LONG + [['muzzle-long-neutral']] + _MUZZLES_SHORT,
        '2': _BASE_SHORT + _MUZZLES_LONG + [['muzzle-long-neutral']] + _MUZZLES_SHORT,
        '3': _BASE_LONG + _MUZZLES_LONG + [['muzzle-short-neutral']] + _MUZZLES_SHORT,
        '4': _BASE_SHORT + _MUZZLES_LONG + [['muzzle-short-neutral']] + _MUZZLES_SHORT,
    }
    # generate() historically did NOT hide the pupil joints for type '4'
    # (unlike generateFromFile()); preserved verbatim for fidelity.
    _HIDE_GROUPS_GENERATE_4 = ([['eyes-short'], ['ears-short'],
                                ['head-short'], ['head-front-short']] +
                               _MUZZLES_LONG + [['muzzle-short-neutral']] +
                               _MUZZLES_SHORT)

    def __init__(self):
        Actor.Actor.__init__(self)

    @staticmethod
    def _hideGroups(h, groups):
        """Hide each group of named nodes under h.

        A missing/failing node aborts only its own group; the error is
        swallowed, exactly like the original try/except-per-group code.
        """
        for group in groups:
            try:
                for name in group:
                    h.find('**/' + name).hide()
            except:
                pass

    @staticmethod
    def _hideLashes(lashes, headtype):
        """Hide the eyelash variants that do not apply to headtype.

        Errors are NOT swallowed here (the original had no try/except
        around the lash hides).
        """
        if headtype in ('1', 'dgm_skirt', 'dgm_shorts', 'dgs_shorts'):
            lashes.find('**/open-long').hide()
            lashes.find('**/closed-long').hide()
            lashes.find('**/closed-short').hide()
        elif headtype in ('2', 'dgl_shorts'):
            lashes.find('**/open-short').hide()
            lashes.find('**/closed-short').hide()
            lashes.find('**/closed-long').hide()
        elif headtype in ('3', 'dgm_skirt', 'dgm_shorts', 'dgs_shorts'):
            # dgm_*/dgs_* can never reach this branch (caught above);
            # kept to mirror the original chain.
            lashes.find('**/open-long').hide()
            lashes.find('**/closed-long').hide()
            lashes.find('**/closed-short').hide()
        elif headtype in ('4', 'dgl_shorts'):
            lashes.find('**/open-short').hide()
            lashes.find('**/closed-short').hide()
            lashes.find('**/closed-long').hide()

    def generateFromFile(self, jsonfile, slot):
        """Build a head from a saved toon JSON file.

        jsonfile -- path of the JSON save file
        slot     -- toon slot number; data is read from key 'toon<slot>'
        Returns the head model node; for girl toons the eyelash model is
        reparented to it first.
        """
        infoFile = open(jsonfile)
        try:
            jsonInfo = json.load(infoFile)
        finally:
            infoFile.close()  # fix: the original leaked the file handle
        toon = jsonInfo['toon' + str(slot)]
        head = toon['head']
        detail = str(CIGlobals.getModelDetail(CIGlobals.Toon))
        if not head == 'dog':
            h = loader.loadModel('phase_3/models/char/' + head + '-heads-' + detail + '.bam')
        else:
            h = loader.loadModel('phase_3/models/char/tt_a_chr_' + toon['headtype'] + '_head_' + detail + '.bam')
        headtype = toon['headtype']
        groups = self._HIDE_GROUPS.get(headtype)
        if groups is not None:
            self._hideGroups(h, groups)

        def paint(*names):
            # Tint nodes with the saved head colour.  The 'headcolor'
            # lookup happens here, inside the caller's try, so a missing
            # key is swallowed exactly as in the original code.
            c = toon['headcolor']
            for name in names:
                h.find('**/' + name).setColor(c[0], c[1], c[2], c[3])

        try:
            if head == 'monkey':
                pass
            else:
                paint('ears-long', 'ears-short')
        except:
            pass
        try:
            paint('head-front-short', 'head-front-long', 'head-short', 'head-long')
        except:
            pass
        try:
            paint('head-front', 'head')
        except:
            pass
        try:
            if head == 'dog' or head == 'monkey':
                pass
            else:
                paint('ears')
        except:
            pass
        if toon['gender'] == 'girl':
            # print(x) with one argument behaves identically under the
            # original Python 2 and Python 3.
            print(toon['head'])
            lashes = loader.loadModel('phase_3/models/char/' + head + '-lashes.bam')
            lashes.reparentTo(h)
            self._hideLashes(lashes, headtype)
        return h

    def startLookAround(self, head):
        """Begin the idle look-around task on the given head node."""
        self.head = head
        delay = random.randint(3, 10)
        taskMgr.doMethodLater(delay, self.lookAtSpot, 'lookAtSpot')

    def lookAtSpot(self, task):
        """Task body: turn the head toward a random lookSpots entry over
        3 seconds, then reschedule itself after a random 3-10 s delay."""
        spot = random.randint(0, len(lookSpots) - 1)
        delay = random.randint(3, 10)
        lookInt = self.head.hprInterval(3, Point3(lookSpots[spot]),
                                        startHpr=self.head.getHpr(),
                                        blendType='easeInOut')
        lookInt.start()
        task.delayTime = delay
        return task.again

    def stop(self):
        """Cancel the look-around task."""
        taskMgr.remove('lookAtSpot')

    def generate(self, gender, head, headtype):
        """Build a head from explicit parameters (no JSON file).

        gender   -- 'girl' adds the eyelash model
        head     -- species name; 'dog' heads load an animated Actor
        headtype -- '1'..'4' or a dog headtype string
        Returns (head, lashes) for girl toons, otherwise just the head.
        """
        detail = str(CIGlobals.getModelDetail(CIGlobals.Toon))
        if not head == 'dog':
            h = loader.loadModel('phase_3/models/char/' + head + '-heads-' + detail + '.bam')
        else:
            # Dog heads are fully animated: load an Actor with the whole
            # per-headtype animation dictionary.
            base = 'tt_a_chr_' + headtype + '_head_'
            h = Actor('phase_3/models/char/' + base + detail + '.bam', {
                'neutral': 'phase_3/models/char/' + base + 'neutral.bam',
                'run': 'phase_3/models/char/' + base + 'run.bam',
                'walk': 'phase_3.5/models/char/' + base + 'walk.bam',
                'pie': 'phase_3.5/models/char/' + base + 'pie-throw.bam',
                'fallb': 'phase_4/models/char/' + base + 'slip-backward.bam',
                'fallf': 'phase_4/models/char/' + base + 'slip-forward.bam',
                'lose': 'phase_5/models/char/' + base + 'lose.bam',
                'win': 'phase_3.5/models/char/' + base + 'victory-dance.bam',
                'squirt': 'phase_5/models/char/' + base + 'water-gun.bam',
                'zend': 'phase_3.5/models/char/' + base + 'jump-zend.bam',
                'tele': 'phase_3.5/models/char/' + base + 'teleport.bam',
                'book': 'phase_3.5/models/char/' + base + 'book.bam',
                'leap': 'phase_3.5/models/char/' + base + 'leap_zhang.bam',
                'jump': 'phase_3.5/models/char/' + base + 'jump-zhang.bam',
                'happy': 'phase_3.5/models/char/' + base + 'jump.bam',
                'shrug': 'phase_3.5/models/char/' + base + 'shrug.bam',
                'hdance': 'phase_5/models/char/' + base + 'happy-dance.bam',
                'wave': 'phase_3.5/models/char/' + base + 'wave.bam'})
        if headtype == '4':
            groups = self._HIDE_GROUPS_GENERATE_4
        else:
            groups = self._HIDE_GROUPS.get(headtype)
        if groups is not None:
            self._hideGroups(h, groups)
        if gender == 'girl':
            lashes = loader.loadModel('phase_3/models/char/' + head + '-lashes.bam')
            lashes.reparentTo(h)
            self._hideLashes(lashes, headtype)
            return (h, lashes)
        return h
| 34.646326
| 281
| 0.368103
| 2,328
| 27,821
| 4.323024
| 0.072595
| 0.071045
| 0.102544
| 0.168323
| 0.882154
| 0.873311
| 0.867647
| 0.845787
| 0.836248
| 0.808327
| 0
| 0.012855
| 0.491104
| 27,821
| 803
| 282
| 34.646326
| 0.69798
| 0.007404
| 0
| 0.865356
| 0
| 0
| 0.186085
| 0.06798
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.20121
| 0.007564
| null | null | 0.001513
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
19964d93c357c281455b6a0c5fad8307d65ef761
| 140
|
py
|
Python
|
geneal/applications/tsp/helpers/__init__.py
|
NeveIsa/geneal
|
064b0409912088886bf56fe9a729d74dac92a235
|
[
"MIT"
] | 47
|
2020-07-10T14:28:52.000Z
|
2022-03-25T17:20:52.000Z
|
geneal/applications/tsp/helpers/__init__.py
|
NeveIsa/geneal
|
064b0409912088886bf56fe9a729d74dac92a235
|
[
"MIT"
] | 10
|
2020-08-08T16:35:40.000Z
|
2022-03-08T00:07:19.000Z
|
geneal/applications/tsp/helpers/__init__.py
|
NeveIsa/geneal
|
064b0409912088886bf56fe9a729d74dac92a235
|
[
"MIT"
] | 14
|
2020-08-07T20:49:18.000Z
|
2022-03-31T17:55:47.000Z
|
from geneal.applications.tsp.helpers._plot_cities import plot_cities
from geneal.applications.tsp.helpers._create_graph import create_graph
| 46.666667
| 70
| 0.885714
| 20
| 140
| 5.9
| 0.5
| 0.169492
| 0.372881
| 0.423729
| 0.542373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 140
| 2
| 71
| 70
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
273e5530e7c3508053a172772e92a3e394cd6103
| 4,780
|
py
|
Python
|
test/manual_tests.py
|
Adrian-Samoticha/zmija
|
c4ee1623c98cae8e794508daab77fbf29e259982
|
[
"MIT"
] | 3
|
2021-07-05T11:53:39.000Z
|
2021-11-23T10:22:53.000Z
|
test/manual_tests.py
|
Adrian-Samoticha/zmija
|
c4ee1623c98cae8e794508daab77fbf29e259982
|
[
"MIT"
] | null | null | null |
test/manual_tests.py
|
Adrian-Samoticha/zmija
|
c4ee1623c98cae8e794508daab77fbf29e259982
|
[
"MIT"
] | null | null | null |
import unittest
import os
from os.path import exists
import shutil
import sys
# Make ../src (relative to this test file) importable so the tests can
# import the zmija package without installing it.
testdir = os.path.dirname(__file__)
srcdir = '../src'
sys.path.insert(0, os.path.abspath(os.path.join(testdir, srcdir)))
from zmija import Zmija
class ManualTest(unittest.TestCase):
    """End-to-end tests: write template files into ./test/test_project/,
    run the Zmija generator over the directory, and compare the rewritten
    file contents.

    NOTE(review): leading whitespace inside the triple-quoted template
    strings appears to have been lost in this copy of the file — confirm
    the exact indentation against the original repository.
    """

    def _remove_directory(self, path):
        # Best-effort recursive delete; a failure is reported on stdout,
        # not raised.
        try:
            shutil.rmtree(path)
        except OSError as e:
            print("Directory deletion failed: %s - %s." % (e.filename, e.strerror))

    def test_manually_1(self):
        """Two files whose generators append text into each other's
        lists; each file's GENERATED_CODE section must end up holding
        the text contributed by the other file."""
        PATH = "./test/test_project/"
        # Start from a clean project directory.
        if exists(PATH):
            self._remove_directory(PATH)
        os.mkdir(PATH)
        file1 = open(PATH + "file1.txt", 'x')
        file1.write("""begin
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["file1_list"] = []
pass
def init(variables):
variables["file2_list"].append("TEXT_FILE1")
pass
def generate(variables):
return "\\n".join(variables["file1_list"])
*/// ~ZMIJA.GENERATED_CODE:
// ~ZMIJA.END
end
""")
        file1.close()
        file2 = open(PATH + "file2.txt", 'x')
        file2.write("""begin
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["file2_list"] = []
pass
def init(variables):
variables["file1_list"].append("TEXT_FILE2")
pass
def generate(variables):
return "\\n".join(variables["file2_list"])
*/// ~ZMIJA.GENERATED_CODE:
// ~ZMIJA.END
end
""")
        file2.close()
        # Run the generator in place over the whole directory.
        Zmija.run(False, False, False, PATH, lambda x: True)
        # file1's generated section must now contain file2's text.
        file1 = open(PATH + "file1.txt", "r")
        self.assertEqual(file1.read(), """begin
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["file1_list"] = []
pass
def init(variables):
variables["file2_list"].append("TEXT_FILE1")
pass
def generate(variables):
return "\\n".join(variables["file1_list"])
*/// ~ZMIJA.GENERATED_CODE:
TEXT_FILE2
// ~ZMIJA.END
end
""")
        file1.close()
        # ...and vice versa.
        file2 = open(PATH + "file2.txt", "r")
        self.assertEqual(file2.read(), """begin
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["file2_list"] = []
pass
def init(variables):
variables["file1_list"].append("TEXT_FILE2")
pass
def generate(variables):
return "\\n".join(variables["file2_list"])
*/// ~ZMIJA.GENERATED_CODE:
TEXT_FILE1
// ~ZMIJA.END
end
""")
        file2.close()
        self._remove_directory(PATH)

    def test_manually_2(self):
        """Three generator blocks in ONE file, each appending to the
        other two blocks' lists; every stale 'SOME GARBAGE' section must
        be replaced by the two lines contributed by the other blocks."""
        PATH = "./test/test_project/"
        if exists(PATH):
            self._remove_directory(PATH)
        os.mkdir(PATH)
        file = open(PATH + "file.txt", 'x')
        file.write("""begin
some indentation:
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["first_list"] = []
pass
def init(variables):
variables["second_list"].append("ONE")
variables["third_list"].append("ONE")
pass
def generate(variables):
return "\\n".join(variables["first_list"])
*/// ~ZMIJA.GENERATED_CODE:
SOME GARBAGE
// ~ZMIJA.END
upper middle
even more:
indentation:
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["second_list"] = []
pass
def init(variables):
variables["first_list"].append("TWO")
variables["third_list"].append("TWO")
pass
def generate(variables):
return "\\n".join(variables["second_list"])
*/// ~ZMIJA.GENERATED_CODE:
SOME GARBAGE
// ~ZMIJA.END
lower middle
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["third_list"] = []
pass
def init(variables):
variables["first_list"].append("THREE")
variables["second_list"].append("THREE")
pass
def generate(variables):
return "\\n".join(variables["third_list"])
*/// ~ZMIJA.GENERATED_CODE:
SOME GARBAGE
// ~ZMIJA.END
end""")
        file.close()
        Zmija.run(False, False, False, PATH, lambda x: True)
        file = open(PATH + "file.txt", "r")
        self.assertEqual(file.read(), """begin
some indentation:
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["first_list"] = []
pass
def init(variables):
variables["second_list"].append("ONE")
variables["third_list"].append("ONE")
pass
def generate(variables):
return "\\n".join(variables["first_list"])
*/// ~ZMIJA.GENERATED_CODE:
TWO
THREE
// ~ZMIJA.END
upper middle
even more:
indentation:
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["second_list"] = []
pass
def init(variables):
variables["first_list"].append("TWO")
variables["third_list"].append("TWO")
pass
def generate(variables):
return "\\n".join(variables["second_list"])
*/// ~ZMIJA.GENERATED_CODE:
ONE
THREE
// ~ZMIJA.END
lower middle
/* ~ZMIJA.GENERATOR:
def declare(variables):
variables["third_list"] = []
pass
def init(variables):
variables["first_list"].append("THREE")
variables["second_list"].append("THREE")
pass
def generate(variables):
return "\\n".join(variables["third_list"])
*/// ~ZMIJA.GENERATED_CODE:
ONE
TWO
// ~ZMIJA.END
end""")
        file.close()
        self._remove_directory(PATH)
# Allow running directly: python test/manual_tests.py
if __name__ == '__main__':
    unittest.main()
| 20.964912
| 74
| 0.667992
| 598
| 4,780
| 5.205686
| 0.148829
| 0.115644
| 0.05461
| 0.077096
| 0.847093
| 0.785095
| 0.785095
| 0.781561
| 0.762608
| 0.762608
| 0
| 0.009161
| 0.155021
| 4,780
| 228
| 75
| 20.964912
| 0.761575
| 0
| 0
| 0.831633
| 0
| 0
| 0.7306
| 0.300356
| 0
| 0
| 0
| 0
| 0.015306
| 1
| 0.015306
| false
| 0.102041
| 0.030612
| 0
| 0.102041
| 0.005102
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
278627ab180f567a2bbd668e9b0a2d5653a0e076
| 134
|
py
|
Python
|
src/data/__init__.py
|
RomainGratier/Black-box_Optimization_via_Deep_Generative-Exploratory_Networks
|
2cce334b473df709eb67d2f351a96cde1addc5a6
|
[
"MIT"
] | 4
|
2020-04-07T08:40:11.000Z
|
2022-03-31T12:57:19.000Z
|
src/data/__init__.py
|
RomainGratier/Black-box_Optimization_via_Deep_Generative-Exploratory_Networks
|
2cce334b473df709eb67d2f351a96cde1addc5a6
|
[
"MIT"
] | 4
|
2020-04-08T08:26:03.000Z
|
2020-04-08T08:28:56.000Z
|
src/data/__init__.py
|
RomainGratier/Black-box_Optimization_via_Deep_Generative-Exploratory_Networks
|
2cce334b473df709eb67d2f351a96cde1addc5a6
|
[
"MIT"
] | null | null | null |
from .dataset import MNISTDataset, RotationDataset, MNISTDatasetLeNet, RotationDatasetLeNet, getDataset, getDataloader, SyntheticTesla
| 134
| 134
| 0.880597
| 10
| 134
| 11.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067164
| 134
| 1
| 134
| 134
| 0.944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
27975f601228850ded150b361424afab0441db7b
| 3,857
|
py
|
Python
|
utils/log_bp_solver.py
|
DELTA37/LDPC-code
|
e055685d8dfebd50fa19a2638a8934b4a43b7ad3
|
[
"BSD-2-Clause"
] | 2
|
2020-10-21T07:13:28.000Z
|
2020-10-21T17:03:35.000Z
|
utils/log_bp_solver.py
|
DELTA37/LDPC-code
|
e055685d8dfebd50fa19a2638a8934b4a43b7ad3
|
[
"BSD-2-Clause"
] | null | null | null |
utils/log_bp_solver.py
|
DELTA37/LDPC-code
|
e055685d8dfebd50fa19a2638a8934b4a43b7ad3
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
import warnings
from numba import njit, int64, types, float64
# Numba return-type signature shared by both solvers below: the tuple
# (Lq, Lr, L_posteriori) — two 3-D float64 arrays and one 2-D array.
output_type_log2 = types.Tuple((float64[:, :, :], float64[:, :, :],
                                float64[:, :]))
@njit(output_type_log2(int64[:], int64[:], int64[:], int64[:], float64[:, :],
                       float64[:, :, :], float64[:, :, :], int64), cache=True)
def _logbp_numba(bits_hist, bits_values, nodes_hist, nodes_values, Lc, Lq, Lr,
                 n_iter):
    """Perform inner ext LogBP solver."""
    # Irregular-graph variant: bits_values / nodes_values are FLATTENED
    # adjacency lists; bits_hist / nodes_hist give each row's length,
    # tracked with the running *_counter offsets below.
    m, n, n_messages = Lr.shape
    # step 1 : Horizontal
    bits_counter = 0
    nodes_counter = 0
    for i in range(m):
        # ni = bits[i]
        ff = bits_hist[i]
        ni = bits_values[bits_counter: bits_counter + ff]
        bits_counter += ff
        for j in ni:
            nij = ni[:]
            # Product of tanh(L/2) over all neighbours of i except j.
            X = np.ones(n_messages)
            if n_iter == 0:
                # First iteration: only the channel values Lc exist yet.
                for kk in range(len(nij)):
                    if nij[kk] != j:
                        X *= np.tanh(0.5 * Lc[nij[kk]])
            else:
                for kk in range(len(nij)):
                    if nij[kk] != j:
                        X *= np.tanh(0.5 * Lq[i, nij[kk]])
            num = 1 + X
            denom = 1 - X
            for ll in range(n_messages):
                # Saturate to -1/+1 where log((1+X)/(1-X)) would divide
                # by zero or take log(0) (X == -1 or X == +1).
                if num[ll] == 0:
                    Lr[i, j, ll] = -1
                elif denom[ll] == 0:
                    Lr[i, j, ll] = 1
                else:
                    Lr[i, j, ll] = np.log(num[ll] / denom[ll])
    # step 2 : Vertical
    for j in range(n):
        # mj = nodes[j]
        ff = nodes_hist[j]
        mj = nodes_values[nodes_counter: nodes_counter + ff]
        nodes_counter += ff
        for i in mj:
            mji = mj[:]
            # Lq[i, j] = channel value plus every incoming Lr except
            # the one from row i itself.
            Lq[i, j] = Lc[j]
            for kk in range(len(mji)):
                if mji[kk] != i:
                    Lq[i, j] += Lr[mji[kk], j]
    # LLR a posteriori:
    # Reset the offset — nodes_values is walked a second time here.
    L_posteriori = np.zeros((n, n_messages))
    nodes_counter = 0
    for j in range(n):
        ff = nodes_hist[j]
        mj = nodes_values[nodes_counter: nodes_counter + ff]
        nodes_counter += ff
        L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
    return Lq, Lr, L_posteriori
@njit(output_type_log2(int64[:], int64[:, :], int64[:], int64[:, :],
                       float64[:, :], float64[:, :, :], float64[:, :, :],
                       int64), cache=True)
def _logbp_numba_regular(bits_hist, bits_values, nodes_hist, nodes_values, Lc,
                         Lq, Lr, n_iter):
    """Perform inner ext LogBP solver."""
    # Regular-graph variant: bits_values / nodes_values are 2-D, so each
    # row's neighbour list is indexed directly (no running offsets;
    # bits_hist / nodes_hist are unused here).
    m, n, n_messages = Lr.shape
    # step 1 : Horizontal
    for i in range(m):
        ni = bits_values[i]
        for j in ni:
            nij = ni[:]
            # Product of tanh(L/2) over all neighbours of i except j.
            X = np.ones(n_messages)
            if n_iter == 0:
                # First iteration: only the channel values Lc exist yet.
                for kk in range(len(nij)):
                    if nij[kk] != j:
                        X *= np.tanh(0.5 * Lc[nij[kk]])
            else:
                for kk in range(len(nij)):
                    if nij[kk] != j:
                        X *= np.tanh(0.5 * Lq[i, nij[kk]])
            num = 1 + X
            denom = 1 - X
            for ll in range(n_messages):
                # Saturate to -1/+1 where log((1+X)/(1-X)) would divide
                # by zero or take log(0) (X == -1 or X == +1).
                if num[ll] == 0:
                    Lr[i, j, ll] = -1
                elif denom[ll] == 0:
                    Lr[i, j, ll] = 1
                else:
                    Lr[i, j, ll] = np.log(num[ll] / denom[ll])
    # step 2 : Vertical
    for j in range(n):
        mj = nodes_values[j]
        for i in mj:
            mji = mj[:]
            # Lq[i, j] = channel value plus every incoming Lr except
            # the one from row i itself.
            Lq[i, j] = Lc[j]
            for kk in range(len(mji)):
                if mji[kk] != i:
                    Lq[i, j] += Lr[mji[kk], j]
    # LLR a posteriori:
    L_posteriori = np.zeros((n, n_messages))
    for j in range(n):
        mj = nodes_values[j]
        L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
    return Lq, Lr, L_posteriori
| 31.357724
| 79
| 0.436349
| 514
| 3,857
| 3.161479
| 0.143969
| 0.060308
| 0.022154
| 0.044308
| 0.862154
| 0.854769
| 0.854769
| 0.832615
| 0.832615
| 0.812308
| 0
| 0.034065
| 0.421571
| 3,857
| 122
| 80
| 31.614754
| 0.694307
| 0.052632
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021053
| false
| 0
| 0.031579
| 0
| 0.073684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27b6e36871dc55cf935a58db065698319a0fc43a
| 2,291
|
py
|
Python
|
demo/annotation/__init__.py
|
trompamusic/ce-queries-template
|
cc5ae69d0e76623bfd72e9453f569f6624bf7c3b
|
[
"Apache-2.0"
] | 1
|
2020-06-18T15:43:18.000Z
|
2020-06-18T15:43:18.000Z
|
demo/annotation/__init__.py
|
trompamusic/ce-queries-template
|
cc5ae69d0e76623bfd72e9453f569f6624bf7c3b
|
[
"Apache-2.0"
] | 60
|
2019-12-17T11:08:28.000Z
|
2021-03-02T16:19:41.000Z
|
demo/annotation/__init__.py
|
trompamusic/trompace-client
|
cc5ae69d0e76623bfd72e9453f569f6624bf7c3b
|
[
"Apache-2.0"
] | null | null | null |
from demo import send_query_and_get_id
from trompace.mutations import audioobject
from trompace.queries import audioobject as query_audioobject
def _get_or_create_audio_file(print_queries, submit_queries, contenturl, name):
    """Look up an AudioObject by contenturl, creating it if not found.

    Shared implementation for the demo audio-file helpers (they differed
    only in URL and name).  When ``submit_queries`` is set and the lookup
    finds an existing node, returns its id.  Otherwise builds a
    CreateAudioObject mutation and returns the id returned by the server
    (or the placeholder "audio-node-id" when nothing is submitted).
    """
    get_audio = query_audioobject.query_audioobject(contenturl=contenturl)
    if submit_queries:
        audio_id = send_query_and_get_id(get_audio, "AudioObject")
        if audio_id:
            print("get AudioObject")
            if print_queries:
                print(get_audio)
            return audio_id[0]
    audio = audioobject.mutation_create_audioobject(
        name=name,
        title=name,
        creator="https://github.com/trompamusic/audio-annotator",
        contributor="https://mtg.upf.edu/",
        source=contenturl,
        format_="audio/wav",
        encodingformat="audio/wav",
        contenturl=contenturl
    )
    # Placeholder id used when the mutation is not actually submitted.
    audio_id = "audio-node-id"
    print("AudioObject")
    if print_queries:
        print(audio)
    if submit_queries:
        audio_id = send_query_and_get_id(audio, "CreateAudioObject")
    return audio_id


def audio_file_liebestraum(print_queries, submit_queries):
    """Get or create the AudioObject for the Liebestraum No. 3 recording."""
    # A file that we're annotating
    return _get_or_create_audio_file(
        print_queries, submit_queries,
        "https://trompa-mtg.upf.edu/data/anno-component-test/SMC_005.wav",
        "Liebestraum No. 3",
    )


def audio_file_pierri_etude(print_queries, submit_queries):
    """Get or create the AudioObject for the Pierri Etude no. 4 recording."""
    # A file that we're annotating
    return _get_or_create_audio_file(
        print_queries, submit_queries,
        "https://trompa-mtg.upf.edu/data/anno-component-test/SMC_015.wav",
        "Pierri Etude no. 4",
    )
| 30.959459
| 82
| 0.66914
| 276
| 2,291
| 5.307971
| 0.221014
| 0.057338
| 0.040956
| 0.051195
| 0.847099
| 0.835495
| 0.835495
| 0.835495
| 0.835495
| 0.835495
| 0
| 0.006838
| 0.233959
| 2,291
| 73
| 83
| 31.383562
| 0.82792
| 0.02488
| 0
| 0.77193
| 0
| 0.035088
| 0.223218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.052632
| 0
| 0.157895
| 0.245614
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27caff6455cbbb017752bfd1954cf53565da6a66
| 183
|
py
|
Python
|
DelibeRating/DelibeRating/deliberating-env/Lib/site-packages/resources/admin.py
|
Severose/DelibeRating
|
5d227f35c071477ce3fd6fbf3ab13a44d13f6e08
|
[
"MIT"
] | null | null | null |
DelibeRating/DelibeRating/deliberating-env/Lib/site-packages/resources/admin.py
|
Severose/DelibeRating
|
5d227f35c071477ce3fd6fbf3ab13a44d13f6e08
|
[
"MIT"
] | null | null | null |
DelibeRating/DelibeRating/deliberating-env/Lib/site-packages/resources/admin.py
|
Severose/DelibeRating
|
5d227f35c071477ce3fd6fbf3ab13a44d13f6e08
|
[
"MIT"
] | null | null | null |
# Imports grouped at the top (PEP 8) instead of interleaved with the
# register() calls; registration order is unchanged.
from django.contrib import admin

from resources.models import MergedResource, Resource

# Expose both resource models in the Django admin site.
admin.site.register(Resource)
admin.site.register(MergedResource)
| 22.875
| 43
| 0.852459
| 23
| 183
| 6.782609
| 0.478261
| 0.166667
| 0.24359
| 0.320513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087432
| 183
| 7
| 44
| 26.142857
| 0.934132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fd805a760155e21e3ca2efab0c6a7d5bb417d2e6
| 1,369
|
py
|
Python
|
ServerComponent/UnitTests/credibility_score_test.py
|
CDU55/FakeNews
|
707bd48dd78851081d98ad21bbdadfc2720bd644
|
[
"MIT"
] | null | null | null |
ServerComponent/UnitTests/credibility_score_test.py
|
CDU55/FakeNews
|
707bd48dd78851081d98ad21bbdadfc2720bd644
|
[
"MIT"
] | 37
|
2020-10-20T08:30:53.000Z
|
2020-12-22T13:15:45.000Z
|
ServerComponent/UnitTests/credibility_score_test.py
|
CDU55/FakeNews
|
707bd48dd78851081d98ad21bbdadfc2720bd644
|
[
"MIT"
] | 1
|
2020-10-19T14:55:23.000Z
|
2020-10-19T14:55:23.000Z
|
from DataLayer.CredibilityScore import get_post_score
def test_credibility_score():
    """Check get_post_score for known follower/like/verified combinations.

    The four original copy-pasted case blocks are collapsed into a
    data-driven table; only followers, likes and the verified flag
    varied between them.
    """
    # Inputs that were identical across every original case.
    shares = 30000
    quote = 5000
    grammar = 1
    tweets_number = 35000
    subject_relevance = 85
    # (followers, likes, verified flag, expected score)
    cases = [
        (600000, 200000, 1, 100),
        (50000, 200000, 1, 95),
        (50000, 50000, 1, 93.75),
        # Same inputs as above but unverified: score should not change.
        (50000, 50000, 0, 93.75),
    ]
    for followers, likes, verified, expected in cases:
        score = get_post_score(followers, verified, tweets_number, shares,
                               quote, likes, grammar, subject_relevance)
        assert score == expected, "Credibility score should be %s" % expected


test_credibility_score()
| 26.843137
| 105
| 0.693207
| 170
| 1,369
| 5.405882
| 0.205882
| 0.104461
| 0.099021
| 0.087051
| 0.806311
| 0.806311
| 0.806311
| 0.806311
| 0.806311
| 0.806311
| 0
| 0.13467
| 0.235208
| 1,369
| 50
| 106
| 27.38
| 0.743075
| 0
| 0
| 0.820513
| 0
| 0
| 0.092768
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.025641
| false
| 0
| 0.025641
| 0
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e32e87200904d1d8cd710bdb94b76329d3e43eb8
| 2,351
|
py
|
Python
|
wedsite/rsvp/migrations/0004_auto_20151127_1656.py
|
jehutymax/wedsite
|
acea5fb0e6bd3225433f056eb402d97ad1af6161
|
[
"BSD-3-Clause"
] | 2
|
2017-05-31T16:26:32.000Z
|
2017-08-29T22:25:41.000Z
|
wedsite/rsvp/migrations/0004_auto_20151127_1656.py
|
jehutymax/wedsite
|
acea5fb0e6bd3225433f056eb402d97ad1af6161
|
[
"BSD-3-Clause"
] | null | null | null |
wedsite/rsvp/migrations/0004_auto_20151127_1656.py
|
jehutymax/wedsite
|
acea5fb0e6bd3225433f056eb402d97ad1af6161
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Make the rsvp Event/Guest/Person columns nullable."""

    dependencies = [
        ('rsvp', '0003_auto_20151127_1650'),
    ]

    # (model_name, field_name, max_length) for every CharField that becomes
    # nullable; order matches the original auto-generated migration.
    _NULLABLE_CHAR_FIELDS = [
        ('event', 'name', 50),
        ('event', 'url', 100),
        ('event', 'venue', 50),
        ('guest', 'address1', 200),
        ('guest', 'address2', 200),
        ('guest', 'city', 100),
        ('guest', 'country', 12),
        ('guest', 'email', 100),
        ('guest', 'name', 200),
        ('guest', 'state', 2),
        ('guest', 'zipcode', 12),
        ('person', 'name', 100),
    ]

    # The single DateTimeField is listed explicitly; the CharFields are
    # generated from the table above.
    operations = [
        migrations.AlterField(
            model_name='event',
            name='date',
            field=models.DateTimeField(null=True),
        ),
    ] + [
        migrations.AlterField(
            model_name=model_name,
            name=field_name,
            field=models.CharField(max_length=max_length, null=True),
        )
        for model_name, field_name, max_length in _NULLABLE_CHAR_FIELDS
    ]
| 29.3875
| 62
| 0.534241
| 217
| 2,351
| 5.635945
| 0.225806
| 0.212592
| 0.26574
| 0.308258
| 0.802944
| 0.779231
| 0.74816
| 0.735078
| 0.597711
| 0.597711
| 0
| 0.031756
| 0.343684
| 2,351
| 79
| 63
| 29.759494
| 0.760855
| 0.008932
| 0
| 0.712329
| 0
| 0
| 0.069158
| 0.00988
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027397
| 0
| 0.068493
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e360ac2491ec59a601a4412971eb47940ac9bcd2
| 12,285
|
py
|
Python
|
ipycytoscape/tests/test_graph_methods.py
|
dreness/ipycytoscape
|
5e9695df32f00d47d71b053edf3bccbac3388b68
|
[
"BSD-3-Clause"
] | 146
|
2020-03-09T11:55:45.000Z
|
2021-05-09T04:08:43.000Z
|
ipycytoscape/tests/test_graph_methods.py
|
dreness/ipycytoscape
|
5e9695df32f00d47d71b053edf3bccbac3388b68
|
[
"BSD-3-Clause"
] | 201
|
2020-03-10T12:08:01.000Z
|
2021-05-10T10:01:09.000Z
|
ipycytoscape/tests/test_graph_methods.py
|
dreness/ipycytoscape
|
5e9695df32f00d47d71b053edf3bccbac3388b68
|
[
"BSD-3-Clause"
] | 48
|
2020-02-05T13:48:04.000Z
|
2021-05-03T08:30:51.000Z
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2021, QuantStack and ipycytoscape Contributors
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
import pytest
import copy
from ipycytoscape.cytoscape import Graph, Node, Edge
def compare_nodes(expected_nodes, actual_nodes):
    """Assert that two node lists match element-wise.

    Compares ``data``, ``classes`` and ``position`` of each node pair.
    The length check is required because ``zip`` silently truncates to
    the shorter list — the previous ``bool() == bool()`` guard only
    caught empty-vs-nonempty, so extra trailing nodes went undetected.
    """
    assert len(expected_nodes) == len(actual_nodes)
    for expected, actual in zip(expected_nodes, actual_nodes):
        assert expected.data == actual.data
        assert expected.classes == actual.classes
        assert expected.position == actual.position
def compare_edges(expected_edges, actual_edges):
    """Assert that two edge lists match element-wise.

    Compares ``data`` and ``classes`` of each edge pair.  The length
    check is required because ``zip`` silently truncates to the shorter
    list — the previous ``bool() == bool()`` guard only caught
    empty-vs-nonempty, so extra trailing edges went undetected.
    """
    assert len(expected_edges) == len(actual_edges)
    for expected, actual in zip(expected_edges, actual_edges):
        assert expected.data == actual.data
        assert expected.classes == actual.classes
class TestGraphRemoveMethods:
    """Tests for Graph.remove_edge/remove_node and their *_by_id variants.

    Each test builds a small graph from a JSON dict and checks the
    surviving nodes/edges with the module-level compare_* helpers.
    Directed and multiple-edge graphs tag edges with the " directed " /
    " multiple_edges " class strings, which the expected values mirror.
    """

    def test_remove_edge(self):
        """
        Test to ensure that edges will be removed
        """
        # only a small test because everything else is covered in remove_edge_by_id()
        data = {
            "nodes": [
                {"data": {"id": "0"}},
                {"data": {"id": "1"}},
                {"data": {"id": "2"}},
            ],
            "edges": [
                {"data": {"source": "0", "target": "1"}},
                {"data": {"source": "1", "target": "2"}},
                {"data": {"source": "2", "target": "0"}},
            ],
        }
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
            Node(data={"id": "1"}, position={}),
            Node(data={"id": "2"}, position={}),
        ]
        expected_edges = [
            Edge(classes="", data={"source": "1", "target": "2"}),
            Edge(classes="", data={"source": "2", "target": "0"}),
        ]
        graph = Graph()
        graph.add_graph_from_json(data)
        # Removing the first edge ("0" -> "1") must leave all nodes intact.
        graph.remove_edge(graph.edges[0])
        compare_edges(expected_edges, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_remove_edge_by_id(self):
        """
        Test to ensure that edges will be removed given the ids of the nodes
        for different graphs
        """
        # Includes two parallel "0" -> "1" edges (distinct weights) plus the
        # reverse "1" -> "0", to exercise undirected/directed/multiple modes.
        data = {
            "nodes": [
                {"data": {"id": "0"}},
                {"data": {"id": "1"}},
                {"data": {"id": "2"}},
            ],
            "edges": [
                {"data": {"source": "0", "target": "1", "weight": "1"}},
                {"data": {"source": "0", "target": "1", "weight": "2"}},
                {"data": {"source": "1", "target": "0"}},
                {"data": {"source": "1", "target": "2"}},
                {"data": {"source": "2", "target": "0"}},
            ],
        }
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
            Node(data={"id": "1"}, position={}),
            Node(data={"id": "2"}, position={}),
        ]
        expected_edges_undirected = [
            Edge(classes="", data={"source": "1", "target": "2"}),
            Edge(classes="", data={"source": "2", "target": "0"}),
        ]
        expected_edges_directed = [
            Edge(classes=" directed ", data={"source": "1", "target": "0"}),
            Edge(classes=" directed ", data={"source": "1", "target": "2"}),
            Edge(classes=" directed ", data={"source": "2", "target": "0"}),
        ]
        expected_edges_multiple = [
            Edge(classes=" multiple_edges ", data={"source": "1", "target": "2"}),
            Edge(classes=" multiple_edges ", data={"source": "2", "target": "0"}),
        ]
        # Undirected graph: removing ("0", "1") drops every edge between them.
        graph = Graph()
        graph.add_graph_from_json(data)
        graph.remove_edge_by_id("0", "1")
        compare_edges(expected_edges_undirected, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        # Directed graph: only the "0" -> "1" direction is removed here ...
        graph = Graph()
        graph.add_graph_from_json(data, directed=True)
        graph.remove_edge_by_id("0", "1")
        compare_edges(expected_edges_directed, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        # ... and the reverse direction needs its own removal.
        graph.remove_edge_by_id("1", "0")
        compare_edges(expected_edges_directed[1:], graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        # Multiple-edge graph: both parallel "0" -> "1" edges go at once.
        graph = Graph()
        graph.add_graph_from_json(data, multiple_edges=True)
        graph.remove_edge_by_id("0", "1")
        compare_edges(expected_edges_multiple, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_remove_node(self):
        """
        Test to ensure that nodes will be removed correctly
        """
        data = {
            "nodes": [
                {"data": {"id": "0"}},
                {"data": {"id": "1"}},
                {"data": {"id": "2"}},
            ],
            "edges": [
                {"data": {"source": "0", "target": "1"}},
                {"data": {"source": "1", "target": "2"}},
                {"data": {"source": "2", "target": "0"}},
            ],
        }
        expected_nodes = [
            Node(data={"id": "1"}, position={}),
            Node(data={"id": "2"}, position={}),
        ]
        # Removing node "0" must also drop both edges incident to it.
        expected_edges = [
            Edge(classes="", data={"source": "1", "target": "2"}),
        ]
        graph = Graph()
        graph.add_graph_from_json(data)
        graph.remove_node(graph.nodes[0])
        compare_edges(expected_edges, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_remove_node_by_id(self):
        """
        Test to ensure that nodes will be removed given the id
        for different graphs with the corresponding edges
        """
        data = {
            "nodes": [
                {"data": {"id": "0"}},
                {"data": {"id": "1"}},
                {"data": {"id": "2"}},
            ],
            "edges": [
                {"data": {"source": "0", "target": "1", "weight": "1"}},
                {"data": {"source": "0", "target": "1", "weight": "2"}},
                {"data": {"source": "1", "target": "0"}},
                {"data": {"source": "1", "target": "2"}},
                {"data": {"source": "2", "target": "0"}},
            ],
        }
        expected_nodes = [
            Node(data={"id": "1"}, position={}),
            Node(data={"id": "2"}, position={}),
        ]
        # In every mode only the "1" -> "2" edge survives the removal of "0".
        expected_edges_undirected = [
            Edge(classes="", data={"source": "1", "target": "2"}),
        ]
        expected_edges_directed = [
            Edge(classes=" directed ", data={"source": "1", "target": "2"}),
        ]
        expected_edges_multiple = [
            Edge(classes=" multiple_edges ", data={"source": "1", "target": "2"}),
        ]
        graph = Graph()
        graph.add_graph_from_json(data)
        graph.remove_node_by_id("0")
        compare_edges(expected_edges_undirected, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        graph = Graph()
        graph.add_graph_from_json(data, directed=True)
        graph.remove_node_by_id("0")
        compare_edges(expected_edges_directed, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        # Removing "1" as well empties the edge list entirely.
        graph.remove_node_by_id("1")
        compare_edges(expected_edges_directed[1:], graph.edges)
        compare_nodes(expected_nodes[1:], graph.nodes)
        graph = Graph()
        graph.add_graph_from_json(data, multiple_edges=True)
        graph.remove_node_by_id("0")
        compare_edges(expected_edges_multiple, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        graph.remove_node_by_id("1")
        compare_edges(expected_edges_multiple[1:], graph.edges)
        compare_nodes(expected_nodes[1:], graph.nodes)
class TestGraphAddMethods:
    """Tests for Graph.add_node(s)/add_edge(s).

    Fix: the last test was also named ``test_add_edges``, which shadowed
    the earlier method of the same name at class-creation time, so pytest
    never collected or ran the multi-edge test.  It is renamed to
    ``test_add_edge`` (it exercises the singular ``add_edge`` API).
    """

    def test_add_nodes(self):
        """
        Test to ensure that nodes will be added to the graph
        """
        # create some nodes
        ids = ["0", "1"]
        nodes = [Node(data={"id": i}) for i in ids]
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
            Node(data={"id": "1"}, position={}),
        ]
        expected_edges = []
        graph = Graph()
        graph.add_nodes(nodes)
        compare_edges(expected_edges, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_add_node(self):
        """
        Test to ensure that a single node will be added to the graph
        """
        # thin wrapper around the plural variant, so no extensive tests
        # are necessary
        node = Node(data={"id": "0"})
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
        ]
        expected_edges = []
        graph = Graph()
        graph.add_node(node)
        compare_edges(expected_edges, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_add_edges(self):
        """
        Test to ensure that edges with the corresponding nodes will be added to the graph
        """
        ids = ["0", "1", "2"]
        edges = [
            Edge(data={"source": source, "target": target})
            for source, target in zip(ids[:-1], ids[1:])
        ]
        # Two parallel "0" -> "1" edges (distinct weights) plus the reverse
        # edge, to exercise the undirected/directed/multiple-edge behaviors.
        edges_weighted = [
            Edge(data={"source": "0", "target": "1", "weight": str(i)})
            for i in range(1, 3)
        ]
        edge_inv = Edge(data={"source": "1", "target": "0"})
        edges += edges_weighted
        edges += [edge_inv]
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
            Node(data={"id": "1"}, position={}),
            Node(data={"id": "2"}, position={}),
        ]
        expected_edges_undirected = [
            Edge(classes="", data={"source": "0", "target": "1"}),
            Edge(classes="", data={"source": "1", "target": "2"}),
        ]
        expected_edges_directed = [
            Edge(classes=" directed ", data={"source": "0", "target": "1"}),
            Edge(classes=" directed ", data={"source": "1", "target": "2"}),
            Edge(classes=" directed ", data={"source": "1", "target": "0"}),
        ]
        expected_edges_multiple = [
            Edge(classes=" multiple_edges ", data={"source": "0", "target": "1"}),
            Edge(classes=" multiple_edges ", data={"source": "1", "target": "2"}),
            Edge(
                classes=" multiple_edges ",
                data={"source": "0", "target": "1", "weight": "1"},
            ),
            Edge(
                classes=" multiple_edges ",
                data={"source": "0", "target": "1", "weight": "2"},
            ),
            Edge(classes=" multiple_edges ", data={"source": "1", "target": "0"}),
        ]
        # copy.copy keeps the shared `edges` fixture pristine between runs.
        graph = Graph()
        graph.add_edges([copy.copy(edge) for edge in edges])
        compare_edges(expected_edges_undirected, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        graph = Graph()
        graph.add_edges([copy.copy(edge) for edge in edges], directed=True)
        compare_edges(expected_edges_directed, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        graph = Graph()
        graph.add_edges([copy.copy(edge) for edge in edges], multiple_edges=True)
        compare_edges(expected_edges_multiple, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)

    def test_add_edge(self):
        """
        Test to ensure that an edge with the corresponding nodes will be added to the graph
        """
        # Renamed from the duplicate ``test_add_edges`` so it no longer
        # shadows the method above.
        edge = Edge(data={"source": "0", "target": "1"})
        edge_inv = Edge(data={"source": "1", "target": "0"})
        expected_nodes = [
            Node(data={"id": "0"}, position={}),
            Node(data={"id": "1"}, position={}),
        ]
        expected_edges = [
            Edge(classes="", data={"source": "0", "target": "1"}),
        ]
        expected_edges_multiple = [
            Edge(classes="", data={"source": "0", "target": "1"}),
            Edge(classes=" multiple_edges ", data={"source": "1", "target": "0"}),
        ]
        graph = Graph()
        graph.add_edge(copy.copy(edge))
        compare_edges(expected_edges, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
        # test for passing arguments
        graph.add_edge(copy.copy(edge_inv), multiple_edges=True)
        compare_edges(expected_edges_multiple, graph.edges)
        compare_nodes(expected_nodes, graph.nodes)
| 36.346154
| 91
| 0.520716
| 1,333
| 12,285
| 4.622656
| 0.084021
| 0.076274
| 0.041058
| 0.063453
| 0.837553
| 0.822136
| 0.788218
| 0.770204
| 0.750893
| 0.723142
| 0
| 0.019186
| 0.308425
| 12,285
| 337
| 92
| 36.454006
| 0.706097
| 0.080749
| 0
| 0.714829
| 0
| 0
| 0.106759
| 0
| 0
| 0
| 0
| 0
| 0.026616
| 1
| 0.038023
| false
| 0
| 0.011407
| 0
| 0.057034
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3714d10c8945947fab728e9f27b3ef86eb6fded
| 26,282
|
py
|
Python
|
TEST3D/GUI/0011100_page_field/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 31
|
2015-04-01T15:59:36.000Z
|
2022-03-18T20:21:47.000Z
|
TEST3D/GUI/0011100_page_field/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 3
|
2015-02-06T19:30:24.000Z
|
2017-05-25T14:14:31.000Z
|
TEST3D/GUI/0011100_page_field/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 7
|
2015-01-23T15:19:22.000Z
|
2021-06-09T09:03:59.000Z
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
import tests
#Testing the Fields & Equations sections and their dependencies handling
findWidget('OOF3D').resize(550, 350)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
findWidget('OOF3D:Microstructure Page:Pane').set_position(156)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint microstructure page sensitized
findMenu(findWidget('OOF3D:MenuBar'), 'File:Load:Data').activate()
checkpoint toplevel widget mapped Dialog-Data
findWidget('Dialog-Data').resize(190, 67)
findWidget('Dialog-Data:filename').set_text('TEST_DATA/two_walls.skeleton')
findWidget('Dialog-Data:gtk-ok').clicked()
findWidget('OOF3D:Microstructure Page:Pane').set_position(159)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint named analysis chooser set
checkpoint named analysis chooser set
checkpoint pixel page updated
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Solver page sensitized
checkpoint microstructure page sensitized
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint Solver page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint pinnodes page sensitized
checkpoint pinnodes page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint toplevel widget mapped OOF3D Activity Viewer
checkpoint boundary page updated
findWidget('OOF3D Activity Viewer').resize(400, 300)
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint OOF.File.Load.Data
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
assert tests.fieldButtonCheck('Temperature',False,False)
assert tests.fieldButtonCheck('Displacement',False,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:Pane').set_position(304)
findWidget('OOF3D:FE Mesh Page:New').clicked()
checkpoint toplevel widget mapped Dialog-Create a new mesh
findWidget('Dialog-Create a new mesh').resize(345, 153)
findWidget('Dialog-Create a new mesh:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Mesh.New
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
assert tests.fieldButtonCheck('Temperature',False,False)
assert tests.fieldButtonCheck('Displacement',False,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:New').clicked()
checkpoint toplevel widget mapped Dialog-Create a new mesh
findWidget('Dialog-Create a new mesh').resize(345, 153)
findWidget('Dialog-Create a new mesh:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Mesh.New
findWidget('OOF3D:FE Mesh Page:Pane:Subproblems:New').clicked()
checkpoint toplevel widget mapped Dialog-Create a new subproblem
findWidget('Dialog-Create a new subproblem').resize(286, 97)
findWidget('Dialog-Create a new subproblem:gtk-ok').clicked()
checkpoint mesh page subproblems sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.New
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
assert tests.fieldButtonCheck('Temperature',False,False)
assert tests.fieldButtonCheck('Displacement',False,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
findWidget('OOF3D').resize(699, 350)
findWidget('OOF3D:Fields & Equations Page:HPane').set_position(312)
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Temperature defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Displacement defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Equations:Heat_Eqn active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Equation.Activate
findWidget('OOF3D:Fields & Equations Page:HPane:Equations:Force_Balance active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Equation.Activate
assert tests.fieldButtonCheck('Temperature',True,False)
assert tests.fieldButtonCheck('Displacement',True,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',True)
assert tests.eqnButtonCheck('Force_Balance',True)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
assert tests.fieldButtonCheck('Temperature',False,False)
assert tests.fieldButtonCheck('Displacement',False,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
findWidget('OOF3D').resize(723, 350)
findWidget('OOF3D:Fields & Equations Page:HPane').set_position(323)
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Voltage defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Equations:Coulomb_Eqn active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Equation.Activate
assert tests.fieldButtonCheck('Temperature',False,False)
assert tests.fieldButtonCheck('Displacement',False,False)
assert tests.fieldButtonCheck('Voltage',True,False)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',True)
setComboBox(findWidget('OOF3D:Fields & Equations Page:SubProblem'), 'subproblem')
checkpoint Field page sensitized
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Temperature defined').clicked()
findWidget('OOF3D Messages 1').resize(543, 200)
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Temperature active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Activate
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Displacement defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Displacement active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Activate
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Voltage defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Voltage active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint OOF.Subproblem.Field.Activate
assert tests.fieldButtonCheck('Temperature',True,True)
assert tests.fieldButtonCheck('Displacement',True,True)
assert tests.fieldButtonCheck('Voltage',True,True)
assert tests.eqnButtonCheck('Heat_Eqn',False)
assert tests.eqnButtonCheck('Force_Balance',False)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
setComboBox(findWidget('OOF3D:Fields & Equations Page:SubProblem'), 'default')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:SubProblem'), 'subproblem')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
findWidget('OOF3D:Fields & Equations Page:HPane:CopyField').clicked()
checkpoint toplevel widget mapped Dialog-Select a target Subproblem
findWidget('Dialog-Select a target Subproblem').resize(190, 159)
setComboBox(findWidget('Dialog-Select a target Subproblem:target:Mesh'), 'mesh<2>')
assert tests.CopyFieldDialogCheck(microstructures=['two_walls'],skeletons=['skeleton'],meshes=['mesh', 'mesh<2>'],subproblems=['default', 'subproblem'])
assert tests.CopyFieldDialogSelect(microstructure='two_walls',skeleton='skeleton',mesh='mesh<2>',subproblem='default')
findWidget('Dialog-Select a target Subproblem:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Copy_Field_State
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:SubProblem'), 'subproblem')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
findWidget('OOF3D:Fields & Equations Page:HPane:CopyEquation').clicked()
checkpoint toplevel widget mapped Dialog-Select a target subproblem
findWidget('Dialog-Select a target subproblem').resize(190, 159)
setComboBox(findWidget('Dialog-Select a target subproblem:target:Mesh'), 'mesh<2>')
assert tests.CopyEquationDialogCheck(microstructures=['two_walls'],skeletons=['skeleton'],meshes=['mesh', 'mesh<2>'],subproblems=['default','subproblem'])
assert tests.CopyEquationDialogSelect(microstructure='two_walls',skeleton='skeleton',mesh='mesh<2>',subproblem='default')
findWidget('Dialog-Select a target subproblem:gtk-ok').clicked()
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Copy_Equation_State
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:SubProblem'), 'subproblem')
checkpoint Field page sensitized
findWidget('OOF3D:Fields & Equations Page:HPane:CopyEquation').clicked()
checkpoint toplevel widget mapped Dialog-Select a target subproblem
findWidget('Dialog-Select a target subproblem').resize(190, 159)
setComboBox(findWidget('Dialog-Select a target subproblem:target:Mesh'), 'mesh')
assert tests.CopyEquationDialogCheck(microstructures=['two_walls'],skeletons=['skeleton'],meshes=['mesh', 'mesh<2>'],subproblems=['default'])
assert tests.CopyEquationDialogSelect(microstructure='two_walls',skeleton='skeleton',mesh='mesh',subproblem='default')
findWidget('Dialog-Select a target subproblem:gtk-ok').clicked()
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Copy_Equation_State
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:Pane:Subproblems:SubproblemScroll:SubproblemList').get_selection().select_path((1,))
checkpoint mesh page subproblems sensitized
findWidget('OOF3D:FE Mesh Page:Pane').set_position(477)
findWidget('OOF3D:FE Mesh Page:Pane:Subproblems:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(399, 89)
findWidget('Questioner:gtk-yes').clicked()
checkpoint mesh page subproblems sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Delete
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh<2>')
checkpoint Field page sensitized
findWidget('OOF3D:Fields & Equations Page:HPane:CopyEquation').clicked()
checkpoint toplevel widget mapped Dialog-Select a target subproblem
findWidget('Dialog-Select a target subproblem').resize(190, 159)
setComboBox(findWidget('Dialog-Select a target subproblem:target:Mesh'), 'mesh')
assert tests.CopyEquationDialogCheck(microstructures=['two_walls'],skeletons=['skeleton'],meshes=['mesh', 'mesh<2>'],subproblems=['default'])
assert tests.CopyEquationDialogSelect(microstructure='two_walls',skeleton='skeleton',mesh='mesh',subproblem='default')
findWidget('Dialog-Select a target subproblem:gtk-ok').clicked()
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Copy_Equation_State
setComboBox(findWidget('OOF3D:Fields & Equations Page:Mesh'), 'mesh')
checkpoint Field page sensitized
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(325, 89)
findWidget('Questioner:gtk-yes').clicked()
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Mesh.Delete
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
assert tests.fieldButtonCheck('Temperature',True,False)
assert tests.fieldButtonCheck('Displacement',True,False)
assert tests.fieldButtonCheck('Voltage',False,False)
assert tests.eqnButtonCheck('Heat_Eqn',True)
assert tests.eqnButtonCheck('Force_Balance',True)
assert tests.eqnButtonCheck('Coulomb_Eqn',False)
findMenu(findWidget('OOF3D:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(190, 92)
findWidget('Dialog-Python_Log:filename').set_text('fieldeqn.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('fieldeqn.log')
widget_2=findWidget('OOF3D')
handled_2=widget_2.event(event(gtk.gdk.DELETE,window=widget_2.window))
postpone if not handled_2: widget_2.destroy()
checkpoint OOF.Graphics_1.File.Close
| 44.39527
| 154
| 0.846283
| 3,163
| 26,282
| 7.009485
| 0.062599
| 0.232736
| 0.190519
| 0.126877
| 0.939651
| 0.92409
| 0.91101
| 0.899644
| 0.88918
| 0.878761
| 0
| 0.008534
| 0.090518
| 26,282
| 591
| 155
| 44.470389
| 0.919006
| 0.018834
| 0
| 0.878893
| 0
| 0
| 0.165574
| 0.036389
| 0
| 0
| 0
| 0
| 0.098616
| 0
| null | null | 0
| 0.00173
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8b60316a470660759ff48c6594b9db86f8e92b6e
| 6,914
|
py
|
Python
|
stocks/class_based_views.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
stocks/class_based_views.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
stocks/class_based_views.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
from stocks.models import *
from stocks.serializers import *
from rest_framework import mixins
from rest_framework import generics
class SupplierList(generics.ListCreateAPIView):
    """List all Supplier records (GET) or create a new one (POST).

    ListCreateAPIView is exactly the ListModelMixin + CreateModelMixin +
    GenericAPIView composition the original spelled out by hand, with the
    same get()/post() dispatch — same URLs, same behavior, less boilerplate.
    """
    queryset = Supplier.objects.all()
    serializer_class = SupplierSerializer
class BuyerList(generics.ListCreateAPIView):
    """List all Buyer records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Buyer.objects.all()
    serializer_class = BuyerSerializer
class SeasonList(generics.ListCreateAPIView):
    """List all Season records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Season.objects.all()
    serializer_class = SeasonSerializer
class DropList(generics.ListCreateAPIView):
    """List all Drop records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Drop.objects.all()
    serializer_class = DropSerializer
class ProductList(generics.ListCreateAPIView):
    """List all Product records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
class OrderList(generics.ListCreateAPIView):
    """List all Order records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
class DeliveryList(generics.ListCreateAPIView):
    """List all Delivery records (GET) or create a new one (POST).

    Replaces the manual ListModelMixin/CreateModelMixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Delivery.objects.all()
    serializer_class = DeliverySerializer
# Detail views: retrieve, update, or delete a single object (looked up by pk).
class SupplierDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Supplier.

    RetrieveUpdateDestroyAPIView is exactly the RetrieveModelMixin +
    UpdateModelMixin + DestroyModelMixin + GenericAPIView composition the
    original spelled out by hand, with the same get()/put()/delete()
    dispatch (plus PATCH support, which the generic view adds); the
    object is looked up by pk, the DRF default.
    """
    queryset = Supplier.objects.all()
    serializer_class = SupplierSerializer
class BuyerDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Buyer by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Buyer.objects.all()
    serializer_class = BuyerSerializer
class SeasonDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Season by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Season.objects.all()
    serializer_class = SeasonSerializer
class DropDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Drop by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Drop.objects.all()
    serializer_class = DropSerializer
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Product by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
class OrderDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Order by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
class DeliveryDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT), or delete (DELETE) one Delivery by pk.

    Replaces the manual Retrieve/Update/Destroy mixin wiring with DRF's
    equivalent concrete generic view; external behavior is unchanged.
    """
    queryset = Delivery.objects.all()
    serializer_class = DeliverySerializer
| 32.009259
| 54
| 0.629881
| 665
| 6,914
| 6.524812
| 0.099248
| 0.17746
| 0.274257
| 0.169394
| 0.934317
| 0.934317
| 0.934317
| 0.866559
| 0.866559
| 0.755243
| 0
| 0
| 0.252676
| 6,914
| 216
| 55
| 32.009259
| 0.839752
| 0.00188
| 0
| 0.880795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.231788
| false
| 0
| 0.02649
| 0.231788
| 0.768212
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 12
|
47482c52687c7d8ccef8b5784c7efb0a8633ce8d
| 17,170
|
py
|
Python
|
classes/model/layermodels/poisson_theano_scan.py
|
dennisforster/NeSi
|
db59d24ae87167ea2817bba0c65aae732c9a3bbe
|
[
"AFL-3.0"
] | 1
|
2021-07-30T16:17:26.000Z
|
2021-07-30T16:17:26.000Z
|
classes/model/layermodels/poisson_theano_scan.py
|
dennisforster/NeSi
|
db59d24ae87167ea2817bba0c65aae732c9a3bbe
|
[
"AFL-3.0"
] | null | null | null |
classes/model/layermodels/poisson_theano_scan.py
|
dennisforster/NeSi
|
db59d24ae87167ea2817bba0c65aae732c9a3bbe
|
[
"AFL-3.0"
] | 1
|
2016-08-06T10:55:08.000Z
|
2016-08-06T10:55:08.000Z
|
# Copyright (C) 2015, Dennis Forster <forster@fias.uni-frankfurt.de>
#
# LICENSE: THE SOFTWARE IS PROVIDED "AS IS" UNDER THE
# ACADEMIC FREE LICENSE (AFL) v3.0.
#
import theano
import theano.tensor as T
import numpy as np
from _layermodels import LayerModel_Theano_Scan
from utils.decorators import DocInherit
doc_inherit = DocInherit
#------------------------------------------------------------------------------
class Poisson(LayerModel_Theano_Scan):
    """(FF-)Mixture of Poisson layer for theano.scan calculation"""
    def __init__(self, nmultilayer, nlayer, input_source):
        # Symbolic weight matrix and activation for this layer; the
        # "name_%d.%d" tag encodes (multilayer, layer) so the computation
        # graph stays readable.
        self.W_t = T.matrix("W_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        self.s_t = T.matrix("s_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        self._s = None
        # CONSISTENCY FIX: the recurrent variants initialize self.W = None;
        # without it get_weights() before set_weights() raises AttributeError.
        self.W = None
        self.parameters_t = [
            T.scalar("epsilon_%d.%d"%(nmultilayer,nlayer), dtype='float32')]
        self._nmultilayer = nmultilayer
        self._nlayer = nlayer
        self._input_source = input_source
        # _input_source gives for each input variable which is not from
        # this layer the multilayer and the layer of its source:
        # _input_source[i][0]: MultiLayer of variable i
        # _input_source[i][1]: Layer of variable i

    @doc_inherit
    def sequences(self, mode='train'):
        # This layer feeds scan no per-step sequences in any mode.
        if (mode == 'train'):
            sequences = []
        elif (mode == 'test'):
            sequences = []
        elif (mode == 'likelihood'):
            sequences = []
        return sequences

    @doc_inherit
    def outputs_info(self, mode='train'):
        if (mode == 'train'):
            # training recurs over the weights
            outputs_info = [self.W_t]
        elif (mode == 'test'):
            # testing recurs over the activations
            outputs_info = [self.s_t]
        elif (mode == 'likelihood'):
            # BUG FIX: this branch previously assigned `sequences = []`,
            # leaving `outputs_info` unbound and raising UnboundLocalError
            # at the return below. Poisson_Recurrent.outputs_info has the
            # correct assignment; this now matches it.
            outputs_info = []
        return outputs_info

    @doc_inherit
    def non_sequences(self, mode='train'):
        if (mode == 'train'):
            # learning rate epsilon (see __init__)
            non_sequences = self.parameters_t
        elif (mode == 'test'):
            non_sequences = [self.W_t]
        elif (mode == 'likelihood'):
            non_sequences = [self.W_t]
        return non_sequences

    @doc_inherit
    def input_parameters(self, mode='train'):
        # Names of the scan arguments, in the order the step functions
        # below expect them ([t] = current step, [t-1] = previous step).
        if (mode == 'train'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'W_%d.%d[t-1]'%(self._nmultilayer, self._nlayer),
                'epsilon_%d.%d'%(self._nmultilayer, self._nlayer)
                ]
        elif (mode == 'test'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'W_%d.%d'%(self._nmultilayer, self._nlayer)
                ]
        return parameters

    @doc_inherit
    def learningstep(self, Y, W, epsilon):
        # activation
        s = self._activation(Y,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        # weight update (Hebbian-style: input-weighted activity minus decay)
        W_new = W + epsilon*(T.tensordot(s,Y,axes=[0,0]) -
            T.sum(s,axis=0)[:,np.newaxis]*W)
        W_new.name = 'W_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s, W_new

    def learningstep_m1(self, Y, W, epsilon):
        """Perform a single learning step.

        This is a faster learning step for the case of
        mini-batch-size = 1.

        Keyword arguments:
        the keyword arguments must be the same as given in
        self.input_parameters(mode) for mode='train'.
        """
        # Input integration:
        I = T.dot(T.log(W),Y)
        # numeric trick to prevent overflow in the exp-function
        max_exponent = 88. - T.log(I.shape[0]).astype('float32')
        scale = theano.ifelse.ifelse(T.gt(I[T.argmax(I)], max_exponent),
            I[T.argmax(I)] - max_exponent, 0.)
        # activation: softmax with overflow protection
        s = T.exp(I-scale)/T.sum(T.exp(I-scale))
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        # weight update
        W_new = W + epsilon*(T.outer(s,Y) - s[:,np.newaxis]*W)
        W_new.name = 'W_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s, W_new

    @doc_inherit
    def teststep(self, Y, W):
        # activation only; weights stay fixed at test time
        s = self._activation(Y,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s

    @doc_inherit
    def set_weights(self, W):
        self.W = W

    @doc_inherit
    def get_weights(self):
        return self.W

    def _activation(self, Y, W):
        """Returns the activation for a given input.

        Derived from the generative model formulation of Poisson
        mixtures, the formular for the activation in the network reads
        as follows:
        I_c = \sum_d \log(W_{cd})y_d
        s_c = softmax(I_c)
        """
        # input integration
        I = T.tensordot(Y,T.log(W),axes=[1,1])
        # activation
        # to prevent numerical over- or underflows in the exponential, a
        # scaling factor in the softmax function is used, utilizing the
        # identity:
        # exp(x_i)/sum_i(exp(x_i)) = exp(x_i-a)/sum_i(exp(x_i-a))
        max_exponent = 86. - T.log(I.shape[1].astype('float32'))
        scale = T.switch(
            T.gt(T.max(I, axis=1, keepdims=True), max_exponent),
            T.max(I, axis=1, keepdims=True) - max_exponent,
            0.)
        s = T.exp(I-scale)/T.sum(T.exp(I-scale), axis=1, keepdims=True)
        return s
#------------------------------------------------------------------------------
class Poisson_Recurrent(LayerModel_Theano_Scan):
    """Recurrent Mixture of Poisson layer for theano.scan calculation"""
    def __init__(self, nmultilayer, nlayer, input_source):
        # W: numeric weights (set via set_weights); W_t/s_t: symbolic
        # weight matrix and activation, tagged "name_%d.%d" with the
        # (multilayer, layer) indices for readable scan graphs.
        self.W = None
        self.W_t = T.matrix("W_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        self.s_t = T.matrix("s_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        # single scalar parameter: the learning rate epsilon
        self.parameters_t = [
            T.scalar("epsilon_%d.%d"%(nmultilayer,nlayer),dtype='float32')]
        self._nmultilayer = nmultilayer
        self._nlayer = nlayer
        self._input_source = input_source
        # _input_source gives for each input variable which is not from
        # this layer the multilayer and the Layer of its source:
        # _input_source[i][0]: MultiLayer of variable i
        # _input_source[i][1]: Layer of variable i
        # self._srng = T.shared_randomstreams.RandomStreams(seed=137)

    @doc_inherit
    def sequences(self, mode='train'):
        # This layer feeds scan no per-step sequences in any mode.
        if (mode == 'train'):
            sequences = []
        elif (mode == 'test'):
            sequences = []
        elif (mode == 'likelihood'):
            sequences = []
        return sequences

    @doc_inherit
    def outputs_info(self, mode='train'):
        # training recurs over weights, testing over activations
        if (mode == 'train'):
            outputs_info = [self.W_t]
        elif (mode == 'test'):
            outputs_info = [self.s_t]
        elif (mode == 'likelihood'):
            outputs_info = []
        return outputs_info

    @doc_inherit
    def non_sequences(self, mode='train'):
        if (mode == 'train'):
            non_sequences = self.parameters_t
        elif (mode == 'test'):
            non_sequences = [self.W_t]
        elif (mode == 'likelihood'):
            non_sequences = [self.W_t]
        return non_sequences

    @doc_inherit
    def input_parameters(self, mode='train'):
        # Names of the scan arguments, in the order the step functions
        # below expect them. 'L[t]' is the label input; the first 'W'
        # entry is the recurrent/top-down weight matrix M from the layer
        # named in _input_source[1], the second this layer's own W.
        if (mode == 'train'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'L[t]',
                'W_%d.%d[t-1]'%(self._input_source[1][0], self._input_source[1][1]),
                'W_%d.%d[t-1]'%(self._nmultilayer, self._nlayer),
                'epsilon_%d.%d'%(self._nmultilayer, self._nlayer)
                ]
        elif (mode == 'test'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'W_%d.%d'%(self._input_source[1][0], self._input_source[1][1]),
                'W_%d.%d'%(self._nmultilayer, self._nlayer)
                ]
        return parameters

    @doc_inherit
    def learningstep(self, Y, L, M, W, epsilon):
        # activation given input Y, labels L and recurrent weights M
        s = self._activation(Y,L,M,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        # weight update
        W_new = W + epsilon*(T.tensordot(s,Y,axes=[0,0]) -
            T.sum(s,axis=0)[:,np.newaxis]*W)
        W_new.name = 'W_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s, W_new

    def learningstep_m1(self, Y, L, M, W, epsilon):
        """Perform a single learning step.

        This is a faster learning step for the case of
        mini-batch-size = 1.

        Keyword arguments:
        the keyword arguments must be the same as given in
        self.input_parameters(mode) for mode='train'.
        """
        # Input integration:
        I = T.dot(T.log(W),Y)
        # recurrent term:
        # with a label, use that label's row of M; without (L == -1),
        # marginalize by summing M over all labels
        vM = theano.ifelse.ifelse(
            T.eq(L,-1), # if no label is provided
            T.sum(M, axis=0),
            M[L,:]
            )
        # numeric trick to prevent overflow in the exp-function:
        max_exponent = 88. - T.log(I.shape[0]).astype('float32')
        scale = theano.ifelse.ifelse(T.gt(I[T.argmax(I)], max_exponent),
            I[T.argmax(I)] - max_exponent, 0.)
        # activation: recurrent softmax with overflow protection
        s = vM*T.exp(I-scale)/T.sum(vM*T.exp(I-scale))
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        # weight update
        W_new = W + epsilon*(T.outer(s,Y) - s[:,np.newaxis]*W)
        W_new.name = 'W_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s, W_new

    @doc_inherit
    def teststep(self, Y, M, W):
        # activation; L = -1 for every sample marks the whole batch as
        # unlabeled, so _activation uses the label-marginalized M
        L = (-1)*T.ones_like(Y[:,0], dtype='int32')
        s = self._activation(Y,L,M,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s

    @doc_inherit
    def set_weights(self, W):
        self.W = W

    @doc_inherit
    def get_weights(self):
        return self.W

    def _activation(self, Y, L, M, W):
        """Returns the activation for a given input.

        Derived from the generative model formulation of hierarchical
        Poisson mixtures, the formular for the activation in the network
        reads as follows:
        I_c =
        \sum_d \log(W_{cd})y_d + \log(M_{lc}) for labeled data
        \sum_d \log(W_{cd})y_d + \log(\sum_k M_{kc}) for unlabeled data
        s_c = softmax(I_c)
        """
        # Input integration:
        I = T.tensordot(Y,T.log(W),axes=[1,1])
        # dropout (does not work with scan)
        # I = T.cast(self._srng.binomial((I.shape[0],I.shape[1])),'float32')
        # recurrent term:
        # per-sample label rows of M; rows of unlabeled samples (L == -1)
        # are overwritten with the sum of M over all labels
        vM = M[L]
        L_index = T.eq(L,-1).nonzero()
        vM = T.set_subtensor(vM[L_index], T.sum(M, axis=0))
        # numeric trick to prevent overflow in the exp-function
        max_exponent = 86. - T.ceil(T.log(I.shape[1].astype('float32')))
        scale = T.switch(
            T.gt(T.max(I, axis=1, keepdims=True), max_exponent),
            T.max(I, axis=1, keepdims=True) - max_exponent,
            0.)
        # numeric approximation to prevent underflow in the exp-function:
        # map too low values of I to a fixed minimum value
        min_exponent = -87. + T.ceil(T.log(I.shape[1].astype('float32')))
        I = T.switch(
            T.lt(I-scale, min_exponent),
            scale+min_exponent,
            I)
        # activation: recurrent softmax with overflow protection
        s = vM*T.exp(I-scale)/T.sum(vM*T.exp(I-scale), axis=1, keepdims=True)
        return s
#------------------------------------------------------------------------------
class Poisson_Recurrent_IF(LayerModel_Theano_Scan):
    """Recurrent Mixture of Poisson layer for theano.scan calculation.

    Holds the symbolic Theano variables (weight matrix ``W_t``, activation
    ``s_t`` and the learning-rate scalar) and provides the step functions
    plus the scan wiring (sequences / outputs_info / non_sequences) that
    ``theano.scan`` expects for the 'train', 'test' and 'likelihood' modes.
    """

    def __init__(self, nmultilayer, nlayer, input_source):
        """Create the symbolic variables for layer ``nlayer`` of multilayer
        ``nmultilayer``; ``input_source`` maps foreign inputs to their origin."""
        # Numeric weight matrix; filled in later via set_weights().
        self.W = None
        # Symbolic counterparts used inside theano.scan.
        self.W_t = T.matrix("W_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        self.s_t = T.matrix("s_%d.%d"%(nmultilayer,nlayer), dtype='float32')
        # Scalar hyper-parameter: the learning rate epsilon for learningstep().
        self.parameters_t = [
            T.scalar("epsilon_%d.%d"%(nmultilayer,nlayer),dtype='float32')]
        self._nmultilayer = nmultilayer
        self._nlayer = nlayer
        self._input_source = input_source
        # _input_source gives for each input variable which is not from
        # this layer the multilayer and the Layer of its source:
        # _input_source[i][0]: MultiLayer of variable i
        # _input_source[i][1]: Layer of variable i
        # self._srng = T.shared_randomstreams.RandomStreams(seed=137)

    @doc_inherit
    def sequences(self, mode='train'):
        # This layer feeds no per-timestep sequences into theano.scan.
        # NOTE(review): any mode other than the three below leaves `sequences`
        # unbound (UnboundLocalError at the return) — confirm callers only
        # ever pass 'train', 'test' or 'likelihood'.
        if (mode == 'train'):
            sequences = []
        elif (mode == 'test'):
            sequences = []
        elif (mode == 'likelihood'):
            sequences = []
        return sequences

    @doc_inherit
    def outputs_info(self, mode='train'):
        # State carried between scan iterations: the weights while training,
        # the activation while testing, nothing for likelihood evaluation.
        if (mode == 'train'):
            outputs_info = [self.W_t]
        elif (mode == 'test'):
            outputs_info = [self.s_t]
        elif (mode == 'likelihood'):
            outputs_info = []
        return outputs_info

    @doc_inherit
    def non_sequences(self, mode='train'):
        # Constants handed unchanged to every scan iteration.
        if (mode == 'train'):
            non_sequences = self.parameters_t
        elif (mode == 'test'):
            non_sequences = [self.W_t]
        elif (mode == 'likelihood'):
            non_sequences = [self.W_t]
        return non_sequences

    @doc_inherit
    def input_parameters(self, mode='train'):
        # Names of the scan arguments in the order the step functions expect
        # them ([t] = current step, [t-1] = previous step).
        # NOTE(review): there is no 'likelihood' branch here, so that mode
        # would raise UnboundLocalError — confirm it is never requested.
        if (mode == 'train'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'L[t]',
                'W_%d.%d[t-1]'%(self._input_source[1][0], self._input_source[1][1]),
                'W_%d.%d[t-1]'%(self._nmultilayer, self._nlayer),
                'epsilon_%d.%d'%(self._nmultilayer, self._nlayer),
                'threshold_%d.%d'%(self._nmultilayer, self._nlayer+1)
            ]
        elif (mode == 'test'):
            parameters = [
                's_%d.%d[t]'%(self._input_source[0][0], self._input_source[0][1]),
                'W_%d.%d'%(self._input_source[1][0], self._input_source[1][1]),
                'W_%d.%d'%(self._nmultilayer, self._nlayer)
            ]
        return parameters

    @doc_inherit
    def learningstep(self, Y, L, M, W, epsilon, threshold):
        # One training step: first pass with the labels as given, infer
        # missing labels, recompute the activation, then update the weights.
        s = self._activation(Y,L,M,W)
        # NOTE(review): this switch keeps the *inferred* label only where
        # L == -1 and maps every provided label to -1, so provided labels do
        # not enter the second activation pass — this looks inverted relative
        # to the inline comment; confirm the intent.
        L_inf = T.switch(
            T.eq(L,-1), # if no label is provided
            self._inferred_labels(s,M,threshold),
            -1
        )
        s = self._activation(Y,L_inf,M,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        # weight update (Hebbian-style step scaled by epsilon)
        W_new = W + epsilon*(T.tensordot(s,Y,axes=[0,0]) -
                             T.sum(s,axis=0)[:,np.newaxis]*W)
        W_new.name = 'W_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s, W_new

    @doc_inherit
    def teststep(self, Y, M, W):
        # activation computed as if every sample were unlabeled (L = -1)
        L = (-1)*T.ones_like(Y[:,0], dtype='int32')
        s = self._activation(Y,L,M,W)
        s.name = 's_%d.%d[t]'%(self._nmultilayer,self._nlayer)
        return s

    @doc_inherit
    def set_weights(self, W):
        # Store the numeric weight matrix (numpy side, not symbolic).
        self.W = W

    @doc_inherit
    def get_weights(self):
        return self.W

    def _activation(self, Y, L, M, W):
        """Returns the activation for a given input.

        Derived from the generative model formulation of hierarchical
        Poisson mixtures, the formular for the activation in the network
        reads as follows:

        I_c =
            \sum_d \log(W_{cd})y_d + \log(M_{lc})        for labeled data
            \sum_d \log(W_{cd})y_d + \log(\sum_k M_{kc}) for unlabeled data
        s_c = softmax(I_c)
        """
        # first: complete inference to find label
        # Input integration:
        I = T.tensordot(Y,T.log(W),axes=[1,1])
        # recurrent term: pick each sample's label row of M; for unlabeled
        # samples (L == -1) substitute the sum over all label rows.
        vM = M[L]
        L_index = T.eq(L,-1).nonzero()
        vM = T.set_subtensor(vM[L_index], T.sum(M, axis=0))
        # numeric trick to prevent overflow in the exp-function
        max_exponent = 86. - T.ceil(T.log(I.shape[1].astype('float32')))
        scale = T.switch(
            T.gt(T.max(I, axis=1, keepdims=True), max_exponent),
            T.max(I, axis=1, keepdims=True) - max_exponent,
            0.)
        # numeric approximation to prevent underflow in the exp-function:
        # map too low values of I to a fixed minimum value
        min_exponent = -87. + T.ceil(T.log(I.shape[1].astype('float32')))
        I = T.switch(
            T.lt(I-scale, min_exponent),
            scale+min_exponent,
            I)
        # activation: recurrent softmax with overflow protection
        s = vM*T.exp(I-scale)/T.sum(vM*T.exp(I-scale), axis=1, keepdims=True)
        return s

    def _inferred_labels(self, s, M, threshold):
        """Infer a label per sample from the activation, or -1 when the
        best-versus-second-best margin does not exceed ``threshold``."""
        # Posterior over labels: activation times the column-normalized M
        # (columns that sum to zero are left at zero to avoid division by 0).
        inference = T.tensordot(
            s,
            T.switch(T.eq(M,0), 0, M/T.sum(M, axis=0)),
            axes=[1,1])
        # Best-versus-Second-Best margin per sample.
        BvSB = T.sort(inference,axis=1)[:,-1]-T.sort(inference,axis=1)[:,-2]
        L_inf = T.switch(
            T.gt(BvSB,threshold),
            T.cast(T.argmax(inference,axis=1),'int32'),
            -1
        )
        return L_inf
| 37.571116
| 86
| 0.552068
| 2,299
| 17,170
| 3.956503
| 0.096129
| 0.009235
| 0.060026
| 0.063215
| 0.915128
| 0.897427
| 0.894239
| 0.89215
| 0.89204
| 0.886763
| 0
| 0.015089
| 0.293652
| 17,170
| 457
| 87
| 37.571116
| 0.734911
| 0.229703
| 0
| 0.85209
| 0
| 0
| 0.063273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106109
| false
| 0
| 0.016077
| 0.009646
| 0.21865
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47dfaa96b2e209204ca0cd4e4010bc71af0dc118
| 5,601
|
py
|
Python
|
pipeline/metrics.py
|
HSE-LAMBDA/RheologyReconstruction
|
fe89dea28ab0873d075e69c51e9ae2aeb07fe8e2
|
[
"Apache-2.0"
] | 1
|
2021-01-12T11:43:31.000Z
|
2021-01-12T11:43:31.000Z
|
pipeline/metrics.py
|
HSE-LAMBDA/RheologyReconstruction
|
fe89dea28ab0873d075e69c51e9ae2aeb07fe8e2
|
[
"Apache-2.0"
] | null | null | null |
pipeline/metrics.py
|
HSE-LAMBDA/RheologyReconstruction
|
fe89dea28ab0873d075e69c51e9ae2aeb07fe8e2
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import torch
from scipy.ndimage import sobel
class BaseMetric():
    """Base class for batch metrics; subclasses override __call__."""

    def __init__(self, name: str):
        # Human-readable metric name (e.g. for logging).
        self.name = name

    def __call__(self):
        raise NotImplementedError


class Dice(BaseMetric):
    """
    Dice coefficient for binary classification problem over the batch of images.

    Called with:
    :param batch: object carrying the ground-truth masks in ``batch.mask``
                  (torch tensor, shape (N_batch, W_image, H_image))
    :param preds: sequence of three model predictions (left/middle/right
                  heads), each a torch tensor of shape
                  (N_batch, W_image, H_image)
    :param tolerance: (default = 0.5) threshold above which a pixel is
                      considered to be in class 1
    :returns: Dice metric averaged over the batch and over the three heads
    """

    def __init__(self):
        super().__init__("average_dice_score")
        self.EPS = 1e-10  # a constant for numerical stability

    def _dice(self, true, pred):
        """Dice score of one binary mask pair.

        Fix: this was a ``@staticmethod`` that referenced ``self.EPS``,
        which raised NameError on every call.
        """
        true = true.astype(bool)
        pred = pred.astype(bool)
        intersection = (true & pred).sum()
        im_sum = true.sum() + pred.sum()
        return 2.0 * intersection / (im_sum + self.EPS)

    def _average_dice(self, ground, preds, tolerance):
        """Average Dice over a batch; images with an all-empty ground truth
        are excluded from the denominator (0.0 when every image is empty)."""
        total = 0.0
        empty = 0.0
        for t, p in zip(ground, preds):
            if not np.sum(t):
                empty += 1.
            total += self._dice(t, p > tolerance)
        n = preds.shape[0]
        return total / (n - empty) if empty != n else 0.0

    def __call__(self, batch, preds, tolerance=0.5):
        # One prediction array per head; the ground truth is shared.
        pred_arrays = [preds[i].data.cpu().numpy() for i in range(3)]
        # Fix: torch tensors have no .copy() method — the original
        # `batch.mask.cpu().copy()` raised AttributeError; copy on the
        # numpy side instead.
        ground = batch.mask.detach().cpu().numpy().copy()
        # Fix: the original's third loop accumulated into AVG_DICE_m, so
        # AVG_DICE_r was always 0; the shared helper removes that bug.
        scores = [self._average_dice(ground, pa, tolerance) for pa in pred_arrays]
        return sum(scores) / 3.


class WeightedDice(BaseMetric):
    """
    A weighted Dice coefficient for binary classification problem over the
    batch of images.

    :param preds: sequence of three model predictions (torch tensors), each
                  of shape (N_batch, W_image, H_image)
    :param labels: batch of correct pixel classes for the images
                   (numpy array or torch tensor, (N_batch, W_image, H_image))
    :param weights: batch of weight matrices for the images
                    (numpy array or torch tensor, (N_batch, W_image, H_image))
    :param tolerance: (default = 0.5) threshold above which a pixel is
                      considered to be in class 1
    :returns: average weighted Dice metric over the batch and the three heads
    """

    def __init__(self):
        super().__init__("average_weighted_dice_score")
        self.EPS = 1e-10  # a constant for numerical stability

    def _weighted_dice(self, true, pred, weights):
        """Weighted Dice score of one mask pair.

        Fix: was a ``@staticmethod`` referencing ``self.EPS`` (NameError).
        """
        true = true.astype(bool)
        pred = pred.astype(bool)
        intersection = (weights * (true & pred)).sum()
        im_sum = (weights * true).sum() + (weights * pred).sum()
        return 2.0 * intersection / (im_sum + self.EPS)

    def _average_weighted_dice(self, ground, preds, weights, tolerance):
        """Average weighted Dice over a batch; all-empty ground truths are
        excluded from the denominator (0.0 when every image is empty)."""
        total = 0.0
        empty = 0.0
        # Fix: the original unpacked these 3-tuples into two names
        # (ValueError) and reused a stale `w` from an earlier loop.
        for t, p, w in zip(ground, preds, weights):
            if not np.sum(t):
                empty += 1.
            total += self._weighted_dice(t, p > tolerance, w)
        n = preds.shape[0]
        return total / (n - empty) if empty != n else 0.0

    def __call__(self, preds, labels, weights, tolerance=0.5):
        # Fix: the original body referenced an undefined `batch` (NameError)
        # and ignored its documented `labels`/`weights` parameters.
        def _to_numpy(x):
            # Accept either torch tensors or numpy arrays.
            return x.detach().cpu().numpy() if torch.is_tensor(x) else np.asarray(x)

        pred_arrays = [preds[i].data.cpu().numpy() for i in range(3)]
        ground = _to_numpy(labels)
        weight_arr = _to_numpy(weights)
        # Fix: the original's third loop accumulated into AVG_DICE_m,
        # leaving AVG_DICE_r permanently 0.
        scores = [
            self._average_weighted_dice(ground, pa, weight_arr, tolerance)
            for pa in pred_arrays
        ]
        return sum(scores) / 3.
| 32.563953
| 102
| 0.602035
| 858
| 5,601
| 3.698135
| 0.134033
| 0.077214
| 0.030255
| 0.045383
| 0.857548
| 0.845257
| 0.833911
| 0.807123
| 0.780334
| 0.742515
| 0
| 0.021372
| 0.281557
| 5,601
| 171
| 103
| 32.754386
| 0.767147
| 0.253348
| 0
| 0.629213
| 0
| 0
| 0.011106
| 0.006663
| 0
| 0
| 0
| 0.011696
| 0
| 1
| 0.089888
| false
| 0
| 0.033708
| 0
| 0.202247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a0c66be14d03990c50a81b45d161379ebd36b53
| 186
|
py
|
Python
|
Code/sample.py
|
nolongerwait/Python_Tutorial
|
3961d6ee9c371fa7666a26b7a114e276170724dd
|
[
"MIT"
] | null | null | null |
Code/sample.py
|
nolongerwait/Python_Tutorial
|
3961d6ee9c371fa7666a26b7a114e276170724dd
|
[
"MIT"
] | null | null | null |
Code/sample.py
|
nolongerwait/Python_Tutorial
|
3961d6ee9c371fa7666a26b7a114e276170724dd
|
[
"MIT"
] | null | null | null |
def show():
    """Print a hard-coded UTF-8 message (byte string kept verbatim)."""
    message_bytes = b'\xe8\xb4\xba\xe6\xa2\xa6\xe9\x9c\xb2\xe6\x98\xaf\xe4\xb8\x96\xe7\x95\x8c\xe4\xb8\x8a\xe6\x9c\x80\xe5\xa5\xbd\xe7\x9a\x84\xe4\xbb\x99\xe5\xa5\xb3'
    print(message_bytes.decode('UTF-8'))
| 93
| 174
| 0.704301
| 43
| 186
| 3.046512
| 0.790698
| 0.091603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216667
| 0.032258
| 186
| 2
| 174
| 93
| 0.511111
| 0
| 0
| 0
| 0
| 0.5
| 0.796791
| 0.770053
| 0
| 1
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
9a4468cdebd41feaed7b2d56d6e405c15f39b6b8
| 202
|
py
|
Python
|
gyb_resources_all/gyb_resources.py
|
illescasDaniel/Swift-Resources
|
a7a2208cdad06fa5328ed0904da5692e90fccbf3
|
[
"MIT"
] | null | null | null |
gyb_resources_all/gyb_resources.py
|
illescasDaniel/Swift-Resources
|
a7a2208cdad06fa5328ed0904da5692e90fccbf3
|
[
"MIT"
] | null | null | null |
gyb_resources_all/gyb_resources.py
|
illescasDaniel/Swift-Resources
|
a7a2208cdad06fa5328ed0904da5692e90fccbf3
|
[
"MIT"
] | null | null | null |
from gyb_resources_assets import _assets_folder_name
from gyb_resources_assets import image_resources
from gyb_resources_assets import color_resources
from gyb_resources_strings import string_resources
| 40.4
| 52
| 0.920792
| 29
| 202
| 5.931034
| 0.37931
| 0.162791
| 0.372093
| 0.383721
| 0.488372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079208
| 202
| 4
| 53
| 50.5
| 0.924731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d065e4f3bb4088ba63a715037894719a321acaac
| 226
|
py
|
Python
|
angr/procedures/win32/GetCommandLine.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 6,132
|
2015-08-06T23:24:47.000Z
|
2022-03-31T21:49:34.000Z
|
angr/procedures/win32/GetCommandLine.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 2,272
|
2015-08-10T08:40:07.000Z
|
2022-03-31T23:46:44.000Z
|
angr/procedures/win32/GetCommandLine.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 1,155
|
2015-08-06T23:37:39.000Z
|
2022-03-31T05:54:11.000Z
|
import angr
class GetCommandLineA(angr.SimProcedure):
    """SimProcedure stub for ``GetCommandLineA``.

    Returns the pointer the simulated OS keeps for the ANSI command line
    (``acmdln_ptr``) instead of executing the real kernel32 routine.
    """
    def run(self):
        ansi_cmdline_ptr = self.project.simos.acmdln_ptr
        return ansi_cmdline_ptr
class GetCommandLineW(angr.SimProcedure):
    """SimProcedure stub for ``GetCommandLineW``.

    Returns the pointer the simulated OS keeps for the wide (UTF-16)
    command line (``wcmdln_ptr``) instead of executing the real routine.
    """
    def run(self):
        wide_cmdline_ptr = self.project.simos.wcmdln_ptr
        return wide_cmdline_ptr
| 22.6
| 44
| 0.734513
| 28
| 226
| 5.857143
| 0.535714
| 0.195122
| 0.231707
| 0.268293
| 0.585366
| 0.585366
| 0.585366
| 0.585366
| 0.585366
| 0
| 0
| 0
| 0.176991
| 226
| 9
| 45
| 25.111111
| 0.88172
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ef5a0d4732e49f02ca2427919684458a684db1c0
| 245
|
py
|
Python
|
PythonExercicios/ex034.py
|
Caio-Moretti/115.Exercicios-Python
|
7e66fb1f44ea3eb4ade63f37d843242ac42ade84
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex034.py
|
Caio-Moretti/115.Exercicios-Python
|
7e66fb1f44ea3eb4ade63f37d843242ac42ade84
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex034.py
|
Caio-Moretti/115.Exercicios-Python
|
7e66fb1f44ea3eb4ade63f37d843242ac42ade84
|
[
"MIT"
] | null | null | null |
# Read the salary and apply the raise: 15% up to R$ 1250.00, 10% above it.
sal = float(input('Qual o salário? '))
aumento = 1.15 if sal <= 1250 else 1.10
print('O seu salário de R$: {:.2f} com aumento fica R$:{:.2f}'.format(sal, sal * aumento))
| 40.833333
| 91
| 0.587755
| 44
| 245
| 3.272727
| 0.477273
| 0.083333
| 0.125
| 0.222222
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0
| 0.07
| 0.183673
| 245
| 5
| 92
| 49
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0.506122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
322e0eef2e7e193ac81b01de5fe87b3a425666dc
| 6,425
|
py
|
Python
|
polynomials_on_simplices/probability_theory/uniform_sampling.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | 1
|
2021-03-17T11:41:21.000Z
|
2021-03-17T11:41:21.000Z
|
polynomials_on_simplices/probability_theory/uniform_sampling.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
polynomials_on_simplices/probability_theory/uniform_sampling.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
"""Uniform sampling of random points in different geometries."""
import numpy as np
def closed_unit_interval_sample():
    """
    Generate a random number sampled from the uniform distribution over the
    closed unit interval [0, 1].

    :return: Random number.
    """
    # Rejection-sample a value in [0, 0.875], then rescale to [0, 1].
    while True:
        candidate = np.random.rand()
        if candidate <= 0.875:
            return candidate / 0.875
def open_unit_interval_sample():
    """
    Generate a random number sampled from the uniform distribution over the
    open unit interval (0, 1).

    :return: Random number.
    """
    # Rejection-sample into (0.125, 1), shift to (0, 0.875), rescale to (0, 1).
    candidate = np.random.rand()
    while candidate <= 0.125:
        candidate = np.random.rand()
    return (candidate - 0.125) / 0.875
def left_closed_interval_sample():
    """
    Generate a random number sampled from the uniform distribution over the
    left closed unit interval [0, 1).

    :return: Random number.
    """
    # np.random.rand already samples uniformly from [0, 1).
    sample = np.random.rand()
    return sample
def right_closed_interval_sample():
    """
    Generate a random number sampled from the uniform distribution over the
    right closed unit interval (0, 1].

    :return: Random number.
    """
    # x -> 1 - x maps a draw from [0, 1) onto (0, 1].
    return 1.0 - np.random.rand()
def unit_interval_sampling(num_points):
    """
    Uniform random sampling of points in the unit interval [0, 1).

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    samples = np.random.rand(num_points)
    return samples
def closed_unit_interval_sampling(num_points):
    """
    Uniform random sampling of points in the closed unit interval [0, 1].

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # Draw each point independently with the scalar sampler.
    return np.array([closed_unit_interval_sample() for _ in range(num_points)])
def open_unit_interval_sampling(num_points):
    """
    Uniform random sampling of points in the unit interval (0, 1).

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # Draw each point independently with the scalar sampler.
    return np.array([open_unit_interval_sample() for _ in range(num_points)])
def left_closed_unit_interval_sampling(num_points):
    """
    Uniform random sampling of points in the unit interval [0, 1).

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # np.random.rand samples uniformly from [0, 1) already.
    return np.random.rand(num_points)
def right_closed_unit_interval_sampling(num_points):
    """
    Uniform random sampling of points in the unit interval (0, 1].

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # Mirror [0, 1) draws: x -> 1 - x maps [0, 1) onto (0, 1].
    return 1.0 - np.random.rand(num_points)
def unit_square_sampling(num_points):
    """
    Uniform random sampling of points in the unit square [0, 1) x [0, 1).

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # Draw both coordinates of every point in one call; one row per point.
    return np.random.rand(num_points, 2)
def unit_disc_sampling(num_points):
    r"""
    Uniform random sampling of points in the unit disc :math:`\{x \in \mathbb{R}^2 : \|x\| \leq 1\}`.

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    samples = np.empty((num_points, 2))
    accepted = 0
    while accepted < num_points:
        # Rejection sampling: draw from the square [-1, 1] x [-1, 1] and
        # keep the draw only when its squared norm is at most one.
        candidate = np.random.rand(2) * 2 - 1
        if np.dot(candidate, candidate) <= 1:
            samples[accepted] = candidate
            accepted += 1
    return samples
def unit_circle_sampling(num_points):
    """
    Uniform random sampling of points on the unit circle (:math:`S^1`).

    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # The circle is the surface of the 2-dimensional unit ball.
    circle_dimension = 2
    return nsphere_surface_sampling(circle_dimension, num_points)
def ncube_sampling(n, num_points):
    r"""
    Uniform random sampling of points in the n-dimensional unit cube :math:`[0, 1)^n`.

    :param n: Dimension of cube.
    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # One row per point, one column per dimension.
    return np.random.rand(num_points, n)
def nsphere_sampling(n, num_points):
    r"""
    Uniform random sampling of points in the n-dimensional unit sphere :math:`\{x \in \mathbb{R}^n : \|x\| \leq 1\}`.

    :param n: Dimension of sphere.
    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    samples = np.empty((num_points, n))
    accepted = 0
    while accepted < num_points:
        # Rejection sampling: draw from the cube [-1, 1]^n and keep the
        # draw only when its squared norm is at most one.
        candidate = np.random.rand(n) * 2 - 1
        if np.dot(candidate, candidate) <= 1:
            samples[accepted] = candidate
            accepted += 1
    return samples
def nsphere_surface_sampling(n, num_points):
    r"""
    Uniform random sampling of points on the surface of the n-dimensional unit sphere (:math:`\partial B^n = S^{n-1}`).

    :param n: Dimension of sphere.
    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    # Gaussian direction method (Muller 1959, Marsaglia 1972); see
    # http://mathworld.wolfram.com/SpherePointPicking.html
    directions = np.random.randn(num_points, n)
    norms = np.linalg.norm(directions, axis=1, keepdims=True)
    return directions / norms
def nsimplex_sampling(n, num_points):
    """
    Uniform random sampling of points inside the n-dimensional unit simplex.

    See :func:`polynomials_on_simplices.geometry.primitives.simplex.unit()`.

    :param n: Dimension of the simplex.
    :param num_points: Number of points to sample.
    :return: List of random points.
    """
    samples = np.empty((num_points, n))
    for row in range(num_points):
        # Rejection sampling: redraw from the unit cube until the
        # coordinate sum is at most one, i.e. the point lies in the simplex.
        candidate = np.random.rand(n)
        while np.sum(candidate) > 1.0:
            candidate = np.random.rand(n)
        samples[row] = candidate
    return samples
| 29.74537
| 119
| 0.662568
| 941
| 6,425
| 4.393199
| 0.109458
| 0.095791
| 0.044025
| 0.066763
| 0.808902
| 0.777213
| 0.750363
| 0.738752
| 0.70658
| 0.698839
| 0
| 0.020753
| 0.235019
| 6,425
| 215
| 120
| 29.883721
| 0.820346
| 0.493697
| 0
| 0.45
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.0125
| 0
| 0.4125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
324b05f8ce34a22e473468b320e1f1b3f230dfec
| 44,721
|
py
|
Python
|
src/linformer.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/linformer.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/linformer.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
import math
import torch
from torch import nn
from operator import mul
from fractions import gcd
import torch.nn.functional as F
from inspect import isfunction
from functools import partial, wraps, reduce
import numpy as np
########## Linformer projection on each kernel
class LinformerProjectionKernel(nn.Module):
    """Capsule routing layer: bilinear routing with a Linformer projection
    applied per kernel position.

    NOTE(review): the two branches of forward() (first iteration vs. later
    iterations) share most of their tensor pipeline — candidates for a
    shared helper. ``self.dropout`` is created but never applied in
    forward(); ``max_seq_len`` and ``heads`` in __init__ are unused.
    """
    def __init__(self,
                 in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
                 matrix_pose, layer_type, input_img_size, output_img_size, hidden_dim=None, kernel_size=None, parameter_sharing='headwise',
                 dropout = 0.):
        """Build the pose-transform parameters and the Linformer projection.

        NOTE(review): for layer_type 'FC' only self.kernel_size is forced to
        1 — the local `kernel_size` (possibly None) is still passed to
        torch.randn below, which would fail; confirm FC callers always pass
        an explicit kernel_size.
        """
        super().__init__()
        self.in_d_capsules = in_d_capsules
        self.out_d_capsules = out_d_capsules
        self.in_n_capsules = in_n_capsules
        self.out_n_capsules = out_n_capsules
        self.input_img_size=input_img_size
        self.output_img_size=output_img_size
        self.hidden_dim=hidden_dim
        self.pose_dim = in_d_capsules
        self.layer_type = layer_type
        self.kernel_size = kernel_size
        self.matrix_pose = matrix_pose
        self.parameter_sharing = parameter_sharing
        if self.layer_type == 'FC':
            self.kernel_size=1

        if matrix_pose:
            # Random Initialisation of Two matrices
            self.matrix_pose_dim = int(np.sqrt(self.in_d_capsules))
            # w_current =(3,3,32,4,4)
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
        else:
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                in_n_capsules, self.pose_dim, self.pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                out_n_capsules, self.pose_dim, self.pose_dim))

        # NOTE(review): max_seq_len and heads are computed but never used.
        max_seq_len = self.kernel_size*self.kernel_size*self.in_n_capsules
        heads = 1
        if parameter_sharing == "headwise":
            # print("Hello")
            self.E_proj = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                in_n_capsules, hidden_dim))
        else:
            assert (False),"Yet to write the non-headwise method"

        # Positional embeddings: (7,7,16)
        # self.rel_embedd = None
        self.dropout = nn.Dropout(dropout)
        print("You are using Bilinear routing with Linformer")

    def forward(self, current_pose, h_out=1, w_out=1, next_pose=None):
        """One routing iteration.

        When ``next_pose`` is None this is the first iteration (uniform
        routing coefficients); otherwise the coefficients come from the
        alignment between ``next_pose`` and the projected ``current_pose``.
        Shapes in the comments assume the 7x7/32-capsule example.
        """
        # print('Using linformer kernels')
        # current pose: (b,32,3,3,7,7,16)
        # if FC current pose is (b, numcaps*h_in*w_in, caps_dim)
        if next_pose is None:
            # ist iteration
            batch_size = current_pose.shape[0]
            if self.layer_type=='conv':
                # (b, h_out, w_out, num_capsules, kernel_size, kernel_size, capsule_dim)
                # (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                h_out = h_out
                w_out = w_out
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                #w_current =(3,3,32,4,4) --> (3*3*32, 4, 4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            #
            # W_current is C_{L} and w_next is N_{L}
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)

            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)#view error
            if self.matrix_pose:
                # (b*7*7, 3*3*32, 4, 4) = (49b, 288, 4, 4)
                # print(current_pose.shape)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)

            # Multiplying p{L} by C_{L} to change to c_{L}
            # Current pose: (49b, 288, 4, 4), w_current = (1, 288, 4, 4)
            # Same matrix for the entire batch, output = (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)

            ############## Linformer Projection
            current_pose = current_pose.permute(2,0,1) # (16,49b,288)
            E_proj = self.E_proj.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.hidden_dim) # (288, hidden_dim)
            current_pose = torch.matmul(current_pose, E_proj) # (16,49b,hidden_dim)
            current_pose = current_pose.permute(1,2,0) # (49b, hidden_Dim, 16)

            # R_{i,j} = (49b, m, 288): uniform routing coefficients.
            dots=(torch.ones(batch_size*h_out*w_out, self.out_n_capsules, self.hidden_dim)* (pose_dim ** -0.5)).type_as(current_pose).to(current_pose)
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose

            # Multiplies r_{i,j} with c_{L} ( no sorting in the 1st iteration) to give X. Still have to
            # multiply with N_{L}
            # next pose: (49b, m, 16)
            next_pose_candidates = torch.einsum('bij,bje->bie', dots, next_pose_candidates)

            ###################### Positional Embeddings
            # (49b,m,16) --> (b,m,7,7,16) + rel_embedding (7,7,16) and then reshaped to (49b,m,16)
            next_pose_candidates = next_pose_candidates.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            # next_pose_candidates = next_pose_candidates + self.rel_embedd
            next_pose_candidates = next_pose_candidates.permute(0,2,3,1,4)
            next_pose_candidates = next_pose_candidates.reshape(-1,next_pose_candidates.shape[3], next_pose_candidates.shape[4])
            if self.matrix_pose:
                # Correct shapes: (49b, m, 4, 4)
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1], self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(2)

            # Found final pose of next layer by multiplying X with N_{L}
            # Multiply (49b, m, 4, 4) with (1, m, 4, 4) == (49b, m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)

            # Reshape: (b, 7, 7, m, 16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            if self.layer_type == 'conv':
                # Reshape: (b,m,7,7,16) (just like original input, without expansion)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # Reshape: (b, 1, 1, m, 16) --> (b, 1, m, 16) (h_out, w_out ==1)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates

        else:
            # 2nd to T iterations
            batch_size = next_pose.shape[0]
            if self.layer_type=='conv':
                # Current_pose = (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                # next_pose = (b,m,7,7,16) --> (b,7,7,m,16)
                next_pose = next_pose.permute([0,2,3,1,4])
                h_out = next_pose.shape[1]
                w_out = next_pose.shape[2]
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                # w_current = (288,4,4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            # w_current = (1,288,4,4)
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)

            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 4, 4)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)

            # Tranformed currentlayer capsules to c_{L}
            # Multiply (49b, 288, 4, 4) with (1,288,4,4) --> (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)

            ############## Linformer Projection
            current_pose = current_pose.permute(2,0,1) # (16,49b,288)
            E_proj = self.E_proj.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.hidden_dim) # (288, hidden_dim)
            current_pose = torch.matmul(current_pose, E_proj) # (16,49b,hidden_dim)
            current_pose = current_pose.permute(1,2,0) # (49b, hidden_Dim, 16)

            ###################### Positonal Embeddings
            # Adding positional embeddings to next pose: (b,7,7,m,16) -->(b,m,7,7,16)+(7,7,16)
            # print("original ", next_pose.shape)
            next_pose = next_pose.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            # print(next_pose.shape, self.rel_embedd.shape)
            # next_pose = next_pose + self.rel_embedd

            # next_pose = (b,m,7,7,16) --> (49b,m,16)
            next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # next_pose = (49b,m,16) --> (49b,m,4,4)
                next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose = next_pose.unsqueeze(3)

            # Tranform next pose using N_{L}: w_next = (49b,m,4,4) * (1,m,4,4)
            next_pose = torch.matmul(w_next, next_pose)
            if self.matrix_pose:
                # next_pose = (49b,m,16)
                next_pose = next_pose.view(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            else:
                next_pose = next_pose.squeeze(3)

            # Finding scaled alignment scores between updated buckets
            # dots = (49b, m ,288)
            dots = torch.einsum('bje,bie->bji', next_pose, current_pose) * (pose_dim ** -0.5)

            # attention routing along dim=-2 (next layer buckets)
            # Dim=-1 if you wanna invert the inverted attention
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose

            # Yet to multiply with N_{L} (next_w)
            next_pose_candidates = torch.einsum('bji,bie->bje', dots, next_pose_candidates)
            if self.matrix_pose:
                # next pose: 49b,m,16 --> 49b,m,4,4
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1],self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(3)

            # Multiplied with N_{j} to get final pose
            # w_next: (49b,m,4,4); b_next_pose_candidates: (49b,m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)

            # next_pose_candidates = (b,7,7,m,16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            if self.layer_type == 'conv':
                # next_pose_candidates = (b,m,7,7,16)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # next_pose_candidates = (b,1,1,m,16) --> (b,1,m,16)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates
#Capsules Linformer projections but with convolution capsules too
class LinformerProjectionEntireOutImg(nn.Module):
    """Bilinear capsule routing with a Linformer-style low-rank projection.

    Routes poses from a layer of ``in_n_capsules`` capsules (dimension
    ``in_d_capsules``) to ``out_n_capsules`` capsules (dimension
    ``out_d_capsules``).  The current-layer poses are compressed along the
    spatial/sequence axis by a learned projection ``E_proj`` (Linformer),
    and learned positional embeddings ``rel_embedd`` over the full output
    grid are added to the next-layer poses.

    NOTE(review): the concrete numbers in the shape comments below
    (7x7 grid, 3x3 kernel, 32/288 capsules, 16-dim pose) describe one
    example configuration only; they are not enforced by the code.
    """
    def __init__(self,
        in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
        matrix_pose, layer_type, input_img_size, output_img_size, hidden_dim=None, kernel_size=None, parameter_sharing='headwise',
        dropout = 0.):
        """Build routing weights, the Linformer projection and positional embeddings.

        :param in_n_capsules: number of input capsules (for FC this is the
            flattened count num_caps*h_in*w_in — see the FC branch below).
        :param in_d_capsules: input capsule (pose) dimension.
        :param out_n_capsules: number of output capsules.
        :param out_d_capsules: output capsule dimension.
        :param matrix_pose: if True, poses are treated as sqrt(d) x sqrt(d) matrices.
        :param layer_type: 'conv' or 'FC'; selects the reshape paths in forward().
        :param input_img_size, output_img_size: spatial grid sizes used by E_proj
            and by the positional embeddings.
        :param hidden_dim: Linformer projection width (k in the paper).
        :param kernel_size: conv kernel size.
            NOTE(review): the parameter initialisers below read the raw
            ``kernel_size`` argument (not ``self.kernel_size``), so FC callers
            must still pass a numeric kernel_size — confirm against call sites.
        :param parameter_sharing: only 'headwise' is implemented.
        :param dropout: dropout probability (module created but not applied here).
        """
        super().__init__()
        self.in_d_capsules = in_d_capsules
        self.out_d_capsules = out_d_capsules
        self.in_n_capsules = in_n_capsules
        self.out_n_capsules = out_n_capsules
        self.input_img_size=input_img_size
        self.output_img_size=output_img_size
        self.hidden_dim=hidden_dim
        self.pose_dim = in_d_capsules
        self.layer_type = layer_type
        self.kernel_size = kernel_size
        self.matrix_pose = matrix_pose
        self.parameter_sharing = parameter_sharing
        if self.layer_type == 'FC':
            self.kernel_size=1
        if matrix_pose:
            # Random Initialisation of Two matrices
            self.matrix_pose_dim = int(np.sqrt(self.in_d_capsules))
            # w_current =(3,3,32,4,4)
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                                     in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                                     out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
        else:
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                                     in_n_capsules, self.pose_dim, self.pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                                     out_n_capsules, self.pose_dim, self.pose_dim))
        # max_seq_len / heads are computed but unused here (Linformer bookkeeping).
        max_seq_len = self.kernel_size*self.kernel_size*self.in_n_capsules
        heads = 1
        if parameter_sharing == "headwise":
            # print("Hello")
            if self.layer_type =='conv':
                # Per-capsule projection over the output grid: (n, H*W, hidden_dim)
                self.E_proj = nn.Parameter(0.02*torch.randn(self.in_n_capsules, output_img_size * output_img_size, hidden_dim))
            else:
                # FC: in_n_capsules is flattened (caps*h_in*w_in); project over the input grid.
                self.E_proj = nn.Parameter(0.02*torch.randn(int(self.in_n_capsules/(self.input_img_size * self.input_img_size)), input_img_size * input_img_size, hidden_dim))
        else:
            assert (False),"Yet to write the non-headwise method"
        # Positional embeddings: (7,7,16)
        self.rel_embedd = nn.Parameter(torch.randn(output_img_size, output_img_size, self.out_d_capsules), requires_grad=True)
        # self.rel_embedd = None
        self.dropout = nn.Dropout(dropout)
        print("You are using Bilinear routing with Linformer")

    def forward(self, current_pose, h_out=1, w_out=1, next_pose=None):
        """One routing iteration.

        First iteration (``next_pose is None``) routes with uniform scores;
        later iterations score agreement between the (positionally embedded)
        previous ``next_pose`` and the projected ``current_pose``.

        :param current_pose: conv: (b, n, k, k, h_out, w_out, d);
            FC: (b, num_caps*h_in*w_in, d) — per the comments below.
        :param h_out, w_out: output grid size (forced to 1 for FC).
        :param next_pose: previous-iteration output poses, or None.
        :return: conv: (b, m, h_out, w_out, d); FC: (b, 1, m, d).
        """
        # current pose: (b,32,3,3,7,7,16)
        # if FC current pose is (b, numcaps*h_in*w_in, caps_dim)
        if next_pose is None:
            # ist iteration
            batch_size = current_pose.shape[0]
            if self.layer_type=='conv':
                # (b, h_out, w_out, num_capsules, kernel_size, kernel_size, capsule_dim)
                # (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                h_out = h_out
                w_out = w_out
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                #w_current =(3,3,32,4,4) --> (3*3*32, 4, 4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            #
            # W_current is C_{L} and w_next is N_{L}
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)
            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)#view error
            if self.matrix_pose:
                # (b*7*7, 3*3*32, 4, 4) = (49b, 288, 4, 4)
                # print(current_pose.shape)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)
            # Multiplying p{L} by C_{L} to change to c_{L}
            # Current pose: (49b, 288, 4, 4), w_current = (1, 288, 4, 4)
            # Same matrix for the entire batch, output = (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)
            # Linformer projection
            # (b,3,3,16,32,1,7*7) X (32,49,hidden_dim) --> (b,3,3,16,32,1,hidden_dim)
            if self.layer_type=='conv':
                current_pose = current_pose.reshape(batch_size, self.kernel_size, self.kernel_size , self.pose_dim, self.in_n_capsules, 1, h_out*w_out)
                # print("Input shape: ", current_pose.shape, self.E_proj.shape)
                current_pose = torch.matmul(current_pose, self.E_proj).squeeze(5)
                # Flatten (k,k,n,hidden) into one "key" axis: (b, k*k*n*hidden, d)
                current_pose = current_pose.reshape(current_pose.shape[0], current_pose.shape[1]*current_pose.shape[2]*current_pose.shape[4]*current_pose.shape[5], current_pose.shape[3])
                dots=(torch.ones(batch_size, self.out_n_capsules*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules * self.hidden_dim)* (pose_dim ** -0.5)).type_as(current_pose).to(current_pose)
            else:
                # Input is (b, num_Caps*input_size*input_size,16) ==(b,5*5*32,16) -> (b,16,32,1,5*5) X (32,25,hidden_dim) --> (b,16,32,1,hidden_dim) -> (b,32*hidden_dim, 16)
                current_pose = current_pose.reshape(batch_size, self.pose_dim, int(self.in_n_capsules/(self.input_img_size * self.input_img_size)), 1, self.input_img_size * self.input_img_size)
                # print("Input shape: ", current_pose.shape, self.E_proj.shape)
                current_pose = torch.matmul(current_pose, self.E_proj).squeeze(3)
                current_pose = current_pose.reshape(current_pose.shape[0], current_pose.shape[2]*current_pose.shape[3], current_pose.shape[1])
                dots=(torch.ones(batch_size*h_out*w_out, self.out_n_capsules, self.kernel_size*self.kernel_size* int(self.in_n_capsules/(self.input_img_size * self.input_img_size)) * self.hidden_dim)* (pose_dim ** -0.5)).type_as(current_pose).to(current_pose)
            # print("Input shape: ", current_pose.shape, dots.shape)
            # R_{i,j} = (b, m*7*7, 3*3*32*hidden_dim)
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose
            # Multiplies r_{i,j} with c_{L} ( no sorting in the 1st iteration) to give X. Still have to
            # multiply with N_{L}
            # next pose: (49b, m, 16)
            next_pose_candidates = torch.einsum('bij,bje->bie', dots, next_pose_candidates)
            # (49b,m,16) --> (b,m,7,7,16) + rel_embedding (7,7,16) and then reshaped to (49b,m,16)
            next_pose_candidates = next_pose_candidates.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            next_pose_candidates = next_pose_candidates + self.rel_embedd
            next_pose_candidates = next_pose_candidates.permute(0,2,3,1,4)
            next_pose_candidates = next_pose_candidates.reshape(-1,next_pose_candidates.shape[3], next_pose_candidates.shape[4])
            if self.matrix_pose:
                # Correct shapes: (49b, m, 4, 4)
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1], self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(2)
            # Found final pose of next layer by multiplying X with N_{L}
            # Multiply (49b, m, 4, 4) with (1, m, 4, 4) == (49b, m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)
            # Reshape: (b, 7, 7, m, 16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            if self.layer_type == 'conv':
                # Reshape: (b,m,7,7,16) (just like original input, without expansion)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # Reshape: (b, 1, 1, m, 16) --> (b, 1, m, 16) (h_out, w_out ==1)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates
        else:
            # 2nd to T iterations
            batch_size = next_pose.shape[0]
            if self.layer_type=='conv':
                # Current_pose = (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                # next_pose = (b,m,7,7,16) --> (b,7,7,m,16)
                next_pose = next_pose.permute([0,2,3,1,4])
                h_out = next_pose.shape[1]
                w_out = next_pose.shape[2]
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                # w_current = (288,4,4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            # w_current = (1,288,4,4)
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)
            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 4, 4)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)
            # Tranformed currentlayer capsules to c_{L}
            # Multiply (49b, 288, 4, 4) with (1,288,4,4) --> (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)
            # Linformer projection
            # (b,3,3,16,32,1,7*7) X (32,49,hidden_dim) --> (b,3,3,16,32,1,hidden_dim)
            if self.layer_type=='conv':
                # print(current_pose.shape)
                current_pose = current_pose.reshape(batch_size, self.kernel_size, self.kernel_size , self.pose_dim, self.in_n_capsules, 1, h_out*w_out)
                # print("Input shape: ", current_pose.shape, self.E_proj.shape)
                current_pose = torch.matmul(current_pose, self.E_proj).squeeze(5)
                current_pose = current_pose.reshape(current_pose.shape[0], current_pose.shape[1]*current_pose.shape[2]*current_pose.shape[4]*current_pose.shape[5], current_pose.shape[3])
            else:
                # Input is (b, num_Caps*input_size*input_size,16) ==(b,5*5*32,16) -> (b,16,32,1,5*5) X (32,25,hidden_dim) --> (b,16,32,1,hidden_dim) -> (b,32*hidden_dim, 16)
                current_pose = current_pose.reshape(batch_size, self.pose_dim, int(self.in_n_capsules/(self.input_img_size * self.input_img_size)), 1, self.input_img_size * self.input_img_size)
                # print("Input shape: ", current_pose.shape, self.E_proj.shape)
                current_pose = torch.matmul(current_pose, self.E_proj).squeeze(3)
                current_pose = current_pose.reshape(current_pose.shape[0], current_pose.shape[2]*current_pose.shape[3], current_pose.shape[1])
            # Adding positional embeddings to next pose: (b,7,7,m,16) -->(b,m,7,7,16)+(7,7,16)
            # print("original ", next_pose.shape)
            next_pose = next_pose.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            # print(next_pose.shape, self.rel_embedd.shape)
            next_pose = next_pose + self.rel_embedd
            # next_pose = (b,m,7,7,16) --> (49b,m,16)
            next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # next_pose = (49b,m,16) --> (49b,m,4,4)
                next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose = next_pose.unsqueeze(3)
            # Tranform next pose using N_{L}: w_next = (49b,m,4,4) * (1,m,4,4)
            next_pose = torch.matmul(w_next, next_pose)
            if self.matrix_pose:
                # next_pose = (b,49m,16)
                # NOTE(review): conv flattens capsule+spatial dims into one axis here
                # while FC keeps (b*h*w, m, d); the candidate-stage reshape below
                # undoes this for conv — confirm the interleaving is intended.
                if self.layer_type=='conv':
                    next_pose = next_pose.view(batch_size, self.out_n_capsules*h_out*w_out, self.pose_dim)
                else:
                    next_pose = next_pose.view(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            else:
                next_pose = next_pose.squeeze(3)
            # Finding scaled alignment scores between updated buckets
            # dots = (49b, m ,288*hidden_dim)
            dots = torch.einsum('bje,bie->bji', next_pose, current_pose) * (pose_dim ** -0.5)
            # print("dots time shape: ", current_pose.shape, next_pose.shape, dots.shape)
            # attention routing along dim=-2 (next layer buckets)
            # Dim=-1 if you wanna invert the inverted attention
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose
            # Yet to multiply with N_{L} (next_w)
            next_pose_candidates = torch.einsum('bji,bie->bje', dots, next_pose_candidates)
            # print("Netx canditate: ", next_pose_candidates.shape)
            if self.matrix_pose:
                # next pose: 49b,m,16 --> 49b,m,4,4
                next_pose_candidates=next_pose_candidates.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1],self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(3)
            # Multiplied with N_{j} to get final pose
            # w_next: (49b,m,4,4); b_next_pose_candidates: (49b,m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)
            # next_pose_candidates = (b,7,7,m,16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            if self.layer_type == 'conv':
                # next_pose_candidates = (b,m,7,7,16)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # next_pose_candidates = (b,1,1,m,16) --> (b,1,m,16)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates
class BilinearProjectionWithEmbeddings(nn.Module):
    """Bilinear capsule routing with factorised (h/w) positional embeddings.

    Same bilinear routing scheme as the Linformer variants in this file,
    but without the E_proj sequence compression; instead, two learned
    positional embeddings (one per spatial axis, each covering half the
    pose dimension) are concatenated onto the output poses at the end of
    every iteration.

    NOTE(review): concrete numbers in the shape comments (7x7, 3x3, 32,
    16) describe one example configuration only.
    """
    def __init__(self,
        in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
        matrix_pose, layer_type, input_img_size, output_img_size, hidden_dim=None, kernel_size=None, parameter_sharing='headwise',
        dropout = 0.):
        """Build the two routing weight tensors and the h/w positional embeddings.

        :param matrix_pose: if True, poses are sqrt(d) x sqrt(d) matrices.
        :param layer_type: 'conv' or 'FC'.
        :param output_img_size: output grid size; sizes rel_embedd_h / rel_embedd_w.
        :param hidden_dim, input_img_size, parameter_sharing: stored/accepted for
            interface parity with the Linformer variants; unused in this class.
        :param kernel_size: conv kernel size.
            NOTE(review): the parameter initialisers read the raw ``kernel_size``
            argument, so FC callers must still pass a numeric value — confirm.
        """
        super().__init__()
        self.in_d_capsules = in_d_capsules
        self.out_d_capsules = out_d_capsules
        self.in_n_capsules = in_n_capsules
        self.out_n_capsules = out_n_capsules
        self.pose_dim = in_d_capsules
        self.layer_type = layer_type
        self.kernel_size = kernel_size
        self.matrix_pose = matrix_pose
        self.parameter_sharing = parameter_sharing
        if self.layer_type == 'FC':
            self.kernel_size=1
        if matrix_pose:
            # Random Initialisation of Two matrices
            self.matrix_pose_dim = int(np.sqrt(self.in_d_capsules))
            # w_current =(3,3,32,4,4)
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                                     in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                                     out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim))
        else:
            self.w_current = nn.Parameter(0.02*torch.randn(kernel_size, kernel_size,
                                     in_n_capsules, self.pose_dim, self.pose_dim))
            self.w_next = nn.Parameter(0.02*torch.randn(
                                     out_n_capsules, self.pose_dim, self.pose_dim))
        # max_seq_len / heads are computed but unused here.
        max_seq_len = self.kernel_size*self.kernel_size*self.in_n_capsules
        heads = 1
        # Positional embeddings: 2 embeddings (1,7,1,8) and (1,1,7,8)
        self.rel_embedd_h = nn.Parameter(torch.randn(1, output_img_size,1, self.out_d_capsules //2), requires_grad=True)
        self.rel_embedd_w = nn.Parameter(torch.randn(1, 1, output_img_size, self.out_d_capsules //2), requires_grad=True)
        # self.rel_embedd = None
        self.dropout = nn.Dropout(dropout)
        # NOTE(review): message copied from the Linformer class; this variant
        # performs no Linformer projection.
        print("You are using Bilinear routing with Linformer")

    def forward(self, current_pose, h_out=1, w_out=1, next_pose=None):
        """One routing iteration; see class docstring.

        :param current_pose: conv: (b, n, k, k, h_out, w_out, d);
            FC: (b, num_caps*h_in*w_in, d).
        :param h_out, w_out: output grid size (forced to 1 for FC).
        :param next_pose: previous-iteration output poses, or None (first iteration,
            uniform routing scores).
        :return: conv: (b, m, h_out, w_out, d); FC: (b, 1, m, d).
        """
        # current pose: (b,32,3,3,7,7,16)
        # if FC current pose is (b, numcaps*h_in*w_in, caps_dim)
        if next_pose is None:
            # ist iteration
            batch_size = current_pose.shape[0]
            if self.layer_type=='conv':
                # (b, h_out, w_out, num_capsules, kernel_size, kernel_size, capsule_dim)
                # (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                h_out = h_out
                w_out = w_out
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                #w_current =(3,3,32,4,4) --> (3*3*32, 4, 4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            #
            # W_current is C_{L} and w_next is N_{L}
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)
            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)#view error
            if self.matrix_pose:
                # (b*7*7, 3*3*32, 4, 4) = (49b, 288, 4, 4)
                # print(current_pose.shape)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)
            # Multiplying p{L} by C_{L} to change to c_{L}
            # Current pose: (49b, 288, 4, 4), w_current = (1, 288, 4, 4)
            # Same matrix for the entire batch, output = (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)
            # R_{i,j} = (49b, m, 288)
            dots=(torch.ones(batch_size*h_out*w_out, self.out_n_capsules, self.kernel_size*self.kernel_size*self.in_n_capsules)* (pose_dim ** -0.5)).type_as(current_pose).to(current_pose)
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose
            # Multiplies r_{i,j} with c_{L} ( no sorting in the 1st iteration) to give X. Still have to
            # multiply with N_{L}
            # next pose: (49b, m, 16)
            next_pose_candidates = torch.einsum('bij,bje->bie', dots, next_pose_candidates)
            ###################### Positional Embeddings
            # (49b,m,16) --> (b,m,7,7,16) + rel_embedding (7,7,16) and then reshaped to (49b,m,16)
            next_pose_candidates = next_pose_candidates.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            # next_pose_candidates = next_pose_candidates + self.rel_embedd
            next_pose_candidates = next_pose_candidates.permute(0,2,3,1,4)
            next_pose_candidates = next_pose_candidates.reshape(-1,next_pose_candidates.shape[3], next_pose_candidates.shape[4])
            if self.matrix_pose:
                # Correct shapes: (49b, m, 4, 4)
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1], self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(2)
            # Found final pose of next layer by multiplying X with N_{L}
            # Multiply (49b, m, 4, 4) with (1, m, 4, 4) == (49b, m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)
            # Reshape: (b, 7, 7, m, 16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            ###################### Positional Embeddings in the end
            next_pose_candidates = next_pose_candidates.permute(0,3,1,2,4) #(b,m,7,7,16)
            next_pose_candidates_h, next_pose_candidates_w = next_pose_candidates.split(self.pose_dim // 2, dim=4) # (b,m,7,7,8) and (b,m,7,7,8)
            # adding and concatenating (1,7,1,8) and (1,1,7,8) to (b,m,7,7,8)
            next_pose_candidates = torch.cat((next_pose_candidates_h + self.rel_embedd_h, next_pose_candidates_w + self.rel_embedd_w), dim=4)
            # next_pose_candidates = next_pose_candidates+self.rel_embedd
            next_pose_candidates = next_pose_candidates.permute(0,2,3,1,4)
            if self.layer_type == 'conv':
                # Reshape: (b,m,7,7,16) (just like original input, without expansion)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # Reshape: (b, 1, 1, m, 16) --> (b, 1, m, 16) (h_out, w_out ==1)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates
        else:
            # 2nd to T iterations
            batch_size = next_pose.shape[0]
            if self.layer_type=='conv':
                # Current_pose = (b,7,7,32,3,3,16)
                current_pose = current_pose.permute([0,4,5,1,2,3,6])
                # next_pose = (b,m,7,7,16) --> (b,7,7,m,16)
                next_pose = next_pose.permute([0,2,3,1,4])
                h_out = next_pose.shape[1]
                w_out = next_pose.shape[2]
            elif self.layer_type=='FC':
                h_out = 1
                w_out = 1
            pose_dim = self.pose_dim
            w_current = self.w_current
            w_next = self.w_next
            if self.matrix_pose:
                # w_current = (288,4,4)
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                w_current = w_current.view(self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim, self.pose_dim)
            # w_current = (1,288,4,4)
            w_current = w_current.unsqueeze(0)
            w_next = w_next.unsqueeze(0)
            current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 4, 4)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)#replace the 2 reshapes
            else:
                current_pose = current_pose.unsqueeze(2)
            # Tranformed currentlayer capsules to c_{L}
            # Multiply (49b, 288, 4, 4) with (1,288,4,4) --> (49b, 288, 4, 4)
            current_pose = torch.matmul(current_pose, w_current)
            if self.matrix_pose:
                # Current_pose = (49b, 288, 16)
                current_pose = current_pose.reshape(batch_size*h_out*w_out, self.kernel_size*self.kernel_size*self.in_n_capsules, self.pose_dim)
            else:
                current_pose = current_pose.squeeze(2)
            ###################### Positonal Embeddings
            # Adding positional embeddings to next pose: (b,7,7,m,16) -->(b,m,7,7,16)+(7,7,16)
            # print("original ", next_pose.shape)
            next_pose = next_pose.reshape(batch_size,self.out_n_capsules, h_out, w_out, self.pose_dim)
            # print(next_pose.shape, self.rel_embedd.shape)
            # next_pose = next_pose + self.rel_embedd
            # next_pose = (b,m,7,7,16) --> (49b,m,16)
            next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            if self.matrix_pose:
                # next_pose = (49b,m,16) --> (49b,m,4,4)
                next_pose = next_pose.reshape(batch_size*h_out*w_out, self.out_n_capsules, self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose = next_pose.unsqueeze(3)
            # Tranform next pose using N_{L}: w_next = (49b,m,4,4) * (1,m,4,4)
            next_pose = torch.matmul(w_next, next_pose)
            if self.matrix_pose:
                # next_pose = (49b,m,16)
                next_pose = next_pose.view(batch_size*h_out*w_out, self.out_n_capsules, self.pose_dim)
            else:
                next_pose = next_pose.squeeze(3)
            # Finding scaled alignment scores between updated buckets
            # dots = (49b, m ,288)
            dots = torch.einsum('bje,bie->bji', next_pose, current_pose) * (pose_dim ** -0.5)
            # attention routing along dim=-2 (next layer buckets)
            # Dim=-1 if you wanna invert the inverted attention
            dots = dots.softmax(dim=-2)
            next_pose_candidates = current_pose
            # Yet to multiply with N_{L} (next_w)
            next_pose_candidates = torch.einsum('bji,bie->bje', dots, next_pose_candidates)
            if self.matrix_pose:
                # next pose: 49b,m,16 --> 49b,m,4,4
                next_pose_candidates = next_pose_candidates.view(next_pose_candidates.shape[0], next_pose_candidates.shape[1],self.matrix_pose_dim, self.matrix_pose_dim)
            else:
                next_pose_candidates = next_pose_candidates.unsqueeze(3)
            # Multiplied with N_{j} to get final pose
            # w_next: (49b,m,4,4); b_next_pose_candidates: (49b,m , 4, 4)
            next_pose_candidates = torch.matmul(next_pose_candidates, w_next)
            # next_pose_candidates = (b,7,7,m,16)
            next_pose_candidates = next_pose_candidates.view(batch_size, h_out, w_out, self.out_n_capsules, self.pose_dim)
            ###################### Positional Embeddings in the end
            next_pose_candidates = next_pose_candidates.permute(0,3,1,2,4) #(b,m,7,7,16)
            next_pose_candidates_h, next_pose_candidates_w = next_pose_candidates.split(self.pose_dim // 2, dim=4) # (b,m,7,7,8) and (b,m,7,7,8)
            # adding and concatenating (1,7,1,8) and (1,1,7,8) to (b,m,7,7,8)
            next_pose_candidates = torch.cat((next_pose_candidates_h + self.rel_embedd_h, next_pose_candidates_w + self.rel_embedd_w), dim=4)
            # next_pose_candidates = next_pose_candidates+self.rel_embedd
            next_pose_candidates = next_pose_candidates.permute(0,2,3,1,4)
            if self.layer_type == 'conv':
                # next_pose_candidates = (b,m,7,7,16)
                next_pose_candidates = next_pose_candidates.permute([0,3,1,2,4])
            elif self.layer_type == 'FC':
                # next_pose_candidates = (b,1,1,m,16) --> (b,1,m,16)
                next_pose_candidates = next_pose_candidates.squeeze(1)
            return next_pose_candidates
# temp = torch.randn((2, 3, 32, 32))
# conv = AttentionConv(3, 16, kernel_size=3, padding=1)
# print(conv(temp).size())
| 49.143956
| 259
| 0.584759
| 6,268
| 44,721
| 3.872846
| 0.034939
| 0.092935
| 0.131246
| 0.058579
| 0.970711
| 0.967168
| 0.965149
| 0.959588
| 0.959423
| 0.955881
| 0
| 0.044036
| 0.306366
| 44,721
| 909
| 260
| 49.19802
| 0.738524
| 0.208135
| 0
| 0.942675
| 0
| 0
| 0.01282
| 0
| 0
| 0
| 0
| 0
| 0.004246
| 1
| 0.012739
| false
| 0
| 0.019108
| 0
| 0.050955
| 0.006369
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32629f50c91de3040b1f1c547c580df02e9f2ab3
| 157
|
py
|
Python
|
spiderBasic/urllibTest/parseTest/urlJoinTest.py
|
turoDog/LearningPython
|
8e87e1a6926e2d6d7f131fbadaf63a03e2aa41cd
|
[
"Apache-2.0"
] | 2
|
2018-07-11T02:13:44.000Z
|
2019-04-06T02:41:44.000Z
|
spiderBasic/urllibTest/parseTest/urlJoinTest.py
|
turoDog/LearningPython
|
8e87e1a6926e2d6d7f131fbadaf63a03e2aa41cd
|
[
"Apache-2.0"
] | null | null | null |
spiderBasic/urllibTest/parseTest/urlJoinTest.py
|
turoDog/LearningPython
|
8e87e1a6926e2d6d7f131fbadaf63a03e2aa41cd
|
[
"Apache-2.0"
] | null | null | null |
from urllib.parse import urljoin

# Demonstrate urljoin: a relative reference is resolved against the base,
# while an absolute URL in the second argument replaces the base entirely.
for base, ref in (
    ('http://www.baidu.com', 'FAQ.html'),
    ('http://www.baidu.com', 'https://cuiqingcai.com/FAQ.html'),
):
    print(urljoin(base, ref))
| 31.4
| 72
| 0.726115
| 24
| 157
| 4.75
| 0.583333
| 0.210526
| 0.280702
| 0.333333
| 0.473684
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044586
| 157
| 4
| 73
| 39.25
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0.503185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
3ec2346e7af30bc497124cb1abadfeb9ccbd34c8
| 834
|
py
|
Python
|
microdc/networking.py
|
joaosousafranco/microdc-init
|
ee0de117cebfe2ffc23cf5138578d51f6e0a6cb4
|
[
"Apache-2.0"
] | null | null | null |
microdc/networking.py
|
joaosousafranco/microdc-init
|
ee0de117cebfe2ffc23cf5138578d51f6e0a6cb4
|
[
"Apache-2.0"
] | null | null | null |
microdc/networking.py
|
joaosousafranco/microdc-init
|
ee0de117cebfe2ffc23cf5138578d51f6e0a6cb4
|
[
"Apache-2.0"
] | null | null | null |
def generate_subnets(network_cidr, offset):
    """Derive three /19 and three /22 subnet CIDR strings from a base network.

    Only the first three octets of *network_cidr* are used; the third octet
    is shifted by fixed increments plus *offset* to lay the subnets out.

    :param network_cidr: dotted base network, e.g. "10.0.0.0" (a trailing
        "/mask" is harmless because the fourth field is ignored).
    :param offset: integer added to every generated third octet.
    :return: tuple (subnets_19, subnets_22), each a list of three CIDR strings
        such as "10.0.32.0/19".
    """
    first, second, third = network_cidr.split('.')[:3]
    base = int(third) + offset

    def _cidrs(increments, suffix):
        # One CIDR string per third-octet increment, sharing the same suffix.
        return ["{}.{}.{}.{}".format(first, second, base + inc, suffix)
                for inc in increments]

    # /19 blocks are 32 third-octet values wide; /22 blocks are 4 wide.
    subnets_19 = _cidrs((32, 64, 96), '0/19')
    subnets_22 = _cidrs((0, 4, 8), '0/22')
    return subnets_19, subnets_22
| 64.153846
| 112
| 0.601918
| 128
| 834
| 3.671875
| 0.171875
| 0.323404
| 0.204255
| 0.255319
| 0.72766
| 0.72766
| 0.72766
| 0.72766
| 0.72766
| 0.72766
| 0
| 0.089041
| 0.1247
| 834
| 12
| 113
| 69.5
| 0.554795
| 0
| 0
| 0
| 1
| 0
| 0.109113
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3ed25cdf98d39aab42054ff45be7eb6e3b80f035
| 183
|
py
|
Python
|
violas_client/lbrtypes/account_config/__init__.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | null | null | null |
violas_client/lbrtypes/account_config/__init__.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | null | null | null |
violas_client/lbrtypes/account_config/__init__.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | 1
|
2022-01-05T06:49:42.000Z
|
2022-01-05T06:49:42.000Z
|
from violas_client.lbrtypes.account_config.constants import *
from violas_client.lbrtypes.account_config.events import *
from violas_client.lbrtypes.account_config.resources import *
| 45.75
| 61
| 0.868852
| 24
| 183
| 6.375
| 0.416667
| 0.196078
| 0.313725
| 0.470588
| 0.803922
| 0.803922
| 0.562092
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 183
| 3
| 62
| 61
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
3ee8c06f6162a6ef4ca1150829f7d9470f93a488
| 28,505
|
py
|
Python
|
tests/unit_tests/test_tethys_apps/test_cli/test_services_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2020-10-08T20:38:33.000Z
|
2020-10-08T20:38:33.000Z
|
tests/unit_tests/test_tethys_apps/test_cli/test_services_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2018-04-14T19:40:54.000Z
|
2018-04-14T19:40:54.000Z
|
tests/unit_tests/test_tethys_apps/test_cli/test_services_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2021-09-07T14:47:11.000Z
|
2021-09-07T14:47:11.000Z
|
try:
from StringIO import StringIO
except ImportError:
from io import StringIO # noqa: F401
import unittest
import mock
from tethys_apps.cli.services_commands import services_create_persistent_command, services_remove_persistent_command,\
services_create_spatial_command, services_remove_spatial_command, services_list_command
from django.core.exceptions import ObjectDoesNotExist
from django.db.utils import IntegrityError
class ServicesCommandsTest(unittest.TestCase):
"""
Tests for tethys_apps.cli.services_commands
"""
# Dictionary used in some of the tests
my_dict = {'id': 'Id_foo', 'name': 'Name_foo', 'host': 'Host_foo', 'port': 'Port_foo', 'endpoint': 'EndPoint_foo',
'public_endpoint': 'PublicEndPoint_bar', 'apikey': 'APIKey_foo'}
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_create_persistent_command(self, mock_service, mock_pretty_output):
"""
Test for services_create_persistent_command.
For running the test without any errors or problems.
:param mock_service: mock for PersistentStoreService
:param mock_pretty_output: mock for pretty_output text
:return:
"""
mock_args = mock.MagicMock()
services_create_persistent_command(mock_args)
mock_service.assert_called()
po_call_args = mock_pretty_output().__enter__().write.call_args_list
self.assertEqual(1, len(po_call_args))
self.assertEqual('Successfully created new Persistent Store Service!', po_call_args[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_create_persistent_command_exception_indexerror(self, mock_service, mock_pretty_output):
"""
Test for services_create_persistent_command.
For running the test with an IndexError exception thrown.
:param mock_service: mock for PersistentStoreService
:param mock_pretty_output: mock for pretty_output text
:return:
"""
mock_args = mock.MagicMock()
mock_service.side_effect = IndexError
services_create_persistent_command(mock_args)
mock_service.assert_called()
mock_service.objects.get().save.assert_not_called()
po_call_args = mock_pretty_output().__enter__().write.call_args_list
self.assertEqual(1, len(po_call_args))
self.assertIn('The connection argument (-c) must be of the form', po_call_args[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_create_persistent_command_exception_integrityerror(self, mock_service, mock_pretty_output):
"""
Test for services_create_persistent_command.
For running the test with an IntegrityError exception thrown.
:param mock_service: mock for PersistentStoreService
:param mock_pretty_output: mock for pretty_output text
:return:
"""
mock_args = mock.MagicMock()
mock_service.side_effect = IntegrityError
services_create_persistent_command(mock_args)
mock_service.assert_called()
mock_service.objects.get().save.assert_not_called()
po_call_args = mock_pretty_output().__enter__().write.call_args_list
self.assertEqual(1, len(po_call_args))
self.assertIn('Persistent Store Service with name', po_call_args[0][0][0])
self.assertIn('already exists. Command aborted.', po_call_args[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_remove_persistent_command_Exceptions(self, mock_service, mock_exit, mock_pretty_output):
    """
    Test services_remove_persistent_command when looking up the service
    raises exceptions (ValueError, then ObjectDoesNotExist).

    :param mock_service: mock for PersistentStoreService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.force = True
    mock_service.objects.get.side_effect = [ValueError, ObjectDoesNotExist]

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit

    self.assertRaises(SystemExit, services_remove_persistent_command, args)

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    message = written[0][0][0]
    for fragment in ('A Persistent Store Service with ID/Name', 'does not exist'):
        self.assertIn(fragment, message)
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_remove_persistent_command_force(self, mock_service, mock_exit, mock_pretty_output):
    """
    Test services_remove_persistent_command with the force flag set, so the
    service is deleted without prompting.

    :param mock_service: mock for PersistentStoreService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.force = True

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit

    self.assertRaises(SystemExit, services_remove_persistent_command, args)

    mock_service.objects.get().delete.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertIn('Successfully removed Persistent Store Service', written[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.input')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_remove_persistent_command_no_proceed_invalid_char(self, mock_service, mock_exit,
                                                                    mock_pretty_output, mock_input):
    """
    Test services_remove_persistent_command when the user first enters an
    invalid answer at the delete prompt, then answers no.

    :param mock_service: mock for PersistentStoreService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :param mock_input: mock for handling raw_input requests
    :return:
    """
    args = mock.MagicMock()
    args.force = False

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit
    # First answer is invalid; the retry declines the deletion.
    mock_input.side_effect = ['foo', 'N']

    self.assertRaises(SystemExit, services_remove_persistent_command, args)

    mock_service.objects.get().delete.assert_not_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertEqual('Aborted. Persistent Store Service not removed.', written[0][0][0])

    prompts = mock_input.call_args_list
    self.assertEqual(2, len(prompts))
    self.assertEqual('Are you sure you want to delete this Persistent Store Service? [y/n]: ',
                     prompts[0][0][0])
    self.assertEqual('Please enter either "y" or "n": ', prompts[1][0][0])
@mock.patch('tethys_apps.cli.services_commands.input')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.PersistentStoreService')
def test_services_remove_persistent_command_proceed(self, mock_service, mock_exit, mock_pretty_output, mock_input):
    """
    Test services_remove_persistent_command when the user confirms the
    delete prompt with yes.

    :param mock_service: mock for PersistentStoreService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :param mock_input: mock for handling raw_input requests
    :return:
    """
    args = mock.MagicMock()
    args.force = False

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit
    mock_input.side_effect = ['y']

    self.assertRaises(SystemExit, services_remove_persistent_command, args)

    mock_service.objects.get().delete.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertIn('Successfully removed Persistent Store Service', written[0][0][0])

    prompts = mock_input.call_args_list
    self.assertEqual(1, len(prompts))
    self.assertEqual('Are you sure you want to delete this Persistent Store Service? [y/n]: ',
                     prompts[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_create_spatial_command_IndexError(self, mock_service, mock_pretty_output):
    """
    Test services_create_spatial_command when the connection argument is
    malformed, triggering an IndexError.

    :param mock_service: mock for SpatialDatasetService
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.connection = 'IndexError:9876@IndexError'  # No 'http' or '://'

    services_create_spatial_command(args)

    mock_service.assert_not_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    message = written[0][0][0]
    for fragment in ('The connection argument (-c) must be of the form',
                     '"<username>:<password>@<protocol>//<host>:<port>".'):
        self.assertIn(fragment, message)
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_create_spatial_command_FormatError(self, mock_service, mock_pretty_output):
    """
    Test services_create_spatial_command when the public_endpoint argument
    is malformed, triggering a FormatError.

    :param mock_service: mock for SpatialDatasetService
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.connection = 'foo:pass@http:://foo:1234'
    args.public_endpoint = 'foo@foo:foo'  # No 'http' or '://'

    services_create_spatial_command(args)

    mock_service.assert_not_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    message = written[0][0][0]
    for fragment in ('The public_endpoint argument (-p) must be of the form ',
                     '"<protocol>//<host>:<port>".'):
        self.assertIn(fragment, message)
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_create_spatial_command_IntegrityError(self, mock_service, mock_pretty_output):
    """
    Test services_create_spatial_command when saving the service raises an
    IntegrityError (duplicate name).

    :param mock_service: mock for SpatialDatasetService
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.connection = 'foo:pass@http:://foo:1234'
    args.public_endpoint = 'http://foo:1234'
    mock_service.side_effect = IntegrityError

    services_create_spatial_command(args)

    mock_service.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    message = written[0][0][0]
    for fragment in ('Spatial Dataset Service with name ', 'already exists. Command aborted.'):
        self.assertIn(fragment, message)
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_create_spatial_command(self, mock_service, mock_pretty_output):
    """
    Test the success path of services_create_spatial_command: the service
    is created and saved.

    :param mock_service: mock for SpatialDatasetService
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.connection = 'foo:pass@http:://foo:1234'
    args.public_endpoint = 'http://foo:1234'
    mock_service.return_value = mock.MagicMock()

    services_create_spatial_command(args)

    mock_service.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertEqual('Successfully created new Spatial Dataset Service!', written[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_remove_spatial_command_Exceptions(self, mock_service, mock_exit, mock_pretty_output):
    """
    Test services_remove_spatial_command when looking up the service raises
    exceptions (ValueError, then ObjectDoesNotExist).

    :param mock_service: mock for SpatialDatasetService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    mock_service.objects.get.side_effect = [ValueError, ObjectDoesNotExist]

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit

    self.assertRaises(SystemExit, services_remove_spatial_command, args)

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    message = written[0][0][0]
    for fragment in ('A Spatial Dataset Service with ID/Name', 'does not exist.'):
        self.assertIn(fragment, message)
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_remove_spatial_command_force(self, mock_service, mock_exit, mock_pretty_output):
    """
    Test services_remove_spatial_command with the force flag set, so the
    service is deleted without prompting.

    :param mock_service: mock for SpatialDatasetService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :return:
    """
    args = mock.MagicMock()
    args.force = True

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit

    self.assertRaises(SystemExit, services_remove_spatial_command, args)

    mock_service.objects.get().delete.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertIn('Successfully removed Spatial Dataset Service', written[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.input')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_remove_spatial_command_no_proceed_invalid_char(self, mock_service, mock_exit,
                                                                 mock_pretty_output, mock_input):
    """
    Test services_remove_spatial_command when the user first enters an
    invalid answer at the delete prompt, then answers no.

    :param mock_service: mock for SpatialDatasetService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :param mock_input: mock for handling raw_input requests
    :return:
    """
    args = mock.MagicMock()
    args.force = False

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit
    # First answer is invalid; the retry declines the deletion.
    mock_input.side_effect = ['foo', 'N']

    self.assertRaises(SystemExit, services_remove_spatial_command, args)

    mock_service.objects.get().delete.assert_not_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertEqual('Aborted. Spatial Dataset Service not removed.', written[0][0][0])

    prompts = mock_input.call_args_list
    self.assertEqual(2, len(prompts))
    # NOTE(review): the prompt text says "Persistent Store Service" even in
    # the spatial command — this mirrors the current implementation; confirm
    # whether the prompt wording upstream is intentional.
    self.assertEqual('Are you sure you want to delete this Persistent Store Service? [y/n]: ',
                     prompts[0][0][0])
    self.assertEqual('Please enter either "y" or "n": ', prompts[1][0][0])
@mock.patch('tethys_apps.cli.services_commands.input')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_apps.cli.services_commands.exit')
@mock.patch('tethys_services.models.SpatialDatasetService')
def test_services_remove_spatial_command_proceed(self, mock_service, mock_exit, mock_pretty_output, mock_input):
    """
    Test services_remove_spatial_command when the user confirms the delete
    prompt with yes.

    :param mock_service: mock for SpatialDatasetService
    :param mock_exit: mock for handling exit() code in function
    :param mock_pretty_output: mock for pretty_output text
    :param mock_input: mock for handling raw_input requests
    :return:
    """
    args = mock.MagicMock()
    args.force = False

    # exit() is mocked to raise SystemExit so the command terminates where
    # the real implementation would; assertRaises absorbs it below.
    mock_exit.side_effect = SystemExit
    mock_input.side_effect = ['y']

    self.assertRaises(SystemExit, services_remove_spatial_command, args)

    mock_service.objects.get().delete.assert_called()

    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(1, len(written))
    self.assertIn('Successfully removed Spatial Dataset Service', written[0][0][0])

    prompts = mock_input.call_args_list
    self.assertEqual(1, len(prompts))
    # NOTE(review): the prompt text says "Persistent Store Service" even in
    # the spatial command — this mirrors the current implementation; confirm
    # whether the prompt wording upstream is intentional.
    self.assertEqual('Are you sure you want to delete this Persistent Store Service? [y/n]: ',
                     prompts[0][0][0])
@mock.patch('tethys_apps.cli.services_commands.print')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.PersistentStoreService')
@mock.patch('tethys_services.models.SpatialDatasetService')
@mock.patch('tethys_apps.cli.services_commands.model_to_dict')
def test_services_list_command_not_spatial_not_persistent(self, mock_mtd, mock_spatial, mock_persistent,
                                                          mock_pretty_output, mock_print):
    """
    Test services_list_command with neither the spatial nor the persistent
    flag set, so both service types are listed.

    :param mock_mtd: mock for model_to_dict to return a dictionary
    :param mock_spatial: mock for SpatialDatasetService
    :param mock_persistent: mock for PersistentStoreService
    :param mock_pretty_output: mock for pretty_output text
    :param mock_print: mock for text written with print statements
    :return:
    """
    mock_mtd.return_value = self.my_dict
    args = mock.MagicMock()
    args.spatial = False
    args.persistent = False
    mock_spatial.objects.order_by('id').all.return_value = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]
    mock_persistent.objects.order_by('id').all.return_value = [mock.MagicMock(), mock.MagicMock()]

    services_list_command(args)

    # pretty_output gets a title plus a header row per service type.
    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(4, len(written))
    self.assertIn('Persistent Store Services:', written[0][0][0])
    persistent_header = written[1][0][0]
    for column in ('ID', 'Name', 'Host', 'Port'):
        self.assertIn(column, persistent_header)
    for column in ('Endpoint', 'Public Endpoint', 'API Key'):
        self.assertNotIn(column, persistent_header)
    self.assertIn('Spatial Dataset Services:', written[2][0][0])
    spatial_header = written[3][0][0]
    for column in ('ID', 'Name', 'Endpoint', 'Public Endpoint', 'API Key'):
        self.assertIn(column, spatial_header)
    for column in ('Host', 'Port'):
        self.assertNotIn(column, spatial_header)

    # Data rows go through Python's print: persistent rows first, then the
    # spatial rows (index 4 is the first spatial row after 2 persistent).
    printed = mock_print.call_args_list
    persistent_row = printed[0][0][0]
    for field in ('id', 'name', 'host', 'port'):
        self.assertIn(self.my_dict[field], persistent_row)
    spatial_row = printed[4][0][0]
    for field in ('id', 'name', 'endpoint', 'public_endpoint', 'apikey'):
        self.assertIn(self.my_dict[field], spatial_row)
    for field in ('host', 'port'):
        self.assertNotIn(self.my_dict[field], spatial_row)
@mock.patch('tethys_apps.cli.services_commands.print')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.SpatialDatasetService')
@mock.patch('tethys_apps.cli.services_commands.model_to_dict')
def test_services_list_command_spatial(self, mock_mtd, mock_spatial, mock_pretty_output, mock_print):
    """
    Test services_list_command with only the spatial flag set.

    :param mock_mtd: mock for model_to_dict to return a dictionary
    :param mock_spatial: mock for SpatialDatasetService
    :param mock_pretty_output: mock for pretty_output text
    :param mock_print: mock for text written with print statements
    :return:
    """
    mock_mtd.return_value = self.my_dict
    args = mock.MagicMock()
    args.spatial = True
    args.persistent = False
    mock_spatial.objects.order_by('id').all.return_value = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]

    services_list_command(args)

    # pretty_output gets the title and the header row only.
    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(2, len(written))
    self.assertIn('Spatial Dataset Services:', written[0][0][0])
    header = written[1][0][0]
    for column in ('ID', 'Name', 'Endpoint', 'Public Endpoint', 'API Key'):
        self.assertIn(column, header)
    for column in ('Host', 'Port'):
        self.assertNotIn(column, header)

    # Data rows go through Python's print.
    printed = mock_print.call_args_list
    row = printed[2][0][0]
    for field in ('id', 'name', 'endpoint', 'public_endpoint', 'apikey'):
        self.assertIn(self.my_dict[field], row)
    for field in ('host', 'port'):
        self.assertNotIn(self.my_dict[field], row)
@mock.patch('tethys_apps.cli.services_commands.print')
@mock.patch('tethys_apps.cli.services_commands.pretty_output')
@mock.patch('tethys_services.models.PersistentStoreService')
@mock.patch('tethys_apps.cli.services_commands.model_to_dict')
def test_services_list_command_persistent(self, mock_mtd, mock_persistent, mock_pretty_output, mock_print):
    """
    Test services_list_command with only the persistent flag set.

    :param mock_mtd: mock for model_to_dict to return a dictionary
    :param mock_persistent: mock for PersistentStoreService
    :param mock_pretty_output: mock for pretty_output text
    :param mock_print: mock for text written with print statements
    :return:
    """
    mock_mtd.return_value = self.my_dict
    args = mock.MagicMock()
    args.spatial = False
    args.persistent = True
    mock_persistent.objects.order_by('id').all.return_value = [mock.MagicMock(), mock.MagicMock()]

    services_list_command(args)

    # pretty_output gets the title and the header row only.
    written = mock_pretty_output().__enter__().write.call_args_list
    self.assertEqual(2, len(written))
    self.assertIn('Persistent Store Services:', written[0][0][0])
    header = written[1][0][0]
    for column in ('ID', 'Name', 'Host', 'Port'):
        self.assertIn(column, header)
    for column in ('Endpoint', 'Public Endpoint', 'API Key'):
        self.assertNotIn(column, header)

    # Data rows go through Python's print.
    printed = mock_print.call_args_list
    row = printed[1][0][0]
    for field in ('id', 'name', 'host', 'port'):
        self.assertIn(self.my_dict[field], row)
    for field in ('endpoint', 'public_endpoint', 'apikey'):
        self.assertNotIn(self.my_dict[field], row)
| 49.573913
| 119
| 0.69465
| 3,778
| 28,505
| 4.943356
| 0.056114
| 0.066824
| 0.055151
| 0.029985
| 0.939869
| 0.935104
| 0.932105
| 0.929107
| 0.912722
| 0.904048
| 0
| 0.013234
| 0.20207
| 28,505
| 574
| 120
| 49.660279
| 0.80787
| 0.229644
| 0
| 0.712963
| 0
| 0
| 0.206055
| 0.124567
| 0
| 0
| 0
| 0
| 0.398148
| 1
| 0.061728
| false
| 0.018519
| 0.024691
| 0
| 0.092593
| 0.027778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ef9d31f84d1987c0a5ac0b585ae28806d59f465
| 41
|
py
|
Python
|
yacs/component/utils/__init__.py
|
RuchikaShashidhara/YACS
|
7ca1d087f06cbb9a5fe7af767ddf502654938660
|
[
"MIT"
] | 3
|
2020-12-31T15:16:52.000Z
|
2021-01-01T12:28:14.000Z
|
yacs/component/utils/__init__.py
|
RuchikaShashidhara/YACS
|
7ca1d087f06cbb9a5fe7af767ddf502654938660
|
[
"MIT"
] | null | null | null |
yacs/component/utils/__init__.py
|
RuchikaShashidhara/YACS
|
7ca1d087f06cbb9a5fe7af767ddf502654938660
|
[
"MIT"
] | 3
|
2021-01-02T10:54:08.000Z
|
2021-09-12T07:05:37.000Z
|
from . import errors
from . import logger
| 20.5
| 20
| 0.780488
| 6
| 41
| 5.333333
| 0.666667
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 41
| 2
| 21
| 20.5
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb381748ec0c09ad9c7df9899c33a498ff8d1ea5
| 90
|
py
|
Python
|
lazaro/agents/replay_buffers/base/segment_trees/__init__.py
|
GabrielMusat/lazaro
|
9879e938eb1e6da1b6974edf8ab41ece7f33063c
|
[
"Apache-2.0"
] | 4
|
2021-05-03T15:48:44.000Z
|
2021-05-23T16:05:42.000Z
|
lazaro/agents/replay_buffers/base/segment_trees/__init__.py
|
GabrielMusat/lazaro
|
9879e938eb1e6da1b6974edf8ab41ece7f33063c
|
[
"Apache-2.0"
] | null | null | null |
lazaro/agents/replay_buffers/base/segment_trees/__init__.py
|
GabrielMusat/lazaro
|
9879e938eb1e6da1b6974edf8ab41ece7f33063c
|
[
"Apache-2.0"
] | null | null | null |
from .sum_segment_tree import SumSegmentTree
from .min_segment_tree import MinSegmentTree
| 30
| 44
| 0.888889
| 12
| 90
| 6.333333
| 0.666667
| 0.289474
| 0.447368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 45
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
de6ba8bcf5db86a81be9b9df47b1344c98477912
| 564
|
py
|
Python
|
sfaira_extension/versions/topology_versions/mouse/embedding/__init__.py
|
theislab/sfaira_extension
|
22910c7f20e48defbcb5b82c2137e97ee7ed428f
|
[
"BSD-3-Clause"
] | null | null | null |
sfaira_extension/versions/topology_versions/mouse/embedding/__init__.py
|
theislab/sfaira_extension
|
22910c7f20e48defbcb5b82c2137e97ee7ed428f
|
[
"BSD-3-Clause"
] | 3
|
2020-11-03T17:37:37.000Z
|
2021-02-15T12:47:52.000Z
|
sfaira_extension/versions/topology_versions/mouse/embedding/__init__.py
|
theislab/sfaira_extension
|
22910c7f20e48defbcb5b82c2137e97ee7ed428f
|
[
"BSD-3-Clause"
] | 1
|
2022-03-03T15:11:14.000Z
|
2022-03-03T15:11:14.000Z
|
from sfaira_extension.versions.topology_versions.mouse.embedding.ae import AE_TOPOLOGIES
from sfaira_extension.versions.topology_versions.mouse.embedding.linear import LINEAR_TOPOLOGIES
from sfaira_extension.versions.topology_versions.mouse.embedding.nmf import NMF_TOPOLOGIES
from sfaira_extension.versions.topology_versions.mouse.embedding.vae import VAE_TOPOLOGIES
from sfaira_extension.versions.topology_versions.mouse.embedding.vaeiaf import VAEIAF_TOPOLOGIES
from sfaira_extension.versions.topology_versions.mouse.embedding.vaevamp import VAEVAMP_TOPOLOGIES
| 80.571429
| 98
| 0.904255
| 72
| 564
| 6.833333
| 0.208333
| 0.121951
| 0.231707
| 0.329268
| 0.796748
| 0.796748
| 0.796748
| 0.796748
| 0.680894
| 0
| 0
| 0
| 0.042553
| 564
| 6
| 99
| 94
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
dec4f63eaac2d2084ede3ffa98b6505ea1e1de7b
| 147
|
py
|
Python
|
ivy/functional/backends/torch/nn/__init__.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | 161
|
2021-01-20T22:11:13.000Z
|
2022-01-09T09:46:33.000Z
|
ivy/functional/backends/torch/nn/__init__.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | 4
|
2021-11-10T17:04:36.000Z
|
2021-11-26T06:40:43.000Z
|
ivy/functional/backends/torch/nn/__init__.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | 8
|
2021-02-17T20:56:33.000Z
|
2022-01-09T16:45:40.000Z
|
from . import activations
from .activations import *
from . import converters
from .converters import *
from . import layers
from .layers import *
| 21
| 26
| 0.77551
| 18
| 147
| 6.333333
| 0.277778
| 0.263158
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 147
| 6
| 27
| 24.5
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
720252e1086e319a7429f1a0d20f24dea7f46fdc
| 27,086
|
py
|
Python
|
pksampler/pksequencer/newpatchform.py
|
patrickkidd/pksampler-0.3
|
ffe5f1fde1d86052da34d9ee9c44934461c441e2
|
[
"MIT"
] | null | null | null |
pksampler/pksequencer/newpatchform.py
|
patrickkidd/pksampler-0.3
|
ffe5f1fde1d86052da34d9ee9c44934461c441e2
|
[
"MIT"
] | null | null | null |
pksampler/pksequencer/newpatchform.py
|
patrickkidd/pksampler-0.3
|
ffe5f1fde1d86052da34d9ee9c44934461c441e2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/patrick/pksampler-0.3/pksampler/pksequencer/newpatchform.ui'
#
# Created: Wed Jun 29 02:39:01 2005
# by: The PyQt User Interface Compiler (pyuic) 3.14.1
#
# WARNING! All changes made in this file will be lost!
import sys
from qt import *
import os, os.path
image0_data = \
"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d" \
"\x49\x48\x44\x52\x00\x00\x00\x20\x00\x00\x00\x20" \
"\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\x00\x00\x09" \
"\xd6\x49\x44\x41\x54\x78\x9c\x75\x97\x5b\x6f\x1b" \
"\xd7\xb9\x86\x9f\xb5\xd6\x90\x33\x9c\x19\x0e\x49" \
"\x91\x22\x25\x8a\x52\x6c\x49\x6e\x63\xc5\x89\x0d" \
"\xcb\x70\x63\x37\x4a\xe3\x16\x28\xda\x8d\x8d\x5d" \
"\x20\xd8\x17\xfd\x09\x01\xfa\x27\xf2\x5b\x72\x59" \
"\xa0\x17\xbb\xd9\x45\x73\x51\x04\x6d\x53\xc7\x68" \
"\x82\x04\xa9\x63\xc1\xb2\x64\xd9\x96\x6d\x3a\x3a" \
"\x99\x1a\x9e\x86\xc3\x39\xad\x5e\xc8\xa6\xad\xb8" \
"\x79\x81\xc1\x80\x33\x58\xf3\x3d\xeb\x3b\x2e\x8a" \
"\x5f\xfd\xef\xbb\x7a\x6e\x79\x01\xc3\x32\x59\xa9" \
"\xbf\xc5\xe6\xfa\x3d\xb6\x0f\x77\x68\x9c\xaa\x92" \
"\xb3\x04\xb9\x03\x41\x67\xf3\x3e\x8b\xa5\x12\x42" \
"\x6b\x76\x82\x80\x2c\x8a\xa8\x7b\x1e\x61\x92\x90" \
"\xcd\xcc\xf0\xb4\xd3\xa1\x58\x2c\xd2\x6c\xb6\xf0" \
"\x7d\x49\xb5\xda\xc2\xb6\xf3\x14\x8b\x19\xa6\x99" \
"\xf2\x43\xaa\xd7\xeb\x42\x2c\x2c\x2f\x68\xaf\xe4" \
"\xe1\x59\x1e\x6b\xef\xac\xe1\x79\x1e\x5f\x7d\xf3" \
"\x15\x1d\xbf\x83\x10\x82\xd6\x6c\x0b\x43\x1a\xec" \
"\xb5\xdb\x18\xb9\x1c\x8e\xeb\x62\x5a\x16\x49\x92" \
"\x90\xcf\xe7\xc9\xe5\xf3\x98\xa6\x49\xbd\x5e\xc7" \
"\xf7\xbb\x7c\xf9\xc5\x37\x1c\x1e\xf6\x29\x57\x2a" \
"\xe4\xf3\x0a\xab\x90\xe1\xfb\x3e\x96\x65\xb1\xf6" \
"\xce\x3b\x38\x8e\x33\x01\x98\x9b\x9b\x43\x5c\xbb" \
"\x76\x4d\xd7\x6a\x35\x86\x47\x47\x74\xf6\xf6\x78" \
"\xf3\xec\x59\x12\xa5\xe8\x8e\xc7\x08\x21\x50\xca" \
"\xc4\x34\xab\x08\x21\xc9\x32\x49\x2e\x37\x40\x88" \
"\x94\xfe\xdd\xbb\xbc\x5e\xa9\x20\x72\x39\x06\xe3" \
"\x31\x1b\x7b\x7b\x34\x84\x60\xb5\xd9\x24\xe7\x79" \
"\xe4\x6a\x35\x36\xc6\x63\xfe\xfc\xb7\xbf\x81\x10" \
"\x44\xe3\x31\xef\xfe\xec\x67\x84\xa3\x11\x7e\xb7" \
"\x8b\x10\x82\x2b\x57\xae\xa0\x6a\x4a\x7d\xb8\x6a" \
"\x9a\x5c\x32\x4d\xae\xce\xcf\x53\x8f\x63\x1a\x51" \
"\x44\x79\x65\x85\xe6\xc2\x02\xf7\xee\x3d\x62\x73" \
"\x33\xc0\xb6\xcf\x92\x24\x0e\x71\xec\x31\x3f\x5f" \
"\xe4\xc6\x8d\xcf\x78\xf8\xe8\x11\x35\xcb\xa2\x9c" \
"\xcf\xd3\x2a\x16\xd9\xde\xdb\x63\x63\x77\x97\xf5" \
"\x4e\x87\xc3\x76\x9b\x6a\x1c\x73\xba\x58\xa4\x29" \
"\x25\xca\x34\x71\xa7\xa7\xf9\xfc\xc6\x0d\x1e\x3d" \
"\x7a\x44\xbf\xdf\x27\xcb\x32\xd4\x7b\xad\xd6\x87" \
"\x59\x1c\xf3\x79\x77\x8f\xdb\x6e\xc8\x46\x7e\xc4" \
"\x1d\x67\x84\xae\x19\x94\x0a\x15\x8a\x05\x9b\x5b" \
"\xb7\xfe\xc9\xdd\x87\xff\x20\xf7\xa3\x3d\x0a\x75" \
"\x9f\x51\xae\xcf\xec\x85\x05\xac\xe2\x14\x0f\xf6" \
"\x7d\x6e\x6f\x6e\x52\xb7\x6d\xae\xae\xac\x70\x71" \
"\x7a\x9a\x53\x95\x0a\x77\x3a\x1d\xfe\x79\xe7\x0e" \
"\x0d\xcb\xe2\xcd\x33\x67\x98\x73\x5d\xa2\x6e\x97" \
"\x1d\xdf\x27\x97\xcb\x91\x33\x0c\x5a\xad\x16\xe2" \
"\xec\x8f\xe7\x75\xf9\xad\x19\xec\xa6\x87\x61\x19" \
"\xd8\xb6\x8d\xa1\x0c\x92\x2c\x21\xaf\xf3\x9c\xb7" \
"\xce\x23\x32\xc1\x5f\xfe\xfa\x17\x6e\x6e\xdf\x64" \
"\xe5\xe7\x2b\xd4\xa6\x6b\xc4\x71\x8c\x8e\x34\x0b" \
"\x72\x81\xcd\xaf\x6e\xb1\x75\xeb\x16\xa7\xeb\x75" \
"\x6c\xc7\xc1\x1f\x0c\x18\x06\x01\x8b\xe7\xcf\xb3" \
"\x7d\xfb\x36\x3a\xcb\x70\x5c\x17\xdb\x75\x79\x63" \
"\x75\x95\x28\x8e\xc9\xb2\x8c\x95\x95\x15\x8c\xdc" \
"\x8f\x2b\x14\x66\x8b\x04\x7e\xc0\xc1\xf6\x01\xe3" \
"\xfe\x98\x24\x4a\x48\xa2\x04\xc7\x72\x68\xfd\x77" \
"\x8b\xe9\xca\x34\x6b\x57\xd6\x08\x82\x80\x9b\x7f" \
"\xba\x89\xa1\x0c\x2c\xd3\xc2\xb6\x6d\xa6\x56\xa7" \
"\xf8\xc5\xaf\x7f\xcd\x5b\xab\xab\x1c\x3c\x79\x82" \
"\x65\xdb\x9c\x71\x1c\x1c\xd7\xa5\x58\x2a\x71\xf9" \
"\xa7\x3f\x25\x8e\x63\x46\xa3\x11\xc3\xe1\x70\x72" \
"\x45\x51\x84\xef\xfb\x88\xcb\xbf\xba\xac\xd7\xde" \
"\x5e\xe3\x0f\xbf\xff\x03\xc3\xc1\x10\xcb\xb4\x70" \
"\x1c\x87\xbc\x91\x67\xb1\xb5\xc8\xc5\x8b\x17\xb1" \
"\x6d\x9b\x24\x49\xd0\x5a\x33\x1e\x8f\x31\x0c\x03" \
"\xa5\x14\x42\x88\x13\x1f\xee\xf7\xfb\x93\x8f\x0b" \
"\x21\x00\x10\x42\x60\x18\x06\x49\x92\x60\x18\x06" \
"\x9e\xe7\xe1\x38\x0e\xb5\x5a\xed\x38\x04\xe7\xde" \
"\x3c\xa7\x7f\x72\xf9\x27\xcc\xcf\xcf\xe3\x38\x0e" \
"\x49\x92\x30\x1a\x8d\xb8\x77\xef\x1e\xed\x76\x9b" \
"\xad\xad\x2d\x96\x96\x96\x68\x34\x1a\x8c\x46\x23" \
"\xe2\x38\x9e\x94\x91\x10\x82\x5c\x2e\x87\x52\x0a" \
"\x29\x25\xae\xeb\x62\xdb\x36\xa6\x69\x22\x84\x40" \
"\x6b\x8d\x10\x02\x29\x25\xc5\x62\x11\xcb\xb2\x26" \
"\x6b\xb3\x2c\xa3\xd1\x68\x20\x5a\xad\x96\x4e\xd3" \
"\x94\x46\xa3\xc1\xa5\x4b\x97\x98\x99\x99\xe1\xe0" \
"\xe0\x80\x9d\x9d\x1d\xd6\xd7\xd7\xc9\xb2\x8c\x8b" \
"\x17\x2f\xb2\xb4\xb4\x84\xd6\x7a\x62\xf8\xf9\x5d" \
"\x08\x41\xa1\x50\xc0\x71\x1c\xa4\x94\x27\x0c\x84" \
"\x61\x48\xaf\xd7\xa3\xdf\xef\xd3\xeb\xf5\x18\x0e" \
"\x87\x8c\xc7\x63\xb4\xd6\x18\x86\xc1\xf9\xf3\xe7" \
"\x11\xd5\x6a\x55\x5f\xb8\x70\x81\xf7\xdf\x7f\x9f" \
"\x8f\x3f\xfe\x98\x76\xbb\x4d\xa9\x54\x42\x4a\x89" \
"\xd6\x9a\xa9\xa9\x29\x0e\x0f\x0f\x59\x5d\x5d\xe5" \
"\xd4\xa9\x53\x8c\x9f\xf5\x07\xad\x35\xa3\xd1\x88" \
"\x7e\xbf\x3f\x31\xd0\xeb\xf5\x18\x8d\x46\x68\xad" \
"\x51\x4a\x61\x59\x16\x9e\xe7\xe1\xba\xee\x04\x5a" \
"\x4a\x39\x01\x5d\x5a\x5a\x42\xac\xad\xad\xe9\x5c" \
"\x2e\xc7\xe5\xcb\x97\x59\x5c\x5c\x24\x49\x12\x3a" \
"\x9d\x0e\x61\x18\x02\xb0\xb5\xb5\x45\x14\x45\x7c" \
"\xfb\xed\xb7\x84\x61\xc8\xfc\xfc\xfc\x64\xe7\xb6" \
"\x6d\x53\x2c\x16\x29\x95\x4a\x14\x0a\x05\x92\x24" \
"\x21\x4d\x53\x94\x52\x68\xad\x27\x9b\x50\x4a\xe1" \
"\x79\x1e\xc5\x62\x11\xc3\x30\x4e\x74\x42\xa3\xd9" \
"\x6c\xb2\xbe\xbe\xce\x47\x1f\x7d\x84\x61\x18\x7c" \
"\xf0\xc1\x07\x98\xa6\x89\xe7\x79\x6c\x6f\x6f\x63" \
"\xdb\x36\x77\xee\xdc\x61\x38\x1c\x02\xb0\xb0\xb0" \
"\x80\xeb\xba\x27\x42\xf1\x3c\x34\xb5\x5a\x0d\xcf" \
"\xf3\x26\x06\x92\x44\x32\x1e\x2b\xc2\x50\x11\x04" \
"\x0a\xdf\x17\x14\x0a\x29\xd3\xd3\x21\x4a\x69\x86" \
"\x83\x01\xe2\xdc\xb9\x73\xba\xdb\xed\x92\x24\xc9" \
"\x64\xe1\xe2\xe2\x22\xa3\xd1\x08\xc3\x30\xd8\xdd" \
"\xdd\x25\x49\x12\x94\x52\xd4\x6a\x35\x7e\xf1\xde" \
"\x7b\xcc\x94\xcb\xe4\xc7\x63\x54\x92\x30\x2c\x97" \
"\xc9\x5e\x8a\x3d\x40\x1c\xc7\xfc\xf1\x8f\x5f\xd3" \
"\x68\x5c\x25\x9f\x7f\x0e\x9b\xa2\xd4\x18\xc3\x88" \
"\x50\xca\x60\x66\xc6\xa0\x39\x0b\x46\x9a\xa6\xa4" \
"\x69\x3a\x29\x17\xd7\x75\xb1\x2c\x8b\x66\xb3\x49" \
"\xa9\x54\xe2\xed\x37\xde\xa0\x78\x74\x44\xcd\x30" \
"\x28\x1b\x06\xf9\x76\x1b\xf1\xf0\x21\x5a\x6b\x34" \
"\xe0\x94\xcb\xec\x2f\x2f\x9f\x80\x78\xf2\x64\x97" \
"\xcd\xcd\x2f\x79\xfc\xf8\x2e\xaf\xbf\xfe\x5f\x54" \
"\xab\x4d\x4c\x33\x20\xcb\x4c\x92\xe4\xb8\xd2\xda" \
"\xed\x90\x72\xf9\x35\x8c\xd3\xa7\x4f\x53\xab\xd5" \
"\x28\x14\x0a\x54\xab\x55\x5c\xd7\x45\x4a\x89\x08" \
"\x02\xe4\xfd\xfb\x64\xbb\xbb\x18\x42\x50\x75\x1c" \
"\x64\x96\x31\x2e\x14\x08\x5d\x17\x01\xe4\x86\x43" \
"\x2c\xdf\xa7\xbe\xbd\xcd\xde\xf2\x32\x7a\x92\x68" \
"\xe0\xba\x16\x83\xc1\x11\xff\xfa\xd7\xef\x59\x5a" \
"\x7a\x97\x56\xeb\x12\x96\x15\x52\x28\x8c\x48\x53" \
"\x93\x38\x2e\x32\x1c\x5a\xa8\xab\x57\xaf\x7e\xb8" \
"\xb2\xb2\x42\xa5\x52\xc1\x34\x4d\xe4\x78\x8c\xda" \
"\xda\x42\x6f\x6c\x90\xf6\x7a\x64\x5a\x93\x6a\x8d" \
"\x00\x4c\xa5\x30\xd2\x94\xe2\xcc\x0c\x59\xa5\x42" \
"\x3f\x08\x18\xe6\xf3\xc4\x51\x44\x65\x38\x24\xa8" \
"\x54\x40\x08\x3c\xcf\x63\x6a\xca\x63\x77\x77\x8f" \
"\x20\x08\x79\xfa\xf4\x3e\x41\xd0\xa1\x5c\x3e\x43" \
"\x9a\x7a\x24\x49\x1e\x50\x34\x1a\x1e\x06\xc0\x78" \
"\x3c\xe6\xd1\xf6\x36\x4b\x5a\x23\xf7\xf7\x8f\xbb" \
"\xde\xf7\x0e\x0f\x83\x28\xa2\x90\xcb\x51\x5c\x5e" \
"\xc6\x9d\x9b\x23\xfa\xec\x33\x6a\xcf\x92\x2f\x55" \
"\x0a\xd9\xe9\x50\x17\x82\xfd\xd3\xa7\x41\x08\x96" \
"\x97\x97\xb1\x2c\x8b\x4f\x3f\xfd\x8c\x27\x4f\x3a" \
"\xec\xed\xdd\x26\x08\x9e\xf2\x9b\xdf\xfc\x8e\xd5" \
"\xd5\x0b\x78\x9e\x4b\xa5\x62\xa1\x2e\x5c\xb8\xf0" \
"\xa1\x10\x82\x8d\x4f\x3f\x65\x51\x4a\xb2\x2c\xe3" \
"\x3f\x49\x03\x69\x96\x91\x1f\x0c\x08\x1e\x3c\x80" \
"\x67\x39\xa0\x85\x40\x09\x81\x69\x59\x4c\x17\x8b" \
"\xd8\x8e\xc3\x91\x94\x08\x21\x28\x95\x4a\xcc\xce" \
"\xd6\xe9\xf5\x7c\x7c\x7f\xc0\x78\x3c\x64\x73\xf3" \
"\x4b\xfa\xfd\x23\x3e\xf9\xe4\xff\xd8\xdc\xfc\xf2" \
"\xd8\x03\xd5\x6a\x95\xfd\x30\x24\xc9\x32\x8c\xef" \
"\x65\xf4\xcb\x0a\xe2\x98\xfe\x78\x4c\xd1\xb6\xb1" \
"\x1d\x87\x5a\xa3\x41\xb5\xd9\xc4\xf0\x3c\xfa\x59" \
"\x46\xbb\xd3\xe1\xf1\xf6\x36\x69\xa3\x81\x2a\x14" \
"\x00\x68\x34\x1a\xfc\xf2\x97\x3f\xe7\xfa\xf5\xcf" \
"\x59\x5f\x7f\x40\x18\x0e\xf9\xfb\xdf\x3f\x06\xa0" \
"\x52\x59\x3d\x06\x30\x0c\x83\x50\x29\x06\x51\x44" \
"\xf9\xa5\x7e\xfd\x5c\x99\xd6\xa4\x59\x46\x94\xa6" \
"\xf4\x93\x84\xb3\xb3\xb3\x1c\x66\x19\x5f\xdf\xba" \
"\xc5\xd6\x27\x9f\xb0\xeb\xfb\x74\xc3\x90\x20\x8e" \
"\x89\xb3\x8c\xff\xf9\xed\x6f\x69\x9e\x3a\x35\x59" \
"\x5f\x2e\x97\xb9\x76\xed\x3d\xca\xe5\x6f\xf8\xfa" \
"\xeb\x75\xc2\x30\xc6\xb6\x2d\x66\x67\x6b\x4c\xda" \
"\xd2\x4c\xab\xc5\x93\x5e\xef\x04\x80\x7e\x96\x80" \
"\x71\x9a\x32\x88\x22\xf6\x86\x43\x1e\xf8\x3e\xff" \
"\xbf\xb1\xc1\x38\x49\x88\xb2\x6c\xd2\x84\x5e\x96" \
"\xdf\xed\xd2\xfc\xde\x33\xdb\xb6\xb9\x72\xe5\x6d" \
"\xce\x9d\x7b\x83\x28\x8a\x70\x1c\x87\xa5\xa5\xa5" \
"\x17\x00\xf3\xf3\xf3\x3c\xb8\x7e\x9d\xd7\x6b\x35" \
"\xc4\xb3\x5d\xc7\x59\x46\x10\xc7\x1c\x06\x01\xf7" \
"\x7d\x9f\x76\xaf\x47\x94\xfe\xf0\x29\x17\x40\x1a" \
"\x06\x8f\xf6\x1f\xd3\x32\x4e\x13\xc8\x00\x3b\xb3" \
"\xf1\x92\xe3\xee\x28\xa5\xa4\x54\x2a\xbd\xf0\x6c" \
"\x96\x9d\x04\xb8\xde\xed\x12\xa5\x29\x02\x08\x93" \
"\x84\x4e\x18\xf2\xd0\xf7\xd9\xe9\x76\x09\x5e\x1a" \
"\xc3\x2f\x4b\x99\x79\xdc\x66\x89\xe2\x7c\x19\xb7" \
"\x55\xc6\x9d\xf3\x30\x1c\x83\x2d\xbd\x05\x1a\x84" \
"\x14\x34\xf3\x4d\x66\xa2\x99\x57\xd6\x0e\x87\xc3" \
"\x17\x00\x53\x53\x53\x8c\xa4\x64\x77\x30\x40\x09" \
"\x41\xbb\xdf\xe7\xbe\xef\xd3\x0b\xc3\x49\x49\x4a" \
"\xa5\xc8\x59\x16\x05\xaf\x88\x3d\xef\x51\x5e\x99" \
"\xa2\x38\xef\x81\x7c\x36\x0f\x34\x08\x04\x64\x20" \
"\x85\x44\xc8\xe3\xa1\xb5\x2f\xf6\x21\xcf\x2b\x10" \
"\x8e\xe3\xbc\x00\x00\xa8\x37\x9b\x7c\xd1\x6e\xa3" \
"\x84\xc0\x8f\x63\xbc\x4a\x85\x1f\x9d\x39\x43\xb1" \
"\x52\xc1\xf1\x3c\x2c\xdb\x86\x02\x1c\x98\x07\x8c" \
"\xc4\x08\x2d\x20\x89\x13\x94\xa1\x26\x13\xf2\xf9" \
"\xf5\x32\x80\x10\x82\xa7\xea\x29\x12\x49\x3d\xaa" \
"\x4f\xec\x19\x86\x71\x12\xe0\xda\xb5\x6b\xec\xb7" \
"\xdb\xcc\xcc\xce\x52\x6b\x34\xc8\x9b\xe6\xe4\x5d" \
"\x42\xc2\x4e\xb4\xc3\xe3\xe0\x31\x49\x9c\xa0\xb3" \
"\x63\xbf\x8c\x07\x63\x9c\xb2\x73\x6c\x50\x08\x84" \
"\x7c\x61\xfc\xe5\xbb\xcc\x24\x47\xea\x08\x91\x17" \
"\x4c\x47\xd3\x2f\x20\x5e\x06\x68\xb5\x5a\xb4\x5a" \
"\xad\x13\x6e\xd2\x68\xf6\x92\x3d\xee\x8d\xee\x11" \
"\x8e\x43\xb2\xe7\x99\xff\x2c\x2e\xd1\x28\xc2\x2c" \
"\x98\x98\xb6\x79\x3c\x43\xe4\xf1\x11\x4c\x0a\x79" \
"\xe2\xf7\xa5\xca\x25\x46\x83\x11\xeb\xf1\x3a\xb2" \
"\x2f\xa9\x46\xd5\x57\x01\x26\x46\xb5\xe6\xe8\xe8" \
"\x88\x6e\xd6\xe5\xc8\x3e\xc2\x0f\x7d\xb2\x24\x03" \
"\xcd\x2b\x65\xa7\x33\x4d\xd0\x0b\x30\x6d\x13\x53" \
"\x9a\x48\x5f\x92\x16\x53\x64\x41\x22\xd5\x31\x84" \
"\x29\x4d\xce\x35\xcf\x71\xfd\xbb\xeb\x14\x46\x05" \
"\xfa\x46\x1f\x71\x24\x98\x63\xee\x55\x80\xdb\xb7" \
"\x6f\x73\xe3\x8b\x1b\x4c\x9d\x9f\xa2\xf4\x5a\x89" \
"\x6c\x98\xa1\x33\xfd\x1f\xeb\xfd\xb9\xd2\x38\x65" \
"\xd4\x1f\xe1\xd6\x5d\x9a\xb3\x4d\x8c\xbe\x41\x78" \
"\x10\xd2\xd5\x5d\xb2\x52\xc6\x42\x63\x01\xa5\x14" \
"\x87\xc9\x21\xf9\x67\xff\x25\x03\x23\xc0\xcf\xfb" \
"\xaf\x02\xec\xee\xee\xa2\x3d\x8d\x3b\xeb\x92\xc6" \
"\xe9\x09\x77\xff\x90\xb4\x3e\xf6\x42\xe0\x05\xb4" \
"\xed\x36\xaa\xa4\x70\xa7\x5d\x66\xd5\x2c\xaa\xaf" \
"\x68\xa9\x16\x8f\x83\xc7\xa8\x9c\x22\x27\x73\x08" \
"\x29\xb0\x4c\x0b\xed\x68\x8c\xef\xbe\xfb\xee\xc4" \
"\x69\x36\x8a\x22\x8e\xda\x47\x14\xee\x16\x90\xea" \
"\x87\xe7\xc2\x2b\x12\xd0\xdf\xed\x53\xae\x95\x31" \
"\x0c\x03\x29\x25\x4a\x29\x94\x52\xdc\x6c\xdf\x3c" \
"\xb6\x21\x79\x91\x9c\x42\x20\x5f\x93\xfc\x1b\xc1" \
"\xcd\x66\x9e\x42\xf6\x57\x8f\x00\x00\x00\x00\x49" \
"\x45\x4e\x44\xae\x42\x60\x82"
image1_data = \
"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d" \
"\x49\x48\x44\x52\x00\x00\x00\x16\x00\x00\x00\x16" \
"\x08\x06\x00\x00\x00\xc4\xb4\x6c\x3b\x00\x00\x02" \
"\xc2\x49\x44\x41\x54\x78\x9c\xd5\x94\x3d\x8f\x1c" \
"\x45\x10\x86\x9f\xd9\x9d\x35\x77\x3e\xf0\xc7\x05" \
"\xc8\x32\x92\x03\x02\x10\x01\x82\x7f\x80\x90\x90" \
"\x08\x10\x88\x3f\x00\x31\x11\x22\x75\x06\xd1\x45" \
"\x04\xfc\x03\x62\x48\x40\x20\x41\x82\x21\x73\x00" \
"\x42\x32\x10\xfa\x64\x19\x64\xc9\xbe\x15\x77\xbb" \
"\x33\xbb\xd3\xdd\x55\xd5\xd5\x04\x33\x3b\xc7\xc9" \
"\x6b\x3b\x76\x4b\xad\xaa\xee\x19\xbd\xfd\xf4\xdb" \
"\xd5\x0d\x4f\x5b\xab\x36\xc9\x0f\x3f\xfe\xfa\x51" \
"\x5d\x4f\x0f\x54\x33\xb3\x7a\x42\xd3\x76\xc4\xa4" \
"\x64\xcb\xcc\xea\x29\x66\x19\x11\xc5\x2c\xa3\x6a" \
"\xa8\x6e\xa2\x71\xe7\x9f\x39\xf3\xa3\xc5\x77\xcd" \
"\x62\xf5\xf1\xcf\x37\x3e\x5f\x00\xd4\x1b\xe1\xba" \
"\x9e\x1e\xbc\xf4\xca\xb5\x4b\x50\x71\xed\xea\x3e" \
"\x9f\x7e\xf1\x0d\xcb\x26\x70\xe1\xb9\x5d\x3e\xfb" \
"\xe4\xfd\xc7\xd2\x7d\x7f\xe3\x16\x7f\xde\xba\xfd" \
"\xc1\xb7\x5f\xfd\x02\xf0\xe1\x19\xe1\x94\x94\x2e" \
"\x0a\xd5\xb0\x89\x9b\xbf\x1f\x72\xf9\xc2\x79\xea" \
"\x7a\xf2\xc4\x6d\x2f\x9a\x0e\x07\xdc\xf3\xbb\x23" \
"\xe8\x26\x91\x61\x7b\x55\xd5\x0b\xef\x5f\xdc\xe3" \
"\xe5\x17\xaf\xf0\xc2\x95\xcb\x4f\x14\x16\x31\x96" \
"\xcb\x15\x29\xc9\x38\x77\x2a\x2c\x8a\xa8\x31\xa9" \
"\x7a\xc2\xb7\xdf\x78\x95\xab\xcf\x5f\x64\xff\xd2" \
"\xb3\x8f\x15\x75\x2f\x2c\x9b\x96\xe5\xa2\xc1\x2c" \
"\x3f\x2c\x9c\xc4\x10\xcd\x94\x62\x9c\x34\x1d\xef" \
"\xbd\xf5\x3a\xa5\x14\x4a\x81\xf9\x71\x3b\xe4\xfd" \
"\x78\x13\xcd\x8c\x93\x93\x96\xa3\x07\xc7\x74\x21" \
"\x62\x66\xdb\x88\x7b\x2b\xba\x98\xf8\xed\x8f\x3b" \
"\xa8\x65\xcc\x32\x96\x7d\xc8\xbd\xaf\x88\x9c\x11" \
"\x31\x62\x48\x34\xcb\x96\xf5\x3a\x70\x7c\xd2\x12" \
"\xa3\x3c\x8a\x58\x11\x55\x42\x1c\x4a\xca\x72\x5f" \
"\x52\x96\x07\xff\x8d\x98\x94\x18\x85\xb6\x5d\x11" \
"\xba\x48\x4a\x42\x4a\x8a\x88\x10\x63\x24\xe7\x2d" \
"\xc2\x3a\x58\xe1\xee\x9c\xdf\x3d\x87\x65\x1f\x6a" \
"\xd7\xa8\x8a\xe3\x06\x93\x92\x99\x96\xcc\xce\x6c" \
"\xca\x64\xf7\x1c\xb3\x49\xc5\x6c\x3a\xe1\x28\x04" \
"\x52\x12\x72\xf6\x47\x11\x1b\x3b\xcf\xcc\x78\xe7" \
"\xcd\xd7\x70\x77\x42\x10\x42\x48\xa4\xa4\xa8\xda" \
"\x60\x97\xf5\x07\x2d\x36\xf6\x2f\xbf\xfe\x89\x79" \
"\x48\x8f\xf7\x18\xc0\x2c\x13\x42\x22\x04\x41\xd5" \
"\xb6\x08\x9f\x5d\x20\x89\x20\xa2\xdb\xad\x48\x49" \
"\x11\xcd\x98\x1a\x77\xef\xfe\xcb\xfd\xfb\x0d\xaa" \
"\x36\x5c\x61\x1d\xae\xb0\x8e\xf6\x98\xf5\xe2\x3b" \
"\x3b\x35\x29\x0a\x49\xd2\x76\x61\x19\xc8\x4c\x95" \
"\xaa\x2a\xec\xed\xd5\x88\x80\x2a\xd4\x75\xa1\xae" \
"\x61\x3a\x2d\x98\xc1\x64\x52\x50\x2d\x54\x55\x01" \
"\x1c\x19\x88\xdd\xb7\x79\x9c\x94\xd5\xba\x23\x9b" \
"\xb1\x5a\x45\x16\x8b\x6e\xa4\xea\x09\x4f\x1f\x21" \
"\x11\x1d\x1f\xa1\xd9\xac\x1a\xac\x90\xed\xc4\x31" \
"\x24\xd6\xeb\x40\x71\x47\xd5\x70\xcf\xe4\xfc\x70" \
"\x37\xeb\xa3\xbb\x8d\x42\xfd\x42\xb2\x9d\x38\xc4" \
"\x44\x08\x09\xbc\x60\x66\x94\xe2\xb8\xf7\xbd\xcf" \
"\xf3\x30\xee\x63\xce\x9b\xbc\xa0\x22\xa8\x28\xee" \
"\x5b\x88\xc3\x3a\xb2\x5e\xb4\xe4\x6c\xac\x56\xcb" \
"\xf1\xe4\x4f\xab\xe0\xec\x21\xaa\xe6\xf1\x9f\x75" \
"\xdb\x90\xba\x35\xbe\xad\x8e\x8f\xe7\x8b\xeb\xf7" \
"\xfe\x7e\x70\x10\x53\xe2\xf6\x5f\x87\xe4\x5c\x46" \
"\xe2\x9c\x0b\xa5\xf8\xff\xe6\x4e\xbf\xb9\x17\xee" \
"\x1d\xce\xe9\x82\xe2\xce\x75\x9e\xda\xf6\x1f\x12" \
"\x1a\xe0\xff\xdf\x79\x4b\xbc\x00\x00\x00\x00\x49" \
"\x45\x4e\x44\xae\x42\x60\x82"
image2_data = \
"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d" \
"\x49\x48\x44\x52\x00\x00\x00\x16\x00\x00\x00\x16" \
"\x08\x06\x00\x00\x00\xc4\xb4\x6c\x3b\x00\x00\x03" \
"\xf7\x49\x44\x41\x54\x78\x9c\xb5\x95\x5f\x68\xd5" \
"\x65\x18\xc7\x3f\xe7\x77\x7e\xd3\x33\xdd\x3f\x67" \
"\xfe\xcb\x74\xc7\x22\xed\x62\xf9\x07\x12\x23\x33" \
"\x2c\x56\x20\x06\x2d\x09\x82\xa8\x8b\xec\x22\x88" \
"\x8a\xea\xc2\x42\x32\x2d\x2b\x28\x08\xb1\x2e\x02" \
"\x2f\xb4\xa0\xee\xa2\x16\x15\x5d\xe4\x68\x66\x64" \
"\x1b\xca\xcc\xd4\xe2\xb4\x64\x73\x6e\xce\xf3\x6f" \
"\x67\x3b\xe7\xfc\xfe\xbc\x7f\x9e\xb7\x8b\x6d\xce" \
"\x95\x12\x5e\xf8\xc2\xc3\xfb\xc0\xfb\x3c\x9f\xf7" \
"\xe1\x79\xbf\x0f\x6f\xc2\x39\xc7\x8d\x58\xde\x0d" \
"\xa1\x02\x89\xeb\x4d\x78\xf5\xcd\xee\xb4\x18\x77" \
"\xa8\x66\x96\xb7\xb6\x5c\xd6\x2b\x3e\x7a\x7f\x63" \
"\xe9\x6a\x71\xd7\x55\xf1\x6b\x6f\x75\xbf\xd4\xdc" \
"\x98\xea\xdd\xfe\x54\xeb\xe6\xed\x4f\xb6\x36\x59" \
"\x91\x43\xd7\x0c\x76\xce\x5d\xd3\xa6\xd6\xae\x77" \
"\x7a\xd6\xee\xdc\xdb\xd3\xdb\xd9\x35\xe2\xaa\x55" \
"\xe7\xb2\x59\xed\x46\x47\xad\xfb\xf8\x60\xc6\x3d" \
"\xf3\xc2\x4f\xed\x57\xe3\x26\x5a\x9e\xfe\xc3\xb5" \
"\x6f\x5e\x88\x08\x28\x0b\xda\x08\xca\xc0\x17\xdf" \
"\x0f\x95\xc2\x6f\xd7\xcc\xdb\xf3\xde\xf1\x3d\xe9" \
"\x5b\x9a\x76\x3f\xf8\x40\x0b\x8d\xf5\x35\x18\xe3" \
"\xd0\xda\x62\x8c\x23\x95\x4a\xf2\xfa\xbb\xdd\xa5" \
"\xb1\xf1\x78\xc5\xe7\x07\xee\x9f\xd1\x92\xc4\xa6" \
"\x1d\xe7\xdc\xbe\xdd\x2b\x50\x06\x22\x0b\xca\x40" \
"\xac\x61\xa8\x2f\x64\xe0\x58\x86\x6d\x6d\xcb\xb8" \
"\x6b\x5d\x33\x5a\x83\x52\x60\xed\x34\xd8\x39\xc7" \
"\xdf\x83\x15\xf6\x7e\x78\xaa\xe3\x87\xd3\x89\x97" \
"\x39\xb5\xb9\x7f\x0a\xec\xff\x5b\x6e\x46\xc1\x70" \
"\xcf\x10\xf5\x26\xe2\x8d\x17\xd7\x30\xa7\x16\xac" \
"\xbd\x7a\x1b\x45\x1c\xad\x2b\x9b\xd8\xb4\xae\xb9" \
"\x5d\xa4\xf0\x69\xe7\x29\xa6\xc1\x56\xa6\xc1\xa5" \
"\xf3\x21\x43\x27\xfa\x79\xb8\x2d\x4d\x7a\x79\xed" \
"\x35\xdf\x05\x60\xb4\x62\xd8\xdf\x71\x09\x9c\x43" \
"\xeb\xb9\x64\x8a\xa3\xfb\x58\x73\x64\x2d\x22\xe0" \
"\x1c\xfe\x95\xd5\x8c\x8f\x8c\x73\xcf\xfa\x25\xff" \
"\x0b\x2d\x55\x85\xad\xbb\x32\xb4\xdd\x7b\x33\xb7" \
"\x2e\xab\x43\x04\xd2\xab\x16\xa7\xcb\x55\xfb\x8a" \
"\x36\x94\xf6\x7f\x70\xbc\xde\xb7\x32\x9d\xe0\x2d" \
"\x5d\xc8\xce\x83\xa7\xd9\xb0\x5e\x70\x16\xac\x58" \
"\xac\x15\xc4\x38\xc4\x3a\xcc\xa4\xff\xdd\xaf\x45" \
"\x1e\xdb\xd2\xc2\xac\x05\x4d\x1c\x19\x00\x11\x98" \
"\x9d\x82\xdf\x7a\xc7\xea\x07\x3b\xfb\xb2\x40\xd7" \
"\x8c\x56\x7c\x75\x38\x47\xfa\xf6\x05\xac\x5e\xea" \
"\x33\xef\xa6\x39\x04\xe2\x08\x95\x23\x54\x10\x44" \
"\x8e\x40\x0b\x51\x04\xcf\xae\x5c\x4c\x5c\xe3\xd3" \
"\x73\x6e\x02\x9a\xf0\xa0\x30\x10\x92\x3d\xd6\x6f" \
"\x91\x4a\x9a\x84\x7b\x7b\x06\xf8\xf0\xd1\x2c\x99" \
"\xcf\x5a\x29\x64\x35\x8d\xf3\x7d\xa4\x0e\x72\x65" \
"\xc8\x97\x61\xe0\x22\x54\x46\x21\x1f\x43\x3e\x0b" \
"\xc5\x92\x25\x9f\x8b\x09\x0a\x15\x2e\x9e\xc9\x52" \
"\x1a\xae\x96\x88\x12\x83\xd4\xdb\x55\x78\xae\xcb" \
"\x37\x66\x02\xfa\x63\x4f\xc8\xa6\x3b\x1b\xa8\x4d" \
"\x41\xcd\xc2\x1a\x7a\x33\xe3\x2c\x58\xde\x40\xe6" \
"\x02\xf4\x0d\xc0\x48\xce\x90\xcb\x5b\x2e\xe5\x0d" \
"\xb9\xbc\x21\x28\x85\x84\xa5\x2a\xa5\xe1\x51\x9c" \
"\x72\x10\x90\xc5\xb7\x3e\xd8\xb3\x64\x1e\xea\xf7" \
"\x65\x52\x6e\x47\xba\x8b\x6c\xdb\xd8\x4c\x55\xe0" \
"\x52\x0c\x59\xaf\x81\x3f\x4f\x5a\xc4\x25\x09\x22" \
"\x87\x52\x10\x29\x21\x56\x8e\x28\x16\xf2\xb9\x00" \
"\x1d\x68\x98\x3b\x17\x6c\x0c\xc4\x90\x54\x4d\x38" \
"\x39\x00\xe0\x89\x15\xc6\xab\xf0\xfb\xd9\x12\x8f" \
"\xb7\xd5\x91\x29\xc2\x85\x22\x44\x40\xb1\x2c\x8c" \
"\x55\x84\x58\x3b\x22\xe5\x50\x1a\x4a\x63\x9a\x91" \
"\x91\x00\xeb\xcf\xc6\x4b\xcd\x9e\x29\x97\xa4\x69" \
"\xc4\xd2\x31\xa9\x63\x38\x7c\xac\xcc\x7d\xab\x1b" \
"\x19\xa9\x40\x9d\x0f\x4b\x52\x50\x67\xa1\x61\x89" \
"\xcf\x58\x51\x61\xfc\x04\xcd\x62\x68\xd0\x11\xf5" \
"\x8b\x14\xb7\x35\x25\x91\x38\x46\x82\x18\x13\x6b" \
"\xc2\xc0\x90\x2b\xc8\xfc\x81\x41\x9d\xa5\x7f\xcb" \
"\x49\x00\xdf\x58\x47\xd7\x2f\x05\x76\x3c\xba\x88" \
"\x3b\xea\x27\x6f\x9e\x07\x5a\x43\x14\x39\xb4\xf6" \
"\x88\x63\x43\x1c\x5b\x94\xf2\x50\xca\x27\x8a\x0c" \
"\x5a\xfb\x44\x51\x2d\x4a\xd5\xa0\x54\x8a\x2f\x8f" \
"\x8e\xcd\xff\xe4\xbc\xfb\xe6\xb2\x74\xab\x81\xa1" \
"\x7f\xa0\xcc\x23\x1b\x66\x0e\x85\xe7\x81\xef\x7b" \
"\x24\x93\x09\x3c\xcf\x9b\xf4\xbd\xcb\xbe\xe7\x4d" \
"\x9c\x4d\xf9\x3f\x9f\x09\x41\xdc\xd7\x53\xf9\x7e" \
"\x61\xb8\xca\x73\x4f\xb4\xfc\x67\xba\x92\xc9\x09" \
"\x73\x38\x44\x0c\xb1\xd2\x84\x51\x4c\x18\x6a\x82" \
"\x40\x11\xc5\x31\x51\xa8\xd1\x5a\x88\xb5\xa1\xef" \
"\xaf\x00\x2e\x6c\xed\xb8\x0c\xc6\x4b\xd0\x79\xa2" \
"\xc8\xdd\xcf\x8f\xe1\xc4\x21\x02\x88\x43\x44\xb0" \
"\x56\xb0\x5a\x30\xc6\x60\x8d\x60\x8d\xc1\x18\x8b" \
"\xd1\x16\x31\x32\xb1\x8b\xa5\x1a\x38\x08\x5c\xc7" \
"\x95\x85\x25\x6e\xd4\x67\xfa\x0f\x0f\x45\x7a\xf9" \
"\xba\xdf\x7d\x44\x00\x00\x00\x00\x49\x45\x4e\x44" \
"\xae\x42\x60\x82"
class PatchForm(QDialog):
def __init__(self,parent = None,name = None,modal = 0,fl = 0):
QDialog.__init__(self,parent,name,modal,fl)
self.image0 = QPixmap()
self.image0.loadFromData(image0_data,"PNG")
self.image1 = QPixmap()
self.image1.loadFromData(image1_data,"PNG")
self.image2 = QPixmap()
self.image2.loadFromData(image2_data,"PNG")
if not name:
self.setName("PatchForm")
self.setIcon(self.image0)
self.setSizeGripEnabled(1)
PatchFormLayout = QVBoxLayout(self,11,6,"PatchFormLayout")
layout6 = QHBoxLayout(None,0,6,"layout6")
self.textLabel1_2 = QLabel(self,"textLabel1_2")
layout6.addWidget(self.textLabel1_2)
self.nameLineEdit = QLineEdit(self,"nameLineEdit")
layout6.addWidget(self.nameLineEdit)
self.textLabel1 = QLabel(self,"textLabel1")
layout6.addWidget(self.textLabel1)
self.tempoSpinBox = QSpinBox(self,"tempoSpinBox")
self.tempoSpinBox.setMaxValue(160)
self.tempoSpinBox.setValue(137)
layout6.addWidget(self.tempoSpinBox)
self.saveButton = QPushButton(self,"saveButton")
self.saveButton.setPixmap(self.image1)
self.saveButton.setFlat(1)
layout6.addWidget(self.saveButton)
self.openButton = QPushButton(self,"openButton")
self.openButton.setPixmap(self.image2)
self.openButton.setFlat(1)
layout6.addWidget(self.openButton)
PatchFormLayout.addLayout(layout6)
self.groupBox1 = QGroupBox(self,"groupBox1")
self.groupBox1.setColumnLayout(0,Qt.Vertical)
self.groupBox1.layout().setSpacing(6)
self.groupBox1.layout().setMargin(11)
groupBox1Layout = QVBoxLayout(self.groupBox1.layout())
groupBox1Layout.setAlignment(Qt.AlignTop)
self.filenamesListBox = QListBox(self.groupBox1,"filenamesListBox")
groupBox1Layout.addWidget(self.filenamesListBox)
layout5 = QHBoxLayout(None,0,6,"layout5")
self.addOneButton = QPushButton(self.groupBox1,"addOneButton")
layout5.addWidget(self.addOneButton)
self.addManyButton = QPushButton(self.groupBox1,"addManyButton")
layout5.addWidget(self.addManyButton)
self.removeButton = QPushButton(self.groupBox1,"removeButton")
layout5.addWidget(self.removeButton)
groupBox1Layout.addLayout(layout5)
PatchFormLayout.addWidget(self.groupBox1)
Layout1 = QHBoxLayout(None,0,6,"Layout1")
self.buttonHelp = QPushButton(self,"buttonHelp")
self.buttonHelp.setAutoDefault(1)
Layout1.addWidget(self.buttonHelp)
Horizontal_Spacing2 = QSpacerItem(20,20,QSizePolicy.Expanding,QSizePolicy.Minimum)
Layout1.addItem(Horizontal_Spacing2)
self.buttonOk = QPushButton(self,"buttonOk")
self.buttonOk.setAutoDefault(1)
self.buttonOk.setDefault(1)
Layout1.addWidget(self.buttonOk)
self.buttonCancel = QPushButton(self,"buttonCancel")
self.buttonCancel.setAutoDefault(1)
Layout1.addWidget(self.buttonCancel)
PatchFormLayout.addLayout(Layout1)
self.languageChange()
self.resize(QSize(532,303).expandedTo(self.minimumSizeHint()))
self.clearWState(Qt.WState_Polished)
self.connect(self.buttonOk,SIGNAL("clicked()"),self.accept)
self.connect(self.buttonCancel,SIGNAL("clicked()"),self.reject)
self.connect(self.addOneButton,SIGNAL("clicked()"),self.slotAddOne)
self.connect(self.addManyButton,SIGNAL("clicked()"),self.slotAddMany)
self.connect(self.nameLineEdit,SIGNAL("textChanged(const QString&)"),self.slotNameChanged)
self.connect(self.saveButton,SIGNAL("clicked()"),self.slotSaveAs)
self.connect(self.removeButton,SIGNAL("clicked()"),self.slotRemove)
self.connect(self.saveButton,SIGNAL("clicked()"),self.slotSaveAs)
self.connect(self.openButton,SIGNAL("clicked()"),self.slotOpen)
def languageChange(self):
self.setCaption(self.__tr("New Patch"))
self.textLabel1_2.setText(self.__tr("Name"))
self.nameLineEdit.setText(self.__tr("mypatch"))
self.textLabel1.setText(self.__tr("Tempo (BPMs)"))
self.saveButton.setText(QString.null)
self.openButton.setText(QString.null)
self.groupBox1.setTitle(self.__tr("Samples"))
self.addOneButton.setText(self.__tr("add one"))
self.addManyButton.setText(self.__tr("add many"))
self.removeButton.setText(self.__tr("remove"))
self.buttonHelp.setText(self.__tr("&Help"))
self.buttonHelp.setAccel(self.__tr("F1"))
self.buttonOk.setText(self.__tr("&Save and Exit"))
self.buttonOk.setAccel(self.__tr("Alt+S"))
self.buttonCancel.setText(self.__tr("&Cancel"))
self.buttonCancel.setAccel(QString.null)
def slotAddOne(self):
print "PatchForm.slotAddOne(): Not implemented yet"
def slotAddMany(self):
print "PatchForm.slotAddMany(): Not implemented yet"
def slotNameChanged(self,a0):
print "PatchForm.slotNameChanged(const QString&): Not implemented yet"
def slotRemove(self):
print "PatchForm.slotRemove(): Not implemented yet"
def slotOpen(self):
print "PatchForm.slotOpen(): Not implemented yet"
def slotSaveAs(self):
print "PatchForm.slotSaveAs(): Not implemented yet"
def __tr(self,s,c = None):
return qApp.translate("PatchForm",s,c)
if __name__ == "__main__":
a = QApplication(sys.argv)
QObject.connect(a,SIGNAL("lastWindowClosed()"),a,SLOT("quit()"))
w = PatchForm()
a.setMainWidget(w)
w.show()
a.exec_loop()
| 50.439479
| 120
| 0.642583
| 5,065
| 27,086
| 3.424877
| 0.088845
| 0.012798
| 0.009339
| 0.006918
| 0.041275
| 0.033147
| 0.030034
| 0.030034
| 0.030034
| 0.030034
| 0
| 0.238955
| 0.125046
| 27,086
| 536
| 121
| 50.533582
| 0.493017
| 0.010485
| 0
| 0.018367
| 1
| 0.746939
| 0.685265
| 0.663171
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.006122
| null | null | 0.012245
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
723af79a7106509c4dc5f342b75100dd5fe8d070
| 62
|
py
|
Python
|
btcsignature/command_line.py
|
superarius/btcsignature
|
14d745e18585ace01cc19e13ad4a12644d1d09d5
|
[
"MIT"
] | 4
|
2018-05-19T22:34:45.000Z
|
2020-03-28T09:48:52.000Z
|
btcsignature/command_line.py
|
superarius/btcsignature
|
14d745e18585ace01cc19e13ad4a12644d1d09d5
|
[
"MIT"
] | null | null | null |
btcsignature/command_line.py
|
superarius/btcsignature
|
14d745e18585ace01cc19e13ad4a12644d1d09d5
|
[
"MIT"
] | null | null | null |
import btcsignature
def main():
return btcsignature.run()
| 15.5
| 29
| 0.741935
| 7
| 62
| 6.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 62
| 4
| 29
| 15.5
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a0e10bdaea7d3fa29e9c916ea777363779d365eb
| 31,006
|
py
|
Python
|
unicornhat/swimmer.py
|
lpabon/pi-projects
|
4c7d856da29f570fd0d6b207724d9abe5bbce1e2
|
[
"Apache-2.0"
] | null | null | null |
unicornhat/swimmer.py
|
lpabon/pi-projects
|
4c7d856da29f570fd0d6b207724d9abe5bbce1e2
|
[
"Apache-2.0"
] | null | null | null |
unicornhat/swimmer.py
|
lpabon/pi-projects
|
4c7d856da29f570fd0d6b207724d9abe5bbce1e2
|
[
"Apache-2.0"
] | null | null | null |
import unicornhat as uh
import time
FRAMES = [
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 255], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 255], [255, 255, 255], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 255], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 255, 255]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
[[[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[255, 255, 0], [255, 255, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [255, 128, 0], [255, 128, 0], [255, 128, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], [[0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255], [0, 0, 255]]],
]
for loop in range(3):
for x in FRAMES:
uh.set_pixels(x)
uh.show()
time.sleep(0.25)
| 607.960784
| 785
| 0.313617
| 7,706
| 31,006
| 1.261744
| 0.002595
| 1.258254
| 1.743906
| 2.250334
| 0.990435
| 0.990435
| 0.990435
| 0.990435
| 0.990435
| 0.990435
| 0
| 0.41417
| 0.24979
| 31,006
| 50
| 786
| 620.12
| 0.003826
| 0
| 0
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.040816
| 0
| 0.040816
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
a0ea20c132ac50dbd689ef359fd942282ccbc112
| 31,824
|
py
|
Python
|
DQM/HLTEvF/python/HLTObjectsMonitor_MUO_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/HLTEvF/python/HLTObjectsMonitor_MUO_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/HLTEvF/python/HLTObjectsMonitor_MUO_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms

# HLT muon-object monitoring configuration.
#
# Every monitored path shares the same eta/phi/dxy binnings and almost all
# of the display/plot switches; the entries differ only in their pT binning
# and in three behaviours:
#   iso        -> HEP17/HEM17 pt displays and the HEP17 plot are enabled
#   displayDXY -> the dxy distribution is shown in the primary workspace
#   diMuon     -> dz and di-muon-mass binnings/plots/displays are enabled
# The per-path PSets are therefore built by the private helper _muoPSet()
# below instead of being written out 13 times.

# --- binnings shared by every monitored path ------------------------------
# Kept as plain Python lists; each PSet receives its own freshly built
# cms.vdouble so no parameter object is shared between PSets.
_ETA_BINS = [-2.4, -2.1, -1.6, -1.2, -0.9, -0.3, -0.2,
             0.2, 0.3, 0.9, 1.2, 1.6, 2.1, 2.4]
_PHI_BINS = [-3.2, -3.0, -2.8, -2.6, -2.4, -2.2, -2.0, -1.8, -1.6, -1.4,
             -1.2, -1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6,
             0.8, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6,
             2.8, 3.0, 3.2]
# Impact-parameter binning: used for dxy everywhere and for dz on the
# di-muon paths (the two binnings are identical in the original config).
_IP_BINS = [-2.0, -1.5, -1.0, -0.8, -0.6, -0.4, -0.2, -0.1, -0.05, -0.025,
            0.0, 0.025, 0.05, 0.1, 0.2, 0.4, 0.6, 0.8, 1.0, 1.5, 2.0]
# Di-muon invariant-mass binning (fine around the Z peak).
_DIMASS_BINS = [0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 20.0, 40.0,
                60.0, 70.0, 80.0, 84.0, 86.0, 88.0, 90.0, 92.0, 94.0,
                96.0, 100.0, 120.0, 140.0, 160.0, 200.0]

# --- per-path pT binnings (fine around each path's threshold) -------------
_PT_L1MU18 = [0., 10., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24.,
              25., 30., 40., 50., 100., 150., 200., 300.]
_PT_L2MU10 = [0., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15.,
              20., 30., 40., 50., 100., 150., 200., 300.]
_PT_MU27 = [0., 10., 20., 24., 25., 26., 27., 28., 29., 30., 31., 32.,
            40., 60., 80., 100., 150., 200., 300.]
_PT_MU20 = [0., 10., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24.,
            25., 30., 40., 60., 80., 100., 150., 200., 300.]
_PT_DIMUON = [0., 5., 6., 7., 8., 9., 10., 15., 16., 17., 18., 19., 20.,
              21., 22., 23., 24., 25., 30., 40., 60., 80., 100., 150.,
              200., 300.]
_PT_MU50 = [0., 10., 20., 30., 40., 45., 46., 47., 48., 49., 50., 51.,
            52., 53., 54., 55., 60., 70., 80., 90., 100., 200., 300., 400.]


def _muoPSet(pathNAME, moduleNAME, label, xTITLE, ptBins,
             iso=False, displayDXY=False, diMuon=False):
    """Return the monitoring cms.PSet for one HLT muon path.

    pathNAME/moduleNAME -- HLT path and final filter module to monitor.
    label/xTITLE        -- histogram legend label and x-axis title.
    ptBins              -- pT bin edges (list of floats) for this path.
    iso                 -- enable HEP17/HEM17 pt displays and HEP17 plots.
    displayDXY          -- show the dxy distribution in the primary view.
    diMuon              -- enable dz and di-muon-mass binnings/plots.
    """
    return cms.PSet(
        pathNAME = cms.string(pathNAME),
        moduleNAME = cms.string(moduleNAME),
        label = cms.string(label),
        xTITLE = cms.string(xTITLE),
        etaBINNING = cms.vdouble(*_ETA_BINS),
        ptBINNING = cms.vdouble(*ptBins),
        phiBINNING = cms.vdouble(*_PHI_BINS),
        massBINNING = cms.vdouble(),  # single-object mass never plotted
        dxyBINNING = cms.vdouble(*_IP_BINS),
        # dz / di-muon mass are only meaningful for the double-muon paths;
        # the other paths keep empty binnings exactly as before.
        dzBINNING = cms.vdouble(*_IP_BINS) if diMuon else cms.vdouble(),
        dimassBINNING = cms.vdouble(*_DIMASS_BINS) if diMuon else cms.vdouble(),
        displayInPrimary_eta = cms.bool(True),
        displayInPrimary_phi = cms.bool(True),
        displayInPrimary_pt = cms.bool(True),
        displayInPrimary_mass = cms.bool(False),
        displayInPrimary_energy = cms.bool(False),
        displayInPrimary_csv = cms.bool(False),
        displayInPrimary_etaVSphi = cms.bool(True),
        displayInPrimary_pt_HEP17 = cms.bool(iso),
        displayInPrimary_pt_HEM17 = cms.bool(iso),
        displayInPrimary_MR = cms.bool(False),
        displayInPrimary_RSQ = cms.bool(False),
        displayInPrimary_dxy = cms.bool(displayDXY),
        displayInPrimary_dz = cms.bool(diMuon),
        displayInPrimary_dimass = cms.bool(diMuon),
        doPlot2D = cms.untracked.bool(True),
        doPlotETA = cms.untracked.bool(True),
        doPlotMASS = cms.untracked.bool(False),
        doPlotENERGY = cms.untracked.bool(False),
        doPlotHEP17 = cms.untracked.bool(iso),
        doPlotCSV = cms.untracked.bool(False),
        doCALO = cms.untracked.bool(False),
        doPF = cms.untracked.bool(False),
        doPlotRazor = cms.untracked.bool(False),
        doPlotDXY = cms.untracked.bool(True),
        doPlotDZ = cms.untracked.bool(diMuon),
        doPlotDiMass = cms.untracked.bool(diMuon),
    )


# One entry per monitored HLT muon path; contents are identical to the
# previous hand-written VPSet.
muoObjects = cms.VPSet(
    _muoPSet("HLT_L1SingleMu18", "hltL1fL1sMu18L1Filtered0",
             "L1 muon", "L1 muon", _PT_L1MU18),
    _muoPSet("HLT_L2Mu10", "hltL2fL1sMu22or25L1f0L2Filtered10Q",
             "L2 muon", "L2 muon", _PT_L2MU10),
    _muoPSet("HLT_Mu27", "hltL3fL1sMu22Or25L1f0L2f10QL3Filtered27Q",
             "L3 muon", "L3 muon", _PT_MU27, displayDXY=True),
    _muoPSet("HLT_IsoMu27",
             "hltL3crIsoL1sMu22Or25L1f0L2f10QL3f27QL3trkIsoFiltered0p07",
             "ISO muon", "ISO muon", _PT_MU27, iso=True, displayDXY=True),
    _muoPSet("HLT_Mu20", "hltL3fL1sMu18L1f0L2f10QL3Filtered20Q",
             "L3 muon20", "L3 muon", _PT_MU20),
    _muoPSet("HLT_IsoMu20",
             "hltL3crIsoL1sMu18L1f0L2f10QL3f20QL3trkIsoFiltered0p07",
             "ISO muon20", "ISO muon", _PT_MU20, iso=True),
    _muoPSet("HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL",
             "hltDiMuonGlb17Glb8RelTrkIsoFiltered0p4",
             "ISO muon (double-muon)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ",
             "hltDiMuonGlb17Glb8RelTrkIsoFiltered0p4DzFiltered0p2",
             "ISO muon (double-muon dz)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_Mu17_TrkIsoVVL_TkMu8_TrkIsoVVL",
             "hltDiMuonGlb17Trk8RelTrkIsoFiltered0p4",
             "ISO muon (mu-tkmu)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_Mu17_TrkIsoVVL_TkMu8_TrkIsoVVL_DZ",
             "hltDiMuonGlb17Trk8RelTrkIsoFiltered0p4DzFiltered0p2",
             "ISO muon (mu-tkmu dz)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_TkMu17_TrkIsoVVL_TkMu8_TrkIsoVVL",
             "hltDiMuonTrk17Trk8RelTrkIsoFiltered0p4",
             "ISO muon (tkmu-tkmu)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_TkMu17_TrkIsoVVL_TkMu8_TrkIsoVVL_DZ",
             "hltDiMuonTrk17Trk8RelTrkIsoFiltered0p4DzFiltered0p2",
             "ISO muon (tkmu-tkmu dz)", "ISO muon", _PT_DIMUON,
             iso=True, displayDXY=True, diMuon=True),
    _muoPSet("HLT_Mu50", "hltL3fL1sMu22Or25L1f0L2f10QL3Filtered50Q",
             "L3 muon50", "L3 muon", _PT_MU50, displayDXY=True),
)
| 62.035088
| 184
| 0.551628
| 4,775
| 31,824
| 3.625759
| 0.032251
| 0.09877
| 0.144169
| 0.117657
| 0.941142
| 0.938948
| 0.935193
| 0.923872
| 0.923872
| 0.923872
| 0
| 0.139757
| 0.257133
| 31,824
| 512
| 185
| 62.15625
| 0.592572
| 0
| 0
| 0.907843
| 0
| 0
| 0.035226
| 0.024008
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001961
| 0
| 0.001961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d2bd4e13fabc7ce8e2394a874b418a331173fc5
| 8,349
|
py
|
Python
|
post_optimization_studies/mad_analyses/Lambda_kinematics_compare/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/Lambda_kinematics_compare/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/Lambda_kinematics_compare/Output/Histos/MadAnalysis5job_0/selection_9.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_9():
    """Render and save the stacked histogram for selection 9.

    Builds a 400-bin histogram over [0, 4000] GeV of the observable
    M[a1, a2] for two samples ("signal4TeV" stacked on "signal1TeV")
    and writes PNG/PDF/EPS copies into the sibling MadAnalysis5 output
    directories.  Returns nothing; the saved files are the only effect.
    """
    # Library import (kept function-local, as in the generated
    # MadAnalysis5 scripts this file belongs to)
    import numpy
    import matplotlib
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    # Library version (recorded for provenance/debugging of the plot)
    matplotlib_version = matplotlib.__version__
    numpy_version = numpy.__version__
    # Histo binning: 400 uniform bins on [0, 4000]
    xBinning = numpy.linspace(0.0, 4000.0, 401, endpoint=True)
    # Data sequence: middle of each bin, computed from the bin edges
    # instead of the generator's 400-element literal list (identical
    # values: 5.0, 15.0, ..., 3995.0).
    xData = 0.5 * (xBinning[:-1] + xBinning[1:])
    # Weights for histos y10_M_0 / y10_M_1: every bin was zero in the
    # generated literal arrays, so they reduce to zeros(400).
    y10_M_0_weights = numpy.zeros(400)
    y10_M_1_weights = numpy.zeros(400)
    # Creating a new Canvas; GridSpec leaves the right margin free for
    # the legend placed outside the axes below.
    fig = plt.figure(figsize=(12, 6), dpi=80)
    frame = gridspec.GridSpec(1, 1, right=0.7)
    pad = fig.add_subplot(frame[0])
    # Creating a new Stack: the first histogram carries the summed
    # weights so the second sample reads as stacked on top of it.
    # NOTE: 'normed' was removed in matplotlib 3.x; 'density=False' is
    # the drop-in equivalent of the original 'normed=False'.
    pad.hist(x=xData, bins=xBinning,
             weights=y10_M_0_weights + y10_M_1_weights,
             label="$signal4TeV$", rwidth=1.0,
             color="#ce5e60", edgecolor="#ce5e60", linewidth=1,
             linestyle="solid", bottom=None, cumulative=False,
             density=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y10_M_0_weights,
             label="$signal1TeV$", rwidth=1.0,
             color="#5954d8", edgecolor="#5954d8", linewidth=1,
             linestyle="solid", bottom=None, cumulative=False,
             density=False, align="mid", orientation="vertical")
    # Axis
    plt.rc('text', usetex=False)
    plt.xlabel(r"M [ a_{1} , a_{2} ] ( GeV ) ",
               fontsize=16, color="black")
    plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",
               fontsize=16, color="black")
    # Boundary of y-axis: 10% headroom above the tallest stacked bin
    ymax = (y10_M_0_weights + y10_M_1_weights).max() * 1.1
    ymin = 0  # linear scale
    # ymin=min([x for x in (y10_M_0_weights+y10_M_1_weights) if x])/100. # log scale
    plt.gca().set_ylim(ymin, ymax)
    # Log/Linear scale for X-axis
    plt.gca().set_xscale("linear")
    # plt.gca().set_xscale("log",nonposx="clip")
    # Log/Linear scale for Y-axis
    plt.gca().set_yscale("linear")
    # plt.gca().set_yscale("log",nonposy="clip")
    # Legend, anchored outside the axes on the right
    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    # Saving the image.  BUG FIX: the PDF copy previously reused the
    # '.png' extension, so savefig wrote a PNG into the PDF directory;
    # the extension selects the output format.
    plt.savefig('../../HTML/MadAnalysis5job_0/selection_9.png')
    plt.savefig('../../PDF/MadAnalysis5job_0/selection_9.pdf')
    plt.savefig('../../DVI/MadAnalysis5job_0/selection_9.eps')
# Running!
if __name__ == '__main__':
    selection_9()
| 112.824324
| 2,715
| 0.606659
| 2,743
| 8,349
| 1.821728
| 0.194677
| 0.639984
| 0.958175
| 1.275966
| 0.419652
| 0.419652
| 0.419652
| 0.394637
| 0.38263
| 0.38263
| 0
| 0.45853
| 0.059887
| 8,349
| 73
| 2,716
| 114.369863
| 0.178112
| 0.05857
| 0
| 0.105263
| 0
| 0.026316
| 0.044904
| 0.020156
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0.105263
| 0
| 0.131579
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
19c3722100f990e77bf5e77d4ded5ee775db6fe1
| 106
|
py
|
Python
|
python-client-migration/setup.py
|
aiven/opensearch-migration-examples
|
d57a9542e208bf97b92834be459b3ba89e380720
|
[
"Apache-2.0"
] | 1
|
2022-02-18T00:12:19.000Z
|
2022-02-18T00:12:19.000Z
|
python-client-migration/setup.py
|
aiven/opensearch-migration-examples
|
d57a9542e208bf97b92834be459b3ba89e380720
|
[
"Apache-2.0"
] | 8
|
2021-11-25T14:21:00.000Z
|
2022-01-25T12:09:36.000Z
|
python-client-migration/setup.py
|
aiven/opensearch-migration-examples
|
d57a9542e208bf97b92834be459b3ba89e380720
|
[
"Apache-2.0"
] | 3
|
2021-11-29T08:39:02.000Z
|
2022-01-25T17:25:55.000Z
|
# Minimal packaging script for the python-client-migration example.
from setuptools import setup

setup(
    name="python-client-migration",
    packages=["python_client_migration"],
)
| 26.5
| 75
| 0.811321
| 13
| 106
| 6.461538
| 0.692308
| 0.285714
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066038
| 106
| 3
| 76
| 35.333333
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0.433962
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
dfb08393ceec366f8fa76aa8ca9790c2383ea229
| 134,695
|
py
|
Python
|
proliantutils/tests/ilo/test_ris.py
|
anta-nok/proliantutils
|
35c711e391b839bbb93c24880e08e4ac7554dae6
|
[
"Apache-2.0"
] | null | null | null |
proliantutils/tests/ilo/test_ris.py
|
anta-nok/proliantutils
|
35c711e391b839bbb93c24880e08e4ac7554dae6
|
[
"Apache-2.0"
] | null | null | null |
proliantutils/tests/ilo/test_ris.py
|
anta-nok/proliantutils
|
35c711e391b839bbb93c24880e08e4ac7554dae6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for RIS Module."""
import json
import ddt
import mock
from requests.packages import urllib3
from requests.packages.urllib3 import exceptions as urllib3_exceptions
import testtools
from proliantutils import exception
from proliantutils.ilo import common
from proliantutils.ilo import constants
from proliantutils.ilo import ris
from proliantutils.tests.ilo import ris_sample_outputs as ris_outputs
from proliantutils import utils
class IloRisTestCaseInitTestCase(testtools.TestCase):
    """Unit tests for RISOperations construction."""
    # Constructor must store every supplied credential/option verbatim
    # and start with an empty message-registry cache.
    @mock.patch.object(urllib3, 'disable_warnings')
    def test_init(self, disable_warning_mock):
        ris_client = ris.RISOperations(
            "x.x.x.x", "admin", "Admin", bios_password='foo',
            cacert='/somepath')
        self.assertEqual(ris_client.host, "x.x.x.x")
        self.assertEqual(ris_client.login, "admin")
        self.assertEqual(ris_client.password, "Admin")
        self.assertEqual(ris_client.bios_password, "foo")
        self.assertEqual({}, ris_client.message_registries)
        self.assertEqual(ris_client.cacert, '/somepath')
    # Without a CA cert, cacert stays None and urllib3's
    # InsecureRequestWarning is disabled exactly once.
    @mock.patch.object(urllib3, 'disable_warnings')
    def test_init_without_cacert(self, disable_warning_mock):
        ris_client = ris.RISOperations(
            "x.x.x.x", "admin", "Admin", bios_password='foo')
        self.assertEqual(ris_client.host, "x.x.x.x")
        self.assertEqual(ris_client.login, "admin")
        self.assertEqual(ris_client.password, "Admin")
        self.assertIsNone(ris_client.cacert)
        disable_warning_mock.assert_called_once_with(
            urllib3_exceptions.InsecureRequestWarning)
@ddt.ddt
class IloRisTestCase(testtools.TestCase):
    # Each test gets a fresh client; no network I/O happens at init here
    # because nothing is contacted until a REST call is made.
    def setUp(self):
        super(IloRisTestCase, self).setUp()
        self.client = ris.RISOperations("1.2.3.4", "Administrator", "Admin")
    # UEFI mode: get_http_boot_url returns the BIOS UefiShellStartupUrl.
    @mock.patch.object(ris.RISOperations, '_get_bios_setting')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_get_http_boot_url_uefi(self, _uefi_boot_mode_mock,
                                    get_bios_settings_mock):
        get_bios_settings_mock.return_value = ris_outputs.HTTP_BOOT_URL
        _uefi_boot_mode_mock.return_value = True
        result = self.client.get_http_boot_url()
        _uefi_boot_mode_mock.assert_called_once_with()
        self.assertEqual(
            'http://10.10.1.30:8081/startup.nsh', result['UefiShellStartupUrl']
        )
    # UEFI mode: set_http_boot_url patches the BIOS setting with the URL.
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_http_boot_url_uefi(self, _uefi_boot_mode_mock,
                                    change_bios_setting_mock):
        _uefi_boot_mode_mock.return_value = True
        self.client.set_http_boot_url('http://10.10.1.30:8081/startup.nsh')
        _uefi_boot_mode_mock.assert_called_once_with()
        change_bios_setting_mock.assert_called_once_with({
            "UefiShellStartupUrl": "http://10.10.1.30:8081/startup.nsh"
        })
    # Legacy BIOS mode: reading the HTTP boot URL is unsupported.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_get_http_boot_url_bios(self, _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.get_http_boot_url)
        _uefi_boot_mode_mock.assert_called_once_with()
    # Legacy BIOS mode: setting the HTTP boot URL is unsupported.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_http_boot_url_bios(self, _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.set_http_boot_url,
                          'http://10.10.1.30:8081/startup.nsh')
        _uefi_boot_mode_mock.assert_called_once_with()
    # UEFI mode: initiator IQN is PATCHed to the iSCSI settings URI.
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_iscsi_initiator_info_uefi(self, _uefi_boot_mode_mock,
                                           check_iscsi_mock, patch_mock):
        _uefi_boot_mode_mock.return_value = True
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi/Settings'
        check_iscsi_mock.return_value = iscsi_uri
        initiator_iqn = 'iqn.2011-07.com.example.server:test1'
        initiator_info = {'iSCSIInitiatorName': initiator_iqn}
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.set_iscsi_initiator_info(initiator_iqn)
        patch_mock.assert_called_once_with(iscsi_uri, None, initiator_info)
    # A non-200 PATCH status (302 here) must surface as IloError.
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_iscsi_initiator_info_failed(self, _uefi_boot_mode_mock,
                                             check_iscsi_mock, patch_mock):
        _uefi_boot_mode_mock.return_value = True
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi/Settings'
        check_iscsi_mock.return_value = iscsi_uri
        initiator_iqn = 'iqn.2011-07.com.example.server:test1'
        initiator_info = {'iSCSIInitiatorName': initiator_iqn}
        patch_mock.return_value = (302, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.assertRaises(exception.IloError,
                          self.client.set_iscsi_initiator_info,
                          initiator_iqn)
        patch_mock.assert_called_once_with(iscsi_uri, None, initiator_info)
    # Legacy BIOS mode: setting initiator info is unsupported.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_iscsi_initiator_info_bios(self, _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.set_iscsi_initiator_info,
                          'iqn.2011-07.com.example.server:test1')
        _uefi_boot_mode_mock.assert_called_once_with()
    # UEFI mode: set_iscsi_info expands (iqn, lun, ip) into the full
    # variable dict (DHCP off, boot enabled, default port 3260).
    @mock.patch.object(ris.RISOperations, '_change_iscsi_settings')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_iscsi_info_uefi(self, _uefi_boot_mode_mock,
                                 change_iscsi_settings_mock):
        _uefi_boot_mode_mock.return_value = True
        iscsi_variables = {
            'iSCSITargetName': 'iqn.2011-07.com.example.server:test1',
            'iSCSITargetInfoViaDHCP': False,
            'iSCSIBootLUN': '1',
            'iSCSIBootEnable': 'Enabled',
            'iSCSITargetIpAddress': '10.10.1.30',
            'iSCSITargetTcpPort': 3260}
        self.client.set_iscsi_info(
            'iqn.2011-07.com.example.server:test1',
            '1', '10.10.1.30')
        _uefi_boot_mode_mock.assert_called_once_with()
        change_iscsi_settings_mock.assert_called_once_with(iscsi_variables)
    # UEFI mode: unset only flips iSCSIBootEnable to 'Disabled'.
    @mock.patch.object(ris.RISOperations, '_change_iscsi_settings')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_unset_iscsi_info_uefi(self, _uefi_boot_mode_mock,
                                   change_iscsi_settings_mock):
        _uefi_boot_mode_mock.return_value = True
        iscsi_variables = {'iSCSIBootEnable': 'Disabled'}
        self.client.unset_iscsi_info()
        _uefi_boot_mode_mock.assert_called_once_with()
        change_iscsi_settings_mock.assert_called_once_with(iscsi_variables)
    # Legacy BIOS mode: unset is unsupported.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_unset_iscsi_info_bios(self, _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.unset_iscsi_info)
        _uefi_boot_mode_mock.assert_called_once_with()
    # Happy path: initiator IQN comes back from the iSCSI settings GET.
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_iscsi_initiator_info(self, check_bios_mock,
                                      get_mock):
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        iscsi_settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 iscsi_settings)
        ret = self.client.get_iscsi_initiator_info()
        self.assertEqual(ret, 'iqn.1986-03.com.hp:uefi-p89-mxq45006w5')
    # A non-200 GET status (202 here) must surface as IloError.
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_iscsi_initiator_info_failed(self, check_bios_mock,
                                             get_mock):
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi'
        iscsi_settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        get_mock.return_value = (202, ris_outputs.GET_HEADERS,
                                 iscsi_settings)
        self.assertRaises(exception.IloError,
                          self.client.get_iscsi_initiator_info)
        check_bios_mock.assert_called_once_with()
        get_mock.assert_called_once_with(iscsi_uri)
    # BIOS settings without the iSCSI resource -> not supported.
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_iscsi_initiator_info_not_found(self, check_bios_mock):
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BASE_CONFIG)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.get_iscsi_initiator_info)
        check_bios_mock.assert_called_once_with()
    # Legacy BIOS mode: set_iscsi_info is unsupported.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    def test_set_iscsi_info_bios(self, _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.set_iscsi_info,
                          'iqn.2011-07.com.example.server:test1',
                          '1', '10.10.1.30')
        _uefi_boot_mode_mock.assert_called_once_with()
    # Secure-boot URI is taken from the host's SecureBoot link; the
    # canned response reports secure boot disabled -> False.
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_secure_boot_mode(self, get_details_mock, rest_get_mock):
        host_response = ris_outputs.RESPONSE_BODY_FOR_REST_OP
        get_details_mock.return_value = json.loads(host_response)
        uri = ris_outputs.REST_GET_SECURE_BOOT['links']['self']['href']
        rest_get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                      ris_outputs.REST_GET_SECURE_BOOT)
        result = self.client.get_secure_boot_mode()
        self.assertFalse(result)
        get_details_mock.assert_called_once_with()
        rest_get_mock.assert_called_once_with(uri)
    # A non-200 GET (301) raises IloError carrying the REST failure
    # message from the response body.
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_secure_boot_mode_fail(self, get_details_mock, rest_get_mock):
        host_response = ris_outputs.RESPONSE_BODY_FOR_REST_OP
        get_details_mock.return_value = json.loads(host_response)
        uri = ris_outputs.REST_GET_SECURE_BOOT['links']['self']['href']
        rest_get_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                      ris_outputs.REST_FAILURE_OUTPUT)
        exc = self.assertRaises(exception.IloError,
                                self.client.get_secure_boot_mode)
        get_details_mock.assert_called_once_with()
        rest_get_mock.assert_called_once_with(uri)
        self.assertIn('FakeFailureMessage', str(exc))
    # Host details without the SecureBoot link -> not supported.
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_secure_boot_mode_not_supported(self, get_details_mock):
        host_response = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        del host_response['Oem']['Hp']['links']['SecureBoot']
        get_details_mock.return_value = host_response
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.get_secure_boot_mode)
        get_details_mock.assert_called_once_with()
    # Power state is read straight from the host details ('OFF' here).
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_host_power_status_ok(self, get_details_mock):
        host_response = ris_outputs.RESPONSE_BODY_FOR_REST_OP
        get_details_mock.return_value = json.loads(host_response)
        result = self.client.get_host_power_status()
        self.assertEqual(result, 'OFF')
        get_details_mock.assert_called_once_with()
    # Happy path: reset POSTs {'Action': 'Reset'} to the manager URI
    # and then waits for the iLO to come back up.
    @mock.patch.object(common, 'wait_for_ilo_after_reset')
    @mock.patch.object(ris.RISOperations, '_rest_post')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test_reset_ilo_ok(self, get_mock, post_mock, status_mock):
        uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 manager_data)
        post_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                  ris_outputs.REST_POST_RESPONSE)
        self.client.reset_ilo()
        get_mock.assert_called_once_with(uri)
        post_mock.assert_called_once_with(uri, None, {'Action': 'Reset'})
        status_mock.assert_called_once_with(self.client)
    # A non-200 POST (301) raises IloError with the REST failure message.
    @mock.patch.object(ris.RISOperations, '_rest_post')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test_reset_ilo_fail(self, get_mock, post_mock):
        uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        get_mock.return_value = (200, ris_outputs.HEADERS_FOR_REST_OP,
                                 manager_data)
        post_mock.return_value = (301, ris_outputs.HEADERS_FOR_REST_OP,
                                  ris_outputs.REST_FAILURE_OUTPUT)
        exc = self.assertRaises(exception.IloError, self.client.reset_ilo)
        get_mock.assert_called_once_with(uri)
        post_mock.assert_called_once_with(uri, None, {'Action': 'Reset'})
        self.assertIn('FakeFailureMessage', str(exc))
    # An unexpected manager resource type ('Manager.x') aborts the reset.
    @mock.patch.object(ris.RISOperations, '_get_type')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test_reset_ilo_type_mismatch(self, get_mock, type_mock):
        uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        get_mock.return_value = (200, ris_outputs.HEADERS_FOR_REST_OP,
                                 manager_data)
        type_mock.return_value = 'Manager.x'
        self.assertRaises(exception.IloError, self.client.reset_ilo)
        get_mock.assert_called_once_with(uri)
    # UEFI: resetting keys delegates to the settings helper with
    # ('ResetToDefaultKeys', True).
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_reset_secure_boot_keys(self, change_mock,
                                    _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = True
        self.client.reset_secure_boot_keys()
        _uefi_boot_mode_mock.assert_called_once_with()
        change_mock.assert_called_once_with('ResetToDefaultKeys', True)
    # Legacy BIOS: unsupported, and the settings helper is never called.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_reset_secure_boot_keys_bios(self, change_mock,
                                         _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.reset_secure_boot_keys)
        _uefi_boot_mode_mock.assert_called_once_with()
        self.assertFalse(change_mock.called)
    # UEFI: clearing keys uses ('ResetAllKeys', True).
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_clear_secure_boot_keys(self, change_mock,
                                    _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = True
        self.client.clear_secure_boot_keys()
        _uefi_boot_mode_mock.assert_called_once_with()
        change_mock.assert_called_once_with('ResetAllKeys', True)
    # Legacy BIOS: clearing keys is unsupported; helper not called.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_clear_secure_boot_keys_bios(self, change_mock,
                                         _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.clear_secure_boot_keys)
        _uefi_boot_mode_mock.assert_called_once_with()
        self.assertFalse(change_mock.called)
    # UEFI: enabling secure boot uses ('SecureBootEnable', True).
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_set_secure_boot_mode(self, change_mock,
                                  _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = True
        self.client.set_secure_boot_mode(True)
        _uefi_boot_mode_mock.assert_called_once_with()
        change_mock.assert_called_once_with('SecureBootEnable', True)
    # Legacy BIOS: setting secure boot is unsupported; helper not called.
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_change_secure_boot_settings')
    def test_set_secure_boot_mode_bios(self, change_mock,
                                       _uefi_boot_mode_mock):
        _uefi_boot_mode_mock.return_value = False
        self.assertRaises(exception.IloCommandNotSupportedInBiosError,
                          self.client.set_secure_boot_mode, True)
        _uefi_boot_mode_mock.assert_called_once_with()
        self.assertFalse(change_mock.called)
    # Product name is read from the host details payload.
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_product_name(self, get_details_mock):
        host_response = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_response
        result = self.client.get_product_name()
        self.assertEqual(result, 'ProLiant BL460c Gen9')
        get_details_mock.assert_called_once_with()
    # 'LegacyBios' BIOS value maps to the public 'LEGACY' label.
    @mock.patch.object(ris.RISOperations, '_get_bios_setting')
    def test_get_current_boot_mode(self, bios_mock):
        bios_mock.return_value = 'LegacyBios'
        result = self.client.get_current_boot_mode()
        self.assertEqual(result, 'LEGACY')
    # Pending boot mode comes from the BIOS *settings* resource ('UEFI').
    @mock.patch.object(ris.RISOperations, '_get_bios_settings_resource')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_pending_boot_mode(self, check_mock, bios_mock):
        check_mock.return_value = ('fake', 'fake',
                                   json.loads(ris_outputs.GET_BIOS_SETTINGS))
        bios_mock.return_value = ('fake', 'fake',
                                  json.loads(ris_outputs.GET_BIOS_SETTINGS))
        result = self.client.get_pending_boot_mode()
        self.assertEqual(result, 'UEFI')
    # 'legacy' request becomes the BIOS value 'LegacyBios'.
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_pending_boot_mode_legacy(self, change_mock):
        self.client.set_pending_boot_mode('legacy')
        change_mock.assert_called_once_with({'BootMode': 'LegacyBios'})
    # 'uefi' request also turns on UefiOptimizedBoot.
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_pending_boot_mode_uefi(self, change_mock):
        self.client.set_pending_boot_mode('uefi')
        expected_properties = {'BootMode': 'uefi',
                               'UefiOptimizedBoot': 'Enabled'}
        change_mock.assert_called_once_with(expected_properties)
    # Anything other than legacy/uefi is rejected up front.
    def test_set_pending_boot_mode_invalid_mode(self):
        self.assertRaises(exception.IloInvalidInputError,
                          self.client.set_pending_boot_mode, 'invalid')
    # ddt expands this into one test per (UefiClass value, expected
    # supported-boot-mode constant) pair.
    @ddt.data((0, constants.SUPPORTED_BOOT_MODE_LEGACY_BIOS_ONLY),
              (3, constants.SUPPORTED_BOOT_MODE_UEFI_ONLY),
              (2, constants.SUPPORTED_BOOT_MODE_LEGACY_BIOS_AND_UEFI))
    @ddt.unpack
    @mock.patch.object(ris.RISOperations, '_get_host_details', autospec=True)
    def test_get_supported_boot_mode(
            self, raw_boot_mode_value, expected_boot_mode_value,
            _get_host_details_mock):
        # | GIVEN |
        system_val = {'Oem': {'Hp': {'Bios':
                                     {'UefiClass': raw_boot_mode_value}}}}
        _get_host_details_mock.return_value = system_val
        # | WHEN |
        actual_val = self.client.get_supported_boot_mode()
        # | THEN |
        self.assertEqual(expected_boot_mode_value, actual_val)
    # Missing 'Bios' attribute defaults to legacy-BIOS-only.
    @mock.patch.object(ris.RISOperations, '_get_host_details', autospec=True)
    def test_get_supported_boot_mode_returns_legacy_bios_if_bios_atrrib_absent(
            self, _get_host_details_mock):
        # | GIVEN |
        system_val = {'Oem': {'Hp': {'blahblah': 1234}}}
        _get_host_details_mock.return_value = system_val
        # | WHEN |
        actual_val = self.client.get_supported_boot_mode()
        # | THEN |
        self.assertEqual(constants.SUPPORTED_BOOT_MODE_LEGACY_BIOS_ONLY,
                         actual_val)
    # Happy path: the account matching the client's login is found in
    # the Accounts collection and its password is PATCHed.
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test_reset_ilo_credential(self, collection_mock, patch_mock):
        uri = '/rest/v1/AccountService/Accounts/1'
        collection_output = json.loads(ris_outputs.COLLECTIONS_SAMPLE)
        item = collection_output['Items'][0]
        collection_mock.return_value = [(200, None, item, uri)]
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.reset_ilo_credential('fake-password')
        patch_mock.assert_called_once_with(uri, None,
                                           {'Password': 'fake-password'})
    # A non-200 PATCH (301) raises IloError.
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test_reset_ilo_credential_fail(self, collection_mock, patch_mock):
        uri = '/rest/v1/AccountService/Accounts/1'
        collection_output = json.loads(ris_outputs.COLLECTIONS_SAMPLE)
        item = collection_output['Items'][0]
        collection_mock.return_value = [(200, None, item, uri)]
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.assertRaises(exception.IloError,
                          self.client.reset_ilo_credential,
                          'fake-password')
        patch_mock.assert_called_once_with(uri, None,
                                           {'Password': 'fake-password'})
    # A client whose login ('Admin') matches no collection item raises
    # IloError before any PATCH is attempted.
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test_reset_ilo_credential_no_account(self, collection_mock):
        uri = '/rest/v1/AccountService/Accounts/1'
        self.client = ris.RISOperations("1.2.3.4", "Admin", "Admin")
        collection_output = json.loads(ris_outputs.COLLECTIONS_SAMPLE)
        item = collection_output['Items'][0]
        collection_mock.return_value = [(200, None, item, uri)]
        self.assertRaises(exception.IloError,
                          self.client.reset_ilo_credential,
                          'fake-password')
    # Full reset flow: fetch BIOS settings, read BaseConfigs, and PATCH
    # the factory-default config back to the settings URI.  Every mocked
    # helper is asserted to be called exactly once with the expected args.
    @mock.patch.object(ris.RISOperations, '_validate_if_patch_supported')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_bios_hash_password')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_operation_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_settings_resource')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_reset_bios_to_default(self, check_mock, bios_mock, op_mock,
                                   get_mock, passwd_mock, patch_mock,
                                   validate_mock):
        settings_uri = '/rest/v1/systems/1/bios/Settings'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        base_config = json.loads(ris_outputs.GET_BASE_CONFIG)
        default_config = base_config['BaseConfigs'][0]['default']
        check_mock.return_value = (ris_outputs.GET_HEADERS, 'fake',
                                   json.loads(ris_outputs.GET_BIOS_SETTINGS))
        # PATCH not allowed on the main resource -> settings resource used.
        op_mock.return_value = False
        passwd_mock.return_value = {}
        get_mock.return_value = (200, 'fake', base_config)
        bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                  settings_uri, {})
        patch_mock.return_value = (200, 'fake', 'fake')
        self.client.reset_bios_to_default()
        check_mock.assert_called_once_with()
        bios_mock.assert_called_once_with(settings)
        op_mock.assert_called_once_with(ris_outputs.GET_HEADERS, 'PATCH')
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/BaseConfigs')
        passwd_mock.assert_called_once_with(None)
        patch_mock.assert_called_once_with(settings_uri, {}, default_config)
        validate_mock.assert_called_once_with(ris_outputs.GET_HEADERS,
                                              settings_uri)
    # Aggregation test: every capability source is mocked and the
    # combined dict returned by get_server_capabilities must match
    # expected_caps key for key.
    @mock.patch.object(ris.RISOperations, '_is_raid_supported')
    @mock.patch.object(ris.RISOperations, '_get_logical_raid_levels')
    @mock.patch.object(ris.RISOperations, '_get_drive_type_and_speed')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_setting')
    @mock.patch.object(ris.RISOperations, '_get_nvdimm_n_status')
    @mock.patch.object(ris.RISOperations,
                       '_get_cpu_virtualization')
    @mock.patch.object(ris.RISOperations, '_get_tpm_capability')
    @mock.patch.object(ris.RISOperations,
                       '_get_number_of_gpu_devices_connected')
    @mock.patch.object(ris.RISOperations, 'get_supported_boot_mode')
    @mock.patch.object(ris.RISOperations, 'get_secure_boot_mode')
    @mock.patch.object(ris.RISOperations, '_get_ilo_firmware_version')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_server_capabilities(self, get_details_mock, ilo_firm_mock,
                                     secure_mock, boot_mode_mock, gpu_mock,
                                     tpm_mock, cpu_vt_mock, nvdimm_n_mock,
                                     bios_sriov_mock, iscsi_boot_mock,
                                     drive_mock, raid_mock, raid_support_mock):
        host_details = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_details
        ilo_firm_mock.return_value = {'ilo_firmware_version': 'iLO 4 v2.20'}
        gpu_mock.return_value = {'pci_gpu_devices': 2}
        boot_mode_mock.return_value = (
            constants.SUPPORTED_BOOT_MODE_UEFI_ONLY)
        cpu_vt_mock.return_value = True
        secure_mock.return_value = False
        nvdimm_n_mock.return_value = True
        tpm_mock.return_value = True
        bios_sriov_mock.return_value = 'Disabled'
        iscsi_boot_mock.return_value = '/rest/v1/systems/1/bios/iScsi'
        drive_mock.return_value = {'has_rotational': True,
                                   'rotational_drive_4800_rpm': True}
        raid_mock.return_value = {'logical_raid_volume_0': 'true'}
        raid_support_mock.return_value = True
        expected_caps = {'secure_boot': 'true',
                         'ilo_firmware_version': 'iLO 4 v2.20',
                         'rom_firmware_version': u'I36 v1.40 (01/28/2015)',
                         'server_model': u'ProLiant BL460c Gen9',
                         'pci_gpu_devices': 2,
                         'trusted_boot': 'true',
                         'cpu_vt': 'true',
                         'nvdimm_n': 'true',
                         'boot_mode_bios': 'false',
                         'boot_mode_uefi': 'true',
                         'iscsi_boot': 'true',
                         'has_rotational': True,
                         'rotational_drive_4800_rpm': True,
                         'logical_raid_volume_0': 'true',
                         'hardware_supports_raid': 'true'}
        capabilities = self.client.get_server_capabilities()
        self.assertEqual(expected_caps, capabilities)
    @mock.patch.object(ris.RISOperations, '_is_raid_supported')
    @mock.patch.object(ris.RISOperations, '_get_logical_raid_levels')
    @mock.patch.object(ris.RISOperations, '_get_drive_type_and_speed')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_setting')
    @mock.patch.object(ris.RISOperations, '_get_nvdimm_n_status')
    @mock.patch.object(ris.RISOperations,
                       '_get_cpu_virtualization')
    @mock.patch.object(ris.RISOperations, '_get_tpm_capability')
    @mock.patch.object(ris.RISOperations,
                       '_get_number_of_gpu_devices_connected')
    @mock.patch.object(ris.RISOperations, 'get_supported_boot_mode')
    @mock.patch.object(ris.RISOperations, 'get_secure_boot_mode')
    @mock.patch.object(ris.RISOperations, '_get_ilo_firmware_version')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_server_capabilities_tp_absent(
            self, get_details_mock, ilo_firm_mock, secure_mock, boot_mode_mock,
            gpu_mock, tpm_mock, cpu_vt_mock, nvdimm_n_mock, bios_sriov_mock,
            iscsi_mock, drive_mock, raid_mock, raid_support_mock):
        """Unsupported features (TPM, iSCSI, RAID) are omitted from caps."""
        host_details = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_details
        ilo_firm_mock.return_value = {'ilo_firmware_version': 'iLO 4 v2.20'}
        gpu_mock.return_value = {'pci_gpu_devices': 2}
        boot_mode_mock.return_value = (
            constants.SUPPORTED_BOOT_MODE_LEGACY_BIOS_AND_UEFI)
        secure_mock.return_value = False
        nvdimm_n_mock.return_value = True
        tpm_mock.return_value = False
        cpu_vt_mock.return_value = True
        bios_sriov_mock.return_value = 'Enabled'
        # iSCSI support probe fails -> no 'iscsi_boot' key expected below.
        iscsi_mock.side_effect = exception.IloCommandNotSupportedError('error')
        drive_mock.return_value = {'has_rotational': True,
                                   'rotational_drive_4800_rpm': True}
        raid_mock.return_value = {'logical_raid_volume_0': 'true'}
        raid_support_mock.return_value = False
        # No 'trusted_boot' (TPM False) and no 'hardware_supports_raid'.
        expected_caps = {'secure_boot': 'true',
                         'ilo_firmware_version': 'iLO 4 v2.20',
                         'rom_firmware_version': u'I36 v1.40 (01/28/2015)',
                         'server_model': u'ProLiant BL460c Gen9',
                         'pci_gpu_devices': 2,
                         'cpu_vt': 'true',
                         'nvdimm_n': 'true',
                         'sriov_enabled': 'true',
                         'boot_mode_bios': 'true',
                         'boot_mode_uefi': 'true',
                         'has_rotational': True,
                         'rotational_drive_4800_rpm': True,
                         'logical_raid_volume_0': 'true'}
        capabilities = self.client.get_server_capabilities()
        self.assertEqual(expected_caps, capabilities)
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test_get_ilo_firmware_version_as_major_minor(
self, get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS)
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
ilo_firm = self.client.get_ilo_firmware_version_as_major_minor()
expected_ilo_firm = "2.04"
self.assertEqual(expected_ilo_firm, ilo_firm)
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test_get_ilo_firmware_version_as_major_minor_suggested_min(
self, get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS_EQ_SUGGESTED)
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
ilo_firm = self.client.get_ilo_firmware_version_as_major_minor()
expected_ilo_firm = "2.30"
self.assertEqual(expected_ilo_firm, ilo_firm)
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test_get_ilo_firmware_version_as_major_minor_gt_suggested_min(
self, get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS_GT_SUGGESTED)
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
ilo_firm = self.client.get_ilo_firmware_version_as_major_minor()
expected_ilo_firm = "2.54"
self.assertEqual(expected_ilo_firm, ilo_firm)
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test_get_ilo_firmware_version_as_major_minor_no_firmware(
self, get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS_NO_FIRMWARE)
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
ilo_firm = self.client.get_ilo_firmware_version_as_major_minor()
expected_ilo_firm = None
self.assertEqual(expected_ilo_firm, ilo_firm)
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test__get_ilo_firmware_version(self, get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS)
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
ilo_firm = self.client._get_ilo_firmware_version()
expected_ilo_firm = {'ilo_firmware_version': 'iLO 4 v2.20'}
self.assertIn('ilo_firmware_version', ilo_firm)
self.assertEqual(expected_ilo_firm, ilo_firm)
    @mock.patch.object(ris.RISOperations, '_rest_post')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    def test_activate_license(self, get_ilo_details_mock, post_mock):
        """activate_license POSTs the key to the LicenseService URI."""
        ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        uri = '/rest/v1/Managers/1'
        license_uri = "/rest/v1/Managers/1/LicenseService"
        get_ilo_details_mock.return_value = (ilo_details, uri)
        post_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                  ris_outputs.REST_POST_RESPONSE)
        self.client.activate_license('testkey')
        get_ilo_details_mock.assert_called_once_with()
        post_mock.assert_called_once_with(license_uri, None,
                                          {'LicenseKey': 'testkey'})
    @mock.patch.object(ris.RISOperations, '_rest_post')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    def test_activate_license_IloError(self, get_ilo_details_mock, post_mock):
        """A 500 response to the license POST surfaces as IloError."""
        ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        uri = '/rest/v1/Managers/1'
        license_uri = "/rest/v1/Managers/1/LicenseService"
        get_ilo_details_mock.return_value = (ilo_details, uri)
        post_mock.return_value = (500, ris_outputs.GET_HEADERS,
                                  ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError, self.client.activate_license,
                          'testkey')
        get_ilo_details_mock.assert_called_once_with()
        post_mock.assert_called_once_with(license_uri, None,
                                          {'LicenseKey': 'testkey'})
@mock.patch.object(ris.RISOperations, '_get_ilo_details')
def test_activate_license_IloCommandNotSupported(self,
get_ilo_details_mock):
ilo_details = json.loads(ris_outputs.GET_MANAGER_DETAILS)
del ilo_details['Oem']['Hp']['links']['LicenseService']
uri = '/rest/v1/Managers/1'
get_ilo_details_mock.return_value = (ilo_details, uri)
self.assertRaises(exception.IloCommandNotSupportedError,
self.client.activate_license, 'testkey')
get_ilo_details_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_vm_device_status')
def test_get_vm_status_floppy_empty(self, get_vm_device_status_mock):
floppy_resp = json.loads(ris_outputs.RESP_VM_STATUS_FLOPPY_EMPTY)
device_uri = floppy_resp["links"]["self"]["href"]
get_vm_device_status_mock.return_value = (floppy_resp, device_uri)
exp_result = json.loads(ris_outputs.GET_VM_STATUS_FLOPPY_EMPTY)
result = self.client.get_vm_status('FLOPPY')
self.assertEqual(result, exp_result)
get_vm_device_status_mock.assert_called_once_with('FLOPPY')
@mock.patch.object(ris.RISOperations, '_get_vm_device_status')
def test_get_vm_status_floppy_inserted(self, get_vm_device_status_mock):
floppy_resp = json.loads(ris_outputs.RESP_VM_STATUS_FLOPPY_INSERTED)
device_uri = floppy_resp["links"]["self"]["href"]
get_vm_device_status_mock.return_value = (floppy_resp, device_uri)
exp_result = json.loads(ris_outputs.GET_VM_STATUS_FLOPPY_INSERTED)
result = self.client.get_vm_status('FLOPPY')
self.assertEqual(result, exp_result)
get_vm_device_status_mock.assert_called_once_with('FLOPPY')
@mock.patch.object(ris.RISOperations, '_get_vm_device_status')
def test_get_vm_status_cdrom_empty(self, get_vm_device_status_mock):
cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_EMPTY)
device_uri = cdrom_resp["links"]["self"]["href"]
get_vm_device_status_mock.return_value = (cdrom_resp, device_uri)
exp_result = json.loads(ris_outputs.GET_VM_STATUS_CDROM_EMPTY)
result = self.client.get_vm_status('CDROM')
self.assertEqual(result, exp_result)
get_vm_device_status_mock.assert_called_once_with('CDROM')
@mock.patch.object(ris.RISOperations, '_get_vm_device_status')
def test_get_vm_status_cdrom_inserted(self, get_vm_device_status_mock):
cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
device_uri = cdrom_resp["links"]["self"]["href"]
get_vm_device_status_mock.return_value = (cdrom_resp, device_uri)
exp_result = json.loads(ris_outputs.GET_VM_STATUS_CDROM_INSERTED)
result = self.client.get_vm_status('CDROM')
self.assertEqual(result, exp_result)
get_vm_device_status_mock.assert_called_once_with('CDROM')
@mock.patch.object(ris.RISOperations, '_rest_patch')
def test_set_vm_status_cdrom_connect(self, patch_mock):
self.client.set_vm_status('CDROM', boot_option='CONNECT')
self.assertFalse(patch_mock.called)
def test_set_vm_status_cdrom_invalid_arg(self):
self.assertRaises(exception.IloInvalidInputError,
self.client.set_vm_status,
device='CDROM',
boot_option='FOO')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_set_vm_status_cdrom(self, get_vm_device_mock, patch_mock):
        """BOOT_ONCE PATCHes the virtual-media device with the CDROM payload."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = json.loads(ris_outputs.PATCH_VM_CDROM)
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.set_vm_status(device='CDROM', boot_option='BOOT_ONCE')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_set_vm_status_cdrom_fail(self, get_vm_device_mock, patch_mock):
        """A non-2xx PATCH while setting VM status raises IloError."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = json.loads(ris_outputs.PATCH_VM_CDROM)
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError,
                          self.client.set_vm_status,
                          device='CDROM', boot_option='BOOT_ONCE')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_insert_virtual_media(self, get_vm_device_mock, patch_mock):
        """insert_virtual_media PATCHes the image URL onto the device."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_EMPTY)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = {'Image': 'http://1.1.1.1/cdrom.iso'}
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.insert_virtual_media('http://1.1.1.1/cdrom.iso',
                                         device='CDROM')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, 'eject_virtual_media')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_insert_virtual_media_media_attached(self,
                                                 get_vm_device_mock,
                                                 eject_virtual_media_mock,
                                                 patch_mock):
        """Inserting over already-attached media ejects it first."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = {'Image': 'http://1.1.1.1/cdrom.iso'}
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.insert_virtual_media('http://1.1.1.1/cdrom.iso',
                                         device='CDROM')
        get_vm_device_mock.assert_called_once_with('CDROM')
        eject_virtual_media_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_insert_virtual_media_fail(self, get_vm_device_mock, patch_mock):
        """A failed insert PATCH raises IloError."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_EMPTY)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = {'Image': 'http://1.1.1.1/cdrom.iso'}
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError,
                          self.client.insert_virtual_media,
                          'http://1.1.1.1/cdrom.iso', device='CDROM')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_eject_virtual_media(self, get_vm_device_mock, patch_mock):
        """eject_virtual_media PATCHes Image=None onto the device."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = {'Image': None}
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client.eject_virtual_media(device='CDROM')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
@mock.patch.object(ris.RISOperations, '_rest_patch')
@mock.patch.object(ris.RISOperations, '_get_vm_device_status')
def test_eject_virtual_media_cdrom_empty(
self, get_vm_device_mock, patch_mock):
cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_EMPTY)
device_uri = cdrom_resp["links"]["self"]["href"]
get_vm_device_mock.return_value = (cdrom_resp, device_uri)
self.client.eject_virtual_media(device='CDROM')
get_vm_device_mock.assert_called_once_with('CDROM')
self.assertFalse(patch_mock.called)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_vm_device_status')
    def test_eject_virtual_media_fail(self, get_vm_device_mock, patch_mock):
        """A failed eject PATCH raises IloError."""
        vm_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        cdrom_resp = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_INSERTED)
        device_uri = cdrom_resp["links"]["self"]["href"]
        get_vm_device_mock.return_value = (cdrom_resp, device_uri)
        vm_patch = {'Image': None}
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError,
                          self.client.eject_virtual_media, device='CDROM')
        get_vm_device_mock.assert_called_once_with('CDROM')
        patch_mock.assert_called_once_with(vm_uri, None, vm_patch)
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_one_time_boot_not_set(self, get_host_details_mock):
system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
get_host_details_mock.return_value = system_data
ret = self.client.get_one_time_boot()
get_host_details_mock.assert_called_once_with()
self.assertEqual(ret, 'Normal')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_one_time_boot_cdrom(self, get_host_details_mock):
system_data = json.loads(ris_outputs.RESP_BODY_FOR_SYSTEM_WITH_CDROM)
get_host_details_mock.return_value = system_data
ret = self.client.get_one_time_boot()
get_host_details_mock.assert_called_once_with()
self.assertEqual(ret, 'CDROM')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_one_time_boot_UefiShell(self, get_host_details_mock):
system_data = json.loads(ris_outputs.RESP_BODY_WITH_UEFI_SHELL)
get_host_details_mock.return_value = system_data
ret = self.client.get_one_time_boot()
get_host_details_mock.assert_called_once_with()
self.assertEqual(ret, 'UefiShell')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_one_time_boot_exc(self, get_host_details_mock):
system_data = json.loads(ris_outputs.RESP_BODY_FOR_SYSTEM_WITHOUT_BOOT)
get_host_details_mock.return_value = system_data
self.assertRaises(exception.IloError,
self.client.get_one_time_boot)
get_host_details_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_update_persistent_boot')
def test_set_one_time_boot_cdrom(self, update_persistent_boot_mock):
self.client.set_one_time_boot('cdrom')
update_persistent_boot_mock.assert_called_once_with(
['cdrom'], persistent=False)
@mock.patch.object(ris.RISOperations, '_update_persistent_boot')
def test_set_one_time_boot_iscsi(self, update_persistent_boot_mock):
self.client.set_one_time_boot('ISCSI')
update_persistent_boot_mock.assert_called_once_with(
['ISCSI'], persistent=False)
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_persistent_boot_device_cdrom(self, get_host_details_mock):
system_data = json.loads(ris_outputs.SYSTEM_WITH_CDROM_CONT)
get_host_details_mock.return_value = system_data
ret = self.client.get_persistent_boot_device()
get_host_details_mock.assert_called_once_with()
self.assertEqual(ret, 'CDROM')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_persistent_boot_device_UefiShell(self, get_host_details_mock):
system_data = json.loads(ris_outputs.SYSTEM_WITH_UEFISHELL_CONT)
get_host_details_mock.return_value = system_data
ret = self.client.get_persistent_boot_device()
get_host_details_mock.assert_called_once_with()
self.assertEqual(ret, 'UefiShell')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_persistent_boot_device_exc(self, get_host_details_mock):
system_data = json.loads(ris_outputs.RESP_BODY_FOR_SYSTEM_WITHOUT_BOOT)
get_host_details_mock.return_value = system_data
self.assertRaises(exception.IloError,
self.client.get_persistent_boot_device)
get_host_details_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_persistent_boot_device_bios(self, get_host_details_mock,
                                             _uefi_boot_mode_mock):
        """In legacy BIOS boot mode the persistent boot device is None."""
        system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_host_details_mock.return_value = system_data
        _uefi_boot_mode_mock.return_value = False
        ret = self.client.get_persistent_boot_device()
        get_host_details_mock.assert_called_once_with()
        self.assertIsNone(ret)
    @mock.patch.object(ris.RISOperations, '_get_persistent_boot_devices')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def _test_get_persistent_boot_device_uefi(self, get_host_details_mock,
                                              _uefi_boot_mode_mock,
                                              boot_devices_mock,
                                              boot_devices,
                                              boot_sources,
                                              exp_ret_value=None):
        """Helper: drive get_persistent_boot_device() in UEFI mode.

        The first three parameters are injected by the decorators; callers
        supply ``boot_devices``/``boot_sources`` fixtures (as keyword args)
        and the expected return value.
        """
        system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_host_details_mock.return_value = system_data
        _uefi_boot_mode_mock.return_value = True
        # _get_persistent_boot_devices returns (sources, devices).
        boot_devices_mock.return_value = boot_sources, boot_devices
        ret = self.client.get_persistent_boot_device()
        get_host_details_mock.assert_called_once_with()
        _uefi_boot_mode_mock.assert_called_once_with()
        boot_devices_mock.assert_called_once_with()
        self.assertEqual(ret, exp_ret_value)
def test_get_persistent_boot_device_uefi_pxe(self):
boot_devs = ris_outputs.UEFI_BOOT_DEVICE_ORDER_PXE
boot_srcs = json.loads(ris_outputs.UEFI_BootSources)
self._test_get_persistent_boot_device_uefi(boot_devices=boot_devs,
boot_sources=boot_srcs,
exp_ret_value='NETWORK')
@mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test_get_persistent_boot_device_uefi_cd(self, get_host_details_mock,
_uefi_boot_mode_mock):
boot_devs = ris_outputs.UEFI_BOOT_DEVICE_ORDER_CD
boot_srcs = json.loads(ris_outputs.UEFI_BootSources)
self._test_get_persistent_boot_device_uefi(boot_devices=boot_devs,
boot_sources=boot_srcs,
exp_ret_value='CDROM')
def test_get_persistent_boot_device_uefi_hdd(self):
boot_devs = ris_outputs.UEFI_BOOT_DEVICE_ORDER_HDD
boot_srcs = json.loads(ris_outputs.UEFI_BootSources)
self._test_get_persistent_boot_device_uefi(boot_devices=boot_devs,
boot_sources=boot_srcs,
exp_ret_value='HDD')
def test_get_persistent_boot_device_uefi_none(self):
boot_devs = ris_outputs.UEFI_BOOT_DEVICE_ORDER_ERR
boot_srcs = json.loads(ris_outputs.UEFI_BootSources)
self._test_get_persistent_boot_device_uefi(boot_devices=boot_devs,
boot_sources=boot_srcs,
exp_ret_value=None)
    @mock.patch.object(ris.RISOperations, '_get_persistent_boot_devices')
    @mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_persistent_boot_device_uefi_exp(self, get_host_details_mock,
                                                 _uefi_boot_mode_mock,
                                                 boot_devices_mock):
        """Malformed UEFI BootSources data raises IloError."""
        system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_host_details_mock.return_value = system_data
        _uefi_boot_mode_mock.return_value = True
        devices = ris_outputs.UEFI_BOOT_DEVICE_ORDER_HDD
        sources = json.loads(ris_outputs.UEFI_BOOT_SOURCES_ERR)
        boot_devices_mock.return_value = sources, devices
        self.assertRaises(exception.IloError,
                          self.client.get_persistent_boot_device)
        get_host_details_mock.assert_called_once_with()
        _uefi_boot_mode_mock.assert_called_once_with()
        boot_devices_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_update_persistent_boot')
def test_update_persistent_boot_cdrom(self, update_persistent_boot_mock):
self.client.update_persistent_boot(['cdrom'])
update_persistent_boot_mock.assert_called_once_with(
['cdrom'], persistent=True)
@mock.patch.object(ris.RISOperations, '_update_persistent_boot')
def test_update_persistent_boot_iscsi(self, update_persistent_boot_mock):
self.client.update_persistent_boot(['ISCSI'])
update_persistent_boot_mock.assert_called_once_with(
['ISCSI'], persistent=True)
@mock.patch.object(ris.RISOperations, '_update_persistent_boot')
def test_update_persistent_boot_exc(self, update_persistent_boot_mock):
self.assertRaises(exception.IloError,
self.client.update_persistent_boot, ['fake'])
self.assertFalse(update_persistent_boot_mock.called)
def test_update_firmware_throws_error_for_invalid_component(self):
# | WHEN | & | THEN |
self.assertRaises(exception.InvalidInputError,
self.client.update_firmware,
'fw_file_url',
'invalid_component')
    @mock.patch.object(ris.RISOperations,
                       '_get_firmware_update_service_resource',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, '_rest_post', autospec=True)
    @mock.patch.object(ris.common, 'wait_for_ris_firmware_update_to_complete',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, 'get_firmware_update_progress',
                       autospec=True)
    def test_update_firmware(
            self, get_firmware_update_progress_mock,
            wait_for_ris_firmware_update_to_complete_mock, _rest_post_mock,
            _get_firmware_update_service_resource_mock):
        """Happy path: firmware update POSTs InstallFromURI and completes."""
        # | GIVEN |
        _rest_post_mock.return_value = 200, 'some-headers', 'response'
        get_firmware_update_progress_mock.return_value = 'COMPLETED', 100
        # | WHEN |
        self.client.update_firmware('fw_file_url', 'ilo')
        # | THEN |
        # autospec=True makes the mocks receive ``self`` (the client) too,
        # hence self.client appears in every call assertion.
        _get_firmware_update_service_resource_mock.assert_called_once_with(
            self.client)
        _rest_post_mock.assert_called_once_with(
            self.client, mock.ANY, None, {'Action': 'InstallFromURI',
                                          'FirmwareURI': 'fw_file_url',
                                          })
        wait_for_ris_firmware_update_to_complete_mock.assert_called_once_with(
            self.client)
        get_firmware_update_progress_mock.assert_called_once_with(
            self.client)
    @mock.patch.object(
        ris.RISOperations, '_get_firmware_update_service_resource',
        autospec=True)
    @mock.patch.object(ris.RISOperations, '_rest_post', autospec=True)
    def test_update_firmware_throws_if_post_operation_fails(
            self, _rest_post_mock, _get_firmware_update_service_resource_mock):
        """A 500 from the firmware-update POST raises IloError."""
        # | GIVEN |
        _rest_post_mock.return_value = 500, 'some-headers', 'response'
        # | WHEN | & | THEN |
        self.assertRaises(exception.IloError,
                          self.client.update_firmware,
                          'fw_file_url',
                          'cpld')
    @mock.patch.object(ris.RISOperations,
                       '_get_firmware_update_service_resource',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, '_rest_post', autospec=True)
    @mock.patch.object(ris.common, 'wait_for_ris_firmware_update_to_complete',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, 'get_firmware_update_progress',
                       autospec=True)
    def test_update_firmware_throws_if_error_occurs_in_update(
            self, get_firmware_update_progress_mock,
            wait_for_ris_firmware_update_to_complete_mock, _rest_post_mock,
            _get_firmware_update_service_resource_mock):
        """An 'ERROR' progress state after the update raises IloError."""
        # | GIVEN |
        _rest_post_mock.return_value = 200, 'some-headers', 'response'
        get_firmware_update_progress_mock.return_value = 'ERROR', 0
        # | WHEN | & | THEN |
        self.assertRaises(exception.IloError,
                          self.client.update_firmware,
                          'fw_file_url',
                          'ilo')
    @mock.patch.object(ris.RISOperations,
                       '_get_firmware_update_service_resource',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, '_rest_get', autospec=True)
    def test_get_firmware_update_progress(
            self, _rest_get_mock,
            _get_firmware_update_service_resource_mock):
        """State and percent are extracted from the progress GET response."""
        # | GIVEN |
        _rest_get_mock.return_value = (200, 'some-headers',
                                       {'State': 'COMPLETED',
                                        'ProgressPercent': 100})
        # | WHEN |
        state, percent = self.client.get_firmware_update_progress()
        # | THEN |
        # autospec=True means the mocks receive ``self`` (the client) too.
        _get_firmware_update_service_resource_mock.assert_called_once_with(
            self.client)
        _rest_get_mock.assert_called_once_with(self.client, mock.ANY)
        self.assertTupleEqual((state, percent), ('COMPLETED', 100))
    @mock.patch.object(ris.RISOperations,
                       '_get_firmware_update_service_resource',
                       autospec=True)
    @mock.patch.object(ris.RISOperations, '_rest_get', autospec=True)
    def test_get_firmware_update_progress_throws_if_get_operation_fails(
            self, _rest_get_mock, _get_firmware_update_service_resource_mock):
        """A 500 from the progress GET raises IloError."""
        # | GIVEN |
        _rest_get_mock.return_value = 500, 'some-headers', 'response'
        # | WHEN | & | THEN |
        self.assertRaises(exception.IloError,
                          self.client.get_firmware_update_progress)
@mock.patch.object(ris.RISOperations, 'get_host_power_status')
def test_set_host_power_no_change(self, host_power_status_mock):
host_power_status_mock.return_value = 'ON'
self.client.set_host_power('on')
self.assertTrue(host_power_status_mock.called)
@mock.patch.object(ris.RISOperations, 'get_host_power_status')
def test_set_host_power_exc(self, host_power_status_mock):
self.assertRaises(exception.IloInvalidInputError,
self.client.set_host_power, 'invalid')
    @mock.patch.object(ris.RISOperations, '_perform_power_op')
    @mock.patch.object(ris.RISOperations, 'get_host_power_status')
    @mock.patch.object(ris.RISOperations, 'get_product_name')
    @mock.patch.object(ris.RISOperations, '_retry_until_powered_on')
    def test_set_host_power_off_for_blade_servers(self, retry_mock,
                                                  product_mock,
                                                  host_power_status_mock,
                                                  perform_power_op_mock):
        """Powering off a blade issues ForceOff without the retry loop."""
        host_power_status_mock.return_value = 'ON'
        product_mock.return_value = 'ProLiant BL460'
        self.client.set_host_power('off')
        host_power_status_mock.assert_called_once_with()
        perform_power_op_mock.assert_called_once_with('ForceOff')
        self.assertFalse(retry_mock.called)
    @mock.patch.object(ris.RISOperations, '_perform_power_op')
    @mock.patch.object(ris.RISOperations, 'get_host_power_status')
    @mock.patch.object(ris.RISOperations, 'get_product_name')
    @mock.patch.object(ris.RISOperations, '_retry_until_powered_on')
    def test_set_host_power_on_for_blade_servers(self, retry_mock,
                                                 product_mock,
                                                 host_power_status_mock,
                                                 perform_power_op_mock):
        """Powering on a blade uses the retry loop, not a direct power op."""
        host_power_status_mock.return_value = 'OFF'
        product_mock.return_value = 'ProLiant BL460'
        self.client.set_host_power('On')
        host_power_status_mock.assert_called_once_with()
        self.assertTrue(product_mock.called)
        self.assertFalse(perform_power_op_mock.called)
        self.assertTrue(retry_mock.called)
    @mock.patch.object(ris.RISOperations, '_perform_power_op')
    @mock.patch.object(ris.RISOperations, 'get_host_power_status')
    @mock.patch.object(ris.RISOperations, '_retry_until_powered_on')
    def test_set_host_power_off_for_non_blade_servers(
            self, retry_mock, host_power_status_mock, perform_power_op_mock):
        """Powering off issues ForceOff directly; no retry loop."""
        host_power_status_mock.return_value = 'ON'
        self.client.set_host_power('off')
        host_power_status_mock.assert_called_once_with()
        perform_power_op_mock.assert_called_once_with('ForceOff')
        self.assertFalse(retry_mock.called)
    @mock.patch.object(ris.RISOperations, '_perform_power_op')
    @mock.patch.object(ris.RISOperations, 'get_host_power_status')
    @mock.patch.object(ris.RISOperations, 'get_product_name')
    @mock.patch.object(ris.RISOperations, '_retry_until_powered_on')
    def test_set_host_power_on_for_non_blade_servers(
            self, retry_mock, product_mock, host_power_status_mock,
            perform_power_op_mock):
        """Powering on a non-blade uses a direct power op, not the retry."""
        host_power_status_mock.return_value = 'OFF'
        product_mock.return_value = 'ProLiant DL380'
        self.client.set_host_power('On')
        host_power_status_mock.assert_called_once_with()
        self.assertTrue(product_mock.called)
        self.assertTrue(perform_power_op_mock.called)
        self.assertFalse(retry_mock.called)
@mock.patch.object(ris.RISOperations, '_perform_power_op')
@mock.patch.object(ris.RISOperations, 'get_host_power_status')
def test_retry_until_powered_on_3times(self, host_power_status_mock,
perform_power_mock):
host_power_status_mock.side_effect = ['OFF', 'OFF', 'ON']
self.client._retry_until_powered_on('ON')
self.assertEqual(3, host_power_status_mock.call_count)
@mock.patch.object(ris.RISOperations, '_perform_power_op')
@mock.patch.object(ris.RISOperations, 'get_host_power_status')
def test_retry_until_powered_on(self, host_power_status_mock,
perform_power_mock):
host_power_status_mock.return_value = 'ON'
self.client._retry_until_powered_on('ON')
self.assertEqual(1, host_power_status_mock.call_count)
@mock.patch.object(ris.RISOperations, '_perform_power_op')
def test_reset_server(self, mock_perform_power):
self.client.reset_server()
mock_perform_power.assert_called_once_with("ForceRestart")
@mock.patch.object(ris.RISOperations, '_press_pwr_btn')
def test_hold_pwr_btn(self, press_pwr_btn_mock):
self.client.hold_pwr_btn()
press_pwr_btn_mock.assert_called_once_with(pushType="PressAndHold")
@mock.patch.object(ris.RISOperations, '_press_pwr_btn')
def test_press_pwr_btn(self, press_pwr_btn_mock):
self.client.hold_pwr_btn()
press_pwr_btn_mock.assert_called_once_with(pushType="PressAndHold")
@mock.patch.object(ris.RISOperations, '_perform_power_op')
@mock.patch.object(ris.RISOperations, 'get_host_power_status')
def test_inject_nmi(self, get_power_status_mock,
perform_power_op_mock):
get_power_status_mock.return_value = 'ON'
self.client.inject_nmi()
get_power_status_mock.assert_called_once_with()
perform_power_op_mock.assert_called_once_with('Nmi')
    @mock.patch.object(ris.RISOperations, '_perform_power_op')
    @mock.patch.object(ris.RISOperations, 'get_host_power_status')
    def test_inject_nmi_exc(self, get_power_status_mock,
                            perform_power_op_mock):
        """inject_nmi raises IloError and performs no op when host is OFF."""
        get_power_status_mock.return_value = 'OFF'
        self.assertRaises(exception.IloError,
                          self.client.inject_nmi)
        get_power_status_mock.assert_called_once_with()
        self.assertFalse(perform_power_op_mock.called)
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_host_post_state(self, get_details_mock):
        """POST state is read from the host details response body."""
        host_response = ris_outputs.RESPONSE_BODY_FOR_REST_OP
        # The canned response body carries PostState 'PowerOff'.
        expected = 'PowerOff'
        get_details_mock.return_value = json.loads(host_response)
        result = self.client.get_host_post_state()
        self.assertEqual(expected, result)
        get_details_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test_get_host_post_state_exc(self, get_details_mock):
        """Missing Oem/Hp/PostState key makes get_host_post_state raise."""
        host_response = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_response
        # Remove the key the implementation reads to trigger the error path.
        del host_response['Oem']['Hp']['PostState']
        self.assertRaises(exception.IloError,
                          self.client.get_host_post_state)
        get_details_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_current_bios_settings_filter_true(self, check_bios_mock):
        """With filtering on, only SUPPORTED_BIOS_PROPERTIES are returned."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        settings.pop("links", None)
        # Expected result: settings restricted to the supported-property set.
        expected_value = {k: settings[k] for k in (
            constants.SUPPORTED_BIOS_PROPERTIES) if k in settings}
        actual_value = self.client.get_current_bios_settings(True)
        check_bios_mock.assert_called_once_with()
        self.assertEqual(actual_value, expected_value)
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_current_bios_settings_filter_false(self, check_bios_mock,
                                                    bios_filter_mock):
        """With filtering off, the full settings dict is returned unfiltered."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        settings.pop("links", None)
        actual_value = self.client.get_current_bios_settings(False)
        check_bios_mock.assert_called_once_with()
        # The filter helper must never run on the unfiltered path.
        bios_filter_mock.assert_not_called()
        self.assertEqual(actual_value, settings)
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_pending_bios_settings_no_links(self, check_bios_mock):
        """Absent 'links' in the BIOS resource => command-not-supported."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        # Strip 'links' before it is handed back, so the Settings URI
        # cannot be resolved.
        settings.pop("links", None)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.get_pending_bios_settings, False)
        check_bios_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_get_extended_error')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_pending_bios_settings_filter_true(self, check_bios_mock,
                                                   get_mock, get_ext_mock):
        """Pending settings are fetched and filtered to supported props."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        settings_uri = "/rest/v1/systems/1/bios/Settings"
        pending_settings = json.loads(ris_outputs.GET_BIOS_PENDING_SETTINGS)
        pending_settings.pop("Description", None)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 pending_settings)
        # Expected: pending settings restricted to the supported-property set.
        expected_value = {k: pending_settings[k] for k in (
            constants.SUPPORTED_BIOS_PROPERTIES) if k in pending_settings}
        actual_value = self.client.get_pending_bios_settings(True)
        self.assertEqual(actual_value, expected_value)
        get_mock.assert_called_once_with(settings_uri)
        check_bios_mock.assert_called_once_with()
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_get_extended_error')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_pending_bios_settings_filter_false(self, check_bios_mock,
                                                    get_mock, get_ext_mock,
                                                    bios_filter_mock):
        """With filtering off, pending settings come back unmodified."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        settings_uri = "/rest/v1/systems/1/bios/Settings"
        pending_settings = json.loads(ris_outputs.GET_BIOS_PENDING_SETTINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 pending_settings)
        actual_value = self.client.get_pending_bios_settings(False)
        self.assertEqual(actual_value, pending_settings)
        get_mock.assert_called_once_with(settings_uri)
        check_bios_mock.assert_called_once_with()
        # The filter helper must never run on the unfiltered path.
        bios_filter_mock.assert_not_called()
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_default_bios_settings_filter_true(self, check_bios_mock,
                                                   rest_get_mock):
        """Default settings are read from BaseConfigs and filtered."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        base_config = json.loads(ris_outputs.GET_BASE_CONFIG)
        rest_get_mock.return_value = (200, 'HEADERS', base_config)
        # Mirror the implementation: take the first BaseConfigs entry
        # that carries a 'default' key.
        default_settings = None
        for cfg in base_config['BaseConfigs']:
            default_settings = cfg.get('default', None)
            if default_settings is not None:
                break
        expected_value = {k: default_settings[k] for k in (
            constants.SUPPORTED_BIOS_PROPERTIES) if k in default_settings}
        actual_value = self.client.get_default_bios_settings(True)
        check_bios_mock.assert_called_once_with()
        rest_get_mock.assert_called_once_with(
            "/rest/v1/systems/1/bios/BaseConfigs")
        self.assertEqual(expected_value, actual_value)
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_default_bios_settings_filter_false(
            self, check_bios_mock, rest_get_mock, filter_mock):
        """With filtering off, the raw BaseConfigs defaults are returned."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        base_config = json.loads(ris_outputs.GET_BASE_CONFIG)
        rest_get_mock.return_value = (200, 'HEADERS', base_config)
        # Mirror the implementation: take the first BaseConfigs entry
        # that carries a 'default' key.
        default_settings = None
        for cfg in base_config['BaseConfigs']:
            default_settings = cfg.get('default', None)
            if default_settings is not None:
                break
        expected_value = default_settings
        actual_value = self.client.get_default_bios_settings(False)
        check_bios_mock.assert_called_once_with()
        rest_get_mock.assert_called_once_with(
            "/rest/v1/systems/1/bios/BaseConfigs")
        self.assertEqual(expected_value, actual_value)
        # The filter helper must never run on the unfiltered path.
        filter_mock.assert_not_called()
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_default_bios_settings_no_links(self, check_bios_mock):
        """Absent 'links' in the BIOS resource => command-not-supported."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        # Strip 'links' so the BaseConfigs URI cannot be resolved.
        settings.pop("links", None)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.get_default_bios_settings, False)
        check_bios_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_get_extended_error')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_default_bios_settings_check_extended_error(
            self, check_bios_mock, rest_get_mock, ext_err_mock):
        """Non-200 from BaseConfigs GET raises and reports extended error."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        base_config = json.loads(ris_outputs.GET_BASE_CONFIG)
        # 201 is treated as a failure status for this GET.
        rest_get_mock.return_value = (201, 'HEADERS', base_config)
        self.assertRaises(exception.IloError,
                          self.client.get_default_bios_settings, False)
        check_bios_mock.assert_called_once_with()
        ext_err_mock.assert_called_once_with(base_config)
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_get_extended_error')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test_get_default_bios_settings_no_default_settings(
            self, check_bios_mock, rest_get_mock, ext_err_mock, filter_mock):
        """BaseConfigs without a 'default' entry => command-not-supported."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        base_config = json.loads(ris_outputs.GET_BASE_CONFIG)
        # Rename the 'default' key so no entry qualifies.
        default_val = base_config["BaseConfigs"][0].pop("default")
        base_config["BaseConfigs"][0]["no_default"] = default_val
        rest_get_mock.return_value = (200, 'HEADERS', base_config)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client.get_default_bios_settings, False)
        check_bios_mock.assert_called_once_with()
        ext_err_mock.assert_not_called()
        filter_mock.assert_not_called()
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_bios_settings_no_data_apply_filter(self, change_bios_mock,
                                                    filter_mock):
        """No data with filtering on: nothing is filtered or changed."""
        apply_filter = True
        data = None
        self.client.set_bios_settings(data, apply_filter)
        change_bios_mock.assert_not_called()
        filter_mock.assert_not_called()
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_bios_settings_no_data_no_filter(self, change_bios_mock,
                                                 filter_mock):
        """No data with filtering off: nothing is filtered or changed."""
        apply_filter = False
        data = None
        self.client.set_bios_settings(data, apply_filter)
        change_bios_mock.assert_not_called()
        filter_mock.assert_not_called()
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_bios_settings_filter_true(self, change_bios_mock,
                                           filter_mock):
        """With filtering on, only the filtered subset is applied."""
        data = {
            "AdminName": "Administrator",
            "BootMode": "LEGACY",
            "ServerName": "Gen9 server",
            "TimeFormat": "Ist",
            "BootOrderPolicy": "RetryIndefinitely",
            "ChannelInterleaving": "Enabled",
            "CollabPowerControl": "Enabled",
            "ConsistentDevNaming": "LomsOnly",
            "CustomPostMessage": ""
        }
        # What the (mocked) filter is expected to keep.
        expected = {
            "AdminName": "Administrator",
            "BootMode": "LEGACY",
            "ServerName": "Gen9 server",
            "TimeFormat": "Ist",
            "BootOrderPolicy": "RetryIndefinitely",
        }
        filter_mock.return_value = expected
        apply_filter = True
        self.client.set_bios_settings(data, apply_filter)
        change_bios_mock.assert_called_once_with(expected)
        filter_mock.assert_called_once_with(
            data, constants.SUPPORTED_BIOS_PROPERTIES)
    @mock.patch.object(utils, 'apply_bios_properties_filter')
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    def test_set_bios_settings_filter_false(self, change_bios_mock,
                                            filter_mock):
        """With filtering off, the full data dict is applied as-is."""
        data = {
            "AdminName": "Administrator",
            "BootMode": "LEGACY",
            "ServerName": "Gen9 server",
            "TimeFormat": "Ist",
            "BootOrderPolicy": "RetryIndefinitely",
            "ChannelInterleaving": "Enabled",
            "CollabPowerControl": "Enabled",
            "ConsistentDevNaming": "LomsOnly",
            "CustomPostMessage": ""
        }
        apply_filter = False
        self.client.set_bios_settings(data, apply_filter)
        change_bios_mock.assert_called_once_with(data)
        # The filter helper must never run on the unfiltered path.
        filter_mock.assert_not_called()
class TestRISOperationsPrivateMethods(testtools.TestCase):
    def setUp(self):
        """Create a RISOperations client with dummy credentials."""
        super(TestRISOperationsPrivateMethods, self).setUp()
        self.client = ris.RISOperations("1.2.3.4", "admin", "Admin")
    @mock.patch.object(ris.RISOperations, 'get_current_boot_mode')
    def test__is_boot_mode_uefi_uefi(self, get_current_boot_mode_mock):
        """_is_boot_mode_uefi is True when current boot mode is 'UEFI'."""
        get_current_boot_mode_mock.return_value = 'UEFI'
        result = self.client._is_boot_mode_uefi()
        self.assertTrue(result)
    @mock.patch.object(ris.RISOperations, 'get_current_boot_mode')
    def test__is_boot_mode_uefi_bios(self, get_current_boot_mode_mock):
        """_is_boot_mode_uefi is False when current boot mode is 'LEGACY'."""
        get_current_boot_mode_mock.return_value = 'LEGACY'
        result = self.client._is_boot_mode_uefi()
        self.assertFalse(result)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test___change_bios_setting(self, check_bios_mock, patch_mock):
        """Happy path: properties are PATCHed against the BIOS URI."""
        bios_uri = '/rest/v1/systems/1/bios'
        properties = {'fake-property': 'fake-value'}
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client._change_bios_setting(properties)
        # The BIOS resource is validated against the property names.
        check_bios_mock.assert_called_once_with(properties.keys())
        patch_mock.assert_called_once_with(bios_uri, {}, properties)
    @mock.patch.object(ris.RISOperations, '_validate_if_patch_supported')
    @mock.patch.object(ris.RISOperations, '_operation_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_settings_resource')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test___change_bios_setting_fail(self, check_bios_mock, patch_mock,
                                        settings_mock, op_mock,
                                        validate_mock):
        """PATCH disallowed on BIOS => falls back to Settings URI; non-200
        PATCH status raises IloError."""
        bios_uri = '/rest/v1/systems/1/bios/Settings'
        properties = {'fake-property': 'fake-value'}
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        # PATCH not allowed directly, forcing use of the Settings resource.
        op_mock.return_value = False
        settings_mock.return_value = (ris_outputs.GET_HEADERS,
                                      bios_uri, settings)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        # 301 is a failure status for the PATCH.
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.assertRaises(exception.IloError,
                          self.client._change_bios_setting,
                          properties)
        check_bios_mock.assert_called_once_with(properties.keys())
        op_mock.assert_called_once_with(ris_outputs.GET_HEADERS, 'PATCH')
        settings_mock.assert_called_once_with(settings)
        patch_mock.assert_called_once_with(bios_uri, {}, properties)
    @mock.patch.object(ris.RISOperations, '_validate_if_patch_supported')
    @mock.patch.object(ris.RISOperations, '_get_iscsi_settings_resource')
    @mock.patch.object(ris.RISOperations, '_operation_allowed')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__check_iscsi_rest_patch_allowed(self, check_bios_mock, get_mock,
                                             op_mock, settings_mock,
                                             validate_mock):
        """When PATCH is not allowed on the iSCSI resource, the Settings
        resource is resolved and validated for PATCH support."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi'
        iscsi_settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 iscsi_settings)
        # Disallow direct PATCH, forcing the Settings-resource path.
        op_mock.return_value = False
        iscsi_settings_uri = '/rest/v1/systems/1/bios/iScsi/Settings'
        settings_mock.return_value = (ris_outputs.GET_HEADERS,
                                      iscsi_settings_uri, iscsi_settings)
        self.client._check_iscsi_rest_patch_allowed()
        check_bios_mock.assert_called_once_with()
        get_mock.assert_called_once_with(iscsi_uri)
        op_mock.assert_called_once_with(ris_outputs.GET_HEADERS, 'PATCH')
        settings_mock.assert_called_once_with(iscsi_settings)
        validate_mock.assert_called_once_with(ris_outputs.GET_HEADERS,
                                              iscsi_settings_uri)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__check_iscsi_rest_patch_allowed_fail(self, check_bios_mock,
                                                  get_mock):
        """Non-200 GET on the iSCSI resource raises IloError."""
        bios_uri = '/rest/v1/systems/1/bios'
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi'
        iscsi_settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        # 202 is a failure status for this GET.
        get_mock.return_value = (202, ris_outputs.GET_HEADERS,
                                 iscsi_settings)
        self.assertRaises(exception.IloError,
                          self.client._check_iscsi_rest_patch_allowed)
        check_bios_mock.assert_called_once_with()
        get_mock.assert_called_once_with(iscsi_uri)
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__check_iscsi_rest_patch_allowed_not_found(self, check_bios_mock):
        """BIOS resource without an iSCSI link => command-not-supported."""
        bios_uri = '/rest/v1/systems/1/bios'
        # GET_BASE_CONFIG has no iSCSI link, triggering the not-found path.
        settings = json.loads(ris_outputs.GET_BASE_CONFIG)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, settings)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._check_iscsi_rest_patch_allowed)
        check_bios_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_mappings_resource')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__change_iscsi_settings(self, check_bios_mock,
                                    mappings_mock, check_iscsi_mock,
                                    patch_mock):
        """Happy path: iSCSI properties are PATCHed to the Settings URI."""
        bios_uri = '/rest/v1/systems/1/bios'
        bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, bios_settings)
        map_settings = json.loads(ris_outputs.GET_BIOS_MAPPINGS)
        mappings_mock.return_value = map_settings
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi/Settings'
        properties = {'iSCSITargetName':
                      'iqn.2011-07.com.example.server:test1',
                      'iSCSIBootLUN': '1',
                      'iSCSITargetIpAddress': '10.10.1.30',
                      'iSCSITargetTcpPort': 3260}
        # GET_ISCSI_PATCH is the full payload the implementation builds
        # from the properties plus the NIC mappings.
        settings = json.loads(ris_outputs.GET_ISCSI_PATCH)
        check_iscsi_mock.return_value = iscsi_uri
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client._change_iscsi_settings(properties)
        check_bios_mock.assert_called_once_with()
        mappings_mock.assert_called_once_with(bios_settings)
        check_iscsi_mock.assert_called_once_with()
        patch_mock.assert_called_once_with(iscsi_uri, None, settings)
    @mock.patch.object(ris.RISOperations, '_get_bios_mappings_resource')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__change_iscsi_settings_without_nic(self, check_bios_mock,
                                                mappings_mock):
        """Mappings without any NIC entry make _change_iscsi_settings raise."""
        bios_uri = '/rest/v1/systems/1/bios'
        bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, bios_settings)
        map_settings = json.loads(ris_outputs.GET_BIOS_MAPPINGS_WITHOUT_NIC)
        mappings_mock.return_value = map_settings
        self.assertRaises(exception.IloError,
                          self.client._change_iscsi_settings,
                          {})
        check_bios_mock.assert_called_once_with()
        mappings_mock.assert_called_once_with(bios_settings)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_check_iscsi_rest_patch_allowed')
    @mock.patch.object(ris.RISOperations, '_get_bios_mappings_resource')
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__change_iscsi_settings_fail(self, check_bios_mock,
                                         mappings_mock, check_iscsi_mock,
                                         patch_mock):
        """Non-200 PATCH status makes _change_iscsi_settings raise IloError."""
        bios_uri = '/rest/v1/systems/1/bios'
        bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
                                        bios_uri, bios_settings)
        map_settings = json.loads(ris_outputs.GET_BIOS_MAPPINGS)
        mappings_mock.return_value = map_settings
        iscsi_uri = '/rest/v1/systems/1/bios/iScsi/Settings'
        properties = {'iSCSITargetName':
                      'iqn.2011-07.com.example.server:test1',
                      'iSCSIBootLUN': '1',
                      'iSCSITargetIpAddress': '10.10.1.30',
                      'iSCSITargetTcpPort': 3260}
        settings = json.loads(ris_outputs.GET_ISCSI_PATCH)
        check_iscsi_mock.return_value = iscsi_uri
        # 301 is a failure status for the PATCH.
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.assertRaises(exception.IloError,
                          self.client._change_iscsi_settings,
                          properties)
        check_bios_mock.assert_called_once_with()
        mappings_mock.assert_called_once_with(bios_settings)
        check_iscsi_mock.assert_called_once_with()
        patch_mock.assert_called_once_with(iscsi_uri, None, settings)
    @mock.patch.object(ris.RISOperations, '_change_bios_setting')
    @mock.patch.object(ris.RISOperations, '_get_bios_setting')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test___change_secure_boot_settings(self, get_details_mock, patch_mock,
                                           get_bios_mock, change_bios_mock):
        """Secure-boot property is PATCHed, then CustomPostMessage is
        toggled (trailing space appended) to force an iLO settings flush."""
        host_details = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_details
        get_bios_mock.return_value = "test"
        secure_boot_uri = '/rest/v1/Systems/1/SecureBoot'
        # The implementation re-writes CustomPostMessage with a trailing
        # space appended to the current value ("test" -> "test ").
        bios_dict = {'CustomPostMessage': 'test '}
        patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_POST_RESPONSE)
        self.client._change_secure_boot_settings('fake-property',
                                                 'fake-value')
        get_details_mock.assert_called_once_with()
        patch_mock.assert_called_once_with(secure_boot_uri, None,
                                           {'fake-property': 'fake-value'})
        get_bios_mock.assert_called_once_with('CustomPostMessage')
        change_bios_mock.assert_called_once_with(bios_dict)
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test___change_secure_boot_settings_not_supported(self,
                                                         get_details_mock):
        """Missing SecureBoot link in host details => command-not-supported."""
        host_response = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        # Remove the link the implementation resolves.
        del host_response['Oem']['Hp']['links']['SecureBoot']
        get_details_mock.return_value = host_response
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._change_secure_boot_settings,
                          'fake-property', 'fake-value')
        get_details_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    def test___change_secure_boot_settings_fail(self, get_details_mock,
                                                patch_mock):
        """Non-200 PATCH on the SecureBoot URI raises IloError."""
        host_details = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
        get_details_mock.return_value = host_details
        secure_boot_uri = '/rest/v1/Systems/1/SecureBoot'
        # 301 is a failure status for the PATCH.
        patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                   ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError,
                          self.client._change_secure_boot_settings,
                          'fake-property', 'fake-value')
        get_details_mock.assert_called_once_with()
        patch_mock.assert_called_once_with(secure_boot_uri, None,
                                           {'fake-property': 'fake-value'})
    @mock.patch.object(ris.RISOperations, '_check_bios_resource')
    def test__get_bios_setting(self, bios_mock):
        """_get_bios_setting returns the named value from BIOS settings."""
        bios_mock.return_value = ('fake', 'fake',
                                  json.loads(ris_outputs.GET_BIOS_SETTINGS))
        result = self.client._get_bios_setting('BootMode')
        bios_mock.assert_called_once_with(['BootMode'])
        # The canned settings carry BootMode 'Uefi'.
        self.assertEqual(result, 'Uefi')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_settings_resource(self, get_mock):
        """Settings resource is fetched from the URI in the 'links' entry."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 settings)
        self.client._get_bios_settings_resource(settings)
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/Settings')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_settings_resource_key_error(self, get_mock):
        """Input without the expected links/Settings key raises IloError."""
        # GET_BASE_CONFIG lacks the Settings link, triggering the KeyError
        # path inside the implementation.
        settings = json.loads(ris_outputs.GET_BASE_CONFIG)
        self.assertRaises(exception.IloError,
                          self.client._get_bios_settings_resource,
                          settings)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_settings_resource_fail(self, get_mock):
        """Non-200 GET status for the Settings resource raises IloError."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        settings_uri = '/rest/v1/systems/1/bios/Settings'
        # 301 is a failure status for this GET.
        get_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                 settings)
        self.assertRaises(exception.IloError,
                          self.client._get_bios_settings_resource,
                          settings)
        get_mock.assert_called_once_with(settings_uri)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_boot_resource(self, get_mock):
        """Boot resource is fetched from the Boot link of the settings."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        boot_settings = json.loads(ris_outputs.GET_BIOS_BOOT)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 boot_settings)
        self.client._get_bios_boot_resource(settings)
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/Boot')
    def test__get_bios_boot_resource_key_error(self):
        """Input without a Boot link => command-not-supported."""
        settings = json.loads(ris_outputs.GET_BASE_CONFIG)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._get_bios_boot_resource,
                          settings)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_boot_resource_fail(self, get_mock):
        """Non-200 GET status for the Boot resource raises IloError."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        boot_settings = json.loads(ris_outputs.GET_BIOS_BOOT)
        # 201 is a failure status for this GET.
        get_mock.return_value = (201, ris_outputs.GET_HEADERS,
                                 boot_settings)
        self.assertRaises(exception.IloError,
                          self.client._get_bios_boot_resource,
                          settings)
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/Boot')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_mappings_resource(self, get_mock):
        """Mappings resource is fetched from the Mappings link."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        map_settings = json.loads(ris_outputs.GET_BIOS_MAPPINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 map_settings)
        self.client._get_bios_mappings_resource(settings)
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/Mappings')
    def test__get_bios_mappings_resource_key_error(self):
        """Input without a Mappings link => command-not-supported."""
        settings = json.loads(ris_outputs.GET_BASE_CONFIG)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._get_bios_mappings_resource,
                          settings)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_bios_mappings_resource_fail(self, get_mock):
        """Non-200 GET status for the Mappings resource raises IloError."""
        settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
        map_settings = json.loads(ris_outputs.GET_BIOS_MAPPINGS)
        # 201 is a failure status for this GET.
        get_mock.return_value = (201, ris_outputs.GET_HEADERS,
                                 map_settings)
        self.assertRaises(exception.IloError,
                          self.client._get_bios_mappings_resource,
                          settings)
        get_mock.assert_called_once_with('/rest/v1/systems/1/bios/Mappings')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_iscsi_settings_resource(self, get_mock):
        """iSCSI Settings resource is fetched from its links entry."""
        settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        get_mock.return_value = (200, ris_outputs.GET_HEADERS, settings)
        self.client._get_iscsi_settings_resource(settings)
        get_mock.assert_called_once_with(
            '/rest/v1/systems/1/bios/iScsi/Settings')
    def test__get_iscsi_settings_resource_key_error(self):
        """Input without the Settings link => command-not-supported."""
        settings = json.loads(ris_outputs.GET_ISCSI_PATCH)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._get_iscsi_settings_resource,
                          settings)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    def test__get_iscsi_settings_resource_fail(self, get_mock):
        """Non-200 GET status for the iSCSI Settings resource raises."""
        settings = json.loads(ris_outputs.GET_ISCSI_SETTINGS)
        # 201 is a failure status for this GET.
        get_mock.return_value = (201, ris_outputs.GET_HEADERS, settings)
        self.assertRaises(exception.IloError,
                          self.client._get_iscsi_settings_resource,
                          settings)
        get_mock.assert_called_once_with(
            '/rest/v1/systems/1/bios/iScsi/Settings')
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test__get_vm_device_status(self,
                                   collection_mock,
                                   ilo_details_mock,
                                   get_mock):
        """Happy path: the FLOPPY member of the VirtualMedia collection
        is located and its resource fetched."""
        manager_uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        ilo_details_mock.return_value = (manager_data, manager_uri)
        collection_item = json.loads(ris_outputs.RESP_VM_STATUS_FLOPPY_EMPTY)
        vmedia_uri = '/rest/v1/Managers/1/VirtualMedia'
        member_uri = '/rest/v1/Managers/1/VirtualMedia/1'
        collection_mock.return_value = [(200, None, collection_item,
                                         member_uri)]
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 collection_item)
        self.client._get_vm_device_status('FLOPPY')
        ilo_details_mock.assert_called_once_with()
        collection_mock.assert_called_once_with(vmedia_uri)
        get_mock.assert_called_once_with(member_uri)
    def test__get_vm_device_status_invalid_device(self):
        """An unknown device name raises IloInvalidInputError."""
        self.assertRaises(exception.IloInvalidInputError,
                          self.client._get_vm_device_status, device='FOO')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    def test__get_vm_device_status_vmedia_not_supported(self,
                                                        ilo_details_mock):
        """Manager details without a VirtualMedia link => not supported."""
        manager_uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS_NO_VMEDIA)
        ilo_details_mock.return_value = (manager_data, manager_uri)
        self.assertRaises(exception.IloCommandNotSupportedError,
                          self.client._get_vm_device_status, device='FLOPPY')
        ilo_details_mock.assert_called_once_with()
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test__get_vm_device_status_fail(self,
                                        collection_mock,
                                        ilo_details_mock,
                                        get_mock):
        """Non-200 GET on the vmedia member raises IloError."""
        manager_uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        ilo_details_mock.return_value = (manager_data, manager_uri)
        collection_item = json.loads(ris_outputs.RESP_VM_STATUS_FLOPPY_EMPTY)
        vmedia_uri = '/rest/v1/Managers/1/VirtualMedia'
        member_uri = '/rest/v1/Managers/1/VirtualMedia/1'
        collection_mock.return_value = [(200, None, collection_item,
                                         member_uri)]
        # 301 is a failure status for this GET.
        get_mock.return_value = (301, ris_outputs.GET_HEADERS,
                                 ris_outputs.REST_FAILURE_OUTPUT)
        self.assertRaises(exception.IloError,
                          self.client._get_vm_device_status, device='FLOPPY')
        ilo_details_mock.assert_called_once_with()
        collection_mock.assert_called_once_with(vmedia_uri)
        get_mock.assert_called_once_with(member_uri)
    @mock.patch.object(ris.RISOperations, '_rest_get')
    @mock.patch.object(ris.RISOperations, '_get_ilo_details')
    @mock.patch.object(ris.RISOperations, '_get_collection')
    def test__get_vm_device_status_device_missing(self,
                                                  collection_mock,
                                                  ilo_details_mock,
                                                  get_mock):
        """Requested device absent from the collection raises IloError."""
        manager_uri = '/rest/v1/Managers/1'
        manager_data = json.loads(ris_outputs.GET_MANAGER_DETAILS)
        ilo_details_mock.return_value = (manager_data, manager_uri)
        # Collection contains no CDROM entry, only the mismatching member.
        collection_item = json.loads(ris_outputs.RESP_VM_STATUS_CDROM_MISSING)
        vmedia_uri = '/rest/v1/Managers/1/VirtualMedia'
        member_uri = '/rest/v1/Managers/1/VirtualMedia/2'
        collection_mock.return_value = [(200, None, collection_item,
                                         member_uri)]
        get_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                 collection_item)
        self.assertRaises(exception.IloError,
                          self.client._get_vm_device_status, device='CDROM')
        ilo_details_mock.assert_called_once_with()
        collection_mock.assert_called_once_with(vmedia_uri)
        get_mock.assert_called_once_with(member_uri)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    def test__update_persistent_boot_once(self, rest_patch_mock):
        """One-time boot: override enabled 'Once' with target 'Cd'."""
        systems_uri = "/rest/v1/Systems/1"
        new_boot_settings = {}
        new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': 'Once',
                                     'BootSourceOverrideTarget': 'Cd'}
        rest_patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                        ris_outputs.REST_POST_RESPONSE)
        self.client._update_persistent_boot(['cdrom'], persistent=False)
        rest_patch_mock.assert_called_once_with(systems_uri, None,
                                                new_boot_settings)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    def test__update_persistent_boot_for_continuous(self, rest_patch_mock):
        """Persistent boot: override enabled 'Continuous' with target 'Cd'."""
        systems_uri = "/rest/v1/Systems/1"
        new_boot_settings = {}
        new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': 'Continuous',
                                     'BootSourceOverrideTarget': 'Cd'}
        rest_patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                        ris_outputs.REST_POST_RESPONSE)
        self.client._update_persistent_boot(['cdrom'], persistent=True)
        rest_patch_mock.assert_called_once_with(systems_uri, None,
                                                new_boot_settings)
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    def test__update_persistent_boot_for_UefiShell(self, rest_patch_mock):
        """'UefiShell' is passed through as the override target verbatim."""
        systems_uri = "/rest/v1/Systems/1"
        new_boot_settings = {}
        new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': 'Continuous',
                                     'BootSourceOverrideTarget': 'UefiShell'}
        rest_patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                        ris_outputs.REST_POST_RESPONSE)
        self.client._update_persistent_boot(['UefiShell'],
                                            persistent=True)
        rest_patch_mock.assert_called_once_with(systems_uri, None,
                                                new_boot_settings)
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    def test__update_persistent_boot_for_iscsi(self, rest_patch_mock,
                                               get_host_mock):
        """ISCSI device: two PATCHes — the UEFI target first, then the
        override-enabled/target pair."""
        get_host_mock.return_value = (
            json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP_WITH_ISCSI))
        systems_uri = '/rest/v1/Systems/1'
        new1_boot_settings = {}
        new1_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':
                                      u'NIC.LOM.1.1.iSCSI'}
        new2_boot_settings = {}
        new2_boot_settings['Boot'] = {'BootSourceOverrideEnabled':
                                      'Continuous', 'BootSourceOverrideTarget':
                                      'UefiTarget'}
        rest_patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                        ris_outputs.REST_POST_RESPONSE)
        calls = [mock.call(systems_uri, None, new1_boot_settings),
                 mock.call(systems_uri, None, new2_boot_settings)]
        self.client._update_persistent_boot(['ISCSI'], persistent=True)
        rest_patch_mock.assert_has_calls(calls)
    @mock.patch.object(ris.RISOperations, '_get_host_details')
    @mock.patch.object(ris.RISOperations, '_rest_patch')
    def test__update_persistent_boot_for_iscsi_with_none_device_present(
            self, rest_patch_mock, get_host_mock):
        """ISCSI device is found even when a 'None' boot entry is present;
        the same two PATCHes are issued."""
        get_host_mock.return_value = (
            json.loads(
                ris_outputs.RESPONSE_BODY_FOR_REST_OP_WITH_ISCSI_AND_NONE))
        systems_uri = '/rest/v1/Systems/1'
        new1_boot_settings = {}
        new1_boot_settings['Boot'] = {'UefiTargetBootSourceOverride':
                                      u'NIC.LOM.1.1.iSCSI'}
        new2_boot_settings = {}
        new2_boot_settings['Boot'] = {'BootSourceOverrideEnabled':
                                      'Continuous', 'BootSourceOverrideTarget':
                                      'UefiTarget'}
        rest_patch_mock.return_value = (200, ris_outputs.GET_HEADERS,
                                        ris_outputs.REST_POST_RESPONSE)
        calls = [mock.call(systems_uri, None, new1_boot_settings),
                 mock.call(systems_uri, None, new2_boot_settings)]
        self.client._update_persistent_boot(['ISCSI'], persistent=True)
        rest_patch_mock.assert_has_calls(calls)
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__update_persistent_boot_for_iscsi_not_found(self,
get_host_mock):
get_host_mock.return_value = (
json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP))
self.assertRaisesRegex(exception.IloError, "No UEFI iSCSI bootable "
"device found",
self.client._update_persistent_boot,
['ISCSI'], persistent=True)
@mock.patch.object(ris.RISOperations, '_rest_patch')
def test__update_persistent_boot_fail(self, rest_patch_mock):
systems_uri = "/rest/v1/Systems/1"
new_boot_settings = {}
new_boot_settings['Boot'] = {'BootSourceOverrideEnabled': 'Continuous',
'BootSourceOverrideTarget': 'FakeDevice'}
rest_patch_mock.return_value = (301, ris_outputs.GET_HEADERS,
ris_outputs.REST_POST_RESPONSE)
self.assertRaises(exception.IloError,
self.client._update_persistent_boot,
['FakeDevice'], persistent=True)
rest_patch_mock.assert_called_once_with(systems_uri, None,
new_boot_settings)
@mock.patch.object(ris.RISOperations, '_get_bios_boot_resource')
@mock.patch.object(ris.RISOperations, '_check_bios_resource')
def test__get_persistent_boot_devices_no_boot_order(self,
check_bios_mock,
boot_mock):
bios_uri = '/rest/v1/systems/1/bios'
bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
bios_uri, bios_settings)
boot_settings = json.loads(ris_outputs.BOOT_PERS_DEV_ORDER_MISSING)
boot_mock.return_value = boot_settings
self.assertRaises(exception.IloError,
self.client._get_persistent_boot_devices)
check_bios_mock.assert_called_once_with()
boot_mock.assert_called_once_with(bios_settings)
@mock.patch.object(ris.RISOperations, '_get_bios_boot_resource')
@mock.patch.object(ris.RISOperations, '_check_bios_resource')
def test__get_persistent_boot_devices(self, check_bios_mock, boot_mock):
bios_uri = '/rest/v1/systems/1/bios'
bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
bios_uri, bios_settings)
boot_settings = json.loads(ris_outputs.GET_BIOS_BOOT)
boot_mock.return_value = boot_settings
exp_boot_src = json.loads(ris_outputs.UEFI_BootSources)
exp_boot_order = ris_outputs.UEFI_PERS_BOOT_DEVICES
boot_src, boot_order = self.client._get_persistent_boot_devices()
check_bios_mock.assert_called_once_with()
boot_mock.assert_called_once_with(bios_settings)
self.assertEqual(boot_src, exp_boot_src)
self.assertEqual(boot_order, exp_boot_order)
@mock.patch.object(ris.RISOperations, '_get_bios_boot_resource')
@mock.patch.object(ris.RISOperations, '_check_bios_resource')
def test__get_persistent_boot_devices_no_bootsources(self,
check_bios_mock,
boot_mock):
bios_uri = '/rest/v1/systems/1/bios'
bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
check_bios_mock.return_value = (ris_outputs.GET_HEADERS,
bios_uri, bios_settings)
boot_settings = json.loads(ris_outputs.UEFI_BOOTSOURCES_MISSING)
boot_mock.return_value = boot_settings
self.assertRaises(exception.IloError,
self.client._get_persistent_boot_devices)
check_bios_mock.assert_called_once_with()
boot_mock.assert_called_once_with(bios_settings)
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_pci_devices(self, get_host_details_mock, get_mock):
system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
get_host_details_mock.return_value = system_data
pci_uri = '/rest/v1/Systems/1/PCIDevices'
pci_device_list = json.loads(ris_outputs.PCI_DEVICE_DETAILS)
get_mock.return_value = (200, ris_outputs.GET_HEADERS,
pci_device_list)
self.client._get_pci_devices()
get_mock.assert_called_once_with(pci_uri)
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_pci_devices_fail(self, get_host_details_mock,
get_mock):
system_data = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
get_host_details_mock.return_value = system_data
pci_uri = '/rest/v1/Systems/1/PCIDevices'
pci_device_list = json.loads(ris_outputs.PCI_DEVICE_DETAILS)
get_mock.return_value = (301, ris_outputs.GET_HEADERS,
pci_device_list)
self.assertRaises(exception.IloError,
self.client._get_pci_devices)
get_mock.assert_called_once_with(pci_uri)
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_pci_devices_not_supported(self, get_details_mock):
host_response = json.loads(ris_outputs.RESPONSE_BODY_FOR_REST_OP)
del host_response['Oem']['Hp']['links']['PCIDevices']
get_details_mock.return_value = host_response
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._get_pci_devices)
get_details_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_storage_resource(self, get_host_details_mock, get_mock):
system_data = json.loads(ris_outputs.REST_GET_SMART_STORAGE)
get_host_details_mock.return_value = system_data
storage_uri = '/rest/v1/Systems/1/SmartStorage'
storage_settings = json.loads(ris_outputs.STORAGE_SETTINGS)
get_mock.return_value = (200, ris_outputs.GET_HEADERS,
storage_settings)
self.client._get_storage_resource()
get_mock.assert_called_once_with(storage_uri)
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_storage_resource_fail(self, get_host_details_mock,
get_mock):
system_data = json.loads(ris_outputs.REST_GET_SMART_STORAGE)
get_host_details_mock.return_value = system_data
storage_uri = '/rest/v1/Systems/1/SmartStorage'
storage_settings = json.loads(ris_outputs.STORAGE_SETTINGS)
get_mock.return_value = (301, ris_outputs.GET_HEADERS,
storage_settings)
self.assertRaises(exception.IloError,
self.client._get_storage_resource)
get_mock.assert_called_once_with(storage_uri)
@mock.patch.object(ris.RISOperations, '_get_host_details')
def test__get_storage_resource_not_supported(self,
get_host_details_mock):
system_data = json.loads(ris_outputs.REST_GET_SMART_STORAGE)
del system_data['Oem']['Hp']['links']['SmartStorage']
get_host_details_mock.return_value = system_data
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._get_storage_resource)
get_host_details_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_storage_resource')
def test__get_array_controller_resource(self, storage_mock, get_mock):
storage_data = json.loads(ris_outputs.STORAGE_SETTINGS)
storage_uri = '/rest/v1/Systems/1/SmartStorage'
storage_mock.return_value = (ris_outputs.GET_HEADERS,
storage_uri,
storage_data)
array_uri = '/rest/v1/Systems/1/SmartStorage/ArrayControllers'
array_settings = json.loads(ris_outputs.ARRAY_SETTINGS)
get_mock.return_value = (200, ris_outputs.GET_HEADERS,
array_settings)
self.client._get_array_controller_resource()
get_mock.assert_called_once_with(array_uri)
@mock.patch.object(ris.RISOperations, '_rest_get')
@mock.patch.object(ris.RISOperations, '_get_storage_resource')
def test__get_array_controller_resource_fail(self, storage_mock,
get_mock):
storage_data = json.loads(ris_outputs.STORAGE_SETTINGS)
storage_uri = '/rest/v1/Systems/1/SmartStorage'
storage_mock.return_value = (ris_outputs.GET_HEADERS,
storage_uri,
storage_data)
array_uri = '/rest/v1/Systems/1/SmartStorage/ArrayControllers'
array_settings = json.loads(ris_outputs.ARRAY_SETTINGS)
get_mock.return_value = (301, ris_outputs.GET_HEADERS,
array_settings)
self.assertRaises(exception.IloError,
self.client._get_array_controller_resource)
get_mock.assert_called_once_with(array_uri)
@mock.patch.object(ris.RISOperations, '_get_storage_resource')
def test__get_array_controller_resource_not_supported(self,
storage_mock):
storage_data = json.loads(ris_outputs.STORAGE_SETTINGS)
storage_uri = '/rest/v1/Systems/1/SmartStorage'
del storage_data['links']['ArrayControllers']
storage_mock.return_value = (ris_outputs.GET_HEADERS,
storage_uri,
storage_data)
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._get_array_controller_resource)
storage_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_array_controller_resource')
def test__create_list_of_array_controllers(self, array_mock):
array_data = json.loads(ris_outputs.ARRAY_SETTINGS)
array_uri = '/rest/v1/Systems/1/SmartStorage/ArrayControllers'
array_mock.return_value = (ris_outputs.GET_HEADERS,
array_uri,
array_data)
expected_uri_links = (
[{u'href': u'/rest/v1/Systems/1/SmartStorage/ArrayControllers/0'}])
uri_links = self.client._create_list_of_array_controllers()
self.assertEqual(expected_uri_links, uri_links)
array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_array_controller_resource')
def test__create_list_of_array_controllers_fail(self, array_mock):
array_data = json.loads(ris_outputs.ARRAY_SETTINGS)
array_uri = '/rest/v1/Systems/1/SmartStorage/ArrayControllers'
del array_data['links']['Member']
array_mock.return_value = (ris_outputs.GET_HEADERS,
array_uri,
array_data)
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._create_list_of_array_controllers)
array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_physical_drive_resource')
def test__get_drive_type_and_speed(self, disk_details_mock):
disk_details_mock.return_value = (
json.loads(ris_outputs.DISK_DETAILS_LIST))
expected_out = {'has_rotational': 'true',
'rotational_drive_10000_rpm': 'true'}
out = self.client._get_drive_type_and_speed()
self.assertEqual(expected_out, out)
disk_details_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_create_list_of_array_controllers')
@mock.patch.object(ris.RISOperations, '_rest_get')
def test__get_drive_resource_physical(self, get_mock, array_mock):
array_mock.return_value = (
[{u'href': u'/rest/v1/Systems/1/SmartStorage/ArrayControllers/0'}])
get_mock.side_effect = [(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.ARRAY_MEM_SETTINGS)),
(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.DISK_COLLECTION)),
(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.DISK_DETAILS_LIST))]
out = self.client._get_physical_drive_resource()
expected_out = []
expected_out.append(json.loads(ris_outputs.DISK_DETAILS_LIST))
self.assertEqual(expected_out, out)
array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_create_list_of_array_controllers')
@mock.patch.object(ris.RISOperations, '_rest_get')
def test__get_drive_resource_logical(self, get_mock, array_mock):
array_mock.return_value = (
[{u'href': u'/rest/v1/Systems/1/SmartStorage/ArrayControllers/0'}])
get_mock.side_effect = [(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.ARRAY_MEM_SETTINGS)),
(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.LOGICAL_COLLECTION)),
(ris_outputs.GET_HEADERS, 'xyz',
json.loads(ris_outputs.LOGICAL_DETAILS))]
out = self.client._get_logical_drive_resource()
expected_out = []
expected_out.append(json.loads(ris_outputs.LOGICAL_DETAILS))
self.assertEqual(expected_out, out)
array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_pci_devices')
def test__get_gpu_pci_devices(self, pci_mock):
pci_mock.return_value = json.loads(ris_outputs.PCI_DEVICE_DETAILS)
pci_gpu_list = self.client._get_gpu_pci_devices()
self.assertEqual(pci_gpu_list, json.loads(ris_outputs.PCI_GPU_LIST))
self.assertTrue(pci_mock.called)
@mock.patch.object(ris.RISOperations, '_get_pci_devices')
def test__get_gpu_pci_devices_returns_empty(self, pci_mock):
pci_response = json.loads(ris_outputs.PCI_DEVICE_DETAILS_NO_GPU)
pci_mock.return_value = pci_response
pci_gpu_list = self.client._get_gpu_pci_devices()
self.assertEqual(len(pci_gpu_list), 0)
self.assertTrue(pci_mock.called)
@mock.patch.object(ris.RISOperations, '_get_pci_devices')
def test__get_gpu_pci_devices_fail_not_supported_error(self, pci_mock):
msg = ('links/PCIDevices section in ComputerSystem/Oem/Hp'
' does not exist')
pci_mock.side_effect = exception.IloCommandNotSupportedError(msg)
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._get_gpu_pci_devices)
self.assertTrue(pci_mock.called)
@mock.patch.object(ris.RISOperations, '_get_gpu_pci_devices')
def test__get_number_of_gpu_devices_connected(self, gpu_list_mock):
gpu_list_mock.return_value = json.loads(ris_outputs.PCI_GPU_LIST)
expected_gpu_count = {'pci_gpu_devices': 1}
gpu_count_returned = self.client._get_number_of_gpu_devices_connected()
self.assertEqual(gpu_count_returned, expected_gpu_count)
self.assertTrue(gpu_list_mock.called)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_cpu_virtualization_enabled(self, bios_mock):
bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
bios_mock.return_value = bios_settings['ProcVirtualization']
expected_cpu_vt = True
cpu_vt_return = self.client._get_cpu_virtualization()
self.assertEqual(cpu_vt_return, expected_cpu_vt)
self.assertTrue(bios_mock.called)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_cpu_virtualization_disabled(self, bios_mock):
bios_mock.return_value = 'Disable'
expected_cpu_vt = False
cpu_vt_return = self.client._get_cpu_virtualization()
self.assertEqual(cpu_vt_return, expected_cpu_vt)
self.assertTrue(bios_mock.called)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_cpu_virtualization_not_supported_error(self, bios_mock):
msg = ("BIOS Property 'ProcVirtualization' is not supported on this"
" system")
bios_mock.side_effect = exception.IloCommandNotSupportedError(msg)
expected_cpu_vt = False
cpu_vt_return = self.client._get_cpu_virtualization()
self.assertEqual(cpu_vt_return, expected_cpu_vt)
self.assertTrue(bios_mock.called)
    @mock.patch.object(ris.RISOperations, '_get_ilo_details', autospec=True)
    def test__get_firmware_update_service_resource_traverses_manager_as(
            self, _get_ilo_details_mock):
        """Lookup walks manager['Oem']['Hp']['links']['UpdateService']['href']."""
        # | GIVEN |
        manager_mock = mock.MagicMock(spec=dict, autospec=True)
        _get_ilo_details_mock.return_value = (manager_mock, 'some_uri')
        # | WHEN |
        self.client._get_firmware_update_service_resource()
        # | THEN |
        # Each chained __getitem__ assertion verifies one step of the
        # dictionary traversal; the call order must match exactly.
        manager_mock.__getitem__.assert_called_once_with('Oem')
        manager_mock.__getitem__().__getitem__.assert_called_once_with('Hp')
        (manager_mock.__getitem__().__getitem__().__getitem__.
            assert_called_once_with('links'))
        (manager_mock.__getitem__().__getitem__().__getitem__().
            __getitem__.assert_called_once_with('UpdateService'))
        (manager_mock.__getitem__().__getitem__().__getitem__().
            __getitem__().__getitem__.assert_called_once_with('href'))
@mock.patch.object(ris.RISOperations, '_get_ilo_details', autospec=True)
def test__get_firmware_update_service_resource_throws_if_not_found(
self, _get_ilo_details_mock):
# | GIVEN |
manager_mock = mock.MagicMock(spec=dict)
_get_ilo_details_mock.return_value = (manager_mock, 'some_uri')
manager_mock.__getitem__.side_effect = KeyError('not found')
# | WHEN | & | THEN |
self.assertRaises(exception.IloCommandNotSupportedError,
self.client._get_firmware_update_service_resource)
@mock.patch.object(ris.RISOperations, '_rest_post')
def test_press_pwr_btn(self, rest_post_mock):
systems_uri = "/rest/v1/Systems/1"
new_pow_settings = {"Action": "PowerButton",
"Target": "/Oem/Hp",
"PushType": "Press"}
rest_post_mock.return_value = (200, ris_outputs.GET_HEADERS,
ris_outputs.REST_POST_RESPONSE)
self.client._press_pwr_btn()
rest_post_mock.assert_called_once_with(systems_uri, None,
new_pow_settings)
@mock.patch.object(ris.RISOperations, '_rest_post')
def test_press_pwr_btn_patch_fail(self, rest_post_mock):
systems_uri = "/rest/v1/Systems/1"
new_pow_settings = {"Action": "PowerButton",
"Target": "/Oem/Hp",
"PushType": "Press"}
rest_post_mock.return_value = (301, ris_outputs.GET_HEADERS,
ris_outputs.REST_FAILURE_OUTPUT)
self.assertRaises(exception.IloError,
self.client._press_pwr_btn, 'Press')
rest_post_mock.assert_called_once_with(systems_uri, None,
new_pow_settings)
@mock.patch.object(ris.RISOperations, '_rest_post')
def test_perform_power_op(self, rest_post_mock):
systems_uri = "/rest/v1/Systems/1"
new_pow_settings = {"Action": "Reset", "ResetType": "ForceRestart"}
rest_post_mock.return_value = (200, ris_outputs.GET_HEADERS,
ris_outputs.REST_POST_RESPONSE)
self.client.reset_server()
rest_post_mock.assert_called_once_with(systems_uri, None,
new_pow_settings)
@mock.patch.object(ris.RISOperations, '_rest_post')
def test_perform_power_op_fail(self, rest_post_mock):
systems_uri = "/rest/v1/Systems/1"
new_pow_settings = {"Action": "Reset", "ResetType": "ForceRestart"}
rest_post_mock.return_value = (301, ris_outputs.GET_HEADERS,
ris_outputs.REST_FAILURE_OUTPUT)
self.assertRaises(exception.IloError,
self.client.reset_server)
rest_post_mock.assert_called_once_with(systems_uri, None,
new_pow_settings)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test__get_tpm_capability_notpresent(self, bios_mock):
bios_mock.return_value = 'NotPresent'
expected_out = False
status = self.client._get_tpm_capability()
self.assertEqual(expected_out, status)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test__get_tpm_capability_presentdisabled(self, bios_mock):
bios_mock.return_value = 'PresentDisabled'
expected_out = True
status = self.client._get_tpm_capability()
self.assertEqual(expected_out, status)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test__get_tpm_capability_presentenabled(self, bios_mock):
bios_mock.return_value = 'PresentEnabled'
expected_out = True
status = self.client._get_tpm_capability()
self.assertEqual(expected_out, status)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test__get_tpm_capability_resource_notpresent(self, bios_mock):
msg = 'BIOS Property TpmState is not supported on this system.'
bios_mock.side_effect = exception.IloCommandNotSupportedError(msg)
expected_out = False
status = self.client._get_tpm_capability()
self.assertEqual(expected_out, status)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_nvdimm_n_status_enabled(self, bios_mock):
bios_settings = json.loads(ris_outputs.GET_BIOS_SETTINGS)
bios_mock.return_value = bios_settings['NvDimmNMemFunctionality']
expected_nvdimm_n_status = True
nvdimm_n_status_return = self.client._get_nvdimm_n_status()
self.assertEqual(nvdimm_n_status_return, expected_nvdimm_n_status)
self.assertTrue(bios_mock.called)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_nvdimm_n_status_disabled(self, bios_mock):
bios_mock.return_value = 'Disabled'
expected_nvdimm_n_status = False
nvdimm_n_status_return = self.client._get_nvdimm_n_status()
self.assertEqual(nvdimm_n_status_return, expected_nvdimm_n_status)
self.assertTrue(bios_mock.called)
@mock.patch.object(ris.RISOperations, '_get_bios_setting')
def test___get_nvdimm_n_status_not_supported_error(self, bios_mock):
msg = ("BIOS Property 'NvDimmNMemFunctionality' is not supported on"
" this system")
bios_mock.side_effect = exception.IloCommandNotSupportedError(msg)
expected_nvdimm_n_status = False
nvdimm_n_status_return = self.client._get_nvdimm_n_status()
self.assertEqual(nvdimm_n_status_return, expected_nvdimm_n_status)
self.assertTrue(bios_mock.called)
@mock.patch.object(ris.RISOperations, '_get_array_controller_resource')
def test__is_raid_supported(self, get_array_mock):
array_settings = json.loads(ris_outputs.ARRAY_SETTINGS)
get_array_mock.return_value = (200, ris_outputs.GET_HEADERS,
array_settings)
expt_ret = True
ret = self.client._is_raid_supported()
self.assertEqual(ret, expt_ret)
get_array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, '_get_array_controller_resource')
def test__is_raid_supported_false(self, get_array_mock):
array_settings = json.loads(ris_outputs.ARRAY_SETTING_NO_CONTROLLER)
get_array_mock.return_value = (200, ris_outputs.GET_HEADERS,
array_settings)
expt_ret = False
ret = self.client._is_raid_supported()
self.assertEqual(ret, expt_ret)
get_array_mock.assert_called_once_with()
@mock.patch.object(ris.RISOperations, 'get_product_name')
def test_delete_raid_configuration(self, product_name_mock):
product_name_mock.return_value = 'ProLiant BL460c Gen9'
self.assertRaisesRegexp(exception.IloCommandNotSupportedError,
'ProLiant BL460c Gen9',
self.client.delete_raid_configuration)
@mock.patch.object(ris.RISOperations, 'get_product_name')
def test_create_raid_configuration(self, product_name_mock):
ld1 = {"size_gb": 150, "raid_level": '0', "is_root_volume": True}
raid_config = {"logical_disks": [ld1]}
product_name_mock.return_value = 'ProLiant BL460c Gen9'
self.assertRaisesRegexp(exception.IloCommandNotSupportedError,
'ProLiant BL460c Gen9',
self.client.create_raid_configuration,
raid_config)
| 52.390121
| 79
| 0.665964
| 15,999
| 134,695
| 5.141571
| 0.03269
| 0.039497
| 0.062363
| 0.072429
| 0.930902
| 0.918612
| 0.896912
| 0.870131
| 0.85096
| 0.832653
| 0
| 0.008349
| 0.245035
| 134,695
| 2,570
| 80
| 52.410506
| 0.80058
| 0.006942
| 0
| 0.712522
| 0
| 0
| 0.113299
| 0.046628
| 0
| 0
| 0
| 0
| 0.178131
| 1
| 0.088183
| false
| 0.006173
| 0.005291
| 0
| 0.094797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dfb160435013835a2b602b142e8f593940ad600c
| 570
|
py
|
Python
|
eval_covid19china_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_0_GridDistortion.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_1_GridDistortion.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_2_GridDistortion.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_3_GridDistortion.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_4_GridDistortion.yml",
]
for l in ls:
os.system(l)
| 51.818182
| 108
| 0.854386
| 80
| 570
| 5.7125
| 0.3
| 0.109409
| 0.131291
| 0.207877
| 0.892779
| 0.892779
| 0.892779
| 0.892779
| 0.892779
| 0.892779
| 0
| 0.055659
| 0.054386
| 570
| 11
| 109
| 51.818182
| 0.792208
| 0
| 0
| 0
| 0
| 0
| 0.884413
| 0.665499
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dfcb2f303817b1e14d053e99496a20eeca96ccb0
| 27,098
|
py
|
Python
|
sdk/python/pulumi_alicloud/cfg/delivery_channel.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/cfg/delivery_channel.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/cfg/delivery_channel.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DeliveryChannelArgs', 'DeliveryChannel']
# NOTE: tfgen-generated input-args class (see file header) — hand edits
# would normally be overwritten on regeneration.
@pulumi.input_type
class DeliveryChannelArgs:
    def __init__(__self__, *,
                 delivery_channel_assume_role_arn: pulumi.Input[str],
                 delivery_channel_target_arn: pulumi.Input[str],
                 delivery_channel_type: pulumi.Input[str],
                 delivery_channel_condition: Optional[pulumi.Input[str]] = None,
                 delivery_channel_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a DeliveryChannel resource.
        :param pulumi.Input[str] delivery_channel_assume_role_arn: The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        :param pulumi.Input[str] delivery_channel_target_arn: - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
               - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
               - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
               - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        :param pulumi.Input[str] delivery_channel_type: - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        :param pulumi.Input[str] delivery_channel_condition: The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        :param pulumi.Input[str] delivery_channel_name: The name of the delivery channel.
        :param pulumi.Input[str] description: The description of the delivery method.
        :param pulumi.Input[int] status: The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        pulumi.set(__self__, "delivery_channel_assume_role_arn", delivery_channel_assume_role_arn)
        pulumi.set(__self__, "delivery_channel_target_arn", delivery_channel_target_arn)
        pulumi.set(__self__, "delivery_channel_type", delivery_channel_type)
        # Optional arguments are stored only when explicitly provided.
        if delivery_channel_condition is not None:
            pulumi.set(__self__, "delivery_channel_condition", delivery_channel_condition)
        if delivery_channel_name is not None:
            pulumi.set(__self__, "delivery_channel_name", delivery_channel_name)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if status is not None:
            pulumi.set(__self__, "status", status)
    @property
    @pulumi.getter(name="deliveryChannelAssumeRoleArn")
    def delivery_channel_assume_role_arn(self) -> pulumi.Input[str]:
        """
        The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        """
        return pulumi.get(self, "delivery_channel_assume_role_arn")
    @delivery_channel_assume_role_arn.setter
    def delivery_channel_assume_role_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "delivery_channel_assume_role_arn", value)
    @property
    @pulumi.getter(name="deliveryChannelTargetArn")
    def delivery_channel_target_arn(self) -> pulumi.Input[str]:
        """
        - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
        - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
        - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
        - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        """
        return pulumi.get(self, "delivery_channel_target_arn")
    @delivery_channel_target_arn.setter
    def delivery_channel_target_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "delivery_channel_target_arn", value)
    @property
    @pulumi.getter(name="deliveryChannelType")
    def delivery_channel_type(self) -> pulumi.Input[str]:
        """
        - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        """
        return pulumi.get(self, "delivery_channel_type")
    @delivery_channel_type.setter
    def delivery_channel_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "delivery_channel_type", value)
    @property
    @pulumi.getter(name="deliveryChannelCondition")
    def delivery_channel_condition(self) -> Optional[pulumi.Input[str]]:
        """
        The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        """
        return pulumi.get(self, "delivery_channel_condition")
    @delivery_channel_condition.setter
    def delivery_channel_condition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_condition", value)
    @property
    @pulumi.getter(name="deliveryChannelName")
    def delivery_channel_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the delivery channel.
        """
        return pulumi.get(self, "delivery_channel_name")
    @delivery_channel_name.setter
    def delivery_channel_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_name", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the delivery method.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[int]]:
        """
        The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class _DeliveryChannelState:
    # State bag used by DeliveryChannel.get() to look up / filter existing
    # resources. Every field is optional, unlike the creation-time args.
    def __init__(__self__, *,
                 delivery_channel_assume_role_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_condition: Optional[pulumi.Input[str]] = None,
                 delivery_channel_name: Optional[pulumi.Input[str]] = None,
                 delivery_channel_target_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_type: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering DeliveryChannel resources.
        :param pulumi.Input[str] delivery_channel_assume_role_arn: The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        :param pulumi.Input[str] delivery_channel_condition: The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        :param pulumi.Input[str] delivery_channel_name: The name of the delivery channel.
        :param pulumi.Input[str] delivery_channel_target_arn: - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
               - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
               - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
               - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        :param pulumi.Input[str] delivery_channel_type: - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        :param pulumi.Input[str] description: The description of the delivery method.
        :param pulumi.Input[int] status: The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        # Only record values that were actually supplied, so pulumi can
        # distinguish "unset" from an explicit value during lookups.
        if delivery_channel_assume_role_arn is not None:
            pulumi.set(__self__, "delivery_channel_assume_role_arn", delivery_channel_assume_role_arn)
        if delivery_channel_condition is not None:
            pulumi.set(__self__, "delivery_channel_condition", delivery_channel_condition)
        if delivery_channel_name is not None:
            pulumi.set(__self__, "delivery_channel_name", delivery_channel_name)
        if delivery_channel_target_arn is not None:
            pulumi.set(__self__, "delivery_channel_target_arn", delivery_channel_target_arn)
        if delivery_channel_type is not None:
            pulumi.set(__self__, "delivery_channel_type", delivery_channel_type)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter(name="deliveryChannelAssumeRoleArn")
    def delivery_channel_assume_role_arn(self) -> Optional[pulumi.Input[str]]:
        """
        The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        """
        return pulumi.get(self, "delivery_channel_assume_role_arn")

    @delivery_channel_assume_role_arn.setter
    def delivery_channel_assume_role_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_assume_role_arn", value)

    @property
    @pulumi.getter(name="deliveryChannelCondition")
    def delivery_channel_condition(self) -> Optional[pulumi.Input[str]]:
        """
        The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        """
        return pulumi.get(self, "delivery_channel_condition")

    @delivery_channel_condition.setter
    def delivery_channel_condition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_condition", value)

    @property
    @pulumi.getter(name="deliveryChannelName")
    def delivery_channel_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the delivery channel.
        """
        return pulumi.get(self, "delivery_channel_name")

    @delivery_channel_name.setter
    def delivery_channel_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_name", value)

    @property
    @pulumi.getter(name="deliveryChannelTargetArn")
    def delivery_channel_target_arn(self) -> Optional[pulumi.Input[str]]:
        """
        - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
          - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
          - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
          - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        """
        return pulumi.get(self, "delivery_channel_target_arn")

    @delivery_channel_target_arn.setter
    def delivery_channel_target_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_target_arn", value)

    @property
    @pulumi.getter(name="deliveryChannelType")
    def delivery_channel_type(self) -> Optional[pulumi.Input[str]]:
        """
        - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        """
        return pulumi.get(self, "delivery_channel_type")

    @delivery_channel_type.setter
    def delivery_channel_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "delivery_channel_type", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the delivery method.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[int]]:
        """
        The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "status", value)
class DeliveryChannel(pulumi.CustomResource):
    # Pulumi resource wrapping an Alicloud Config delivery channel
    # ('alicloud:cfg/deliveryChannel:DeliveryChannel').
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 delivery_channel_assume_role_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_condition: Optional[pulumi.Input[str]] = None,
                 delivery_channel_name: Optional[pulumi.Input[str]] = None,
                 delivery_channel_target_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_type: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        ## Import

        Alicloud Config Delivery Channel can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cfg/deliveryChannel:DeliveryChannel example cdc-49a2ad756057********
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] delivery_channel_assume_role_arn: The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        :param pulumi.Input[str] delivery_channel_condition: The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        :param pulumi.Input[str] delivery_channel_name: The name of the delivery channel.
        :param pulumi.Input[str] delivery_channel_target_arn: - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
               - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
               - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
               - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        :param pulumi.Input[str] delivery_channel_type: - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        :param pulumi.Input[str] description: The description of the delivery method.
        :param pulumi.Input[int] status: The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DeliveryChannelArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import

        Alicloud Config Delivery Channel can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cfg/deliveryChannel:DeliveryChannel example cdc-49a2ad756057********
        ```

        :param str resource_name: The name of the resource.
        :param DeliveryChannelArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # DeliveryChannelArgs bundle or plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(DeliveryChannelArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 delivery_channel_assume_role_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_condition: Optional[pulumi.Input[str]] = None,
                 delivery_channel_name: Optional[pulumi.Input[str]] = None,
                 delivery_channel_target_arn: Optional[pulumi.Input[str]] = None,
                 delivery_channel_type: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No provider id supplied -> creating a new resource (as opposed
            # to rehydrating an existing one via get()).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DeliveryChannelArgs.__new__(DeliveryChannelArgs)

            # assume_role_arn, target_arn and type are mandatory when creating;
            # the checks are skipped when opts.urn is set (engine-driven
            # reconstruction of an already-known resource).
            if delivery_channel_assume_role_arn is None and not opts.urn:
                raise TypeError("Missing required property 'delivery_channel_assume_role_arn'")
            __props__.__dict__["delivery_channel_assume_role_arn"] = delivery_channel_assume_role_arn
            __props__.__dict__["delivery_channel_condition"] = delivery_channel_condition
            __props__.__dict__["delivery_channel_name"] = delivery_channel_name
            if delivery_channel_target_arn is None and not opts.urn:
                raise TypeError("Missing required property 'delivery_channel_target_arn'")
            __props__.__dict__["delivery_channel_target_arn"] = delivery_channel_target_arn
            if delivery_channel_type is None and not opts.urn:
                raise TypeError("Missing required property 'delivery_channel_type'")
            __props__.__dict__["delivery_channel_type"] = delivery_channel_type
            __props__.__dict__["description"] = description
            __props__.__dict__["status"] = status
        super(DeliveryChannel, __self__).__init__(
            'alicloud:cfg/deliveryChannel:DeliveryChannel',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            delivery_channel_assume_role_arn: Optional[pulumi.Input[str]] = None,
            delivery_channel_condition: Optional[pulumi.Input[str]] = None,
            delivery_channel_name: Optional[pulumi.Input[str]] = None,
            delivery_channel_target_arn: Optional[pulumi.Input[str]] = None,
            delivery_channel_type: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[int]] = None) -> 'DeliveryChannel':
        """
        Get an existing DeliveryChannel resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] delivery_channel_assume_role_arn: The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        :param pulumi.Input[str] delivery_channel_condition: The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        :param pulumi.Input[str] delivery_channel_name: The name of the delivery channel.
        :param pulumi.Input[str] delivery_channel_target_arn: - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
               - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
               - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
               - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        :param pulumi.Input[str] delivery_channel_type: - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        :param pulumi.Input[str] description: The description of the delivery method.
        :param pulumi.Input[int] status: The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        # Force the lookup to target the given provider id.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _DeliveryChannelState.__new__(_DeliveryChannelState)

        __props__.__dict__["delivery_channel_assume_role_arn"] = delivery_channel_assume_role_arn
        __props__.__dict__["delivery_channel_condition"] = delivery_channel_condition
        __props__.__dict__["delivery_channel_name"] = delivery_channel_name
        __props__.__dict__["delivery_channel_target_arn"] = delivery_channel_target_arn
        __props__.__dict__["delivery_channel_type"] = delivery_channel_type
        __props__.__dict__["description"] = description
        __props__.__dict__["status"] = status
        return DeliveryChannel(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="deliveryChannelAssumeRoleArn")
    def delivery_channel_assume_role_arn(self) -> pulumi.Output[str]:
        """
        The Alibaba Cloud Resource Name (ARN) of the role to be assumed by the delivery method.
        """
        return pulumi.get(self, "delivery_channel_assume_role_arn")

    @property
    @pulumi.getter(name="deliveryChannelCondition")
    def delivery_channel_condition(self) -> pulumi.Output[str]:
        """
        The rule attached to the delivery method. This parameter is applicable only to delivery methods of the MNS type. Please refer to api [PutDeliveryChannel](https://www.alibabacloud.com/help/en/doc-detail/174253.htm) for example format.
        """
        return pulumi.get(self, "delivery_channel_condition")

    @property
    @pulumi.getter(name="deliveryChannelName")
    def delivery_channel_name(self) -> pulumi.Output[str]:
        """
        The name of the delivery channel.
        """
        return pulumi.get(self, "delivery_channel_name")

    @property
    @pulumi.getter(name="deliveryChannelTargetArn")
    def delivery_channel_target_arn(self) -> pulumi.Output[str]:
        """
        - The ARN of the delivery destination. This parameter is required when you create a delivery method. The value must be in one of the following formats:
          - `acs:oss:{RegionId}:{Aliuid}:{bucketName}`: if your delivery destination is an Object Storage Service (OSS) bucket.
          - `acs:mns:{RegionId}:{Aliuid}:/topics/{topicName}`: if your delivery destination is a Message Service (MNS) topic.
          - `acs:log:{RegionId}:{Aliuid}:project/{projectName}/logstore/{logstoreName}`: if your delivery destination is a Log Service Logstore.
        """
        return pulumi.get(self, "delivery_channel_target_arn")

    @property
    @pulumi.getter(name="deliveryChannelType")
    def delivery_channel_type(self) -> pulumi.Output[str]:
        """
        - The type of the delivery method. This parameter is required when you create a delivery method. Valid values: `OSS`: Object Storage, `MNS`: Message Service, `SLS`: Log Service.
        """
        return pulumi.get(self, "delivery_channel_type")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        The description of the delivery method.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[int]:
        """
        The status of the delivery method. Valid values: `0`: The delivery method is disabled., `1`: The delivery destination is enabled. This is the default value.
        """
        return pulumi.get(self, "status")
| 58.150215
| 294
| 0.692376
| 3,305
| 27,098
| 5.457489
| 0.06233
| 0.138881
| 0.062095
| 0.054887
| 0.909575
| 0.899318
| 0.891667
| 0.8813
| 0.874813
| 0.873039
| 0
| 0.003512
| 0.211934
| 27,098
| 465
| 295
| 58.275269
| 0.841114
| 0.425825
| 0
| 0.730038
| 1
| 0
| 0.142214
| 0.104027
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159696
| false
| 0.003802
| 0.019011
| 0
| 0.273764
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dfdae71401966a0ddb8c4f586e084cce31fb8620
| 81
|
py
|
Python
|
scheduler.py
|
kevinbett/Flight-Booking-API
|
c3cd828ac0b7e64447be7cc2f846639afcbe207b
|
[
"MIT"
] | null | null | null |
scheduler.py
|
kevinbett/Flight-Booking-API
|
c3cd828ac0b7e64447be7cc2f846639afcbe207b
|
[
"MIT"
] | null | null | null |
scheduler.py
|
kevinbett/Flight-Booking-API
|
c3cd828ac0b7e64447be7cc2f846639afcbe207b
|
[
"MIT"
] | null | null | null |
from api.v1.helpers.mail import handle_email


def scheduler():
    """Entry point for the periodic job.

    Delegates all work to ``handle_email`` — presumably dispatches pending
    e-mail notifications; see ``api.v1.helpers.mail`` to confirm.
    """
    handle_email()
| 20.25
| 44
| 0.765432
| 12
| 81
| 5
| 0.833333
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.135802
| 81
| 4
| 45
| 20.25
| 0.842857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a0b449a5e660ea224236e0a65da0d2d15dfc2122
| 6,761
|
py
|
Python
|
utils/quantiles.py
|
MikeLasz/Copula-Based-Normalizing-Flows
|
b13b6726de4ffe3791ac78d075ca4eeb4cb0c8a9
|
[
"MIT"
] | 1
|
2021-07-15T20:30:39.000Z
|
2021-07-15T20:30:39.000Z
|
utils/quantiles.py
|
MikeLasz/Copula-Based-Normalizing-Flows
|
b13b6726de4ffe3791ac78d075ca4eeb4cb0c8a9
|
[
"MIT"
] | null | null | null |
utils/quantiles.py
|
MikeLasz/Copula-Based-Normalizing-Flows
|
b13b6726de4ffe3791ac78d075ca4eeb4cb0c8a9
|
[
"MIT"
] | null | null | null |
import openturns as ot
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
def generate_quantiles(samples_true, samples_model, title, num_components=2):
    """Generates a Q-Plot given samples from 2 distributions.

    Saves two figures under ``plots/quantiles/``: per-component empirical
    quantile curves ("Qplot<title>.pdf") and the quantile curve of the
    Euclidean norm ||x|| ("Qplot<title>_norm.pdf").

    :param samples_true:  array of shape (n, num_components) drawn from the
                          reference distribution.
    :param samples_model: array of shape (m, num_components) drawn from the
                          model.
    :param title: suffix used in the output file names.
    :param num_components: number of marginal components to plot
                           (assumed <= 2 since only 2 subplots are created —
                           TODO confirm with callers).
    """
    def _clean(raw, label):
        # BUG FIX: the NaN count was previously taken AFTER filtering, so it
        # always printed 0. Count before dropping. (Also fixes the
        # "sampels" typo in the log message.)
        n_nan = np.isnan(raw).sum()
        print('{}: amount of discarded NAN-values: {}'.format(label, n_nan))
        return raw[~np.isnan(raw)]

    def _empirical_quantiles(data):
        # Empirical quantile function: sorted values against cumulative
        # probability u in [0, 1]. Requires len(data) >= 2.
        u = 1. * np.arange(len(data)) / (len(data) - 1)
        return u, np.sort(data)

    fig, axs = plt.subplots(1, 2, figsize=(15, 6))
    axs = axs.ravel()
    sns.set_context("paper", font_scale=3)
    for component in range(num_components):
        data_true = _clean(samples_true[:, component], 'True samples')
        data_model = _clean(samples_model[:, component], 'Model samples')
        u_true, q_true = _empirical_quantiles(data_true)
        u_model, q_model = _empirical_quantiles(data_model)
        ax = axs[component]
        ax.plot(u_true, q_true, label="truth", linewidth=4.0)
        ax.plot(u_model, q_model, label="model", linewidth=4.0, linestyle="--")
        ax.set_xlim([0, 1])
        ax.set_ylim([-10, 10])
        ax.set_xlabel("u", fontsize=25.0)
        ax.tick_params(axis="x", labelsize=20)
        ax.set_ylabel("Q(u)", fontsize=25.0)
        ax.tick_params(axis="y", labelsize=20)
    plt.tight_layout()
    plt.savefig("plots/quantiles/Qplot" + title + ".pdf")
    plt.clf()

    # Quantiles of ||x|| instead of marginal quantiles.
    data_true = _clean(np.linalg.norm(samples_true, axis=1), 'True samples')
    data_model = _clean(np.linalg.norm(samples_model, axis=1), 'Model samples')
    u_true, q_true = _empirical_quantiles(data_true)
    u_model, q_model = _empirical_quantiles(data_model)
    plt.figure(figsize=(7.5, 6))
    plt.plot(u_true, q_true, label="truth", linewidth=4.0)
    plt.plot(u_model, q_model, label="model", linewidth=4.0, linestyle="--")
    plt.xlim(0, 1)
    plt.ylim(0, 20)
    plt.xlabel("u")
    plt.ylabel("Q(u)")
    plt.tight_layout()
    plt.savefig("plots/quantiles/Qplot" + title + "_norm.pdf")
def generate_quantiles_2models(flow1, flow2, title, num_components=2):
    """Generates a Q-Plot, which compares the samples from flow1 and flow2 with
    the quantiles of a Gumbel Copula Distribution.

    The reference distribution is a 2d composed distribution with Student-t(2)
    marginals coupled by a Gumbel copula (theta = 2.5). Saves two figures under
    ``plots/quantiles/``: per-component quantile curves
    ("2in1Qplot<title>.pdf") and the quantile curve of the Euclidean norm
    ||x|| ("2in1Qplot<title>_norm.pdf").

    :param flow1: first model; must provide ``.sample(n)`` whose result
                  supports ``.detach().numpy()`` (torch-like — TODO confirm).
    :param flow2: second model, same interface as ``flow1``.
    :param title: suffix used in the output file names.
    :param num_components: number of marginal components to plot
                           (assumed <= 2 since only 2 subplots are created).
    """
    def _clean(raw, label):
        # BUG FIX: the NaN count was previously taken AFTER filtering, so it
        # always printed 0. Count before dropping. (Also fixes the
        # "sampels" typo in the log message.)
        n_nan = np.isnan(raw).sum()
        print('{}: amount of discarded NAN-values: {}'.format(label, n_nan))
        return raw[~np.isnan(raw)]

    def _empirical_quantiles(data):
        # Empirical quantile function: sorted values against cumulative
        # probability u in [0, 1]. Requires len(data) >= 2.
        u = 1. * np.arange(len(data)) / (len(data) - 1)
        return u, np.sort(data)

    # Reference distribution: Student-t(2) marginals + Gumbel copula.
    copula = ot.GumbelCopula(2.5)
    x1 = ot.Student(2, 0, 1)
    x2 = ot.Student(2, 0, 1)
    X = ot.ComposedDistribution([x1, x2], copula)
    samples_true = np.array(X.getSample(1000))
    samples_model1 = np.array(flow1.sample(1000).detach().numpy())
    samples_model2 = np.array(flow2.sample(1000).detach().numpy())

    fig, axs = plt.subplots(1, 2, figsize=(15, 6))
    axs = axs.ravel()
    sns.set_context("paper", font_scale=3)
    for component in range(num_components):
        data_true = _clean(samples_true[:, component], 'True samples')
        data_model1 = _clean(samples_model1[:, component], 'Model 1')
        data_model2 = _clean(samples_model2[:, component], 'Model 2')
        u_true, q_true = _empirical_quantiles(data_true)
        u_model1, q_model1 = _empirical_quantiles(data_model1)
        u_model2, q_model2 = _empirical_quantiles(data_model2)
        ax = axs[component]
        ax.plot(u_true, q_true, label="truth", linewidth=4.0)
        ax.plot(u_model1, q_model1, label="model1", linewidth=4.0, linestyle="--")
        ax.plot(u_model2, q_model2, label="model2", linewidth=4.0, linestyle="dotted")
        ax.set_xlim([0, 1])
        ax.set_ylim([-10, 10])
        ax.set_xlabel("u", fontsize=25.0)
        # BUG FIX: tick_params was called with axis="y" twice, leaving the
        # x-axis ticks unstyled; use axis="x" here (matches
        # generate_quantiles above).
        ax.tick_params(axis="x", labelsize=20)
        ax.set_ylabel("Q(u)", fontsize=25.0)
        ax.tick_params(axis="y", labelsize=20)
    plt.tight_layout()
    plt.savefig("plots/quantiles/2in1Qplot" + title + ".pdf")
    plt.clf()

    # Quantiles of ||x|| instead of marginal quantiles.
    data_true = _clean(np.linalg.norm(samples_true, axis=1), 'True samples')
    data_model1 = _clean(np.linalg.norm(samples_model1, axis=1), 'Model 1')
    data_model2 = _clean(np.linalg.norm(samples_model2, axis=1), 'Model 2')
    u_true, q_true = _empirical_quantiles(data_true)
    u_model1, q_model1 = _empirical_quantiles(data_model1)
    u_model2, q_model2 = _empirical_quantiles(data_model2)
    plt.figure(figsize=(7.5, 6))
    plt.plot(u_true, q_true, label="truth", linewidth=4.0)
    plt.plot(u_model1, q_model1, label="model1", linewidth=4.0, linestyle="--")
    plt.plot(u_model2, q_model2, label="model2", linewidth=4.0, linestyle="dotted")
    # Consistency fix: generate_quantiles limits u to [0, 1] on the norm plot;
    # this function previously omitted the x-limit.
    plt.xlim(0, 1)
    plt.ylim(0, 20)
    plt.xlabel("u")
    plt.ylabel("Q(u)")
    plt.tight_layout()
    plt.savefig("plots/quantiles/2in1Qplot" + title + "_norm.pdf")
| 44.774834
| 108
| 0.666913
| 986
| 6,761
| 4.38641
| 0.128803
| 0.073988
| 0.050867
| 0.046243
| 0.852717
| 0.84578
| 0.843006
| 0.843006
| 0.827283
| 0.818497
| 0
| 0.036583
| 0.179263
| 6,761
| 151
| 109
| 44.774834
| 0.742837
| 0.065523
| 0
| 0.705357
| 1
| 0
| 0.109397
| 0.014629
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0.035714
| 0
| 0.053571
| 0.089286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
260a815bb25a6544da35b9e1fc69d72aef158a3f
| 140
|
py
|
Python
|
cosmogrb/io/__init__.py
|
wematthias/cosmogrb
|
09852eb4e6e7315bbede507e19a2d57f1b927c3f
|
[
"BSD-2-Clause"
] | 3
|
2020-03-08T18:20:32.000Z
|
2022-03-10T17:27:26.000Z
|
cosmogrb/io/__init__.py
|
wematthias/cosmogrb
|
09852eb4e6e7315bbede507e19a2d57f1b927c3f
|
[
"BSD-2-Clause"
] | 11
|
2020-03-04T17:21:15.000Z
|
2020-06-09T12:20:00.000Z
|
cosmogrb/io/__init__.py
|
wematthias/cosmogrb
|
09852eb4e6e7315bbede507e19a2d57f1b927c3f
|
[
"BSD-2-Clause"
] | 5
|
2020-03-18T18:05:05.000Z
|
2022-03-21T16:06:38.000Z
|
from cosmogrb.io.grb_save import GRBSave
from cosmogrb.io.gbm_fits import grbsave_to_gbm_fits

# Public API of the cosmogrb.io subpackage.
__all__ = ["GRBSave", "grbsave_to_gbm_fits"]
| 28
| 52
| 0.821429
| 23
| 140
| 4.478261
| 0.478261
| 0.203884
| 0.271845
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092857
| 140
| 4
| 53
| 35
| 0.811024
| 0
| 0
| 0
| 0
| 0
| 0.185714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
261089d0a64c9b12059e2576f06d6cca9fb00ec8
| 43,524
|
py
|
Python
|
sdk/python/pulumi_oci/opsi/database_insight.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/opsi/database_insight.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/opsi/database_insight.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DatabaseInsightArgs', 'DatabaseInsight']
@pulumi.input_type
class DatabaseInsightArgs:
def __init__(__self__, *,
compartment_id: pulumi.Input[str],
enterprise_manager_bridge_id: pulumi.Input[str],
enterprise_manager_entity_identifier: pulumi.Input[str],
enterprise_manager_identifier: pulumi.Input[str],
entity_source: pulumi.Input[str],
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
status: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DatabaseInsight resource.
:param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier of database
:param pulumi.Input[str] enterprise_manager_bridge_id: OPSI Enterprise Manager Bridge OCID
:param pulumi.Input[str] enterprise_manager_entity_identifier: Enterprise Manager Entity Unique Identifier
:param pulumi.Input[str] enterprise_manager_identifier: Enterprise Manager Unqiue Identifier
:param pulumi.Input[str] entity_source: (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[str] status: (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "enterprise_manager_bridge_id", enterprise_manager_bridge_id)
pulumi.set(__self__, "enterprise_manager_entity_identifier", enterprise_manager_entity_identifier)
pulumi.set(__self__, "enterprise_manager_identifier", enterprise_manager_identifier)
pulumi.set(__self__, "entity_source", entity_source)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Input[str]:
"""
(Updatable) Compartment Identifier of database
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: pulumi.Input[str]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="enterpriseManagerBridgeId")
def enterprise_manager_bridge_id(self) -> pulumi.Input[str]:
"""
OPSI Enterprise Manager Bridge OCID
"""
return pulumi.get(self, "enterprise_manager_bridge_id")
@enterprise_manager_bridge_id.setter
def enterprise_manager_bridge_id(self, value: pulumi.Input[str]):
pulumi.set(self, "enterprise_manager_bridge_id", value)
@property
@pulumi.getter(name="enterpriseManagerEntityIdentifier")
def enterprise_manager_entity_identifier(self) -> pulumi.Input[str]:
"""
Enterprise Manager Entity Unique Identifier
"""
return pulumi.get(self, "enterprise_manager_entity_identifier")
@enterprise_manager_entity_identifier.setter
def enterprise_manager_entity_identifier(self, value: pulumi.Input[str]):
pulumi.set(self, "enterprise_manager_entity_identifier", value)
@property
@pulumi.getter(name="enterpriseManagerIdentifier")
def enterprise_manager_identifier(self) -> pulumi.Input[str]:
"""
Enterprise Manager Unqiue Identifier
"""
return pulumi.get(self, "enterprise_manager_identifier")
@enterprise_manager_identifier.setter
def enterprise_manager_identifier(self, value: pulumi.Input[str]):
pulumi.set(self, "enterprise_manager_identifier", value)
    # Required input; maps the wire name "entitySource" to this property.
    @property
    @pulumi.getter(name="entitySource")
    def entity_source(self) -> pulumi.Input[str]:
        """
        (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
        """
        return pulumi.get(self, "entity_source")
    @entity_source.setter
    def entity_source(self, value: pulumi.Input[str]):
        pulumi.set(self, "entity_source", value)
    # Optional input; maps the wire name "definedTags" to this property.
    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")
    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)
    # Optional input; maps the wire name "freeformTags" to this property.
    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")
    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)
    # Optional input; wire name matches the Python name, so pulumi.getter needs no name=.
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class _DatabaseInsightState:
    """
    Internal state type for DatabaseInsight: every field is optional because it is
    used both to look up existing resources (``DatabaseInsight.get``) and to carry
    provider-populated outputs alongside user inputs.
    """
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 database_display_name: Optional[pulumi.Input[str]] = None,
                 database_id: Optional[pulumi.Input[str]] = None,
                 database_name: Optional[pulumi.Input[str]] = None,
                 database_type: Optional[pulumi.Input[str]] = None,
                 database_version: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 enterprise_manager_bridge_id: Optional[pulumi.Input[str]] = None,
                 enterprise_manager_entity_display_name: Optional[pulumi.Input[str]] = None,
                 enterprise_manager_entity_identifier: Optional[pulumi.Input[str]] = None,
                 enterprise_manager_entity_name: Optional[pulumi.Input[str]] = None,
                 enterprise_manager_entity_type: Optional[pulumi.Input[str]] = None,
                 enterprise_manager_identifier: Optional[pulumi.Input[str]] = None,
                 entity_source: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 lifecycle_details: Optional[pulumi.Input[str]] = None,
                 processor_count: Optional[pulumi.Input[int]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering DatabaseInsight resources.
        :param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier of database
        :param pulumi.Input[str] database_display_name: Display name of database
        :param pulumi.Input[str] database_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the database.
        :param pulumi.Input[str] database_name: Name of database
        :param pulumi.Input[str] database_type: Operations Insights internal representation of the database type.
        :param pulumi.Input[str] database_version: The version of the database.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param pulumi.Input[str] enterprise_manager_bridge_id: OPSI Enterprise Manager Bridge OCID
        :param pulumi.Input[str] enterprise_manager_entity_display_name: Enterprise Manager Entity Display Name
        :param pulumi.Input[str] enterprise_manager_entity_identifier: Enterprise Manager Entity Unique Identifier
        :param pulumi.Input[str] enterprise_manager_entity_name: Enterprise Manager Entity Name
        :param pulumi.Input[str] enterprise_manager_entity_type: Enterprise Manager Entity Type
        :param pulumi.Input[str] enterprise_manager_identifier: Enterprise Manager Unique Identifier
        :param pulumi.Input[str] entity_source: (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        :param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input[int] processor_count: Processor count.
        :param pulumi.Input[str] state: The current state of the database.
        :param pulumi.Input[str] status: (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
        :param pulumi.Input[Mapping[str, Any]] system_tags: System tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"orcl-cloud.free-tier-retained": "true"}`
        :param pulumi.Input[str] time_created: The time the database insight was first enabled. An RFC3339 formatted datetime string
        :param pulumi.Input[str] time_updated: The time the database insight was updated. An RFC3339 formatted datetime string
        """
        # Only record fields that were explicitly provided; unset fields stay absent
        # so partial state lookups do not overwrite existing values with None.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if database_display_name is not None:
            pulumi.set(__self__, "database_display_name", database_display_name)
        if database_id is not None:
            pulumi.set(__self__, "database_id", database_id)
        if database_name is not None:
            pulumi.set(__self__, "database_name", database_name)
        if database_type is not None:
            pulumi.set(__self__, "database_type", database_type)
        if database_version is not None:
            pulumi.set(__self__, "database_version", database_version)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if enterprise_manager_bridge_id is not None:
            pulumi.set(__self__, "enterprise_manager_bridge_id", enterprise_manager_bridge_id)
        if enterprise_manager_entity_display_name is not None:
            pulumi.set(__self__, "enterprise_manager_entity_display_name", enterprise_manager_entity_display_name)
        if enterprise_manager_entity_identifier is not None:
            pulumi.set(__self__, "enterprise_manager_entity_identifier", enterprise_manager_entity_identifier)
        if enterprise_manager_entity_name is not None:
            pulumi.set(__self__, "enterprise_manager_entity_name", enterprise_manager_entity_name)
        if enterprise_manager_entity_type is not None:
            pulumi.set(__self__, "enterprise_manager_entity_type", enterprise_manager_entity_type)
        if enterprise_manager_identifier is not None:
            pulumi.set(__self__, "enterprise_manager_identifier", enterprise_manager_identifier)
        if entity_source is not None:
            pulumi.set(__self__, "entity_source", entity_source)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if lifecycle_details is not None:
            pulumi.set(__self__, "lifecycle_details", lifecycle_details)
        if processor_count is not None:
            pulumi.set(__self__, "processor_count", processor_count)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if system_tags is not None:
            pulumi.set(__self__, "system_tags", system_tags)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)
        if time_updated is not None:
            pulumi.set(__self__, "time_updated", time_updated)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Compartment Identifier of database
        """
        return pulumi.get(self, "compartment_id")
    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)
    @property
    @pulumi.getter(name="databaseDisplayName")
    def database_display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name of database
        """
        return pulumi.get(self, "database_display_name")
    @database_display_name.setter
    def database_display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_display_name", value)
    @property
    @pulumi.getter(name="databaseId")
    def database_id(self) -> Optional[pulumi.Input[str]]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the database.
        """
        return pulumi.get(self, "database_id")
    @database_id.setter
    def database_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_id", value)
    @property
    @pulumi.getter(name="databaseName")
    def database_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of database
        """
        return pulumi.get(self, "database_name")
    @database_name.setter
    def database_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_name", value)
    @property
    @pulumi.getter(name="databaseType")
    def database_type(self) -> Optional[pulumi.Input[str]]:
        """
        Operations Insights internal representation of the database type.
        """
        return pulumi.get(self, "database_type")
    @database_type.setter
    def database_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_type", value)
    @property
    @pulumi.getter(name="databaseVersion")
    def database_version(self) -> Optional[pulumi.Input[str]]:
        """
        The version of the database.
        """
        return pulumi.get(self, "database_version")
    @database_version.setter
    def database_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "database_version", value)
    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")
    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)
    @property
    @pulumi.getter(name="enterpriseManagerBridgeId")
    def enterprise_manager_bridge_id(self) -> Optional[pulumi.Input[str]]:
        """
        OPSI Enterprise Manager Bridge OCID
        """
        return pulumi.get(self, "enterprise_manager_bridge_id")
    @enterprise_manager_bridge_id.setter
    def enterprise_manager_bridge_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_bridge_id", value)
    @property
    @pulumi.getter(name="enterpriseManagerEntityDisplayName")
    def enterprise_manager_entity_display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Enterprise Manager Entity Display Name
        """
        return pulumi.get(self, "enterprise_manager_entity_display_name")
    @enterprise_manager_entity_display_name.setter
    def enterprise_manager_entity_display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_entity_display_name", value)
    @property
    @pulumi.getter(name="enterpriseManagerEntityIdentifier")
    def enterprise_manager_entity_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        Enterprise Manager Entity Unique Identifier
        """
        return pulumi.get(self, "enterprise_manager_entity_identifier")
    @enterprise_manager_entity_identifier.setter
    def enterprise_manager_entity_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_entity_identifier", value)
    @property
    @pulumi.getter(name="enterpriseManagerEntityName")
    def enterprise_manager_entity_name(self) -> Optional[pulumi.Input[str]]:
        """
        Enterprise Manager Entity Name
        """
        return pulumi.get(self, "enterprise_manager_entity_name")
    @enterprise_manager_entity_name.setter
    def enterprise_manager_entity_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_entity_name", value)
    @property
    @pulumi.getter(name="enterpriseManagerEntityType")
    def enterprise_manager_entity_type(self) -> Optional[pulumi.Input[str]]:
        """
        Enterprise Manager Entity Type
        """
        return pulumi.get(self, "enterprise_manager_entity_type")
    @enterprise_manager_entity_type.setter
    def enterprise_manager_entity_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_entity_type", value)
    @property
    @pulumi.getter(name="enterpriseManagerIdentifier")
    def enterprise_manager_identifier(self) -> Optional[pulumi.Input[str]]:
        """
        Enterprise Manager Unique Identifier
        """
        return pulumi.get(self, "enterprise_manager_identifier")
    @enterprise_manager_identifier.setter
    def enterprise_manager_identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "enterprise_manager_identifier", value)
    @property
    @pulumi.getter(name="entitySource")
    def entity_source(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
        """
        return pulumi.get(self, "entity_source")
    @entity_source.setter
    def entity_source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "entity_source", value)
    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")
    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)
    @property
    @pulumi.getter(name="lifecycleDetails")
    def lifecycle_details(self) -> Optional[pulumi.Input[str]]:
        """
        A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        """
        return pulumi.get(self, "lifecycle_details")
    @lifecycle_details.setter
    def lifecycle_details(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "lifecycle_details", value)
    @property
    @pulumi.getter(name="processorCount")
    def processor_count(self) -> Optional[pulumi.Input[int]]:
        """
        Processor count.
        """
        return pulumi.get(self, "processor_count")
    @processor_count.setter
    def processor_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "processor_count", value)
    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The current state of the database.
        """
        return pulumi.get(self, "state")
    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
        """
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter(name="systemTags")
    def system_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        System tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"orcl-cloud.free-tier-retained": "true"}`
        """
        return pulumi.get(self, "system_tags")
    @system_tags.setter
    def system_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "system_tags", value)
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The time the database insight was first enabled. An RFC3339 formatted datetime string
        """
        return pulumi.get(self, "time_created")
    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)
    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """
        The time the database insight was updated. An RFC3339 formatted datetime string
        """
        return pulumi.get(self, "time_updated")
    @time_updated.setter
    def time_updated(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_updated", value)
class DatabaseInsight(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
enterprise_manager_bridge_id: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_identifier: Optional[pulumi.Input[str]] = None,
enterprise_manager_identifier: Optional[pulumi.Input[str]] = None,
entity_source: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
status: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
## Import
DatabaseInsights can be imported using the `id`, e.g.
```sh
$ pulumi import oci:opsi/databaseInsight:DatabaseInsight test_database_insight "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier of database
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] enterprise_manager_bridge_id: OPSI Enterprise Manager Bridge OCID
:param pulumi.Input[str] enterprise_manager_entity_identifier: Enterprise Manager Entity Unique Identifier
:param pulumi.Input[str] enterprise_manager_identifier: Enterprise Manager Unqiue Identifier
:param pulumi.Input[str] entity_source: (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[str] status: (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatabaseInsightArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
DatabaseInsights can be imported using the `id`, e.g.
```sh
$ pulumi import oci:opsi/databaseInsight:DatabaseInsight test_database_insight "id"
```
:param str resource_name: The name of the resource.
:param DatabaseInsightArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatabaseInsightArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
enterprise_manager_bridge_id: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_identifier: Optional[pulumi.Input[str]] = None,
enterprise_manager_identifier: Optional[pulumi.Input[str]] = None,
entity_source: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
status: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatabaseInsightArgs.__new__(DatabaseInsightArgs)
if compartment_id is None and not opts.urn:
raise TypeError("Missing required property 'compartment_id'")
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["defined_tags"] = defined_tags
if enterprise_manager_bridge_id is None and not opts.urn:
raise TypeError("Missing required property 'enterprise_manager_bridge_id'")
__props__.__dict__["enterprise_manager_bridge_id"] = enterprise_manager_bridge_id
if enterprise_manager_entity_identifier is None and not opts.urn:
raise TypeError("Missing required property 'enterprise_manager_entity_identifier'")
__props__.__dict__["enterprise_manager_entity_identifier"] = enterprise_manager_entity_identifier
if enterprise_manager_identifier is None and not opts.urn:
raise TypeError("Missing required property 'enterprise_manager_identifier'")
__props__.__dict__["enterprise_manager_identifier"] = enterprise_manager_identifier
if entity_source is None and not opts.urn:
raise TypeError("Missing required property 'entity_source'")
__props__.__dict__["entity_source"] = entity_source
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["status"] = status
__props__.__dict__["database_display_name"] = None
__props__.__dict__["database_id"] = None
__props__.__dict__["database_name"] = None
__props__.__dict__["database_type"] = None
__props__.__dict__["database_version"] = None
__props__.__dict__["enterprise_manager_entity_display_name"] = None
__props__.__dict__["enterprise_manager_entity_name"] = None
__props__.__dict__["enterprise_manager_entity_type"] = None
__props__.__dict__["lifecycle_details"] = None
__props__.__dict__["processor_count"] = None
__props__.__dict__["state"] = None
__props__.__dict__["system_tags"] = None
__props__.__dict__["time_created"] = None
__props__.__dict__["time_updated"] = None
super(DatabaseInsight, __self__).__init__(
'oci:opsi/databaseInsight:DatabaseInsight',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
database_display_name: Optional[pulumi.Input[str]] = None,
database_id: Optional[pulumi.Input[str]] = None,
database_name: Optional[pulumi.Input[str]] = None,
database_type: Optional[pulumi.Input[str]] = None,
database_version: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
enterprise_manager_bridge_id: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_display_name: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_identifier: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_name: Optional[pulumi.Input[str]] = None,
enterprise_manager_entity_type: Optional[pulumi.Input[str]] = None,
enterprise_manager_identifier: Optional[pulumi.Input[str]] = None,
entity_source: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
processor_count: Optional[pulumi.Input[int]] = None,
state: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None) -> 'DatabaseInsight':
"""
Get an existing DatabaseInsight resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) Compartment Identifier of database
:param pulumi.Input[str] database_display_name: Display name of database
:param pulumi.Input[str] database_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the database.
:param pulumi.Input[str] database_name: Name of database
:param pulumi.Input[str] database_type: Operations Insights internal representation of the database type.
:param pulumi.Input[str] database_version: The version of the database.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] enterprise_manager_bridge_id: OPSI Enterprise Manager Bridge OCID
:param pulumi.Input[str] enterprise_manager_entity_display_name: Enterprise Manager Entity Display Name
:param pulumi.Input[str] enterprise_manager_entity_identifier: Enterprise Manager Entity Unique Identifier
:param pulumi.Input[str] enterprise_manager_entity_name: Enterprise Manager Entity Name
:param pulumi.Input[str] enterprise_manager_entity_type: Enterprise Manager Entity Type
:param pulumi.Input[str] enterprise_manager_identifier: Enterprise Manager Unqiue Identifier
:param pulumi.Input[str] entity_source: (Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:param pulumi.Input[int] processor_count: Processor count.
:param pulumi.Input[str] state: The current state of the database.
:param pulumi.Input[str] status: (Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
:param pulumi.Input[Mapping[str, Any]] system_tags: System tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"orcl-cloud.free-tier-retained": "true"}`
:param pulumi.Input[str] time_created: The time the the database insight was first enabled. An RFC3339 formatted datetime string
:param pulumi.Input[str] time_updated: The time the database insight was updated. An RFC3339 formatted datetime string
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DatabaseInsightState.__new__(_DatabaseInsightState)
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["database_display_name"] = database_display_name
__props__.__dict__["database_id"] = database_id
__props__.__dict__["database_name"] = database_name
__props__.__dict__["database_type"] = database_type
__props__.__dict__["database_version"] = database_version
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["enterprise_manager_bridge_id"] = enterprise_manager_bridge_id
__props__.__dict__["enterprise_manager_entity_display_name"] = enterprise_manager_entity_display_name
__props__.__dict__["enterprise_manager_entity_identifier"] = enterprise_manager_entity_identifier
__props__.__dict__["enterprise_manager_entity_name"] = enterprise_manager_entity_name
__props__.__dict__["enterprise_manager_entity_type"] = enterprise_manager_entity_type
__props__.__dict__["enterprise_manager_identifier"] = enterprise_manager_identifier
__props__.__dict__["entity_source"] = entity_source
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["lifecycle_details"] = lifecycle_details
__props__.__dict__["processor_count"] = processor_count
__props__.__dict__["state"] = state
__props__.__dict__["status"] = status
__props__.__dict__["system_tags"] = system_tags
__props__.__dict__["time_created"] = time_created
__props__.__dict__["time_updated"] = time_updated
return DatabaseInsight(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) Compartment Identifier of database
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="databaseDisplayName")
def database_display_name(self) -> pulumi.Output[str]:
"""
Display name of database
"""
return pulumi.get(self, "database_display_name")
@property
@pulumi.getter(name="databaseId")
def database_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the database.
"""
return pulumi.get(self, "database_id")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> pulumi.Output[str]:
"""
Name of database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="databaseType")
def database_type(self) -> pulumi.Output[str]:
"""
Operations Insights internal representation of the database type.
"""
return pulumi.get(self, "database_type")
@property
@pulumi.getter(name="databaseVersion")
def database_version(self) -> pulumi.Output[str]:
"""
The version of the database.
"""
return pulumi.get(self, "database_version")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="enterpriseManagerBridgeId")
def enterprise_manager_bridge_id(self) -> pulumi.Output[str]:
"""
OPSI Enterprise Manager Bridge OCID
"""
return pulumi.get(self, "enterprise_manager_bridge_id")
@property
@pulumi.getter(name="enterpriseManagerEntityDisplayName")
def enterprise_manager_entity_display_name(self) -> pulumi.Output[str]:
"""
Enterprise Manager Entity Display Name
"""
return pulumi.get(self, "enterprise_manager_entity_display_name")
@property
@pulumi.getter(name="enterpriseManagerEntityIdentifier")
def enterprise_manager_entity_identifier(self) -> pulumi.Output[str]:
"""
Enterprise Manager Entity Unique Identifier
"""
return pulumi.get(self, "enterprise_manager_entity_identifier")
@property
@pulumi.getter(name="enterpriseManagerEntityName")
def enterprise_manager_entity_name(self) -> pulumi.Output[str]:
"""
Enterprise Manager Entity Name
"""
return pulumi.get(self, "enterprise_manager_entity_name")
@property
@pulumi.getter(name="enterpriseManagerEntityType")
def enterprise_manager_entity_type(self) -> pulumi.Output[str]:
"""
Enterprise Manager Entity Type
"""
return pulumi.get(self, "enterprise_manager_entity_type")
@property
@pulumi.getter(name="enterpriseManagerIdentifier")
def enterprise_manager_identifier(self) -> pulumi.Output[str]:
"""
Enterprise Manager Unqiue Identifier
"""
return pulumi.get(self, "enterprise_manager_identifier")
@property
@pulumi.getter(name="entitySource")
def entity_source(self) -> pulumi.Output[str]:
"""
(Updatable) Source of the database entity. The supported type is "EM_MANAGED_EXTERNAL_DATABASE"
"""
return pulumi.get(self, "entity_source")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> pulumi.Output[str]:
"""
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
"""
return pulumi.get(self, "lifecycle_details")
@property
@pulumi.getter(name="processorCount")
def processor_count(self) -> pulumi.Output[int]:
"""
Processor count.
"""
return pulumi.get(self, "processor_count")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of the database.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
(Updatable) Status of the resource. Example: "ENABLED", "DISABLED". Resource can be either enabled or disabled by updating the value of status field to either "ENABLED" or "DISABLED"
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> pulumi.Output[Mapping[str, Any]]:
    """
    System tags for this resource; each key is predefined and scoped to a
    namespace. Example: `{"orcl-cloud.free-tier-retained": "true"}`
    """
    tags = pulumi.get(self, "system_tags")
    return tags
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
    """
    The time the database insight was first enabled, as an RFC3339
    formatted datetime string.
    """
    created = pulumi.get(self, "time_created")
    return created
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
    """
    The time the database insight was last updated, as an RFC3339
    formatted datetime string.
    """
    updated = pulumi.get(self, "time_updated")
    return updated
| 49.124153
| 224
| 0.6819
| 5,003
| 43,524
| 5.656206
| 0.047372
| 0.074634
| 0.0752
| 0.067637
| 0.908757
| 0.878366
| 0.856845
| 0.831083
| 0.808926
| 0.747049
| 0
| 0.000968
| 0.216731
| 43,524
| 885
| 225
| 49.179661
| 0.829104
| 0.293286
| 0
| 0.593985
| 1
| 0
| 0.144443
| 0.073574
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167293
| false
| 0.00188
| 0.009399
| 0
| 0.281955
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26320a5abfa5d3156487b9d92c9d194d69b7aed7
| 1,667
|
py
|
Python
|
tests/test_unit_transport_database.py
|
jkyeung/python-itoolkit
|
ca11d532220bb0fff7afca540b46bd179b6f2b2d
|
[
"MIT"
] | 1
|
2019-06-07T17:24:23.000Z
|
2019-06-07T17:24:23.000Z
|
tests/test_unit_transport_database.py
|
jkyeung/python-itoolkit
|
ca11d532220bb0fff7afca540b46bd179b6f2b2d
|
[
"MIT"
] | null | null | null |
tests/test_unit_transport_database.py
|
jkyeung/python-itoolkit
|
ca11d532220bb0fff7afca540b46bd179b6f2b2d
|
[
"MIT"
] | null | null | null |
from itoolkit import iToolKit
from itoolkit.transport import DatabaseTransport
def test_database_transport_callproc(database_callproc):
    """The transport drives the connection via callproc and iterates the cursor."""
    toolkit = iToolKit()
    transport = DatabaseTransport(database_callproc)
    result = transport.call(toolkit)
    assert isinstance(result, (bytes, str))
    mock_cursor = database_callproc.cursor()
    mock_cursor.callproc.assert_called_once()
    mock_cursor.__iter__.assert_called_once()
def test_database_transport_execute(database_execute):
    """The transport drives the connection via execute and iterates the cursor."""
    toolkit = iToolKit()
    transport = DatabaseTransport(database_execute)
    result = transport.call(toolkit)
    assert isinstance(result, (bytes, str))
    mock_cursor = database_execute.cursor()
    mock_cursor.execute.assert_called_once()
    mock_cursor.__iter__.assert_called_once()
def test_database_transport_execute_schema(database_execute):
    """Passing schema=... routes the schema name into the SQL given to execute."""
    my_schema = 'MYSCHEMA'
    transport = DatabaseTransport(database_execute, schema=my_schema)
    toolkit = iToolKit()
    result = transport.call(toolkit)
    assert isinstance(result, (bytes, str))
    mock_cursor = database_execute.cursor()
    mock_cursor.execute.assert_called_once()
    mock_cursor.__iter__.assert_called_once()
    positional_args = mock_cursor.execute.call_args[0]
    assert len(positional_args) > 0
    assert my_schema in positional_args[0]
def test_database_transport_callproc_schema(database_execute):
    """Schema handling for the callproc path.

    NOTE(review): despite its name, this test mirrors the execute-schema
    test exactly -- it uses the ``database_execute`` fixture and asserts on
    ``cursor.execute``, not ``cursor.callproc``. Confirm this is intended.
    """
    my_schema = 'MYSCHEMA'
    transport = DatabaseTransport(database_execute, schema=my_schema)
    toolkit = iToolKit()
    result = transport.call(toolkit)
    assert isinstance(result, (bytes, str))
    mock_cursor = database_execute.cursor()
    mock_cursor.execute.assert_called_once()
    mock_cursor.__iter__.assert_called_once()
    positional_args = mock_cursor.execute.call_args[0]
    assert len(positional_args) > 0
    assert my_schema in positional_args[0]
| 26.460317
| 66
| 0.741452
| 197
| 1,667
| 5.959391
| 0.137056
| 0.114991
| 0.109029
| 0.081772
| 0.819421
| 0.764906
| 0.764906
| 0.764906
| 0.764906
| 0.764906
| 0
| 0.005727
| 0.161968
| 1,667
| 62
| 67
| 26.887097
| 0.834646
| 0
| 0
| 0.75
| 0
| 0
| 0.009598
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.1
| false
| 0
| 0.05
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd0ebfdff586ab340a4760c9a98cdd50e18c10fe
| 204
|
py
|
Python
|
remove-context-menu.py
|
ZaphodElevated/Pikturit
|
031e26555de240e051fbcc2244168ae24504baca
|
[
"Apache-2.0"
] | 2
|
2021-03-31T02:00:46.000Z
|
2021-04-10T15:16:08.000Z
|
remove-context-menu.py
|
ZaphodElevated/Pikturit
|
031e26555de240e051fbcc2244168ae24504baca
|
[
"Apache-2.0"
] | null | null | null |
remove-context-menu.py
|
ZaphodElevated/Pikturit
|
031e26555de240e051fbcc2244168ae24504baca
|
[
"Apache-2.0"
] | null | null | null |
import winreg

# Remove the "Pikturit" image context-menu entry from the Windows registry.
# The child "command" key must be deleted before its parent: winreg.DeleteKey
# cannot remove a key that still has subkeys. Raises OSError/FileNotFoundError
# if the keys do not exist; requires write access to HKEY_CLASSES_ROOT.
winreg.DeleteKey(winreg.HKEY_CLASSES_ROOT, r'SystemFileAssociations\image\shell\Pikturit\command')
winreg.DeleteKey(winreg.HKEY_CLASSES_ROOT, r'SystemFileAssociations\image\shell\Pikturit')
| 51
| 98
| 0.867647
| 25
| 204
| 6.92
| 0.48
| 0.17341
| 0.242775
| 0.289017
| 0.890173
| 0.890173
| 0.890173
| 0.890173
| 0.890173
| 0.890173
| 0
| 0
| 0.029412
| 204
| 4
| 99
| 51
| 0.873737
| 0
| 0
| 0
| 0
| 0
| 0.458537
| 0.458537
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 13
|
cd1e826db794230a201e259bf4748d371bb78299
| 11,212
|
py
|
Python
|
test/test_tez_vault.py
|
atomex-protocol/atomex-core
|
69db4dda1fd4be789a1ede1a72543cc96b2e251e
|
[
"MIT"
] | null | null | null |
test/test_tez_vault.py
|
atomex-protocol/atomex-core
|
69db4dda1fd4be789a1ede1a72543cc96b2e251e
|
[
"MIT"
] | null | null | null |
test/test_tez_vault.py
|
atomex-protocol/atomex-core
|
69db4dda1fd4be789a1ede1a72543cc96b2e251e
|
[
"MIT"
] | null | null | null |
from os.path import dirname, join
from unittest import TestCase
from decimal import Decimal
from pytezos import ContractInterface, MichelsonRuntimeError

# Test fixtures: tz1 addresses for the swap initiator (source), the
# counterparty (party), and a proxy sender used in the proxy scenario.
source = 'tz1irF8HUsQp2dLhKNMhteG1qALNU9g3pfdN'
party = 'tz1h3rQ8wBxFd8L9B3d7Jhaawu6Z568XU3xY'
proxy = 'tz1grSQDByRpnVs7sPtaprNZRp531ZKz6Jmm'
# The swap secret (hex) and its hash as raw bytes. The hashing scheme is
# defined by the contract -- presumably a double-sha256 of the secret;
# TODO confirm against tez_vault.tz.
secret = 'dca15ce0c01f61ab03139b4673f4bd902203dc3b898a89a5d35bad794e5cfd4f'
hashed_secret = bytes.fromhex('05bce5c12071fbca95b13d49cb5ef45323e0216d618bb4575c519b74be75e3da')
# Contract storage shape used throughout the tests: [big_map, None].
empty_storage = [{}, None]
# Repository root (this file lives in <root>/test/).
project_dir = dirname(dirname(__file__))
class AtomexContractTest(TestCase):
    """Interpreter-level tests for the Atomex tez_vault atomic-swap contract.

    Each test drives the compiled Michelson contract through pytezos'
    ``interpret`` with explicit storage/source/now values. Storage shape is
    ``[{hashed_secret: swap_record}, None]``; amounts appear to be mutez
    (1000000 == 1 tez) -- TODO confirm.
    """

    @classmethod
    def setUpClass(cls):
        # Load the compiled contract once for the whole class.
        cls.atomex = ContractInterface.from_file(join(project_dir, 'build/contracts/tez_vault.tz'))
        cls.maxDiff = None

    def test_initiate(self):
        """A plain initiate records the swap; payoff is carved out of the amount."""
        res = self.atomex \
            .initiate(participant=party,
                      hashed_secret=hashed_secret,
                      refund_time=6 * 3600,
                      payoff=20000) \
            .with_amount(1000000) \
            .interpret(storage=empty_storage,
                       source=source,
                       now=0)
        # 1000000 sent minus 20000 payoff -> 980000 locked for the swap.
        res_storage = {
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': 980000,
                'refund_time': 6 * 3600,
                'payoff': 20000
            }
        }
        self.assertDictEqual(res_storage, res.storage[0])
        self.assertEqual([], res.operations)

    def test_initiate_proxy(self):
        """When sender != source, the sender (proxy) is recorded as initiator."""
        res = self.atomex \
            .initiate(participant=party,
                      hashed_secret=hashed_secret,
                      refund_time=6 * 3600,
                      payoff=20000) \
            .with_amount(1000000) \
            .interpret(storage=empty_storage,
                       sender=proxy,
                       source=source,
                       now=0)
        res_storage = {
            hashed_secret: {
                'initiator': proxy,
                'participant': party,
                'amount': 980000,
                'refund_time': 6 * 3600,
                'payoff': 20000
            }
        }
        self.assertDictEqual(res_storage, res.storage[0])
        self.assertEqual([], res.operations)

    def test_initiate_same_secret(self):
        """Initiating with an already-used hashed_secret must fail."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 6 * 3600,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .initiate(participant=party,
                          hashed_secret=hashed_secret,
                          refund_time=6 * 3600,
                          payoff=Decimal('0.02')) \
                .with_amount(1000000) \
                .interpret(storage=initial_storage,
                           source=source,
                           now=0)

    def test_initiate_payoff_overflow(self):
        """A payoff larger than the sent amount must fail."""
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .initiate(participant=party,
                          hashed_secret=hashed_secret,
                          refund_time=6 * 3600,
                          payoff=1100000) \
                .with_amount(1000000) \
                .interpret(storage=empty_storage,
                           source=source,
                           now=0)

    def test_initiate_in_the_past(self):
        """A refund_time earlier than `now` must fail."""
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .initiate(participant=party,
                          hashed_secret=hashed_secret,
                          refund_time=0,
                          payoff=Decimal('0.01')) \
                .with_amount(1000000) \
                .interpret(storage=empty_storage,
                           source=source,
                           now=6 * 3600)

    def test_initiate_same_party(self):
        """Initiating with source == participant must fail.

        NOTE(review): refund_time=0 with now=6*3600 also makes this an
        in-the-past initiate, so the failure may not be exercising the
        same-party check specifically -- confirm.
        """
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .initiate(participant=party,
                          hashed_secret=hashed_secret,
                          refund_time=0,
                          payoff=Decimal('0.01')) \
                .with_amount(1000000) \
                .interpret(storage=empty_storage,
                           source=party,
                           now=6 * 3600)

    def test_add_non_existent(self):
        """Adding funds to a non-existent swap must fail."""
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .add(hashed_secret) \
                .with_amount(1000000) \
                .interpret(storage=empty_storage)

    def test_add_another_address(self):
        """Any address (here: the participant) may top up an existing swap."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': 980000,
                'refund_time': 6 * 3600,
                'payoff': 20000
            }
        }, None]
        res = self.atomex \
            .add(hashed_secret) \
            .with_amount(1000000) \
            .interpret(storage=initial_storage, source=party, now=0)
        # The full added amount is credited to the locked swap amount.
        res_storage = initial_storage[0]
        res_storage[hashed_secret]['amount'] = 1980000
        self.assertDictEqual(res_storage, res.storage[0])

    def test_add_after_expiration(self):
        """Adding funds after refund_time has passed must fail."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 0,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .add(hashed_secret) \
                .with_amount(1000000) \
                .interpret(storage=initial_storage, source=source, now=6 * 3600)

    def test_redeem_by_third_party(self):
        """Anyone with the secret can redeem; payout goes to the participant,
        payoff to the transaction source."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 60,
                'payoff': Decimal('0.02')
            }
        }, None]
        res = self.atomex \
            .redeem(secret) \
            .interpret(storage=initial_storage, source=source, now=0)
        # The swap record is cleared and two transfers are emitted.
        self.assertDictEqual({hashed_secret: None}, res.storage[0])
        self.assertEqual(2, len(res.operations))
        redeem_tx = res.operations[0]
        self.assertEqual(party, redeem_tx['destination'])
        self.assertEqual('980000', redeem_tx['amount'])
        payoff_tx = res.operations[1]
        self.assertEqual(source, payoff_tx['destination'])
        self.assertEqual('20000', payoff_tx['amount'])

    def test_redeem_after_expiration(self):
        """Redeeming after refund_time must fail."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 0,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .redeem(secret) \
                .interpret(storage=initial_storage, source=party, now=60)

    def test_redeem_invalid_secret(self):
        """Redeeming with a secret that does not hash to hashed_secret must fail."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 60,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .redeem('a' * 32) \
                .interpret(storage=initial_storage, source=source, now=0)

    def test_redeem_with_money(self):
        """Redeem must reject a non-zero transaction amount."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 60,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .redeem(secret) \
                .with_amount(1000000) \
                .interpret(storage=initial_storage, source=source, now=0)

    def test_refund(self):
        """After expiry the initiator gets amount + payoff back in one transfer."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 0,
                'payoff': Decimal('0.02')
            }
        }, None]
        res = self.atomex \
            .refund(hashed_secret) \
            .interpret(storage=initial_storage, source=source, now=60)
        self.assertDictEqual({hashed_secret: None}, res.storage[0])
        self.assertEqual(1, len(res.operations))
        refund_tx = res.operations[0]
        self.assertEqual(source, refund_tx['destination'])
        self.assertEqual('1000000', refund_tx['amount'])

    def test_refund_before_expiration(self):
        """Refunding before refund_time must fail."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 60,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .refund(hashed_secret) \
                .interpret(storage=initial_storage, source=source, now=0)

    def test_refund_non_existent(self):
        """Refunding a non-existent swap must fail."""
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .refund(hashed_secret) \
                .interpret(storage=empty_storage, source=source)

    def test_refund_with_money(self):
        """Refund must reject a non-zero transaction amount."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 0,
                'payoff': Decimal('0.02')
            }
        }, None]
        with self.assertRaises(MichelsonRuntimeError):
            self.atomex \
                .refund(hashed_secret) \
                .with_amount(1000000) \
                .interpret(storage=initial_storage, source=source, now=60)

    def test_refund_by_third_party(self):
        """Anyone can trigger the refund; funds still go to the initiator."""
        initial_storage = [{
            hashed_secret: {
                'initiator': source,
                'participant': party,
                'amount': Decimal('0.98'),
                'refund_time': 0,
                'payoff': Decimal('0.02')
            }
        }, None]
        res = self.atomex \
            .refund(hashed_secret) \
            .interpret(storage=initial_storage, source=party, now=60)
        self.assertDictEqual({hashed_secret: None}, res.storage[0])
        self.assertEqual(1, len(res.operations))
        refund_tx = res.operations[0]
        self.assertEqual(source, refund_tx['destination'])
        self.assertEqual('1000000', refund_tx['amount'])
| 33.975758
| 99
| 0.516054
| 950
| 11,212
| 5.903158
| 0.104211
| 0.081312
| 0.047432
| 0.064907
| 0.813302
| 0.806526
| 0.796719
| 0.78602
| 0.774073
| 0.738588
| 0
| 0.061988
| 0.379861
| 11,212
| 329
| 100
| 34.079027
| 0.744571
| 0
| 0
| 0.757042
| 0
| 0
| 0.090528
| 0.023546
| 0
| 0
| 0
| 0
| 0.109155
| 1
| 0.066901
| false
| 0
| 0.014085
| 0
| 0.084507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd3a36d13429cb75fffaeb80d88fe3fba52f64a5
| 50
|
py
|
Python
|
apps/demo/models/__init__.py
|
wangyuhuiever/sanic-tailor
|
8be2c855a737803a431e87068bada8489930c425
|
[
"MIT"
] | null | null | null |
apps/demo/models/__init__.py
|
wangyuhuiever/sanic-tailor
|
8be2c855a737803a431e87068bada8489930c425
|
[
"MIT"
] | null | null | null |
apps/demo/models/__init__.py
|
wangyuhuiever/sanic-tailor
|
8be2c855a737803a431e87068bada8489930c425
|
[
"MIT"
] | null | null | null |
from . import models
from . import models_inherit
| 16.666667
| 28
| 0.8
| 7
| 50
| 5.571429
| 0.571429
| 0.512821
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 50
| 2
| 29
| 25
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
26bbfbc70339b79d9b733aa9c730f763ac506d8f
| 377
|
py
|
Python
|
src/echelon/algorithms/__init__.py
|
takeshi-teshima/echelon-py
|
f95fd24f6023921fbe19f16ea7ab15cef5099e5c
|
[
"Apache-2.0"
] | null | null | null |
src/echelon/algorithms/__init__.py
|
takeshi-teshima/echelon-py
|
f95fd24f6023921fbe19f16ea7ab15cef5099e5c
|
[
"Apache-2.0"
] | 3
|
2021-11-02T14:28:28.000Z
|
2022-01-28T03:51:07.000Z
|
src/echelon/algorithms/__init__.py
|
takeshi-teshima/echelon-py
|
f95fd24f6023921fbe19f16ea7ab15cef5099e5c
|
[
"Apache-2.0"
] | null | null | null |
from echelon.algorithms.find_echelon_clusters import find_echelon_clusters
from echelon.algorithms.find_foundation_echelons import find_foundation_echelons
from echelon.algorithms.find_peak_echelons import find_peak_echelons
from echelon.algorithms.find_echelon_hierarchy import find_echelon_hierarchy
from echelon.algorithms.find_echelon_hotspots import find_echelon_hotspots
| 62.833333
| 80
| 0.920424
| 50
| 377
| 6.54
| 0.22
| 0.201835
| 0.321101
| 0.382263
| 0.41896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05305
| 377
| 5
| 81
| 75.4
| 0.915966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f8662da71aceeb96f5927d976e880a0e775a3021
| 135
|
py
|
Python
|
ssc/__init__.py
|
wence-/ssc
|
c56af51286a7dd7f3d98a8087903e3d18ca937cb
|
[
"MIT"
] | null | null | null |
ssc/__init__.py
|
wence-/ssc
|
c56af51286a7dd7f3d98a8087903e3d18ca937cb
|
[
"MIT"
] | 3
|
2018-03-05T12:22:54.000Z
|
2018-09-27T10:54:59.000Z
|
ssc/__init__.py
|
wence-/ssc
|
c56af51286a7dd7f3d98a8087903e3d18ca937cb
|
[
"MIT"
] | 1
|
2018-01-23T15:21:31.000Z
|
2018-01-23T15:21:31.000Z
|
from ssc.ssc import SSC # noqa: F401
from ssc.patch import PatchPC # noqa: F401
from ssc.relaxation import * # noqa: F401
| 33.75
| 44
| 0.674074
| 20
| 135
| 4.55
| 0.4
| 0.230769
| 0.263736
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089109
| 0.251852
| 135
| 3
| 45
| 45
| 0.811881
| 0.237037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f8a67b910078e9a352c7d9a9492d3de3a7403915
| 73,382
|
py
|
Python
|
tests/test_info_scripts.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 29
|
2016-04-23T13:28:51.000Z
|
2021-10-03T15:49:29.000Z
|
tests/test_info_scripts.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 17
|
2016-04-22T15:45:33.000Z
|
2020-11-20T16:47:24.000Z
|
tests/test_info_scripts.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 2
|
2016-05-26T01:54:51.000Z
|
2020-04-30T19:17:18.000Z
|
import os
import sys
import subprocess as sp
import pytest
import yaml
import tempfile
import requests
import argparse
import json
import re
from argparse import Namespace
from argparse import ArgumentParser
import glob
import contextlib
import tarfile
from helpers import install_hg19_gaps_ucsc_v1, uninstall_hg19_gaps_ucsc_v1, CreateRecipe
from ggd import show_env
from ggd import list_files
from ggd import list_pkg_info
from ggd import list_installed_pkgs
from ggd import predict_path
from ggd import utils
from ggd import install
from ggd import uninstall
from ggd.utils import get_conda_package_list
if sys.version_info[0] == 3:
from io import StringIO
elif sys.version_info[0] == 2:
from StringIO import StringIO
#---------------------------------------------------------------------------------------------------------
## enable socket
#---------------------------------------------------------------------------------------------------------
from pytest_socket import disable_socket, enable_socket
def pytest_enable_socket():
    """Re-enable network sockets for the tests (wraps pytest_socket.enable_socket)."""
    enable_socket()
def pytest_disable_socket():
    """Disable network sockets for the tests (wraps pytest_socket.disable_socket)."""
    disable_socket()
#---------------------------------------------------------------------------------------------------------
## Test Label
#---------------------------------------------------------------------------------------------------------
# Label for this test module -- presumably used to tag test artifacts/recipes;
# it is not referenced within this chunk of the file.
TEST_LABEL = "ggd-info-scripts-test"
#---------------------------------------------------------------------------------------------------------
## IO redirection
#---------------------------------------------------------------------------------------------------------
## Create a redirect_stdout that works for python 2 and 3. (Similar to contextlib.redirect_stdout in python 3)
@contextlib.contextmanager
def redirect_stdout(target):
    """Temporarily replace ``sys.stdout`` with *target*.

    Python 2/3 compatible equivalent of ``contextlib.redirect_stdout``.

    BUG FIX: the original did not wrap ``yield`` in try/finally, so an
    exception raised inside the ``with`` body left ``sys.stdout`` pointing
    at *target* for the rest of the process. Restore it unconditionally.
    """
    original = sys.stdout
    sys.stdout = target
    try:
        yield
    finally:
        sys.stdout = original
#-----------------------------------------------------------------------------------------------------------------------
# Unit Test for ggd show-env, ggd get-files, ggd pkg-info, ggd list, and predict-path
#-----------------------------------------------------------------------------------------------------------------------
#----------------------------------------------------
## Test functions based on hg19-gaps being installed
#----------------------------------------------------
### Show-env
def test_show_env_goodrun():
    """
    Test that the show_env function properly provides the environment
    variables for an installed package.
    """
    pytest_enable_socket()
    # Best-effort (re)install of the hg19-gaps test package.
    # NOTE(review): the bare excepts also swallow SystemExit/KeyboardInterrupt;
    # presumably intentional because the helpers may sys.exit() -- confirm.
    try:
        uninstall_hg19_gaps_ucsc_v1()
    except:
        pass
    try:
        install_hg19_gaps_ucsc_v1()
    except:
        pass
    parser = ()  # show_env is handed an empty tuple in place of a real parser
    args = Namespace(command='show-env', pattern=None)
    dir_env_var_name = "$ggd_hg19_gaps_ucsc_v1_dir"
    file_env_var_name = "$ggd_hg19_gaps_ucsc_v1_file"
    ## Test a normal run: both package env vars are reported
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        show_env.show_env(parser,args)
    output = temp_stdout.getvalue().strip()
    assert (dir_env_var_name in output)
    assert (file_env_var_name in output)
    ## Test active environment variables after activating the base env
    sp.check_call(["activate", "base"])
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        show_env.show_env(parser,args)
    output = temp_stdout.getvalue().strip()
    # Collect the "Active environment variables" section of the report.
    # NOTE(review): `newout` is built but never asserted on -- dead code?
    newout = ""
    active = False
    for line in output.strip().split("\n"):
        if "Active environment variables:" in line:
            active = True
        if "Inactive or out-of-date environment variables:" in line:
            active = False
        if active:
            newout += line
    assert (dir_env_var_name in output)
    assert (file_env_var_name in output)
def test_show_env_with_pattern():
    """
    Adding the pattern parameter to show-env filters the reported variables.
    """
    pytest_enable_socket()
    dir_env_var_name = "$ggd_hg19_gaps_ucsc_v1_dir"
    file_env_var_name = "$ggd_hg19_gaps_ucsc_v1_file"
    parser = ()

    def captured_output(pattern):
        # Run show-env with the given pattern and return its stripped stdout.
        ns = Namespace(command='show-env', pattern=pattern)
        sink = StringIO()
        with redirect_stdout(sink):
            show_env.show_env(parser, ns)
        return sink.getvalue().strip()

    ## A good pattern keeps the "ggd_hg19_gaps" variables in the results
    output = captured_output("gaps")
    assert dir_env_var_name in output
    assert file_env_var_name in output

    ## A non-matching pattern reports no recipe variables
    output = captured_output("NONE")
    assert dir_env_var_name not in output
    assert file_env_var_name not in output
    assert "No matching recipe variables found for this environment" in output

    ## An invalid regex pattern exits with code 1
    bad_args = Namespace(command='show-env', pattern=")()(")
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        show_env.show_env(parser, bad_args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  # raised via sys.exit()
    assert pytest_wrapped_e.match("1")  # exit code is 1
def replace_env_var(active_var, deactive_var, active_loc, deactive_loc):
    """
    Helper for test_remove_env_variable: re-append the environment variable
    lines that were removed during the test.
    """
    pytest_enable_socket()
    for text, path in ((active_var, active_loc), (deactive_var, deactive_loc)):
        with open(path, "a") as handle:
            handle.write(text)
def test_remove_env_variable():
    """
    Test that remove_env_variable correctly removes the env var from the
    activate.d/deactivate.d env_vars.sh files, and leaves unrelated or
    similarly-named variables untouched.

    Refactor: the twelve copy-pasted scan loops of the original are folded
    into two local helpers; the files are now also opened via context
    managers instead of being left to the garbage collector.
    """
    def _first_line_with(path, text):
        # First line of *path* containing *text* (saved so it can be restored).
        with open(path, "r") as fh:
            return str([x for x in fh if text in x][0])

    def _has_var(path, var_name, suffix):
        # True if any line of *path* matches r"\b" + var_name + suffix.
        with open(path, "r") as fh:
            return any(re.search(r"\b" + var_name + suffix, line) for line in fh)

    pytest_enable_socket()
    dir_main_env_var = "ggd_hg19_gaps_ucsc_v1_dir"
    file_main_env_var = "ggd_hg19_gaps_ucsc_v1_file"
    conda_root, conda_path = utils.get_conda_env()
    active_env_file = os.path.join(conda_path, "etc", "conda", "activate.d", "env_vars.sh")
    deactive_env_file = os.path.join(conda_path, "etc", "conda", "deactivate.d", "env_vars.sh")
    ## Save the current env-var lines so they can be restored afterwards
    dir_active_env_var = _first_line_with(active_env_file, dir_main_env_var)
    file_active_env_var = _first_line_with(active_env_file, file_main_env_var)
    dir_deactive_env_var = _first_line_with(deactive_env_file, dir_main_env_var)
    file_deactive_env_var = _first_line_with(deactive_env_file, file_main_env_var)

    ## A variable that is not in the files must not remove those that are
    show_env.remove_env_variable("ggd_NOT-in-file")
    assert _has_var(active_env_file, dir_main_env_var, "=") == True
    assert _has_var(active_env_file, file_main_env_var, "=") == True
    assert _has_var(deactive_env_file, dir_main_env_var, r"\b") == True
    assert _has_var(deactive_env_file, file_main_env_var, r"\b") == True

    ## Proper removal of the environment variables
    show_env.remove_env_variable(dir_main_env_var)
    assert _has_var(active_env_file, dir_main_env_var, "=") == False
    show_env.remove_env_variable(file_main_env_var)
    assert _has_var(active_env_file, file_main_env_var, "=") == False
    assert _has_var(deactive_env_file, dir_main_env_var, r"\b") == False
    assert _has_var(deactive_env_file, file_main_env_var, r"\b") == False
    replace_env_var(dir_active_env_var, dir_deactive_env_var, active_env_file, deactive_env_file)
    replace_env_var(file_active_env_var, file_deactive_env_var, active_env_file, deactive_env_file)

    ## A similar (prefix) variable name must not remove other variables
    show_env.remove_env_variable("ggd_hg19-ga")
    assert _has_var(active_env_file, dir_main_env_var, "=") == True
    assert _has_var(active_env_file, file_main_env_var, "=") == True
    assert _has_var(deactive_env_file, dir_main_env_var, r"\b") == True
    assert _has_var(deactive_env_file, file_main_env_var, r"\b") == True
def test_remove_env_variable_different_prefix():
    """
    Test that remove_env_variable correctly removes the env var in a non
    conda_root prefix from the activate.d/deactivate.d env_vars.sh files.
    """
    # BUG FIX: the original used shutil.rmtree but `shutil` is never imported
    # in this module; the resulting NameError was silently swallowed by the
    # `except Exception: pass` blocks, so the temp env was never cleaned up.
    import shutil

    def _first_line_with(path, text):
        # First line of *path* containing *text* (saved so it can be restored).
        with open(path, "r") as fh:
            return str([x for x in fh if text in x][0])

    def _has_var(path, var_name, suffix):
        # True if any line of *path* matches r"\b" + var_name + suffix.
        with open(path, "r") as fh:
            return any(re.search(r"\b" + var_name + suffix, line) for line in fh)

    pytest_enable_socket()
    ## Set up a temporary conda environment
    env_name = "temp_env12"
    temp_env = os.path.join(utils.conda_root(), "envs", env_name)
    ### Remove the temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ## Create the conda environment
    sp.check_output(["conda", "create", "--name", env_name])
    ## Install a ggd recipe into temp_env
    ggd_package = "hg19-pfam-domains-ucsc-v1"
    install_args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_package], file=[], prefix = temp_env, id = None)
    assert install.install((), install_args) == True
    dir_main_env_var = "ggd_hg19_pfam_domains_ucsc_v1_dir"
    file_main_env_var = "ggd_hg19_pfam_domains_ucsc_v1_file"
    conda_root, conda_path = utils.get_conda_env(prefix=temp_env)
    assert conda_path == temp_env
    active_env_file = os.path.join(conda_path, "etc", "conda", "activate.d", "env_vars.sh")
    deactive_env_file = os.path.join(conda_path, "etc", "conda", "deactivate.d", "env_vars.sh")
    ## Save the current env-var lines so they can be restored afterwards
    dir_active_env_var = _first_line_with(active_env_file, dir_main_env_var)
    file_active_env_var = _first_line_with(active_env_file, file_main_env_var)
    dir_deactive_env_var = _first_line_with(deactive_env_file, dir_main_env_var)
    file_deactive_env_var = _first_line_with(deactive_env_file, file_main_env_var)

    ## Proper removal of the environment variables in the non-root prefix
    show_env.remove_env_variable(dir_main_env_var,prefix=temp_env)
    assert _has_var(active_env_file, dir_main_env_var, "=") == False
    show_env.remove_env_variable(file_main_env_var,prefix=temp_env)
    assert _has_var(active_env_file, file_main_env_var, "=") == False
    assert _has_var(deactive_env_file, dir_main_env_var, r"\b") == False
    assert _has_var(deactive_env_file, file_main_env_var, r"\b") == False
    replace_env_var(dir_active_env_var, dir_deactive_env_var, active_env_file, deactive_env_file)
    replace_env_var(file_active_env_var, file_deactive_env_var, active_env_file, deactive_env_file)

    ## Remove the temp env
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_activate_environment_variables():
    """
    activate_environment_variables reports the installed package's env vars.
    """
    pytest_enable_socket()
    dir_env_var_name = "$ggd_hg19_gaps_ucsc_v1_dir"
    file_env_var_name = "$ggd_hg19_gaps_ucsc_v1_file"
    sink = StringIO()
    with redirect_stdout(sink):
        show_env.activate_environment_variables()
    output = sink.getvalue().strip()
    # Collect the "Active environment variables" section of the report
    # (NOTE: mirrors the original flow -- the collected text is not asserted on).
    active_section = ""
    in_active = False
    for line in output.strip().split("\n"):
        if "Active environment variables:" in line:
            in_active = True
        if "Inactive or out-of-date environment variables:" in line:
            in_active = False
        if in_active:
            active_section += line
    assert dir_env_var_name in output
    assert file_env_var_name in output
def test_test_vars():
    """
    Test that the test_vars function correctly splits a mapping of env vars
    into active and inactive environment variables.
    """
    pytest_enable_socket()
    # Hard-coded snapshot of env-var name -> install path mappings
    # (taken from a developer machine; the paths need not exist locally).
    matching_vars = {'ggd_hg38_gaps_11_mar_2019': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg38/hg38-gaps_11-mar-2019/1', 'ggd_hg38_reference_genome_ucsc': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg38/hg38-reference-genome-ucsc/1', 'ggd_hg38_cpg_islands': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg38/hg38-cpg-islands/1', 'ggd_grch38_reference_genome_ensembl': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/GRCh38/grch38-reference-genome-ensembl/1', 'ggd_hg19_cpg_islands': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg19/hg19-cpg-islands/1', 'ggd_hg19_gaps': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg19/hg19-gaps/1', 'ggd_hg19_reference_genome_ucsc': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg19/hg19-reference-genome-ucsc/1', 'ggd_hg38_simplerepeats': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg38/hg38-simplerepeats/1', 'ggd_hg19_pfam_domains_ucsc': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg19/hg19-pfam-domains-ucsc/1', 'ggd_grch37_esp_variants': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/GRCh37/grch37-esp-variants/1', 'ggd_hg38_pfam_domains_ucsc': '/uufs/chpc.utah.edu/common/home/quinlan-ucgdstor/u1138933/ucgdscratch/anaconda2/share/ggd/Homo_sapiens/hg38/hg38-pfam-domains-ucsc/1'}
    # Expected split: a var is "active" when it is exported in the current
    # environment AND points at the same path as the snapshot.
    active = [x for x in matching_vars if x in os.environ and os.environ[x] == matching_vars[x]]
    inactive = [x for x in matching_vars if x not in active]
    active_list, inactive_list = show_env.test_vars(matching_vars)
    for var in active_list:
        assert var in active
    for var in inactive_list:
        assert var in inactive
### get files
def test_in_ggd_channel():
    """
    Test that list_files.in_ggd_channel correctly reports a package's
    species, genome build, and version, and exits on bad input.
    """
    pytest_enable_socket()

    ## A package present in the channel returns its species, build, and version
    pkg = "hg19-gaps-ucsc-v1"
    chan = "genomics"
    species, build, version = list_files.in_ggd_channel([pkg], chan, utils.conda_root())
    assert species == "Homo_sapiens"
    assert build == "hg19"
    assert version == "1"

    ## A bad channel name triggers sys.exit with an explanatory message
    chan = "not_a_real_channel"
    with pytest.raises(SystemExit) as exc_info:
        list_files.in_ggd_channel([pkg], chan, utils.conda_root())
    assert "SystemExit" in str(exc_info.exconly())  ## sys.exit() raised SystemExit
    assert exc_info.match("The '{c}' channel is not a ggd conda channel".format(c=chan))

    ## A bad package name triggers sys.exit with exit code 2
    pkg = "NOT_A_REAL_PACKAGE_NAME"
    chan = "genomics"
    with pytest.raises(SystemExit) as exc_info:
        list_files.in_ggd_channel([pkg], chan, utils.conda_root())
    assert "SystemExit" in str(exc_info.exconly())  ## sys.exit() raised SystemExit
    assert exc_info.match("2")  ## exit code 2
def test_list_files():
    """
    Test the main method of list-files.

    Verifies that list_files.list_files prints exactly the package's data
    file and its index under each matching filter (none, genome build,
    species, version, pattern), and that it exits via sys.exit with the
    expected exit code for each invalid filter.
    """
    pytest_enable_socket()
    ggd_package = "hg19-gaps-ucsc-v1"
    file1 = "{}.bed.gz".format(ggd_package)
    file2 = "{}.bed.gz.tbi".format(ggd_package)

    def make_args(**overrides):
        ## Build a list-files Namespace; overrides replace individual filters
        fields = dict(channel='genomics', command='list-files', genome_build=None,
                      name=ggd_package, pattern=None, prefix=None, species=None,
                      version=None)
        fields.update(overrides)
        return Namespace(**fields)

    def captured_output(args):
        ## Run list-files and return its captured, stripped stdout
        temp_stdout = StringIO()
        with redirect_stdout(temp_stdout):
            list_files.list_files((), args)
        return str(temp_stdout.getvalue().strip())

    def assert_both_files_listed(args):
        ## Exactly two paths are printed: the data file and its index
        output = captured_output(args)
        assert re.search(file1 + "$", sorted(output.split("\n"))[0])
        assert re.search(file2 + "$", sorted(output.split("\n"))[1])
        assert len(output.split("\n")) == 2

    def assert_system_exit(args, code_pattern):
        ## list-files should raise SystemExit (via sys.exit) matching code_pattern
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            list_files.list_files((), args)
        assert "SystemExit" in str(pytest_wrapped_e.exconly())
        assert pytest_wrapped_e.match(code_pattern)

    ## Correct file paths with no filters, and with each matching filter set
    assert_both_files_listed(make_args())
    assert_both_files_listed(make_args(genome_build="hg19"))
    assert_both_files_listed(make_args(species="Homo_sapiens"))
    assert_both_files_listed(make_args(version="1"))

    ## With a pattern set, only the matching file is returned
    output = captured_output(make_args(pattern=file1))
    assert re.search(file1 + "$", output)
    assert re.search(file2 + "$", output) == None
    assert len(output.split("\n")) == 1

    output = captured_output(make_args(pattern=file2))
    assert re.search(file1 + "$", output) == None
    assert re.search(file2 + "$", output)
    assert len(output.split("\n")) == 1

    ## A bad ggd package name exits with code 2
    assert_system_exit(make_args(name="NOT_a_real_package_name"), "2")
    ## A bad genome build exits with code 3
    assert_system_exit(make_args(genome_build="Bad_Build"), "3")
    ## A bad species exits with code 3
    assert_system_exit(make_args(species="Mus_musculus"), "3")
    ## A bad version exits with code 1
    assert_system_exit(make_args(version="99999"), "1")
    ## A bad pattern exits with code 1
    assert_system_exit(make_args(pattern="BAD_PATTERN"), "1")
def test_list_files_with_prefix():
    """
    Test the list-files function with the --prefix flag set, using both the
    environment's full path and its name.
    """
    pytest_enable_socket()
    env_name = "temp_e"
    ## Path of the temporary conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", env_name)

    ## Remove the temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass

    ## Create a fresh conda environment and install a ggd recipe into it
    sp.check_output(["conda", "create", "--name", env_name])
    ggd_package = "hg19-pfam-domains-ucsc-v1"
    install_args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_package], file=[], prefix=temp_env, id=None)
    assert install.install((), install_args) == True

    data_files = ["{}.bed12.bed.gz".format(ggd_package), "{}.bed12.bed.gz.tbi".format(ggd_package)]

    ## Identifiers needed to reconstruct the installed file paths
    jdict = install.check_ggd_recipe(ggd_package, "genomics")
    species = jdict["packages"][ggd_package]["identifiers"]["species"]
    build = jdict["packages"][ggd_package]["identifiers"]["genome-build"]
    version = jdict["packages"][ggd_package]["version"]

    ## list-files should resolve the installed files whether the prefix is
    ## given as the environment path or as the environment name
    for prefix_arg in (temp_env, env_name):
        args = Namespace(channel='genomics', command='list-files', genome_build=None, name=ggd_package, pattern=None, prefix=prefix_arg, species=None, version=None)
        captured = StringIO()
        with redirect_stdout(captured):
            list_files.list_files((), args)
        output = str(captured.getvalue().strip())
        assert temp_env in output
        assert len(output.split("\n")) == 2
        for data_file in data_files:
            assert data_file in output
            full_path = os.path.join(temp_env, "share", "ggd", species, build, ggd_package, version, data_file)
            assert full_path in output
            assert os.path.exists(full_path)

    ## Remove the temp env
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
### pkg-info
def test_check_if_ggd_recipe():
    """
    Test that check_if_ggd_recipe correctly identifies whether a package is a
    ggd recipe in the given channel.
    """
    pytest_enable_socket()

    ## A real package in a real channel is recognized as a ggd recipe
    assert list_pkg_info.check_if_ggd_recipe("hg19-gaps-ucsc-v1", "genomics") == True

    ## A real package name in a nonexistent channel is rejected with a message
    bad_channel = "BAD_CHANNEL"
    captured = StringIO()
    with redirect_stdout(captured):
        assert list_pkg_info.check_if_ggd_recipe("hg19-gaps-ucsc-v1", bad_channel) == False
    output = captured.getvalue().strip()
    assert "The hg19-gaps-ucsc-v1 package is not in the ggd-{c} channel.".format(c=bad_channel) in output

    ## A nonexistent package in a real channel is rejected
    assert list_pkg_info.check_if_ggd_recipe("BAD_Recipe", "genomics") == False
def test_get_pkg_info():
    """
    Test that get_pkg_info correctly returns the pkg info for an installed
    package and handles an uninstalled package gracefully.
    """
    pytest_enable_socket()

    ## An installed package reports its info and returns True
    assert list_pkg_info.get_pkg_info("hg19-gaps-ucsc-v1", "genomics", False) == True

    ## A package that is not installed prints a warning and returns False
    bad_package = "Bad_package"
    captured = StringIO()
    with redirect_stdout(captured):
        list_pkg_info.get_pkg_info(bad_package, "genomics", False)
    output = captured.getvalue().strip()
    assert bad_package + " is not downloaded on your system, or was downloaded incorrectly" in output
    assert list_pkg_info.get_pkg_info(bad_package, "genomics", False) == False
def test_get_meta_yaml_info():
    """
    Test that get_meta_yaml_info prints the correct package information parsed
    from a recipe's meta.yaml file.

    Covers three cases: a minimal recipe, a recipe carrying the full set of
    tag keys (cached, data-provider, file-type, final-files, coordinate
    base), and a meta-recipe whose species/genome-build identifiers carry
    "updated-" values.
    """
    pytest_enable_socket()

    def get_info_output(recipe_obj, pkg, channel):
        ## Capture the stdout produced by get_meta_yaml_info for pkg's
        ## meta.yaml, then confirm the function returns True on a fresh
        ## handle (the original file object is consumed by the first call).
        ## Context managers guarantee the handles are closed, and assertion
        ## failures propagate with their original traceback instead of
        ## being re-raised as a new AssertionError.
        meta_yaml_file = os.path.join(recipe_obj.recipe_dirs[pkg], "meta.yaml")
        with open(meta_yaml_file, "r") as fh:
            temp_stdout = StringIO()
            with redirect_stdout(temp_stdout):
                list_pkg_info.get_meta_yaml_info(fh, pkg, channel)
        with open(meta_yaml_file, "r") as fh:
            assert list_pkg_info.get_meta_yaml_info(fh, pkg, channel) == True
        return temp_stdout.getvalue().strip()

    ## 1) A minimal recipe with only data-version and ggd-channel tags
    recipe = CreateRecipe(
    """
    fake-recipe:
        meta.yaml: |
            build:
              binary_relocation: false
              detect_binary_files_with_prefix: false
              noarch: generic
              number: 1
            extra:
              authors: me
            package:
              name: fake-recipe
              version: '1'
            requirements:
              build:
              - gsort
              - htslib
              - zlib
              run:
              - gsort
              - htslib
              - zlib
            source:
              path: .
            about:
              identifiers:
                genome-build: hg19
                species: Homo_sapiens
              keywords:
              - gaps
              - region
              summary: A fake recipe for testing
              tags:
                data-version: Today
                ggd-channel: genomics
    """, from_string=True)
    recipe.write_recipes()

    output = get_info_output(recipe, "fake-recipe", "genomics")
    assert "\t\x1b[1mGGD-Package:\x1b[0m fake-recipe" in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-genomics" in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m A fake recipe for testing" in output
    assert "\t\x1b[1mSpecies:\x1b[0m Homo_sapiens" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m hg19" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, region" in output
    assert "\t\x1b[1mData Version:\x1b[0m Today" in output
    conda_root = utils.conda_root()
    assert "\t\x1b[1mPkg File Path:\x1b[0m {}/share/ggd/Homo_sapiens/hg19/fake-recipe/1".format(conda_root) in output
    assert "\t\x1b[1mInstalled Pkg Files:\x1b[0m " in output

    ## 2) A recipe with the full set of tag keys
    recipe = CreateRecipe(
    """
    fake-recipe2:
        meta.yaml: |
            build:
              binary_relocation: false
              detect_binary_files_with_prefix: false
              noarch: generic
              number: 1
            extra:
              authors: me
            package:
              name: fake-recipe2
              version: '1'
            requirements:
              build:
              - gsort
              - htslib
              - zlib
              run:
              - gsort
              - htslib
              - zlib
            source:
              path: .
            about:
              identifiers:
                genome-build: hg19
                species: Homo_sapiens
              keywords:
              - gaps
              - region
              summary: A fake recipe for testing
              tags:
                cached:
                - uploaded_to_aws
                data-provider: ME
                data-version: Today
                file-type:
                - something
                final-files:
                - fake2.something.gz
                - fake2.something.gz.tbi
                genomic-coordinate-base: 0-based-inclusive
                ggd-channel: fake2
    """, from_string=True)
    recipe.write_recipes()

    output = get_info_output(recipe, "fake-recipe2", "fake2")
    assert "\t\x1b[1mGGD-Package:\x1b[0m fake-recipe2" in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-fake2" in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m A fake recipe for testing" in output
    assert "\t\x1b[1mSpecies:\x1b[0m Homo_sapiens" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m hg19" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, region" in output
    assert "\t\x1b[1mCached:\x1b[0m uploaded_to_aws" in output
    assert "\t\x1b[1mData Provider:\x1b[0m ME" in output
    assert "\t\x1b[1mData Version:\x1b[0m Today" in output
    assert "\t\x1b[1mFile type(s):\x1b[0m something" in output
    assert "\t\x1b[1mData file coordinate base:\x1b[0m 0-based-inclusive" in output
    assert "\t\x1b[1mIncluded Data Files:\x1b[0m " in output
    assert "\t\tfake2.something.gz" in output
    assert "\t\tfake2.something.gz.tbi" in output
    conda_root = utils.conda_root()
    assert "\t\x1b[1mPkg File Path:\x1b[0m {}/share/ggd/Homo_sapiens/hg19/fake-recipe2/1".format(conda_root) in output
    assert "\t\x1b[1mInstalled Pkg Files:\x1b[0m " in output

    ## 3) A meta-recipe with updated species/genome-build identifiers
    recipe = CreateRecipe(
    """
    fake-meta-recipe1:
        meta.yaml: |
            build:
              binary_relocation: false
              detect_binary_files_with_prefix: false
              noarch: generic
              number: 1
            extra:
              authors: me
            package:
              name: fake-meta-recipe1
              version: '1'
            requirements:
              build:
              - gsort
              - htslib
              - zlib
              run:
              - gsort
              - htslib
              - zlib
            source:
              path: .
            about:
              identifiers:
                parent-meta-recipe: fake-meta-recipe0
                genome-build: meta-recipe
                updated-genome-build: UPDATED-meta-recipe
                species: meta-recipe
                updated-species: UPDATED-meta-recipe
              keywords:
              - gaps
              - region
              summary: A fake meta-recipe for testing
              tags:
                data-provider: ME
                data-version: Today
                file-type:
                - something
                final-files:
                - fake3.something.gz
                - fake3.something.gz.tbi
                genomic-coordinate-base: 0-based-inclusive
                ggd-channel: fake3
    """, from_string=True)
    recipe.write_recipes()

    output = get_info_output(recipe, "fake-meta-recipe1", "fake3")
    assert "\t\x1b[1mGGD-Package:\x1b[0m fake-meta-recipe1" in output
    assert "\t\x1b[1mGGD Parent Meta-Recipe:\x1b[0m fake-meta-recipe0" in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-fake3" in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m A fake meta-recipe for testing" in output
    ## The updated-* identifiers should be reported, flagged as "(Updated)"
    assert "\t\x1b[1mSpecies:\x1b[0m (Updated) UPDATED-meta-recipe" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m (Updated) UPDATED-meta-recipe" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, region" in output
    ## No "cached" tag in this recipe, so no Cached line in the output
    assert "\t\x1b[1mCached:\x1b[0m uploaded_to_aws" not in output
    assert "\t\x1b[1mData Provider:\x1b[0m ME" in output
    assert "\t\x1b[1mData Version:\x1b[0m Today" in output
    assert "\t\x1b[1mFile type(s):\x1b[0m something" in output
    assert "\t\x1b[1mData file coordinate base:\x1b[0m 0-based-inclusive" in output
    assert "\t\x1b[1mIncluded Data Files:\x1b[0m " in output
    assert "\t\tfake3.something.gz" in output
    assert "\t\tfake3.something.gz.tbi" in output
    conda_root = utils.conda_root()
    assert "\t\x1b[1mPkg File Path:\x1b[0m {}/share/ggd/meta-recipe/meta-recipe/fake-meta-recipe1/1".format(conda_root) in output
    assert "\t\x1b[1mInstalled Pkg Files:\x1b[0m " in output
def test_print_recipe():
    """
    Test the print_recipe function.

    Verifies that the recipe script contents are printed together with the
    explanatory note, and that print_recipe returns True on success.
    """
    pytest_enable_socket()
    message = "TESTING THE CREATION OF A RECIPE SCRIPT"
    recipe = CreateRecipe(
    """
    TestRecipe:
        recipe.sh: |
            {}
    """.format(message), from_string=True)
    recipe.write_recipes()
    ggd_package = "TestRecipe"
    recipe_file = os.path.join(recipe.recipe_dirs[ggd_package], "recipe.sh")

    ## Context managers guarantee the file handle is closed; assertion
    ## failures propagate with their original traceback instead of being
    ## re-raised as a new AssertionError (the old handlers also risked a
    ## NameError on `f` if open() itself failed).
    with open(recipe_file, "r") as f:
        temp_stdout = StringIO()
        with redirect_stdout(temp_stdout):
            list_pkg_info.print_recipe(f, ggd_package)
    output = temp_stdout.getvalue().strip()
    assert "{} recipe file:".format(ggd_package) in output
    assert message in output
    assert "NOTE: The recipe provided above outlines where the data was accessed and how it was processed" in output
    assert "GGD" not in output

    ## A fresh handle is opened for the return-value check — the original
    ## test reopened the file, so print_recipe presumably consumes it
    with open(recipe_file, "r") as f:
        assert list_pkg_info.print_recipe(f, ggd_package) == True
def test_info_main():
    """
    Test the main function, info(parser, args), of pkg-info.
    """
    pytest_enable_socket()

    def run_info(ns):
        ## Run pkg-info and return its captured, stripped stdout
        buf = StringIO()
        with redirect_stdout(buf):
            list_pkg_info.info((), ns)
        return buf.getvalue().strip()

    ggd_package = "hg19-gaps-ucsc-v1"
    ggd_channel = "genomics"

    ## Normal run: all meta.yaml fields are reported, including the final
    ## files list and approximate data file sizes
    args = Namespace(channel=ggd_channel, command='pkg-info', name=ggd_package, show_recipe=False, prefix=None)
    assert list_pkg_info.info((), args) == True
    output = run_info(args)
    assert "\t\x1b[1mGGD-Package:\x1b[0m hg19-gaps-ucsc-v1" in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-genomics" in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m Assembly gaps from UCSC in bed fromat. Scaffoldings that are not contained in the hg19.genome file are removed" in output
    assert "\t\x1b[1mSpecies:\x1b[0m Homo_sapiens" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m hg19" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, regions, gap-locations, Assembly-Gaps, clone-gaps, contig-gaps, centromere-gaps, telomere-gaps, heterochromatin-gaps, short-arm-gaps" in output
    assert "\t\x1b[1mCached:\x1b[0m uploaded_to_aws" in output
    assert "\t\x1b[1mData Provider:\x1b[0m UCSC" in output
    assert "\t\x1b[1mData Version:\x1b[0m 22-Mar-2020" in output
    assert "\t\x1b[1mFile type(s):\x1b[0m bed" in output
    assert "\t\x1b[1mData file coordinate base:\x1b[0m 0-based-inclusive" in output
    assert "\t\x1b[1mIncluded Data Files:\x1b[0m" in output
    assert "\t\x1b[1mApproximate Data File Sizes:\x1b[0m" in output
    conda_root = utils.conda_root()
    assert "\t\x1b[1mPkg File Path:\x1b[0m {}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1".format(conda_root) in output
    assert "\t\x1b[1mInstalled Pkg Files:\x1b[0m " in output
    assert "\t\t{}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1/hg19-gaps-ucsc-v1.bed.gz.tbi".format(conda_root) in output
    assert "\t\t{}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1/hg19-gaps-ucsc-v1.bed.gz".format(conda_root) in output

    ## Normal run with --show-recipe: the recipe script is also printed
    args = Namespace(channel=ggd_channel, command='pkg-info', name=ggd_package, show_recipe=True, prefix=None)
    assert list_pkg_info.info((), args) == True
    output = run_info(args)
    assert "\t\x1b[1mGGD-Package:\x1b[0m {}".format(ggd_package) in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-{}".format(ggd_channel) in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m Assembly gaps from UCSC in bed fromat. Scaffoldings that are not contained in the hg19.genome file are removed" in output
    assert "\t\x1b[1mSpecies:\x1b[0m Homo_sapiens" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m hg19" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, regions, gap-locations, Assembly-Gaps, clone-gaps, contig-gaps, centromere-gaps, telomere-gaps, heterochromatin-gaps, short-arm-gaps" in output
    assert "\t\x1b[1mCached:\x1b[0m uploaded_to_aws" in output
    assert "{} recipe file:\n***********************".format(ggd_package) in output

    ## A package missing from the channel returns False and prints an
    ## error, both with and without --show-recipe
    bad_package = "Bad-recipe"
    for show_recipe in (False, True):
        args = Namespace(channel=ggd_channel, command='pkg-info', name=bad_package, show_recipe=show_recipe, prefix=None)
        assert list_pkg_info.info((), args) == False
        output = run_info(args)
        assert ":ggd:pkg-info: The {} package is not in the ggd-{} channel.".format(bad_package, ggd_channel) in output
def test_info_main_in_different_prefix():
    """
    Test that pkg-info reports correct paths for a package installed into a
    non-root conda prefix.
    """
    env_name = "temp_pkg_info_env"
    ## Path of the temporary conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", env_name)

    ## Remove the temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass

    ## Create the environment and install a ggd recipe into it
    sp.check_output(["conda", "create", "--name", env_name])
    ggd_package = "hg19-gaps-ucsc-v1"
    install_args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_package], file=[], prefix=temp_env, id=None)
    assert install.install((), install_args) == True

    ## Run pkg-info against the temp prefix
    args = Namespace(channel="genomics", command='pkg-info', name=ggd_package, show_recipe=False, prefix=temp_env)
    assert list_pkg_info.info((), args) == True
    buf = StringIO()
    with redirect_stdout(buf):
        list_pkg_info.info((), args)
    output = buf.getvalue().strip()
    ## All meta.yaml fields are reported, with paths rooted at the temp prefix
    assert "\t\x1b[1mGGD-Package:\x1b[0m hg19-gaps-ucsc-v1" in output
    assert "\t\x1b[1mGGD-Channel:\x1b[0m ggd-genomics" in output
    assert "\t\x1b[1mGGD Pkg Version:\x1b[0m 1" in output
    assert "\t\x1b[1mSummary:\x1b[0m Assembly gaps from UCSC in bed fromat. Scaffoldings that are not contained in the hg19.genome file are removed" in output
    assert "\t\x1b[1mSpecies:\x1b[0m Homo_sapiens" in output
    assert "\t\x1b[1mGenome Build:\x1b[0m hg19" in output
    assert "\t\x1b[1mKeywords:\x1b[0m gaps, regions, gap-locations, Assembly-Gaps, clone-gaps, contig-gaps, centromere-gaps, telomere-gaps, heterochromatin-gaps, short-arm-gaps" in output
    assert "\t\x1b[1mCached:\x1b[0m uploaded_to_aws" in output
    assert "\t\x1b[1mData Provider:\x1b[0m UCSC" in output
    assert "\t\x1b[1mData Version:\x1b[0m 22-Mar-2020" in output
    assert "\t\x1b[1mFile type(s):\x1b[0m bed" in output
    assert "\t\x1b[1mData file coordinate base:\x1b[0m 0-based-inclusive" in output
    assert "\t\x1b[1mIncluded Data Files:\x1b[0m" in output
    assert "\t\x1b[1mApproximate Data File Sizes:\x1b[0m" in output
    conda_root = temp_env
    assert "\t\x1b[1mPkg File Path:\x1b[0m {}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1".format(conda_root) in output
    assert "\t\x1b[1mInstalled Pkg Files:\x1b[0m " in output
    assert "\t\t{}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1/hg19-gaps-ucsc-v1.bed.gz.tbi".format(conda_root) in output
    assert "\t\t{}/share/ggd/Homo_sapiens/hg19/hg19-gaps-ucsc-v1/1/hg19-gaps-ucsc-v1.bed.gz".format(conda_root) in output

    ## Remove the temp env
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
### list (List installed packages)
def test_load_json():
    """
    Test that load_json correctly returns a dictionary loaded from a json
    file on disk.
    """
    pytest_enable_socket()
    ## Write a minimal channeldata-style json file to disk
    file_name = "./tempjson.json"
    json_object = {'channeldata_version': 1, 'subdirs': ['noarch'], 'packages': {'Madeup_package': {'activate.d': False, 'version': '1', 'tags': {'cached': ['uploaded_to_aws'], 'ggd-channel': 'genomics', 'data-version': '27-Apr-2009'}, 'post_link': True, 'binary_prefix': False, 'run_exports': {}, 'pre_unlink': False, 'subdirs': ['noarch'], 'deactivate.d': False, 'reference_package': 'noarch/Madeup_package-1-3.tar.bz2', 'pre_link': False, 'keywords': ['gaps', 'region'], 'summary': 'Assembly gaps from USCS', 'text_prefix': False, 'identifiers': {'genome-build': 'hg19', 'species': 'Homo_sapiens'}}}}
    with open(file_name, "w") as fn:
        json.dump(json_object, fn)
    try:
        jdict = list_installed_pkgs.load_json(file_name)
        assert list(jdict["packages"].keys())[0] == "Madeup_package"
        assert jdict["packages"]["Madeup_package"]["version"] == "1"
        assert jdict["packages"]["Madeup_package"]["identifiers"]["genome-build"] == "hg19"
        assert jdict["packages"]["Madeup_package"]["identifiers"]["species"] == "Homo_sapiens"
    finally:
        ## Always remove the temp file, even when an assertion fails, so a
        ## failed run does not leave ./tempjson.json behind
        os.remove(file_name)
def test_get_environment_variables():
    """
    Test that get_environment_variables correctly collects the ggd
    environment variables in the designated prefix.
    """
    ## enable socket
    pytest_enable_socket()

    ## Make sure hg19-gaps is installed so its environment variables exist.
    ## NOTE(review): the catch-all is intentional and must stay broad — the
    ## install helper presumably exits (SystemExit) when the package is
    ## already installed, and that must not fail this test; confirm.
    try:
        install_hg19_gaps_ucsc_v1()
    except BaseException:
        pass

    env_vars = list_installed_pkgs.get_environment_variables(utils.conda_root())
    for expected_var in ("ggd_hg19_gaps_ucsc_v1_file", "ggd_hg19_gaps_ucsc_v1_dir"):
        assert expected_var in env_vars.keys()

    ## A prefix with no ggd environment variables yields None
    assert list_installed_pkgs.get_environment_variables(os.path.join(utils.conda_root(), "BadPath")) == None

    ## Environment variables are also found in a non-root prefix
    env_name = "temp_env"
    temp_env = os.path.join(utils.conda_root(), "envs", env_name)
    ## Remove the temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ## Create the environment and install a ggd recipe into it
    sp.check_output(["conda", "create", "--name", env_name])
    ggd_package2 = "hg19-pfam-domains-ucsc-v1"
    install_args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_package2], file=[], prefix=temp_env, id=None)
    assert install.install((), install_args) == True

    env_vars = list_installed_pkgs.get_environment_variables(temp_env)
    for expected_var in ("ggd_hg19_pfam_domains_ucsc_v1_file", "ggd_hg19_pfam_domains_ucsc_v1_dir"):
        assert expected_var in env_vars.keys()
    ## Intentionally keep the pfam install in temp_env for later tests
def test_list_pkg_info():
    """
    Test that list_pkg_info displays the correct info for the designated
    prefix, with and without the prefix flag set.
    """
    def capture_listing(pkg, prefix, prefix_set):
        ## Gather the metadata, env vars, and conda package list for the
        ## prefix, run list_pkg_info, and return its captured stdout
        jdict = list_installed_pkgs.load_json(os.path.join(prefix, "share", "ggd_info", "channeldata.json"))
        env_vars = list_installed_pkgs.get_environment_variables(prefix)
        pkg_info = get_conda_package_list(prefix)
        buf = StringIO()
        with redirect_stdout(buf):
            list_installed_pkgs.list_pkg_info([pkg], jdict["packages"], env_vars, pkg_info, prefix, prefix_set)
        return buf.getvalue().strip()

    ## Normal run against the current conda root, prefix flag not set
    pkg_name = "hg19-gaps-ucsc-v1"
    prefix = utils.conda_root()
    output = capture_listing(pkg_name, prefix, False)
    assert pkg_name in output
    assert pkg_name.replace("-", "_") + "_file" in output
    assert pkg_name.replace("-", "_") + "_dir" in output
    assert "To use the environment variables run `source activate base" in output
    assert "You can see the available ggd data package environment variables by running `ggd show-env" in output
    assert "Name" in output and "Pkg-Version" in output and "Pkg-Build" in output and "Channel" in output and "Environment-Variables" in output

    ## Run against the temp_env prefix (created by
    ## test_get_environment_variables), prefix flag set
    pkg_name = "hg19-pfam-domains-ucsc-v1"
    prefix = os.path.join(utils.conda_root(), "envs", "temp_env")
    output = capture_listing(pkg_name, prefix, True)
    assert pkg_name in output
    assert pkg_name.replace("-", "_") + "_file" in output
    assert pkg_name.replace("-", "_") + "_dir" in output
    assert "The environment variables are only available when you are using the '{p}' conda environment".format(p=prefix) in output
    assert "Name" in output and "Pkg-Version" in output and "Pkg-Build" in output and "Channel" in output and "Environment-Variables" in output
def test_get_metadata():
    """
    Test the "get_metadata" function correctly returns the local meta-data json file
    """
    ggd_info_dir = "share/ggd_info"
    metadata_file = "channeldata.json"

    ## A valid metadata file should load and contain the installed package
    metadata_dict = list_installed_pkgs.get_metadata(utils.conda_root(), ggd_info_dir, metadata_file)
    assert "packages" in metadata_dict
    assert "hg19-gaps-ucsc-v1" in metadata_dict["packages"]

    ## A non-existent metadata file should trigger sys.exit() with the load error
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        list_installed_pkgs.get_metadata(utils.conda_root(), ggd_info_dir, "BAD_METADATA_FILE.json")
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match(":ggd:list: !!ERROR!! Unable to load the local metadata")
def _capture_list_output(args):
    """Run `ggd list` with the given argparse Namespace and return stripped stdout."""
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        list_installed_pkgs.list_installed_packages((), args)
    return temp_stdout.getvalue().strip()


def _assert_common_listing(output):
    """Assert the table header and the conda_root environment-variable hints are present."""
    assert "Name" in output and "Pkg-Version" in output and "Pkg-Build" in output and "Channel" in output and "Environment-Variables" in output
    assert "To use the environment variables run `source activate base" in output
    assert "You can see the available ggd data package environment variables by running `ggd show-env" in output


def test_list_installed_packages():
    """
    Test the main function of ggd list

    Covers: a plain run, pattern matching (exact / prefix / substring / non-matching),
    listing in a non-root prefix (by path and by name), the warning shown when a
    package is present in GGD metadata but missing from conda, and the --reset flag.
    """
    ## Normal Run
    args = Namespace(command='list', pattern=None, prefix=None, reset=False)
    output = _capture_list_output(args)
    assert "hg19-gaps-ucsc-v1" in output
    _assert_common_listing(output)

    ## Pattern set to exact package name
    args = Namespace(command='list', pattern="hg19-gaps-ucsc-v1", prefix=None, reset=False)
    output = _capture_list_output(args)
    assert "hg19-gaps-ucsc-v1" in output
    _assert_common_listing(output)

    ## Pattern set to beginning of package name
    args = Namespace(command='list', pattern="hg19", prefix=None, reset=False)
    output = _capture_list_output(args)
    assert "hg19-gaps-ucsc-v1" in output
    _assert_common_listing(output)

    ## Pattern set to middle of package name
    args = Namespace(command='list', pattern="gaps", prefix=None, reset=False)
    output = _capture_list_output(args)
    assert "hg19-gaps-ucsc-v1" in output
    _assert_common_listing(output)

    ## Pattern does not match an installed package
    args = Namespace(command='list', pattern="BADPATTERN", prefix=None, reset=False)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        list_installed_pkgs.list_installed_packages((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match("'{p}' did not match any installed data packages".format(p="BADPATTERN"))

    ## Package in set prefix (Not conda_root)
    p = os.path.join(utils.conda_root(), "envs", "temp_env")  ## From test_get_environment_variables()
    args = Namespace(command='list', pattern=None, prefix=p, reset=False)
    output = _capture_list_output(args)
    assert "hg19-pfam-domains-ucsc-v1" in output
    assert "Name" in output and "Pkg-Version" in output and "Pkg-Build" in output and "Channel" in output and "Environment-Variables" in output
    assert "The environment variables are only available when you are using the '{}' conda environment".format(p) in output

    ## Package in set prefix (Not conda_root) and using the prefix name rather than the prefix path
    args = Namespace(command='list', pattern=None, prefix="temp_env", reset=False)
    output = _capture_list_output(args)
    assert "hg19-pfam-domains-ucsc-v1" in output
    assert "Name" in output and "Pkg-Version" in output and "Pkg-Build" in output and "Channel" in output and "Environment-Variables" in output
    assert "The environment variables are only available when you are using the '{}' conda environment".format(p) in output

    ## Remove "hg19-pfam-domains-ucsc-v1" from temp_env conda metadata but not ggd
    ## Check that the package is still displayed, but a warning is provided about it is missing from conda metadata
    sp.check_output(["conda", "uninstall", "hg19-pfam-domains-ucsc-v1", "-p", utils.get_conda_prefix_path("temp_env")])
    ## BUG FIX: a fresh capture is used here; the original reused the previous
    ## StringIO, so this capture accumulated the prior run's output as well.
    output = _capture_list_output(args)
    assert "hg19-pfam-domains-ucsc-v1" in output
    assert "[WARNING: Present in GGD but missing from Conda]" in str(output)
    ## BUG FIX: the original asserted a bare string literal (always truthy);
    ## the message must actually be checked against the captured output.
    assert ("NOTE: Packages with the '[WARNING: Present in GGD but missing from Conda]' messages represent packages where the ggd"
            " package(s) are installed, but the package metadata has been removed from conda storage. This"
            " happens when one of the following happen: \n 1) The package represents an ID specific meta-"
            "recipe intsalled by GGD. \n 2) When the recipe is built locally using 'ggd check-recipe' and"
            " has not been uninstalled. (Commonly for private data packages).\n Or \n 3) The package is"
            " uninstalled using conda rather then ggd. The package is still available for use and is in"
            " the same state as before the 'conda uninstall'. To fix the problem on conda's side, uninstall"
            " the package with 'ggd uninstall' and re-install with 'ggd install'."
            ) in output

    ## Remove temp env created in test_get_environment_variables()
    sp.check_output(["conda", "env", "remove", "--name", "temp_env"])
    try:
        shutil.rmtree(p)
    except Exception:
        pass
    assert os.path.exists(p) == False

    ## Test basic reset works
    args = Namespace(command='list', pattern=None, prefix=None, reset=True)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        list_installed_pkgs.list_installed_packages((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match("0")  ## check that the exit code is 0 (clean exit after the reset)
### predict-path
def test_get_ggd_metadata():
    """
    Test that the get_ggd_metadata properly works
    """
    ## With a live connection the genomics channel metadata should be non-empty
    pytest_enable_socket()
    channel_metadata = predict_path.get_ggd_metadata("genomics")
    assert len(channel_metadata["packages"]) > 0

    ## Without a connection the function should exit via sys.exit()
    pytest_disable_socket()
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        predict_path.get_ggd_metadata("genomics")
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match("A internet connection is required to use this function. Please try again when you have secured an internet connection")

    ## Restore the socket for subsequent tests
    pytest_enable_socket()
def _capture_predict_path(args):
    """Run 'ggd predict-path' with the given argparse Namespace and return stripped stdout."""
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        predict_path.predict_path((), args)
    return temp_stdout.getvalue().strip()


def test_predict_path():
    """
    Test the main method of predict-path

    Covers bad package/file names, fuzzy and exact file-name matching, the
    --dir-path flag, prediction into a non-root prefix, meta-recipe ID handling,
    and agreement between the predicted path and an actually installed file.
    """
    pytest_enable_socket()

    ## Testing with grch37-autosomal-dominant-genes-berg-v1 data package

    ## Test bad package name
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz', package_name='bad_package_name-grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        predict_path.predict_path((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match("The {pn} data package is not one of the packages in the ggd-{c} channel".format(pn="bad_package_name-grch37-autosomal-dominant-genes-berg-v1", c="genomics"))

    ## Test bad file name
    args = Namespace(channel='genomics', command='predict-path', file_name='autodom-genes-berg', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        predict_path.predict_path((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match("The autodom-genes-berg file is not one of the files listed for this package. The files installed by this package are")

    ## Test closest file name
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(utils.conda_root(), "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz") in str(output)

    ## Test closest file name
    args = Namespace(channel='genomics', command='predict-path', file_name='berg-v1.compliment', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(utils.conda_root(), "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.compliment.bed.gz") in str(output)

    ## Test full name file name
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(utils.conda_root(), "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") in str(output)

    ## Test no file-name or dir-path
    args = Namespace(channel='genomics', command='predict-path', file_name=None, package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    temp_stdout = StringIO()
    with pytest.raises(SystemExit) as pytest_wrapped_e, redirect_stdout(temp_stdout):
        predict_path.predict_path((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly())  ## test that SystemExit was raised by sys.exit()
    output = temp_stdout.getvalue().strip()
    assert ":ggd:predict-path: !!ERROR!! Either the '--file-name' or the '--dir-path' argument is required. Neither was given" in output

    ## Test prediction in a different environment
    ### Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "predict-path")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "predict-path"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create conda environment
    sp.check_output(["conda", "create", "--name", "predict-path"])

    ## Test full name file name
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=temp_env, dir_path=False, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") in str(output)

    ## Test full name file name and that the ID is ignored for a non meta-recipe
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=temp_env, dir_path=False, id="SOME ID")
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") in str(output)

    ## Test full name file name and dir-path. (File name should be used over dir path)
    args = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=temp_env, dir_path=True, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") in str(output)

    ## Test dir path
    args = Namespace(channel='genomics', command='predict-path', file_name=None, package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=temp_env, dir_path=True, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1") in str(output)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") not in str(output)

    ## Test dir path and that the ID is ignored for a non meta-recipe
    args = Namespace(channel='genomics', command='predict-path', file_name=None, package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=temp_env, dir_path=True, id="SOME_ID")
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1") in str(output)
    assert os.path.join(temp_env, "share", "ggd", "Homo_sapiens", "GRCh37", "grch37-autosomal-dominant-genes-berg-v1", "1", "grch37-autosomal-dominant-genes-berg-v1.bed.gz.tbi") not in str(output)

    ## Test meta-recipe without an ID
    args = Namespace(channel='genomics', command='predict-path', file_name=None, package_name='meta-recipe-geo-accession-geo-v1', prefix=temp_env, dir_path=True, id=None)
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "meta-recipe", "meta-recipe", "meta-recipe-geo-accession-geo-v1", "1") in str(output)

    ## Test meta-recipe with an ID and that the id is set to lower case
    args = Namespace(channel='genomics', command='predict-path', file_name=None, package_name='meta-recipe-geo-accession-geo-v1', prefix=temp_env, dir_path=True, id="GSE123")
    output = _capture_predict_path(args)
    assert os.path.join(temp_env, "share", "ggd", "meta-recipe", "meta-recipe", "gse123-geo-v1", "1") in str(output)

    ## Remove the temp 'predict-path' env created above
    sp.check_output(["conda", "env", "remove", "--name", "predict-path"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False

    ## Test the predict path is the same path as an installed file
    install_args = Namespace(channel='genomics', command='install', debug=False, name=["grch37-autosomal-dominant-genes-berg-v1"], file=[], prefix=None, id=None)
    assert install.install((), install_args) == True
    ## list-files: get the actual path of the installed file
    ## (BUG FIX: this line was previously a bare, no-op `list_files` expression)
    args = Namespace(channel='genomics', command='list-files', genome_build=None, name="grch37-autosomal-dominant-genes-berg-v1", pattern="grch37-autosomal-dominant-genes-berg-v1.bed.gz", prefix=None, species=None, version=None)
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        list_files.list_files((), args)
    output = str(temp_stdout.getvalue().strip())
    assert os.path.exists(str(output))
    ## predict-path: the predicted location must equal the installed location
    args2 = Namespace(channel='genomics', command='predict-path', file_name='grch37-autosomal-dominant-genes-berg-v1.bed.gz', package_name='grch37-autosomal-dominant-genes-berg-v1', prefix=None, dir_path=False, id=None)
    output2 = _capture_predict_path(args2)
    assert str(output2) == str(output)
    sp.check_call(["ggd", "uninstall", "grch37-autosomal-dominant-genes-berg-v1"])
#--------------------------------------------------------
## Test functions based on hg19-gaps not being installed
#--------------------------------------------------------
def test_show_env_no_envvars():
    """After uninstalling hg19-gaps, 'ggd show-env' must no longer list its variables."""
    pytest_enable_socket()

    ## uninstalled hg19_gaps() testing
    uninstall_hg19_gaps_ucsc_v1()

    parser = ()
    args = Namespace(command='show-env', pattern=None)
    dir_var = "$ggd_hg19_gaps_v1_dir"
    file_var = "$ggd_hg19_gaps_v1_file"

    ## Test a normal run: neither environment variable should appear
    captured = StringIO()
    with redirect_stdout(captured):
        show_env.show_env(parser, args)
    output = captured.getvalue().strip()

    assert dir_var not in output
    assert file_var not in output
| 43.039296
| 1,818
| 0.661552
| 10,099
| 73,382
| 4.631449
| 0.050995
| 0.032497
| 0.037714
| 0.025014
| 0.817886
| 0.799799
| 0.773694
| 0.757168
| 0.735852
| 0.722981
| 0
| 0.018758
| 0.20813
| 73,382
| 1,704
| 1,819
| 43.064554
| 0.78616
| 0.118626
| 0
| 0.733583
| 0
| 0.030019
| 0.268974
| 0.103177
| 0
| 0
| 0
| 0
| 0.28424
| 1
| 0.025328
| false
| 0.012195
| 0.026266
| 0
| 0.051595
| 0.022514
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6ef21af10fa0e4a845699ffcd8434111c5e5fd04
| 289
|
py
|
Python
|
challenges/multi-bracket-validation/conftest.py
|
seattlechem/data-structures-and-algorithms
|
376e465c0a5529ea7c5c4e972a9852b6340251ff
|
[
"MIT"
] | null | null | null |
challenges/multi-bracket-validation/conftest.py
|
seattlechem/data-structures-and-algorithms
|
376e465c0a5529ea7c5c4e972a9852b6340251ff
|
[
"MIT"
] | null | null | null |
challenges/multi-bracket-validation/conftest.py
|
seattlechem/data-structures-and-algorithms
|
376e465c0a5529ea7c5c4e972a9852b6340251ff
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.fixture
def small_false_string():
    """An unbalanced bracket string (with literal backslashes) that should fail validation."""
    # Raw string avoids the invalid "\{" / "\}" escape sequences of the original,
    # which raise SyntaxWarning on modern Python; the string value is unchanged.
    return r"\{\}{{{[][](())"
@pytest.fixture
def square_true_string():
    """A balanced string made only of square-bracket pairs."""
    return "[][][][][]"
@pytest.fixture
def mixed_square_true_string():
    """Balanced (nested) square brackets interleaved with letters."""
    return "a[b]c[dd[ee]]f[g]"
@pytest.fixture
def two_bracket():
    """A close-then-open pair, which should not validate as balanced."""
    return ")("
| 13.136364
| 31
| 0.622837
| 37
| 289
| 4.648649
| 0.540541
| 0.302326
| 0.372093
| 0.290698
| 0.325581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15917
| 289
| 21
| 32
| 13.761905
| 0.707819
| 0
| 0
| 0.307692
| 0
| 0
| 0.152249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| true
| 0
| 0.076923
| 0.307692
| 0.692308
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3e700688753411c70501c101b9594caa08b1c9e0
| 32,140
|
py
|
Python
|
tests/Real.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | 2
|
2017-02-02T06:04:28.000Z
|
2019-04-23T07:14:58.000Z
|
tests/Real.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | null | null | null |
tests/Real.py
|
perfidia/regexpgen
|
a7139219b69127c488efffb588e7ef4177a8ec89
|
[
"MIT"
] | null | null | null |
'''
Created on Mar 16, 2012
@authors: Joanna Binczewska, Dawid Kowalski
'''
import unittest
import regexpgen
import re
import random
import math
class Test(unittest.TestCase):
def testDefault(self):
    """Exhaustively exercise regexpgen.real() across value scales, min/max
    combinations, and step/bound types via __runTest1..6.

    NOTE(review): Python 2 source (xrange, print statement); deliberately
    exhaustive and slow (3 x 1100 x 2 x 2 combinations).
    """
    # Hand-picked edge cases first
    self.__testForShorterReal()
    self.__testForWrongFormat()
    self.__testForWrongInput()
    self.__testForBoundaryReal()
    random.seed(0)  # deterministic pseudo-random inputs across runs
    for a in xrange(0, 3):
        for i in xrange(0, 1100):
            for j in [True, False]:
                for k in [True, False]:
                    # scale = number of characters in the decimal rendering of i
                    scale = len(self.__str(i))
                    # Each __runTestN is driven with every (setMin, setMax) combination
                    self.__runTest1(scale, False, False, j, k)
                    self.__runTest1(scale, False, True, j, k)
                    self.__runTest1(scale, True, False, j, k)
                    self.__runTest1(scale, True, True, j, k)
                    self.__runTest2(scale, False, False, j, k)
                    self.__runTest2(scale, False, True, j, k)
                    self.__runTest2(scale, True, False, j, k)
                    self.__runTest2(scale, True, True, j, k)
                    self.__runTest3(scale, False, False, j, k)
                    self.__runTest3(scale, False, True, j, k)
                    self.__runTest3(scale, True, False, j, k)
                    self.__runTest3(scale, True, True, j, k)
                    self.__runTest4(scale, False, False, j, k)
                    self.__runTest4(scale, False, True, j, k)
                    self.__runTest4(scale, True, False, j, k)
                    self.__runTest4(scale, True, True, j, k)
                    self.__runTest5(scale, False, False, j, k)
                    self.__runTest5(scale, False, True, j, k)
                    self.__runTest5(scale, True, False, j, k)
                    self.__runTest5(scale, True, True, j, k)
                    self.__runTest6(scale, False, False, j, k)
                    self.__runTest6(scale, False, True, j, k)
                    self.__runTest6(scale, True, False, j, k)
                    self.__runTest6(scale, True, True, j, k)
            # Progress indicator (Python 2 print statement).
            # NOTE(review): indentation was lost in this dump — the print may
            # belong one level in or out; placed per-i here, confirm intent.
            print a,i
def __runTest1(self, scale, setMin, setMax, useInt, stepInt):
    """Probe regexpgen.real("%lf", min?, max?) around a random [min, max] window.

    Values below min / above max must not match; values inside must match,
    including variants with 1-4 leading zeros (plain "%lf" accepts them here).
    setMin/setMax select whether each bound is passed to real(); useInt draws
    integer-valued bounds; stepInt uses an integer probing step.
    NOTE(review): Python 2 code; relies on class helpers (__str, __getInfo,
    __getRanges) defined elsewhere in this class.
    """
    # Integer vs. fractional probing step
    if stepInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    # Random window: min in [-10^scale, 10^scale], max in [min, min + 5^scale]
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    min = float(min)
    max = float(max)
    format = "%lf"
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    # Failure-message factory so assertion output shows value/regexp/format/bounds
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # The lower bound itself must match ...
        self.assertTrue(re.search(regexp, self.__str(min)), info(self.__str(min)))
        # ... while every probed value strictly below it must not
        while float(self.__str(i)) < float(self.__str(min)):
            self.assertFalse(re.match(regexp, self.__str(i)), info(self.__str(i)))
            i = i + step
    while float(self.__str(i)) <= float(self.__str(max)):
        # Leading-zero variants of the current value (sign kept in front)
        if i >= 0:
            a = "0" + self.__str(i); b = "00" + self.__str(i); c = "000" + self.__str(i); d = "0000" + self.__str(i);
        else:
            a = "-0" + self.__str(-i); b = "-00" + self.__str(-i); c = "-000" + self.__str(-i); d = "-0000" + self.__str(-i);
        self.assertTrue(re.match(regexp, self.__str(i)), info(self.__str(i)))
        # Plain "%lf" accepts the leading-zero variants (contrast __runTest2)
        self.assertTrue(re.match(regexp, a), info(a))
        self.assertTrue(re.match(regexp, b), info(b))
        self.assertTrue(re.match(regexp, c), info(c))
        self.assertTrue(re.match(regexp, d), info(d))
        i = i + step
    if setMax:
        # The upper bound matches; everything probed above it must not
        self.assertTrue(re.search(regexp, self.__str(max)), info(self.__str(max)))
        while i <= rangeRight:
            self.assertFalse(re.search(regexp, self.__str(i)), info(self.__str(i)))
            i = i + step
    if setMax and float(max) > 0:
        # A positive max with >= 2 fractional digits, truncated by one digit,
        # is still within range and so must still match
        splitted = self.__str(max).split(".")
        if len(splitted[1]) > 1:
            test = splitted[0] + "." + splitted[1][:-1]
            test = float(test)
            self.assertTrue(re.match(regexp, self.__str(test)), info(self.__str(test)))
def __runTest2(self, scale, setMin, setMax, useInt, stepInt):
    """Same window probing as __runTest1 but for format "%0lf".

    With the zero-padding flag, the leading-zero variants must NOT match.
    NOTE(review): the step choice here keys off useInt, while __runTest1 keys
    off stepInt (leaving stepInt unused in this method) — possibly a typo;
    confirm the intended parameter before changing. Python 2 code.
    """
    if useInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    # Random window: min in [-10^scale, 10^scale], max in [min, min + 5^scale]
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    min = float(min)
    max = float(max)
    format = "%0lf"
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    # Failure-message factory for assertion diagnostics
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # Lower bound matches; probed values strictly below it do not
        self.assertTrue(re.search(regexp, self.__str(min)), info(self.__str(min)))
        while float(self.__str(i)) < float(self.__str(min)):
            self.assertFalse(re.match(regexp, self.__str(i)), info(self.__str(i)))
            i = i + step
    while float(self.__str(i)) <= float(self.__str(max)):
        # Leading-zero variants of the current value (sign kept in front)
        if i >= 0:
            a = "0" + self.__str(i); b = "00" + self.__str(i); c = "000" + self.__str(i); d = "0000" + self.__str(i);
        else:
            a = "-0" + self.__str(-i); b = "-00" + self.__str(-i); c = "-000" + self.__str(-i); d = "-0000" + self.__str(-i);
        self.assertTrue(re.match(regexp, self.__str(i)), info(self.__str(i)))
        # "%0lf" must reject the leading-zero variants (contrast __runTest1)
        self.assertFalse(re.match(regexp, a), info(a))
        self.assertFalse(re.match(regexp, b), info(b))
        self.assertFalse(re.match(regexp, c), info(c))
        self.assertFalse(re.match(regexp, d), info(d))
        i = i + step
    if setMax:
        # Upper bound matches; probed values above it do not
        self.assertTrue(re.search(regexp, self.__str(max)), info(self.__str(max)))
        while i <= rangeRight:
            self.assertFalse(re.search(regexp, self.__str(i)), info(self.__str(i)))
            i = i + step
    if setMax and float(max) > 0:
        # One-digit truncation of a positive max stays in range -> must match
        splitted = self.__str(max).split(".")
        if len(splitted[1]) > 1:
            test = splitted[0] + "." + splitted[1][:-1]
            test = float(test)
            self.assertTrue(re.match(regexp, self.__str(test)), info(self.__str(test)))
def __runTest3(self, scale, setMin, setMax, useInt, stepInt):
    """Probe regexpgen.real("%0.<scale>lf", ...): fixed precision with zero flag.

    For each probed value, x/y/z are renderings with scale-1 / scale / scale+1
    fractional digits; only the exact precision (y) may match in range, and
    leading-zero variants must not. NOTE(review): step choice keys off useInt
    (stepInt unused) — see the note on __runTest2. Python 2 code.
    """
    if useInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    format = "%0.{0}lf".format(scale)
    # Truncate the bounds to the format's precision before building the regexp
    min = float(self.__sliceFloat(float(min), scale))
    max = float(self.__sliceFloat(float(max), scale))
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    # Failure-message factory for assertion diagnostics
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # Below the lower bound nothing should match at any precision
        while float(self.__str(i)) < float(self.__str(min)) - step:
            x = self.__sliceFloat(i, scale - 1 if scale > 1 else 0)
            y = self.__sliceFloat(i, scale)
            z = self.__sliceFloat(i, scale + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
        i = i + step
    while float(self.__str(i)) <= float(self.__str(max)):
        # Leading-zero variants of the current value (sign kept in front)
        if i >= 0:
            a = "0" + self.__str(i); b = "00" + self.__str(i); c = "000" + self.__str(i); d = "0000" + self.__str(i);
        else:
            a = "-0" + self.__str(-i); b = "-00" + self.__str(-i); c = "-000" + self.__str(-i); d = "-0000" + self.__str(-i);
        x = self.__sliceFloat(i, scale - 1 if scale > 1 else 0)
        y = self.__sliceFloat(i, scale)
        z = self.__sliceFloat(i, scale + 1)
        # Exactly `scale` fractional digits matches; one extra digit does not
        self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
        self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
        # The zero flag rejects the leading-zero variants
        self.assertFalse(re.match(regexp, a), info(a))
        self.assertFalse(re.match(regexp, b), info(b))
        self.assertFalse(re.match(regexp, c), info(c))
        self.assertFalse(re.match(regexp, d), info(d))
        i = i + step
    if setMax:
        # Above the upper bound nothing should match at any precision
        i = i + step
        while i <= rangeRight:
            x = self.__sliceFloat(i, scale - 1 if scale > 0 else 0)
            y = self.__sliceFloat(i, scale)
            z = self.__sliceFloat(i, scale + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
def __runTest4(self, scale, setMin, setMax, useInt, stepInt):
    """Probe regexpgen.real("%.<scale>lf", ...): fixed precision, no zero flag.

    Exact-precision values must match, including leading-zero variants (a-d)
    truncated to the precision; the same variants with one extra fractional
    digit (aa-dd) and over-precise renderings (z) must not.
    NOTE(review): step choice keys off useInt (stepInt unused) — see the note
    on __runTest2. Python 2 code.
    """
    if useInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    format = "%.{0}lf".format(scale)
    # Truncate the bounds to the format's precision before building the regexp
    min = float(self.__sliceFloat(float(min), scale))
    max = float(self.__sliceFloat(float(max), scale))
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    # Failure-message factory for assertion diagnostics
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # Below the lower bound nothing should match at any precision
        while float(self.__str(i)) < float(self.__str(min)) - step:
            x = self.__sliceFloat(i, scale - 1 if scale > 0 else 0)
            y = self.__sliceFloat(i, scale)
            z = self.__sliceFloat(i, scale + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
        i = i + step
    while float(self.__str(i)) <= float(self.__str(max)):
        # Leading-zero variants of the current value (sign kept in front)
        if i >= 0:
            a = "0" + self.__str(i); b = "00" + self.__str(i); c = "000" + self.__str(i); d = "0000" + self.__str(i);
        else:
            a = "-0" + self.__str(-i); b = "-00" + self.__str(-i); c = "-000" + self.__str(-i); d = "-0000" + self.__str(-i);
        x = self.__sliceFloat(i, scale - 1 if scale > 1 else 0)
        y = self.__sliceFloat(i, scale)
        z = self.__sliceFloat(i, scale + 1)
        # Leading-zero variants truncated to the exact precision ...
        a = self.__sliceFloat(a, scale)
        b = self.__sliceFloat(b, scale)
        c = self.__sliceFloat(c, scale)
        d = self.__sliceFloat(d, scale)
        # ... and the same with one extra fractional digit
        aa = self.__sliceFloat(a, scale + 1)
        bb = self.__sliceFloat(b, scale + 1)
        cc = self.__sliceFloat(c, scale + 1)
        dd = self.__sliceFloat(d, scale + 1)
        self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
        if scale > 2:
            self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
        self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
        # Without the zero flag, leading zeros are accepted at exact precision
        self.assertTrue(re.match(regexp, a), info(a))
        self.assertTrue(re.match(regexp, b), info(b))
        self.assertTrue(re.match(regexp, c), info(c))
        self.assertTrue(re.match(regexp, d), info(d))
        # ... but not with an extra fractional digit
        self.assertFalse(re.match(regexp, aa), info(aa))
        self.assertFalse(re.match(regexp, bb), info(bb))
        self.assertFalse(re.match(regexp, cc), info(cc))
        self.assertFalse(re.match(regexp, dd), info(dd))
        i = i + step
    if setMax:
        # Above the upper bound nothing should match at any precision
        i = i + step
        while i <= rangeRight:
            x = self.__sliceFloat(i, scale - 1 if scale > 0 else 0)
            y = self.__sliceFloat(i, scale)
            z = self.__sliceFloat(i, scale + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
def __runTest5(self, scale, setMin, setMax, useInt, stepInt):
    """Randomized check of regexpgen.real() for a "%<width>.<scale>lf" format.

    Sweeps values from below `min` to above `max`: in-range values rendered
    with exactly `scale` decimals must match, out-of-range values must not.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- confirm the nesting of the `if scale > ...` guards against
    the original file.
    """
    # Sweep step: small random int or float depending on the tested mode.
    if useInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    # NOTE: `min`, `max` and `format` shadow builtins (kept unchanged here).
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    format = "%{0}.{1}lf".format(scale + 2, scale)
    # Normalize the bounds to exactly `scale` decimal places.
    min = float(self.__sliceFloat(float(min), scale))
    max = float(self.__sliceFloat(float(max), scale))
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # Values strictly below min (with a one-step safety margin) must not match.
        while float(self.__str(i)) < float(self.__str(min)) - step:
            x = self.__sliceFloat(i, scale - 1 if scale > 0 else 0)  # one decimal short
            y = self.__sliceFloat(i, scale)                          # exact precision
            z = self.__sliceFloat(i, scale + 1)                      # one decimal long
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
        i = i + step
    # In-range values: exact precision must match, zero-padded variants
    # (a..d) must match, over-padded variants (aa..dd) must not.
    while float(self.__str(i)) <= float(self.__str(max)):
        if i >= 0:
            a = "0" + self.__str(i); b = "00" + self.__str(i); c = "000" + self.__str(i); d = "0000" + self.__str(i);
        else:
            a = "-0" + self.__str(-i); b = "-00" + self.__str(-i); c = "-000" + self.__str(-i); d = "-0000" + self.__str(-i);
        x = self.__sliceFloat(i, scale - 1 if scale > 1 else 0)
        y = self.__sliceFloat(i, scale)
        z = self.__sliceFloat(i, scale + 1)
        a = self.__sliceFloat(a, scale)
        b = self.__sliceFloat(b, scale)
        c = self.__sliceFloat(c, scale)
        d = self.__sliceFloat(d, scale)
        aa = self.__sliceFloat(a, scale + 1)
        bb = self.__sliceFloat(b, scale + 1)
        cc = self.__sliceFloat(c, scale + 1)
        dd = self.__sliceFloat(d, scale + 1)
        self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
        if scale > 2:
            self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
        self.assertTrue(re.match(regexp, a), info(a))
        self.assertTrue(re.match(regexp, b), info(b))
        self.assertTrue(re.match(regexp, c), info(c))
        self.assertTrue(re.match(regexp, d), info(d))
        self.assertFalse(re.match(regexp, aa), info(aa))
        self.assertFalse(re.match(regexp, bb), info(bb))
        self.assertFalse(re.match(regexp, cc), info(cc))
        self.assertFalse(re.match(regexp, dd), info(dd))
        i = i + step
    if setMax:
        i = i + step
        # Values strictly above max must not match.
        while i <= rangeRight:
            x = self.__sliceFloat(i, scale - 1 if scale > 0 else 0)
            y = self.__sliceFloat(i, scale)
            z = self.__sliceFloat(i, scale + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
def __runTest6(self, scale, setMin, setMax, useInt, stepInt):
    """Randomized check of zero-padded formats "%0<width>.<scale>lf".

    scaleR is the fractional precision; after the adjustment below, scaleI
    is the minimum integer-part width implied by the zero padding.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    source -- verify guard nesting against the original file.
    """
    if useInt:
        step = random.randint(1,3)
    else:
        step = random.uniform(0.5, 1.5)
    scaleR = scale
    scaleI = scale + 1
    # `min`, `max` and `format` shadow builtins (kept unchanged here).
    min = random.uniform(-1*(10**scale), 10**scale) if not useInt else random.randint(-1*(10**scale), 10**scale)
    max = random.uniform(min, min + 5**scale) if not useInt else random.randint(min, min + 5**scale)
    format = "%0{0}.{1}lf".format(scaleI + 1, scaleR) # +1 because dot is included
    # Normalize bounds to the format's fractional and integer widths.
    min = float(self.__sliceFloat(float(min), scaleR))
    max = float(self.__sliceFloat(float(max), scaleR))
    scaleI = scaleI - scaleR
    min = float(self.__sliceFloatIntPart(float(min), scaleI))
    max = float(self.__sliceFloatIntPart(float(max), scaleI))
    regexp = regexpgen.real(format, min if setMin else None, max if setMax else None)
    info = lambda value: self.__getInfo(value, regexp, format, min if setMin else None, max if setMax else None)
    (rangeLeft, rangeRight) = self.__getRanges(min, max)
    i = rangeLeft
    if setMin:
        # Values below min (one-step margin) must never match.
        while float(self.__str(i)) < float(self.__str(min)) - step:
            x = self.__sliceFloat(i, scaleR - 1 if scaleR > 0 else 0)
            y = self.__sliceFloat(i, scaleR)
            z = self.__sliceFloat(i, scaleR + 1)
            x = self.__sliceFloatIntPart(x, scaleI)
            y = self.__sliceFloatIntPart(y, scaleI)
            z = self.__sliceFloatIntPart(z, scaleI)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
        i = i + step
    # In-range values: exact fractional precision must match; one digit
    # short/long must not (for large enough scale).
    while float(self.__str(i)) <= float(self.__str(max)):
        x = self.__sliceFloat(i, scaleR - 1 if scaleR > 1 else 0)
        y = self.__sliceFloat(i, scaleR)
        z = self.__sliceFloat(i, scaleR + 1)
        x = self.__sliceFloatIntPart(x, scaleI)
        y = self.__sliceFloatIntPart(y, scaleI)
        z = self.__sliceFloatIntPart(z, scaleI)
        self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
        if scale > 2:
            self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
        # Now vary the integer-part padding width instead of the precision;
        # expectations differ per min/max combination.
        x = self.__sliceFloat(i, scaleR)
        y = self.__sliceFloat(i, scaleR)
        z = self.__sliceFloat(i, scaleR)
        if not setMin and not setMax:
            x = self.__sliceFloatIntPart(x, scaleI - 1 if scaleI > 1 else 0)
            y = self.__sliceFloatIntPart(y, scaleI)
            z = self.__sliceFloatIntPart(z, scaleI + 1, "1")
            self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
            if scaleI > 2:
                self.assertTrue(re.match(regexp, self.__str(x)), info(self.__str(x)))
            if i > 0:
                self.assertTrue(re.match(regexp, self.__str(z)), info(self.__str(z)))
        if setMin and not setMax:
            x = self.__sliceFloatIntPart(x, scaleI - 1 if scaleI > 1 else 0)
            y = self.__sliceFloatIntPart(y, scaleI)
            z = self.__sliceFloatIntPart(z, scaleI + 1, "1")
            self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
            if scaleI > 2:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            if i > 0:
                self.assertTrue(re.match(regexp, self.__str(z)), info(self.__str(z)))
        if setMax and not setMin:
            x = self.__sliceFloatIntPart(x, scaleI - 1 if scaleI > 1 else 0)
            y = self.__sliceFloatIntPart(y, scaleI)
            z = self.__sliceFloatIntPart(z, scaleI + 1, "1")
            self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
            if scaleI > 2:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            if i < 0:
                self.assertTrue(re.match(regexp, self.__str(z)), info(self.__str(z)))
        if setMax and setMin:
            x = self.__sliceFloatIntPart(x, scaleI - 1 if scaleI > 1 else 0)
            y = self.__sliceFloatIntPart(y, scaleI)
            self.assertTrue(re.match(regexp, self.__str(y)), info(self.__str(y)))
            if scaleI > 2:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
        i = i + step
    if setMax:
        i = i + step
        # Values above max must never match.
        while i <= rangeRight:
            x = self.__sliceFloat(i, scaleR - 1 if scaleR > 0 else 0)
            y = self.__sliceFloat(i, scaleR)
            z = self.__sliceFloat(i, scaleR + 1)
            x = self.__sliceFloatIntPart(x, scaleI - 1 if scaleI > 0 else 0)
            y = self.__sliceFloatIntPart(y, scaleI)
            z = self.__sliceFloatIntPart(z, scaleI + 1)
            if scale > 1:
                self.assertFalse(re.match(regexp, self.__str(x)), info(self.__str(x)))
            self.assertFalse(re.match(regexp, self.__str(y)), info(self.__str(y)))
            self.assertFalse(re.match(regexp, self.__str(z)), info(self.__str(z)))
            i = i + step
def __sliceFloat(self, f, scale):
    """Return *f* with exactly `scale` fractional digits.

    Too-long fractions are truncated (not rounded); too-short fractions are
    zero-padded.  Returns a string "int.frac", except when the resulting
    fraction is empty (scale == 0), where the float value of the integer
    part is returned -- the __runTest* helpers rely on both shapes.
    """
    splitted = self.__str(f).split(".")
    # Fix: inputs without a decimal point (e.g. plain ints) used to raise
    # IndexError on splitted[1]; treat them as having an empty fraction.
    if len(splitted) == 1:
        splitted.append("")
    if len(splitted[1]) > scale:
        # Truncate extra digits (no rounding, by design).
        x = splitted[1][0:scale]
    elif len(splitted[1]) < scale:
        # Fix: zeros were prepended ("1" -> "001"), which changes the
        # numeric value (0.1 became 0.001); pad on the right instead so
        # the value is preserved ("1" -> "100").
        x = splitted[1] + "0"*(scale - len(splitted[1]))
    else:
        x = splitted[1]
    if x == "":
        return float(splitted[0])
    return splitted[0] + "." + x
def __sliceFloatIntPart(self, f, scale, add = "0"):
    """Left-pad the integer part of *f* with `add` up to `scale` characters.

    Negative numbers keep their sign and are never padded (the format's
    zero padding counts the minus sign).  Returns "int.frac" as a string.
    """
    splitted = self.__str(f).split(".")
    # Fix: the sign used to be detected on str(f) while the digits come
    # from self.__str(f); the two disagree for scientific notation
    # (str(1e-05) contains "-" although the value is positive), which
    # mangled the integer part.  Detect the sign on the parsed string.
    if splitted[0].find("-") >= 0:
        minus = "-"
        splitted[0] = splitted[0][1:]
    else:
        minus = ""
    if len(splitted[0]) < scale:
        if minus == "":
            x = add * (scale - len(splitted[0])) + splitted[0]
        else:
            # Negative values are left unpadded on purpose.
            x = splitted[0]
    else:
        x = splitted[0]
    return minus + x + "." + splitted[1]
def __getRanges(self, min, max):
    """Pick a sweep interval that brackets [min, max] with a 2x margin.

    The six sign combinations below are mutually exclusive for min <= max;
    the all-zero case falls back to a fixed [-100, 100] window.
    """
    min = float(min)
    max = float(max)
    if min < 0 and max < 0:
        bounds = (min * 2, -min * 2)
    elif min < 0 and max > 0:
        bounds = (min * 2, max * 2)
    elif min > 0 and max > 0:
        bounds = (-max * 2, max * 2)
    elif min == 0 and max != 0:
        bounds = (-max * 2, max * 2)
    elif max == 0 and min != 0:
        bounds = (min * 2, -min * 2)
    elif max == 0 and min == 0:
        bounds = (-100.0, 100.0)
    return bounds
def __getInfo(self, i, regexp, format, min, max):
    """Compose the diagnostic message attached to failed assertions."""
    template = "Failed! Number: {0}, min: {1}, max: {2}, format: {3}, regexp: {4}"
    return template.format(i, self.__str(min), self.__str(max), format, regexp)
def __str(self, x):
    """Render *x* as a plain decimal string, expanding scientific notation.

    None passes straight through str() (yielding "None"); values whose
    str() contains an exponent marker are reformatted with "{:f}"
    (fixed-point, 6 decimals).
    """
    # Idiom fixes: compare to None with `is not`, and use substring
    # membership instead of count() > 0.
    if x is not None and "e" in str(x):
        return "{:f}".format(float(x))
    return str(x)
def __testForWrongFormat(self):
    """regexpgen.real must raise ValueError for malformed format strings."""
    bad_calls = [
        ("%0d", -100.0, 1000.0),
        ("%d", 100.0, 1000.0),
        ("aaaaaaaaa", None, 1000.0),
        ("%0.2lf", None, 1000),
        (None, None, 1000),
    ]
    for args in bad_calls:
        self.assertRaises(ValueError, regexpgen.real, *args)
    # Non-string single arguments are rejected as well.
    self.assertRaises(ValueError, regexpgen.real, 123)
    self.assertRaises(ValueError, regexpgen.real, 12.3)
def __testForWrongInput(self):
    """regexpgen.real must raise ValueError for bounds that do not fit the format."""
    bad_calls = [
        ("%lf", -100, 1000),
        ("%0lf", 100, 1000),
        ("%lf", None, 10000),
        ("%0.2lf", None, 10000),
        ("%.2lf", None, 1000),
        ("%lf", "123", 123),
        ("%lf", 123, "123"),
    ]
    for args in bad_calls:
        self.assertRaises(ValueError, regexpgen.real, *args)
def __testForShorterReal(self):
    """Values written with fewer (or more) decimals than the bounds must
    still be range-checked correctly by the "%lf" pattern.

    Each case is (min, max, candidate, should_match); comments in the
    original (Polish "powinno byc match/None" = "should be match/None")
    are encoded in the boolean column.
    """
    cases = [
        (7.009, 7.698, "7.69", True),
        (7.09, 7.98, "7.69", True),
        (None, 0.02, "0.04", False),
        (-25.2691864655, -25.0697582645, "-25.8042065349", False),
        (6.063, 9.493, "9.49", True),
        (15.07, None, "15.3", True),
        (None, 0.4, "0.8", False),
        (None, 14.00, "0.519", True),
        (40.822, 46.202, "46.21", False),
        (0.97, 1.04, "0.9", False),
        (None, 13.077, "13.088", False),
        (None, 13.93, "13.9", True),
        (-6.1, -3.3, "-6.8", False),
        # Progressively longer prefixes of a value near the upper bound.
        (88.7653193745, 88.920716654, "88.0", False),
        (88.7653193745, 88.920716654, "88.9", True),
        (88.7653193745, 88.920716654, "88.92", True),
        (88.7653193745, 88.920716654, "88.920", True),
        (88.7653193745, 88.920716654, "88.9207", True),
        (88.7653193745, 88.920716654, "88.92071", True),
        (88.7653193745, 88.920716654, "88.920716", True),
        (88.7653193745, 88.920716654, "88.9207166", True),
        (88.7653193745, 88.920716654, "88.92071665", True),
    ]
    for low, high, candidate, expected in cases:
        pattern = regexpgen.real("%lf", low, high)
        if expected:
            self.assertTrue(re.match(pattern, candidate))
        else:
            self.assertFalse(re.match(pattern, candidate))
def __testForBoundaryReal(self):
    """Boundary checks for zero-padded formats "%0W.Plf".

    Each case is (format, min, max, candidate, should_match); the padding
    width interacts with the sign and the numeric range.
    """
    cases = [
        # min only
        ("%03.1lf", 5.1, None, "8.9", True),
        ("%03.1lf", 5.1, None, "88.9", True),
        ("%03.1lf", -5.1, None, "-4.9", True),
        ("%03.1lf", -5.1, None, "55.9", True),
        ("%03.1lf", -5.1, None, "-55.1", False),
        ("%04.1lf", 5.9, None, "05.9", True),
        ("%05.1lf", 5.9, None, "05.9", False),
        ("%06.2lf", 5.9, None, "005.90", True),
        ("%04.2lf", 5.9, None, "3335.9", False),
        ("%05.1lf", -5.9, None, "-05.9", False),
        ("%06.2lf", -5.9, None, "-005.90", True),
        # max only
        ("%03.1lf", None, 5.1, "4.9", True),
        ("%03.1lf", None, 5.1, "-88.9", True),
        ("%03.1lf", None, -5.1, "-5.9", True),
        ("%03.1lf", None, -5.1, "-55.9", True),
        ("%03.1lf", None, -5.1, "55.1", False),
        ("%04.1lf", None, 5.9, "05.9", True),
        ("%05.1lf", None, 5.9, "05.9", False),
        ("%06.2lf", None, 5.9, "005.90", True),
        ("%06.2lf", None, 5.9, "05.90", False),
        ("%04.2lf", None, 5.9, "3335.91", False),
        ("%05.1lf", None, -5.9, "-05.9", False),
        ("%06.2lf", None, -5.9, "-005.90", True),
        ("%03.1lf", None, -9.1, "99.1", False),
        ("%04.1lf", None, 9.9, "09.9", True),
        ("%05.1lf", None, 9.9, "09.9", False),
        ("%06.2lf", None, 9.9, "009.90", True),
        # both bounds
        ("%03.1lf", 3.1, 5.1, "4.9", True),
        ("%03.1lf", -7.3, 5.1, "-6.9", True),
        ("%03.1lf", -8.9, -5.1, "-5.9", True),
        ("%03.1lf", -5.1, -5.0, "55.1", False),
        ("%04.1lf", 4.1, 5.9, "05.9", True),
        ("%05.1lf", 1.0, 5.9, "05.9", False),
        ("%06.2lf", 1.0, 5.9, "005.90", True),
        ("%06.2lf", 1.2, 5.9, "05.90", False),
        ("%04.2lf", 2.8, 5.9, "3335.91", False),
        ("%05.1lf", -22.8, -5.9, "-05.9", False),
        ("%06.2lf", -22.8, -5.9, "-005.90", True),
        ("%03.1lf", -9.3, -9.1, "99.1", False),
        ("%04.1lf", 1.2, 9.9, "09.9", True),
        ("%05.1lf", 1.2, 9.9, "09.9", False),
        ("%06.2lf", 1.2, 9.9, "009.90", True),
        ("%06.4lf", 1.3399, 9.9434, "009.9044", False),
        ("%08.4lf", 1.3399, 9.9434, "009.9044", True),
        ("%03.1lf", None, -3.0, "-12.0", True),
    ]
    for fmt, low, high, candidate, expected in cases:
        pattern = regexpgen.real(fmt, low, high)
        if expected:
            self.assertTrue(re.match(pattern, candidate))
        else:
            self.assertFalse(re.match(pattern, candidate))
if __name__ == "__main__":
    # Run the whole unittest suite when this file is executed directly.
    #import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
| 49.143731
| 142
| 0.548382
| 4,281
| 32,140
| 3.962392
| 0.045317
| 0.077168
| 0.081177
| 0.102458
| 0.910924
| 0.894417
| 0.833343
| 0.807699
| 0.782586
| 0.762365
| 0
| 0.059326
| 0.290417
| 32,140
| 653
| 143
| 49.218989
| 0.684469
| 0.00921
| 0
| 0.630631
| 0
| 0.001802
| 0.032722
| 0
| 0
| 0
| 0
| 0
| 0.300901
| 0
| null | null | 0
| 0.009009
| null | null | 0.001802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3e90d72970a546b62bde71fbbe3049861a6fd9eb
| 128
|
py
|
Python
|
whaTFRecordsWriter/__init__.py
|
haideraltahan/whaTFRecordsWriter
|
8e53a3dd3eef927ae51376ac5dee4e263fb081a5
|
[
"MIT"
] | null | null | null |
whaTFRecordsWriter/__init__.py
|
haideraltahan/whaTFRecordsWriter
|
8e53a3dd3eef927ae51376ac5dee4e263fb081a5
|
[
"MIT"
] | null | null | null |
whaTFRecordsWriter/__init__.py
|
haideraltahan/whaTFRecordsWriter
|
8e53a3dd3eef927ae51376ac5dee4e263fb081a5
|
[
"MIT"
] | null | null | null |
from whaTFRecordsWriter.encoders import *
from whaTFRecordsWriter.writer import *
from whaTFRecordsWriter.preprocessing import *
| 42.666667
| 46
| 0.867188
| 12
| 128
| 9.25
| 0.5
| 0.594595
| 0.504505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085938
| 128
| 3
| 46
| 42.666667
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e4f6d5e181da088f19108514b2954adde60cda60
| 580
|
py
|
Python
|
eval_covid19china_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Evaluate the unetplusplus / timm-regnetx_002 model with the
# ShiftScaleRotate augmentation on each of the five covid19china folds.
# The five original hard-coded commands differed only in the fold index.
CONFIG_TEMPLATE = (
    "python main.py --configs configs/"
    "eval_covid19china_unetplusplus_timm-regnetx_002_{}_ShiftScaleRotate.yml"
)

for fold in range(5):
    os.system(CONFIG_TEMPLATE.format(fold))
| 52.727273
| 110
| 0.856897
| 80
| 580
| 5.8375
| 0.3
| 0.107066
| 0.12848
| 0.203426
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0.890792
| 0
| 0.054645
| 0.053448
| 580
| 11
| 111
| 52.727273
| 0.795993
| 0
| 0
| 0
| 0
| 0
| 0.886403
| 0.671256
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5fa009f676a248401a1a5c22fcb88358797b8e52
| 9,101
|
py
|
Python
|
lte/protos/policydb_pb2_grpc.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
lte/protos/policydb_pb2_grpc.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
lte/protos/policydb_pb2_grpc.py
|
aweimeow/enodebd
|
e1cd20693153e6b85e5d1bf9d21af2501c358601
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from lte.protos import policydb_pb2 as lte_dot_protos_dot_policydb__pb2
from orc8r.protos import common_pb2 as orc8r_dot_protos_dot_common__pb2
class PolicyAssignmentControllerStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # NOTE(review): generated by the gRPC protoc plugin ("DO NOT EDIT"
        # header) -- change the .proto and regenerate instead of editing.
        self.EnableStaticRules = channel.unary_unary(
                '/magma.lte.PolicyAssignmentController/EnableStaticRules',
                request_serializer=lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.SerializeToString,
                response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
                )
        self.DisableStaticRules = channel.unary_unary(
                '/magma.lte.PolicyAssignmentController/DisableStaticRules',
                request_serializer=lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.SerializeToString,
                response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
                )
class PolicyAssignmentControllerServicer(object):
    """Missing associated documentation comment in .proto file."""

    # NOTE(review): generated server skeleton -- implement the service by
    # subclassing and overriding these methods, not by editing this file.
    def EnableStaticRules(self, request, context):
        """Associate the static rule with the IMSI
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DisableStaticRules(self, request, context):
        """Unassociate the static rule with the IMSI
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_PolicyAssignmentControllerServicer_to_server(servicer, server):
    # Generated registration helper: maps each RPC name to the servicer's
    # handler with its (de)serializers and attaches them to `server`.
    rpc_method_handlers = {
            'EnableStaticRules': grpc.unary_unary_rpc_method_handler(
                    servicer.EnableStaticRules,
                    request_deserializer=lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.FromString,
                    response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
            ),
            'DisableStaticRules': grpc.unary_unary_rpc_method_handler(
                    servicer.DisableStaticRules,
                    request_deserializer=lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.FromString,
                    response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'magma.lte.PolicyAssignmentController', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class PolicyAssignmentController(object):
    """Missing associated documentation comment in .proto file."""

    # NOTE(review): experimental convenience API generated by protoc; each
    # static method performs a single unary-unary call against `target`.
    @staticmethod
    def EnableStaticRules(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/magma.lte.PolicyAssignmentController/EnableStaticRules',
            lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.SerializeToString,
            orc8r_dot_protos_dot_common__pb2.Void.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DisableStaticRules(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/magma.lte.PolicyAssignmentController/DisableStaticRules',
            lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.SerializeToString,
            orc8r_dot_protos_dot_common__pb2.Void.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class PolicyDBStub(object):
    """--------------------------------------------------------------------------
    PolicyDB service definition
    --------------------------------------------------------------------------
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # NOTE(review): generated by the gRPC protoc plugin -- regenerate
        # from the .proto rather than editing.
        self.EnableStaticRules = channel.unary_unary(
                '/magma.lte.PolicyDB/EnableStaticRules',
                request_serializer=lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.SerializeToString,
                response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
                )
        self.DisableStaticRules = channel.unary_unary(
                '/magma.lte.PolicyDB/DisableStaticRules',
                request_serializer=lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.SerializeToString,
                response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
                )
class PolicyDBServicer(object):
    """--------------------------------------------------------------------------
    PolicyDB service definition
    --------------------------------------------------------------------------
    """

    # NOTE(review): generated server skeleton -- subclass and override.
    def EnableStaticRules(self, request, context):
        """Immediately install the static policy for the IMSI
        Also unassociate the static rule with the IMSI on orc8r
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DisableStaticRules(self, request, context):
        """Immediately uninstall the static policy for the IMSI
        Also unassociate the static rule with the IMSI on orc8r
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_PolicyDBServicer_to_server(servicer, server):
    # Generated registration helper for the PolicyDB service: binds the
    # servicer's handlers (with their (de)serializers) onto `server`.
    rpc_method_handlers = {
            'EnableStaticRules': grpc.unary_unary_rpc_method_handler(
                    servicer.EnableStaticRules,
                    request_deserializer=lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.FromString,
                    response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
            ),
            'DisableStaticRules': grpc.unary_unary_rpc_method_handler(
                    servicer.DisableStaticRules,
                    request_deserializer=lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.FromString,
                    response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'magma.lte.PolicyDB', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class PolicyDB(object):
    """--------------------------------------------------------------------------
    PolicyDB service definition
    --------------------------------------------------------------------------
    """

    # NOTE(review): experimental convenience API generated by protoc; each
    # static method performs a single unary-unary call against `target`.
    @staticmethod
    def EnableStaticRules(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/magma.lte.PolicyDB/EnableStaticRules',
            lte_dot_protos_dot_policydb__pb2.EnableStaticRuleRequest.SerializeToString,
            orc8r_dot_protos_dot_common__pb2.Void.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DisableStaticRules(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/magma.lte.PolicyDB/DisableStaticRules',
            lte_dot_protos_dot_policydb__pb2.DisableStaticRuleRequest.SerializeToString,
            orc8r_dot_protos_dot_common__pb2.Void.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 42.528037
| 121
| 0.649819
| 814
| 9,101
| 6.92629
| 0.142506
| 0.041504
| 0.055339
| 0.034587
| 0.913622
| 0.889677
| 0.868393
| 0.866442
| 0.856864
| 0.837176
| 0
| 0.006321
| 0.235139
| 9,101
| 213
| 122
| 42.7277
| 0.80362
| 0.147786
| 0
| 0.811189
| 1
| 0
| 0.089911
| 0.053947
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083916
| false
| 0
| 0.020979
| 0.027972
| 0.174825
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
396eb1cbbf0bc2885dd7f11b3cfd8e47a9be4915
| 1,253
|
py
|
Python
|
tests/python/test_loops.py
|
AnimatedRNG/taichi
|
f1f403042dadf8b58887431dbf7a9a661c005bb2
|
[
"MIT"
] | null | null | null |
tests/python/test_loops.py
|
AnimatedRNG/taichi
|
f1f403042dadf8b58887431dbf7a9a661c005bb2
|
[
"MIT"
] | 1
|
2019-10-20T08:46:09.000Z
|
2019-10-20T08:46:09.000Z
|
tests/python/test_loops.py
|
falcontr14/taichi
|
f738bc165803c9e5083deb314da1cb6ca42ad591
|
[
"MIT"
] | null | null | null |
import taichi as ti
def test_loops():
    """Range-for inside a Taichi kernel over the upper part of a 1D field.

    NOTE(review): code kept byte-for-byte -- @ti.kernel functions are
    re-compiled from their Python source by Taichi, so restructuring the
    loops could change what gets offloaded.
    """
    # Run the same check on both CPU and CUDA backends.
    for arch in [ti.x86_64, ti.cuda]:
        ti.reset()
        ti.cfg.arch = arch
        x = ti.var(ti.f32)
        y = ti.var(ti.f32)

        N = 512

        @ti.layout
        def place():
            ti.root.dense(ti.i, N).place(x)
            ti.root.dense(ti.i, N).place(y)
            ti.root.lazy_grad()

        # Seed the upper half of y; x stays at its default (zero).
        for i in range(N // 2, N):
            y[i] = i - 300

        @ti.kernel
        def func():
            # Kernel writes only indices [N//2 + 3, N).
            for i in range(N // 2 + 3, N):
                x[i] = ti.abs(y[i])

        func()

        # Elements before the kernel's range must remain zero.
        for i in range(N // 2 + 3):
            assert x[i] == 0
        # Elements in range must hold |y[i]|.
        for i in range(N // 2 + 3, N):
            assert x[i] == abs(y[i])
def test_numpy_loops():
    """Same as test_loops, but the kernel's range bounds are 1-element
    numpy arrays instead of Python ints.

    NOTE(review): code kept byte-for-byte -- @ti.kernel bodies are
    re-parsed by Taichi; restructuring could change compilation.
    """
    for arch in [ti.x86_64, ti.cuda]:
        ti.reset()
        ti.cfg.arch = arch
        x = ti.var(ti.f32)
        y = ti.var(ti.f32)

        N = 512

        @ti.layout
        def place():
            ti.root.dense(ti.i, N).place(x)
            ti.root.dense(ti.i, N).place(y)
            ti.root.lazy_grad()

        # Seed the upper half of y; x stays at its default (zero).
        for i in range(N // 2, N):
            y[i] = i - 300

        import numpy as np

        # Loop bounds supplied as 1-element numpy arrays.
        begin = np.ones(1) * (N // 2 + 3)
        end = np.ones(1) * N

        @ti.kernel
        def func():
            for i in range(begin, end):
                x[i] = ti.abs(y[i])

        func()

        # Elements before the kernel's range must remain zero.
        for i in range(N // 2 + 3):
            assert x[i] == 0
        for i in range(N // 2 + 3, N):
            assert x[i] == abs(y[i])
| 18.701493
| 37
| 0.482841
| 235
| 1,253
| 2.544681
| 0.182979
| 0.053512
| 0.080268
| 0.147157
| 0.859532
| 0.859532
| 0.859532
| 0.859532
| 0.765886
| 0.765886
| 0
| 0.054958
| 0.332003
| 1,253
| 66
| 38
| 18.984848
| 0.659498
| 0
| 0
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.12
| false
| 0
| 0.04
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39af78bf263fcc0a7bd426df471761a9f42c0c2e
| 8,655
|
py
|
Python
|
net/vgg.py
|
TrueNobility303/image-classification-CIFAR10
|
e0200d9b4d4f6ceaf058177abebd3f6510aebd9a
|
[
"MIT"
] | 2
|
2021-06-10T16:19:50.000Z
|
2021-06-16T10:55:14.000Z
|
net/vgg.py
|
TrueNobility303/image-classification-CIFAR10
|
e0200d9b4d4f6ceaf058177abebd3f6510aebd9a
|
[
"MIT"
] | null | null | null |
net/vgg.py
|
TrueNobility303/image-classification-CIFAR10
|
e0200d9b4d4f6ceaf058177abebd3f6510aebd9a
|
[
"MIT"
] | null | null | null |
import numpy as np
from torch import nn
# Weight initialization (translated from the original Chinese comment).
def init_weights_(m):
    """Initialize module *m* in place.

    Conv/Linear weights get Xavier-normal init with zeroed biases (when a
    bias exists); BatchNorm layers get weight=1, bias=0.  Other module
    types are left untouched.
    """
    if isinstance(m, (nn.Conv2d, nn.Linear)):
        nn.init.xavier_normal_(m.weight)
        if m.bias is not None:
            nn.init.zeros_(m.bias)
    elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d)):
        nn.init.ones_(m.weight)
        nn.init.zeros_(m.bias)
def get_number_of_parameters(model):
    """Return the total number of scalar parameters in *model*."""
    return sum(np.prod(parameter.shape).item() for parameter in model.parameters())
class VGG_A(nn.Module):
    """Plain VGG-A (no batch norm) for 32x32 inputs such as CIFAR-10.

    Five max-pool stages reduce the 32x32 input to 1x1x512 before a small
    fully-connected classifier.  Layer order is identical to the original
    hand-written nn.Sequential, so state_dict keys are unchanged.
    """

    def __init__(self, inp_ch=3, num_classes=10, init_weights=True):
        super().__init__()
        # (in_channels, out_channels, pool-after?) for each conv layer.
        conv_cfg = [
            (inp_ch, 64, True),
            (64, 128, True),
            (128, 256, False),
            (256, 256, True),
            (256, 512, False),
            (512, 512, True),
            (512, 512, False),
            (512, 512, True),
        ]
        layers = []
        for cin, cout, pool_after in conv_cfg:
            layers.append(nn.Conv2d(in_channels=cin, out_channels=cout, kernel_size=3, padding=1))
            layers.append(nn.ReLU(True))
            if pool_after:
                layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
        self.features = nn.Sequential(*layers)
        self.classifier = nn.Sequential(
            nn.Linear(512 * 1 * 1, 512),
            nn.ReLU(),
            nn.Linear(512, 512),
            nn.ReLU(),
            nn.Linear(512, num_classes))
        if init_weights:
            self._init_weights()

    def forward(self, x):
        feats = self.features(x)
        return self.classifier(feats.view(-1, 512 * 1 * 1))

    def _init_weights(self):
        # Delegate per-module init to the module-level helper.
        for m in self.modules():
            init_weights_(m)
class VGG_A_Light(nn.Module):
    """Truncated two-stage variant of VGG-A with a small classifier head."""

    def __init__(self, inp_ch=3, num_classes=10):
        super().__init__()

        def down_block(cin, cout):
            # conv 3x3 -> relu -> 2x2 max pool (halves the spatial size)
            return nn.Sequential(
                nn.Conv2d(in_channels=cin, out_channels=cout, kernel_size=3, padding=1),
                nn.ReLU(),
                nn.MaxPool2d(kernel_size=2, stride=2))

        self.stage1 = down_block(inp_ch, 16)
        self.stage2 = down_block(16, 32)
        self.classifier = nn.Sequential(
            nn.Linear(32 * 8 * 8, 128),
            nn.ReLU(),
            nn.Linear(128, 128),
            nn.ReLU(),
            nn.Linear(128, num_classes))

    def forward(self, x):
        # Stages 3-5 of the full VGG-A are intentionally omitted in this
        # light variant (they were commented out in the original).
        out = self.stage2(self.stage1(x))
        return self.classifier(out.view(-1, 32 * 8 * 8))
class VGG_A_Dropout(nn.Module):
    """VGG-A feature extractor with dropout regularization in the classifier.

    Same five-stage layout as VGG_A but expressed as named stages; layer
    order inside every stage matches the original, so state_dict keys are
    unchanged.
    """

    def __init__(self, inp_ch=3, num_classes=10):
        super().__init__()

        def conv_relu(cin, cout):
            # conv 3x3 (padding keeps spatial size) followed by ReLU.
            return [nn.Conv2d(in_channels=cin, out_channels=cout, kernel_size=3, padding=1),
                    nn.ReLU(True)]

        def pool():
            return nn.MaxPool2d(kernel_size=2, stride=2)

        self.stage1 = nn.Sequential(*conv_relu(inp_ch, 64), pool())
        self.stage2 = nn.Sequential(*conv_relu(64, 128), pool())
        self.stage3 = nn.Sequential(*conv_relu(128, 256), *conv_relu(256, 256), pool())
        self.stage4 = nn.Sequential(*conv_relu(256, 512), *conv_relu(512, 512), pool())
        self.stage5 = nn.Sequential(*conv_relu(512, 512), *conv_relu(512, 512), pool())
        self.classifier = nn.Sequential(
            nn.Dropout(),
            nn.Linear(512 * 1 * 1, 512),
            nn.ReLU(True),
            nn.Dropout(),
            nn.Linear(512, 512),
            nn.ReLU(True),
            nn.Linear(512, num_classes))

    def forward(self, x):
        out = x
        for stage in (self.stage1, self.stage2, self.stage3, self.stage4, self.stage5):
            out = stage(out)
        return self.classifier(out.view(-1, 512 * 1 * 1))
class VGG_A_BatchNorm(nn.Module):
    """VGG-A variant with BatchNorm inserted after (most of) the conv layers.

    Layer layout mirrors VGG_A, following the conv -> BN -> ReLU pattern.
    NOTE(review): the two stage-5 convs carry no BatchNorm in the original
    source; kept as-is here to preserve the parameter layout — confirm
    whether that omission is intentional.
    """

    def __init__(self, inp_ch=3, num_classes=10, init_weights=True):
        super().__init__()
        self.features = nn.Sequential(
            # stage 1
            nn.Conv2d(in_channels=inp_ch, out_channels=64, kernel_size=3, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            # stage 2
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            # stage 3
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(True),
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            # stage 4
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(True),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, padding=1),
            # Fixed: BatchNorm now precedes the activation, matching every
            # other conv block in this class (it previously followed ReLU).
            nn.BatchNorm2d(512),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            # stage 5 (no BatchNorm in the original — see class docstring)
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, padding=1),
            nn.ReLU(True),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, padding=1),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.classifier = nn.Sequential(
            nn.Linear(512 * 1 * 1, 512),
            nn.ReLU(),
            nn.Linear(512, 512),
            nn.ReLU(),
            nn.Linear(512, num_classes))
        if init_weights:
            self._init_weights()

    def forward(self, x):
        """Features -> flatten -> classifier logits."""
        x = self.features(x)
        x = self.classifier(x.view(-1, 512 * 1 * 1))
        return x

    def _init_weights(self):
        """Apply the file-level ``init_weights_`` helper to every submodule."""
        for m in self.modules():
            init_weights_(m)
# NOTE(review): this re-definition of VGG_A_Light is equivalent to the one
# defined earlier in this file; Python simply rebinds the name to this copy.
# Consider deleting one of the two definitions.
class VGG_A_Light(nn.Module):
    """Compact two-stage VGG for 32x32 inputs."""

    def __init__(self, inp_ch=3, num_classes=10):
        super().__init__()
        self.stage1 = nn.Sequential(
            nn.Conv2d(inp_ch, 16, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.stage2 = nn.Sequential(
            nn.Conv2d(16, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.classifier = nn.Sequential(
            nn.Linear(32 * 8 * 8, 128),
            nn.ReLU(),
            nn.Linear(128, 128),
            nn.ReLU(),
            nn.Linear(128, num_classes))

    def forward(self, x):
        """Two conv stages, flatten, classify."""
        hidden = self.stage1(x)
        hidden = self.stage2(hidden)
        return self.classifier(hidden.view(-1, 32 * 8 * 8))
if __name__ == '__main__':
    # Quick parameter-count report for the model variants defined above.
    for model_factory in (VGG_A, VGG_A_Light, VGG_A_Dropout):
        print(get_number_of_parameters(model_factory()))
| 33.034351
| 85
| 0.565569
| 1,181
| 8,655
| 3.951736
| 0.078747
| 0.100707
| 0.059996
| 0.107992
| 0.929291
| 0.917292
| 0.905292
| 0.872723
| 0.868009
| 0.864367
| 0
| 0.076961
| 0.301906
| 8,655
| 262
| 86
| 33.034351
| 0.695465
| 0.019295
| 0
| 0.865672
| 0
| 0
| 0.000944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069652
| false
| 0
| 0.00995
| 0
| 0.134328
| 0.014925
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39e4b9a96e853d61e6fc18c038dc11b115eeae0b
| 98
|
py
|
Python
|
pytorch_ca/src/models/__init__.py
|
LetteraUnica/neural_cellular_automata
|
360aed252d31529b02460cdf50d752acbd6edaf8
|
[
"MIT"
] | 2
|
2021-08-25T15:46:59.000Z
|
2021-10-01T20:38:36.000Z
|
pytorch_ca/src/models/__init__.py
|
LetteraUnica/neural_cellular_automata
|
360aed252d31529b02460cdf50d752acbd6edaf8
|
[
"MIT"
] | 5
|
2021-10-01T20:43:13.000Z
|
2021-11-19T15:47:45.000Z
|
pytorch_ca/src/models/__init__.py
|
LetteraUnica/neural_cellular_automata
|
360aed252d31529b02460cdf50d752acbd6edaf8
|
[
"MIT"
] | null | null | null |
from .neural_CA import *
from .virus_CA import *
from .multiple_CA import *
from .CAModel import *
| 24.5
| 26
| 0.765306
| 15
| 98
| 4.8
| 0.466667
| 0.333333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 98
| 4
| 27
| 24.5
| 0.86747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f2d9a9309c040ae56671f67d18a80d03994c5fe9
| 24,031
|
py
|
Python
|
avacloud_client_python/api/dangl_identity_api.py
|
Dangl-IT/avacloud-client-python
|
66f555096bbbc87d02d02e4e2dfb0c6accb18f95
|
[
"RSA-MD"
] | 1
|
2019-01-12T18:10:24.000Z
|
2019-01-12T18:10:24.000Z
|
avacloud_client_python/api/dangl_identity_api.py
|
Dangl-IT/avacloud-client-python
|
66f555096bbbc87d02d02e4e2dfb0c6accb18f95
|
[
"RSA-MD"
] | null | null | null |
avacloud_client_python/api/dangl_identity_api.py
|
Dangl-IT/avacloud-client-python
|
66f555096bbbc87d02d02e4e2dfb0c6accb18f95
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
"""
AVACloud API 1.17.3
AVACloud API specification # noqa: E501
OpenAPI spec version: 1.17.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from avacloud_client_python.api_client import ApiClient
class DanglIdentityApi(object):
    """Client for the AVACloud Dangl.Identity endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    The six endpoint wrappers previously duplicated ~70 lines of
    kwargs-validation / header / call boilerplate each; that plumbing now
    lives in the private helpers below.  Public signatures and raised
    exception messages are unchanged.
    """

    # Accept headers used by the endpoints below.
    _FULL_ACCEPT = ['text/plain', 'application/json', 'text/json', 'application/problem+json']  # noqa: E501
    _PROBLEM_ACCEPT = ['application/problem+json']
    # Content types accepted for JSON request bodies.
    _JSON_CONTENT_TYPES = ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']  # noqa: E501
    # Keyword arguments every endpoint wrapper accepts.
    _COMMON_KWARGS = ('async_req', '_return_http_data_only', '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    # ---- shared plumbing -------------------------------------------------

    def _dispatch(self, http_info_method, *args, **kwargs):
        """Run *http_info_method* synchronously or (async_req=True) async.

        Forces ``_return_http_data_only`` so the synchronous path yields
        just the deserialized data, matching the generated wrappers.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return http_info_method(*args, **kwargs)
        (data) = http_info_method(*args, **kwargs)
        return data

    def _collect_params(self, method_name, endpoint_params, positional, kwargs):
        """Merge explicit *positional* params with **kwargs.

        Raises TypeError with the generated-code message for any keyword
        argument that is neither endpoint-specific nor a common kwarg.
        """
        params = dict(positional)
        allowed = list(endpoint_params) + list(self._COMMON_KWARGS)
        for key, val in kwargs.items():
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        return params

    def _require(self, params, name, method_name):
        """Raise ValueError (generated-code message) if *name* is missing or None."""
        if name not in params or params[name] is None:
            raise ValueError("Missing the required parameter `%s` when calling `%s`" % (name, method_name))  # noqa: E501

    def _call_endpoint(self, resource_path, http_method, params, accept,
                       response_type=None, body_param=None, send_json=True,
                       query_params=None):
        """Build headers and delegate to ``api_client.call_api``.

        None of the Dangl.Identity endpoints use path params, form params
        or file uploads, so those are always empty.
        """
        header_params = {
            'Accept': self.api_client.select_header_accept(accept)}
        if send_json:
            header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
                self._JSON_CONTENT_TYPES)
        body_params = params.get(body_param) if body_param else None
        return self.api_client.call_api(
            resource_path, http_method,
            {},                       # path params
            query_params or [],
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['Dangl.Identity'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    # ---- endpoints -------------------------------------------------------

    def dangl_identity_login_and_return_token(self, model, **kwargs):  # noqa: E501
        """dangl_identity_login_and_return_token  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_login_and_return_token(model, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenLoginPost model: (required)
        :return: TokenResponseGet
        """
        return self._dispatch(
            self.dangl_identity_login_and_return_token_with_http_info,
            model, **kwargs)

    def dangl_identity_login_and_return_token_with_http_info(self, model, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_login_and_return_token`."""
        method = 'dangl_identity_login_and_return_token'
        params = self._collect_params(method, ['model'], {'model': model}, kwargs)
        self._require(params, 'model', method)
        return self._call_endpoint(
            '/identity/token-login', 'POST', params, self._FULL_ACCEPT,
            response_type='TokenResponseGet', body_param='model')

    def dangl_identity_login_with_cookie(self, model, **kwargs):  # noqa: E501
        """dangl_identity_login_with_cookie  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_login_with_cookie(model, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param LoginPost model: (required)
        :param str redirect_url:
        :return: None
        """
        return self._dispatch(
            self.dangl_identity_login_with_cookie_with_http_info,
            model, **kwargs)

    def dangl_identity_login_with_cookie_with_http_info(self, model, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_login_with_cookie`."""
        method = 'dangl_identity_login_with_cookie'
        params = self._collect_params(
            method, ['model', 'redirect_url'], {'model': model}, kwargs)
        self._require(params, 'model', method)
        query_params = []
        if 'redirect_url' in params:
            query_params.append(('redirectUrl', params['redirect_url']))  # noqa: E501
        return self._call_endpoint(
            '/identity/login', 'POST', params, self._PROBLEM_ACCEPT,
            body_param='model', query_params=query_params)

    def dangl_identity_refresh_token(self, model, **kwargs):  # noqa: E501
        """dangl_identity_refresh_token  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_refresh_token(model, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenRefreshPost model: (required)
        :return: TokenResponseGet
        """
        return self._dispatch(
            self.dangl_identity_refresh_token_with_http_info, model, **kwargs)

    def dangl_identity_refresh_token_with_http_info(self, model, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_refresh_token`."""
        method = 'dangl_identity_refresh_token'
        params = self._collect_params(method, ['model'], {'model': model}, kwargs)
        self._require(params, 'model', method)
        return self._call_endpoint(
            '/identity/token-refresh', 'POST', params, self._FULL_ACCEPT,
            response_type='TokenResponseGet', body_param='model')

    def dangl_identity_register(self, register_model, **kwargs):  # noqa: E501
        """dangl_identity_register  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_register(register_model, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param RegisterPost register_model: (required)
        :return: None
        """
        return self._dispatch(
            self.dangl_identity_register_with_http_info,
            register_model, **kwargs)

    def dangl_identity_register_with_http_info(self, register_model, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_register`."""
        method = 'dangl_identity_register'
        params = self._collect_params(
            method, ['register_model'], {'register_model': register_model}, kwargs)
        self._require(params, 'register_model', method)
        return self._call_endpoint(
            '/identity/register', 'POST', params, self._FULL_ACCEPT,
            body_param='register_model')

    def dangl_identity_request_password_reset(self, forgot_password_model, **kwargs):  # noqa: E501
        """dangl_identity_request_password_reset  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_request_password_reset(forgot_password_model, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ForgotPasswordPost forgot_password_model: (required)
        :return: None
        """
        return self._dispatch(
            self.dangl_identity_request_password_reset_with_http_info,
            forgot_password_model, **kwargs)

    def dangl_identity_request_password_reset_with_http_info(self, forgot_password_model, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_request_password_reset`."""
        method = 'dangl_identity_request_password_reset'
        params = self._collect_params(
            method, ['forgot_password_model'],
            {'forgot_password_model': forgot_password_model}, kwargs)
        self._require(params, 'forgot_password_model', method)
        return self._call_endpoint(
            '/identity/password-forgotten', 'POST', params,
            self._PROBLEM_ACCEPT, body_param='forgot_password_model')

    def dangl_identity_sign_out_with_sign_in_manager(self, **kwargs):  # noqa: E501
        """dangl_identity_sign_out_with_sign_in_manager  # noqa: E501

        Synchronous by default; pass async_req=True for a request thread.
        >>> thread = api.dangl_identity_sign_out_with_sign_in_manager(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        """
        return self._dispatch(
            self.dangl_identity_sign_out_with_sign_in_manager_with_http_info,
            **kwargs)

    def dangl_identity_sign_out_with_sign_in_manager_with_http_info(self, **kwargs):  # noqa: E501
        """See :meth:`dangl_identity_sign_out_with_sign_in_manager`."""
        method = 'dangl_identity_sign_out_with_sign_in_manager'
        params = self._collect_params(method, [], {}, kwargs)
        # DELETE with no JSON body, so no Content-Type header is sent.
        return self._call_endpoint(
            '/identity/login', 'DELETE', params, self._PROBLEM_ACCEPT,
            send_json=False)
| 39.45977
| 153
| 0.625484
| 2,731
| 24,031
| 5.194434
| 0.061882
| 0.043423
| 0.023685
| 0.030453
| 0.950726
| 0.943254
| 0.928098
| 0.914987
| 0.903637
| 0.888834
| 0
| 0.014255
| 0.284799
| 24,031
| 608
| 154
| 39.524671
| 0.811136
| 0.297574
| 0
| 0.786585
| 1
| 0
| 0.204976
| 0.079101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039634
| false
| 0.036585
| 0.012195
| 0
| 0.109756
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
842c134001b9cbbf50f3a43412595b8cca035e69
| 194
|
py
|
Python
|
src/resources/database.py
|
danielWagnerr/Python_Monolithic
|
5359e4226ab4eab703fc382e2e7c8815a4770fc1
|
[
"MIT"
] | null | null | null |
src/resources/database.py
|
danielWagnerr/Python_Monolithic
|
5359e4226ab4eab703fc382e2e7c8815a4770fc1
|
[
"MIT"
] | null | null | null |
src/resources/database.py
|
danielWagnerr/Python_Monolithic
|
5359e4226ab4eab703fc382e2e7c8815a4770fc1
|
[
"MIT"
] | null | null | null |
import os
def get_database_path(db: str = 'database') -> str:
    """Return the path of the JSON file backing the database named *db*.

    The path is rooted two directories above this module (the source root),
    under its ``database/`` folder.
    """
    here = os.path.realpath(__file__)
    source_root = os.path.dirname(os.path.dirname(here))
    return f'{source_root}/database/{db}.json'
| 27.714286
| 80
| 0.716495
| 29
| 194
| 4.517241
| 0.482759
| 0.274809
| 0.198473
| 0.229008
| 0.244275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 194
| 6
| 81
| 32.333333
| 0.770588
| 0
| 0
| 0
| 0
| 0
| 0.216495
| 0.175258
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
8454eda7554bbf4130febf4e65527ac97adf2ed6
| 9,134
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowIsisInterface/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowIsisInterface/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowIsisInterface/cli/equal/golden_output_2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden expected parser output for an IOS-XR 'show isis interface' capture:
# instance "Genie" with one bundle P2P interface (full FRR/TI-LFA config),
# one disabled TenGigE, and one TenGigE with BFD enabled and FRR disabled.
expected_output = {
    "instance": {
        "Genie": {
            "interface": {
                "Bundle-Ether2": {
                    "state": "Enabled",
                    "adjacency_formation": "Enabled",
                    "prefix_advertisement": "Disabled (Suppressed in IS-IS cfg)",
                    "ipv4_bfd": False,
                    "ipv6_bfd": False,
                    "bfd_min_interval": 150,
                    "bfd_multiplier": 3,
                    "rsi_srlg": "Registered",
                    "bandwidth": 100000000,
                    "circuit_type": "level-2-only",
                    "media_type": "P2P",
                    "circuit_number": 0,
                    "extended_circuit_number": 113,
                    "next_p2p_iih_in": 4,
                    "lsp_rexmit_queue_size": 1,
                    "level": {
                        2: {
                            "adjacency_count": 1,
                            "lsp_pacing_interval_ms": 33,
                            "psnp_entry_queue_size": 0,
                            "hello_interval_sec": 10,
                            "hello_multiplier": 3,
                        }
                    },
                    "clns_io": {
                        "protocol_state": "Up",
                        "mtu": 9199,
                        "snpa": "008a.96ff.1790",
                        "layer2_mcast_groups_membership": {
                            "all_level_1_iss": "Yes",
                            "all_level_2_iss": "Yes",
                        },
                    },
                    "topology": {
                        "ipv4 unicast": {
                            "state": "Enabled",
                            "adjacency_formation": "Running",
                            "prefix_advertisement": "Disabled (Intf suppressed in IS-IS cfg)",
                            "metric": {"level": {1: 10, 2: 10}},
                            "weight": {"level": {1: 0, 2: 0}},
                            "mpls": {
                                "mpls_max_label_stack": "3/3/12/0 (PRI/BKP/SRTE/SRAT)",
                                "ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
                            },
                            "frr": {
                                "level": {
                                    1: {
                                        "state": "Enabled",
                                        "type": "per-prefix",
                                        "direct_lfa": {"state": "Enabled"},
                                        "remote_lfa": {
                                            "state": "Not Enabled",
                                            "tie_breaker": "Default",
                                            "line_card_disjoint": "30",
                                            "lowest_backup_metric": "20",
                                            "node_protecting": "40",
                                            "primary_path": "10",
                                        },
                                        "ti_lfa": {
                                            "state": "Enabled",
                                            "tie_breaker": "Default",
                                            "link_protecting": "Enabled",
                                            "line_card_disjoint": "0",
                                            "node_protecting": "100",
                                            "srlg_disjoint": "0",
                                        },
                                    },
                                    2: {
                                        "state": "Enabled",
                                        "type": "per-prefix",
                                        "direct_lfa": {"state": "Enabled"},
                                        "remote_lfa": {
                                            "state": "Not Enabled",
                                            "tie_breaker": "Default",
                                            "line_card_disjoint": "30",
                                            "lowest_backup_metric": "20",
                                            "node_protecting": "40",
                                            "primary_path": "10",
                                        },
                                        "ti_lfa": {
                                            "state": "Enabled",
                                            "tie_breaker": "Default",
                                            "link_protecting": "Enabled",
                                            "line_card_disjoint": "0",
                                            "node_protecting": "100",
                                            "srlg_disjoint": "0",
                                        },
                                    },
                                }
                            },
                        }
                    },
                    "address_family": {
                        "IPv4": {
                            "state": "Enabled",
                            "protocol_state": "Up",
                            "forwarding_address": ["172.18.0.1"],
                            "global_prefix": ["Unknown (Intf suppressed in IS-IS cfg)"],
                        }
                    },
                    "lsp": {
                        "transmit_timer_expires_ms": 0,
                        "transmission_state": "idle",
                        "lsp_transmit_back_to_back_limit_window_msec": 0,
                        "lsp_transmit_back_to_back_limit": 9,
                    },
                    "underlying_interface": {"HundredGigE0/0/0/1": {"index": "0x55"}},
                },
                "TenGigE0/0/0/0/0": {"state": "Disabled"},
                "TenGigE0/0/0/4/0": {
                    "state": "Enabled",
                    "adjacency_formation": "Enabled",
                    "prefix_advertisement": "Disabled (Suppressed in IS-IS cfg)",
                    "ipv4_bfd": True,
                    "ipv6_bfd": False,
                    "bfd_min_interval": 250,
                    "bfd_multiplier": 3,
                    "rsi_srlg": "Registered",
                    "bandwidth": 10000000,
                    "circuit_type": "level-2-only",
                    "media_type": "P2P",
                    "circuit_number": 0,
                    "extended_circuit_number": 27,
                    "next_p2p_iih_in": 5,
                    "lsp_rexmit_queue_size": 0,
                    "level": {
                        2: {
                            "adjacency_count": 1,
                            "lsp_pacing_interval_ms": 33,
                            "psnp_entry_queue_size": 0,
                            "hello_interval_sec": 10,
                            "hello_multiplier": 3,
                        }
                    },
                    "clns_io": {
                        "protocol_state": "Up",
                        "mtu": 9199,
                        "snpa": "008a.96ff.131b",
                        "layer2_mcast_groups_membership": {
                            "all_level_1_iss": "Yes",
                            "all_level_2_iss": "Yes",
                        },
                    },
                    "topology": {
                        "ipv4 unicast": {
                            "state": "Enabled",
                            "adjacency_formation": "Running",
                            "prefix_advertisement": "Disabled (Intf suppressed in IS-IS cfg)",
                            "metric": {"level": {1: 10, 2: 10}},
                            "weight": {"level": {1: 0, 2: 0}},
                            "mpls": {
                                "mpls_max_label_stack": "3/3/12/0 (PRI/BKP/SRTE/SRAT)",
                                "ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
                            },
                            "frr": {
                                "level": {
                                    1: {"state": "Not Enabled", "type": "None"},
                                    2: {"state": "Not Enabled", "type": "None"},
                                }
                            },
                        }
                    },
                    "address_family": {
                        "IPv4": {
                            "state": "Enabled",
                            "protocol_state": "Up",
                            "forwarding_address": ["172.16.2.133"],
                            "global_prefix": ["Unknown (Intf suppressed in IS-IS cfg)"],
                        }
                    },
                    "lsp": {
                        "transmit_timer_expires_ms": 0,
                        "transmission_state": "idle",
                        "lsp_transmit_back_to_back_limit_window_msec": 0,
                        "lsp_transmit_back_to_back_limit": 9,
                    },
                },
            }
        }
    }
}
| 48.328042
| 94
| 0.292643
| 547
| 9,134
| 4.583181
| 0.274223
| 0.057439
| 0.033506
| 0.038293
| 0.900678
| 0.884324
| 0.863582
| 0.831671
| 0.831671
| 0.831671
| 0
| 0.055099
| 0.600613
| 9,134
| 188
| 95
| 48.585106
| 0.632127
| 0
| 0
| 0.66129
| 0
| 0
| 0.289422
| 0.047306
| 0
| 0
| 0.000438
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
083b8f13dc1f0d59a5587869b4b666716e01f493
| 98
|
py
|
Python
|
apps/google_analytics/context_processors.py
|
aurmeneta/ramos-uc
|
364ab3c5a55032ab7ffc08665a2da4c5ff04ae58
|
[
"MIT"
] | 7
|
2021-07-14T18:13:35.000Z
|
2021-11-21T20:10:54.000Z
|
apps/google_analytics/context_processors.py
|
aurmeneta/ramos-uc
|
364ab3c5a55032ab7ffc08665a2da4c5ff04ae58
|
[
"MIT"
] | 57
|
2021-07-10T01:31:56.000Z
|
2022-01-14T02:02:58.000Z
|
apps/google_analytics/context_processors.py
|
aurmeneta/ramos-uc
|
364ab3c5a55032ab7ffc08665a2da4c5ff04ae58
|
[
"MIT"
] | 4
|
2021-07-23T16:51:55.000Z
|
2021-08-31T02:41:41.000Z
|
from django.conf import settings
def ga_code(request):
    """Django context processor that exposes ``settings.GA_CODE`` to templates."""
    return dict(GA_CODE=settings.GA_CODE)
| 16.333333
| 40
| 0.744898
| 15
| 98
| 4.666667
| 0.666667
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 98
| 5
| 41
| 19.6
| 0.843373
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.