Dataset schema (113 columns, field and dtype):

| Field | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
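The schema is a flat columnar layout, so it can be read selectively. A minimal sketch of loading a few columns, assuming the rows are stored as a Parquet file; the filename `data.parquet` is hypothetical and not part of the dump:

```python
# Sketch: read only a few columns, assuming Parquet storage.
# "data.parquet" is a hypothetical filename.
import pyarrow.parquet as pq

table = pq.read_table("data.parquet", columns=["hexsha", "lang", "size", "hits"])
df = table.to_pandas()
print(df.dtypes)   # dtypes should mirror the schema listed above
```

Selecting columns up front avoids materializing the large `content` strings when only metadata or signal columns are needed.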
Row 1 of 3 — metadata:

| Field | Value |
|---|---|
| hexsha | 1589fa21cf6bf20ff3bdb8c933f5a4a5b7255471 |
| size | 35,350 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/test_all_step_predator_prey.py |
| max_stars_repo_name | Leonardo767/Abmarl |
| max_stars_repo_head_hexsha | 9fada5447b09174c6a70b6032b4a8d08b66c4589 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/test_all_step_predator_prey.py |
| max_issues_repo_name | Leonardo767/Abmarl |
| max_issues_repo_head_hexsha | 9fada5447b09174c6a70b6032b4a8d08b66c4589 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/test_all_step_predator_prey.py |
| max_forks_repo_name | Leonardo767/Abmarl |
| max_forks_repo_head_hexsha | 9fada5447b09174c6a70b6032b4a8d08b66c4589 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |
Row 1 — content:

```python
import numpy as np
import pytest

from abmarl.sim.predator_prey import PredatorPreySimulation, Predator, Prey
from abmarl.managers import AllStepManager


def test_turn_based_predator_prey_distance():
    np.random.seed(24)
    predators = [Predator(id=f'predator{i}', attack=1) for i in range(2)]
    prey = [Prey(id=f'prey{i}') for i in range(7)]
    agents = predators + prey

    sim_config = {
        'region': 6,
        'observation_mode': PredatorPreySimulation.ObservationMode.DISTANCE,
        'agents': agents,
    }
    sim = PredatorPreySimulation.build(sim_config)
    sim = AllStepManager(sim)

    # Little hackish here because I have to explicitly set their values
    obs = sim.reset()
    sim.agents['predator0'].position = np.array([2, 3])
    sim.agents['predator1'].position = np.array([0, 1])
    sim.agents['prey0'].position = np.array([1, 1])
    sim.agents['prey1'].position = np.array([4, 3])
    sim.agents['prey2'].position = np.array([4, 3])
    sim.agents['prey3'].position = np.array([2, 3])
    sim.agents['prey4'].position = np.array([3, 3])
    sim.agents['prey5'].position = np.array([3, 1])
    sim.agents['prey6'].position = np.array([2, 1])
    obs = {agent_id: sim.sim.get_obs(agent_id) for agent_id in sim.agents}

    np.testing.assert_array_equal(obs['predator0']['predator1'], np.array([-2, -2, 2]))
    np.testing.assert_array_equal(obs['predator0']['prey0'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey1'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey2'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey3'], np.array([0, 0, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey4'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey5'], np.array([1, -2, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey6'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['predator1']['predator0'], np.array([2, 2, 2]))
    np.testing.assert_array_equal(obs['predator1']['prey0'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey1'], np.array([4, 2, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey2'], np.array([4, 2, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey3'], np.array([2, 2, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey4'], np.array([3, 2, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey5'], np.array([3, 0, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey6'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey0']['predator0'], np.array([1, 2, 2]))
    np.testing.assert_array_equal(obs['prey0']['predator1'], np.array([-1, 0, 2]))
    np.testing.assert_array_equal(obs['prey0']['prey1'], np.array([3, 2, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey2'], np.array([3, 2, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey3'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey4'], np.array([2, 2, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey5'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey6'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['predator0'], np.array([-2, 0, 2]))
    np.testing.assert_array_equal(obs['prey1']['predator1'], np.array([-4, -2, 2]))
    np.testing.assert_array_equal(obs['prey1']['prey0'], np.array([-3, -2, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey2'], np.array([0, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey3'], np.array([-2, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey4'], np.array([-1, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey5'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey6'], np.array([-2, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['predator0'], np.array([-2, 0, 2]))
    np.testing.assert_array_equal(obs['prey2']['predator1'], np.array([-4, -2, 2]))
    np.testing.assert_array_equal(obs['prey2']['prey0'], np.array([-3, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey1'], np.array([0, 0, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey3'], np.array([-2, 0, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey4'], np.array([-1, 0, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey5'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey6'], np.array([-2, -2, 1]))
    np.testing.assert_array_equal(obs['prey3']['predator0'], np.array([0, 0, 2]))
    np.testing.assert_array_equal(obs['prey3']['predator1'], np.array([-2, -2, 2]))
    np.testing.assert_array_equal(obs['prey3']['prey0'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey1'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey2'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey4'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey5'], np.array([1, -2, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey6'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['predator0'], np.array([-1, 0, 2]))
    np.testing.assert_array_equal(obs['prey4']['predator1'], np.array([-3, -2, 2]))
    np.testing.assert_array_equal(obs['prey4']['prey0'], np.array([-2, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey1'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey2'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey3'], np.array([-1, 0, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey5'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey6'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey5']['predator0'], np.array([-1, 2, 2]))
    np.testing.assert_array_equal(obs['prey5']['predator1'], np.array([-3, 0, 2]))
    np.testing.assert_array_equal(obs['prey5']['prey0'], np.array([-2, 0, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey1'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey2'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey3'], np.array([-1, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey4'], np.array([0, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey6'], np.array([-1, 0, 1]))
    np.testing.assert_array_equal(obs['prey6']['predator0'], np.array([0, 2, 2]))
    np.testing.assert_array_equal(obs['prey6']['predator1'], np.array([-2, 0, 2]))
    np.testing.assert_array_equal(obs['prey6']['prey0'], np.array([-1, 0, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey1'], np.array([2, 2, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey2'], np.array([2, 2, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey3'], np.array([0, 2, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey4'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey5'], np.array([1, 0, 1]))

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey0': np.array([-1, 1]),
        'prey1': np.array([0, -1]),
        'prey2': np.array([1, 1]),
        'prey3': np.array([1, -1]),
        'prey4': np.array([-1, 1]),
        'prey5': np.array([1, 1]),
        'prey6': np.array([0, 0]),
    })
    np.testing.assert_array_equal(obs['predator0']['predator1'], np.array([-2, -2, 2]))
    np.testing.assert_array_equal(obs['predator0']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey1'], np.array([2, -1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey2'], np.array([3, 1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey4'], np.array([0, 1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey5'], np.array([2, -1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey6'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['predator1']['predator0'], np.array([2, 2, 2]))
    np.testing.assert_array_equal(obs['predator1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey1'], np.array([4, 1, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey2'], np.array([5, 3, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey4'], np.array([2, 3, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey5'], np.array([4, 1, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey6'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey0']['predator0'], np.array([1, 2, 2]))
    np.testing.assert_array_equal(obs['prey0']['predator1'], np.array([-1, 0, 2]))
    np.testing.assert_array_equal(obs['prey0']['prey1'], np.array([3, 1, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey2'], np.array([4, 3, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey0']['prey4'], np.array([1, 3, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey5'], np.array([3, 1, 1]))
    np.testing.assert_array_equal(obs['prey0']['prey6'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['predator0'], np.array([-2, 1, 2]))
    np.testing.assert_array_equal(obs['prey1']['predator1'], np.array([-4, -1, 2]))
    np.testing.assert_array_equal(obs['prey1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey2'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey4'], np.array([-2, 2, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey5'], np.array([0, 0, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey6'], np.array([-2, -1, 1]))
    np.testing.assert_array_equal(obs['prey2']['predator0'], np.array([-3, -1, 2]))
    np.testing.assert_array_equal(obs['prey2']['predator1'], np.array([-5, -3, 2]))
    np.testing.assert_array_equal(obs['prey2']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey1'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey4'], np.array([-3, 0, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey5'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey6'], np.array([-3, -3, 1]))
    np.testing.assert_array_equal(obs['prey3']['predator0'], np.array([0, 0, 2]))
    np.testing.assert_array_equal(obs['prey3']['predator1'], np.array([-2, -2, 2]))
    np.testing.assert_array_equal(obs['prey3']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey3']['prey1'], np.array([2, -1, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey2'], np.array([3, 1, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey4'], np.array([0, 1, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey5'], np.array([2, -1, 1]))
    np.testing.assert_array_equal(obs['prey3']['prey6'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['predator0'], np.array([0, -1, 2]))
    np.testing.assert_array_equal(obs['prey4']['predator1'], np.array([-2, -3, 2]))
    np.testing.assert_array_equal(obs['prey4']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey4']['prey1'], np.array([2, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey2'], np.array([3, 0, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey4']['prey5'], np.array([2, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey6'], np.array([0, -3, 1]))
    np.testing.assert_array_equal(obs['prey5']['predator0'], np.array([-2, 1, 2]))
    np.testing.assert_array_equal(obs['prey5']['predator1'], np.array([-4, -1, 2]))
    np.testing.assert_array_equal(obs['prey5']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey1'], np.array([0, 0, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey2'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey4'], np.array([-2, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey6'], np.array([-2, -1, 1]))
    np.testing.assert_array_equal(obs['prey6']['predator0'], np.array([0, 2, 2]))
    np.testing.assert_array_equal(obs['prey6']['predator1'], np.array([-2, 0, 2]))
    np.testing.assert_array_equal(obs['prey6']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey1'], np.array([2, 1, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey2'], np.array([3, 3, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey4'], np.array([0, 3, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey5'], np.array([2, 1, 1]))
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey0': -36, 'prey1': -1, 'prey2': -1,
        'prey3': -36, 'prey4': -1, 'prey5': -1, 'prey6': 0,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey0': True, 'prey1': False,
        'prey2': False, 'prey3': True, 'prey4': False, 'prey5': False,
        'prey6': False, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey0': np.array([-1, 1]),
            'prey1': np.array([0, -1]),
            'prey2': np.array([1, 1]),
            'prey3': np.array([1, -1]),
            'prey4': np.array([-1, 1]),
            'prey5': np.array([1, 1]),
            'prey6': np.array([0, 0]),
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 0, 'move': np.array([1, 0])},
        'prey1': np.array([-1, -1]),
        'prey2': np.array([-1, 0]),
        'prey4': np.array([-1, 0]),
        'prey5': np.array([-1, 0]),
        'prey6': np.array([0, -1]),
    })
    np.testing.assert_array_equal(obs['predator0']['predator1'], np.array([-1, -2, 2]))
    np.testing.assert_array_equal(obs['predator0']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey1'], np.array([1, -2, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey2'], np.array([2, 1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey5'], np.array([1, -1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey6'], np.array([0, -3, 1]))
    np.testing.assert_array_equal(obs['predator1']['predator0'], np.array([1, 2, 2]))
    np.testing.assert_array_equal(obs['predator1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey1'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey2'], np.array([3, 3, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey5'], np.array([2, 1, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey6'], np.array([1, -1, 1]))
    np.testing.assert_array_equal(obs['prey1']['predator0'], np.array([-1, 2, 2]))
    np.testing.assert_array_equal(obs['prey1']['predator1'], np.array([-2, 0, 2]))
    np.testing.assert_array_equal(obs['prey1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey2'], np.array([1, 3, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey5'], np.array([0, 1, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey6'], np.array([-1, -1, 1]))
    np.testing.assert_array_equal(obs['prey2']['predator0'], np.array([-2, -1, 2]))
    np.testing.assert_array_equal(obs['prey2']['predator1'], np.array([-3, -3, 2]))
    np.testing.assert_array_equal(obs['prey2']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey1'], np.array([-1, -3, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey5'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey6'], np.array([-2, -4, 1]))
    np.testing.assert_array_equal(obs['prey4']['predator0'], np.array([0, -1, 2]))
    np.testing.assert_array_equal(obs['prey4']['predator1'], np.array([-1, -3, 2]))
    np.testing.assert_array_equal(obs['prey4']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey4']['prey1'], np.array([1, -3, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey2'], np.array([2, 0, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey4']['prey5'], np.array([1, -2, 1]))
    np.testing.assert_array_equal(obs['prey4']['prey6'], np.array([0, -4, 1]))
    np.testing.assert_array_equal(obs['prey5']['predator0'], np.array([-1, 1, 2]))
    np.testing.assert_array_equal(obs['prey5']['predator1'], np.array([-2, -1, 2]))
    np.testing.assert_array_equal(obs['prey5']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey1'], np.array([0, -1, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey2'], np.array([1, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey6'], np.array([-1, -2, 1]))
    np.testing.assert_array_equal(obs['prey6']['predator0'], np.array([0, 3, 2]))
    np.testing.assert_array_equal(obs['prey6']['predator1'], np.array([-1, 1, 2]))
    np.testing.assert_array_equal(obs['prey6']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey1'], np.array([1, 1, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey2'], np.array([2, 4, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey5'], np.array([1, 2, 1]))
    assert reward == {
        'predator0': 36, 'predator1': -1, 'prey1': -1, 'prey2': -1,
        'prey4': -36, 'prey5': -1, 'prey6': -1,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': False, 'prey2': False,
        'prey4': True, 'prey5': False, 'prey6': False, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey1': np.array([0, -1]),
            'prey2': np.array([1, 1]),
            'prey4': np.array([-1, 1]),
            'prey5': np.array([1, 1]),
            'prey6': np.array([0, 0]),
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey1': np.array([-1, 0]),
        'prey2': np.array([-1, 0]),
        'prey5': np.array([0, 1]),
        'prey6': np.array([-1, 0]),
    })
    np.testing.assert_array_equal(obs['predator0']['predator1'], np.array([-1, -2, 2]))
    np.testing.assert_array_equal(obs['predator0']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey1'], np.array([0, -2, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey2'], np.array([1, 1, 1]))
    np.testing.assert_array_equal(obs['predator0']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['predator0'], np.array([1, 2, 2]))
    np.testing.assert_array_equal(obs['predator1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey1'], np.array([1, 0, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey2'], np.array([2, 3, 1]))
    np.testing.assert_array_equal(obs['predator1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['predator0'], np.array([0, 2, 2]))
    np.testing.assert_array_equal(obs['prey1']['predator1'], np.array([-1, 0, 2]))
    np.testing.assert_array_equal(obs['prey1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey2'], np.array([1, 3, 1]))
    np.testing.assert_array_equal(obs['prey1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['predator0'], np.array([-1, -1, 2]))
    np.testing.assert_array_equal(obs['prey2']['predator1'], np.array([-2, -3, 2]))
    np.testing.assert_array_equal(obs['prey2']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey1'], np.array([-1, -3, 1]))
    np.testing.assert_array_equal(obs['prey2']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['predator0'], np.array([-1, 1, 2]))
    np.testing.assert_array_equal(obs['prey5']['predator1'], np.array([-2, -1, 2]))
    np.testing.assert_array_equal(obs['prey5']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey1'], np.array([-1, -1, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey2'], np.array([0, 2, 1]))
    np.testing.assert_array_equal(obs['prey5']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey5']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['predator0'], np.array([0, 3, 2]))
    np.testing.assert_array_equal(obs['prey6']['predator1'], np.array([-1, 1, 2]))
    np.testing.assert_array_equal(obs['prey6']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey1'], np.array([0, 1, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey2'], np.array([1, 4, 1]))
    np.testing.assert_array_equal(obs['prey6']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey6']['prey5'], np.array([0, 0, 0]))
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey1': -1, 'prey2': -1,
        'prey5': -36, 'prey6': -36
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': False, 'prey2': False,
        'prey5': True, 'prey6': True, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey1': np.array([0, -1]),
            'prey2': np.array([1, 1]),
            'prey5': np.array([1, 1]),
            'prey6': np.array([0, 0]),
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey1': np.array([-1, 0]),
        'prey2': np.array([-1, 0]),
    })
    np.testing.assert_array_equal(obs['predator0']['predator1'], np.array([-1, -2, 2]))
    np.testing.assert_array_equal(obs['predator0']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey1'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey2'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator0']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['predator0'], np.array([1, 2, 2]))
    np.testing.assert_array_equal(obs['predator1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey1'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey2'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['predator1']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['predator0'], np.array([0, 2, 2]))
    np.testing.assert_array_equal(obs['prey1']['predator1'], np.array([-1, 0, 2]))
    np.testing.assert_array_equal(obs['prey1']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey2'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey1']['prey6'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['predator0'], np.array([-1, -1, 2]))
    np.testing.assert_array_equal(obs['prey2']['predator1'], np.array([-2, -3, 2]))
    np.testing.assert_array_equal(obs['prey2']['prey0'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey1'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey3'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey4'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey5'], np.array([0, 0, 0]))
    np.testing.assert_array_equal(obs['prey2']['prey6'], np.array([0, 0, 0]))
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey1': -36, 'prey2': -36,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': True, 'prey2': True,
        '__all__': True}


def test_turn_based_predator_prey_grid():
    np.random.seed(24)
    predators = [Predator(id=f'predator{i}', attack=1, view=0) for i in range(2)]
    prey = [Prey(id=f'prey{i}', view=0) for i in range(7)]
    agents = predators + prey

    sim_config = {
        'region': 6,
        'observation_mode': PredatorPreySimulation.ObservationMode.GRID,
        'agents': agents,
    }
    sim = PredatorPreySimulation.build(sim_config)
    sim = AllStepManager(sim)

    # Little hackish here because I have to explicitly set their values
    obs = sim.reset()
    sim.agents['predator0'].position = np.array([2, 3])
    sim.agents['predator1'].position = np.array([0, 1])
    sim.agents['prey0'].position = np.array([1, 1])
    sim.agents['prey1'].position = np.array([4, 3])
    sim.agents['prey2'].position = np.array([4, 3])
    sim.agents['prey3'].position = np.array([2, 3])
    sim.agents['prey4'].position = np.array([3, 3])
    sim.agents['prey5'].position = np.array([3, 1])
    sim.agents['prey6'].position = np.array([2, 1])
    obs = {agent_id: sim.sim.get_obs(agent_id) for agent_id in sim.agents}
    assert 'predator0' in obs
    assert 'predator1' in obs  # original checked 'predator0' twice; 'predator1' is clearly intended
    assert 'prey0' in obs
    assert 'prey1' in obs
    assert 'prey2' in obs
    assert 'prey3' in obs
    assert 'prey4' in obs
    assert 'prey5' in obs
    assert 'prey6' in obs

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey0': {'move': np.array([1, 1]), 'harvest': 0},
        'prey1': {'move': np.array([0, -1]), 'harvest': 0},
        'prey2': {'move': np.array([1, 1]), 'harvest': 0},
        'prey3': {'move': np.array([0, 0]), 'harvest': 0},
        'prey4': {'move': np.array([-1, 1]), 'harvest': 0},
        'prey5': {'move': np.array([1, 1]), 'harvest': 0},
        'prey6': {'move': np.array([0, 0]), 'harvest': 0},
    })
    assert 'predator0' in obs
    assert 'predator1' in obs
    assert 'prey0' in obs
    assert 'prey1' in obs
    assert 'prey2' in obs
    assert 'prey3' in obs
    assert 'prey4' in obs
    assert 'prey5' in obs
    assert 'prey6' in obs
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey0': -36, 'prey1': -1, 'prey2': -1,
        'prey3': -36, 'prey4': -1, 'prey5': -1, 'prey6': 0,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey0': True, 'prey1': False,
        'prey2': False, 'prey3': True, 'prey4': False, 'prey5': False,
        'prey6': False, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey0': {'move': np.array([0, -1]), 'harvest': 0},
            'prey1': {'move': np.array([0, -1]), 'harvest': 0},
            'prey2': {'move': np.array([1, 1]), 'harvest': 0},
            'prey3': {'move': np.array([0, -1]), 'harvest': 0},
            'prey4': {'move': np.array([0, -1]), 'harvest': 0},
            'prey5': {'move': np.array([1, 1]), 'harvest': 0},
            'prey6': {'move': np.array([0, 0]), 'harvest': 0},
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 0, 'move': np.array([1, 0])},
        'prey1': {'move': np.array([-1, -1]), 'harvest': 0},
        'prey2': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey4': {'move': np.array([0, -1]), 'harvest': 0},
        'prey5': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey6': {'move': np.array([0, -1]), 'harvest': 0},
    })
    assert 'predator0' in obs
    assert 'predator1' in obs
    assert 'prey1' in obs
    assert 'prey2' in obs
    assert 'prey4' in obs
    assert 'prey5' in obs
    assert 'prey6' in obs
    assert reward == {
        'predator0': 36, 'predator1': -1, 'prey1': -1, 'prey2': -1,
        'prey4': -36, 'prey5': -1, 'prey6': -1,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': False, 'prey2': False,
        'prey4': True, 'prey5': False, 'prey6': False, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey1': {'move': np.array([0, -1]), 'harvest': 0},
            'prey2': {'move': np.array([1, 1]), 'harvest': 0},
            'prey4': {'move': np.array([0, -1]), 'harvest': 0},
            'prey5': {'move': np.array([1, 1]), 'harvest': 0},
            'prey6': {'move': np.array([0, 0]), 'harvest': 0},
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey1': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey2': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey5': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey6': {'move': np.array([1, -1]), 'harvest': 0},
    })
    assert 'predator0' in obs
    assert 'predator1' in obs
    assert 'prey1' in obs
    assert 'prey2' in obs
    assert 'prey5' in obs
    assert 'prey6' in obs
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey1': -1, 'prey2': -1,
        'prey5': -36, 'prey6': -36,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': False, 'prey2': False,
        'prey5': True, 'prey6': True, '__all__': False}

    with pytest.raises(AssertionError):
        obs, reward, done, info = sim.step({
            'predator0': {'attack': 1, 'move': np.array([0, 0])},
            'predator1': {'attack': 1, 'move': np.array([0, 0])},
            'prey1': {'move': np.array([0, -1]), 'harvest': 0},
            'prey2': {'move': np.array([1, 1]), 'harvest': 0},
            'prey5': {'move': np.array([1, 1]), 'harvest': 0},
            'prey6': {'move': np.array([0, 0]), 'harvest': 0},
        })

    obs, reward, done, info = sim.step({
        'predator0': {'attack': 1, 'move': np.array([0, 0])},
        'predator1': {'attack': 1, 'move': np.array([0, 0])},
        'prey1': {'move': np.array([-1, 0]), 'harvest': 0},
        'prey2': {'move': np.array([-1, 0]), 'harvest': 0},
    })
    assert 'predator0' in obs
    assert 'predator1' in obs
    assert 'prey1' in obs
    assert 'prey2' in obs
    assert reward == {
        'predator0': 36, 'predator1': 36, 'prey1': -36, 'prey2': -36,
    }
    assert done == {
        'predator0': False, 'predator1': False, 'prey1': True, 'prey2': True,
        '__all__': True}
```
Row 1 — derived statistics and quality signals. Each `…_quality_signal` value is paired below with its same-named flag column (the schema's suffix-less `qsc_*` columns):

| Field | Value |
|---|---|
| avg_line_length | 49.440559 |
| max_line_length | 88 |
| alphanum_fraction | 0.583847 |
| effective | 0 |
| hits | 10 |

| Signal | …_quality_signal value | Flag |
|---|---|---|
| qsc_code_num_words | 5,150 | 0 |
| qsc_code_num_chars | 35,350 | 0 |
| qsc_code_mean_word_length | 3.887767 | 0 |
| qsc_code_frac_words_unique | 0.016893 | null |
| qsc_code_frac_chars_top_2grams | 0.137748 | 0 |
| qsc_code_frac_chars_top_3grams | 0.209769 | 1 |
| qsc_code_frac_chars_top_4grams | 0.279692 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.99126 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.989312 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.985766 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.982619 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.982419 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.98227 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.07001 | 0 |
| qsc_code_frac_chars_whitespace | 0.176917 | 0 |
| qsc_code_size_file_byte | 35,350 | 0 |
| qsc_code_num_lines | 714 | 0 |
| qsc_code_num_chars_line_max | 89 | 0 |
| qsc_code_num_chars_line_mean | 49.509804 | 0 |
| qsc_code_frac_chars_alphabet | 0.618126 | 0 |
| qsc_code_frac_chars_comments | 0.003706 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.714739 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.162511 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.534073 | 1 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.00317 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.006339 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.009509 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
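Several of the signal names describe straightforward counts and character fractions over `content`. A rough sketch of how a few of them could be approximated; the dataset's exact definitions are not given in this dump, so these are illustrative assumptions rather than the pipeline's actual implementation:

```python
# Illustrative approximations of a few quality signals over a content string.
# These follow what the column names suggest; the dataset's real definitions
# are unspecified here and may differ (tokenization, comment handling, etc.).
def approx_signals(content: str) -> dict:
    words = content.split()
    n_chars = len(content)
    lines = content.splitlines()
    return {
        "num_words": len(words),
        "num_chars": n_chars,
        "mean_word_length": sum(len(w) for w in words) / max(len(words), 1),
        "frac_chars_whitespace": sum(c.isspace() for c in content) / max(n_chars, 1),
        "frac_chars_alphabet": sum(c.isalpha() for c in content) / max(n_chars, 1),
        "frac_chars_digital": sum(c.isdigit() for c in content) / max(n_chars, 1),
        # assumed: any line containing an assert counts, which would match the
        # np.testing.assert_* lines that dominate the file above
        "frac_lines_assert": sum("assert" in l for l in lines) / max(len(lines), 1),
    }
```

On this row, the high `frac_chars_dupe_*grams` values (~0.98) and `frac_lines_assert` of 0.534073 are consistent with the content being a long run of near-identical assert lines.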
Row 2 of 3 — metadata:

| Field | Value |
|---|---|
| hexsha | ecd10a6748d120d96fbe6c06e4392a30a0fa9a69 |
| size | 24,086 |
| ext | py |
| lang | Python |
| max_stars_repo_path | sdk/python/pulumi_azure/synapse/workspace_key.py |
| max_stars_repo_name | henriktao/pulumi-azure |
| max_stars_repo_head_hexsha | f1cbcf100b42b916da36d8fe28be3a159abaf022 |
| max_stars_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | sdk/python/pulumi_azure/synapse/workspace_key.py |
| max_issues_repo_name | henriktao/pulumi-azure |
| max_issues_repo_head_hexsha | f1cbcf100b42b916da36d8fe28be3a159abaf022 |
| max_issues_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | sdk/python/pulumi_azure/synapse/workspace_key.py |
| max_forks_repo_name | henriktao/pulumi-azure |
| max_forks_repo_head_hexsha | f1cbcf100b42b916da36d8fe28be3a159abaf022 |
| max_forks_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |
Row 2 — content:

````python
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities

__all__ = ['WorkspaceKeyArgs', 'WorkspaceKey']


@pulumi.input_type
class WorkspaceKeyArgs:
    def __init__(__self__, *,
                 active: pulumi.Input[bool],
                 synapse_workspace_id: pulumi.Input[str],
                 cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a WorkspaceKey resource.
        """
        pulumi.set(__self__, "active", active)
        pulumi.set(__self__, "synapse_workspace_id", synapse_workspace_id)
        if cusomter_managed_key_name is not None:
            warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
            pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
        if cusomter_managed_key_name is not None:
            pulumi.set(__self__, "cusomter_managed_key_name", cusomter_managed_key_name)
        if customer_managed_key_name is not None:
            pulumi.set(__self__, "customer_managed_key_name", customer_managed_key_name)
        if customer_managed_key_versionless_id is not None:
            pulumi.set(__self__, "customer_managed_key_versionless_id", customer_managed_key_versionless_id)

    @property
    @pulumi.getter
    def active(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "active")

    @active.setter
    def active(self, value: pulumi.Input[bool]):
        pulumi.set(self, "active", value)

    @property
    @pulumi.getter(name="synapseWorkspaceId")
    def synapse_workspace_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "synapse_workspace_id")

    @synapse_workspace_id.setter
    def synapse_workspace_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "synapse_workspace_id", value)

    @property
    @pulumi.getter(name="cusomterManagedKeyName")
    def cusomter_managed_key_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cusomter_managed_key_name")

    @cusomter_managed_key_name.setter
    def cusomter_managed_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cusomter_managed_key_name", value)

    @property
    @pulumi.getter(name="customerManagedKeyName")
    def customer_managed_key_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "customer_managed_key_name")

    @customer_managed_key_name.setter
    def customer_managed_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_managed_key_name", value)

    @property
    @pulumi.getter(name="customerManagedKeyVersionlessId")
    def customer_managed_key_versionless_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "customer_managed_key_versionless_id")

    @customer_managed_key_versionless_id.setter
    def customer_managed_key_versionless_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_managed_key_versionless_id", value)


@pulumi.input_type
class _WorkspaceKeyState:
    def __init__(__self__, *,
                 active: Optional[pulumi.Input[bool]] = None,
                 cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
                 synapse_workspace_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering WorkspaceKey resources.
        """
        if active is not None:
            pulumi.set(__self__, "active", active)
        if cusomter_managed_key_name is not None:
            warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
            pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
        if cusomter_managed_key_name is not None:
            pulumi.set(__self__, "cusomter_managed_key_name", cusomter_managed_key_name)
        if customer_managed_key_name is not None:
            pulumi.set(__self__, "customer_managed_key_name", customer_managed_key_name)
        if customer_managed_key_versionless_id is not None:
            pulumi.set(__self__, "customer_managed_key_versionless_id", customer_managed_key_versionless_id)
        if synapse_workspace_id is not None:
            pulumi.set(__self__, "synapse_workspace_id", synapse_workspace_id)

    @property
    @pulumi.getter
    def active(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "active")

    @active.setter
    def active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "active", value)

    @property
    @pulumi.getter(name="cusomterManagedKeyName")
    def cusomter_managed_key_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cusomter_managed_key_name")

    @cusomter_managed_key_name.setter
    def cusomter_managed_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cusomter_managed_key_name", value)

    @property
    @pulumi.getter(name="customerManagedKeyName")
    def customer_managed_key_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "customer_managed_key_name")

    @customer_managed_key_name.setter
    def customer_managed_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_managed_key_name", value)

    @property
    @pulumi.getter(name="customerManagedKeyVersionlessId")
    def customer_managed_key_versionless_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "customer_managed_key_versionless_id")

    @customer_managed_key_versionless_id.setter
    def customer_managed_key_versionless_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_managed_key_versionless_id", value)

    @property
    @pulumi.getter(name="synapseWorkspaceId")
    def synapse_workspace_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "synapse_workspace_id")

    @synapse_workspace_id.setter
    def synapse_workspace_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "synapse_workspace_id", value)


class WorkspaceKey(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 active: Optional[pulumi.Input[bool]] = None,
                 cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_name: Optional[pulumi.Input[str]] = None,
                 customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
                 synapse_workspace_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Synapse Workspace.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS",
            account_kind="StorageV2",
            is_hns_enabled=True)
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_account.id)
        example_workspace = azure.synapse.Workspace("exampleWorkspace",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            storage_data_lake_gen2_filesystem_id=example_data_lake_gen2_filesystem.id,
            sql_administrator_login="sqladminuser",
            sql_administrator_login_password="H@Sh1CoR3!",
            aad_admin=azure.synapse.WorkspaceAadAdminArgs(
                login="AzureAD Admin",
                object_id="00000000-0000-0000-0000-000000000000",
                tenant_id="00000000-0000-0000-0000-000000000000",
            ),
            tags={
                "Env": "production",
            })
        ```
        ### Creating A Workspace With Customer Managed Key And Azure AD Admin

        ```python
        import pulumi
        import pulumi_azure as azure

        current = azure.core.get_client_config()
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS",
            account_kind="StorageV2",
            is_hns_enabled=True)
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_account.id)
        example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            tenant_id=current.tenant_id,
            sku_name="standard",
            purge_protection_enabled=True)
        deployer = azure.keyvault.AccessPolicy("deployer",
            key_vault_id=example_key_vault.id,
            tenant_id=current.tenant_id,
            object_id=current.object_id,
            key_permissions=[
                "create",
                "get",
                "delete",
                "purge",
            ])
        example_key = azure.keyvault.Key("exampleKey",
            key_vault_id=example_key_vault.id,
            key_type="RSA",
            key_size=2048,
            key_opts=[
                "unwrapKey",
                "wrapKey",
            ],
            opts=pulumi.ResourceOptions(depends_on=[deployer]))
        example_workspace = azure.synapse.Workspace("exampleWorkspace",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            storage_data_lake_gen2_filesystem_id=example_data_lake_gen2_filesystem.id,
            sql_administrator_login="sqladminuser",
            sql_administrator_login_password="H@Sh1CoR3!",
            customer_managed_key=azure.synapse.WorkspaceCustomerManagedKeyArgs(
                key_versionless_id=example_key.versionless_id,
                key_name="enckey",
            ),
            tags={
                "Env": "production",
            })
        workspace_policy = azure.keyvault.AccessPolicy("workspacePolicy",
            key_vault_id=example_key_vault.id,
            tenant_id=example_workspace.identities[0].tenant_id,
            object_id=example_workspace.identities[0].principal_id,
            key_permissions=[
                "Get",
                "WrapKey",
                "UnwrapKey",
            ])
        example_workspace_key = azure.synapse.WorkspaceKey("exampleWorkspaceKey",
            customer_managed_key_versionless_id=example_key.versionless_id,
            synapse_workspace_id=example_workspace.id,
            active=True,
            customer_managed_key_name="enckey",
            opts=pulumi.ResourceOptions(depends_on=[workspace_policy]))
        example_workspace_aad_admin = azure.synapse.WorkspaceAadAdmin("exampleWorkspaceAadAdmin",
            synapse_workspace_id=example_workspace.id,
            login="AzureAD Admin",
            object_id="00000000-0000-0000-0000-000000000000",
            tenant_id="00000000-0000-0000-0000-000000000000",
            opts=pulumi.ResourceOptions(depends_on=[example_workspace_key]))
        ```

        ## Import

        Synapse Workspace can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:synapse/workspaceKey:WorkspaceKey example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: WorkspaceKeyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Synapse Workspace.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS",
            account_kind="StorageV2",
            is_hns_enabled=True)
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_account.id)
        example_workspace = azure.synapse.Workspace("exampleWorkspace",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            storage_data_lake_gen2_filesystem_id=example_data_lake_gen2_filesystem.id,
            sql_administrator_login="sqladminuser",
            sql_administrator_login_password="H@Sh1CoR3!",
            aad_admin=azure.synapse.WorkspaceAadAdminArgs(
                login="AzureAD Admin",
                object_id="00000000-0000-0000-0000-000000000000",
                tenant_id="00000000-0000-0000-0000-000000000000",
            ),
            tags={
                "Env": "production",
            })
        ```
        ### Creating A Workspace With Customer Managed Key And Azure AD Admin

        ```python
        import pulumi
        import pulumi_azure as azure

        current = azure.core.get_client_config()
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="LRS",
            account_kind="StorageV2",
            is_hns_enabled=True)
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_account.id)
        example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            tenant_id=current.tenant_id,
            sku_name="standard",
            purge_protection_enabled=True)
        deployer = azure.keyvault.AccessPolicy("deployer",
            key_vault_id=example_key_vault.id,
            tenant_id=current.tenant_id,
            object_id=current.object_id,
            key_permissions=[
                "create",
                "get",
                "delete",
                "purge",
            ])
        example_key = azure.keyvault.Key("exampleKey",
            key_vault_id=example_key_vault.id,
            key_type="RSA",
            key_size=2048,
            key_opts=[
                "unwrapKey",
                "wrapKey",
            ],
            opts=pulumi.ResourceOptions(depends_on=[deployer]))
        example_workspace = azure.synapse.Workspace("exampleWorkspace",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            storage_data_lake_gen2_filesystem_id=example_data_lake_gen2_filesystem.id,
            sql_administrator_login="sqladminuser",
            sql_administrator_login_password="H@Sh1CoR3!",
            customer_managed_key=azure.synapse.WorkspaceCustomerManagedKeyArgs(
                key_versionless_id=example_key.versionless_id,
                key_name="enckey",
            ),
            tags={
                "Env": "production",
            })
        workspace_policy = azure.keyvault.AccessPolicy("workspacePolicy",
            key_vault_id=example_key_vault.id,
            tenant_id=example_workspace.identities[0].tenant_id,
            object_id=example_workspace.identities[0].principal_id,
            key_permissions=[
                "Get",
                "WrapKey",
                "UnwrapKey",
            ])
        example_workspace_key = azure.synapse.WorkspaceKey("exampleWorkspaceKey",
            customer_managed_key_versionless_id=example_key.versionless_id,
            synapse_workspace_id=example_workspace.id,
            active=True,
            customer_managed_key_name="enckey",
            opts=pulumi.ResourceOptions(depends_on=[workspace_policy]))
        example_workspace_aad_admin = azure.synapse.WorkspaceAadAdmin("exampleWorkspaceAadAdmin",
            synapse_workspace_id=example_workspace.id,
            login="AzureAD Admin",
            object_id="00000000-0000-0000-0000-000000000000",
            tenant_id="00000000-0000-0000-0000-000000000000",
            opts=pulumi.ResourceOptions(depends_on=[example_workspace_key]))
        ```

        ## Import

        Synapse Workspace can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:synapse/workspaceKey:WorkspaceKey example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1
        ```

        :param str resource_name: The name of the resource.
        :param WorkspaceKeyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(WorkspaceKeyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       active: Optional[pulumi.Input[bool]] = None,
                       cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
                       customer_managed_key_name: Optional[pulumi.Input[str]] = None,
                       customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
                       synapse_workspace_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = WorkspaceKeyArgs.__new__(WorkspaceKeyArgs)

            if active is None and not opts.urn:
                raise TypeError("Missing required property 'active'")
            __props__.__dict__["active"] = active
            if cusomter_managed_key_name is not None and not opts.urn:
                warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
                pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
            __props__.__dict__["cusomter_managed_key_name"] = cusomter_managed_key_name
            __props__.__dict__["customer_managed_key_name"] = customer_managed_key_name
            __props__.__dict__["customer_managed_key_versionless_id"] = customer_managed_key_versionless_id
            if synapse_workspace_id is None and not opts.urn:
                raise TypeError("Missing required property 'synapse_workspace_id'")
            __props__.__dict__["synapse_workspace_id"] = synapse_workspace_id
        super(WorkspaceKey, __self__).__init__(
            'azure:synapse/workspaceKey:WorkspaceKey',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            active: Optional[pulumi.Input[bool]] = None,
            cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
            customer_managed_key_name: Optional[pulumi.Input[str]] = None,
            customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
            synapse_workspace_id: Optional[pulumi.Input[str]] = None) -> 'WorkspaceKey':
        """
        Get an existing WorkspaceKey resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _WorkspaceKeyState.__new__(_WorkspaceKeyState)

        __props__.__dict__["active"] = active
        __props__.__dict__["cusomter_managed_key_name"] = cusomter_managed_key_name
        __props__.__dict__["customer_managed_key_name"] = customer_managed_key_name
        __props__.__dict__["customer_managed_key_versionless_id"] = customer_managed_key_versionless_id
        __props__.__dict__["synapse_workspace_id"] = synapse_workspace_id
        return WorkspaceKey(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def active(self) -> pulumi.Output[bool]:
        return pulumi.get(self, "active")

    @property
    @pulumi.getter(name="cusomterManagedKeyName")
    def cusomter_managed_key_name(self) -> pulumi.Output[str]:
        return pulumi.get(self, "cusomter_managed_key_name")

    @property
    @pulumi.getter(name="customerManagedKeyName")
    def customer_managed_key_name(self) -> pulumi.Output[str]:
        return pulumi.get(self, "customer_managed_key_name")

    @property
    @pulumi.getter(name="customerManagedKeyVersionlessId")
    def customer_managed_key_versionless_id(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "customer_managed_key_versionless_id")

    @property
    @pulumi.getter(name="synapseWorkspaceId")
    def synapse_workspace_id(self) -> pulumi.Output[str]:
        return pulumi.get(self, "synapse_workspace_id")
````
47.042969
196
0.672548
2,595
24,086
5.86975
0.093642
0.066308
0.0625
0.050551
0.883666
0.873293
0.869945
0.858718
0.844669
0.829307
0
0.020106
0.238022
24,086
511
197
47.135029
0.80984
0.408744
0
0.699531
1
0.014085
0.192819
0.104393
0
0
0
0
0
1
0.150235
false
0.004695
0.023474
0.070423
0.262911
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
01e93bd20aaf8e412f3e3e02acf44e5c0f0a2002
19,439
py
Python
generator_gru.py
accentgan/acl2018
d1fc5ad6e48f8fe77b14562a6044a2d2faf59aef
[ "MIT" ]
null
null
null
generator_gru.py
accentgan/acl2018
d1fc5ad6e48f8fe77b14562a6044a2d2faf59aef
[ "MIT" ]
null
null
null
generator_gru.py
accentgan/acl2018
d1fc5ad6e48f8fe77b14562a6044a2d2faf59aef
[ "MIT" ]
1
2017-10-31T19:31:46.000Z
2017-10-31T19:31:46.000Z
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.layers import batch_norm, fully_connected, flatten
from tensorflow.contrib.layers import xavier_initializer
from ops import *  # provides downconv, deconv, nn_deconv, prelu, leakyrelu, gaussian_noise_layer, histogram_summary
import numpy as np


class ActionGenerator(object):

    def __init__(self, segan):
        self.segan = segan
        self.grucell = tf.contrib.rnn.GRUCell(256 + self.segan.accent_class)

    def zero(self, batch_size):
        # The original tested `self.cell_type`, which is never assigned anywhere;
        # default to "grucell" so the intended branch is taken.
        if getattr(self, "cell_type", "grucell") == "grucell":
            grucell = self.grucell
        else:
            # was `return ValueError(...)`, which returned the exception instead of raising it
            raise ValueError("No such implemented Cell for action sampling")
        return grucell.zero_state(batch_size, tf.float32)

    def __call__(self, noisy_w, hidden_state, is_ref, spk=None,
                 z_on=False, do_prelu=False):
        # TODO: remove c_vec
        """ Build the graph propagating (noisy_w) --> x
        On first pass will make variables.
        """
        segan = self.segan

        def make_z(shape, mean=0., std=1., name='z'):
            if is_ref:
                with tf.variable_scope(name) as scope:
                    z_init = tf.random_normal_initializer(mean=mean, stddev=std)
                    z = tf.get_variable("z", shape,
                                        initializer=z_init,
                                        trainable=False)
                    if z.device != "/device:GPU:0":
                        # this has to be created into gpu0
                        print('z.device is {}'.format(z.device))
                        assert False
            else:
                z = tf.random_normal(shape, mean=mean, stddev=std,
                                     name=name, dtype=tf.float32)
            return z

        if hasattr(segan, 'generator_built'):
            tf.get_variable_scope().reuse_variables()
            make_vars = False
        else:
            make_vars = True
        if is_ref:
            print('*** Building Generator ***')
        in_dims = noisy_w.get_shape().as_list()
        h_i = noisy_w
        if len(in_dims) == 2:
            h_i = tf.expand_dims(noisy_w, -1)
        elif len(in_dims) < 2 or len(in_dims) > 3:
            raise ValueError('Generator input must be 2-D or 3-D')
        kwidth = 31
        enc_layers = 7
        skips = []
        if is_ref and do_prelu:
            # keep track of prelu activations
            alphas = []
        with tf.variable_scope('g_e'):
            # AE to be built is shaped:
            # enc ~ [16384x1, 8192x16, 4096x32, 2048x32, 1024x64, 512x64, 256x128, 128x128, 64x256, 32x256, 16x512, 8x1024]
            # dec ~ [8x2048, 16x1024, 32x512, 64x512, 8x256, 256x256, 512x128, 1024x128, 2048x64, 4096x64, 8192x32, 16384x1]
            # FIRST ENCODER
            for layer_idx, layer_depth in enumerate(segan.g_enc_depths):
                bias_init = None
                if segan.bias_downconv:
                    if is_ref:
                        print('Biasing downconv in G')
                    bias_init = tf.constant_initializer(0.)
                h_i_dwn = downconv(h_i, layer_depth, kwidth=kwidth,
                                   init=tf.truncated_normal_initializer(stddev=0.02),
                                   bias_init=bias_init,
                                   name='enc_{}'.format(layer_idx))
                if is_ref:
                    print('Downconv {} -> {}'.format(h_i.get_shape(),
                                                     h_i_dwn.get_shape()))
                h_i = h_i_dwn
                if layer_idx < len(segan.g_enc_depths) - 1:
                    if is_ref:
                        print('Adding skip connection downconv '
                              '{}'.format(layer_idx))
                    # store skip connection
                    # last one is not stored cause it's the code
                    skips.append(h_i)
                if do_prelu:
                    if is_ref:
                        print('-- Enc: prelu activation --')
                    h_i = prelu(h_i, ref=is_ref,
                                name='enc_prelu_{}'.format(layer_idx))
                    if is_ref:
                        # split h_i into its components
                        alpha_i = h_i[1]
                        h_i = h_i[0]
                        alphas.append(alpha_i)
                else:
                    if is_ref:
                        print('-- Enc: leakyrelu activation --')
                    h_i = leakyrelu(h_i)

        with tf.variable_scope("g_gru"):
            zmid = h_i
            encode_z = zmid[:, :, :256]
            h_i, hidden_state = self.grucell(tf.squeeze(zmid), hidden_state)
            h_i = tf.expand_dims(h_i, [-2])
            z = tf.nn.softmax(h_i[:, :, 256:])
            zdim = z.get_shape().as_list()[-1]
            zstack = tf.reshape(z, shape=[segan.batch_size, 1, zdim])
            real_z = h_i[:, :, :256]
            h_i = gaussian_noise_layer(h_i[:, :, :256], 1e-2)
            zmid = h_i

        # SECOND DECODER (reverse order)
        with tf.variable_scope("g_d") as scope:
            g_dec_depths = segan.g_enc_depths[:-1][::-1] + [1]
            if is_ref:
                print('g_dec_depths: ', g_dec_depths)
            for layer_idx, layer_depth in enumerate(g_dec_depths):
                h_i_dim = h_i.get_shape().as_list()
                dimension = h_i.get_shape().as_list()[1]
                zconcat = zstack * tf.ones([segan.batch_size, dimension, zdim])
                h_i = tf.concat(values=[h_i, zconcat], axis=2)
                out_shape = [h_i_dim[0], h_i_dim[1] * 2, layer_depth]
                bias_init = None
                # deconv
                if segan.deconv_type == 'deconv':
                    if is_ref:
                        print('-- Transposed deconvolution type --')
                        if segan.bias_deconv:
                            print('Biasing deconv in G')
                    if segan.bias_deconv:
                        bias_init = tf.constant_initializer(0.)
                    h_i_dcv = deconv(h_i, out_shape, kwidth=kwidth, dilation=2,
                                     init=tf.truncated_normal_initializer(stddev=0.02),
                                     bias_init=bias_init,
                                     name='dec_{}'.format(layer_idx))
                elif segan.deconv_type == 'nn_deconv':
                    if is_ref:
                        print('-- NN interpolated deconvolution type --')
                        if segan.bias_deconv:
                            print('Biasing deconv in G')
                    if segan.bias_deconv:
                        bias_init = 0.
                    h_i_dcv = nn_deconv(h_i, kwidth=kwidth, dilation=2,
                                        init=tf.truncated_normal_initializer(stddev=0.02),
                                        bias_init=bias_init,
                                        name='dec_{}'.format(layer_idx))
                else:
                    raise ValueError('Unknown deconv type {}'.format(segan.deconv_type))
                if is_ref:
                    print('Deconv {} -> {}'.format(h_i.get_shape(),
                                                   h_i_dcv.get_shape()))
                h_i = h_i_dcv
                if layer_idx < len(g_dec_depths) - 1:
                    if do_prelu:
                        if is_ref:
                            print('-- Dec: prelu activation --')
                        h_i = prelu(h_i, ref=is_ref,
                                    name='dec_prelu_{}'.format(layer_idx))
                        if is_ref:
                            # split h_i into its components
                            alpha_i = h_i[1]
                            h_i = h_i[0]
                            alphas.append(alpha_i)
                    else:
                        if is_ref:
                            print('-- Dec: leakyrelu activation --')
                        h_i = leakyrelu(h_i)
                    # fuse skip connection
                    skip_ = skips[-(layer_idx + 1)]
                    if is_ref:
                        print('Fusing skip connection of '
                              'shape {}'.format(skip_.get_shape()))
                    h_i = tf.concat(axis=2, values=[h_i, skip_])
                else:
                    if is_ref:
                        print('-- Dec: tanh activation --')
                    h_i = tf.tanh(h_i)

        wave = h_i
        if is_ref and do_prelu:
            print('Amount of alpha vectors: ', len(alphas))
        segan.gen_wave_summ = histogram_summary('gen_wave', wave)
        if is_ref:
            print('Amount of skip connections: ', len(skips))
            print('Last wave shape: ', wave.get_shape())
            print('*************************')
        segan.generator_built = True
        # ret feats contains the features refs to be returned
        ret_feats = [wave]
        ret_feats.append(z)
        ret_feats.append(zmid)
        ret_feats.append(hidden_state)
        ret_feats.append(real_z)
        ret_feats.append(encode_z)
        if is_ref and do_prelu:
            ret_feats += alphas
        return ret_feats


class MultiGenerator(object):

    def __init__(self, segan):
        self.segan = segan
        self.grucell = tf.contrib.rnn.GRUCell(256 + self.segan.accent_class)

    def zero(self, batch_size):
        grucell = self.grucell
        return grucell.zero_state(batch_size, tf.float32)

    def __call__(self, noisy_w, hidden_state, is_ref, h_i=None, modus=0,
                 spk=None, z_on=False, do_prelu=False):
        # TODO: remove c_vec
        """ Build the graph propagating (noisy_w) --> x
        On first pass will make variables.
        """
        segan = self.segan

        def make_z(shape, mean=0., std=1., name='z'):
            if is_ref:
                with tf.variable_scope(name) as scope:
                    z_init = tf.random_normal_initializer(mean=mean, stddev=std)
                    z = tf.get_variable("z", shape,
                                        initializer=z_init,
                                        trainable=False)
                    if z.device != "/device:GPU:0":
                        # this has to be created into gpu0
                        print('z.device is {}'.format(z.device))
                        assert False
            else:
                z = tf.random_normal(shape, mean=mean, stddev=std,
                                     name=name, dtype=tf.float32)
            return z

        if hasattr(segan, 'generator_built'):
            tf.get_variable_scope().reuse_variables()
            make_vars = False
        else:
            make_vars = True
        if is_ref:
            print('*** Building Generator ***')
        in_dims = noisy_w.get_shape().as_list()
        if modus == 0:
            h_i = noisy_w
            if len(in_dims) == 2:
                h_i = tf.expand_dims(noisy_w, -1)
            elif len(in_dims) < 2 or len(in_dims) > 3:
                raise ValueError('Generator input must be 2-D or 3-D')
        kwidth = 31
        enc_layers = 7
        skips = []
        if is_ref and do_prelu:
            # keep track of prelu activations
            alphas = []
        if modus == 0:
            with tf.variable_scope('g_e'):
                # AE to be built is shaped:
                # enc ~ [16384x1, 8192x16, 4096x32, 2048x32, 1024x64, 512x64, 256x128, 128x128, 64x256, 32x256, 16x512, 8x1024]
                # dec ~ [8x2048, 16x1024, 32x512, 64x512, 8x256, 256x256, 512x128, 1024x128, 2048x64, 4096x64, 8192x32, 16384x1]
                # FIRST ENCODER
                for layer_idx, layer_depth in enumerate(segan.g_enc_depths):
                    bias_init = None
                    if segan.bias_downconv:
                        if is_ref:
                            print('Biasing downconv in G')
                        bias_init = tf.constant_initializer(0.)
                    h_i_dwn = downconv(h_i, layer_depth, kwidth=kwidth,
                                       init=tf.truncated_normal_initializer(stddev=0.02),
                                       bias_init=bias_init,
                                       name='enc_{}'.format(layer_idx))
                    if is_ref:
                        print('Downconv {} -> {}'.format(h_i.get_shape(),
                                                         h_i_dwn.get_shape()))
                    h_i = h_i_dwn
                    if layer_idx < len(segan.g_enc_depths) - 1:
                        if is_ref:
                            print('Adding skip connection downconv '
                                  '{}'.format(layer_idx))
                        # store skip connection
                        # last one is not stored cause it's the code
                        skips.append(h_i)
                    if do_prelu:
                        if is_ref:
                            print('-- Enc: prelu activation --')
                        h_i = prelu(h_i, ref=is_ref,
                                    name='enc_prelu_{}'.format(layer_idx))
                        if is_ref:
                            # split h_i into its components
                            alpha_i = h_i[1]
                            h_i = h_i[0]
                            alphas.append(alpha_i)
                    else:
                        if is_ref:
                            print('-- Enc: leakyrelu activation --')
                        h_i = leakyrelu(h_i)

        with tf.variable_scope("g_gru"):
            zmid = h_i
            encode_z = zmid[:, :, :256]
            if modus != 2:
                h_i, hidden_state = self.grucell(tf.squeeze(zmid), hidden_state)
                h_i = tf.expand_dims(h_i, [-2])
            z = tf.nn.softmax(h_i[:, :, 256:])
            zdim = z.get_shape().as_list()[-1]
            zstack = tf.reshape(z, shape=[segan.batch_size, 1, zdim])
            real_z = h_i[:, :, :256]
            h_i = tf.concat([gaussian_noise_layer(h_i[:, :, :256], 1e-1),
                             h_i[:, :, 256:]], axis=2)
            zmid = h_i
            h_i = h_i[:, :, :256]

        # SECOND DECODER (reverse order)
        with tf.variable_scope("g_d") as scope:
            g_dec_depths = segan.g_enc_depths[:-1][::-1] + [1]
            if is_ref:
                print('g_dec_depths: ', g_dec_depths)
            for layer_idx, layer_depth in enumerate(g_dec_depths):
                h_i_dim = h_i.get_shape().as_list()
                dimension = h_i.get_shape().as_list()[1]
                zconcat = zstack * tf.ones([segan.batch_size, dimension, zdim])
                out_shape = [h_i_dim[0], h_i_dim[1] * 2, layer_depth]
                h_i = tf.concat(values=[h_i, zconcat], axis=2)
                bias_init = None
                # deconv
                if segan.deconv_type == 'deconv':
                    if is_ref:
                        print('-- Transposed deconvolution type --')
                        if segan.bias_deconv:
                            print('Biasing deconv in G')
                    if segan.bias_deconv:
                        bias_init = tf.constant_initializer(0.)
                    h_i_dcv = deconv(h_i, out_shape, kwidth=kwidth, dilation=2,
                                     init=tf.truncated_normal_initializer(stddev=0.02),
                                     bias_init=bias_init,
                                     name='dec_{}'.format(layer_idx))
                elif segan.deconv_type == 'nn_deconv':
                    if is_ref:
                        print('-- NN interpolated deconvolution type --')
                        if segan.bias_deconv:
                            print('Biasing deconv in G')
                    if segan.bias_deconv:
                        bias_init = 0.
                    h_i_dcv = nn_deconv(h_i, kwidth=kwidth, dilation=2,
                                        init=tf.truncated_normal_initializer(stddev=0.02),
                                        bias_init=bias_init,
                                        name='dec_{}'.format(layer_idx))
                else:
                    raise ValueError('Unknown deconv type {}'.format(segan.deconv_type))
                if is_ref:
                    print('Deconv {} -> {}'.format(h_i.get_shape(),
                                                   h_i_dcv.get_shape()))
                h_i = h_i_dcv
                if layer_idx < len(g_dec_depths) - 1:
                    if do_prelu:
                        if is_ref:
                            print('-- Dec: prelu activation --')
                        h_i = prelu(h_i, ref=is_ref,
                                    name='dec_prelu_{}'.format(layer_idx))
                        if is_ref:
                            # split h_i into its components
                            alpha_i = h_i[1]
                            h_i = h_i[0]
                            alphas.append(alpha_i)
                    else:
                        if is_ref:
                            print('-- Dec: leakyrelu activation --')
                        h_i = leakyrelu(h_i)
                    # fuse skip connection
                    if modus == 0:
                        if not hasattr(self, "skip"):
                            self.skip = {}
                        skip_ = skips[-(layer_idx + 1)]
                        if is_ref:
                            print('Fusing skip connection of '
                                  'shape {}'.format(skip_.get_shape()))
                        h_i = tf.concat(axis=2, values=[h_i, skip_])
                        self.skip["layer_%d" % (layer_idx)] = skip_.get_shape().as_list()
                    else:
                        dimension = h_i.get_shape().as_list()[1]
                        shape = h_i.get_shape().as_list()
                        shape[2] //= 2  # was `/=`, which yields a float under Python 3
                        zconcat = zstack * tf.ones([segan.batch_size, dimension, zdim])
                        t_i = tf.zeros(shape=self.skip["layer_%d" % (layer_idx)])
                        h_i = tf.concat(axis=2, values=[h_i, t_i])
                else:
                    if is_ref:
                        print('-- Dec: tanh activation --')
                    h_i = tf.tanh(h_i)

        wave = h_i
        if is_ref and do_prelu:
            print('Amount of alpha vectors: ', len(alphas))
        segan.gen_wave_summ = histogram_summary('gen_wave', wave)
        if is_ref:
            print('Amount of skip connections: ', len(skips))
            print('Last wave shape: ', wave.get_shape())
            print('*************************')
        segan.generator_built = True
        # ret feats contains the features refs to be returned
        ret_feats = [wave]
        ret_feats.append(z)
        ret_feats.append(zmid)
        ret_feats.append(hidden_state)
        ret_feats.append(real_z)
        ret_feats.append(encode_z)
        if is_ref and do_prelu:
            ret_feats += alphas
        return ret_feats
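The `g_gru` bottleneck above splits the recurrent output into a 256-d latent code and a softmax over accent classes. Below is a standalone, hedged sketch of just that split, using a TF 2.x Keras GRU cell as a stand-in for the `tf.contrib` one; the batch size and accent count are made up.

```python
import tensorflow as tf

accent_class = 4  # hypothetical number of accent classes
cell = tf.keras.layers.GRUCell(256 + accent_class)

code = tf.random.normal([8, 256 + accent_class])   # stand-in for the encoder output
state = [tf.zeros([8, 256 + accent_class])]
h, state = cell(code, state)

real_z = h[:, :256]                           # latent code fed to the decoder
accent_posterior = tf.nn.softmax(h[:, 256:])  # accent distribution, the `z` above
```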
48.476309
126
0.450023
2,114
19,439
3.882214
0.116367
0.028269
0.035823
0.043865
0.943828
0.935787
0.928476
0.922261
0.919093
0.910686
0
0.039507
0.45311
19,439
400
127
48.5975
0.73248
0.067493
0
0.924157
0
0
0.082262
0.002833
0
0
0
0.005
0.005618
1
0.022472
false
0
0.016854
0
0.064607
0.120787
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
01ea08ca353ba165010886d2db63ee670dcf38d5
958
py
Python
Frontend_Scripts/Django_view_methods.py
jeromjoy/Correct-Project
ce9ab7dc3171ddd38cd43d59b0589b7865306be7
[ "MIT" ]
null
null
null
Frontend_Scripts/Django_view_methods.py
jeromjoy/Correct-Project
ce9ab7dc3171ddd38cd43d59b0589b7865306be7
[ "MIT" ]
null
null
null
Frontend_Scripts/Django_view_methods.py
jeromjoy/Correct-Project
ce9ab7dc3171ddd38cd43d59b0589b7865306be7
[ "MIT" ]
null
null
null
# Assumed imports for the views below (standard Django project layout).
from django.db import connection
from django.shortcuts import render


def upload(request):
    cursor = connection.cursor()
    cursor.execute('SELECT * FROM webapp_news')
    News = cursor.fetchone()  # fetchall() may not be the right call here?
    return render(request, 'webapp/upload.html', {'News': News})


def upload_count(request):  # renamed: three views all named `upload` would shadow each other
    cursor = connection.cursor()
    URL = 'http://www.bbc.com/news'
    cursor.execute('SELECT count(*) FROM webapp_news WHERE article = %s', [URL])
    News = cursor.fetchone()  # fetchall() may not be the right call here?
    return render(request, 'webapp/upload.html', {'News': News})


def upload_by_url(request):  # renamed for the same reason
    cursor = connection.cursor()
    url = 'http://www.bbc.com/news'
    # the original passed an empty parameter list for the %s placeholder
    cursor.execute('SELECT last_name FROM webapp_news WHERE articleUrl = %s', [url])
    News = cursor.fetchone()  # fetchall() may not be the right call here?
    return render(request, 'webapp/upload.html', {'News': News})
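Since all three raw-SQL views query the same `webapp_news` table, an ORM sketch may read more clearly. This assumes a hypothetical `News` model in the `webapp` app backing that table.

```python
from django.shortcuts import render
from webapp.models import News  # hypothetical model over webapp_news


def upload_orm(request):
    # The ORM issues an equivalent parameterized query with escaping handled for us.
    latest = News.objects.first()
    return render(request, 'webapp/upload.html', {'News': latest})
```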
35.481481
85
0.606472
115
958
5.017391
0.295652
0.086655
0.083189
0.114385
0.816291
0.816291
0.750433
0.750433
0.750433
0.750433
0
0
0.256785
958
26
86
36.846154
0.810393
0.133612
0
0.705882
0
0
0.293689
0
0
0
0
0
0
1
0.176471
false
0
0
0
0.352941
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1770d252318449b9843a66b3a391e94403b83727
6,487
py
Python
test/client/test_read_memory_by_address.py
martinjthompson/python-udsoncan
fd89262785b968eb4a1aab15af86dbbd7353488b
[ "MIT" ]
326
2017-08-11T10:23:13.000Z
2022-03-30T09:03:27.000Z
test/client/test_read_memory_by_address.py
martinjthompson/python-udsoncan
fd89262785b968eb4a1aab15af86dbbd7353488b
[ "MIT" ]
105
2018-04-17T13:26:57.000Z
2022-03-30T09:00:34.000Z
test/client/test_read_memory_by_address.py
martinjthompson/python-udsoncan
fd89262785b968eb4a1aab15af86dbbd7353488b
[ "MIT" ]
142
2018-02-20T19:52:18.000Z
2022-03-10T00:39:06.000Z
from test.ClientServerTest import ClientServerTest
from udsoncan import MemoryLocation
from udsoncan.exceptions import *

# Note :
# MemoryLocation object is unit tested in a separate file (test_helper_class).
# As it is the only parameter to be passed, no need to push this test too far for nothing.


class TestReadMemoryByAddress(ClientServerTest):

    def test_4byte_block(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x23\x12\x12\x34\x04")
        self.conn.fromuserqueue.put(b"\x63\x99\x88\x77\x66")

    def _test_4byte_block(self):
        response = self.udsclient.read_memory_by_address(
            MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
        self.assertEqual(response.service_data.memory_block, b'\x99\x88\x77\x66')

    def test_4byte_block_spr_no_effect(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x23\x12\x12\x34\x04")
        self.conn.fromuserqueue.put(b"\x63\x99\x88\x77\x66")

    def _test_4byte_block_spr_no_effect(self):
        with self.udsclient.suppress_positive_response:
            response = self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
            self.assertEqual(response.service_data.memory_block, b'\x99\x88\x77\x66')

    def test_config_format(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x23\x24\x00\x00\x12\x34\x00\x04")
        self.conn.fromuserqueue.put(b"\x63\x99\x88\x77\x66")

    def _test_config_format(self):
        self.udsclient.config['server_address_format'] = 32
        self.udsclient.config['server_memorysize_format'] = 16
        self.udsclient.read_memory_by_address(MemoryLocation(address=0x1234, memorysize=4))

    def test_4byte_block_zeropadding_ok(self):
        data = b"\x63\x99\x88\x77\x66"
        for i in range(8):
            self.wait_request_and_respond(data + b'\x00' * (i + 1))

    def _test_4byte_block_zeropadding_ok(self):
        self.udsclient.config['tolerate_zero_padding'] = True
        for i in range(8):
            response = self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
            self.assertEqual(response.service_data.memory_block, b'\x99\x88\x77\x66')

    def test_4byte_block_zeropadding_not_ok_exception(self):
        data = b"\x63\x99\x88\x77\x66"
        for i in range(8):
            self.wait_request_and_respond(data + b'\x00' * (i + 1))

    def _test_4byte_block_zeropadding_not_ok_exception(self):
        self.udsclient.config['tolerate_zero_padding'] = False
        for i in range(8):
            with self.assertRaises(UnexpectedResponseException):
                self.udsclient.read_memory_by_address(
                    MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))

    def test_4byte_block_zeropadding_not_ok_no_exception(self):
        data = b"\x63\x99\x88\x77\x66"
        for i in range(8):
            self.wait_request_and_respond(data + b'\x00' * (i + 1))

    def _test_4byte_block_zeropadding_not_ok_no_exception(self):
        self.udsclient.config['tolerate_zero_padding'] = False
        self.udsclient.config['exception_on_unexpected_response'] = False
        for i in range(8):
            response = self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
            self.assertTrue(response.valid)
            self.assertTrue(response.unexpected)

    def test_request_denied_exception(self):
        self.wait_request_and_respond(b"\x7F\x23\x45")  # Request Out Of Range

    def _test_request_denied_exception(self):
        with self.assertRaises(NegativeResponseException) as handle:
            self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))

    def test_request_denied_no_exception(self):
        self.wait_request_and_respond(b"\x7F\x23\x45")  # Request Out Of Range

    def _test_request_denied_no_exception(self):
        self.udsclient.config['exception_on_negative_response'] = False
        response = self.udsclient.read_memory_by_address(
            MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
        self.assertTrue(response.valid)
        self.assertFalse(response.positive)

    def test_request_invalid_service_exception(self):
        self.wait_request_and_respond(b"\x00\x45")  # Inexistent Service

    def _test_request_invalid_service_exception(self):
        with self.assertRaises(InvalidResponseException) as handle:
            self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))

    def test_request_invalid_service_no_exception(self):
        self.wait_request_and_respond(b"\x00\x45")  # Inexistent Service

    def _test_request_invalid_service_no_exception(self):
        self.udsclient.config['exception_on_invalid_response'] = False
        response = self.udsclient.read_memory_by_address(
            MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
        self.assertFalse(response.valid)

    def test_wrong_service_exception(self):
        self.wait_request_and_respond(b"\x7E\x99\x88\x77\x66")  # Valid but wrong service (Tester Present)

    def _test_wrong_service_exception(self):
        with self.assertRaises(UnexpectedResponseException) as handle:
            self.udsclient.read_memory_by_address(
                MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))

    def test_wrong_service_no_exception(self):
        self.wait_request_and_respond(b"\x7E\x99\x88\x77\x66")  # Valid but wrong service (Tester Present)

    def _test_wrong_service_no_exception(self):
        self.udsclient.config['exception_on_unexpected_response'] = False
        response = self.udsclient.read_memory_by_address(
            MemoryLocation(address=0x1234, memorysize=4, address_format=16, memorysize_format=8))
        self.assertTrue(response.valid)
        self.assertTrue(response.unexpected)

    def test_bad_param(self):
        pass

    def _test_bad_param(self):
        with self.assertRaises(ValueError):
            self.udsclient.read_memory_by_address(1)
        with self.assertRaises(ValueError):
            self.udsclient.read_memory_by_address('aaa')
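Outside the test harness, the service under test is driven through a `Client` bound to a transport connection. A hedged usage sketch follows; the connection object is assumed to exist already (see the udsoncan documentation for concrete Connection classes).

```python
from udsoncan.client import Client
from udsoncan import MemoryLocation


def dump_block(conn):
    # `conn` is an already-configured udsoncan connection (assumption).
    with Client(conn) as client:
        loc = MemoryLocation(address=0x1234, memorysize=4,
                             address_format=16, memorysize_format=8)
        response = client.read_memory_by_address(loc)
        return response.service_data.memory_block
```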
49.519084
146
0.735625
860
6,487
5.272093
0.146512
0.040141
0.052492
0.071019
0.861712
0.845832
0.823114
0.799294
0.773048
0.716586
0
0.054807
0.164637
6,487
130
147
49.9
0.781879
0.051179
0
0.520408
0
0
0.092107
0.042799
0
0
0.011717
0
0.193878
1
0.265306
false
0.010204
0.030612
0
0.306122
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
bd7bd889815c46f4a27262054cc530f1f9d4fde4
39,155
py
Python
sdk/python/pulumi_aws/emr/_inputs.py
mdop-wh/pulumi-aws
05bb32e9d694dde1c3b76d440fd2cd0344d23376
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_aws/emr/_inputs.py
mdop-wh/pulumi-aws
05bb32e9d694dde1c3b76d440fd2cd0344d23376
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_aws/emr/_inputs.py
mdop-wh/pulumi-aws
05bb32e9d694dde1c3b76d440fd2cd0344d23376
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Dict, List, Mapping, Optional, Tuple, Union from .. import _utilities, _tables __all__ = [ 'ClusterBootstrapActionArgs', 'ClusterCoreInstanceGroupArgs', 'ClusterCoreInstanceGroupEbsConfigArgs', 'ClusterEc2AttributesArgs', 'ClusterKerberosAttributesArgs', 'ClusterMasterInstanceGroupArgs', 'ClusterMasterInstanceGroupEbsConfigArgs', 'ClusterStepArgs', 'ClusterStepHadoopJarStepArgs', 'InstanceGroupEbsConfigArgs', ] @pulumi.input_type class ClusterBootstrapActionArgs: def __init__(__self__, *, name: pulumi.Input[str], path: pulumi.Input[str], args: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None): """ :param pulumi.Input[str] name: The name of the step. :param pulumi.Input[str] path: Location of the script to run during a bootstrap action. Can be either a location in Amazon S3 or on a local file system :param pulumi.Input[List[pulumi.Input[str]]] args: List of command line arguments passed to the JAR file's main function when executed. """ pulumi.set(__self__, "name", name) pulumi.set(__self__, "path", path) if args is not None: pulumi.set(__self__, "args", args) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The name of the step. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def path(self) -> pulumi.Input[str]: """ Location of the script to run during a bootstrap action. Can be either a location in Amazon S3 or on a local file system """ return pulumi.get(self, "path") @path.setter def path(self, value: pulumi.Input[str]): pulumi.set(self, "path", value) @property @pulumi.getter def args(self) -> Optional[pulumi.Input[List[pulumi.Input[str]]]]: """ List of command line arguments passed to the JAR file's main function when executed. """ return pulumi.get(self, "args") @args.setter def args(self, value: Optional[pulumi.Input[List[pulumi.Input[str]]]]): pulumi.set(self, "args", value) @pulumi.input_type class ClusterCoreInstanceGroupArgs: def __init__(__self__, *, instance_type: pulumi.Input[str], autoscaling_policy: Optional[pulumi.Input[str]] = None, bid_price: Optional[pulumi.Input[str]] = None, ebs_configs: Optional[pulumi.Input[List[pulumi.Input['ClusterCoreInstanceGroupEbsConfigArgs']]]] = None, id: Optional[pulumi.Input[str]] = None, instance_count: Optional[pulumi.Input[float]] = None, name: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] instance_type: EC2 instance type for all instances in the instance group. :param pulumi.Input[str] autoscaling_policy: String containing the [EMR Auto Scaling Policy](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html) JSON. :param pulumi.Input[str] bid_price: Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. :param pulumi.Input[List[pulumi.Input['ClusterCoreInstanceGroupEbsConfigArgs']]] ebs_configs: Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. 
:param pulumi.Input[str] id: The ID of the EMR Cluster :param pulumi.Input[float] instance_count: Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `core_instance_group` to be configured. Public (Internet accessible) instances must be created in VPC subnets that have `map public IP on launch` enabled. Termination protection is automatically enabled when launched with multiple master nodes and this provider must have the `termination_protection = false` configuration applied before destroying this resource. :param pulumi.Input[str] name: The name of the step. """ pulumi.set(__self__, "instance_type", instance_type) if autoscaling_policy is not None: pulumi.set(__self__, "autoscaling_policy", autoscaling_policy) if bid_price is not None: pulumi.set(__self__, "bid_price", bid_price) if ebs_configs is not None: pulumi.set(__self__, "ebs_configs", ebs_configs) if id is not None: pulumi.set(__self__, "id", id) if instance_count is not None: pulumi.set(__self__, "instance_count", instance_count) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="instanceType") def instance_type(self) -> pulumi.Input[str]: """ EC2 instance type for all instances in the instance group. """ return pulumi.get(self, "instance_type") @instance_type.setter def instance_type(self, value: pulumi.Input[str]): pulumi.set(self, "instance_type", value) @property @pulumi.getter(name="autoscalingPolicy") def autoscaling_policy(self) -> Optional[pulumi.Input[str]]: """ String containing the [EMR Auto Scaling Policy](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html) JSON. """ return pulumi.get(self, "autoscaling_policy") @autoscaling_policy.setter def autoscaling_policy(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "autoscaling_policy", value) @property @pulumi.getter(name="bidPrice") def bid_price(self) -> Optional[pulumi.Input[str]]: """ Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. """ return pulumi.get(self, "bid_price") @bid_price.setter def bid_price(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "bid_price", value) @property @pulumi.getter(name="ebsConfigs") def ebs_configs(self) -> Optional[pulumi.Input[List[pulumi.Input['ClusterCoreInstanceGroupEbsConfigArgs']]]]: """ Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. """ return pulumi.get(self, "ebs_configs") @ebs_configs.setter def ebs_configs(self, value: Optional[pulumi.Input[List[pulumi.Input['ClusterCoreInstanceGroupEbsConfigArgs']]]]): pulumi.set(self, "ebs_configs", value) @property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: """ The ID of the EMR Cluster """ return pulumi.get(self, "id") @id.setter def id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "id", value) @property @pulumi.getter(name="instanceCount") def instance_count(self) -> Optional[pulumi.Input[float]]: """ Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `core_instance_group` to be configured. 
Public (Internet accessible) instances must be created in VPC subnets that have `map public IP on launch` enabled. Termination protection is automatically enabled when launched with multiple master nodes and this provider must have the `termination_protection = false` configuration applied before destroying this resource. """ return pulumi.get(self, "instance_count") @instance_count.setter def instance_count(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "instance_count", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the step. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @pulumi.input_type class ClusterCoreInstanceGroupEbsConfigArgs: def __init__(__self__, *, size: pulumi.Input[float], type: pulumi.Input[str], iops: Optional[pulumi.Input[float]] = None, volumes_per_instance: Optional[pulumi.Input[float]] = None): """ :param pulumi.Input[float] size: The volume size, in gibibytes (GiB). :param pulumi.Input[str] type: The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). :param pulumi.Input[float] iops: The number of I/O operations per second (IOPS) that the volume supports :param pulumi.Input[float] volumes_per_instance: The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1) """ pulumi.set(__self__, "size", size) pulumi.set(__self__, "type", type) if iops is not None: pulumi.set(__self__, "iops", iops) if volumes_per_instance is not None: pulumi.set(__self__, "volumes_per_instance", volumes_per_instance) @property @pulumi.getter def size(self) -> pulumi.Input[float]: """ The volume size, in gibibytes (GiB). """ return pulumi.get(self, "size") @size.setter def size(self, value: pulumi.Input[float]): pulumi.set(self, "size", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). 
""" return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def iops(self) -> Optional[pulumi.Input[float]]: """ The number of I/O operations per second (IOPS) that the volume supports """ return pulumi.get(self, "iops") @iops.setter def iops(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "iops", value) @property @pulumi.getter(name="volumesPerInstance") def volumes_per_instance(self) -> Optional[pulumi.Input[float]]: """ The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1) """ return pulumi.get(self, "volumes_per_instance") @volumes_per_instance.setter def volumes_per_instance(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "volumes_per_instance", value) @pulumi.input_type class ClusterEc2AttributesArgs: def __init__(__self__, *, instance_profile: pulumi.Input[str], additional_master_security_groups: Optional[pulumi.Input[str]] = None, additional_slave_security_groups: Optional[pulumi.Input[str]] = None, emr_managed_master_security_group: Optional[pulumi.Input[str]] = None, emr_managed_slave_security_group: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, service_access_security_group: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] instance_profile: Instance Profile for EC2 instances of the cluster assume this role :param pulumi.Input[str] additional_master_security_groups: String containing a comma separated list of additional Amazon EC2 security group IDs for the master node :param pulumi.Input[str] additional_slave_security_groups: String containing a comma separated list of additional Amazon EC2 security group IDs for the slave nodes as a comma separated string :param pulumi.Input[str] emr_managed_master_security_group: Identifier of the Amazon EC2 EMR-Managed security group for the master node :param pulumi.Input[str] emr_managed_slave_security_group: Identifier of the Amazon EC2 EMR-Managed security group for the slave nodes :param pulumi.Input[str] key_name: Amazon EC2 key pair that can be used to ssh to the master node as the user called `hadoop` :param pulumi.Input[str] service_access_security_group: Identifier of the Amazon EC2 service-access security group - required when the cluster runs on a private subnet :param pulumi.Input[str] subnet_id: VPC subnet id where you want the job flow to launch. 
Cannot specify the `cc1.4xlarge` instance type for nodes of a job flow launched in a Amazon VPC """ pulumi.set(__self__, "instance_profile", instance_profile) if additional_master_security_groups is not None: pulumi.set(__self__, "additional_master_security_groups", additional_master_security_groups) if additional_slave_security_groups is not None: pulumi.set(__self__, "additional_slave_security_groups", additional_slave_security_groups) if emr_managed_master_security_group is not None: pulumi.set(__self__, "emr_managed_master_security_group", emr_managed_master_security_group) if emr_managed_slave_security_group is not None: pulumi.set(__self__, "emr_managed_slave_security_group", emr_managed_slave_security_group) if key_name is not None: pulumi.set(__self__, "key_name", key_name) if service_access_security_group is not None: pulumi.set(__self__, "service_access_security_group", service_access_security_group) if subnet_id is not None: pulumi.set(__self__, "subnet_id", subnet_id) @property @pulumi.getter(name="instanceProfile") def instance_profile(self) -> pulumi.Input[str]: """ Instance Profile for EC2 instances of the cluster assume this role """ return pulumi.get(self, "instance_profile") @instance_profile.setter def instance_profile(self, value: pulumi.Input[str]): pulumi.set(self, "instance_profile", value) @property @pulumi.getter(name="additionalMasterSecurityGroups") def additional_master_security_groups(self) -> Optional[pulumi.Input[str]]: """ String containing a comma separated list of additional Amazon EC2 security group IDs for the master node """ return pulumi.get(self, "additional_master_security_groups") @additional_master_security_groups.setter def additional_master_security_groups(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "additional_master_security_groups", value) @property @pulumi.getter(name="additionalSlaveSecurityGroups") def additional_slave_security_groups(self) -> Optional[pulumi.Input[str]]: """ String containing a comma separated list of additional Amazon EC2 security group IDs for the slave nodes as a comma separated string """ return pulumi.get(self, "additional_slave_security_groups") @additional_slave_security_groups.setter def additional_slave_security_groups(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "additional_slave_security_groups", value) @property @pulumi.getter(name="emrManagedMasterSecurityGroup") def emr_managed_master_security_group(self) -> Optional[pulumi.Input[str]]: """ Identifier of the Amazon EC2 EMR-Managed security group for the master node """ return pulumi.get(self, "emr_managed_master_security_group") @emr_managed_master_security_group.setter def emr_managed_master_security_group(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "emr_managed_master_security_group", value) @property @pulumi.getter(name="emrManagedSlaveSecurityGroup") def emr_managed_slave_security_group(self) -> Optional[pulumi.Input[str]]: """ Identifier of the Amazon EC2 EMR-Managed security group for the slave nodes """ return pulumi.get(self, "emr_managed_slave_security_group") @emr_managed_slave_security_group.setter def emr_managed_slave_security_group(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "emr_managed_slave_security_group", value) @property @pulumi.getter(name="keyName") def key_name(self) -> Optional[pulumi.Input[str]]: """ Amazon EC2 key pair that can be used to ssh to the master node as the user called `hadoop` """ return pulumi.get(self, "key_name") @key_name.setter def key_name(self, value: 
Optional[pulumi.Input[str]]): pulumi.set(self, "key_name", value) @property @pulumi.getter(name="serviceAccessSecurityGroup") def service_access_security_group(self) -> Optional[pulumi.Input[str]]: """ Identifier of the Amazon EC2 service-access security group - required when the cluster runs on a private subnet """ return pulumi.get(self, "service_access_security_group") @service_access_security_group.setter def service_access_security_group(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service_access_security_group", value) @property @pulumi.getter(name="subnetId") def subnet_id(self) -> Optional[pulumi.Input[str]]: """ VPC subnet id where you want the job flow to launch. Cannot specify the `cc1.4xlarge` instance type for nodes of a job flow launched in a Amazon VPC """ return pulumi.get(self, "subnet_id") @subnet_id.setter def subnet_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "subnet_id", value) @pulumi.input_type class ClusterKerberosAttributesArgs: def __init__(__self__, *, kdc_admin_password: pulumi.Input[str], realm: pulumi.Input[str], ad_domain_join_password: Optional[pulumi.Input[str]] = None, ad_domain_join_user: Optional[pulumi.Input[str]] = None, cross_realm_trust_principal_password: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] kdc_admin_password: The password used within the cluster for the kadmin service on the cluster-dedicated KDC, which maintains Kerberos principals, password policies, and keytabs for the cluster. This provider cannot perform drift detection of this configuration. :param pulumi.Input[str] realm: The name of the Kerberos realm to which all nodes in a cluster belong. For example, `EC2.INTERNAL` :param pulumi.Input[str] ad_domain_join_password: The Active Directory password for `ad_domain_join_user`. This provider cannot perform drift detection of this configuration. :param pulumi.Input[str] ad_domain_join_user: Required only when establishing a cross-realm trust with an Active Directory domain. A user with sufficient privileges to join resources to the domain. This provider cannot perform drift detection of this configuration. :param pulumi.Input[str] cross_realm_trust_principal_password: Required only when establishing a cross-realm trust with a KDC in a different realm. The cross-realm principal password, which must be identical across realms. This provider cannot perform drift detection of this configuration. """ pulumi.set(__self__, "kdc_admin_password", kdc_admin_password) pulumi.set(__self__, "realm", realm) if ad_domain_join_password is not None: pulumi.set(__self__, "ad_domain_join_password", ad_domain_join_password) if ad_domain_join_user is not None: pulumi.set(__self__, "ad_domain_join_user", ad_domain_join_user) if cross_realm_trust_principal_password is not None: pulumi.set(__self__, "cross_realm_trust_principal_password", cross_realm_trust_principal_password) @property @pulumi.getter(name="kdcAdminPassword") def kdc_admin_password(self) -> pulumi.Input[str]: """ The password used within the cluster for the kadmin service on the cluster-dedicated KDC, which maintains Kerberos principals, password policies, and keytabs for the cluster. This provider cannot perform drift detection of this configuration. 
""" return pulumi.get(self, "kdc_admin_password") @kdc_admin_password.setter def kdc_admin_password(self, value: pulumi.Input[str]): pulumi.set(self, "kdc_admin_password", value) @property @pulumi.getter def realm(self) -> pulumi.Input[str]: """ The name of the Kerberos realm to which all nodes in a cluster belong. For example, `EC2.INTERNAL` """ return pulumi.get(self, "realm") @realm.setter def realm(self, value: pulumi.Input[str]): pulumi.set(self, "realm", value) @property @pulumi.getter(name="adDomainJoinPassword") def ad_domain_join_password(self) -> Optional[pulumi.Input[str]]: """ The Active Directory password for `ad_domain_join_user`. This provider cannot perform drift detection of this configuration. """ return pulumi.get(self, "ad_domain_join_password") @ad_domain_join_password.setter def ad_domain_join_password(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "ad_domain_join_password", value) @property @pulumi.getter(name="adDomainJoinUser") def ad_domain_join_user(self) -> Optional[pulumi.Input[str]]: """ Required only when establishing a cross-realm trust with an Active Directory domain. A user with sufficient privileges to join resources to the domain. This provider cannot perform drift detection of this configuration. """ return pulumi.get(self, "ad_domain_join_user") @ad_domain_join_user.setter def ad_domain_join_user(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "ad_domain_join_user", value) @property @pulumi.getter(name="crossRealmTrustPrincipalPassword") def cross_realm_trust_principal_password(self) -> Optional[pulumi.Input[str]]: """ Required only when establishing a cross-realm trust with a KDC in a different realm. The cross-realm principal password, which must be identical across realms. This provider cannot perform drift detection of this configuration. """ return pulumi.get(self, "cross_realm_trust_principal_password") @cross_realm_trust_principal_password.setter def cross_realm_trust_principal_password(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cross_realm_trust_principal_password", value) @pulumi.input_type class ClusterMasterInstanceGroupArgs: def __init__(__self__, *, instance_type: pulumi.Input[str], bid_price: Optional[pulumi.Input[str]] = None, ebs_configs: Optional[pulumi.Input[List[pulumi.Input['ClusterMasterInstanceGroupEbsConfigArgs']]]] = None, id: Optional[pulumi.Input[str]] = None, instance_count: Optional[pulumi.Input[float]] = None, name: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] instance_type: EC2 instance type for all instances in the instance group. :param pulumi.Input[str] bid_price: Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. :param pulumi.Input[List[pulumi.Input['ClusterMasterInstanceGroupEbsConfigArgs']]] ebs_configs: Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. :param pulumi.Input[str] id: The ID of the EMR Cluster :param pulumi.Input[float] instance_count: Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `core_instance_group` to be configured. Public (Internet accessible) instances must be created in VPC subnets that have `map public IP on launch` enabled. 
Termination protection is automatically enabled when launched with multiple master nodes and this provider must have the `termination_protection = false` configuration applied before destroying this resource. :param pulumi.Input[str] name: The name of the step. """ pulumi.set(__self__, "instance_type", instance_type) if bid_price is not None: pulumi.set(__self__, "bid_price", bid_price) if ebs_configs is not None: pulumi.set(__self__, "ebs_configs", ebs_configs) if id is not None: pulumi.set(__self__, "id", id) if instance_count is not None: pulumi.set(__self__, "instance_count", instance_count) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="instanceType") def instance_type(self) -> pulumi.Input[str]: """ EC2 instance type for all instances in the instance group. """ return pulumi.get(self, "instance_type") @instance_type.setter def instance_type(self, value: pulumi.Input[str]): pulumi.set(self, "instance_type", value) @property @pulumi.getter(name="bidPrice") def bid_price(self) -> Optional[pulumi.Input[str]]: """ Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. """ return pulumi.get(self, "bid_price") @bid_price.setter def bid_price(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "bid_price", value) @property @pulumi.getter(name="ebsConfigs") def ebs_configs(self) -> Optional[pulumi.Input[List[pulumi.Input['ClusterMasterInstanceGroupEbsConfigArgs']]]]: """ Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. """ return pulumi.get(self, "ebs_configs") @ebs_configs.setter def ebs_configs(self, value: Optional[pulumi.Input[List[pulumi.Input['ClusterMasterInstanceGroupEbsConfigArgs']]]]): pulumi.set(self, "ebs_configs", value) @property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: """ The ID of the EMR Cluster """ return pulumi.get(self, "id") @id.setter def id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "id", value) @property @pulumi.getter(name="instanceCount") def instance_count(self) -> Optional[pulumi.Input[float]]: """ Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `core_instance_group` to be configured. Public (Internet accessible) instances must be created in VPC subnets that have `map public IP on launch` enabled. Termination protection is automatically enabled when launched with multiple master nodes and this provider must have the `termination_protection = false` configuration applied before destroying this resource. """ return pulumi.get(self, "instance_count") @instance_count.setter def instance_count(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "instance_count", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the step. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @pulumi.input_type class ClusterMasterInstanceGroupEbsConfigArgs: def __init__(__self__, *, size: pulumi.Input[float], type: pulumi.Input[str], iops: Optional[pulumi.Input[float]] = None, volumes_per_instance: Optional[pulumi.Input[float]] = None): """ :param pulumi.Input[float] size: The volume size, in gibibytes (GiB). :param pulumi.Input[str] type: The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). :param pulumi.Input[float] iops: The number of I/O operations per second (IOPS) that the volume supports :param pulumi.Input[float] volumes_per_instance: The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1) """ pulumi.set(__self__, "size", size) pulumi.set(__self__, "type", type) if iops is not None: pulumi.set(__self__, "iops", iops) if volumes_per_instance is not None: pulumi.set(__self__, "volumes_per_instance", volumes_per_instance) @property @pulumi.getter def size(self) -> pulumi.Input[float]: """ The volume size, in gibibytes (GiB). """ return pulumi.get(self, "size") @size.setter def size(self, value: pulumi.Input[float]): pulumi.set(self, "size", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def iops(self) -> Optional[pulumi.Input[float]]: """ The number of I/O operations per second (IOPS) that the volume supports """ return pulumi.get(self, "iops") @iops.setter def iops(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "iops", value) @property @pulumi.getter(name="volumesPerInstance") def volumes_per_instance(self) -> Optional[pulumi.Input[float]]: """ The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1) """ return pulumi.get(self, "volumes_per_instance") @volumes_per_instance.setter def volumes_per_instance(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "volumes_per_instance", value) @pulumi.input_type class ClusterStepArgs: def __init__(__self__, *, action_on_failure: pulumi.Input[str], hadoop_jar_step: pulumi.Input['ClusterStepHadoopJarStepArgs'], name: pulumi.Input[str]): """ :param pulumi.Input[str] action_on_failure: The action to take if the step fails. Valid values: `TERMINATE_JOB_FLOW`, `TERMINATE_CLUSTER`, `CANCEL_AND_WAIT`, and `CONTINUE` :param pulumi.Input['ClusterStepHadoopJarStepArgs'] hadoop_jar_step: The JAR file used for the step. Defined below. :param pulumi.Input[str] name: The name of the step. """ pulumi.set(__self__, "action_on_failure", action_on_failure) pulumi.set(__self__, "hadoop_jar_step", hadoop_jar_step) pulumi.set(__self__, "name", name) @property @pulumi.getter(name="actionOnFailure") def action_on_failure(self) -> pulumi.Input[str]: """ The action to take if the step fails. 
Valid values: `TERMINATE_JOB_FLOW`, `TERMINATE_CLUSTER`, `CANCEL_AND_WAIT`, and `CONTINUE` """ return pulumi.get(self, "action_on_failure") @action_on_failure.setter def action_on_failure(self, value: pulumi.Input[str]): pulumi.set(self, "action_on_failure", value) @property @pulumi.getter(name="hadoopJarStep") def hadoop_jar_step(self) -> pulumi.Input['ClusterStepHadoopJarStepArgs']: """ The JAR file used for the step. Defined below. """ return pulumi.get(self, "hadoop_jar_step") @hadoop_jar_step.setter def hadoop_jar_step(self, value: pulumi.Input['ClusterStepHadoopJarStepArgs']): pulumi.set(self, "hadoop_jar_step", value) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The name of the step. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @pulumi.input_type class ClusterStepHadoopJarStepArgs: def __init__(__self__, *, jar: pulumi.Input[str], args: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None, main_class: Optional[pulumi.Input[str]] = None, properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None): """ :param pulumi.Input[str] jar: Path to a JAR file run during the step. :param pulumi.Input[List[pulumi.Input[str]]] args: List of command line arguments passed to the JAR file's main function when executed. :param pulumi.Input[str] main_class: Name of the main class in the specified Java file. If not specified, the JAR file should specify a Main-Class in its manifest file. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] properties: Key-Value map of Java properties that are set when the step runs. You can use these properties to pass key value pairs to your main function. """ pulumi.set(__self__, "jar", jar) if args is not None: pulumi.set(__self__, "args", args) if main_class is not None: pulumi.set(__self__, "main_class", main_class) if properties is not None: pulumi.set(__self__, "properties", properties) @property @pulumi.getter def jar(self) -> pulumi.Input[str]: """ Path to a JAR file run during the step. """ return pulumi.get(self, "jar") @jar.setter def jar(self, value: pulumi.Input[str]): pulumi.set(self, "jar", value) @property @pulumi.getter def args(self) -> Optional[pulumi.Input[List[pulumi.Input[str]]]]: """ List of command line arguments passed to the JAR file's main function when executed. """ return pulumi.get(self, "args") @args.setter def args(self, value: Optional[pulumi.Input[List[pulumi.Input[str]]]]): pulumi.set(self, "args", value) @property @pulumi.getter(name="mainClass") def main_class(self) -> Optional[pulumi.Input[str]]: """ Name of the main class in the specified Java file. If not specified, the JAR file should specify a Main-Class in its manifest file. """ return pulumi.get(self, "main_class") @main_class.setter def main_class(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "main_class", value) @property @pulumi.getter def properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Key-Value map of Java properties that are set when the step runs. You can use these properties to pass key value pairs to your main function. 
""" return pulumi.get(self, "properties") @properties.setter def properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "properties", value) @pulumi.input_type class InstanceGroupEbsConfigArgs: def __init__(__self__, *, size: pulumi.Input[float], type: pulumi.Input[str], iops: Optional[pulumi.Input[float]] = None, volumes_per_instance: Optional[pulumi.Input[float]] = None): """ :param pulumi.Input[float] size: The volume size, in gibibytes (GiB). This can be a number from 1 - 1024. If the volume type is EBS-optimized, the minimum value is 10. :param pulumi.Input[str] type: The volume type. Valid options are 'gp2', 'io1' and 'standard'. :param pulumi.Input[float] iops: The number of I/O operations per second (IOPS) that the volume supports. :param pulumi.Input[float] volumes_per_instance: The number of EBS Volumes to attach per instance. """ pulumi.set(__self__, "size", size) pulumi.set(__self__, "type", type) if iops is not None: pulumi.set(__self__, "iops", iops) if volumes_per_instance is not None: pulumi.set(__self__, "volumes_per_instance", volumes_per_instance) @property @pulumi.getter def size(self) -> pulumi.Input[float]: """ The volume size, in gibibytes (GiB). This can be a number from 1 - 1024. If the volume type is EBS-optimized, the minimum value is 10. """ return pulumi.get(self, "size") @size.setter def size(self, value: pulumi.Input[float]): pulumi.set(self, "size", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The volume type. Valid options are 'gp2', 'io1' and 'standard'. """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def iops(self) -> Optional[pulumi.Input[float]]: """ The number of I/O operations per second (IOPS) that the volume supports. """ return pulumi.get(self, "iops") @iops.setter def iops(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "iops", value) @property @pulumi.getter(name="volumesPerInstance") def volumes_per_instance(self) -> Optional[pulumi.Input[float]]: """ The number of EBS Volumes to attach per instance. """ return pulumi.get(self, "volumes_per_instance") @volumes_per_instance.setter def volumes_per_instance(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "volumes_per_instance", value)
46.337278
604
0.677947
5,008
39,155
5.135383
0.067093
0.094953
0.074034
0.046193
0.898476
0.849911
0.824753
0.774438
0.751069
0.712808
0
0.003378
0.2213
39,155
844
605
46.39218
0.840112
0.35574
0
0.596078
1
0
0.119069
0.059662
0
0
0
0
0
1
0.207843
false
0.05098
0.009804
0
0.331373
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
9
bd86bf450c22926890dd3e00aca5632f9c858486
185
py
Python
evaluation_framework/Classification/__init__.py
nheist/Evaluation-Framework
0561fcbca5025f280624c02f6fad24a888c653ab
[ "Apache-2.0" ]
2
2020-08-01T07:12:00.000Z
2022-02-10T10:19:11.000Z
evaluation_framework/Classification/__init__.py
nheist/Evaluation-Framework
0561fcbca5025f280624c02f6fad24a888c653ab
[ "Apache-2.0" ]
null
null
null
evaluation_framework/Classification/__init__.py
nheist/Evaluation-Framework
0561fcbca5025f280624c02f6fad24a888c653ab
[ "Apache-2.0" ]
null
null
null
from evaluation_framework.Classification.classification_model import ClassificationModel
from evaluation_framework.Classification.classification_taskManager import ClassificationManager
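These re-exports shorten client imports; a one-line usage sketch:

```python
# Equivalent to the deep imports above, thanks to the package __init__.
from evaluation_framework.Classification import ClassificationModel, ClassificationManager
```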
92.5
96
0.940541
16
185
10.625
0.5625
0.164706
0.270588
0.435294
0.6
0
0
0
0
0
0
0
0.037838
185
2
96
92.5
0.955056
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
da83e8ab16fc871f4059c9091181a7c3a1d8e9f0
115
py
Python
src/models/utils.py
arijitmondal-94/app-review-sentiment-anslysis-using-bert
78edceb6c2077348d6b3f2904477d6cad00c1671
[ "MIT" ]
null
null
null
src/models/utils.py
arijitmondal-94/app-review-sentiment-anslysis-using-bert
78edceb6c2077348d6b3f2904477d6cad00c1671
[ "MIT" ]
null
null
null
src/models/utils.py
arijitmondal-94/app-review-sentiment-anslysis-using-bert
78edceb6c2077348d6b3f2904477d6cad00c1671
[ "MIT" ]
null
null
null
import transformers


def get_tokenizer():
    return transformers.BertTokenizer.from_pretrained('bert-base-cased')
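A short usage sketch for get_tokenizer (assumes the transformers package is installed and can fetch bert-base-cased; the sample sentence is made up):

tokenizer = get_tokenizer()
# Tokenize one hypothetical app review; truncation/padding yield a fixed-length input.
encoded = tokenizer("The app keeps crashing after the update.",
                    truncation=True, padding="max_length", max_length=32)
print(encoded["input_ids"][:10])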
23
72
0.808696
13
115
7
0.923077
0
0
0
0
0
0
0
0
0
0
0
0.095652
115
5
72
23
0.875
0
0
0
0
0
0.12931
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
7
e526ba9034575784c4f2b4f0a06c6c5fecf35813
4,658
py
Python
tests/test_metrics.py
jaluebbe/ahrs
4b4a33b1006e0d455a71ac8379a2697202361758
[ "MIT" ]
1
2022-01-11T20:10:48.000Z
2022-01-11T20:10:48.000Z
tests/test_metrics.py
geoKinga/ahrs
87f9210cfcf6c545d86ae8588a93f012020164ee
[ "MIT" ]
null
null
null
tests/test_metrics.py
geoKinga/ahrs
87f9210cfcf6c545d86ae8588a93f012020164ee
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
import unittest
import numpy as np
import ahrs


class TestEuclidean(unittest.TestCase):
    def test_correct_values(self):
        self.assertEqual(ahrs.utils.euclidean(np.zeros(3), np.zeros(3)), 0.0)
        self.assertEqual(ahrs.utils.euclidean(np.zeros(3), np.ones(3)), np.sqrt(3))
        self.assertEqual(ahrs.utils.euclidean(np.ones(3), -np.ones(3)), 2.0*np.sqrt(3))
        self.assertEqual(ahrs.utils.euclidean(np.array([1, 2, 3]), np.array([4, 5, 6])), 5.196152422706632)
        self.assertGreaterEqual(ahrs.utils.euclidean(np.random.random(3)-0.5, np.random.random(3)-0.5), 0.0)

    def test_guard_clauses(self):
        self.assertRaises(ValueError, ahrs.utils.euclidean, np.zeros(3), np.zeros(2))


class TestChordal(unittest.TestCase):
    def setUp(self):
        self.R1 = ahrs.DCM(rpy=[10.0, -20.0, 30.0])
        self.R2 = ahrs.DCM(rpy=[-10.0, 20.0, -30.0])

    def test_correct_values(self):
        self.assertEqual(ahrs.utils.chordal(np.identity(3), np.identity(3)), 0.0)
        self.assertEqual(ahrs.utils.chordal(np.identity(3), -np.identity(3)), 2.0*np.sqrt(3))
        self.assertGreaterEqual(ahrs.utils.euclidean(np.random.random((3, 3))-0.5, np.random.random((3, 3))-0.5), 0.0)
        self.assertEqual(ahrs.utils.chordal(self.R1, self.R2), 1.6916338074634352)
        self.assertEqual(ahrs.utils.chordal(self.R1.tolist(), self.R2.tolist()), 1.6916338074634352)

    def test_guard_clauses(self):
        self.assertRaises(TypeError, ahrs.utils.chordal, np.identity(3), 3.0)
        self.assertRaises(TypeError, ahrs.utils.chordal, 3.0, np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.chordal, "np.identity(3)", np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.chordal, np.identity(3), "np.identity(3)")
        self.assertRaises(ValueError, ahrs.utils.chordal, np.identity(3), np.identity(2))
        self.assertRaises(ValueError, ahrs.utils.chordal, np.tile(np.identity(3), (2, 1, 1)), np.tile(np.identity(3), (3, 1, 1)))


class TestIdentityDeviation(unittest.TestCase):
    def setUp(self):
        self.R1 = ahrs.DCM(rpy=[10.0, -20.0, 30.0])
        self.R2 = ahrs.DCM(rpy=[-10.0, 20.0, -30.0])

    def test_correct_values(self):
        self.assertEqual(ahrs.utils.identity_deviation(np.identity(3), np.identity(3)), 0.0)
        self.assertEqual(ahrs.utils.identity_deviation(np.identity(3), -np.identity(3)), 2.0*np.sqrt(3))
        self.assertGreaterEqual(ahrs.utils.identity_deviation(np.random.random((3, 3))-0.5, np.random.random((3, 3))-0.5), 0.0)
        self.assertEqual(ahrs.utils.identity_deviation(self.R1, self.R2), 1.6916338074634352)
        self.assertEqual(ahrs.utils.identity_deviation(self.R1.tolist(), self.R2.tolist()), 1.6916338074634352)

    def test_guard_clauses(self):
        self.assertRaises(TypeError, ahrs.utils.identity_deviation, np.identity(3), 3.0)
        self.assertRaises(TypeError, ahrs.utils.identity_deviation, 3.0, np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.identity_deviation, "np.identity(3)", np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.identity_deviation, np.identity(3), "np.identity(3)")
        self.assertRaises(ValueError, ahrs.utils.identity_deviation, np.identity(3), np.identity(2))
        self.assertRaises(ValueError, ahrs.utils.identity_deviation, np.zeros((3, 3)), np.zeros((2, 2)))


class TestAngularDistance(unittest.TestCase):
    def setUp(self):
        self.R1 = ahrs.DCM(rpy=[10.0, -20.0, 30.0])
        self.R2 = ahrs.DCM(rpy=[-10.0, 20.0, -30.0])

    def test_correct_values(self):
        self.assertEqual(ahrs.utils.angular_distance(np.identity(3), np.identity(3)), 0.0)
        self.assertGreaterEqual(ahrs.utils.angular_distance(np.random.random((3, 3))-0.5, np.random.random((3, 3))-0.5), 0.0)
        self.assertEqual(ahrs.utils.angular_distance(self.R1, self.R2), 1.282213683073497)
        self.assertEqual(ahrs.utils.angular_distance(self.R1.tolist(), self.R2.tolist()), 1.282213683073497)

    def test_guard_clauses(self):
        self.assertRaises(TypeError, ahrs.utils.angular_distance, np.identity(3), 3.0)
        self.assertRaises(TypeError, ahrs.utils.angular_distance, 3.0, np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.angular_distance, "np.identity(3)", np.identity(3))
        self.assertRaises(TypeError, ahrs.utils.angular_distance, np.identity(3), "np.identity(3)")
        self.assertRaises(ValueError, ahrs.utils.angular_distance, np.identity(3), np.identity(2))
        self.assertRaises(ValueError, ahrs.utils.angular_distance, np.zeros((3, 3)), np.zeros((2, 2)))


if __name__ == "__main__":
    unittest.main()
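A quick sketch of what these attitude-error metrics compare, using only calls that appear in the tests above (assumes ahrs and NumPy are installed; the roll/pitch/yaw angles are arbitrary):

import ahrs

R1 = ahrs.DCM(rpy=[10.0, -20.0, 30.0])
R2 = ahrs.DCM(rpy=[-10.0, 20.0, -30.0])
# Chordal distance treats the two rotation matrices as points in R^9.
print(ahrs.utils.chordal(R1, R2))           # 1.6916338074634352 per the tests
# Angular distance is the rotation angle separating the two attitudes.
print(ahrs.utils.angular_distance(R1, R2))  # 1.282213683073497 per the tests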
59.717949
129
0.689137
690
4,658
4.586957
0.091304
0.108057
0.114692
0.113744
0.9109
0.881833
0.878989
0.836019
0.76872
0.672986
0
0.07989
0.137398
4,658
77
130
60.493506
0.707815
0.004508
0
0.265625
0
0
0.019845
0
0
0
0
0
0.59375
1
0.171875
false
0
0.046875
0
0.28125
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
e5874ae1c7c42915e8cd9d0bff8febd39ede0361
18,978
py
Python
src/conductor/client/http/api/workflow_bulk_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
3
2022-03-10T18:24:46.000Z
2022-03-22T20:49:30.000Z
src/conductor/client/http/api/workflow_bulk_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
6
2022-03-08T17:48:28.000Z
2022-03-30T00:39:22.000Z
src/conductor/client/http/api/workflow_bulk_resource_api.py
conductor-sdk/conductor-python
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import

import re  # noqa: F401

# python 2 and python 3 compatibility library
import six

from conductor.client.http.api_client import ApiClient


class WorkflowBulkResourceApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def pause_workflow1(self, body, **kwargs):  # noqa: E501
        """Pause the list of workflows  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.pause_workflow1(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.pause_workflow1_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.pause_workflow1_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def pause_workflow1_with_http_info(self, body, **kwargs):  # noqa: E501
        """Pause the list of workflows  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.pause_workflow1_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method pause_workflow1" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `pause_workflow1`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/workflow/bulk/pause', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BulkResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def restart1(self, body, **kwargs):  # noqa: E501
        """Restart the list of completed workflow  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.restart1(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :param bool use_latest_definitions:
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.restart1_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.restart1_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def restart1_with_http_info(self, body, **kwargs):  # noqa: E501
        """Restart the list of completed workflow  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.restart1_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :param bool use_latest_definitions:
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body', 'use_latest_definitions']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method restart1" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `restart1`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'use_latest_definitions' in params:
            query_params.append(('useLatestDefinitions', params['use_latest_definitions']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/workflow/bulk/restart', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BulkResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def resume_workflow1(self, body, **kwargs):  # noqa: E501
        """Resume the list of workflows  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.resume_workflow1(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.resume_workflow1_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.resume_workflow1_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def resume_workflow1_with_http_info(self, body, **kwargs):  # noqa: E501
        """Resume the list of workflows  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.resume_workflow1_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method resume_workflow1" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `resume_workflow1`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/workflow/bulk/resume', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BulkResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def retry1(self, body, **kwargs):  # noqa: E501
        """Retry the last failed task for each workflow from the list  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.retry1(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.retry1_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.retry1_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def retry1_with_http_info(self, body, **kwargs):  # noqa: E501
        """Retry the last failed task for each workflow from the list  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.retry1_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method retry1" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `retry1`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/workflow/bulk/retry', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BulkResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def terminate(self, body, **kwargs):  # noqa: E501
        """Terminate workflows execution  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.terminate(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :param str reason:
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.terminate_with_http_info(body, **kwargs)  # noqa: E501
        else:
            (data) = self.terminate_with_http_info(body, **kwargs)  # noqa: E501
            return data

    def terminate_with_http_info(self, body, **kwargs):  # noqa: E501
        """Terminate workflows execution  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.terminate_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] body: (required)
        :param str reason:
        :return: BulkResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body', 'reason']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method terminate" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `terminate`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'reason' in params:
            query_params.append(('reason', params['reason']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/workflow/bulk/terminate', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BulkResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
36.850485
115
0.593371
2,123
18,978
5.072068
0.073481
0.049777
0.026003
0.033432
0.942793
0.933414
0.931742
0.931742
0.928399
0.911311
0
0.018228
0.311993
18,978
514
116
36.922179
0.806464
0.303457
0
0.805654
0
0
0.169092
0.042764
0
0
0
0
0
1
0.038869
false
0
0.014134
0
0.109541
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e590707d49634a0cb8d2ffdb1eba9e26c922160f
1,363
py
Python
pycspr/factory/__init__.py
hboshnak/casper-python-sdk
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
[ "Apache-2.0" ]
11
2021-09-27T08:41:18.000Z
2022-03-24T11:25:20.000Z
pycspr/factory/__init__.py
hboshnak/casper-python-sdk
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
[ "Apache-2.0" ]
13
2021-09-13T19:08:45.000Z
2022-02-08T10:01:12.000Z
pycspr/factory/__init__.py
hboshnak/casper-python-sdk
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
[ "Apache-2.0" ]
14
2021-07-12T10:46:33.000Z
2022-03-01T08:25:07.000Z
from pycspr.factory.accounts import create_private_key
from pycspr.factory.accounts import create_public_key
from pycspr.factory.accounts import create_public_key_from_account_key
from pycspr.factory.accounts import parse_private_key
from pycspr.factory.accounts import parse_private_key_bytes
from pycspr.factory.accounts import parse_public_key
from pycspr.factory.accounts import parse_public_key_bytes
from pycspr.factory.deploys import create_deploy
from pycspr.factory.deploys import create_deploy_approval
from pycspr.factory.deploys import create_deploy_arguments
from pycspr.factory.deploys import create_deploy_body
from pycspr.factory.deploys import create_deploy_header
from pycspr.factory.deploys import create_deploy_parameters
from pycspr.factory.deploys import create_deploy_ttl
from pycspr.factory.deploys import create_transfer
from pycspr.factory.deploys import create_transfer_session
from pycspr.factory.deploys import create_standard_payment
from pycspr.factory.deploys import create_validator_auction_bid
from pycspr.factory.deploys import create_validator_auction_bid_withdrawal
from pycspr.factory.deploys import create_validator_delegation
from pycspr.factory.deploys import create_validator_delegation_withdrawal
from pycspr.factory.digests import create_digest_of_deploy
from pycspr.factory.digests import create_digest_of_deploy_body
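Because this __init__.py re-exports every helper, callers can import from pycspr.factory directly instead of the accounts/deploys/digests submodules (a sketch, assuming pycspr is installed):

# Flat-namespace imports enabled by the re-exports above.
from pycspr.factory import create_deploy_parameters, create_standard_payment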
56.791667
74
0.898753
194
1,363
6.025773
0.164948
0.196749
0.334474
0.287425
0.920445
0.911891
0.849444
0.516681
0.345595
0.084688
0
0
0.067498
1,363
23
75
59.26087
0.919748
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
e5ece634fbf2957d2b0497bc43bc87d22c7d8b81
10,962
py
Python
src/jlauto/models/load_premade.py
AllaVinner/JL-ML
9d0bbbd324fd59ee812144ef0b4cff88d339ee76
[ "MIT" ]
null
null
null
src/jlauto/models/load_premade.py
AllaVinner/JL-ML
9d0bbbd324fd59ee812144ef0b4cff88d339ee76
[ "MIT" ]
null
null
null
src/jlauto/models/load_premade.py
AllaVinner/JL-ML
9d0bbbd324fd59ee812144ef0b4cff88d339ee76
[ "MIT" ]
null
null
null
import tensorflow as tf
from tensorflow import keras
import numpy as np
from collections import defaultdict

from jlauto.models.autoencoder import Autoencoder
from jlauto.models.variational_autoencoder import VariationalAutoencoder


def load_premade_model(model_type = None, model_name = None, **kwargs):
    load = defaultdict(dict)
    # Autoencoder models
    load['autoencoder']['dense'] = autoencoder_dense
    load['autoencoder']['mnist_cnn_deep'] = autoencoder_mnist_cnn_deep
    load['autoencoder']['mnist_cnn_shallow'] = autoencoder_mnist_cnn_shallow
    # Variational autoencoder models
    load['variational_autoencoder']['mnist_dense'] = variational_autoencoder_dense
    load['variational_autoencoder']['mnist_cnn_deep'] = variational_autoencoder_mnist_cnn_deep
    load['variational_autoencoder']['mnist_cnn_shallow'] = variational_autoencoder_mnist_cnn_shallow
    return load[model_type][model_name](**kwargs)


### Architectures ###
#-----------------------------------------------------------------------------
# Autoencoders

def autoencoder_mnist_cnn_deep(latent_dim = 10, input_shape = (28,28,1), **kwargs):
    # OUTPUT_SHAPE = (28,28,1) or (28,28)
    # Variational autoencoder
    encoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=input_shape),
        keras.layers.Reshape((28,28,1)),
        keras.layers.Conv2D(8, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(8, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(16, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(16, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(32, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(32, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(64, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(64, (3,3), padding = "same", activation = "relu"),
        keras.layers.Flatten(),
        keras.layers.Dense(200, activation = 'relu'),
        keras.layers.Dense(100, activation = 'relu'),
        keras.layers.Dense(latent_dim),
        keras.layers.Reshape((latent_dim,)),
    ], name = "Encoder")

    decoder = keras.Sequential([
        keras.layers.InputLayer(input_shape = (latent_dim,)),
        keras.layers.Dense(200, activation = "relu"),
        keras.layers.Dense(3*3*64, activation = "relu"),
        keras.layers.Reshape((3,3,64)),
        keras.layers.Conv2DTranspose(32, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(32, (3,3), activation = "relu", strides = 2),
        keras.layers.UpSampling2D((2,2)),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.UpSampling2D((2,2)),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(1, (3,3), activation = "sigmoid", padding = "same"),
        keras.layers.Reshape(input_shape),
    ], name = "Decoder")

    model = Autoencoder(encoder, decoder)
    return model


def autoencoder_mnist_cnn_shallow(latent_dim = 10, input_shape = (28,28,1), **kwargs):
    # OUTPUT_SHAPE = (28,28,1) or (28,28)
    # Variational autoencoder
    encoder = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=input_shape),
            tf.keras.layers.Reshape((28,28,1)),
            tf.keras.layers.Conv2D(
                filters=32, kernel_size=3, strides=(2, 2), activation='relu'),
            tf.keras.layers.Conv2D(
                filters=64, kernel_size=3, strides=(2, 2), activation='relu'),
            tf.keras.layers.Flatten(),
            # No activation
            tf.keras.layers.Dense(latent_dim),
            tf.keras.layers.Reshape((latent_dim,)),
        ]
    )

    decoder = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=(latent_dim,)),
            tf.keras.layers.Dense(units=7*7*32, activation=tf.nn.relu),
            tf.keras.layers.Reshape(target_shape=(7, 7, 32)),
            tf.keras.layers.Conv2DTranspose(
                filters=64, kernel_size=3, strides=2, padding='same', activation='relu'),
            tf.keras.layers.Conv2DTranspose(
                filters=32, kernel_size=3, strides=2, padding='same', activation='relu'),
            # No activation
            tf.keras.layers.Conv2DTranspose(
                filters=1, kernel_size=3, strides=1, padding='same', activation='sigmoid'),
            tf.keras.layers.Reshape(input_shape)
        ]
    )

    # Initiate model
    model = Autoencoder(encoder, decoder)
    return model


def autoencoder_dense(latent_dim = 10, input_shape = None, intermediat_dim = 100, **kwargs):
    encoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=input_shape),
        keras.layers.Flatten(),
        keras.layers.Dense(intermediat_dim, activation="relu"),
        keras.layers.Dense(latent_dim),
        keras.layers.Reshape((latent_dim,)),
    ], name = "Encoder")

    decoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=(latent_dim,)),
        keras.layers.Dense(intermediat_dim, activation = "relu"),
        keras.layers.Dense(np.prod(input_shape), activation = "sigmoid"),
        keras.layers.Reshape(input_shape),
    ], name = "Decoder")

    # Initiate model
    model = Autoencoder(encoder, decoder)
    return model


#-----------------------------------------------------------------------------
# Variational autoencoders

def variational_autoencoder_mnist_cnn_deep(input_shape = (28,28,1), latent_dim = 10, **kwargs):
    # OUTPUT_SHAPE = (28,28,1)
    # Variational autoencoder
    encoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=input_shape),
        keras.layers.Reshape((28,28,1)),
        keras.layers.Conv2D(8, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(8, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(16, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(16, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(32, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(32, (3,3), padding = "same", activation = "relu"),
        keras.layers.MaxPool2D((2,2)),
        keras.layers.Conv2D(64, (3,3), padding = "same", activation = "relu"),
        keras.layers.Conv2D(64, (3,3), padding = "same", activation = "relu"),
        keras.layers.Flatten(),
        keras.layers.Dense(200, activation = 'relu'),
        keras.layers.Dense(100, activation = 'relu'),
        keras.layers.Dense(2*latent_dim),
        keras.layers.Reshape((2,latent_dim)),
    ], name = "Encoder")

    decoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=(latent_dim,)),
        keras.layers.Dense(200, activation = "relu"),
        keras.layers.Dense(3*3*64, activation = "relu"),
        keras.layers.Reshape((3,3,64)),
        keras.layers.Conv2DTranspose(32, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(32, (3,3), activation = "relu", strides = 2),
        keras.layers.UpSampling2D((2,2)),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.UpSampling2D((2,2)),
        keras.layers.Conv2DTranspose(16, (3,3), activation = "relu", padding = "same"),
        keras.layers.Conv2DTranspose(1, (3,3), activation = "sigmoid", padding = "same"),
        keras.layers.Reshape(input_shape),
    ], name = "Decoder")

    model = VariationalAutoencoder(encoder, decoder)
    return model


def variational_autoencoder_mnist_cnn_shallow(input_shape = (28,28,1), latent_dim = 10, **kwargs):
    encoder = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=input_shape),
            tf.keras.layers.Reshape((28,28,1)),
            tf.keras.layers.Conv2D(
                filters=32, kernel_size=3, strides=(2, 2), activation='relu'),
            tf.keras.layers.Conv2D(
                filters=64, kernel_size=3, strides=(2, 2), activation='relu'),
            tf.keras.layers.Flatten(),
            # No activation
            tf.keras.layers.Dense(2*latent_dim),
            tf.keras.layers.Reshape((2,latent_dim)),
        ]
    )

    decoder = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=(latent_dim,)),
            tf.keras.layers.Dense(units=7*7*32, activation=tf.nn.relu),
            tf.keras.layers.Reshape(target_shape=(7, 7, 32)),
            tf.keras.layers.Conv2DTranspose(
                filters=64, kernel_size=3, strides=2, padding='same', activation='relu'),
            tf.keras.layers.Conv2DTranspose(
                filters=32, kernel_size=3, strides=2, padding='same', activation='relu'),
            # No activation
            tf.keras.layers.Conv2DTranspose(
                filters=1, kernel_size=3, strides=1, padding='same', activation = "sigmoid"),
            tf.keras.layers.Reshape(input_shape),
        ]
    )

    # Initiate model
    model = VariationalAutoencoder(encoder, decoder)
    return model


def variational_autoencoder_dense(input_shape = None, latent_dim = 10, intermediat_dim = 100, **kwargs):
    encoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=input_shape),
        keras.layers.Flatten(),
        keras.layers.Dense(intermediat_dim, activation="relu"),
        keras.layers.Dense(2*latent_dim),
        keras.layers.Reshape((2,latent_dim)),
    ], name = "Encoder")

    decoder = keras.Sequential([
        keras.layers.InputLayer(input_shape=(latent_dim,)),
        keras.layers.Dense(intermediat_dim, activation = "relu"),
        keras.layers.Dense(np.prod(input_shape), activation = "sigmoid"),
        keras.layers.Reshape(input_shape),
    ], name = "Decoder")

    # Initiate model
    model = VariationalAutoencoder(encoder, decoder)
    return model

#-----------------------------------------------------------------------------
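A short usage sketch for the registry-style loader above (hedged: assumes jlauto and TensorFlow are installed; the hyperparameters are arbitrary):

from jlauto.models.load_premade import load_premade_model

# Dispatches through the nested dict: load['autoencoder']['dense'](**kwargs).
model = load_premade_model(model_type='autoencoder', model_name='dense',
                           latent_dim=10, input_shape=(28, 28, 1))
print(type(model).__name__)  # Autoencoder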
42.653696
101
0.599161
1,216
10,962
5.294408
0.063322
0.187947
0.082634
0.108729
0.905405
0.874029
0.847313
0.847313
0.818888
0.78658
0
0.04353
0.235085
10,962
256
102
42.820313
0.72427
0.056377
0
0.783505
0
0
0.058766
0.006691
0
0
0
0
0
1
0.036082
false
0
0.030928
0
0.103093
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e5fcdd809462d257927bcca5d2a664f24d1cb0d5
874
py
Python
ddtn/run_many.py
DukeGonzo/ddtn
156cf5fb2f2e46619c0243a5accfddbe3567f109
[ "MIT" ]
51
2018-03-25T07:18:21.000Z
2022-02-11T12:05:52.000Z
ddtn/run_many.py
DukeGonzo/ddtn
156cf5fb2f2e46619c0243a5accfddbe3567f109
[ "MIT" ]
2
2018-10-26T06:43:44.000Z
2018-12-20T02:05:31.000Z
ddtn/run_many.py
DukeGonzo/ddtn
156cf5fb2f2e46619c0243a5accfddbe3567f109
[ "MIT" ]
7
2018-04-11T20:34:27.000Z
2021-07-19T17:57:40.000Z
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 28 14:18:21 2018

@author: nsde
"""

#%%
import os

#%%
ne = '-ne 50 '
bs = '-bs 100 '
lr = '-lr 1e-5 '

#%%
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt no " + ne + bs + lr)
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt affine " + ne + bs + lr)
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt affinediffeo " + ne + bs + lr)
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt homografy " + ne + bs + lr)
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt TPS " + ne + bs + lr)
os.system("PYTHONPATH='/home/nsde/Documents/ddtn' python mnist_classifier.py -tt CPAB " + ne + bs + lr)
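The string-concatenated os.system calls above are easy to get wrong; a hedged alternative sketch (not the author's code) runs the same script and flags via subprocess with an explicit environment and a loop over the transformer types:

import os
import subprocess

env = {**os.environ, "PYTHONPATH": "/home/nsde/Documents/ddtn"}
for tt in ["no", "affine", "affinediffeo", "homografy", "TPS", "CPAB"]:
    # check=True raises if any run fails instead of silently continuing.
    subprocess.run(
        ["python", "mnist_classifier.py", "-tt", tt, "-ne", "50", "-bs", "100", "-lr", "1e-5"],
        env=env, check=True,
    )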
30.137931
95
0.632723
126
874
4.34127
0.34127
0.087751
0.197441
0.241316
0.756856
0.756856
0.756856
0.756856
0.756856
0.756856
0
0.029957
0.197941
874
29
96
30.137931
0.750357
0.114416
0
0.375
0
0
0.636959
0.29882
0
0
0
0
0
1
0
false
0
0.0625
0
0.0625
0
0
0
0
null
0
1
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
00640b1be1be8bf2e594bb79ca0146d1db973f66
1,560
py
Python
cloudformation/test_cf.py
DYeag/AWS-Shell
b5318e72373b1a948ac6aced1c0bb4566d5ae46f
[ "0BSD" ]
3
2016-08-22T07:14:56.000Z
2018-03-16T07:31:44.000Z
cloudformation/test_cf.py
QualiSystemsLab/AWS-Shell-ext
bf7b62640d8d97a5e9199edb7a1ada0b98aac6fb
[ "0BSD" ]
470
2016-03-24T13:38:08.000Z
2022-02-05T01:14:05.000Z
cloudformation/test_cf.py
QualiSystemsLab/AWS-Shell-ext
bf7b62640d8d97a5e9199edb7a1ada0b98aac6fb
[ "0BSD" ]
9
2016-06-20T11:41:54.000Z
2020-11-21T00:42:45.000Z
from unittest import TestCase
import json


class TestCloudFormation(TestCase):
    def setUp(self):
        pass

    def test_main_json_valid(self):
        json_file = open('0_Main.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_main_ex_json_valid(self):
        json_file = open('0_Main_EX.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_main_ex_no_vpn_json_valid(self):
        json_file = open('0_Main_EX_No_VPN.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_vpc_json_valid(self):
        json_file = open('1_VPC.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_vpc_ex_json_valid(self):
        json_file = open('1_VPC_EX.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_vpc_ex__no_vpn_json_valid(self):
        json_file = open('1_VPC_EX_No_VPN.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_ec2_json_valid(self):
        json_file = open('2_EC2.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_ec2_ex_json_valid(self):
        json_file = open('2_EC2_EX.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)

    def test_ec2_ex__no_vpn_json_valid(self):
        json_file = open('2_EC2_EX_No_VPN.json', 'r')
        json_string = json_file.read()
        json.loads(json_string)
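The nine near-identical methods above could be collapsed with unittest's subTest; a hedged refactor sketch (same behavior, not the original authors' code):

import json
import unittest

TEMPLATES = [
    '0_Main.json', '0_Main_EX.json', '0_Main_EX_No_VPN.json',
    '1_VPC.json', '1_VPC_EX.json', '1_VPC_EX_No_VPN.json',
    '2_EC2.json', '2_EC2_EX.json', '2_EC2_EX_No_VPN.json',
]


class TestCloudFormationTemplates(unittest.TestCase):
    def test_templates_are_valid_json(self):
        # Each template gets its own pass/fail record via subTest.
        for name in TEMPLATES:
            with self.subTest(template=name):
                with open(name, 'r') as json_file:
                    json.loads(json_file.read())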
28.888889
54
0.644872
235
1,560
3.86383
0.119149
0.15859
0.128855
0.168502
0.896476
0.896476
0.896476
0.896476
0.799559
0.589207
0
0.01268
0.241667
1,560
53
55
29.433962
0.754861
0
0
0.439024
0
0
0.090385
0.013462
0
0
0
0
0
1
0.243902
false
0.02439
0.04878
0
0.317073
0
0
0
0
null
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
00db7ec563c22288a9920b45af3e41147620391d
282
py
Python
src/genie/libs/parser/iosxe/tests/ShowBootvar/cli/equal/golden_output1_expected.py
balmasea/genieparser
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
[ "Apache-2.0" ]
204
2018-06-27T00:55:27.000Z
2022-03-06T21:12:18.000Z
src/genie/libs/parser/iosxe/tests/ShowBootvar/cli/equal/golden_output1_expected.py
balmasea/genieparser
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
[ "Apache-2.0" ]
468
2018-06-19T00:33:18.000Z
2022-03-31T23:23:35.000Z
src/genie/libs/parser/iosxe/tests/ShowBootvar/cli/equal/golden_output1_expected.py
balmasea/genieparser
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
[ "Apache-2.0" ]
309
2019-01-16T20:21:07.000Z
2022-03-30T12:56:41.000Z
expected_output = {
    "active": {
        "boot_variable": "harddisk:/ISSUCleanGolden,12;bootflash:12351822-iedge-asr-uut,12;",
        "configuration_register": "0x2",
    },
    "next_reload_boot_variable": "harddisk:/ISSUCleanGolden,12;bootflash:12351822-iedge-asr-uut,12;",
}
35.25
101
0.687943
30
282
6.266667
0.6
0.12766
0.212766
0.37234
0.712766
0.712766
0.712766
0.712766
0.712766
0.712766
0
0.107884
0.14539
282
7
102
40.285714
0.672199
0
0
0
0
0
0.705674
0.62766
0
0
0.010638
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
00e04f82f30d8a9e4efd91f4afa8ee177322df18
112
py
Python
MiscContests/Code Galdiators/Semifinal/gen.py
Mindjolt2406/Competitive-Programming
d000d98bf7005ee4fb809bcea2f110e4c4793b80
[ "MIT" ]
2
2018-12-11T14:37:24.000Z
2022-01-23T18:11:54.000Z
MiscContests/Code Galdiators/Semifinal/gen.py
Mindjolt2406/Competitive-Programming
d000d98bf7005ee4fb809bcea2f110e4c4793b80
[ "MIT" ]
null
null
null
MiscContests/Code Galdiators/Semifinal/gen.py
Mindjolt2406/Competitive-Programming
d000d98bf7005ee4fb809bcea2f110e4c4793b80
[ "MIT" ]
null
null
null
print 1
print 100000,100000
for i in range(100000):
    print 1,
print ""
for i in range(100000):
    print 1,
print ""
16
32
0.705357
21
112
3.761905
0.333333
0.227848
0.417722
0.278481
0.708861
0.708861
0.708861
0.708861
0
0
0
0.293478
0.178571
112
6
33
18.666667
0.565217
0
0
0.666667
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
0
0
null
1
1
1
0
1
1
1
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
11
dab2e81c8710a956681a93975d3427ecfb8f0a04
150
py
Python
src/test/boostrap/salle/salles.py
stormi/tsunami
bdc853229834b52b2ee8ed54a3161a1a3133d926
[ "BSD-3-Clause" ]
14
2015-08-21T19:15:21.000Z
2017-11-26T13:59:17.000Z
src/test/boostrap/salle/salles.py
stormi/tsunami
bdc853229834b52b2ee8ed54a3161a1a3133d926
[ "BSD-3-Clause" ]
20
2015-09-29T20:50:45.000Z
2018-06-21T12:58:30.000Z
src/test/boostrap/salle/salles.py
stormi/tsunami
bdc853229834b52b2ee8ed54a3161a1a3133d926
[ "BSD-3-Clause" ]
3
2015-05-02T19:42:03.000Z
2018-09-06T10:55:00.000Z
importeur.salle.creer_salle("autre", "1", 10, 10)
importeur.salle.creer_salle("autre", "2", 10, 11)
importeur.salle.creer_salle("autre", "3", 10, 12)
37.5
49
0.7
24
150
4.25
0.416667
0.411765
0.558824
0.705882
0.852941
0
0
0
0
0
0
0.108696
0.08
150
3
50
50
0.630435
0
0
0
0
0
0.12
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
9
975948a23edd2bc842a406dbf7332a58c0adf99f
9,118
py
Python
tests/cli/env/test_remove.py
ashemedai/hatch
9ec00d5e027c992efbc16dd777b1f6926368b6bf
[ "MIT" ]
null
null
null
tests/cli/env/test_remove.py
ashemedai/hatch
9ec00d5e027c992efbc16dd777b1f6926368b6bf
[ "MIT" ]
null
null
null
tests/cli/env/test_remove.py
ashemedai/hatch
9ec00d5e027c992efbc16dd777b1f6926368b6bf
[ "MIT" ]
null
null
null
from hatch.config.constants import AppEnvVars
from hatch.project.core import Project


def test_unknown(hatch, temp_dir, helpers, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'foo')

    assert result.exit_code == 1
    assert result.output == helpers.dedent(
        """
        Unknown environment: foo
        """
    )


def test_nonexistent(hatch, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'default')

    assert result.exit_code == 0, result.output
    assert not result.output


def test_single(hatch, helpers, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    project = Project(project_path)
    helpers.update_project_environment(project, 'default', {'skip-install': True, **project.config.envs['default']})
    helpers.update_project_environment(project, 'foo', {})
    helpers.update_project_environment(project, 'bar', {})

    with project_path.as_cwd():
        result = hatch('env', 'create', 'foo')

    assert result.exit_code == 0, result.output

    with project_path.as_cwd():
        result = hatch('env', 'create', 'bar')

    assert result.exit_code == 0, result.output

    env_cache_path = cache_path / 'env' / 'virtual'
    assert env_cache_path.is_dir()

    storage_dirs = list(env_cache_path.iterdir())
    assert len(storage_dirs) == 1

    storage_path = storage_dirs[0]

    project_part = f'{project_path.name}-'
    assert storage_path.name.startswith(project_part)

    hash_part = storage_path.name[len(project_part) :]
    assert len(hash_part) == 8

    env_dirs = list(storage_path.iterdir())
    assert len(env_dirs) == 2

    foo_env_path = storage_path / 'foo'
    bar_env_path = storage_path / 'bar'

    assert foo_env_path.is_dir()
    assert bar_env_path.is_dir()

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'bar')

    assert result.exit_code == 0, result.output
    assert not result.output

    assert foo_env_path.is_dir()
    assert not bar_env_path.is_dir()


def test_all(hatch, helpers, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    project = Project(project_path)
    helpers.update_project_environment(project, 'default', {'skip-install': True, **project.config.envs['default']})
    helpers.update_project_environment(project, 'foo', {})
    helpers.update_project_environment(project, 'bar', {})

    with project_path.as_cwd():
        result = hatch('env', 'create', 'foo')

    assert result.exit_code == 0, result.output

    with project_path.as_cwd():
        result = hatch('env', 'create', 'bar')

    assert result.exit_code == 0, result.output

    env_cache_path = cache_path / 'env' / 'virtual'
    assert env_cache_path.is_dir()

    storage_dirs = list(env_cache_path.iterdir())
    assert len(storage_dirs) == 1

    storage_path = storage_dirs[0]

    project_part = f'{project_path.name}-'
    assert storage_path.name.startswith(project_part)

    hash_part = storage_path.name[len(project_part) :]
    assert len(hash_part) == 8

    env_dirs = list(storage_path.iterdir())
    assert len(env_dirs) == 2

    foo_env_path = storage_path / 'foo'
    bar_env_path = storage_path / 'bar'

    assert foo_env_path.is_dir()
    assert bar_env_path.is_dir()

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'foo')

    assert result.exit_code == 0, result.output
    assert not result.output

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'bar')

    assert result.exit_code == 0, result.output
    assert not result.output

    assert not storage_path.is_dir()


def test_matrix_all(hatch, helpers, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    project = Project(project_path)
    helpers.update_project_environment(project, 'default', {'skip-install': True, **project.config.envs['default']})
    helpers.update_project_environment(project, 'foo', {'matrix': [{'version': ['9000', '42']}]})

    with project_path.as_cwd():
        result = hatch('env', 'create', 'foo')

    assert result.exit_code == 0, result.output

    env_cache_path = cache_path / 'env' / 'virtual'
    assert env_cache_path.is_dir()

    storage_dirs = list(env_cache_path.iterdir())
    assert len(storage_dirs) == 1

    storage_path = storage_dirs[0]

    project_part = f'{project_path.name}-'
    assert storage_path.name.startswith(project_part)

    hash_part = storage_path.name[len(project_part) :]
    assert len(hash_part) == 8

    env_dirs = list(storage_path.iterdir())
    assert len(env_dirs) == 2

    foo_env_path = storage_path / 'foo.42'
    bar_env_path = storage_path / 'foo.9000'

    assert foo_env_path.is_dir()
    assert bar_env_path.is_dir()

    with project_path.as_cwd():
        result = hatch('env', 'remove', 'foo')

    assert result.exit_code == 0, result.output
    assert not result.output

    assert not storage_path.is_dir()


def test_incompatible_ok(hatch, helpers, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    project = Project(project_path)
    helpers.update_project_environment(
        project, 'default', {'skip-install': True, 'platforms': ['foo'], **project.config.envs['default']}
    )

    with project_path.as_cwd():
        result = hatch('env', 'remove')

    assert result.exit_code == 0, result.output
    assert not result.output


def test_active(hatch, temp_dir, helpers, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    with project_path.as_cwd(env_vars={AppEnvVars.ENV_ACTIVE: 'default'}):
        result = hatch('env', 'remove')

    assert result.exit_code == 1
    assert result.output == helpers.dedent(
        """
        Cannot remove active environment: default
        """
    )


def test_active_override(hatch, helpers, temp_dir, config_file):
    project_name = 'My App'

    cache_path = temp_dir / 'cache'
    config_file.model.dirs.env = str(cache_path)
    config_file.save()

    with temp_dir.as_cwd():
        result = hatch('new', project_name)

    assert result.exit_code == 0, result.output

    project_path = temp_dir / 'my-app'

    project = Project(project_path)
    helpers.update_project_environment(project, 'default', {'skip-install': True, **project.config.envs['default']})
    helpers.update_project_environment(project, 'foo', {})

    with project_path.as_cwd():
        result = hatch('env', 'create')

    assert result.exit_code == 0, result.output

    env_cache_path = cache_path / 'env' / 'virtual'
    assert env_cache_path.is_dir()

    storage_dirs = list(env_cache_path.iterdir())
    assert len(storage_dirs) == 1

    storage_path = storage_dirs[0]

    project_part = f'{project_path.name}-'
    assert storage_path.name.startswith(project_part)

    hash_part = storage_path.name[len(project_part) :]
    assert len(hash_part) == 8

    env_dirs = list(storage_path.iterdir())
    assert len(env_dirs) == 1

    (storage_path / 'default').is_dir()

    with project_path.as_cwd(env_vars={AppEnvVars.ENV_ACTIVE: 'foo'}):
        result = hatch('env', 'remove', 'default')

    assert result.exit_code == 0, result.output
    assert not result.output

    assert not storage_path.is_dir()
27.21791
116
0.670432
1,241
9,118
4.651088
0.060435
0.038808
0.063756
0.079695
0.945773
0.937803
0.937803
0.933299
0.929834
0.915974
0
0.007008
0.201908
9,118
334
117
27.299401
0.786176
0
0
0.885714
0
0
0.075625
0
0
0
0
0
0.3
1
0.038095
false
0
0.009524
0
0.047619
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
97a5a9b25dfa5067e639f7c0b5076832ed97be6a
2,499
py
Python
config.py
jkhu29/Deblurring-by-Realistic-Blurring
fef7041f96127e415912ae62975ca0be01797204
[ "MIT" ]
10
2021-07-30T08:33:03.000Z
2021-12-03T07:04:53.000Z
config.py
jkhu29/Deblurring-by-Realistic-Blurring
fef7041f96127e415912ae62975ca0be01797204
[ "MIT" ]
4
2021-09-15T08:16:05.000Z
2022-02-14T06:35:12.000Z
config.py
jkhu29/Deblurring-by-Realistic-Blurring
fef7041f96127e415912ae62975ca0be01797204
[ "MIT" ]
1
2022-02-17T09:59:36.000Z
2022-02-17T09:59:36.000Z
import argparse


def get_cyclegan_options(parser=argparse.ArgumentParser()):
    parser.add_argument('--train_file', type=str, required=True)
    parser.add_argument('--valid_file', type=str, required=True)
    parser.add_argument('--output_dir', type=str, default="/home/jkhu29/img-edit/deblur")
    parser.add_argument('--workers', type=int, default=4,
                        help='number of data loading workers, you had better put it '
                             '4 times of your gpu')
    parser.add_argument('--batch_size', type=int, default=64, help='input batch size, default=64')
    parser.add_argument('--batch_scale', type=int, default=4, help='input batch size, default=64')
    parser.add_argument('--niter', type=int, default=10, help='number of epochs to train for, default=10')
    parser.add_argument('--lr', type=float, default=1e-3, help='select the learning rate, default=1e-4')
    parser.add_argument('--adam', action='store_true', default=True, help='whether to use adam')
    parser.add_argument('--cuda', action='store_true', default=True, help='enables cuda')
    parser.add_argument('--seed', type=int, default=118, help="random seed")
    opt = parser.parse_args()
    return opt


def get_dbgan_options(parser=argparse.ArgumentParser()):
    parser.add_argument('--train_file', type=str, required=True)
    parser.add_argument('--valid_file', type=str, required=True)
    parser.add_argument('--output_dir', type=str, default="/home/jkhu29/img-edit/deblur")
    parser.add_argument('--workers', type=int, default=4,
                        help='number of data loading workers, you had better put it '
                             '4 times of your gpu')
    parser.add_argument('--batch_size', type=int, default=64, help='input batch size, default=64')
    parser.add_argument('--batch_scale', type=int, default=4, help='input batch size, default=64')
    parser.add_argument('--niter', type=int, default=10, help='number of epochs to train for, default=10')
    parser.add_argument('--lr', type=float, default=1e-4, help='select the learning rate, default=1e-4')
    parser.add_argument('--adam', action='store_true', default=True, help='whether to use adam')
    parser.add_argument('--cuda', action='store_true', default=True, help='enables cuda')
    parser.add_argument('--seed', type=int, default=118, help="random seed")
    parser.add_argument('--blur_model_path', type=str, required=True)
    opt = parser.parse_args()
    return opt
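A usage sketch for the option helpers above (hedged: the file paths are placeholders; note that parser=argparse.ArgumentParser() is a mutable default evaluated once at import, so each helper should be called at most once per process unless a fresh parser is passed in):

import sys

# Simulate a command line; in real use argparse reads sys.argv directly.
sys.argv = ["train.py", "--train_file", "data/train.h5", "--valid_file", "data/valid.h5"]
opt = get_cyclegan_options()
print(opt.batch_size, opt.lr, opt.cuda)  # 64 0.001 True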
64.076923
119
0.679872
350
2,499
4.725714
0.22
0.125151
0.236397
0.057437
0.945586
0.945586
0.912938
0.912938
0.912938
0.912938
0
0.021093
0.165266
2,499
38
120
65.763158
0.771812
0
0
0.8125
0
0
0.32453
0.022409
0
0
0
0
0
1
0.0625
false
0
0.03125
0
0.15625
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
97ac4aaa302b8266e188a1227e9d754f647d9ffb
3,338
py
Python
tests/data.py
BolunThompson/PyLox
afcdebf44ace3c032d7247d79704d60791f605ef
[ "MIT" ]
2
2021-09-07T06:50:46.000Z
2021-11-15T22:54:42.000Z
tests/data.py
BolunThompson/PyLox
afcdebf44ace3c032d7247d79704d60791f605ef
[ "MIT" ]
null
null
null
tests/data.py
BolunThompson/PyLox
afcdebf44ace3c032d7247d79704d60791f605ef
[ "MIT" ]
1
2021-03-26T20:05:13.000Z
2021-03-26T20:05:13.000Z
from pylox.lox_types import nil
from pylox.token_classes import Token, TokenType

# TODO: Use mocking?
_source_text = (
    "2+2;\n"
    " /* 𐀀 ⍅*/ // \r\n"
    "\tif (nil) {var x = 3/2; print x+-2.1;} fun test(a, b) {;}\n"
    "true==false\n"
    '"test"=="test"'
)
SOURCE = (
    _source_text,
    (
        Token(type=TokenType("NUMBER"), lexeme="2", literal=2.0, line=1),
        Token(type=TokenType("PLUS"), lexeme="+", literal=None, line=1),
        Token(type=TokenType("NUMBER"), lexeme="2", literal=2.0, line=1),
        Token(type=TokenType("SEMICOLON"), lexeme=";", literal=None, line=1),
        Token(type=TokenType("IF"), lexeme="if", literal=None, line=2),
        Token(type=TokenType("LEFT_PAREN"), lexeme="(", literal=None, line=2),
        Token(type=TokenType("NIL"), lexeme="nil", literal=nil, line=2),
        Token(type=TokenType("RIGHT_PAREN"), lexeme=")", literal=None, line=2),
        Token(type=TokenType("LEFT_BRACE"), lexeme="{", literal=None, line=2),
        Token(type=TokenType("VAR"), lexeme="var", literal=None, line=2),
        Token(type=TokenType("IDENTIFIER"), lexeme="x", literal=None, line=2),
        Token(type=TokenType("EQUAL"), lexeme="=", literal=None, line=2),
        Token(type=TokenType("NUMBER"), lexeme="3", literal=3.0, line=2),
        Token(type=TokenType("SLASH"), lexeme="/", literal=None, line=2),
        Token(type=TokenType("NUMBER"), lexeme="2", literal=2.0, line=2),
        Token(type=TokenType("SEMICOLON"), lexeme=";", literal=None, line=2),
        Token(type=TokenType("PRINT"), lexeme="print", literal=None, line=2),
        Token(type=TokenType("IDENTIFIER"), lexeme="x", literal=None, line=2),
        Token(type=TokenType("PLUS"), lexeme="+", literal=None, line=2),
        Token(type=TokenType("MINUS"), lexeme="-", literal=None, line=2),
        Token(type=TokenType("NUMBER"), lexeme="2.1", literal=2.1, line=2),
        Token(type=TokenType("SEMICOLON"), lexeme=";", literal=None, line=2),
        Token(type=TokenType("RIGHT_BRACE"), lexeme="}", literal=None, line=2),
        Token(type=TokenType("FUN"), lexeme="fun", literal=None, line=2),
        Token(type=TokenType("IDENTIFIER"), lexeme="test", literal=None, line=2),
        Token(type=TokenType("LEFT_PAREN"), lexeme="(", literal=None, line=2),
        Token(type=TokenType("IDENTIFIER"), lexeme="a", literal=None, line=2),
        Token(type=TokenType("COMMA"), lexeme=",", literal=None, line=2),
        Token(type=TokenType("IDENTIFIER"), lexeme="b", literal=None, line=2),
        Token(type=TokenType("RIGHT_PAREN"), lexeme=")", literal=None, line=2),
        Token(type=TokenType("LEFT_BRACE"), lexeme="{", literal=None, line=2),
        Token(type=TokenType("SEMICOLON"), lexeme=";", literal=None, line=2),
        Token(type=TokenType("RIGHT_BRACE"), lexeme="}", literal=None, line=2),
        Token(type=TokenType("TRUE"), lexeme="true", literal=True, line=3),
        Token(type=TokenType("EQUAL_EQUAL"), lexeme="==", literal=None, line=3),
        Token(type=TokenType("FALSE"), lexeme="false", literal=False, line=3),
        Token(type=TokenType("STRING"), lexeme='"test"', literal="test", line=4),
        Token(type=TokenType("EQUAL_EQUAL"), lexeme="==", literal=None, line=4),
        Token(type=TokenType("STRING"), lexeme='"test"', literal="test", line=4),
    ),
)
EXPRS = ()
53.83871
81
0.615938
435
3,338
4.691954
0.121839
0.171975
0.343949
0.198922
0.837335
0.820186
0.808427
0.791769
0.728074
0.655561
0
0.021903
0.165668
3,338
61
82
54.721311
0.710592
0.005392
0
0.314815
0
0.018519
0.144321
0
0
0
0
0.016393
0
1
0
false
0
0.037037
0
0.037037
0.037037
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
9
c141576a2d7baa0c86ff9f62da60c2294afdcf27
9,825
py
Python
tests/components/azure_devops/test_config_flow.py
pcaston/core
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
[ "Apache-2.0" ]
1
2021-07-08T20:09:55.000Z
2021-07-08T20:09:55.000Z
tests/components/azure_devops/test_config_flow.py
pcaston/core
e74d946cef7a9d4e232ae9e0ba150d18018cfe33
[ "Apache-2.0" ]
47
2021-02-21T23:43:07.000Z
2022-03-31T06:07:10.000Z
tests/components/azure_devops/test_config_flow.py
OpenPeerPower/core
f673dfac9f2d0c48fa30af37b0a99df9dd6640ee
[ "Apache-2.0" ]
null
null
null
"""Test the Azure DevOps config flow.""" from unittest.mock import patch from aioazuredevops.core import DevOpsProject import aiohttp from openpeerpower import config_entries, data_entry_flow from openpeerpower.components.azure_devops.const import ( CONF_ORG, CONF_PAT, CONF_PROJECT, DOMAIN, ) from openpeerpower.core import OpenPeerPower from tests.common import MockConfigEntry FIXTURE_REAUTH_INPUT = {CONF_PAT: "abc123"} FIXTURE_USER_INPUT = {CONF_ORG: "random", CONF_PROJECT: "project", CONF_PAT: "abc123"} UNIQUE_ID = "random_project" async def test_show_user_form(opp: OpenPeerPower) -> None: """Test that the setup form is served.""" result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" async def test_authorization_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps authorization error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", return_value=False, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "user" assert result2["errors"] == {"base": "invalid_auth"} async def test_reauth_authorization_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps authorization error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", return_value=False, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FIXTURE_USER_INPUT, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "reauth" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "invalid_auth"} async def test_connection_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps connection error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", side_effect=aiohttp.ClientError, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "user" assert result2["errors"] == {"base": "cannot_connect"} async def test_reauth_connection_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps connection error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", side_effect=aiohttp.ClientError, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FIXTURE_USER_INPUT, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "reauth" result2 = 
await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "cannot_connect"} async def test_project_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps connection error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorized", return_value=True, ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.get_project", return_value=None, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "user" assert result2["errors"] == {"base": "project_error"} async def test_reauth_project_error(opp: OpenPeerPower) -> None: """Test we show user form on Azure DevOps project error.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorized", return_value=True, ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.get_project", return_value=None, ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FIXTURE_USER_INPUT, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "reauth" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["step_id"] == "reauth" assert result2["errors"] == {"base": "project_error"} async def test_reauth_flow(opp: OpenPeerPower) -> None: """Test reauth works.""" with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", return_value=False, ): mock_config = MockConfigEntry( domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT ) mock_config.add_to_opp(opp) result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FIXTURE_USER_INPUT, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "reauth" assert result["errors"] == {"base": "invalid_auth"} with patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorized", return_value=True, ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.get_project", return_value=DevOpsProject( "abcd-abcd-abcd-abcd", FIXTURE_USER_INPUT[CONF_PROJECT] ), ): result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_REAUTH_INPUT, ) await opp.async_block_till_done() assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result2["reason"] == "reauth_successful" async def test_full_flow_implementation(opp: OpenPeerPower) -> None: """Test registering an integration and finishing flow works.""" with patch( "openpeerpower.components.azure_devops.async_setup_entry", 
return_value=True, ) as mock_setup_entry, patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorized", return_value=True, ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize", ), patch( "openpeerpower.components.azure_devops.config_flow.DevOpsClient.get_project", return_value=DevOpsProject( "abcd-abcd-abcd-abcd", FIXTURE_USER_INPUT[CONF_PROJECT] ), ): result = await opp.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result2 = await opp.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT, ) await opp.async_block_till_done() assert len(mock_setup_entry.mock_calls) == 1 assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert ( result2["title"] == f"{FIXTURE_USER_INPUT[CONF_ORG]}/{FIXTURE_USER_INPUT[CONF_PROJECT]}" ) assert result2["data"][CONF_ORG] == FIXTURE_USER_INPUT[CONF_ORG] assert result2["data"][CONF_PROJECT] == FIXTURE_USER_INPUT[CONF_PROJECT]
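Every test in this file follows the same arrange/act/assert shape: patch the DevOpsClient coroutine where the config flow looks it up, drive the flow, then inspect the re-rendered form. Reduced to its skeleton (this would sit inside an async test function like those above):

# Skeleton of the pattern used throughout this file: patch at the lookup
# site (the config_flow module), then assert on the re-rendered form.
with patch(
    "openpeerpower.components.azure_devops.config_flow.DevOpsClient.authorize",
    return_value=False,  # simulate a rejected personal access token
):
    result = await opp.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    result2 = await opp.config_entries.flow.async_configure(
        result["flow_id"], FIXTURE_USER_INPUT
    )
    assert result2["errors"] == {"base": "invalid_auth"}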
35.989011
86
0.663511
1,115
9,825
5.552466
0.093274
0.056695
0.085931
0.104345
0.846067
0.833791
0.825069
0.816508
0.810047
0.79454
0
0.005147
0.228804
9,825
272
87
36.121324
0.811931
0.003461
0
0.732394
0
0
0.213963
0.146089
0
0
0
0
0.206573
1
0
false
0
0.032864
0
0.032864
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c16f4b9fbb0eac79b4625155736f17d80bf10449
102
py
Python
tests/test_analyze.py
lbsx/gov-purchase-analyzer
e6bf01289fef3ed35e493868617a9e6b26064dde
[ "MIT" ]
null
null
null
tests/test_analyze.py
lbsx/gov-purchase-analyzer
e6bf01289fef3ed35e493868617a9e6b26064dde
[ "MIT" ]
null
null
null
tests/test_analyze.py
lbsx/gov-purchase-analyzer
e6bf01289fef3ed35e493868617a9e6b26064dde
[ "MIT" ]
null
null
null
from analyze import starts_with_wan


def test_starts_with_wan():
    assert starts_with_wan('万abc')
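The implementation under test is not included in this record; a minimal sketch of a conforming starts_with_wan (only the name comes from the test, the body is a guess):

# Hypothetical implementation that would satisfy the test above.
def starts_with_wan(text: str) -> bool:
    return text.startswith('万')  # '万' is the CJK character for 10,000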
17
35
0.784314
16
102
4.5625
0.625
0.410959
0.534247
0
0
0
0
0
0
0
0
0
0.127451
102
5
36
20.4
0.820225
0
0
0
0
0
0.039216
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
8
c1e53b1a9dae7b7da023cffa31a8cc4957eeb231
1,561
py
Python
Final_QCNN/Angular_hybrid.py
magelead/QCNN
611750f1529b361713dcf5a4792e901295077688
[ "Apache-2.0" ]
9
2021-11-10T07:06:00.000Z
2022-03-10T18:15:29.000Z
Final_QCNN/Angular_hybrid.py
magelead/QCNN
611750f1529b361713dcf5a4792e901295077688
[ "Apache-2.0" ]
1
2022-03-08T03:13:57.000Z
2022-03-22T20:33:17.000Z
Final_QCNN/Angular_hybrid.py
magelead/QCNN
611750f1529b361713dcf5a4792e901295077688
[ "Apache-2.0" ]
6
2021-11-04T01:17:44.000Z
2022-03-05T14:16:24.000Z
# This is an implementation of an alternative Mottonen State Preparation
# to avoid the normalization problem.
import pennylane as qml


# 3 bits of information are embedded in 2 wires
def Angular_Hybrid_2(X, wires):
    qml.RY(X[0], wires=wires[0])
    qml.PauliX(wires=wires[0])
    qml.CRY(X[1], wires=[wires[0], wires[1]])
    qml.PauliX(wires=wires[0])
    qml.CRY(X[2], wires=[wires[0], wires[1]])


# 15 bits of information are embedded in 4 wires
def Angular_Hybrid_4(X, wires):
    qml.RY(X[0], wires=wires[0])
    qml.PauliX(wires=wires[0])
    qml.CRY(X[1], wires=[wires[0], wires[1]])
    qml.PauliX(wires=wires[0])
    qml.CRY(X[2], wires=[wires[0], wires[1]])
    qml.RY(X[3], wires=wires[2])
    qml.CNOT(wires=[wires[1], wires[2]])
    qml.RY(X[4], wires=wires[2])
    qml.CNOT(wires=[wires[0], wires[2]])
    qml.RY(X[5], wires=wires[2])
    qml.CNOT(wires=[wires[1], wires[2]])
    qml.RY(X[6], wires=wires[2])
    qml.CNOT(wires=[wires[0], wires[2]])
    qml.RY(X[7], wires=wires[3])
    qml.CNOT(wires=[wires[2], wires[3]])
    qml.RY(X[8], wires=wires[3])
    qml.CNOT(wires=[wires[1], wires[3]])
    qml.RY(X[9], wires=wires[3])
    qml.CNOT(wires=[wires[2], wires[3]])
    qml.RY(X[10], wires=wires[3])
    qml.CNOT(wires=[wires[0], wires[3]])
    qml.RY(X[11], wires=wires[3])
    qml.CNOT(wires=[wires[2], wires[3]])
    qml.RY(X[12], wires=wires[3])
    qml.CNOT(wires=[wires[1], wires[3]])
    qml.RY(X[13], wires=wires[3])
    qml.CNOT(wires=[wires[2], wires[3]])
    qml.RY(X[14], wires=wires[3])
    qml.CNOT(wires=[wires[0], wires[3]])
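A usage sketch relying only on the public PennyLane API: bind Angular_Hybrid_4 inside a QNode on a four-wire simulator and feed it 15 angles (the device choice and input values are arbitrary, not from the file):

# Usage sketch; device and inputs are arbitrary choices.
import numpy as np

dev = qml.device("default.qubit", wires=4)

@qml.qnode(dev)
def circuit(X):
    Angular_Hybrid_4(X, wires=[0, 1, 2, 3])  # 15 angles embedded on 4 wires
    return qml.state()

state = circuit(np.random.uniform(0, np.pi, 15))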
33.212766
104
0.612428
286
1,561
3.328671
0.160839
0.357143
0.141807
0.214286
0.814076
0.807773
0.743697
0.743697
0.743697
0.743697
0
0.061256
0.163357
1,561
46
105
33.934783
0.667688
0.123639
0
0.594595
0
0
0
0
0
0
0
0
0
1
0.054054
false
0
0.027027
0
0.081081
0
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
a9f4130bef02146210579f91f74128009efc1dc3
160
py
Python
neat_code_backup/neatcode_backup_20190317/nn/__init__.py
felix0901/NEAT
a1f25608c98ff1003c1525a291577fa59dec2469
[ "BSD-3-Clause" ]
null
null
null
neat_code_backup/neatcode_backup_20190317/nn/__init__.py
felix0901/NEAT
a1f25608c98ff1003c1525a291577fa59dec2469
[ "BSD-3-Clause" ]
null
null
null
neat_code_backup/neatcode_backup_20190317/nn/__init__.py
felix0901/NEAT
a1f25608c98ff1003c1525a291577fa59dec2469
[ "BSD-3-Clause" ]
null
null
null
from neat.nn.feed_forward import FeedForwardNetwork
from neat.nn.feed_forward_fpga import FeedForwardNetworkFPGA
from neat.nn.recurrent import RecurrentNetwork
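Because this is the package's __init__.py, the re-exports let downstream code import from neat.nn directly, e.g.:

# Enabled by the re-exports above:
from neat.nn import FeedForwardNetwork, RecurrentNetwork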
40
60
0.8875
21
160
6.619048
0.52381
0.172662
0.215827
0.201439
0.302158
0
0
0
0
0
0
0
0.075
160
3
61
53.333333
0.939189
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
e73e17bfb5e1f3ecd7be9aa8f9c0cd97969a3b30
102
py
Python
python/testData/postfix/main/severalStatements_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/postfix/main/severalStatements_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/postfix/main/severalStatements_after.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
if __name__ == '__main__':
    print("I want to be inside main")
    print("I want to be inside main too")
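The guarded prints run only when the file is executed directly; importing the module leaves them silent, since __name__ is then the module's own name rather than '__main__'. Illustratively (the filename is made up):

# If the file above were saved as several_statements.py:
import several_statements   # prints nothing; __name__ != '__main__'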
34
37
0.676471
18
102
3.388889
0.555556
0.295082
0.327869
0.459016
0.852459
0.852459
0.852459
0.852459
0
0
0
0
0.196078
102
3
38
34
0.743902
0
0
0
0
0
0.582524
0
0
0
0
0
0
1
0
true
0
0
0
0
0.666667
1
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
11
e7a713da77059cbfee5cf0fbd349c080f3fe49df
93
py
Python
stubs/esp32_1_10_0/ssl.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
stubs/esp32_1_10_0/ssl.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
stubs/esp32_1_10_0/ssl.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
"Module 'ssl' on firmware 'v1.10-247-g0fb15fc3f on 2019-03-29'" def wrap_socket(): pass
18.6
63
0.688172
16
93
3.9375
0.9375
0
0
0
0
0
0
0
0
0
0
0.230769
0.16129
93
4
64
23.25
0.576923
0.655914
0
0
0
0.333333
0.663043
0.228261
0
0
0
0
0
1
0.333333
true
0.333333
0
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
8
e7daf06d2b0e2ee5dc0d54c0b6253d4d3b0a3c2c
30,715
py
Python
src/controller/wamdamAPI/GetValues.py
WamdamProject/WaMDaM_Wizard
f8f5a830464f3c8f45e4eb0557833eefb267d7b2
[ "BSD-3-Clause" ]
null
null
null
src/controller/wamdamAPI/GetValues.py
WamdamProject/WaMDaM_Wizard
f8f5a830464f3c8f45e4eb0557833eefb267d7b2
[ "BSD-3-Clause" ]
3
2018-11-17T05:49:18.000Z
2020-12-31T15:57:14.000Z
src/controller/wamdamAPI/GetValues.py
WamdamProject/WaMDaM_Wizard
f8f5a830464f3c8f45e4eb0557833eefb267d7b2
[ "BSD-3-Clause" ]
null
null
null
from ..ConnectDB_ParseExcel import DB_Setup from ..ConnectDB_ParseExcel import SqlAlchemy as sq ''' This class is used to get result that query to get data of values in sqlite db. ''' class GetValues(object): def __init__(self, pathOfSqlite=''): self.setup = DB_Setup() if self.setup.get_session() == None and pathOfSqlite != '': self.setup.connect(pathOfSqlite, db_type='sqlite') self.session = self.setup.get_session() self.excel_pointer = None def getNumericValue(self, selectedType='', selectedAttribute='', selectedInstance=''): ''' This method is used to get data making NumericValues_table. :param selectedType: selected Object Type :param selectedAttribute: controlled Attribute :param selectedInstance: controlled Instance Name :param excelPath: full path of excel file to export data :return: None ''' try: if selectedType == '' and selectedAttribute == '' and selectedInstance == '': sql = 'SELECT "ResourceTypes"."ResourceType", ObjectType,AttributeName, SourceName, InstanceName,MasterNetworkName,' \ 'ScenarioName,MethodName, NumericValue ' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "NumericValues" ON "NumericValues"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE "AttributeDataTypeCV"="Parameter"' else: sql = 'SELECT "ResourceTypes"."ResourceType", ObjectType,AttributeName, SourceName, InstanceName,MasterNetworkName,' \ 'ScenarioName,MethodName, NumericValue ' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "NumericValues" ON "NumericValues"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE "AttributeDataTypeCV"="Parameter" AND "ObjectTypeCV" = "{}" AND "InstanceNameCV" = "{}" AND "AttributeNameCV" = "{}"'\ .format(selectedType, selectedInstance, selectedAttribute) result = self.session.execute(sql) # nameResult = list() complete_result = list() for row in result: # isExisting = False # for name in nameResult: # if 
name == row.InstanceName: # isExisting = True # break # if not isExisting: # nameResult.append(row.InstanceName) complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.SourceName, row.MethodName, row.NumericValue]) return complete_result except Exception as e: print e raise Exception('Erro occure in reading Data Structure.\n' + e.message) def getFreeText(self, selectedType = '', selectedAttribute='', selectedInstance=''): ''' This method is used to get data making FreeTextSheet. :param selectedType: selected Object Type :param selectedAttribute: controlled Attribute :param selectedInstance: controlled Instance Name :param excelPath: full path of excel file to export data :return: None ''' try: if selectedType == '' and selectedAttribute == '' and selectedInstance == '': sql = 'SELECT ResourceType, ObjectType, AttributeName, SourceName, InstanceName,FreeTextValue,' \ 'ScenarioName,MethodName ' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "FreeText" ON "FreeText"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE AttributeDataTypeCV="FreeText" ' else: sql = 'SELECT ResourceType, ObjectType, AttributeName, SourceName, InstanceName,FreeTextValue,' \ 'ScenarioName,MethodName ' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "FreeText" ON "FreeText"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE AttributeDataTypeCV="FreeText" ' \ ' AND "ObjectTypeCV" = "{}" AND "InstanceNameCV" = "{}" AND "AttributeNameCV" = "{}"'\ .format(selectedType, selectedInstance, selectedAttribute) result = self.session.execute(sql) # nameResult = list() complete_result = list() for row in result: complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.SourceName, row.MethodName,row.FreeTextValue]) return complete_result except Exception as e: print e raise 
Exception('Erro occure in reading Data Structure.\n' + e.message) def getSeasonaNumericValues(self, selectedResourceType='', selectedNetwork='', selectedScenarior=''): ''' This method is used to get data making SeasonalParameter. :param selectedResourceType: selected Model name :param selectedNetwork: selected Master Network name :param selectedScenarior: selected scenario Name :param excelPath: full path of excel file to export data :return: None ''' try: if selectedResourceType == '' and selectedNetwork == '' and selectedScenarior == '': sql = 'SELECT ObjectType, AttributeName, SourceName, InstanceName,MasterNetworkName,' \ 'ScenarioName,MethodName,SeasonName, SeasonNumericValue, SeasonNameCV, SeasonDateFormate ' \ 'FROM "Attributes" '\ 'Left JOIN "ObjectTypes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "SeasonalNumericValues" ON "SeasonalNumericValues"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE "AttributeDataTypeCV"="SeasonaNumericValues" ' else: sql = 'SELECT ObjectType, AttributeName, SourceName, InstanceName,MasterNetworkName,' \ 'ScenarioName,MethodName,SeasonName, SeasonNumericValue, SeasonNameCV, SeasonDateFormate ' \ 'FROM "Attributes" '\ 'Left JOIN "ObjectTypes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "SeasonalNumericValues" ON "SeasonalNumericValues"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'WHERE "AttributeDataTypeCV"="SeasonaNumericValues" AND "MasterNetworkName" = "{}" AND "ScenarioName" = "{}"'\ .format(selectedNetwork, selectedScenarior) result = self.session.execute(sql) # nameResult = list() complete_result = list() for row in result: complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.SourceName, row.MethodName, row.SeasonName, row.SeasonNameCV, row.SeasonNumericValue, row.SeasonDateFormate]) return complete_result except Exception as e: print e raise Exception('Erro occure in reading Data Structure.\n' + e.message) def gettTimeSeriesValues(self, selectedResourceType='', selectedNetwork='', selectedScenarior=''): ''' This method is used to get data making TimeSeries. 
:param selectedResourceType: selected Model name :param selectedNetwork: selected Master Network name :param selectedScenarior: selected scenario Name :param excelPath: full path of excel file to export data :return: None ''' try: if selectedResourceType == '' and selectedNetwork == '' and selectedScenarior == '': sql = 'SELECT ResourceType ObjectType, AttributeName, SourceName, InstanceName,YearType,' \ 'ScenarioName,MethodName,AggregationStatisticCV, AggregationInterval, IntervalTimeUnitCV,' \ 'IsRegular, NoDataValue, "TimeSeries"."Description", "TimeSeriesValues"."DataValue", "TimeSeriesValues"."DataTimeStamp"' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "TimeSeries" ON "TimeSeries"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'LEFT JOIN "TimeSeriesValues" ON "TimeSeriesValues"."TimeSeriesID" = "TimeSeries"."TimeSeriesID" '\ 'WHERE AttributeName!="ObjectInstances" AND AttributeDataTypeCV="TimeSeries" ' else: sql = 'SELECT ResourceType ObjectType, AttributeName, SourceName, InstanceName,YearType,' \ 'ScenarioName,MethodName,AggregationStatisticCV, AggregationInterval, IntervalTimeUnitCV,' \ 'IsRegular, NoDataValue, "TimeSeries"."Description", "TimeSeriesValues"."DataValue", "TimeSeriesValues"."DataTimeStamp" ' \ 'FROM "ResourceTypes" '\ 'Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" '\ 'Left JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" '\ 'Left JOIN "Mappings" ON "Mappings"."AttributeID"= "Attributes"."AttributeID" '\ 'Left JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" '\ 'Left JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" '\ 'Left JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" '\ 'Left JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" '\ 'Left JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" '\ 'Left JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" '\ 'Left JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" '\ 'LEFT JOIN "TimeSeries" ON "TimeSeries"."ValuesMapperID" = "ValuesMapper"."ValuesMapperID" '\ 'LEFT JOIN "TimeSeriesValues" ON "TimeSeriesValues"."TimeSeriesID" = "TimeSeries"."TimeSeriesID" '\ 'WHERE AttributeName!="ObjectInstances" AND AttributeDataTypeCV="TimeSeries" ' \ 'AND "ResourceTypeAcronym" = "{}" AND "MasterNetworkName" = "{}" AND "ScenarioName" = "{}"'\ .format(selectedResourceType, selectedNetwork, selectedScenarior) result = self.session.execute(sql) # nameResult = list() complete_result = list() for row in result: # isExisting = False # for name in nameResult: # if name == 
row.InstanceName: # isExisting = True # break # if not isExisting: # nameResult.append(row.InstanceName) complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.DataTimeStamp, row.DataValue]) return complete_result except Exception as e: print e raise Exception('Erro occure in reading Data Structure.\n' + e.message) def getMultiAttributeSeries(self, selectedResourceType='', selectedNetwork='', selectedScenarior=''): ''' This method is used to get data making MultiVariableSeries. :param selectedResourceType: selected Model name :param selectedNetwork: selected Master Network name :param selectedScenarior: selected scenario Name :param excelPath: full path of excel file to export data :return: None ''' try: if selectedResourceType == '' and selectedNetwork == '' and selectedScenarior == '': sql = """ SELECT "ObjectTypes"."ObjectType", "Instances"."InstanceName",ScenarioName,"Attributes"."AttributeName" AS MultiAttributeName,"Attributes".AttributeDataTypeCV, SourceName,MethodName, "AttributesColumns"."AttributeName" AS "AttributeName", "AttributesColumns"."AttributeNameCV", "AttributesColumns"."UnitNameCV" AS "AttributeNameUnitName", "ValueOrder","DataValue" FROM ResourceTypes Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" -- Join the Object types to get their attributes LEFT JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" -- Join the Attributes to get their Mappings LEFT JOIN "Mappings" ON Mappings.AttributeID= Attributes.AttributeID -- Join the Mappings to get their Instances LEFT JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" -- Join the Mappings to get their ScenarioMappings LEFT JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" -- Join the ScenarioMappings to get their Scenarios LEFT JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" -- Join the Scenarios to get their MasterNetworks LEFT JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" -- Join the Mappings to get their Methods LEFT JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" -- Join the Mappings to get their Sources LEFT JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" -- Join the Mappings to get their DataValuesMappers LEFT JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" -- Join the DataValuesMapper to get their MultiAttributeSeries LEFT JOIN "MultiAttributeSeries" ON "MultiAttributeSeries" ."ValuesMapperID"="ValuesMapper"."ValuesMapperID" /*This is an extra join to get to each column name within the MultiColumn Array */ -- Join the MultiAttributeSeries to get to their specific DataValuesMapper, now called DataValuesMapperColumn LEFT JOIN "ValuesMapper" As "ValuesMapperColumn" ON "ValuesMapperColumn"."ValuesMapperID"="MultiAttributeSeries"."MappingID_Attribute" -- Join the DataValuesMapperColumn to get back to their specific Mapping, now called MappingColumns LEFT JOIN "Mappings" As "MappingColumns" ON "MappingColumns"."ValuesMapperID"="ValuesMapperColumn"."ValuesMapperID" -- Join the MappingColumns to get back to their specific Attribute, now called AttributeColumns LEFT JOIN "Attributes" AS "AttributesColumns" ON "AttributesColumns"."AttributeID"="MappingColumns"."AttributeID" /* Finishes here */ -- Join the MultiAttributeSeries to get access to their MultiAttributeSeriesValues LEFT JOIN "MultiAttributeSeriesValues" ON 
"MultiAttributeSeriesValues"."MultiAttributeSeriesID"="MultiAttributeSeries"."MultiAttributeSeriesID" -- Select one InstanceName and restrict the query AttributeDataTypeCV that is MultiAttributeSeries WHERE "Attributes".AttributeDataTypeCV='MultiAttributeSeries' """ else: sql = """ SELECT "ObjectTypes"."ObjectType", "Instances"."InstanceName",ScenarioName,"Attributes"."AttributeName" AS MultiAttributeName,"Attributes".AttributeDataTypeCV, SourceName,MethodName, "AttributesColumns"."AttributeName" AS "AttributeName", "AttributesColumns"."AttributeNameCV", "AttributesColumns"."UnitNameCV" AS "AttributeNameUnitName", "ValueOrder","DataValue" FROM ResourceTypes Left JOIN "ObjectTypes" ON "ObjectTypes"."ResourceTypeID"="ResourceTypes"."ResourceTypeID" -- Join the Object types to get their attributes LEFT JOIN "Attributes" ON "Attributes"."ObjectTypeID"="ObjectTypes"."ObjectTypeID" -- Join the Attributes to get their Mappings LEFT JOIN "Mappings" ON Mappings.AttributeID= Attributes.AttributeID -- Join the Mappings to get their Instances LEFT JOIN "Instances" ON "Instances"."InstanceID"="Mappings"."InstanceID" -- Join the Mappings to get their ScenarioMappings LEFT JOIN "ScenarioMappings" ON "ScenarioMappings"."MappingID"="Mappings"."MappingID" -- Join the ScenarioMappings to get their Scenarios LEFT JOIN "Scenarios" ON "Scenarios"."ScenarioID"="ScenarioMappings"."ScenarioID" -- Join the Scenarios to get their MasterNetworks LEFT JOIN "MasterNetworks" ON "MasterNetworks"."MasterNetworkID"="Scenarios"."MasterNetworkID" -- Join the Mappings to get their Methods LEFT JOIN "Methods" ON "Methods"."MethodID"="Mappings"."MethodID" -- Join the Mappings to get their Sources LEFT JOIN "Sources" ON "Sources"."SourceID"="Mappings"."SourceID" -- Join the Mappings to get their DataValuesMappers LEFT JOIN "ValuesMapper" ON "ValuesMapper"."ValuesMapperID"="Mappings"."ValuesMapperID" -- Join the DataValuesMapper to get their MultiAttributeSeries LEFT JOIN "MultiAttributeSeries" ON "MultiAttributeSeries" ."ValuesMapperID"="ValuesMapper"."ValuesMapperID" /*This is an extra join to get to each column name within the MultiColumn Array */ -- Join the MultiAttributeSeries to get to their specific DataValuesMapper, now called DataValuesMapperColumn LEFT JOIN "ValuesMapper" As "ValuesMapperColumn" ON "ValuesMapperColumn"."ValuesMapperID"="MultiAttributeSeries"."MappingID_Attribute" -- Join the DataValuesMapperColumn to get back to their specific Mapping, now called MappingColumns LEFT JOIN "Mappings" As "MappingColumns" ON "MappingColumns"."ValuesMapperID"="ValuesMapperColumn"."ValuesMapperID" -- Join the MappingColumns to get back to their specific Attribute, now called AttributeColumns LEFT JOIN "Attributes" AS "AttributesColumns" ON "AttributesColumns"."AttributeID"="MappingColumns"."AttributeID" /* Finishes here */ -- Join the MultiAttributeSeries to get access to their MultiAttributeSeriesValues LEFT JOIN "MultiAttributeSeriesValues" ON "MultiAttributeSeriesValues"."MultiAttributeSeriesID"="MultiAttributeSeries"."MultiAttributeSeriesID" -- Select one InstanceName and restrict the query AttributeDataTypeCV that is MultiAttributeSeries WHERE "Attributes".AttributeDataTypeCV='MultiAttributeSeries' AND "ResourceTypeAcronym"="{}" AND "MasterNetworkName"= "{}" AND "ScenarioName" ="{}" Order By ScenarioName, AttributeName,ValueOrder asc """.format(selectedResourceType, selectedNetwork, selectedScenarior) result = self.session.execute(sql) '''Down Table(MultiVariableSeries_table Table) write''' complete_result = list() 
strAtrributName = '' valueOrder = None AttributeName = '' tempColumn = {} sourceName = '' i = 0 currentrow = 0 setNumber = 0 for row in result: if row.AttributeName == None or row.AttributeName == "": continue if strAtrributName != row.AttributeName: strAtrributName = row.AttributeName tempColumn[row.AttributeName] = [] tempColumn[row.AttributeName].append(row.AttributeName) AttributeName = row.AttributeName if sourceName != row.ScenarioName: sourceName = row.ScenarioName setNumber = i currentrow = 0 if AttributeName != row.AttributeName: AttributeName = row.AttributeName currentrow = 0 if row.AttributeName in tempColumn[row.AttributeName]: index = tempColumn[row.AttributeName].index(row.AttributeName) if index == 0: complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.SourceName, row.MethodName, row.DataValue]) i += 1 else: complete_result[setNumber + currentrow].append(row.DataValue) currentrow += 1 else: currentrow = 0 tempColumn[row.AttributeName].append(row.AttributeName) index = tempColumn[row.AttributeName].index(row.AttributeName) if index == 0: complete_result.append([row.ObjectType, row.InstanceName, row.ScenarioName, row.AttributeName, row.SourceName, row.MethodName, row.DataValue]) i += 1 else: complete_result[setNumber + currentrow].append(row.DataValue) currentrow += 1 return complete_result except Exception as e: print e raise Exception('Error occured in reading Data Structure.\n' + e.message)
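A usage sketch for the GetValues class above (the sqlite path and filter values are placeholders). Two caveats worth noting: the module is Python 2 (print statements in the except blocks), and the filters are spliced in with str.format, so only trusted values should ever be passed; parameterized queries would be the safer pattern, though that is a suggested change, not what the code does.

# Hypothetical usage; path and filter values are placeholders.
gv = GetValues(pathOfSqlite='wamdam.sqlite')
rows = gv.getNumericValue(
    selectedType='Reservoir',
    selectedAttribute='Storage',
    selectedInstance='Hyrum Reservoir',
)
# Each row: [ObjectType, InstanceName, ScenarioName, AttributeName,
#            SourceName, MethodName, NumericValue]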
68.104213
148
0.572554
2,284
30,715
7.684764
0.092382
0.053783
0.011395
0.011964
0.93807
0.929125
0.918072
0.916306
0.91585
0.909526
0
0.00058
0.326909
30,715
451
149
68.104213
0.848409
0.013967
0
0.850136
0
0.002725
0.656386
0.310487
0
0
0
0
0
0
null
null
0
0.00545
null
null
0.013624
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
9
99b5acc26a3a3b7b1e534cd5f516ad60d34cd65e
4,042
py
Python
src/bert_models/training/at_training.py
roronoayhd/2021daguan
132380c55c54de08ec44c2c4161f962312c50a29
[ "Apache-2.0" ]
24
2021-09-02T10:50:13.000Z
2021-11-03T10:06:36.000Z
src/bert_models/training/at_training.py
roronoayhd/2021daguan
132380c55c54de08ec44c2c4161f962312c50a29
[ "Apache-2.0" ]
2
2021-09-16T02:12:06.000Z
2021-12-03T06:50:18.000Z
src/bert_models/training/at_training.py
roronoayhd/2021daguan
132380c55c54de08ec44c2c4161f962312c50a29
[ "Apache-2.0" ]
7
2021-09-02T15:25:21.000Z
2021-09-18T17:09:24.000Z
import logging
import re

import torch

logger = logging.getLogger(__name__)


class FGM(object):
    """Reference: https://arxiv.org/pdf/1605.07725.pdf"""

    def __init__(self, model, emb_names=['word_embeddings', "encoder.layer.0"], epsilon=1.0):
        self.model = model
        # emb_names: replace these with the embedding parameter names used
        # by your model; several name fragments may be given.
        self.emb_names = emb_names
        self.epsilon = epsilon
        self.emb_backup = {}
        self.grad_backup = {}

    def attack(self):
        """Add adversity."""
        for name, param in self.model.named_parameters():
            if param.requires_grad and re.search("|".join(self.emb_names), name):
                # back up the real parameters
                self.emb_backup[name] = param.data.clone()
                norm = torch.norm(param.grad)
                if norm != 0 and not torch.isnan(norm):
                    r_adv = self.epsilon * param.grad / norm
                    param.data.add_(r_adv)

    def restore(self):
        """Restore embedding."""
        for name, param in self.model.named_parameters():
            if param.requires_grad and re.search("|".join(self.emb_names), name):
                assert name in self.emb_backup
                param.data = self.emb_backup[name]
        self.emb_backup = {}

    def backup_grad(self):
        for name, param in self.model.named_parameters():
            if param.requires_grad and param.grad is not None:
                self.grad_backup[name] = param.grad.clone()

    def restore_grad(self):
        for name, param in self.model.named_parameters():
            if param.requires_grad and param.grad is not None:
                if re.search("|".join(self.emb_names), name):
                    param.grad = self.grad_backup[name]
                else:
                    param.grad += self.grad_backup[name]


class PGD(object):
    """Reference: https://arxiv.org/pdf/1706.06083.pdf"""

    def __init__(self, model, emb_names=['word_embeddings', "encoder.layer.0"], epsilon=1.0, alpha=0.3):
        self.model = model
        self.emb_names = emb_names
        self.epsilon = epsilon
        self.alpha = alpha
        self.emb_backup = {}
        self.grad_backup = {}

    def attack(self, is_first_attack=False):
        """Add adversity."""
        for name, param in self.model.named_parameters():
            if param.requires_grad and re.search("|".join(self.emb_names), name):
                if is_first_attack:
                    self.emb_backup[name] = param.data.clone()
                norm = torch.norm(param.grad)
                if norm != 0 and not torch.isnan(norm):
                    r_adv = self.alpha * param.grad / norm
                    param.data.add_(r_adv)
                    param.data = self.project(name, param.data)

    def restore(self):
        """Restore embedding."""
        for name, param in self.model.named_parameters():
            if param.requires_grad and re.search("|".join(self.emb_names), name):
                assert name in self.emb_backup
                param.data = self.emb_backup[name]
        self.emb_backup = {}

    def project(self, param_name, param_data):
        # Clip the accumulated perturbation back onto the epsilon-ball around
        # the backed-up embedding; r_adv itself must be reassigned here (the
        # original stored the clipped value in an unused r_adv_0, so the
        # perturbation was never actually clipped).
        r_adv = param_data - self.emb_backup[param_name]
        if torch.norm(r_adv) > self.epsilon:
            r_adv = self.epsilon * r_adv / torch.norm(r_adv)
        return self.emb_backup[param_name] + r_adv

    def backup_grad(self):
        for name, param in self.model.named_parameters():
            if param.requires_grad and param.grad is not None:
                self.grad_backup[name] = param.grad.clone()

    def restore_grad(self):
        for name, param in self.model.named_parameters():
            if param.requires_grad and param.grad is not None:
                if re.search("|".join(self.emb_names), name):
                    param.grad = self.grad_backup[name]
                else:
                    param.grad += self.grad_backup[name]
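These classes wrap a standard training loop. A sketch of the FGM variant, where everything except the FGM calls is a stand-in:

# Training-loop sketch; model, loader, and optimizer are stand-ins.
fgm = FGM(model)
for batch in loader:
    loss = model(**batch).loss
    loss.backward()        # gradients on the clean embeddings
    fgm.attack()           # perturb embeddings in place, backing them up
    loss_adv = model(**batch).loss
    loss_adv.backward()    # accumulate adversarial gradients on top
    fgm.restore()          # put the real embeddings back
    optimizer.step()
    model.zero_grad()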
36.089286
81
0.563582
500
4,042
4.378
0.14
0.063956
0.071265
0.051165
0.852901
0.809045
0.780722
0.780722
0.754226
0.681133
0
0.010627
0.324839
4,042
112
82
36.089286
0.791499
0.053439
0
0.746988
0
0
0.017405
0
0
0
0
0
0.024096
1
0.13253
false
0
0.036145
0
0.204819
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
99bfb3c9c7d380b43de31eb120bad28acfbe40c8
7,443
py
Python
assignments/assignment2/model.py
MorrisNein/dlcourse_ai
9458921dc60ae56579793e295eb3b28f95eda1c2
[ "MIT" ]
null
null
null
assignments/assignment2/model.py
MorrisNein/dlcourse_ai
9458921dc60ae56579793e295eb3b28f95eda1c2
[ "MIT" ]
null
null
null
assignments/assignment2/model.py
MorrisNein/dlcourse_ai
9458921dc60ae56579793e295eb3b28f95eda1c2
[ "MIT" ]
null
null
null
import numpy as np from layers import FullyConnectedLayer, ReLULayer, softmax_with_cross_entropy, l2_regularization, softmax from collections import OrderedDict class TwoLayerNet: """ Neural network with two fully connected layers """ def __init__(self, n_input, n_output, hidden_layer_size, reg): """ Initializes the neural network Arguments: n_input, int - dimension of the model input n_output, int - number of classes to predict hidden_layer_size, int - number of neurons in the hidden layer reg, float - L2 regularization strength """ self.reg = reg # TODO Create necessary layers self.layers = OrderedDict({ "linear_1": FullyConnectedLayer(n_input, hidden_layer_size), "relu_1": ReLULayer(), "linear_2": FullyConnectedLayer(hidden_layer_size, n_output) }) def compute_loss_and_gradients(self, X, y): """ Computes total loss and updates parameter gradients on a batch of training examples Arguments: X, np array (batch_size, input_features) - input data y, np array of int (batch_size) - classes """ # Before running forward and backward pass through the model, # clear parameter gradients aggregated from the previous pass # TODO Set parameter gradient to zeros # Hint: using self.params() might be useful! params = self.params().values() for par in params: par.grad = np.zeros_like(par.grad) # TODO Compute loss and fill param gradients # by running forward and backward passes through the model # Forward pass for n_lay, lay in enumerate(self.layers.values()): if n_lay == 0: # print(X) current_X = lay.forward(X) else: # print(current_X) current_X = lay.forward(current_X) # print(current_X) # print(f"{n_lay}, {lay}") clf_output = current_X CE_loss, dpredictions = softmax_with_cross_entropy(clf_output, y) # Backward pass for n_lay, lay in enumerate(reversed(self.layers.values())): # print(f"{n_lay}") if n_lay == 0: # print(dpredictions) current_dX = lay.backward(dpredictions) else: # print(current_dX) current_dX = lay.backward(current_dX) # print(current_dX) # After that, implement l2 regularization on all params # Hint: self.params() is useful again! 
reg_loss_accumulated = 0 for par in params: reg_loss, dpar = l2_regularization(par.value, self.reg) par.grad += dpar reg_loss_accumulated += reg_loss loss = CE_loss + reg_loss_accumulated return loss def predict(self, X): """ Produces classifier predictions on the set Arguments: X, np array (test_samples, num_features) Returns: y_pred, np.array of int (test_samples) """ # TODO: Implement predict # Hint: some of the code of the compute_loss_and_gradients # can be reused y_pred = np.zeros(X.shape[0], np.int) for n_lay, lay in enumerate(self.layers.values()): if n_lay == 0: current_X = lay.forward(X) else: current_X = lay.forward(current_X) clf_output = current_X probs = softmax(clf_output) # print(probs) y_pred = np.argmax(probs, axis=-1) # print(y_pred) # raise Exception("Not implemented!") return y_pred def params(self): result = {} # TODO Implement aggregating all of the params for layname, lay in self.layers.items(): result.update({f"{layname}_{parname}" : par for parname, par in lay.params().items()}) return result class OneLayerNet: """ Neural network with two fully connected layers """ def __init__(self, n_input, n_output, hidden_layer_size, reg): """ Initializes the neural network Arguments: n_input, int - dimension of the model input n_output, int - number of classes to predict hidden_layer_size, int - number of neurons in the hidden layer reg, float - L2 regularization strength """ self.reg = reg # TODO Create necessary layers self.layers = OrderedDict({ "linear_1": FullyConnectedLayer(n_input, n_output), }) def compute_loss_and_gradients(self, X, y): """ Computes total loss and updates parameter gradients on a batch of training examples Arguments: X, np array (batch_size, input_features) - input data y, np array of int (batch_size) - classes """ # Before running forward and backward pass through the model, # clear parameter gradients aggregated from the previous pass # TODO Set parameter gradient to zeros # Hint: using self.params() might be useful! params = self.params().values() for par in params: par.grad = np.zeros_like(par.grad) # TODO Compute loss and fill param gradients # by running forward and backward passes through the model # Forward pass for n_lay, lay in enumerate(self.layers.values()): if n_lay == 0: current_X = lay.forward(X) else: current_X = lay.forward(current_X) clf_output = current_X CE_loss, dpredictions = softmax_with_cross_entropy(clf_output, y) # Backward pass for n_lay, lay in enumerate(reversed(self.layers.values())): if n_lay == 0: current_dX = lay.backward(dpredictions) else: current_dX = lay.backward(current_dX) # After that, implement l2 regularization on all params # Hint: self.params() is useful again! 
reg_loss_accumulated = 0 for par in params: reg_loss, dpar = l2_regularization(par.value, self.reg) par.grad += dpar reg_loss_accumulated += reg_loss loss = CE_loss + reg_loss_accumulated return loss def predict(self, X): """ Produces classifier predictions on the set Arguments: X, np array (test_samples, num_features) Returns: y_pred, np.array of int (test_samples) """ # TODO: Implement predict # Hint: some of the code of the compute_loss_and_gradients # can be reused y_pred = np.zeros(X.shape[0], np.int) for n_lay, lay in enumerate(self.layers.values()): if n_lay == 0: current_X = lay.forward(X) else: current_X = lay.forward(current_X) clf_output = current_X probs = softmax(clf_output) # print(probs) y_pred = np.argmax(probs, axis=-1) # print(y_pred) # raise Exception("Not implemented!") return y_pred def params(self): result = {} # TODO Implement aggregating all of the params for layname, lay in self.layers.items(): result.update({f"{layname}_{parname}" : par for parname, par in lay.params().items()}) return result
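A smoke-test sketch for TwoLayerNet with arbitrary shapes; it relies only on the constructor and methods defined above. (One portability note: predict() uses np.int, which NumPy removed in 1.24; plain int is the drop-in replacement.)

# Smoke test; shapes and hyperparameters are arbitrary.
import numpy as np

net = TwoLayerNet(n_input=3072, n_output=10, hidden_layer_size=100, reg=1e-4)
X = np.random.randn(16, 3072)
y = np.random.randint(0, 10, size=16)
loss = net.compute_loss_and_gradients(X, y)   # also fills param gradients
preds = net.predict(X)                        # shape (16,)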
33.678733
105
0.595593
913
7,443
4.686747
0.164294
0.033653
0.020566
0.033653
0.925684
0.921243
0.88315
0.88315
0.879878
0.879878
0
0.004569
0.32366
7,443
220
106
33.831818
0.845451
0.361413
0
0.905263
0
0
0.015629
0
0
0
0
0.027273
0
1
0.084211
false
0
0.031579
0
0.2
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
8
99eaae6aa3d3914f71d2af6c29dc34d918cf7454
15,872
py
Python
tests/tasks/kubernetes/test_service.py
concreted/prefect
dd732f5990ee2b0f3d816adb285168fd63b239e4
[ "Apache-2.0" ]
8,633
2019-03-23T17:51:03.000Z
2022-03-31T22:17:42.000Z
tests/tasks/kubernetes/test_service.py
concreted/prefect
dd732f5990ee2b0f3d816adb285168fd63b239e4
[ "Apache-2.0" ]
3,903
2019-03-23T19:11:21.000Z
2022-03-31T23:21:23.000Z
tests/tasks/kubernetes/test_service.py
concreted/prefect
dd732f5990ee2b0f3d816adb285168fd63b239e4
[ "Apache-2.0" ]
937
2019-03-23T18:49:44.000Z
2022-03-31T21:45:13.000Z
from unittest.mock import MagicMock import pytest import prefect from prefect.tasks.kubernetes import ( CreateNamespacedService, DeleteNamespacedService, ListNamespacedService, PatchNamespacedService, ReadNamespacedService, ReplaceNamespacedService, ) from prefect.utilities.configuration import set_temporary_config @pytest.fixture def kube_secret(): with set_temporary_config({"cloud.use_local_secrets": True}): with prefect.context(secrets=dict(KUBERNETES_API_KEY="test_key")): yield @pytest.fixture def api_client(monkeypatch): client = MagicMock() monkeypatch.setattr( "prefect.tasks.kubernetes.service.get_kubernetes_client", MagicMock(return_value=client), ) return client class TestCreateNamespacedServiceTask: def test_empty_initialization(self, kube_secret): task = CreateNamespacedService() assert task.body == {} assert task.namespace == "default" assert task.kube_kwargs == {} assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY" def test_filled_initialization(self, kube_secret): task = CreateNamespacedService( body={"test": "test"}, namespace="test", kube_kwargs={"test": "test"}, kubernetes_api_key_secret="test", ) assert task.body == {"test": "test"} assert task.namespace == "test" assert task.kube_kwargs == {"test": "test"} assert task.kubernetes_api_key_secret == "test" def test_empty_body_raises_error(self, kube_secret): task = CreateNamespacedService() with pytest.raises(ValueError): task.run() def test_invalid_body_raises_error(self, kube_secret): task = CreateNamespacedService() with pytest.raises(ValueError): task.run(body=None) def test_body_value_is_replaced(self, kube_secret, api_client): task = CreateNamespacedService(body={"test": "a"}) task.run(body={"test": "b"}) assert api_client.create_namespaced_service.call_args[1]["body"] == { "test": "b" } def test_body_value_is_appended(self, kube_secret, api_client): task = CreateNamespacedService(body={"test": "a"}) task.run(body={"a": "test"}) assert api_client.create_namespaced_service.call_args[1]["body"] == { "a": "test", "test": "a", } def test_empty_body_value_is_updated(self, kube_secret, api_client): task = CreateNamespacedService() task.run(body={"test": "a"}) assert api_client.create_namespaced_service.call_args[1]["body"] == { "test": "a" } def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client): task = CreateNamespacedService(body={"test": "a"}, kube_kwargs={"test": "a"}) task.run(kube_kwargs={"test": "b"}) assert api_client.create_namespaced_service.call_args[1]["test"] == "b" def test_kube_kwargs_value_is_appended(self, kube_secret, api_client): task = CreateNamespacedService(body={"test": "a"}, kube_kwargs={"test": "a"}) task.run(kube_kwargs={"a": "test"}) assert api_client.create_namespaced_service.call_args[1]["a"] == "test" assert api_client.create_namespaced_service.call_args[1]["test"] == "a" def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client): task = CreateNamespacedService(body={"test": "a"}) task.run(kube_kwargs={"test": "a"}) assert api_client.create_namespaced_service.call_args[1]["test"] == "a" class TestDeleteNamespacedServiceTask: def test_empty_initialization(self, kube_secret): task = DeleteNamespacedService() assert not task.service_name assert task.namespace == "default" assert task.kube_kwargs == {} assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY" def test_filled_initialization(self, kube_secret): task = DeleteNamespacedService( service_name="test", namespace="test", kube_kwargs={"test": "test"}, kubernetes_api_key_secret="test", ) assert 
        assert task.service_name == "test"
        assert task.namespace == "test"
        assert task.kube_kwargs == {"test": "test"}
        assert task.kubernetes_api_key_secret == "test"

    def test_empty_name_raises_error(self, kube_secret):
        task = DeleteNamespacedService()
        with pytest.raises(ValueError):
            task.run()

    def test_invalid_body_raises_error(self, kube_secret):
        task = DeleteNamespacedService()
        with pytest.raises(ValueError):
            task.run(service_name=None)

    def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client):
        task = DeleteNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert api_client.delete_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, kube_secret, api_client):
        task = DeleteNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        assert api_client.delete_namespaced_service.call_args[1]["a"] == "test"
        assert api_client.delete_namespaced_service.call_args[1]["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client):
        task = DeleteNamespacedService(service_name="test")
        task.run(kube_kwargs={"test": "a"})
        assert api_client.delete_namespaced_service.call_args[1]["test"] == "a"


class TestListNamespacedServiceTask:
    def test_empty_initialization(self, kube_secret):
        task = ListNamespacedService()
        assert task.namespace == "default"
        assert task.kube_kwargs == {}
        assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self, kube_secret):
        task = ListNamespacedService(
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert task.namespace == "test"
        assert task.kube_kwargs == {"test": "test"}
        assert task.kubernetes_api_key_secret == "test"

    def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client):
        task = ListNamespacedService(kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert api_client.list_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, kube_secret, api_client):
        task = ListNamespacedService(kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        assert api_client.list_namespaced_service.call_args[1]["a"] == "test"
        assert api_client.list_namespaced_service.call_args[1]["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client):
        task = ListNamespacedService()
        task.run(kube_kwargs={"test": "a"})
        assert api_client.list_namespaced_service.call_args[1]["test"] == "a"


class TestPatchNamespacedServiceTask:
    def test_empty_initialization(self, kube_secret):
        task = PatchNamespacedService()
        assert not task.service_name
        assert task.body == {}
        assert task.namespace == "default"
        assert task.kube_kwargs == {}
        assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self, kube_secret):
        task = PatchNamespacedService(
            service_name="test",
            body={"test": "test"},
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert task.service_name == "test"
        assert task.body == {"test": "test"}
        assert task.namespace == "test"
        assert task.kube_kwargs == {"test": "test"}
        assert task.kubernetes_api_key_secret == "test"

    def test_empty_body_raises_error(self, kube_secret):
        task = PatchNamespacedService()
        with pytest.raises(ValueError):
            task.run()

    def test_invalid_body_raises_error(self, kube_secret):
        task = PatchNamespacedService()
        with pytest.raises(ValueError):
            task.run(body=None)

    def test_invalid_service_name_raises_error(self, kube_secret):
        task = PatchNamespacedService()
        with pytest.raises(ValueError):
            task.run(body={"test": "test"}, service_name=None)

    def test_body_value_is_replaced(self, kube_secret, api_client):
        task = PatchNamespacedService(body={"test": "a"}, service_name="test")
        task.run(body={"test": "b"})
        assert api_client.patch_namespaced_service.call_args[1]["body"] == {"test": "b"}

    def test_body_value_is_appended(self, kube_secret, api_client):
        task = PatchNamespacedService(body={"test": "a"}, service_name="test")
        task.run(body={"a": "test"})
        assert api_client.patch_namespaced_service.call_args[1]["body"] == {
            "a": "test",
            "test": "a",
        }

    def test_empty_body_value_is_updated(self, kube_secret, api_client):
        task = PatchNamespacedService(service_name="test")
        task.run(body={"test": "a"})
        assert api_client.patch_namespaced_service.call_args[1]["body"] == {"test": "a"}

    def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client):
        task = PatchNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"test": "b"})
        assert api_client.patch_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, kube_secret, api_client):
        task = PatchNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"a": "test"})
        assert api_client.patch_namespaced_service.call_args[1]["a"] == "test"
        assert api_client.patch_namespaced_service.call_args[1]["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client):
        task = PatchNamespacedService(body={"test": "a"}, service_name="test")
        task.run(kube_kwargs={"test": "a"})
        assert api_client.patch_namespaced_service.call_args[1]["test"] == "a"


class TestReadNamespacedServiceTask:
    def test_empty_initialization(self, kube_secret):
        task = ReadNamespacedService()
        assert not task.service_name
        assert task.namespace == "default"
        assert task.kube_kwargs == {}
        assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self, kube_secret):
        task = ReadNamespacedService(
            service_name="test",
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert task.service_name == "test"
        assert task.namespace == "test"
        assert task.kube_kwargs == {"test": "test"}
        assert task.kubernetes_api_key_secret == "test"

    def test_empty_name_raises_error(self, kube_secret):
        task = ReadNamespacedService()
        with pytest.raises(ValueError):
            task.run()

    def test_invalid_body_raises_error(self, kube_secret):
        task = ReadNamespacedService()
        with pytest.raises(ValueError):
            task.run(service_name=None)

    def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client):
        task = ReadNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert api_client.read_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, kube_secret, api_client):
        task = ReadNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        assert api_client.read_namespaced_service.call_args[1]["a"] == "test"
        assert api_client.read_namespaced_service.call_args[1]["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client):
        task = ReadNamespacedService(service_name="test")
        task.run(kube_kwargs={"test": "a"})
        assert api_client.read_namespaced_service.call_args[1]["test"] == "a"


class TestReplaceNamespacedServiceTask:
    def test_empty_initialization(self, kube_secret):
        task = ReplaceNamespacedService()
        assert not task.service_name
        assert task.body == {}
        assert task.namespace == "default"
        assert task.kube_kwargs == {}
        assert task.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self, kube_secret):
        task = ReplaceNamespacedService(
            service_name="test",
            body={"test": "test"},
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert task.service_name == "test"
        assert task.body == {"test": "test"}
        assert task.namespace == "test"
        assert task.kube_kwargs == {"test": "test"}
        assert task.kubernetes_api_key_secret == "test"

    def test_empty_body_raises_error(self, kube_secret):
        task = ReplaceNamespacedService()
        with pytest.raises(ValueError):
            task.run()

    def test_invalid_body_raises_error(self, kube_secret):
        task = ReplaceNamespacedService()
        with pytest.raises(ValueError):
            task.run(body=None)

    def test_invalid_service_name_raises_error(self, kube_secret):
        task = ReplaceNamespacedService()
        with pytest.raises(ValueError):
            task.run(body={"test": "test"}, service_name=None)

    def test_body_value_is_replaced(self, kube_secret, api_client):
        task = ReplaceNamespacedService(body={"test": "a"}, service_name="test")
        task.run(body={"test": "b"})
        assert api_client.replace_namespaced_service.call_args[1]["body"] == {
            "test": "b"
        }

    def test_body_value_is_appended(self, kube_secret, api_client):
        task = ReplaceNamespacedService(body={"test": "a"}, service_name="test")
        task.run(body={"a": "test"})
        assert api_client.replace_namespaced_service.call_args[1]["body"] == {
            "a": "test",
            "test": "a",
        }

    def test_empty_body_value_is_updated(self, kube_secret, api_client):
        task = ReplaceNamespacedService(service_name="test")
        task.run(body={"test": "a"})
        assert api_client.replace_namespaced_service.call_args[1]["body"] == {
            "test": "a"
        }

    def test_kube_kwargs_value_is_replaced(self, kube_secret, api_client):
        task = ReplaceNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"test": "b"})
        assert api_client.replace_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, kube_secret, api_client):
        task = ReplaceNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"a": "test"})
        assert api_client.replace_namespaced_service.call_args[1]["a"] == "test"
        assert api_client.replace_namespaced_service.call_args[1]["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, kube_secret, api_client):
        task = ReplaceNamespacedService(body={"test": "a"}, service_name="test")
        task.run(kube_kwargs={"test": "a"})
        assert api_client.replace_namespaced_service.call_args[1]["test"] == "a"
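The replace/append/update triplets repeated across these test classes all pin down one merge rule: keyword arguments passed to `run()` are dict-merged over the values given at construction time, with the runtime value winning on key collisions. A minimal sketch of that behavior as the tests imply it (the helper name `merge_kwargs` is illustrative, not part of the library under test):

# Hedged sketch: reproduces the merge semantics the tests above assert.
def merge_kwargs(init_kwargs: dict, run_kwargs: dict) -> dict:
    merged = dict(init_kwargs or {})  # start from construction-time values
    merged.update(run_kwargs or {})   # runtime values replace or append keys
    return merged

assert merge_kwargs({"test": "a"}, {"test": "b"}) == {"test": "b"}                # replaced
assert merge_kwargs({"test": "a"}, {"a": "test"}) == {"test": "a", "a": "test"}   # appended
assert merge_kwargs({}, {"test": "a"}) == {"test": "a"}                           # updated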
38.245783
88
0.654612
1,837
15,872
5.339684
0.045727
0.067285
0.07279
0.084106
0.92354
0.92354
0.896218
0.894077
0.863493
0.86023
0
0.002657
0.217427
15,872
414
89
38.338164
0.787054
0
0
0.786378
0
0
0.068107
0.004851
0
0
0
0
0.256966
1
0.164087
false
0
0.01548
0
0.201238
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
99f96715495e4a2b1e0641cb6a394b2a6f575da5
131
py
Python
python/testData/joinLines/CommentProducesTooLongLineAfterJoin.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/joinLines/CommentProducesTooLongLineAfterJoin.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/joinLines/CommentProducesTooLongLineAfterJoin.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
# this comment is very very very very very very long.
# And this is the second line of this very long comment
def test():
    pass
32.75
56
0.725191
24
131
3.958333
0.541667
0.421053
0.505263
0.505263
0.252632
0
0
0
0
0
0
0
0.229008
131
4
57
32.75
0.940594
0.80916
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
9
41579165f191d47071c14d731a8728016fa78030
6,815
py
Python
fabfile.py
errord/sputnik
b83c635a9a160dcd5809265c0d9d231ade33e5ea
[ "BSD-3-Clause" ]
null
null
null
fabfile.py
errord/sputnik
b83c635a9a160dcd5809265c0d9d231ade33e5ea
[ "BSD-3-Clause" ]
null
null
null
fabfile.py
errord/sputnik
b83c635a9a160dcd5809265c0d9d231ade33e5ea
[ "BSD-3-Clause" ]
1
2018-03-04T04:48:44.000Z
2018-03-04T04:48:44.000Z
#!/usr/bin/env python
# coding:utf8
# author: zhizhimama
# date: 2014-10-09

from fabric.api import *
from fabric.colors import *
import os
import sys
import time

fab_time = time.strftime('%m%d_%H%M%S')

env.roledefs = {
    'web_server': ['xxxx@xx.xx.xx.xx'],
    'service_server': ['xxxx@xx.xx.xx.xx'],
    'all_server': ['xxxx@xx.xx.xx.xx', 'xxxx@xx.xx.xx.xx'],
}

target_path = '/home/msx/pip_local'


@roles('service_server')
def _upload_service(level):
    """upload project to the service server"""
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with lcd(localpath):
        local('rm -rf {0}_temp'.format(project))
        local('rm -rf {0}_temp.tar.bz'.format(project))
        local('cp -rf {0} {0}_temp'.format(project))
        local("find ./{0}_temp -type f -name '*.pyc' | xargs rm -rf".format(project))
        local("rm -rf {0}_temp/tags".format(project))
        local("rm -rf {0}_temp/.git".format(project))
        local('tar jcvf {0}_temp.tar.bz {0}_temp'.format(project))
        print green('uploading to msx@{host}:{path}/{level}/{dir}'.format(
            host=env.host, path=target_path, dir=project, level=level))
        local("scp {filename}_temp.tar.bz msx@{host}:{path}/{level}/{dir}".format(
            filename=project, host=env.host, path=target_path, dir=project, level=level))


@roles('all_server')
def _upload_all(level):
    """upload project to all the servers"""
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with lcd(localpath):
        local('rm -rf {0}_temp'.format(project))
        local('rm -rf {0}_temp.tar.bz'.format(project))
        local('cp -rf {0} {0}_temp'.format(project))
        local("find ./{0}_temp -type f -name '*.pyc' | xargs rm -rf".format(project))
        local("rm -rf {0}_temp/tags".format(project))
        local("rm -rf {0}_temp/.git".format(project))
        local('tar jcvf {0}_temp.tar.bz {0}_temp'.format(project))
        print green('uploading to msx@{host}:{path}/{level}/{dir}'.format(
            host=env.host, path=target_path, dir=project, level=level))
        local("scp {filename}_temp.tar.bz msx@{host}:{path}/{level}/{dir}".format(
            filename=project, host=env.host, path=target_path, dir=project, level=level))


@roles('service_server')
def _install_service(workon=None, level=''):
    """install on the service server, need to specify env and level"""
    if not (workon and level):
        print red('please specify an environment:\nusage: fab install:env_name,level')
        sys.exit(0)
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with prefix('workon {0}'.format(workon)):
        with cd('/'.join((target_path, level, project))):
            run('mv {0} {0}.bak/{0}_{1}'.format(project, fab_time))
            run('tar jxf {0}_temp.tar.bz'.format(project))
            run('mv {0}_temp {0}'.format(project))
            with cd('./{0}'.format(project)):
                run('python setup.py install')


@roles('all_server')
def _install_all(workon=None, level=''):
    """install on all the servers, need to specify env and level"""
    if not (workon and level):
        print red('please specify an environment:\nusage: fab install:env_name,level')
        sys.exit(0)
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with prefix('workon {0}'.format(workon)):
        with cd('/'.join((target_path, level, project))):
            run('mv {0} {0}.bak/{0}_{1}'.format(project, fab_time))
            run('tar jxf {0}_temp.tar.bz'.format(project))
            run('mv {0}_temp {0}'.format(project))
            with cd('./{0}'.format(project)):
                run('python setup.py install')


@roles('service_server')
def _start(level=None, workon=None):
    """start mode under env, usage: fab start:mode,env"""
    if not (level and workon):
        print red('please specify an environment:\nusage: fab start:level,env_name')
        sys.exit(0)
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with prefix('workon {0}'.format(workon)):
        with cd('/'.join((target_path, level, project, '{0}', 'server')).format(project)):
            service = 'spumaster'
            print yellow('new restart {}'.format(service))
            run('./run_{0}.sh stop {1}'.format(service, level))
            run('./run_{0}.sh start {1}'.format(service, level))
            run('sleep 5')
            run('./run_{0}.sh list {1}'.format(service, level))

            service = 'fastmq'
            print yellow('new restart {}'.format(service))
            run('./run_{0}.sh stop {1}'.format(service, level))
            run('./run_{0}.sh start {1}'.format(service, level))
            run('sleep 5')
            run('./run_{0}.sh list {1}'.format(service, level))


@roles('service_server')
def _show_status(level=None, workon=None):
    """show status, usage: fab start:mode,env"""
    if not (level and workon):
        print red('please specify an environment:\nusage: fab start:level,env_name')
        sys.exit(0)
    pwd = local('pwd', capture=True)
    localpath, project = os.path.split(pwd)
    with prefix('workon {0}'.format(workon)):
        with cd('/'.join((target_path, level, project, '{0}', 'server')).format(project)):
            service = 'spumaster'
            print yellow('new show {}'.format(service))
            run('./run_{0}.sh list {1}'.format(service, level))

            service = 'fastmq'
            print yellow('new show {}'.format(service))
            run('./run_{0}.sh list {1}'.format(service, level))


def dev(level='dev', env='leo_dev'):
    execute(_upload_service, level=level)
    execute(_install_service, workon=env, level=level)
    execute(_start, level=level, workon=env)


def pre(level='pre', env='leo_pre'):
    execute(_upload_service, level=level)
    execute(_install_service, workon=env, level=level)
    execute(_start, level=level, workon=env)


def online(level='online', env='leo_online'):
    execute(_upload_all, level=level)
    execute(_install_all, workon=env, level=level)
    execute(_start, level=level, workon=env)


def restart_dev(level='dev', env='leo_dev'):
    execute(_start, level=level, workon=env)


def restart_pre(level='pre', env='leo_pre'):
    execute(_start, level=level, workon=env)


def restart_online(level='online', env='leo_online'):
    execute(_start, level=level, workon=env)


def show_dev_status(level='dev', env='leo_dev'):
    execute(_show_status, level=level, workon=env)


def show_pre_status(level='pre', env='leo_pre'):
    execute(_show_status, level=level, workon=env)


def show_online_status(level='online', env='leo_online'):
    execute(_show_status, level=level, workon=env)
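The public tasks at the bottom are thin compositions of the role-bound helpers via Fabric's `execute`; a hedged sketch of one more entry point in the same upload -> install -> start shape (the `stage` level and `leo_stage` virtualenv name are illustrative, not part of this fabfile):

# Hypothetical extra entry point following the dev()/pre()/online() pattern;
# it would be invoked from the shell as `fab stage`.
def stage(level='stage', env='leo_stage'):
    execute(_upload_service, level=level)               # pack and scp the tarball
    execute(_install_service, workon=env, level=level)  # unpack and setup.py install
    execute(_start, level=level, workon=env)            # bounce spumaster and fastmq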
39.166667
159
0.618782
948
6,815
4.330169
0.130802
0.076005
0.052619
0.041657
0.872838
0.860901
0.832887
0.780512
0.758831
0.737881
0
0.013078
0.203375
6,815
173
160
39.393064
0.743047
0.009831
0
0.767442
0
0
0.257597
0.046607
0
0
0
0
0
0
null
null
0
0.03876
null
null
0.077519
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
416e6c67df7a90e642f6631bf3630440f1383f14
123
py
Python
trends/filters.py
marissapang/covid19-Django
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
[ "Apache-2.0" ]
null
null
null
trends/filters.py
marissapang/covid19-Django
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
[ "Apache-2.0" ]
7
2020-04-12T22:42:55.000Z
2021-09-22T18:48:51.000Z
trends/filters.py
marissapang/covid19-Django
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
[ "Apache-2.0" ]
null
null
null
from django.contrib.auth.models import User

import django_filters


class CountryFilter(django_filters.FilterSet):
    pass
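`CountryFilter` is declared but left empty; a hedged sketch of how a `django_filters.FilterSet` is typically filled in. The `UserFilter` below reuses the already-imported `User` model purely for illustration (the real filter presumably targets a country model that is not shown):

# Hedged sketch only: a filled-in FilterSet against the imported User model.
import django_filters
from django.contrib.auth.models import User


class UserFilter(django_filters.FilterSet):
    # case-insensitive substring match on the username field
    username = django_filters.CharFilter(lookup_expr='icontains')

    class Meta:
        model = User
        fields = ['username', 'is_active']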
20.5
46
0.829268
16
123
6.25
0.75
0.26
0
0
0
0
0
0
0
0
0
0
0.113821
123
5
47
24.6
0.917431
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
4180cbfade79f931f22d4b36181aa1f37f7269ee
192
py
Python
ada_loss/chainer_impl/__init__.py
kumasento/gradient-scaling
0ca435433b9953e33656173c4d60ebd61c5c5e87
[ "MIT" ]
7
2020-08-12T12:04:28.000Z
2021-11-22T15:56:08.000Z
ada_loss/chainer_impl/__init__.py
kumasento/gradient-scaling
0ca435433b9953e33656173c4d60ebd61c5c5e87
[ "MIT" ]
1
2021-10-07T08:37:39.000Z
2021-10-08T02:41:39.000Z
ada_loss/chainer_impl/__init__.py
kumasento/gradient-scaling
0ca435433b9953e33656173c4d60ebd61c5c5e87
[ "MIT" ]
null
null
null
from ada_loss.chainer_impl.ada_loss_scaled import AdaLossScaled

# all the transformations
from ada_loss.chainer_impl.ada_loss_transforms import *
from ada_loss.chainer_impl import transforms
32
63
0.875
29
192
5.448276
0.413793
0.221519
0.208861
0.341772
0.506329
0.367089
0.367089
0
0
0
0
0
0.088542
192
5
64
38.4
0.902857
0.119792
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
68fda1df1d75d3352993a629c68dbc2a010871af
151
py
Python
tests/test_async_rust_sleep.py
Pure-Peace/peace-performance-python
51bac7c346aeaac5b62b602ff0ec16acced87e7b
[ "MIT" ]
8
2021-08-07T19:43:17.000Z
2022-02-02T11:51:42.000Z
tests/test_async_rust_sleep.py
Pure-Peace/peace-performance-python
51bac7c346aeaac5b62b602ff0ec16acced87e7b
[ "MIT" ]
1
2021-08-08T08:38:50.000Z
2021-08-08T08:38:50.000Z
tests/test_async_rust_sleep.py
Pure-Peace/peace-performance-python
51bac7c346aeaac5b62b602ff0ec16acced87e7b
[ "MIT" ]
3
2021-08-08T04:30:29.000Z
2021-08-18T22:52:05.000Z
from peace_performance_python.functions import rust_sleep

from . import async_run


def test_async_rust_sleep() -> None:
    async_run(rust_sleep(0))
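`async_run` is imported from the test package's `__init__`, which is not shown here; a plausible minimal stand-in (an assumption, not the package's actual helper) would simply drive the coroutine to completion:

# Assumed stand-in for the unshown `async_run` helper: run a coroutine to
# completion on a fresh event loop (equivalent to asyncio.run).
import asyncio
from typing import Any, Coroutine


def async_run(coro: Coroutine[Any, Any, Any]) -> Any:
    return asyncio.run(coro)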
18.875
57
0.794702
23
151
4.826087
0.608696
0.243243
0
0
0
0
0
0
0
0
0
0.007634
0.13245
151
7
58
21.571429
0.839695
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
ec1ccd348f2ec84603fa123aa453f1e8592a72b7
2,100
py
Python
layout/misc.py
euxhenh/cellar
679387216043f3d287ea29a15f78868f412d2948
[ "MIT" ]
9
2021-09-08T16:56:45.000Z
2021-12-12T03:13:29.000Z
layout/misc.py
euxhenh/cellar
679387216043f3d287ea29a15f78868f412d2948
[ "MIT" ]
null
null
null
layout/misc.py
euxhenh/cellar
679387216043f3d287ea29a15f78868f412d2948
[ "MIT" ]
1
2022-01-20T03:04:44.000Z
2022-01-20T03:04:44.000Z
empty_figure = {
    "layout": {
        "xaxis": {"visible": False},
        "yaxis": {"visible": False},
        "annotations": [
            {
                "text": "Nothing to show. Load the data and "
                "run dimensionality reduction.",
                "xref": "paper",
                "yref": "paper",
                "showarrow": False,
                "font": {"size": 14},
            }
        ],
        "height": "700",
    }
}

empty_analysis_figure = {
    "layout": {
        "xaxis": {"visible": False},
        "yaxis": {"visible": False},
        "annotations": [
            {
                "text": "No heatmap or violin plot to show. " + "Select genes first.",
                "xref": "paper",
                "yref": "paper",
                "showarrow": False,
                "font": {"size": 14},
            }
        ],
        # "height": "650"
    }
}

empty_colocalization_figure = {
    "layout": {
        "xaxis": {"visible": False},
        "yaxis": {"visible": False},
        "annotations": [
            {
                "text": "Nothing to show. Load a spatial tile first.",
                "xref": "paper",
                "yref": "paper",
                "showarrow": False,
                "font": {"size": 14},
            }
        ],
        # "height": "650"
    }
}

empty_spatial_figure = {
    "layout": {
        "xaxis": {"visible": False},
        "yaxis": {"visible": False},
        "annotations": [
            {
                "text": "Nothing to show. Load a spatial tile first.",
                "xref": "paper",
                "yref": "paper",
                "showarrow": False,
                "font": {"size": 14},
            }
        ],
        "width": "1000",
    }
}
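These are plain Plotly figure specifications (paper-anchored annotations over hidden axes), so any of them can be handed to a figure constructor or a Dash `dcc.Graph`; a hedged usage sketch:

# Hedged usage sketch: render one of the placeholder specs above.
import plotly.graph_objects as go

fig = go.Figure(empty_figure)  # shows only the centered annotation text
fig.show()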
21.649485
70
0.318095
127
2,100
5.204724
0.346457
0.145234
0.102874
0.145234
0.810893
0.810893
0.810893
0.810893
0.810893
0.810893
0
0.021944
0.544286
2,100
96
71
21.875
0.668757
0.014762
0
0.545455
0
0
0.262343
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
ec238809b37030fa483f39d83baea2f7036c8dcc
756
py
Python
IMS/ims_users/permissions.py
AyushPaudel/Inventory-Management-System
04e57b0d02b1b7cade992b959569e750ca339c8e
[ "MIT" ]
2
2021-09-01T13:00:24.000Z
2021-11-19T12:16:52.000Z
IMS/ims_users/permissions.py
aadarshadhakalg/Inventory-Management-System-1
075ec49b9d4abebb7d9a0b150a6cb70f6cbf5144
[ "MIT" ]
null
null
null
IMS/ims_users/permissions.py
aadarshadhakalg/Inventory-Management-System-1
075ec49b9d4abebb7d9a0b150a6cb70f6cbf5144
[ "MIT" ]
1
2021-12-23T23:41:20.000Z
2021-12-23T23:41:20.000Z
from rest_framework.permissions import BasePermission


class adminPermission(BasePermission):
    def has_permission(self, request, view):
        return request.user.is_authenticated and request.user.user_type == 'AD'


class staffPermission(BasePermission):
    def has_permission(self, request, view):
        return request.user.is_authenticated and request.user.user_type == 'ST'


class StaffOrAdmin(BasePermission):
    def has_permission(self, request, view):
        return request.user.is_authenticated and (
            request.user.user_type == 'ST' or request.user.user_type == 'AD'
        )


class customerPermission(BasePermission):
    def has_permission(self, request, view):
        return request.user.is_authenticated and request.user.user_type == 'CU'
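A hedged sketch of how these classes plug into a DRF view; `InventoryView` and its payload are illustrative names, not code from this repository:

# Hedged usage sketch: attach one of the permission classes above to a view.
from rest_framework.views import APIView
from rest_framework.response import Response


class InventoryView(APIView):
    permission_classes = [StaffOrAdmin]  # rejects any non-staff, non-admin user

    def get(self, request):
        return Response({"ok": True})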
31.5
115
0.752646
92
756
6.032609
0.293478
0.178378
0.135135
0.171171
0.78018
0.78018
0.720721
0.720721
0.720721
0.720721
0
0
0.150794
756
23
116
32.869565
0.864486
0
0
0.307692
0
0
0.013228
0
0
0
0
0
0
1
0.307692
false
0
0.076923
0.307692
1
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
8
6be2a74246dfad622fae84a189603f927215bc57
47,452
py
Python
billforward/apis/products_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
2
2016-11-23T17:32:37.000Z
2022-02-24T05:13:20.000Z
billforward/apis/products_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
null
null
null
billforward/apis/products_api.py
billforward/bf-python
d2b812329ca3ed1fd94364d7f46f69ad74665596
[ "Apache-2.0" ]
1
2016-12-30T20:02:48.000Z
2016-12-30T20:02:48.000Z
# coding: utf-8

"""
BillForward REST API

OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient


class ProductsApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def create_product(self, product, **kwargs):
        """
        Create a product.
        {\"nickname\":\"Create a new product\",\"request\":\"createProductRequest.html\",\"response\":\"createProductResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_product(product, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param Product product: The product object to be updated. (required)
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_product_with_http_info(product, **kwargs)
        else:
            data = self.create_product_with_http_info(product, **kwargs)
            return data

    def create_product_with_http_info(self, product, **kwargs):
        """
        Create a product.
        {\"nickname\":\"Create a new product\",\"request\":\"createProductRequest.html\",\"response\":\"createProductResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_product_with_http_info(product, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param Product product: The product object to be updated. (required)
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product' is set
        if ('product' not in params) or (params['product'] is None):
            raise ValueError("Missing the required parameter `product` when calling `create_product`")

        resource_path = '/products'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'product' in params:
            body_params = params['product']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/xml', 'application/xml', 'application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'POST', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='ProductPagedMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def delete_metadata_for_product(self, product_id, **kwargs):
        """
        Remove any associated metadata.
        {\"nickname\":\"Clear metadata from product\",\"request\":\"deleteProductMetadataRequest.html\",\"response\":\"deleteProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_metadata_for_product(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_metadata_for_product_with_http_info(product_id, **kwargs)
        else:
            data = self.delete_metadata_for_product_with_http_info(product_id, **kwargs)
            return data

    def delete_metadata_for_product_with_http_info(self, product_id, **kwargs):
        """
        Remove any associated metadata.
        {\"nickname\":\"Clear metadata from product\",\"request\":\"deleteProductMetadataRequest.html\",\"response\":\"deleteProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.delete_metadata_for_product_with_http_info(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_metadata_for_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `delete_metadata_for_product`")

        resource_path = '/products/{product-ID}/metadata'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'DELETE', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='DynamicMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def get_all_products(self, **kwargs):
        """
        Returns a collection of products. By default 10 values are returned.
        Records are returned in natural order.
        {\"nickname\":\"Get all products\",\"response\":\"getProductAll.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_all_products(callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first product to return.
        :param int records: The maximum number of products to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :param bool include_retired: Whether retired products should be returned.
        :param str metadata:
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_all_products_with_http_info(**kwargs)
        else:
            data = self.get_all_products_with_http_info(**kwargs)
            return data

    def get_all_products_with_http_info(self, **kwargs):
        """
        Returns a collection of products. By default 10 values are returned.
        Records are returned in natural order.
        {\"nickname\":\"Get all products\",\"response\":\"getProductAll.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_all_products_with_http_info(callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first product to return.
        :param int records: The maximum number of products to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :param bool include_retired: Whether retired products should be returned.
        :param str metadata:
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['organizations', 'offset', 'records', 'order_by', 'order', 'include_retired', 'metadata']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_products" % key
                )
            params[key] = val
        del params['kwargs']

        resource_path = '/products'.replace('{format}', 'json')
        path_params = {}

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'offset' in params:
            query_params['offset'] = params['offset']
        if 'records' in params:
            query_params['records'] = params['records']
        if 'order_by' in params:
            query_params['order_by'] = params['order_by']
        if 'order' in params:
            query_params['order'] = params['order']
        if 'include_retired' in params:
            query_params['include_retired'] = params['include_retired']
        if 'metadata' in params:
            query_params['metadata'] = params['metadata']

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type([])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'GET', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='ProductPagedMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def get_metadata_for_product(self, product_id, **kwargs):
        """
        Retrieve any associated metadata.
        {\"nickname\":\"Retrieve metadata on product\",\"request\":\"getProductMetadataRequest.html\",\"response\":\"getProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_metadata_for_product(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_metadata_for_product_with_http_info(product_id, **kwargs)
        else:
            data = self.get_metadata_for_product_with_http_info(product_id, **kwargs)
            return data

    def get_metadata_for_product_with_http_info(self, product_id, **kwargs):
        """
        Retrieve any associated metadata.
        {\"nickname\":\"Retrieve metadata on product\",\"request\":\"getProductMetadataRequest.html\",\"response\":\"getProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_metadata_for_product_with_http_info(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_metadata_for_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `get_metadata_for_product`")

        resource_path = '/products/{product-ID}/metadata'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'GET', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='DynamicMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def get_product_by_id(self, product_id, **kwargs):
        """
        Returns a single product, specified by the product-ID parameter.
        {\"nickname\":\"Retrieve an existing product\",\"response\":\"getProductByID.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_product_by_id(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: ID or name of the product. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first product-rate-plan to return.
        :param int records: The maximum number of product-rate-plans to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :param bool include_retired: Whether retired products should be returned.
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_product_by_id_with_http_info(product_id, **kwargs)
        else:
            data = self.get_product_by_id_with_http_info(product_id, **kwargs)
            return data

    def get_product_by_id_with_http_info(self, product_id, **kwargs):
        """
        Returns a single product, specified by the product-ID parameter.
        {\"nickname\":\"Retrieve an existing product\",\"response\":\"getProductByID.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_product_by_id_with_http_info(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: ID or name of the product. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first product-rate-plan to return.
        :param int records: The maximum number of product-rate-plans to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :param bool include_retired: Whether retired products should be returned.
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product_id', 'organizations', 'offset', 'records', 'order_by', 'order', 'include_retired']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_product_by_id" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `get_product_by_id`")

        resource_path = '/products/{product-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'offset' in params:
            query_params['offset'] = params['offset']
        if 'records' in params:
            query_params['records'] = params['records']
        if 'order_by' in params:
            query_params['order_by'] = params['order_by']
        if 'order' in params:
            query_params['order'] = params['order']
        if 'include_retired' in params:
            query_params['include_retired'] = params['include_retired']

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'GET', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='ProductPagedMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def retire_product(self, product_id, **kwargs):
        """
        Deletes the product specified by the product-ID parameter.
        Any existing subscriptions will continue; it is a soft delete.
        {\"nickname\":\"Delete a product\",\"response\":\"deleteProduct.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.retire_product(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: ID of the Product. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.retire_product_with_http_info(product_id, **kwargs)
        else:
            data = self.retire_product_with_http_info(product_id, **kwargs)
            return data

    def retire_product_with_http_info(self, product_id, **kwargs):
        """
        Deletes the product specified by the product-ID parameter.
        Any existing subscriptions will continue; it is a soft delete.
        {\"nickname\":\"Delete a product\",\"response\":\"deleteProduct.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.retire_product_with_http_info(product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str product_id: ID of the Product. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method retire_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `retire_product`")

        resource_path = '/products/{product-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'DELETE', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='ProductPagedMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def set_metadata_for_product(self, metadata, product_id, **kwargs):
        """
        Remove any existing metadata keys and create the provided data.
        {\"nickname\":\"Set metadata on product\",\"request\":\"setProductMetadataRequest.html\",\"response\":\"setProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.set_metadata_for_product(metadata, product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param DynamicMetadata metadata: (required)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.set_metadata_for_product_with_http_info(metadata, product_id, **kwargs)
        else:
            data = self.set_metadata_for_product_with_http_info(metadata, product_id, **kwargs)
            return data

    def set_metadata_for_product_with_http_info(self, metadata, product_id, **kwargs):
        """
        Remove any existing metadata keys and create the provided data.
        {\"nickname\":\"Set metadata on product\",\"request\":\"setProductMetadataRequest.html\",\"response\":\"setProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.set_metadata_for_product_with_http_info(metadata, product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param DynamicMetadata metadata: (required)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['metadata', 'product_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_metadata_for_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'metadata' is set
        if ('metadata' not in params) or (params['metadata'] is None):
            raise ValueError("Missing the required parameter `metadata` when calling `set_metadata_for_product`")
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `set_metadata_for_product`")

        resource_path = '/products/{product-ID}/metadata'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'metadata' in params:
            body_params = params['metadata']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'POST', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='DynamicMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def update_product(self, product, **kwargs):
        """
        Update a product.
        {\"nickname\":\"Update a product\",\"request\":\"updateProductRequest.html\",\"response\":\"updateProductResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.update_product(product, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param Product product: The product object to be updated. (required)
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.update_product_with_http_info(product, **kwargs)
        else:
            data = self.update_product_with_http_info(product, **kwargs)
            return data

    def update_product_with_http_info(self, product, **kwargs):
        """
        Update a product.
        {\"nickname\":\"Update a product\",\"request\":\"updateProductRequest.html\",\"response\":\"updateProductResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.update_product_with_http_info(product, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param Product product: The product object to be updated. (required)
        :return: ProductPagedMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['product']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'product' is set
        if ('product' not in params) or (params['product'] is None):
            raise ValueError("Missing the required parameter `product` when calling `update_product`")

        resource_path = '/products'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'product' in params:
            body_params = params['product']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/xml', 'application/xml', 'application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'PUT', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='ProductPagedMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )

    def upsert_metadata_for_product(self, metadata, product_id, **kwargs):
        """
        Update any existing metadata key-values and insert any new key-values;
        no keys will be removed.
        {\"nickname\":\"Upsert metadata on product\",\"request\":\"upsertProductMetadataRequest.html\",\"response\":\"upsertProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.upsert_metadata_for_product(metadata, product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param DynamicMetadata metadata: (required)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.upsert_metadata_for_product_with_http_info(metadata, product_id, **kwargs)
        else:
            data = self.upsert_metadata_for_product_with_http_info(metadata, product_id, **kwargs)
            return data

    def upsert_metadata_for_product_with_http_info(self, metadata, product_id, **kwargs):
        """
        Update any existing metadata key-values and insert any new key-values;
        no keys will be removed.
        {\"nickname\":\"Upsert metadata on product\",\"request\":\"upsertProductMetadataRequest.html\",\"response\":\"upsertProductMetadataResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.upsert_metadata_for_product_with_http_info(metadata, product_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param DynamicMetadata metadata: (required)
        :param str product_id: (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: DynamicMetadata
                 If the method is called asynchronously, returns the request thread.
        """
        all_params = ['metadata', 'product_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method upsert_metadata_for_product" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'metadata' is set
        if ('metadata' not in params) or (params['metadata'] is None):
            raise ValueError("Missing the required parameter `metadata` when calling `upsert_metadata_for_product`")
        # verify the required parameter 'product_id' is set
        if ('product_id' not in params) or (params['product_id'] is None):
            raise ValueError("Missing the required parameter `product_id` when calling `upsert_metadata_for_product`")

        resource_path = '/products/{product-ID}/metadata'.replace('{format}', 'json')
        path_params = {}
        if 'product_id' in params:
            path_params['product-ID'] = params['product_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'metadata' in params:
            body_params = params['metadata']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(
            resource_path, 'PUT', path_params, query_params, header_params,
            body=body_params, post_params=form_params, files=local_var_files,
            response_type='DynamicMetadata', auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
        )
44.472352
157
0.588869
4,942
47,452
5.461149
0.055443
0.035348
0.018674
0.02401
0.959835
0.955389
0.953055
0.944978
0.93627
0.92923
0
0.000436
0.323274
47,452
1,066
158
44.514071
0.840029
0.386917
0
0.828974
1
0
0.180726
0.039509
0
0
0
0
0
1
0.038229
false
0
0.014085
0
0.108652
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d41e32ea1110c67d567c5cf35a4ee2adf609e70f
155
py
Python
privatekube/privatekube/platform/__init__.py
DelphianCalamity/PrivateKube
14f575e77021ab7baca30f4061140ec83bdc96a7
[ "Apache-2.0" ]
9
2021-06-16T00:22:45.000Z
2021-11-25T07:19:11.000Z
privatekube/privatekube/platform/__init__.py
DelphianCalamity/PrivateKube
14f575e77021ab7baca30f4061140ec83bdc96a7
[ "Apache-2.0" ]
2
2021-11-14T10:42:43.000Z
2022-03-16T03:43:22.000Z
privatekube/privatekube/platform/__init__.py
DelphianCalamity/PrivateKube
14f575e77021ab7baca30f4061140ec83bdc96a7
[ "Apache-2.0" ]
3
2021-04-08T08:08:48.000Z
2021-12-24T01:42:20.000Z
import privatekube.platform.stoppable_thread
import privatekube.platform.privacy_budget
import privatekube.platform.privacy_resource_client
import privatekube.platform.timer
155
155
0.903226
17
155
8
0.588235
0.558824
0.382353
0
0
0
0
0
0
0
0
0
0.025806
155
1
155
155
0.900662
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
2e432d7b01df2fb8ca3fd3aff489ace421db7504
155
py
Python
xrdtools/__init__.py
monkeyclass/xrdtools
c462bf71709c71f9600c916353f62f0d24e995b6
[ "MIT" ]
2
2017-02-27T20:25:47.000Z
2019-12-18T22:31:10.000Z
xrdtools/__init__.py
monkeyclass/xrdtools
c462bf71709c71f9600c916353f62f0d24e995b6
[ "MIT" ]
5
2015-10-17T00:09:06.000Z
2018-04-13T22:17:12.000Z
xrdtools/__init__.py
monkeyclass/xrdtools
c462bf71709c71f9600c916353f62f0d24e995b6
[ "MIT" ]
6
2016-08-02T23:28:00.000Z
2021-04-23T12:30:21.000Z
from xrdtools.io import read_xrdml  # noqa: F401
from xrdtools import utils  # noqa: F401
from xrdtools import tools  # noqa: F401

__version__ = '0.1.1'
22.142857
48
0.735484
24
155
4.541667
0.541667
0.330275
0.220183
0.366972
0.477064
0
0
0
0
0
0
0.095238
0.187097
155
6
49
25.833333
0.769841
0.206452
0
0
0
0
0.042017
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
2e79be134aa47099d74d0f78b55c089f17e9369d
7,129
py
Python
tests/test_spaces.py
neuroio/neuroio-python
160f96515877e5e2ee0e888b7424c77cb2d7496a
[ "MIT" ]
null
null
null
tests/test_spaces.py
neuroio/neuroio-python
160f96515877e5e2ee0e888b7424c77cb2d7496a
[ "MIT" ]
6
2021-09-06T08:23:09.000Z
2021-11-10T16:19:20.000Z
tests/test_spaces.py
neuroio/neuroio-python
160f96515877e5e2ee0e888b7424c77cb2d7496a
[ "MIT" ]
null
null
null
import pytest
import respx

from neuroio.constants import IAM_BASE_URL
from tests.utils import mock_query_params_all_combos


@respx.mock
def test_create_ok(client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/").respond(
        status_code=201,
        json={"id": 1, "name": "name"},
    )
    response = client.spaces.create(name="name")

    assert request.called
    assert response.status_code == 201
    assert response.json()["name"] == "name"


@respx.mock
def test_create_failed(client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/").respond(status_code=400)
    response = client.spaces.create(name="name")

    assert request.called
    assert response.status_code == 400


@respx.mock
@pytest.mark.asyncio
async def test_async_create_ok(async_client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/").respond(
        status_code=201,
        json={"id": 1, "name": "name"},
    )
    response = await async_client.spaces.create(name="name")

    assert request.called
    assert response.status_code == 201
    assert response.json()["name"] == "name"


@respx.mock
@pytest.mark.asyncio
async def test_async_create_failed(async_client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/").respond(status_code=400)
    response = await async_client.spaces.create(name="name")

    assert request.called
    assert response.status_code == 400


@respx.mock
def test_list_without_params(client):
    requests = mock_query_params_all_combos(
        f"{IAM_BASE_URL}/v1/spaces",
        "limit=20",
        "offset=0",
        "q=",
        json={"results": [{"id": 1, "name": "name"}]},
    )
    response = client.spaces.list()

    assert any([request.called for request in requests])
    assert response.status_code == 200
    assert response.json()["results"][0]["id"] == 1


@respx.mock
def test_list_with_params(client):
    requests = mock_query_params_all_combos(
        f"{IAM_BASE_URL}/v1/spaces",
        "limit=20",
        "offset=20",
        "q=test",
        json={"results": [{"id": 1, "name": "name"}]},
    )
    response = client.spaces.list(q="test", offset=20)

    assert any([request.called for request in requests])
    assert response.status_code == 200
    assert response.json()["results"][0]["id"] == 1


@respx.mock
@pytest.mark.asyncio
async def test_async_list_without_params(async_client):
    requests = mock_query_params_all_combos(
        f"{IAM_BASE_URL}/v1/spaces",
        "limit=20",
        "offset=0",
        "q=",
        json={"results": [{"id": 1, "name": "name"}]},
    )
    response = await async_client.spaces.list()

    assert any([request.called for request in requests])
    assert response.status_code == 200
    assert response.json()["results"][0]["id"] == 1


@respx.mock
@pytest.mark.asyncio
async def test_async_list_with_params(async_client):
    requests = mock_query_params_all_combos(
        f"{IAM_BASE_URL}/v1/spaces",
        "limit=20",
        "offset=20",
        "q=test",
        json={"results": [{"id": 1, "name": "name"}]},
    )
    response = await async_client.spaces.list(q="test", offset=20)

    assert any([request.called for request in requests])
    assert response.status_code == 200
    assert response.json()["results"][0]["id"] == 1


@respx.mock
def test_get_ok(client):
    request = respx.get(f"{IAM_BASE_URL}/v1/spaces/1/").respond(
        status_code=200,
        json={"id": 1, "name": "name"},
    )
    response = client.spaces.get(id=1)

    assert request.called
    assert response.status_code == 200
    assert response.json()["id"] == 1


@respx.mock
def test_get_not_found(client):
    request = respx.get(f"{IAM_BASE_URL}/v1/spaces/1/").respond(status_code=404)
    response = client.spaces.get(id=1)

    assert request.called
    assert response.status_code == 404


@respx.mock
@pytest.mark.asyncio
async def test_async_get_ok(async_client):
    request = respx.get(f"{IAM_BASE_URL}/v1/spaces/1/").respond(
        status_code=200,
        json={"id": 1, "name": "name"},
    )
    response = await async_client.spaces.get(id=1)

    assert request.called
    assert response.status_code == 200
    assert response.json()["id"] == 1


@respx.mock
@pytest.mark.asyncio
async def test_async_get_not_found(async_client):
    request = respx.get(f"{IAM_BASE_URL}/v1/spaces/1/").respond(status_code=404)
    response = await async_client.spaces.get(id=1)

    assert request.called
    assert response.status_code == 404


@respx.mock
def test_update_ok(client):
    request = respx.patch(f"{IAM_BASE_URL}/v1/spaces/1/").respond(
        status_code=200,
        json={"id": 1, "name": "new_name"},
    )
    response = client.spaces.update(id=1, name="new_name")

    assert request.called
    assert response.status_code == 200
    assert response.json()["name"] == "new_name"


@respx.mock
@pytest.mark.asyncio
async def test_async_update_ok(async_client):
    request = respx.patch(f"{IAM_BASE_URL}/v1/spaces/1/").respond(
        status_code=200,
        json={"id": 1, "name": "new_name"},
    )
    response = await async_client.spaces.update(id=1, name="new_name")

    assert request.called
    assert response.status_code == 200
    assert response.json()["name"] == "new_name"


@respx.mock
def test_delete_ok(client):
    request = respx.delete(f"{IAM_BASE_URL}/v1/spaces/1/").respond(status_code=202)
    response = client.spaces.delete(id=1)

    assert request.called
    assert response.status_code == 202


@respx.mock
@pytest.mark.asyncio
async def test_async_delete_ok(async_client):
    request = respx.delete(f"{IAM_BASE_URL}/v1/spaces/1/").respond(status_code=202)
    response = await async_client.spaces.delete(id=1)

    assert request.called
    assert response.status_code == 202


@respx.mock
def test_token_create_ok(client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/1/tokens/").respond(
        status_code=201,
        json={"is_active": True, "key": "key"},
    )
    response = client.spaces.token(id=1)

    assert request.called
    assert response.status_code == 201
    assert response.json()["key"] == "key"


@respx.mock
def test_token_create_failed(client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/1/tokens/").respond(status_code=400)
    response = client.spaces.token(id=1)

    assert request.called
    assert response.status_code == 400


@respx.mock
@pytest.mark.asyncio
async def test_async_token_create(async_client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/1/tokens/").respond(
        status_code=201,
        json={"is_active": True, "key": "key"},
    )
    response = await async_client.spaces.token(id=1)

    assert request.called
    assert response.status_code == 201
    assert response.json()["key"] == "key"


@respx.mock
@pytest.mark.asyncio
async def test_async_token_create_failed(async_client):
    request = respx.post(f"{IAM_BASE_URL}/v1/spaces/1/tokens/").respond(status_code=400)
    response = await async_client.spaces.token(id=1)

    assert request.called
    assert response.status_code == 400
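The helper mock_query_params_all_combos is imported from tests.utils but its body is not part of this record. A minimal sketch of what such a helper plausibly does follows — registering one respx route per ordering of the given query-string fragments, so the tests above can assert that at least one ordering was hit. The URL joining and the default status code here are assumptions, not the package's actual implementation.

from itertools import permutations

import respx


def mock_query_params_all_combos(url, *params, json=None, status_code=200):
    # Hypothetical reconstruction: one GET route per permutation of the
    # query params, since the client does not guarantee query-string order.
    routes = []
    for combo in permutations(params):
        routes.append(
            respx.get(f"{url}/?{'&'.join(combo)}").respond(
                status_code=status_code, json=json
            )
        )
    return routes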
26.600746
79
0.663768
987
7,129
4.60689
0.072948
0.079173
0.046184
0.048384
0.966132
0.944799
0.937321
0.934462
0.934462
0.926985
0
0.032861
0.188947
7,129
267
80
26.700375
0.753545
0
0
0.757282
0
0
0.130173
0.076869
0
0
0
0
0.252427
1
0.048544
false
0
0.019417
0
0.067961
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2e83c34a899798597eb8c8f98a9a569bc917f23c
1,457
py
Python
tests/test_order.py
nikoheikkila/semmy
cca9efcd65c6c4006bc0405780dcaca919d84b73
[ "MIT" ]
1
2022-02-13T18:07:10.000Z
2022-02-13T18:07:10.000Z
tests/test_order.py
nikoheikkila/semmy
cca9efcd65c6c4006bc0405780dcaca919d84b73
[ "MIT" ]
null
null
null
tests/test_order.py
nikoheikkila/semmy
cca9efcd65c6c4006bc0405780dcaca919d84b73
[ "MIT" ]
null
null
null
from pytest import mark

from semmy import Semver


@mark.parametrize(
    ("a", "b"),
    [
        [Semver(0, 1, 1), Semver(0, 1, 0)],
        [Semver(1, 2, 0), Semver(1, 1, 0)],
        [Semver(2, 0, 0), Semver(1, 0, 0)],
    ],
)
def test_greater(a: Semver, b: Semver) -> None:
    assert a > b
    assert not a < b


@mark.parametrize(
    ("a", "b"),
    [
        [Semver(0, 1, 1), Semver(0, 1, 0)],
        [Semver(0, 1, 1), Semver(0, 1, 1)],
        [Semver(1, 2, 0), Semver(1, 1, 0)],
        [Semver(1, 2, 0), Semver(1, 2, 0)],
        [Semver(2, 0, 0), Semver(1, 0, 0)],
        [Semver(2, 0, 0), Semver(2, 0, 0)],
    ],
)
def test_greater_or_equal(a: Semver, b: Semver) -> None:
    assert a >= b


def test_not_greater_than_object() -> None:
    assert not Semver().__gt__(object())


@mark.parametrize(
    ("a", "b"),
    [
        [Semver(0, 1, 1), Semver(0, 1, 2)],
        [Semver(1, 2, 0), Semver(1, 3, 0)],
        [Semver(2, 0, 0), Semver(3, 0, 0)],
    ],
)
def test_lesser(a: Semver, b: Semver) -> None:
    assert a < b
    assert not a > b


@mark.parametrize(
    ("a", "b"),
    [
        [Semver(0, 1, 1), Semver(0, 1, 2)],
        [Semver(0, 1, 1), Semver(0, 1, 1)],
        [Semver(1, 2, 0), Semver(1, 3, 0)],
        [Semver(1, 2, 0), Semver(1, 2, 0)],
        [Semver(2, 0, 0), Semver(3, 0, 0)],
        [Semver(2, 0, 0), Semver(2, 0, 0)],
    ],
)
def test_lesser_or_equal(a: Semver, b: Semver) -> None:
    assert a <= b
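These tests pin down the ordering protocol of Semver, including that __gt__ against a non-Semver object returns a falsy value rather than raising or returning NotImplemented. A minimal sketch of a Semver that satisfies them, assuming the class stores major/minor/patch and compares them lexicographically (the real semmy implementation may differ):

from dataclasses import dataclass


@dataclass
class Semver:
    major: int = 0
    minor: int = 0
    patch: int = 0

    def _key(self) -> tuple:
        return (self.major, self.minor, self.patch)

    def __gt__(self, other: object) -> bool:
        # Plain False for foreign types, so Semver().__gt__(object())
        # is falsy, as test_not_greater_than_object requires.
        return isinstance(other, Semver) and self._key() > other._key()

    def __lt__(self, other: object) -> bool:
        return isinstance(other, Semver) and self._key() < other._key()

    def __ge__(self, other: object) -> bool:
        return self == other or self > other

    def __le__(self, other: object) -> bool:
        return self == other or self < other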
22.765625
56
0.47838
236
1,457
2.885593
0.105932
0.226138
0.140969
0.105727
0.860499
0.801762
0.801762
0.801762
0.801762
0.707783
0
0.105366
0.2965
1,457
63
57
23.126984
0.559024
0
0
0.576923
0
0
0.005491
0
0
0
0
0
0.134615
1
0.096154
false
0
0.038462
0
0.134615
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
cf48616dd3a7f79ed19588c2adca9da9f8ffde7a
4,078
py
Python
display3D/image_resizer_fields.py
seVenVo1d/General-Relativity-Tensorial-Calculations
6c07823f74840352253c235af2e4dbe60044941a
[ "MIT" ]
1
2021-06-16T07:29:30.000Z
2021-06-16T07:29:30.000Z
display3D/image_resizer_fields.py
seVenVo1d/General-Relativity-Tensorial-Calculations
6c07823f74840352253c235af2e4dbe60044941a
[ "MIT" ]
null
null
null
display3D/image_resizer_fields.py
seVenVo1d/General-Relativity-Tensorial-Calculations
6c07823f74840352253c235af2e4dbe60044941a
[ "MIT" ]
1
2021-12-02T15:11:06.000Z
2021-12-02T15:11:06.000Z
from PIL import Image


def resize_cd_image3d(field_object):
    """
    Re-sizing the image of covariant derivative for a given field object for the case of 3D

    Args:
        field_object [str]: The name of the field object (scalar, vector or tensor)
    """
    if field_object == 'Scalar Field':
        im = Image.open(r'display3D\output images\cd_scalar_field.png')
        size = (500, 500)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_scalar_field.png'
    elif field_object == 'Type (1,0) Vector Field':
        im = Image.open(r'display3D\output images\cd_vector_field_10.png')
        size = (800, 600)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_vector_field_10.png'
    elif field_object == 'Type (0,1) Vector Field':
        im = Image.open(r'display3D\output images\cd_vector_field_01.png')
        size = (800, 600)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_vector_field_01.png'
    elif field_object == 'Type (2,0) Tensor Field':
        im = Image.open(r'display3D\output images\cd_tensor_field_20.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_tensor_field_20.png'
    elif field_object == 'Type (1,1) Tensor Field':
        im = Image.open(r'display3D\output images\cd_tensor_field_11.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_tensor_field_11.png'
    elif field_object == 'Type (0,2) Tensor Field':
        im = Image.open(r'display3D\output images\cd_tensor_field_02.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\cd_tensor_field_02.png'
    im.save(out_name, "PNG")
    im.close()


def resize_ld_image3d(field_object):
    """
    Re-sizing the image of lie derivative for a given field object for the case of 3D

    Args:
        field_object [str]: The name of the field object (scalar, vector or tensor)
    """
    if field_object == 'Scalar Field':
        im = Image.open(r'display3D\output images\ld_scalar_field.png')
        size = (500, 500)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_scalar_field.png'
    elif field_object == 'Type (1,0) Vector Field':
        im = Image.open(r'display3D\output images\ld_vector_field_10.png')
        size = (800, 600)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_vector_field_10.png'
    elif field_object == 'Type (0,1) Vector Field':
        im = Image.open(r'display3D\output images\ld_vector_field_01.png')
        size = (800, 600)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_vector_field_01.png'
    elif field_object == 'Type (2,0) Tensor Field':
        im = Image.open(r'display3D\output images\ld_tensor_field_20.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_tensor_field_20.png'
    elif field_object == 'Type (1,1) Tensor Field':
        im = Image.open(r'display3D\output images\ld_tensor_field_11.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_tensor_field_11.png'
    elif field_object == 'Type (0,2) Tensor Field':
        im = Image.open(r'display3D\output images\ld_tensor_field_02.png')
        size = (1200, 650)
        im.thumbnail(size, Image.ANTIALIAS)
        out_dim = im.size
        out_name = r'display3D\output images\ld_tensor_field_02.png'
    im.save(out_name, "PNG")
    im.close()
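The branches above differ only in file name and target size, and Image.ANTIALIAS was removed in Pillow 10 (Image.Resampling.LANCZOS has been the equivalent since Pillow 9.1). A hedged, table-driven sketch of the same resize for the covariant-derivative images — file names and sizes copied from the branches above, the modern resampling constant assumed available:

from PIL import Image

# (file name, target size) per field object, lifted from the if/elif chain.
CD_IMAGES_3D = {
    'Scalar Field': ('cd_scalar_field.png', (500, 500)),
    'Type (1,0) Vector Field': ('cd_vector_field_10.png', (800, 600)),
    'Type (0,1) Vector Field': ('cd_vector_field_01.png', (800, 600)),
    'Type (2,0) Tensor Field': ('cd_tensor_field_20.png', (1200, 650)),
    'Type (1,1) Tensor Field': ('cd_tensor_field_11.png', (1200, 650)),
    'Type (0,2) Tensor Field': ('cd_tensor_field_02.png', (1200, 650)),
}


def resize_cd_image3d_tabular(field_object):
    # Raises KeyError on an unknown field_object, instead of the original's
    # NameError at im.save().
    name, size = CD_IMAGES_3D[field_object]
    path = 'display3D\\output images\\' + name
    with Image.open(path) as im:
        im.thumbnail(size, Image.Resampling.LANCZOS)
        im.save(path, "PNG")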
37.759259
83
0.649338
598
4,078
4.237458
0.100334
0.094712
0.151539
0.208366
0.979479
0.979479
0.979479
0.979479
0.951066
0.949487
0
0.053514
0.239333
4,078
107
84
38.11215
0.763378
0.08411
0
0.658228
0
0
0.367391
0.18587
0
0
0
0
0
1
0.025316
false
0
0.012658
0
0.037975
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d8765cbacb4cd3662f8b64ee70c8aabcb7936b8c
41
py
Python
sabueso/tools/database_RCSB_PDB/__init__.py
dprada/sabueso
14843cf3522b5b89db5b61c1541a7015f114dd53
[ "MIT" ]
null
null
null
sabueso/tools/database_RCSB_PDB/__init__.py
dprada/sabueso
14843cf3522b5b89db5b61c1541a7015f114dd53
[ "MIT" ]
2
2022-01-31T21:22:17.000Z
2022-02-04T20:20:12.000Z
sabueso/tools/database_RCSB_PDB/__init__.py
dprada/sabueso
14843cf3522b5b89db5b61c1541a7015f114dd53
[ "MIT" ]
1
2021-07-20T15:01:14.000Z
2021-07-20T15:01:14.000Z
from .is_accessible import is_accessible
20.5
40
0.878049
6
41
5.666667
0.666667
0.705882
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d87f811db9a17bd199ebfea46bac9fb3d8e84824
30,926
py
Python
model_VAE.py
se7endragon/cnn_model
21d76edfa73bd679182430341979f8d17f7b2940
[ "MIT" ]
null
null
null
model_VAE.py
se7endragon/cnn_model
21d76edfa73bd679182430341979f8d17f7b2940
[ "MIT" ]
null
null
null
model_VAE.py
se7endragon/cnn_model
21d76edfa73bd679182430341979f8d17f7b2940
[ "MIT" ]
null
null
null
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import logging
import os
import time                      # needed by train() / timing reports
from datetime import timedelta   # needed by train() for the time-usage line

import utils

logging.basicConfig(level=logging.INFO, format='%(message)s')


class VAE_mnist():

    def __init__(self, sess,
                 batch_size=100,
                 report_period=100,
                 learning_rate=1e-3,
                 epoch_number=20,     # this is used when we train without using random batch
                 num_iteration=2e+4,  # this is used when we train with using random batch
                 middle_man_dim_1=1000,
                 middle_man_dim_2=1000,
                 latent_space_dim=10,
                 activation_function=tf.nn.relu,
                 alpha=0.5):
        """
        original_img ---> middle_man_1 ---> middle_man_2 ---> latent_space
                     ---> middle_man_2 ---> middle_man_1 ---> reconstructed_img:

        self.middle_man_dim_1 : dimension of middle_man_1 space
        self.middle_man_dim_2 : dimension of middle_man_2 space
        self.latent_space_dim : dimension of latent space
        """
        self.sess = sess
        self.batch_size = int(batch_size)
        self.report_period = int(report_period)
        self.learning_rate = float(learning_rate)
        self.epoch_number = int(epoch_number)
        self.num_iteration = int(num_iteration)
        self.middle_man_dim_1 = int(middle_man_dim_1)
        self.middle_man_dim_2 = int(middle_man_dim_2)
        self.latent_space_dim = int(latent_space_dim)
        self.activation_function = activation_function
        self.alpha = float(alpha)

    def data_loading(self, data):
        self.data = data
        # data pre-processing
        self.x_train, self.x_test, self.y_train, self.y_test, self.y_train_cls, self.y_test_cls = self.data
        self.class_names = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        self.num_test = self.y_test.shape[0]

    def encoding(self, x, E_W1, E_b1, E_W2, E_b2, E_W3_mu, E_b3_mu, E_W3_log_var, E_b3_log_var):
        h1 = self.activation_function(tf.matmul(x, E_W1) + E_b1)
        h2 = self.activation_function(tf.matmul(h1, E_W2) + E_b2)
        mu = tf.matmul(h2, E_W3_mu) + E_b3_mu
        log_var = tf.matmul(h2, E_W3_log_var) + E_b3_log_var
        return mu, log_var

    def sampling(self, mu, log_var):
        eps = tf.random_normal(shape=tf.shape(mu))
        z = mu + tf.exp(log_var / 2) * eps
        return z

    def decoding(self, z, D_W1, D_b1, D_W2, D_b2, D_W3, D_b3):
        h1 = self.activation_function(tf.matmul(z, D_W1) + D_b1)
        h2 = self.activation_function(tf.matmul(h1, D_W2) + D_b2)
        logits = tf.matmul(h2, D_W3) + D_b3
        probs = tf.nn.sigmoid(logits)
        return logits, probs

    def graph_construction(self):
        # data dimension
        self.img_size = 28
        self.img_size_flat = 784
        self.img_shape = (28, 28)
        self.num_classes = 10

        # placeholders
        self.x = tf.placeholder(tf.float32, shape=[None, self.img_size_flat], name='x')
        self.z = tf.placeholder(tf.float32, shape=[None, self.latent_space_dim], name='z')

        # weights
        self.E_W1 = tf.get_variable("E_W1", shape=(self.img_size_flat, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b1 = tf.get_variable("E_b1", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W2 = tf.get_variable("E_W2", shape=(self.middle_man_dim_1, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b2 = tf.get_variable("E_b2", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W3_mu = tf.get_variable("E_W3_mu", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                       initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_mu = tf.get_variable("E_b3_mu", shape=(self.latent_space_dim, ),
                                       initializer=tf.constant_initializer(0.0))
        self.E_W3_log_var = tf.get_variable("E_W3_log_var", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                            initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_log_var = tf.get_variable("E_b3_log_var", shape=(self.latent_space_dim, ),
                                            initializer=tf.constant_initializer(0.0))
        self.D_W1 = tf.get_variable("D_W1", shape=(self.latent_space_dim, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b1 = tf.get_variable("D_b1", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W2 = tf.get_variable("D_W2", shape=(self.middle_man_dim_2, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b2 = tf.get_variable("D_b2", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W3 = tf.get_variable("D_W3", shape=(self.middle_man_dim_1, self.img_size_flat),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b3 = tf.get_variable("D_b3", shape=(self.img_size_flat, ),
                                    initializer=tf.constant_initializer(0.0))

        # encoding, sampling, and decoding of x
        self.mu_x, self.log_var_x = self.encoding(self.x,
                                                  self.E_W1, self.E_b1, self.E_W2, self.E_b2,
                                                  self.E_W3_mu, self.E_b3_mu,
                                                  self.E_W3_log_var, self.E_b3_log_var)
        self.z_x = self.sampling(self.mu_x, self.log_var_x)
        self.logits_x, self.probs_x = self.decoding(self.z_x,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # reconstructed images
        self.x_reconstructed = self.probs_x

        # cost and optimizer
        self.cross_entropy = tf.reduce_sum(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits_x, labels=self.x), axis=1)
        self.cost_ce = tf.reduce_mean(self.cross_entropy)
        self.kl_divergence = tf.reduce_sum(
            tf.exp(self.log_var_x) + self.mu_x**2 - 1. - self.log_var_x, axis=1)
        self.cost_kl = tf.reduce_mean(self.kl_divergence)
        self.cost = self.cost_ce + self.alpha * self.cost_kl
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.cost)

        # decoding of z
        self.logits_z, self.probs_z = self.decoding(self.z,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # generated images
        self.x_generated = self.probs_z

    def train(self):
        for i in range(self.epoch_number):
            start_time = time.time()  # start time of this epoch
            training_batch = zip(range(0, len(self.y_train), self.batch_size),
                                 range(self.batch_size, len(self.y_train), self.batch_size))
            idx_for_print_cost = 0
            for start, end in training_batch:
                feed_dict = {self.x: self.x_train[start:end, :]}
                self.sess.run(self.optimizer, feed_dict=feed_dict)
                if idx_for_print_cost % self.report_period == 0:  # for every self.report_period train
                    cost_now = self.sess.run(self.cost, feed_dict=feed_dict)  # we compute cost now
                    print(idx_for_print_cost, cost_now)  # we print cost now
                idx_for_print_cost += 1
            end_time = time.time()  # end time of this epoch
            print("==========================================================")
            print("Epoch:", i)
            time_dif = end_time - start_time  # we check computing time for each epoch
            print("Time Usage: " + str(timedelta(seconds=int(round(time_dif)))))  # and print it
            self.plot_16_generated_images(figure_save_dir='./img', figure_index=i)

    def train_random_batch(self):
        for i in range(self.num_iteration):
            idx = np.random.choice(self.x_train.shape[0], size=self.batch_size, replace=False)  # random_batch
            x_batch = self.x_train[idx]  # random_batch
            feed_dict = {self.x: x_batch}
            self.sess.run(self.optimizer, feed_dict=feed_dict)
            if (i % self.report_period == 0) or (i == self.num_iteration - 1):
                loss = self.sess.run(self.cost, feed_dict=feed_dict)
                logging.info('train iter : {:6d} | loss : {:.6f}'.format(i, loss))
                self.plot_16_generated_images(figure_save_dir='./img', figure_index=i)

    def plot_16_generated_images(self, figure_save_dir, figure_index):
        if not os.path.exists(figure_save_dir):
            os.makedirs(figure_save_dir)
        feed_dict = {self.z: np.random.normal(0, 1, size=(16, self.latent_space_dim))}
        images = self.sess.run(self.x_generated, feed_dict=feed_dict)
        fig = utils.plot_16_images_2d_and_returen(images, img_shape=self.img_shape)
        plt.savefig(figure_save_dir + '/{}.png'.format(figure_index), bbox_inches='tight')
        plt.close(fig)

    def visualization_of_reconstruction(self):
        imgs_original = self.x_test[0:16, :]
        feed_dict = {self.x: imgs_original}
        imgs_recon = self.sess.run(self.x_reconstructed, feed_dict=feed_dict)
        fig = utils.plot_16_images_2d_and_returen(imgs_original, img_shape=self.img_shape)
        plt.show(fig)
        fig = utils.plot_16_images_2d_and_returen(imgs_recon, img_shape=self.img_shape)
        plt.show(fig)

    def visualization_of_16_loading_vectors(self):
        z_batch = np.zeros(shape=(16, self.latent_space_dim))
        for i in range(16):
            if i < self.latent_space_dim:
                z_batch[i, i] = 1
        feed_dict = {self.z: z_batch}
        images = self.sess.run(self.x_generated, feed_dict=feed_dict)
        fig = utils.plot_16_images_2d_and_returen(images, img_shape=self.img_shape)
        plt.show(fig)

    def visualization_of_zero_vector_in_latent_space(self):
        z_batch = np.zeros(shape=(1, self.latent_space_dim))
        feed_dict = {self.z: z_batch}
        img = self.sess.run(self.x_generated, feed_dict=feed_dict)
        fig = utils.plot_one_image(img, self.img_shape)
        plt.show(fig)

    def save(self, sess, save_path):
        self.saver = tf.train.Saver()
        self.sess = sess
        self.save_path = save_path
        self.save_dir = self.save_path.split('/')[0]
        if not os.path.isdir(self.save_dir):
            os.makedirs(self.save_dir)
        self.saver.save(sess=self.sess, save_path=self.save_path)
        print("Graph Saved")

    def restore(self, sess, save_path):
        self.saver = tf.train.Saver()
        self.sess = sess
        self.save_path = save_path
        self.save_dir = self.save_path.split('/')[0]
        if not os.path.isdir(self.save_dir):
            os.makedirs(self.save_dir)
        self.saver.restore(sess=self.sess, save_path=self.save_path)
        print("Graph Restored")


class DVAE_mnist(VAE_mnist):

    def __init__(self, sess,
                 batch_size=100,
                 report_period=100,
                 learning_rate=1e-3,
                 epoch_number=20,          # this is used when we train without using random batch
                 num_iteration=int(2e+4),  # this is used when we train with using random batch
                 middle_man_dim_1=int(1000),
                 middle_man_dim_2=int(1000),
                 latent_space_dim=int(10),
                 activation_function=tf.nn.relu,
                 alpha=0.5,
                 noise_factor=0.1):
        super().__init__(sess, batch_size, report_period, learning_rate, epoch_number,
                         num_iteration, middle_man_dim_1, middle_man_dim_2, latent_space_dim,
                         activation_function, alpha)
        self.noise_factor = float(noise_factor)

    def graph_construction(self):
        # data dimension
        self.img_size = 28
        self.img_size_flat = 784
        self.img_shape = (28, 28)
        self.num_classes = 10

        # placeholders
        self.x = tf.placeholder(tf.float32, shape=[None, self.img_size_flat], name='x')
        self.z = tf.placeholder(tf.float32, shape=[None, self.latent_space_dim], name='z')

        # weights
        self.E_W1 = tf.get_variable("E_W1", shape=(self.img_size_flat, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b1 = tf.get_variable("E_b1", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W2 = tf.get_variable("E_W2", shape=(self.middle_man_dim_1, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b2 = tf.get_variable("E_b2", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W3_mu = tf.get_variable("E_W3_mu", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                       initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_mu = tf.get_variable("E_b3_mu", shape=(self.latent_space_dim, ),
                                       initializer=tf.constant_initializer(0.0))
        self.E_W3_log_var = tf.get_variable("E_W3_log_var", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                            initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_log_var = tf.get_variable("E_b3_log_var", shape=(self.latent_space_dim, ),
                                            initializer=tf.constant_initializer(0.0))
        self.D_W1 = tf.get_variable("D_W1", shape=(self.latent_space_dim, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b1 = tf.get_variable("D_b1", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W2 = tf.get_variable("D_W2", shape=(self.middle_man_dim_2, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b2 = tf.get_variable("D_b2", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W3 = tf.get_variable("D_W3", shape=(self.middle_man_dim_1, self.img_size_flat),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b3 = tf.get_variable("D_b3", shape=(self.img_size_flat, ),
                                    initializer=tf.constant_initializer(0.0))

        # encoding, sampling, and decoding of x
        # Add noise to X
        self.x_noise = self.x + self.noise_factor * tf.random_normal(tf.shape(self.x))
        self.x_noise_clipped = tf.clip_by_value(self.x_noise, 0., 1.)
        self.mu_x, self.log_var_x = self.encoding(self.x_noise_clipped,
                                                  self.E_W1, self.E_b1, self.E_W2, self.E_b2,
                                                  self.E_W3_mu, self.E_b3_mu,
                                                  self.E_W3_log_var, self.E_b3_log_var)
        self.z_x = self.sampling(self.mu_x, self.log_var_x)
        self.logits_x, self.probs_x = self.decoding(self.z_x,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # reconstructed images
        self.x_reconstructed = self.probs_x

        # cost and optimizer
        self.cross_entropy = tf.reduce_sum(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits_x, labels=self.x), axis=1)
        self.cost_ce = tf.reduce_mean(self.cross_entropy)
        self.kl_divergence = tf.reduce_sum(
            tf.exp(self.log_var_x) + self.mu_x**2 - 1. - self.log_var_x, axis=1)
        self.cost_kl = tf.reduce_mean(self.kl_divergence)
        self.cost = self.cost_ce + self.alpha * self.cost_kl
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.cost)

        # decoding of z
        self.logits_z, self.probs_z = self.decoding(self.z,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # generated images
        self.x_generated = self.probs_z


class CVAE_mnist(VAE_mnist):

    def __init__(self, sess,
                 batch_size=100,
                 report_period=100,
                 learning_rate=1e-3,
                 epoch_number=20,          # this is used when we train without using random batch
                 num_iteration=int(2e+4),  # this is used when we train with using random batch
                 middle_man_dim_1=int(1000),
                 middle_man_dim_2=int(1000),
                 latent_space_dim=int(10),
                 activation_function=tf.nn.relu,
                 alpha=0.5):
        super().__init__(sess, batch_size, report_period, learning_rate, epoch_number,
                         num_iteration, middle_man_dim_1, middle_man_dim_2, latent_space_dim,
                         activation_function, alpha)

    def encoding(self, x, y, E_W1, E_b1, E_W2, E_b2, E_W3_mu, E_b3_mu, E_W3_log_var, E_b3_log_var):
        inputs = tf.concat(axis=1, values=[x, y])
        h1 = self.activation_function(tf.matmul(inputs, E_W1) + E_b1)
        h2 = self.activation_function(tf.matmul(h1, E_W2) + E_b2)
        mu = tf.matmul(h2, E_W3_mu) + E_b3_mu
        log_var = tf.matmul(h2, E_W3_log_var) + E_b3_log_var
        return mu, log_var

    def sampling(self, mu, log_var):
        eps = tf.random_normal(shape=tf.shape(mu))
        z = mu + tf.exp(log_var / 2) * eps
        return z

    def decoding(self, z, y, D_W1, D_b1, D_W2, D_b2, D_W3, D_b3):
        inputs = tf.concat(axis=1, values=[z, y])
        h1 = self.activation_function(tf.matmul(inputs, D_W1) + D_b1)
        h2 = self.activation_function(tf.matmul(h1, D_W2) + D_b2)
        logits = tf.matmul(h2, D_W3) + D_b3
        probs = tf.nn.sigmoid(logits)
        return logits, probs

    def graph_construction(self):
        # data dimension
        self.img_size = 28
        self.img_size_flat = 784
        self.img_shape = (28, 28)
        self.num_classes = 10

        # placeholders
        self.x = tf.placeholder(tf.float32, shape=[None, self.img_size_flat], name='x')
        self.y = tf.placeholder(tf.float32, shape=[None, self.num_classes], name='y')
        self.z = tf.placeholder(tf.float32, shape=[None, self.latent_space_dim], name='z')

        # weights
        self.E_W1 = tf.get_variable("E_W1", shape=(self.img_size_flat + self.num_classes, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b1 = tf.get_variable("E_b1", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W2 = tf.get_variable("E_W2", shape=(self.middle_man_dim_1, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b2 = tf.get_variable("E_b2", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.E_W3_mu = tf.get_variable("E_W3_mu", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                       initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_mu = tf.get_variable("E_b3_mu", shape=(self.latent_space_dim, ),
                                       initializer=tf.constant_initializer(0.0))
        self.E_W3_log_var = tf.get_variable("E_W3_log_var", shape=(self.middle_man_dim_2, self.latent_space_dim),
                                            initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.E_b3_log_var = tf.get_variable("E_b3_log_var", shape=(self.latent_space_dim, ),
                                            initializer=tf.constant_initializer(0.0))
        self.D_W1 = tf.get_variable("D_W1", shape=(self.latent_space_dim + self.num_classes, self.middle_man_dim_2),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b1 = tf.get_variable("D_b1", shape=(self.middle_man_dim_2, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W2 = tf.get_variable("D_W2", shape=(self.middle_man_dim_2, self.middle_man_dim_1),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b2 = tf.get_variable("D_b2", shape=(self.middle_man_dim_1, ),
                                    initializer=tf.constant_initializer(0.0))
        self.D_W3 = tf.get_variable("D_W3", shape=(self.middle_man_dim_1, self.img_size_flat),
                                    initializer=tf.contrib.layers.variance_scaling_initializer(mode="FAN_AVG"))
        # initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.D_b3 = tf.get_variable("D_b3", shape=(self.img_size_flat, ),
                                    initializer=tf.constant_initializer(0.0))

        # encoding, sampling, and decoding of x
        self.mu_x, self.log_var_x = self.encoding(self.x, self.y,
                                                  self.E_W1, self.E_b1, self.E_W2, self.E_b2,
                                                  self.E_W3_mu, self.E_b3_mu,
                                                  self.E_W3_log_var, self.E_b3_log_var)
        self.z_x = self.sampling(self.mu_x, self.log_var_x)
        self.logits_x, self.probs_x = self.decoding(self.z_x, self.y,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # reconstructed images
        self.x_reconstructed = self.probs_x

        # cost and optimizer
        self.cross_entropy = tf.reduce_sum(
            tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits_x, labels=self.x), axis=1)
        self.cost_ce = tf.reduce_mean(self.cross_entropy)
        self.kl_divergence = tf.reduce_sum(
            tf.exp(self.log_var_x) + self.mu_x**2 - 1. - self.log_var_x, axis=1)
        self.cost_kl = tf.reduce_mean(self.kl_divergence)
        self.cost = self.cost_ce + self.alpha * self.cost_kl
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.cost)

        # decoding of z
        self.logits_z, self.probs_z = self.decoding(self.z, self.y,
                                                    self.D_W1, self.D_b1, self.D_W2, self.D_b2,
                                                    self.D_W3, self.D_b3)

        # generated images
        self.x_generated = self.probs_z

    def train(self):
        for i in range(self.epoch_number):
            start_time = time.time()  # start time of this epoch
            training_batch = zip(range(0, len(self.y_train), self.batch_size),
                                 range(self.batch_size, len(self.y_train), self.batch_size))
            idx_for_print_cost = 0
            for start, end in training_batch:
                feed_dict = {self.x: self.x_train[start:end, :],
                             self.y: self.y_train[start:end, :]}
                self.sess.run(self.optimizer, feed_dict=feed_dict)
                if idx_for_print_cost % self.report_period == 0:  # for every self.report_period train
                    cost_now = self.sess.run(self.cost, feed_dict=feed_dict)  # we compute cost now
                    print(idx_for_print_cost, cost_now)  # we print cost now
                idx_for_print_cost += 1
            end_time = time.time()  # end time of this epoch
            print("==========================================================")
            print("Epoch:", i)
            time_dif = end_time - start_time  # we check computing time for each epoch
            print("Time Usage: " + str(timedelta(seconds=int(round(time_dif)))))  # and print it
            # note: the inherited plot_16_generated_images feeds only self.z; with this
            # class's y-conditioned decoder it would also need a feed for self.y
            self.plot_16_generated_images(figure_save_dir='./img', figure_index=i)

    def train_random_batch(self):
        for i in range(self.num_iteration):
            idx = np.random.choice(self.x_train.shape[0], size=self.batch_size, replace=False)  # random_batch
            x_batch = self.x_train[idx]
            y_batch = self.y_train[idx]  # random_batch
            feed_dict = {self.x: x_batch, self.y: y_batch}
            self.sess.run(self.optimizer, feed_dict=feed_dict)
            if (i % self.report_period == 0) or (i == self.num_iteration - 1):
                loss = self.sess.run(self.cost, feed_dict=feed_dict)
                logging.info('train iter : {:6d} | loss : {:.6f}'.format(i, loss))

    def visualization_of_reconstruction(self):
        imgs_original = self.x_test[0:16, :]
        labels_original = self.y_test[0:16, :]
        feed_dict = {self.x: imgs_original, self.y: labels_original}
        imgs_recon = self.sess.run(self.x_reconstructed, feed_dict=feed_dict)
        fig = utils.plot_16_images_2d_and_returen(imgs_original, img_shape=self.img_shape)
        plt.show(fig)
        fig = utils.plot_16_images_2d_and_returen(imgs_recon, img_shape=self.img_shape)
        plt.show(fig)

    def visualization_of_16_loading_vectors(self):
        z_batch = np.zeros(shape=(16, self.latent_space_dim))
        y_batch = np.array([[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                            [0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
                            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
                            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
                            [0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
                            [0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
                            [0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
                            [1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                            [0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
                            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
                            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0]]).astype(np.float32)
        for i in range(16):
            if i < self.latent_space_dim:
                z_batch[i, i] = 1
        feed_dict = {self.z: z_batch, self.y: y_batch}
        images = self.sess.run(self.x_generated, feed_dict=feed_dict)
        fig = utils.plot_16_images_2d_and_returen(images, img_shape=self.img_shape)
        plt.show(fig)

    def visualization_of_zero_vector_in_latent_space(self):
        z_batch = np.zeros(shape=(1, self.latent_space_dim))
        y_batch = np.array([[1, 0, 0, 0, 0, 0, 0, 0, 0, 0]]).astype(np.float32)
        feed_dict = {self.z: z_batch, self.y: y_batch}
        img = self.sess.run(self.x_generated, feed_dict=feed_dict)
        fig = utils.plot_one_image(img, self.img_shape)
        plt.show(fig)
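A sketch of how the class above would be driven under TensorFlow 1.x. load_mnist is a hypothetical stand-in for whatever loader produces the six-tuple that data_loading() unpacks, and the checkpoint path is illustrative:

import tensorflow as tf

sess = tf.Session()
model = VAE_mnist(sess, batch_size=100, latent_space_dim=10)

# Hypothetical loader returning
# (x_train, x_test, y_train, y_test, y_train_cls, y_test_cls).
model.data_loading(load_mnist())
model.graph_construction()

# The class never initializes variables itself, so do it before training.
sess.run(tf.global_variables_initializer())

model.train_random_batch()
model.visualization_of_reconstruction()
model.save(sess, save_path='checkpoints/vae')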
53.137457
181
0.566481
4,185
30,926
3.88411
0.058542
0.01944
0.022332
0.025838
0.922055
0.915964
0.90809
0.902676
0.89597
0.888957
0
0.036538
0.322997
30,926
581
182
53.228916
0.739839
0.08585
0
0.806527
0
0
0.023911
0.004121
0
0
0
0
0
1
0.060606
false
0
0.013986
0
0.095571
0.037296
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d8bd398339ef38d751d18de4656b7c2165543483
19,783
py
Python
sdk/python/pulumi_alicloud/arms/alert_contact.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
42
2019-03-18T06:34:37.000Z
2022-03-24T07:08:57.000Z
sdk/python/pulumi_alicloud/arms/alert_contact.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
152
2019-04-15T21:03:44.000Z
2022-03-29T18:00:57.000Z
sdk/python/pulumi_alicloud/arms/alert_contact.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
3
2020-08-26T17:30:07.000Z
2021-07-05T01:37:45.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities

__all__ = ['AlertContactArgs', 'AlertContact']


@pulumi.input_type
class AlertContactArgs:
    def __init__(__self__, *,
                 alert_contact_name: Optional[pulumi.Input[str]] = None,
                 ding_robot_webhook_url: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 phone_num: Optional[pulumi.Input[str]] = None,
                 system_noc: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a AlertContact resource.
        :param pulumi.Input[str] alert_contact_name: The name of the alert contact.
        :param pulumi.Input[str] ding_robot_webhook_url: The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] email: The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] phone_num: The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[bool] system_noc: Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        if alert_contact_name is not None:
            pulumi.set(__self__, "alert_contact_name", alert_contact_name)
        if ding_robot_webhook_url is not None:
            pulumi.set(__self__, "ding_robot_webhook_url", ding_robot_webhook_url)
        if email is not None:
            pulumi.set(__self__, "email", email)
        if phone_num is not None:
            pulumi.set(__self__, "phone_num", phone_num)
        if system_noc is not None:
            pulumi.set(__self__, "system_noc", system_noc)

    @property
    @pulumi.getter(name="alertContactName")
    def alert_contact_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the alert contact.
        """
        return pulumi.get(self, "alert_contact_name")

    @alert_contact_name.setter
    def alert_contact_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alert_contact_name", value)

    @property
    @pulumi.getter(name="dingRobotWebhookUrl")
    def ding_robot_webhook_url(self) -> Optional[pulumi.Input[str]]:
        """
        The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "ding_robot_webhook_url")

    @ding_robot_webhook_url.setter
    def ding_robot_webhook_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ding_robot_webhook_url", value)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)

    @property
    @pulumi.getter(name="phoneNum")
    def phone_num(self) -> Optional[pulumi.Input[str]]:
        """
        The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "phone_num")

    @phone_num.setter
    def phone_num(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "phone_num", value)

    @property
    @pulumi.getter(name="systemNoc")
    def system_noc(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        return pulumi.get(self, "system_noc")

    @system_noc.setter
    def system_noc(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "system_noc", value)


@pulumi.input_type
class _AlertContactState:
    def __init__(__self__, *,
                 alert_contact_name: Optional[pulumi.Input[str]] = None,
                 ding_robot_webhook_url: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 phone_num: Optional[pulumi.Input[str]] = None,
                 system_noc: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering AlertContact resources.
        :param pulumi.Input[str] alert_contact_name: The name of the alert contact.
        :param pulumi.Input[str] ding_robot_webhook_url: The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] email: The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] phone_num: The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[bool] system_noc: Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        if alert_contact_name is not None:
            pulumi.set(__self__, "alert_contact_name", alert_contact_name)
        if ding_robot_webhook_url is not None:
            pulumi.set(__self__, "ding_robot_webhook_url", ding_robot_webhook_url)
        if email is not None:
            pulumi.set(__self__, "email", email)
        if phone_num is not None:
            pulumi.set(__self__, "phone_num", phone_num)
        if system_noc is not None:
            pulumi.set(__self__, "system_noc", system_noc)

    @property
    @pulumi.getter(name="alertContactName")
    def alert_contact_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the alert contact.
        """
        return pulumi.get(self, "alert_contact_name")

    @alert_contact_name.setter
    def alert_contact_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alert_contact_name", value)

    @property
    @pulumi.getter(name="dingRobotWebhookUrl")
    def ding_robot_webhook_url(self) -> Optional[pulumi.Input[str]]:
        """
        The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "ding_robot_webhook_url")

    @ding_robot_webhook_url.setter
    def ding_robot_webhook_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ding_robot_webhook_url", value)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)

    @property
    @pulumi.getter(name="phoneNum")
    def phone_num(self) -> Optional[pulumi.Input[str]]:
        """
        The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "phone_num")

    @phone_num.setter
    def phone_num(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "phone_num", value)

    @property
    @pulumi.getter(name="systemNoc")
    def system_noc(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        return pulumi.get(self, "system_noc")

    @system_noc.setter
    def system_noc(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "system_noc", value)


class AlertContact(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 alert_contact_name: Optional[pulumi.Input[str]] = None,
                 ding_robot_webhook_url: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 phone_num: Optional[pulumi.Input[str]] = None,
                 system_noc: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Provides a Application Real-Time Monitoring Service (ARMS) Alert Contact resource.

        For information about Application Real-Time Monitoring Service (ARMS) Alert Contact and how to use it, see [What is Alert Contact](https://www.alibabacloud.com/help/en/doc-detail/42953.htm).

        > **NOTE:** Available in v1.129.0+.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        example = alicloud.arms.AlertContact("example",
            alert_contact_name="example_value",
            ding_robot_webhook_url="https://oapi.dingtalk.com/robot/send?access_token=91f2f6****",
            email="someone@example.com",
            phone_num="1381111****")
        ```

        ## Import

        Application Real-Time Monitoring Service (ARMS) Alert Contact can be imported using the id, e.g.

        ```sh
         $ pulumi import alicloud:arms/alertContact:AlertContact example <id>
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] alert_contact_name: The name of the alert contact.
        :param pulumi.Input[str] ding_robot_webhook_url: The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] email: The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] phone_num: The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[bool] system_noc: Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[AlertContactArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Application Real-Time Monitoring Service (ARMS) Alert Contact resource.

        For information about Application Real-Time Monitoring Service (ARMS) Alert Contact and how to use it, see [What is Alert Contact](https://www.alibabacloud.com/help/en/doc-detail/42953.htm).

        > **NOTE:** Available in v1.129.0+.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        example = alicloud.arms.AlertContact("example",
            alert_contact_name="example_value",
            ding_robot_webhook_url="https://oapi.dingtalk.com/robot/send?access_token=91f2f6****",
            email="someone@example.com",
            phone_num="1381111****")
        ```

        ## Import

        Application Real-Time Monitoring Service (ARMS) Alert Contact can be imported using the id, e.g.

        ```sh
         $ pulumi import alicloud:arms/alertContact:AlertContact example <id>
        ```

        :param str resource_name: The name of the resource.
        :param AlertContactArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(AlertContactArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 alert_contact_name: Optional[pulumi.Input[str]] = None,
                 ding_robot_webhook_url: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 phone_num: Optional[pulumi.Input[str]] = None,
                 system_noc: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AlertContactArgs.__new__(AlertContactArgs)

            __props__.__dict__["alert_contact_name"] = alert_contact_name
            __props__.__dict__["ding_robot_webhook_url"] = ding_robot_webhook_url
            __props__.__dict__["email"] = email
            __props__.__dict__["phone_num"] = phone_num
            __props__.__dict__["system_noc"] = system_noc
        super(AlertContact, __self__).__init__(
            'alicloud:arms/alertContact:AlertContact',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            alert_contact_name: Optional[pulumi.Input[str]] = None,
            ding_robot_webhook_url: Optional[pulumi.Input[str]] = None,
            email: Optional[pulumi.Input[str]] = None,
            phone_num: Optional[pulumi.Input[str]] = None,
            system_noc: Optional[pulumi.Input[bool]] = None) -> 'AlertContact':
        """
        Get an existing AlertContact resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] alert_contact_name: The name of the alert contact.
        :param pulumi.Input[str] ding_robot_webhook_url: The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] email: The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[str] phone_num: The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        :param pulumi.Input[bool] system_noc: Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _AlertContactState.__new__(_AlertContactState)

        __props__.__dict__["alert_contact_name"] = alert_contact_name
        __props__.__dict__["ding_robot_webhook_url"] = ding_robot_webhook_url
        __props__.__dict__["email"] = email
        __props__.__dict__["phone_num"] = phone_num
        __props__.__dict__["system_noc"] = system_noc
        return AlertContact(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="alertContactName")
    def alert_contact_name(self) -> pulumi.Output[Optional[str]]:
        """
        The name of the alert contact.
        """
        return pulumi.get(self, "alert_contact_name")

    @property
    @pulumi.getter(name="dingRobotWebhookUrl")
    def ding_robot_webhook_url(self) -> pulumi.Output[Optional[str]]:
        """
        The webhook URL of the DingTalk chatbot. For more information about how to obtain the URL, see Configure a DingTalk chatbot to send alert notifications: https://www.alibabacloud.com/help/en/doc-detail/106247.htm. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "ding_robot_webhook_url")

    @property
    @pulumi.getter
    def email(self) -> pulumi.Output[Optional[str]]:
        """
        The email address of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "email")

    @property
    @pulumi.getter(name="phoneNum")
    def phone_num(self) -> pulumi.Output[Optional[str]]:
        """
        The mobile number of the alert contact. You must specify at least one of the following parameters: PhoneNum, Email, and DingRobotWebhookUrl.
        """
        return pulumi.get(self, "phone_num")

    @property
    @pulumi.getter(name="systemNoc")
    def system_noc(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether the alert contact receives system notifications. Valid values: true: receives system notifications. false: does not receive system notifications.
        """
        return pulumi.get(self, "system_noc")
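Besides creating a contact (the Example Usage in the docstrings above), the generated get() hook supports adopting an existing contact by provider ID; a brief sketch with a made-up ID:

import pulumi
import pulumi_alicloud as alicloud

# "abc123" is a placeholder for a real ARMS alert contact ID.
existing = alicloud.arms.AlertContact.get("existing-contact", id="abc123")
pulumi.export("contactEmail", existing.email)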
50.725641
370
0.680433
2,453
19,783
5.283734
0.083979
0.05856
0.058329
0.061106
0.87717
0.866445
0.857804
0.851709
0.847774
0.840367
0
0.00556
0.227165
19,783
389
371
50.856041
0.842174
0.454886
0
0.777228
1
0
0.098776
0.024184
0
0
0
0
0
1
0.158416
false
0.004951
0.024752
0
0.277228
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d8e11a73fc4299a6cfae0ea508320adada8bb8a1
10,431
py
Python
db_adapter/curw_sim/grids/flo2d_grid_utils.py
CUrW-SL/curw_db_adapter
9d9ef24f42080910e0bd251bc7f001b0a4b0ab31
[ "MIT" ]
2
2019-04-26T07:50:33.000Z
2019-09-28T20:15:33.000Z
db_adapter/curw_sim/grids/flo2d_grid_utils.py
CUrW-SL/curw_db_adapter
9d9ef24f42080910e0bd251bc7f001b0a4b0ab31
[ "MIT" ]
1
2019-04-03T09:30:38.000Z
2019-04-20T18:11:59.000Z
db_adapter/curw_sim/grids/flo2d_grid_utils.py
shadhini/curw_db_adapter
4db8e1ea8794ffbd0dce29ac954a13315e83d843
[ "MIT" ]
null
null
null
import traceback
import csv
import pkg_resources

from db_adapter.logger import logger


def add_flo2d_raincell_grid_mappings(pool, grid_interpolation, flo2d_model, obs_map_file_path, d03_map_file_path=None):
    """
    Add flo2d raincell grid mappings to the database
    :param pool: database connection pool
    :param grid_interpolation: grid interpolation method
    :param flo2d_model: string: flo2d model (e.g. FLO2D_250, FLO2D_150, FLO2D_30)
    :param obs_map_file_path: path to file containing flo2d grids to rainfall observational stations mapping
    :param d03_map_file_path: path to file containing flo2d grids to d03 stations mapping
    :return: affected row count if the insertion is successful; raises on failure
    """

    # [flo2d_250_station_id, ob_1_id, ob_1_dist, ob_2_id, ob_2_dist, ob_3_id, ob_3_dist]
    with open(obs_map_file_path, 'r') as f2:
        flo2d_obs_mapping = [line for line in csv.reader(f2)][1:]

    grid_mappings_list = []

    if d03_map_file_path is not None:
        # [flo2d_grid_id, nearest_d03_station_id, dist]
        with open(d03_map_file_path, 'r') as f1:
            flo2d_d03_mapping = [line for line in csv.reader(f1)][1:]
        for index in range(len(flo2d_obs_mapping)):
            cell_id = flo2d_obs_mapping[index][0]
            obs1 = flo2d_obs_mapping[index][1]
            obs2 = flo2d_obs_mapping[index][3]
            obs3 = flo2d_obs_mapping[index][5]
            fcst = flo2d_d03_mapping[index][1]
            grid_mapping = ['{}_{}_{}'.format(flo2d_model, grid_interpolation, str(cell_id).zfill(10)),
                            obs1, obs2, obs3, fcst]
            grid_mappings_list.append(tuple(grid_mapping))
        sql_statement = "INSERT INTO `grid_map_flo2d_raincell` (`grid_id`, `obs1`, `obs2`, `obs3`, `fcst`)" \
                        " VALUES ( %s, %s, %s, %s, %s) " \
                        "ON DUPLICATE KEY UPDATE `obs1`=VALUES(`obs1`), `obs2`=VALUES(`obs2`), " \
                        "`obs3`=VALUES(`obs3`), `fcst`=VALUES(`fcst`);"
    else:
        for index in range(len(flo2d_obs_mapping)):
            cell_id = flo2d_obs_mapping[index][0]
            obs1 = flo2d_obs_mapping[index][1]
            obs2 = flo2d_obs_mapping[index][3]
            obs3 = flo2d_obs_mapping[index][5]
            grid_mapping = ['{}_{}_{}'.format(flo2d_model, grid_interpolation, str(cell_id).zfill(10)),
                            obs1, obs2, obs3]
            grid_mappings_list.append(tuple(grid_mapping))
        sql_statement = "INSERT INTO `grid_map_flo2d_raincell` (`grid_id`, `obs1`, `obs2`, `obs3`)" \
                        " VALUES ( %s, %s, %s, %s) " \
                        "ON DUPLICATE KEY UPDATE `obs1`=VALUES(`obs1`), `obs2`=VALUES(`obs2`), " \
                        "`obs3`=VALUES(`obs3`);"

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            row_count = cursor.executemany(sql_statement, grid_mappings_list)
        connection.commit()
        return row_count
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of flo2d raincell grid mappings failed."
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


def get_flo2d_cells_to_obs_grid_mappings(pool, grid_interpolation, flo2d_model):
    """
    Retrieve flo2d to obs grid mappings
    :param pool: database connection pool
    :param grid_interpolation: grid interpolation method
    :param flo2d_model: string: flo2d model (e.g. FLO2D_250, FLO2D_150, FLO2D_30)
    :return: dictionary with grid ids as keys and corresponding obs1, obs2, obs3 station ids as a list
    """

    flo2d_grid_mappings = {}

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            sql_statement = "SELECT * FROM `grid_map_flo2d_raincell` WHERE `grid_id` like %s ESCAPE '$'"
            row_count = cursor.execute(sql_statement,
                                       "flo2d$_{}$_{}$_%".format('$_'.join(flo2d_model.split('_')[1:]),
                                                                 grid_interpolation))
            if row_count > 0:
                results = cursor.fetchall()
                for row in results:  # avoid shadowing the builtin `dict`
                    flo2d_grid_mappings[row.get("grid_id")] = [row.get("obs1"), row.get("obs2"), row.get("obs3")]
                return flo2d_grid_mappings
            else:
                return None
    except Exception as exception:
        error_message = "Retrieving flo2d cells to obs grid mappings failed"
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


def get_flo2d_cells_to_wrf_grid_mappings(pool, grid_interpolation, flo2d_model):
    """
    Retrieve flo2d to wrf stations mappings
    :param pool: database connection pool
    :param grid_interpolation: grid interpolation method
    :param flo2d_model: string: flo2d model (e.g. FLO2D_250, FLO2D_150, FLO2D_30)
    :return: dictionary with grid ids as keys and corresponding wrf station ids as values
    """

    flo2d_grid_mappings = {}

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            sql_statement = "SELECT `grid_id`, `fcst` FROM `grid_map_flo2d_raincell` WHERE `grid_id` like %s ESCAPE '$'"
            row_count = cursor.execute(sql_statement,
                                       "flo2d$_{}$_{}$_%".format('$_'.join(flo2d_model.split('_')[1:]),
                                                                 grid_interpolation))
            if row_count > 0:
                results = cursor.fetchall()
                for row in results:
                    flo2d_grid_mappings[row.get("grid_id")] = row.get("fcst")
                return flo2d_grid_mappings
            else:
                return None
    except Exception as exception:
        error_message = "Retrieving flo2d cells to wrf grid mappings failed"
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


def add_flo2d_initial_conditions(pool, flo2d_model, initial_condition_file_path):
    """
    Add flo2d initial conditions to the database
    :param pool: database connection pool
    :param flo2d_model: string: flo2d model (e.g. enum values of FLO2D_250, FLO2D_150, FLO2D_30)
    :param initial_condition_file_path: path to the file with flo2d initial conditions
    :return: affected row count if the insertion is successful; raises on failure
    """

    with open(initial_condition_file_path, 'r') as f1:
        flo2d_init_cond = [line for line in csv.reader(f1)][1:]

    grid_mappings_list = []

    for index in range(len(flo2d_init_cond)):
        upstrm = flo2d_init_cond[index][0]
        downstrm = flo2d_init_cond[index][1]
        obs_wl = flo2d_init_cond[index][2]
        canal = flo2d_init_cond[index][3]
        grid_mapping = ['{}_{}_{}'.format(flo2d_model, upstrm, downstrm), upstrm, downstrm, canal, obs_wl]
        grid_mappings_list.append(tuple(grid_mapping))

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            sql_statement = "INSERT INTO `grid_map_flo2d_initial_cond` (`grid_id`, `up_strm`, `down_strm`, `canal_seg`, `obs_wl`)" \
                            " VALUES ( %s, %s, %s, %s, %s) " \
                            "ON DUPLICATE KEY UPDATE `up_strm`=VALUES(`up_strm`), `down_strm`=VALUES(`down_strm`), " \
                            "`canal_seg`=VALUES(`canal_seg`), `obs_wl`=VALUES(`obs_wl`);"
            row_count = cursor.executemany(sql_statement, grid_mappings_list)
        connection.commit()
        return row_count
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of {} initial conditions failed.".format(flo2d_model)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


def get_flo2d_initial_conditions(pool, flo2d_model):
    """
    Retrieve flo2d initial conditions
    :param pool: database connection pool
    :param flo2d_model: string: flo2d model (e.g. FLO2D_250, FLO2D_150, FLO2D_30)
    :return: dictionary with grid ids as keys and corresponding up_strm, down_strm, obs_wl,
             and obs_wl_down_strm as a list
    """

    initial_conditions = {}

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            sql_statement = "SELECT `grid_id`,`up_strm`,`down_strm`,`obs_wl`, `obs_wl_down_strm` FROM `grid_map_flo2d_initial_cond` " \
                            "WHERE `grid_id` like %s ESCAPE '$'"
            row_count = cursor.execute(sql_statement, "{}$_%".format(flo2d_model))
            if row_count > 0:
                results = cursor.fetchall()
                for row in results:
                    initial_conditions[row.get("grid_id")] = [row.get("up_strm"), row.get("down_strm"),
                                                              row.get("obs_wl"), row.get("obs_wl_down_strm")]
                return initial_conditions
            else:
                return None
    except Exception as exception:
        error_message = "Retrieving {} initial conditions failed".format(flo2d_model)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


def clear_initial_conditions(pool, flo2d_model):
    """
    Clear existing initial conditions of a given flo2d model from database
    :param pool: database connection pool
    :param flo2d_model: string: flo2d model (e.g. FLO2D_250, FLO2D_150, FLO2D_30)
    :return: affected row count if successful
    """

    connection = pool.connection()
    try:
        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `grid_map_flo2d_initial_cond` " \
                            "WHERE `grid_id` like %s ESCAPE '$'"
            row_count = cursor.execute(sql_statement, "{}$_%".format(flo2d_model))
        connection.commit()
        return row_count
    except Exception as exception:
        connection.rollback()
        error_message = "Deletion of {} initial conditions failed.".format(flo2d_model)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
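These helpers expect a DBUtils-style pool whose connection() method hands out PyMySQL-compatible connections. A minimal usage sketch follows; the pool settings, the 'MDPA' interpolation label, and the CSV paths are illustrative assumptions, not values taken from this module.

# Sketch only: pool parameters, the 'MDPA' label, and file paths are assumptions.
import pymysql
from dbutils.pooled_db import PooledDB  # DBUtils >= 2.0 import path

pool = PooledDB(creator=pymysql, maxconnections=5, host="localhost",
                user="curw", password="curw", database="curw_sim")

row_count = add_flo2d_raincell_grid_mappings(
    pool,
    grid_interpolation="MDPA",        # assumed label
    flo2d_model="FLO2D_250",
    obs_map_file_path="obs_map.csv",  # assumed path
    d03_map_file_path="d03_map.csv",  # assumed path
)
print("rows affected:", row_count)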
41.392857
139
0.628799
1,281
10,431
4.862607
0.120999
0.046556
0.026489
0.025686
0.815219
0.796597
0.762402
0.734789
0.726762
0.703484
0
0.031344
0.269006
10,431
251
140
41.557769
0.785574
0.189915
0
0.710059
0
0
0.180344
0.060518
0
0
0
0
0
1
0.035503
false
0
0.023669
0
0.112426
0.035503
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d8f1d6cdf7cb6bf9e25449f9f4f8c9533bab3e41
2,128
py
Python
node/blockchain/tests/test_blockchain_facade/test_get_primary_validator.py
thenewboston-developers/Node
e71a405f4867786a54dd17ddd97595dd3a630018
[ "MIT" ]
18
2021-11-30T04:02:13.000Z
2022-03-24T12:33:57.000Z
node/blockchain/tests/test_blockchain_facade/test_get_primary_validator.py
thenewboston-developers/Node
e71a405f4867786a54dd17ddd97595dd3a630018
[ "MIT" ]
1
2022-02-04T17:07:38.000Z
2022-02-04T17:07:38.000Z
node/blockchain/tests/test_blockchain_facade/test_get_primary_validator.py
thenewboston-developers/Node
e71a405f4867786a54dd17ddd97595dd3a630018
[ "MIT" ]
5
2022-01-31T05:28:13.000Z
2022-03-08T17:25:31.000Z
import pytest

from node.blockchain.facade import BlockchainFacade
from node.blockchain.models import AccountState, Schedule


@pytest.mark.django_db
def test_get_primary_validator_empty_schedule():
    assert not Schedule.objects.exists()
    assert BlockchainFacade.get_instance().get_primary_validator() is None


@pytest.mark.usefixtures('base_blockchain')
def test_get_primary_validator_basic(primary_validator_node):
    assert Schedule.objects.all().count() == 1
    schedule = Schedule.objects.get_or_none()
    assert schedule
    assert schedule._id == 0
    assert schedule.node_identifier == primary_validator_node.identifier

    facade = BlockchainFacade.get_instance()
    assert facade.get_next_block_number() == 1
    assert facade.get_primary_validator() == primary_validator_node


@pytest.mark.usefixtures('base_blockchain')
def test_get_primary_validator_exactly_next_block(primary_validator_node, regular_node):
    assert Schedule.objects.all().count() == 1
    schedule = Schedule.objects.get_or_none()
    assert schedule
    assert schedule._id == 0
    assert schedule.node_identifier == primary_validator_node.identifier

    facade = BlockchainFacade.get_instance()
    assert facade.get_next_block_number() == 1

    Schedule.objects.create(_id=1, node_identifier=regular_node.identifier)
    AccountState.objects.create(_id=regular_node.identifier, node=regular_node.dict())
    assert facade.get_primary_validator() == regular_node


@pytest.mark.usefixtures('base_blockchain')
def test_get_primary_validator_with_queue(primary_validator_node, regular_node):
    assert Schedule.objects.all().count() == 1
    schedule = Schedule.objects.get_or_none()
    assert schedule
    assert schedule._id == 0
    assert schedule.node_identifier == primary_validator_node.identifier

    facade = BlockchainFacade.get_instance()
    assert facade.get_next_block_number() == 1

    Schedule.objects.create(_id=2, node_identifier=regular_node.identifier)
    AccountState.objects.create(_id=regular_node.identifier, node=regular_node.dict())
    assert facade.get_primary_validator() == primary_validator_node
40.150943
88
0.788064
265
2,128
5.996226
0.173585
0.161108
0.095658
0.042794
0.82253
0.806167
0.806167
0.806167
0.764003
0.764003
0
0.005873
0.119831
2,128
52
89
40.923077
0.842499
0
0
0.682927
0
0
0.021147
0
0
0
0
0
0.487805
1
0.097561
false
0
0.073171
0
0.170732
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
2b4c216c3d7e84c2cbd9f2f6eb187ef1eae9f2cc
1,037
py
Python
elmo/moon_tracker/utils.py
stephenswat/eve_lunar_mining_organiser
2f6e84b0a9fc60588ca9bdc2ffd074be7fbf0b12
[ "MIT" ]
1
2017-09-20T09:15:14.000Z
2017-09-20T09:15:14.000Z
elmo/moon_tracker/utils.py
stephenswat/eve_lunar_mining_organiser
2f6e84b0a9fc60588ca9bdc2ffd074be7fbf0b12
[ "MIT" ]
2
2021-08-19T13:26:04.000Z
2021-08-19T13:26:08.000Z
elmo/moon_tracker/utils.py
stephenswat/eve_lunar_mining_organiser
2f6e84b0a9fc60588ca9bdc2ffd074be7fbf0b12
[ "MIT" ]
2
2017-10-09T20:15:03.000Z
2018-02-03T15:54:53.000Z
def user_can_view_scans(user, moon):
    return (
        user_can_delete_scans(user, moon) or
        user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or
        user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or
        user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region)
    )


def user_can_add_scans(user, moon):
    return (
        user_can_delete_scans(user, moon) or
        user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or
        user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or
        user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region)
    )


def user_can_delete_scans(user, moon):
    return (
        user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or
        user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or
        user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region)
    )
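These helpers layer object-level permissions from solar system to constellation to region, and delete rights imply add and view. A hedged sketch of guarding a Django view with them; the view function and PermissionDenied usage are illustrative, not part of this module.

# Illustrative guard; `request` and `moon` come from a hypothetical Django view.
from django.core.exceptions import PermissionDenied

def require_view_scans(request, moon):
    if not user_can_view_scans(request.user, moon):
        raise PermissionDenied()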
41.48
94
0.731919
161
1,037
4.341615
0.130435
0.090129
0.141631
0.180258
0.97568
0.967096
0.912732
0.793991
0.793991
0.65093
0
0
0.158149
1,037
24
95
43.208333
0.800687
0
0
0.25
0
0
0.228544
0.228544
0
0
0
0
0
1
0.15
false
0
0
0.15
0.3
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
8
995f0dd6c7a3cfc36cc04611593446bf657cdb17
3,090
py
Python
genetic_neural_network.py
JehunYoo/SnakeRL
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
[ "MIT" ]
null
null
null
genetic_neural_network.py
JehunYoo/SnakeRL
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
[ "MIT" ]
null
null
null
genetic_neural_network.py
JehunYoo/SnakeRL
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
[ "MIT" ]
null
null
null
import numpy as np
import scipy.special  # must be imported explicitly; `import scipy` alone does not expose scipy.special


class GeneticNeuralNetwork():

    def __init__(self, inodes, hnodes, onodes=4, activation='relu', eta=0.01,
                 classifier={'activation': 'softmax'}):
        '''
        inodes : int
        hnodes : list (the number of each ith hidden nodes)
        onodes : int (default 4)
        activation : string
        eta : float (learning rate)
        classifier : dict
        '''
        assert type(inodes) is int, 'inodes must be int type'
        assert type(hnodes) is list and ([True] * len(hnodes) == [type(val) is int for val in hnodes]), \
            'hnodes must be list of integer'
        assert type(onodes) is int, 'onodes must be int type'

        self.inodes = inodes
        self.hnodes = hnodes
        self.onodes = onodes
        self.eta = eta
        self.weight = np.array([], dtype=np.float64)

        if activation == 'relu':
            self.activation = lambda x: np.maximum(0, x)
        elif activation == 'sigmoid':
            self.activation = lambda x: scipy.special.expit(x)
        else:
            assert False, 'invalid activation'

        if classifier['activation'] == 'softmax':
            self.activation_clf = lambda x: scipy.special.softmax(x)
        else:
            self.activation_clf = self.activation

    def compile(self):
        pass

    def fit(self):
        pass

    def predict(self):
        pass

    def crossover(self):
        pass

    def mutation(self):
        pass
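Only the constructor is implemented; the evolutionary hooks are stubs. A hedged instantiation sketch, with arbitrary illustrative layer sizes:

# Sketch: layer sizes and input below are arbitrary illustrative values.
net = GeneticNeuralNetwork(inodes=32, hnodes=[20, 12], onodes=4, activation='relu')
x = np.random.rand(32) - 0.5
print(net.activation(x).min())  # ReLU clips negatives, so this prints 0.0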
28.611111
104
0.560518
350
3,090
4.914286
0.205714
0.081395
0.051163
0.030233
0.974419
0.974419
0.974419
0.974419
0.974419
0.974419
0
0.018438
0.33301
3,090
107
105
28.878505
0.816109
0
0
0.957746
0
0
0.107843
0
0
0
0
0
0.112676
0
null
null
0.140845
0.056338
null
null
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
9
99879864fecabb2fe0b38f62c96683e631a50408
17,268
py
Python
tagupy/design/generator/_dsd_ref.py
algebra-club/TaguPy
1ff5a792f7c78cfb6741cf27659215fef287a1c1
[ "MIT" ]
1
2021-08-21T07:36:24.000Z
2021-08-21T07:36:24.000Z
tagupy/design/generator/_dsd_ref.py
algebra-club/TaguPy
1ff5a792f7c78cfb6741cf27659215fef287a1c1
[ "MIT" ]
29
2021-08-15T18:12:58.000Z
2021-09-12T14:48:17.000Z
tagupy/design/generator/_dsd_ref.py
algebra-club/TaguPy
1ff5a792f7c78cfb6741cf27659215fef287a1c1
[ "MIT" ]
null
null
null
import numpy as np
from typing import Dict, List

_gen_vec = {
    4: [0, -1, 1],
    6: [0, -1, 1, 1, -1],
    8: [0, 1, 1, -1, 1, -1, -1],
    10: [[0, 1, -1, -1, 1], [-1, 1, 1, 1, 1]],
    12: [0, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1],
    14: [0, 1, -1, 1, 1, -1, -1, -1, -1, 1, 1, -1, 1],
    16: [],
    18: [0, -1, -1, 1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1],
    20: [0, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, -1, 1],
    22: [[0, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1], [0, 1, -1, 1, -1, -1, 1, -1, 1, 1, 1]],
    24: [0, 1, 1, 1, 1, -1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, 1, -1, -1, -1, -1],
    26: [[0, 1, -1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1], [-1, -1, 1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1]],
    28: [[0, 1, 1, 1, -1, -1, -1], [1, -1, 1, -1, 1, -1, -1], [1, 1, -1, 1, 1, -1, -1], [1, 1, 1, -1, 1, 1, 1]],
    30: [0, 1, -1, -1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 1, 1, -1, -1, 1],
    32: [0, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, -1],
    34: [[0, -1, 1, 1, -1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1],
         [0, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, 1]],
    36: [[0, 1, 1, 1, -1, 1, -1, -1, -1], [-1, -1, -1, 1, -1, 1, 1, -1, 1],
         [1, 1, -1, -1, -1, 1, -1, -1, -1], [1, 1, 1, 1, 1, -1, 1, 1, -1]],
    38: [0, -1, 1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, 1, -1],
    40: [],
    42: [0, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1],
    44: [0, 1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1],
    46: [],
    48: [0, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1],
    50: [[0, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, -1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1],
         [1, 1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, -1, 1, 1]],
}


def _cmateq5(sum_fac: int, gen_vec: Dict[int, List[int]]) -> np.ndarray:
    '''
    create a conference matrix of
    sum_fac: 4, 6, 8, 12, 14, 18, 20, 24, 30, 32, 38, 42, 44, 48

    Parameters
    ----------
    sum_fac: int
        sum of the number of factors(n_factor) and the number of fake factor
    gen_vec: dict[int, List[int]]
        list of vectors used for generating conference matrix

    returns
    -------
    cmat: np.ndarray(sum_fac * sum_fac if sum_fac is even)
        conference matrix

    Note
    ----
    conference matrices are constructed as:
    ([0, -ones(1, sum_fac - 1)],
     [ones(sum_fac - 1, 1), S])
    S is a circulant (0, ±1)-matrix of order(sum_fac - 1),
    which is generated by gen_vec
    '''
    v = gen_vec[sum_fac]
    s = np.concatenate([np.roll(v, i).reshape(1, -1) for i in range(len(v))], axis=0)
    one_vec = np.array([1 for i in range(sum_fac - 1)]).reshape(-1, 1)
    temp0 = np.concatenate([np.array([0]), -one_vec.reshape(-1)]).reshape(1, -1)
    temp1 = np.concatenate([one_vec, s], axis=1)
    c_mat = np.concatenate([temp0, temp1], axis=0)
    return c_mat


def _cmateq2(sum_fac: int, gen_vec: Dict[int, List[List[int]]]) -> np.ndarray:
    '''
    create a conference matrix of
    sum_fac: 10, 22, 26, 34, 50

    Parameters
    ----------
    sum_fac: int
        sum of the number of factors(n_factor) and the number of fake factor
    gen_vec: dict[int, List[int]]
        list of vectors used for generating conference matrix

    returns
    -------
    cmat: np.ndarray(sum_fac * sum_fac if sum_fac is even)
        conference matrix

    Note
    ----
    If A is a (0, ±1)-matrix of order m and B a ±1-matrix of the same order
    such that AB = BA and AA′ + BB′ = (2m − 1)I_m×m,
    then the following conference matrix of order 2m can be constructed
    (matching the code below):
    ([A, B],
     [B.T, -A.T])
    A and B are two circulant matrices generated by gen_vec

    Only for sum_fac = 22, alternative conference matrix is used based on the paper.
    See below for details.
    NGUYEN, N. & STYLIANOU, S. (2013).
    Constructing Definitive Screening Designs Using Cyclic Generators.
    Journal of Statistical Theory and Practice.
    DOI: 10.1080/15598608.2013.781891
    '''
    v0 = gen_vec[sum_fac][0]
    v1 = gen_vec[sum_fac][1]
    a = np.concatenate([np.roll(v0, i).reshape(1, -1) for i in range(len(v0))], axis=0)
    b = np.concatenate([np.roll(v1, i).reshape(1, -1) for i in range(len(v1))], axis=0)
    c_mat = np.block([
        [a, b],
        [b.T, -a.T]
    ])
    return c_mat


def _dsddb(sum_fac: int, gen_vec: Dict[int, List[int]]) -> np.ndarray:
    '''
    create a conference matrix of
    sum_fac: 16, 40

    Parameters
    ----------
    sum_fac: int
        sum of the number of factors(n_factor) and the number of fake factor
    gen_vec: dict[int, List[int]]
        list of vectors used for generating conference matrix

    returns
    -------
    cmat: np.ndarray(sum_fac * sum_fac if sum_fac is even)
        conference matrix

    Note
    ----
    If A is a (0, ±1)-matrix of order 1/2(sum_fac) and B a ±1-matrix of the same order
    such that AB = BA and AA′ + BB′ = (sum_fac − 1)I_1/2(sum_fac),1/2(sum_fac),
    then the following conference matrix of order sum_fac can be constructed
    (matching the code below):
    ([A, B],
     [B.T, -A.T])
    A is the conference matrix of the order 1/2(sum_fac) constructed by the gen_vec
    and B = A + I
    '''
    half_fac = int(sum_fac / 2)
    a = _cmateq5(half_fac, gen_vec)
    b = a + np.eye(int(sum_fac / 2))
    b = b.astype(int)
    c_mat = np.block([
        [a, b],
        [b.T, -a.T]
    ])
    return c_mat


def _dsdeq3(sum_fac: int, gen_vec: Dict[int, List[int]]) -> np.ndarray:
    '''
    create a conference matrix of
    sum_fac: 28, 36

    Parameters
    ----------
    sum_fac: int
        sum of the number of factors(n_factor) and the number of fake factor
    gen_vec: dict[int, List[int]]
        list of vectors used for generating conference matrix

    returns
    -------
    cmat: np.ndarray(sum_fac * sum_fac if sum_fac is even)
        conference matrix

    Note
    ----
    If A is a circulant (0,±1)-matrix of order 1/4(sum_fac) and B,C,D are circulant
    ±1-matrices of the same order such that
    AA′ + BB′ + CC′ + DD′ = (sum_fac − 1)I_1/4(sum_fac),1/4(sum_fac)
    then the following conference matrix of order sum_fac can be constructed:
    ([A, BR, CR, DR],
     [-BR, A, (D.T)R, -(C.T)R],
     [-CR, -(D.T)R, A, (B.T)R],
     [-DR, (C.T)R, -(B.T)R, A])
    A, B, C, and D are four circulant matrices generated by gen_vec
    '''
    a = np.concatenate([np.roll(gen_vec[sum_fac][0], i).reshape(1, -1)
                        for i in range(len(gen_vec[sum_fac][0]))], axis=0)
    b = np.concatenate([np.roll(gen_vec[sum_fac][1], i).reshape(1, -1)
                        for i in range(len(gen_vec[sum_fac][1]))], axis=0)
    c = np.concatenate([np.roll(gen_vec[sum_fac][2], i).reshape(1, -1)
                        for i in range(len(gen_vec[sum_fac][2]))], axis=0)
    d = np.concatenate([np.roll(gen_vec[sum_fac][3], i).reshape(1, -1)
                        for i in range(len(gen_vec[sum_fac][3]))], axis=0)
    r = np.eye(len(gen_vec[sum_fac][0]), dtype=int)[::-1]
    c_mat = np.block([
        [a, b @ r, c @ r, d @ r],
        [-b @ r, a, d.T @ r, -c.T @ r],
        [-c @ r, -d.T @ r, a, b.T @ r],
        [-d @ r, c.T @ r, -b.T @ r, a]
    ])
    return c_mat


def _dsd46():
    cmat = np.array([
        [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1],
        [1, 1, 0, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1],
        [1, 1, 1, 0, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1],
        [1, -1, -1, 1, 0, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1],
        [1, 1, -1, -1, 1, 0, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1],
        [1, -1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1],
        [1, -1, 1, -1, -1, -1, 1, 0, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1],
        [1, -1, -1, 1, 1, -1, -1, 1, 0, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, 1],
        [1, 1, -1, -1, -1, 1, -1, 1, 1, 0, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1],
        [1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 0, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1],
        [1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, 1, 1, 1],
        [1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1],
        [1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 0, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1],
        [1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 0, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1],
        [1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1],
        [1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 0, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1],
        [1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, 0, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1],
        [1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 0, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1],
        [1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 0, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1],
        [1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, 1],
        [1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1],
        [1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 0, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1],
        [1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 0, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1],
        [1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1],
        [1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 0, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1],
        [1, -1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, 0, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1],
        [1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 0, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1],
        [1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 0, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1],
        [1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1],
        [1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, 1],
        [1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 0, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, -1],
        [1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 0, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1],
        [1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1],
        [1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 0, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1],
        [1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, 0, 1, 1, 1, -1, -1, 1, 1, 1, -1, 1],
        [1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 0, -1, 1, 1, 1, -1, 1, 1, 1, -1],
        [1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 0, 1, 1, -1, 1, -1, -1, -1, 1],
        [1, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, 1, 1, -1, -1],
        [1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 0, 1, -1, -1, -1, 1, -1],
        [1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 0, 1, 1, -1, 1, -1],
        [1, 1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 0, 1, -1, -1, 1],
        [1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, 1, 0, 1, -1, -1],
        [1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, 1, 0, 1, 1],
        [1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, 1, 0, 1],
        [1, -1, 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 0]
    ])
    return cmat


def _get_dsd(n_factor: int, c_mat: np.ndarray) -> np.ndarray:
    '''
    create a definitive screening design from conference matrix

    Parameters
    ----------
    n_factor: int
        number of factors used in the experiment
    c_mat: np.ndarray
        conference matrix

    Returns
    -------
    d_mat(: np.ndarray(2(n_factor+n_fake) + 1) * n_factor) if n_factor+n_fake is even)
        experiment design of dsd
        n_fake = len(c_mat) - n_factor

    Note
    ----
    The design matrix for a DSD can be written as
    ([C],
     [-C],
     [zeros(1, len(C))])
    '''
    zero_vec = np.zeros((1, c_mat.shape[1]), dtype=int)
    d_mat = np.concatenate([c_mat, -c_mat, zero_vec], axis=0)[:, :n_factor]
    return d_mat
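Each Note above claims its construction yields a conference matrix C of order n: zero diagonal, ±1 off-diagonal, and C.T @ C == (n-1)·I, which is what makes the fold-over in _get_dsd a definitive screening design. A quick sanity sketch, assuming it is run in the same module as the private helpers above:

# Sanity sketch: verify the conference-matrix property for the smallest generator.
n = 6
c = _cmateq5(n, _gen_vec)
assert (np.diag(c) == 0).all()                                   # zero diagonal
assert np.array_equal(c.T @ c, (n - 1) * np.eye(n, dtype=int))   # C'C = (n-1)I
d = _get_dsd(n_factor=5, c_mat=c)
print(d.shape)  # (13, 5): 2n + 1 runs, first 5 columns kept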
64.432836
195
0.369527
3,756
17,268
1.669329
0.044196
0.803828
1.1689
1.515789
0.774482
0.759011
0.739872
0.717225
0.693301
0.673365
0
0.251711
0.339935
17,268
267
196
64.674157
0.296719
0.196606
0
0.121951
0
0
0
0
0
0
0
0
0
1
0.04878
false
0
0.01626
0
0.113821
0
0
0
1
null
1
1
1
0
1
1
1
0
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
99977f0abbea58b01cf937083f08cd153e43d73f
5,574
py
Python
api/alembic/versions/004ac48ffe18_indices.py
bcgov/wps
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
[ "Apache-2.0" ]
19
2020-01-31T21:51:31.000Z
2022-01-07T14:40:03.000Z
api/alembic/versions/004ac48ffe18_indices.py
bcgov/wps
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
[ "Apache-2.0" ]
1,680
2020-01-24T23:25:08.000Z
2022-03-31T23:50:27.000Z
api/alembic/versions/004ac48ffe18_indices.py
bcgov/wps
71df0de72de9cd656dc9ebf8461ffe47cfb155f6
[ "Apache-2.0" ]
6
2020-04-28T22:41:08.000Z
2021-05-05T18:16:06.000Z
"""indices Revision ID: 004ac48ffe18 Revises: 81c96876355a Create Date: 2021-03-29 18:24:10.485482 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '004ac48ffe18' down_revision = '81c96876355a' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic ### op.create_index(op.f('ix_hourly_actuals_rh_valid'), 'hourly_actuals', ['rh_valid'], unique=False) op.create_index(op.f('ix_hourly_actuals_station_code'), 'hourly_actuals', ['station_code'], unique=False) op.create_index(op.f('ix_hourly_actuals_temp_valid'), 'hourly_actuals', ['temp_valid'], unique=False) op.create_index(op.f('ix_hourly_actuals_weather_date'), 'hourly_actuals', ['weather_date'], unique=False) op.create_index(op.f('ix_model_run_grid_subset_predictions_prediction_model_grid_subset_id'), 'model_run_grid_subset_predictions', ['prediction_model_grid_subset_id'], unique=False) op.create_index(op.f('ix_model_run_grid_subset_predictions_prediction_model_run_timestamp_id'), 'model_run_grid_subset_predictions', ['prediction_model_run_timestamp_id'], unique=False) op.create_index(op.f('ix_model_run_grid_subset_predictions_prediction_timestamp'), 'model_run_grid_subset_predictions', ['prediction_timestamp'], unique=False) op.create_index(op.f('ix_noon_forecasts_created_at'), 'noon_forecasts', ['created_at'], unique=False) op.create_index(op.f('ix_noon_forecasts_station_code'), 'noon_forecasts', ['station_code'], unique=False) op.create_index(op.f('ix_noon_forecasts_weather_date'), 'noon_forecasts', ['weather_date'], unique=False) op.create_index(op.f('ix_prediction_model_grid_subsets_prediction_model_id'), 'prediction_model_grid_subsets', ['prediction_model_id'], unique=False) op.create_index(op.f('ix_prediction_model_run_timestamps_prediction_model_id'), 'prediction_model_run_timestamps', ['prediction_model_id'], unique=False) op.create_index(op.f('ix_prediction_model_run_timestamps_prediction_run_timestamp'), 'prediction_model_run_timestamps', ['prediction_run_timestamp'], unique=False) op.create_index(op.f('ix_prediction_models_abbreviation'), 'prediction_models', ['abbreviation'], unique=False) op.drop_constraint('processed_model_run_files_url_key', 'processed_model_run_urls', type_='unique') op.create_index(op.f('ix_processed_model_run_urls_url'), 'processed_model_run_urls', ['url'], unique=True) op.create_index(op.f('ix_weather_station_model_predictions_prediction_model_run_timestamp_id'), 'weather_station_model_predictions', ['prediction_model_run_timestamp_id'], unique=False) op.create_index(op.f('ix_weather_station_model_predictions_prediction_timestamp'), 'weather_station_model_predictions', ['prediction_timestamp'], unique=False) op.create_index(op.f('ix_weather_station_model_predictions_station_code'), 'weather_station_model_predictions', ['station_code'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic ### op.drop_index(op.f('ix_weather_station_model_predictions_station_code'), table_name='weather_station_model_predictions') op.drop_index(op.f('ix_weather_station_model_predictions_prediction_timestamp'), table_name='weather_station_model_predictions') op.drop_index(op.f('ix_weather_station_model_predictions_prediction_model_run_timestamp_id'), table_name='weather_station_model_predictions') op.drop_index(op.f('ix_processed_model_run_urls_url'), table_name='processed_model_run_urls') op.create_unique_constraint('processed_model_run_files_url_key', 'processed_model_run_urls', 
['url']) op.drop_index(op.f('ix_prediction_models_abbreviation'), table_name='prediction_models') op.drop_index(op.f('ix_prediction_model_run_timestamps_prediction_run_timestamp'), table_name='prediction_model_run_timestamps') op.drop_index(op.f('ix_prediction_model_run_timestamps_prediction_model_id'), table_name='prediction_model_run_timestamps') op.drop_index(op.f('ix_prediction_model_grid_subsets_prediction_model_id'), table_name='prediction_model_grid_subsets') op.drop_index(op.f('ix_noon_forecasts_weather_date'), table_name='noon_forecasts') op.drop_index(op.f('ix_noon_forecasts_station_code'), table_name='noon_forecasts') op.drop_index(op.f('ix_noon_forecasts_created_at'), table_name='noon_forecasts') op.drop_index(op.f('ix_model_run_grid_subset_predictions_prediction_timestamp'), table_name='model_run_grid_subset_predictions') op.drop_index(op.f('ix_model_run_grid_subset_predictions_prediction_model_run_timestamp_id'), table_name='model_run_grid_subset_predictions') op.drop_index(op.f('ix_model_run_grid_subset_predictions_prediction_model_grid_subset_id'), table_name='model_run_grid_subset_predictions') op.drop_index(op.f('ix_hourly_actuals_weather_date'), table_name='hourly_actuals') op.drop_index(op.f('ix_hourly_actuals_temp_valid'), table_name='hourly_actuals') op.drop_index(op.f('ix_hourly_actuals_station_code'), table_name='hourly_actuals') op.drop_index(op.f('ix_hourly_actuals_rh_valid'), table_name='hourly_actuals') # ### end Alembic commands ###
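Migrations like this one are usually applied with the Alembic command API (or the equivalent CLI); a hedged sketch, where the "alembic.ini" path is an assumption about the project layout:

# Sketch: apply or revert this revision programmatically. "alembic.ini" is an assumed path.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "004ac48ffe18")    # runs upgrade() above
command.downgrade(cfg, "81c96876355a")  # runs downgrade() above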
66.357143
110
0.758701
751
5,574
5.10253
0.10253
0.065762
0.075157
0.093946
0.85856
0.845772
0.81237
0.760438
0.705376
0.672756
0
0.011127
0.129351
5,574
83
111
67.156627
0.778488
0.045748
0
0.123077
0
0
0.550738
0.466679
0
0
0
0
0
1
0.030769
false
0
0.030769
0
0.061538
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
99a5a6a0c42b6294fcc61e31dd4f994cc5f6e08e
8,725
py
Python
src/repoAnalysis.py
jasper-xian/github-collab-analyses
a07e00952eda5d0d85e9a4faccc7d238bb0e5191
[ "MIT" ]
null
null
null
src/repoAnalysis.py
jasper-xian/github-collab-analyses
a07e00952eda5d0d85e9a4faccc7d238bb0e5191
[ "MIT" ]
null
null
null
src/repoAnalysis.py
jasper-xian/github-collab-analyses
a07e00952eda5d0d85e9a4faccc7d238bb0e5191
[ "MIT" ]
null
null
null
import github
from fileAuthorScore import fileAuthorScore
from github import Github
import requests
import json
import cryptocode
import pickle
import concurrent.futures


class GitAnalysis:

    def __init__(self):
        self.fileDict = {}
        self.g = None
        self.repoName = ""

    def clearCurrentFileName(self):
        self.currentFileName = ""

    def changeCurrentFileName(self, name):
        self.currentFileName = name

    def setRepoName(self, repoName):
        self.repoName = repoName

    def setG(self, token):
        self.g = Github(token)

    def authorsPerFileCommits(self):
        repo = self.g.get_repo(self.repoName)
        commits = repo.get_commits()
        with concurrent.futures.ThreadPoolExecutor(max_workers=commits.totalCount) as executor:
            list(map(lambda x: executor.submit(self.addCommit, x), commits))

    def authorsPerFilePulls(self):
        repo = self.g.get_repo(self.repoName)
        pulls = repo.get_pulls("all")
        with concurrent.futures.ThreadPoolExecutor(max_workers=pulls.totalCount) as executor:
            list(map(lambda x: executor.submit(self.addPull, x), pulls))

    def addCommit(self, commit):
        files = commit.files
        author = 0
        if commit.author is None:
            if commit.commit.author is None:
                return
            else:
                author = "*" + commit.commit.author.name
        else:
            author = commit.author.login
        if commit.get_pulls().totalCount != 0:
            for file in files:
                fileName = file.filename
                if fileName in self.fileDict.keys():
                    if author in self.fileDict[fileName].keys():
                        self.fileDict[fileName][author].addAdditions(file.additions * 0.2)
                        self.fileDict[fileName][author].addDeletions(file.deletions * 0.2)
                        self.fileDict[fileName][author].addChanges(file.changes * 0.2)
                    else:
                        self.fileDict[fileName][author] = fileAuthorScore(file.additions * 0.2, file.deletions * 0.2, file.changes * 0.2)
                else:
                    self.fileDict[fileName] = {}
                    self.fileDict[fileName][author] = fileAuthorScore(file.additions * 0.2, file.deletions * 0.2, file.changes * 0.2)
        else:
            for file in files:
                fileName = file.filename
                if fileName in self.fileDict.keys():
                    if author in self.fileDict[fileName].keys():
                        self.fileDict[fileName][author].addAdditions(file.additions)
                        self.fileDict[fileName][author].addDeletions(file.deletions)
                        self.fileDict[fileName][author].addChanges(file.changes)
                    else:
                        self.fileDict[fileName][author] = fileAuthorScore(file.additions, file.deletions, file.changes)
                else:
                    self.fileDict[fileName] = {}
                    self.fileDict[fileName][author] = fileAuthorScore(file.additions, file.deletions, file.changes)
                if file.status == "added" or file.status == "renamed":
                    self.fileDict[fileName][author].changeIsOriginalAuthor(True)
        return True

    def addPull(self, pull):
        files = pull.get_files()
        user = 0
        if pull.user is None:
            return
        else:
            user = pull.user.login
        for file in files:
            fileName = file.filename
            if fileName in self.fileDict.keys():
                if user in self.fileDict[fileName].keys():
                    self.fileDict[fileName][user].addAdditions(file.additions * 0.6)
                    self.fileDict[fileName][user].addDeletions(file.deletions * 0.6)
                    self.fileDict[fileName][user].addChanges(file.changes * 0.6)
                else:
                    self.fileDict[fileName][user] = fileAuthorScore(file.additions * 0.6, file.deletions * 0.6, file.changes * 0.6)
            else:
                self.fileDict[fileName] = {}
                self.fileDict[fileName][user] = fileAuthorScore(file.additions * 0.6, file.deletions * 0.6, file.changes * 0.6)
            if file.status == "added" or file.status == "renamed":
                self.fileDict[fileName][user].changeIsOriginalAuthor(True)
        return True

    # Earlier sequential implementations, kept commented out in the source:
    # def authorsPerFileCommits(self):
    #     repo = self.g.get_repo(self.repoName)
    #     commits = repo.get_commits()
    #     count = 0
    #     for commit in commits:
    #         files = commit.files
    #         author = 0
    #         if commit.author is None:
    #             if commit.commit.author is None:
    #                 continue
    #             else:
    #                 author = "*" + commit.commit.author.name
    #         else:
    #             author = commit.author.login
    #         if commit.get_pulls().totalCount != 0:
    #             for file in files:
    #                 fileName = file.filename
    #                 if fileName in self.fileDict.keys():
    #                     if author in self.fileDict[fileName].keys():
    #                         self.fileDict[fileName][author].addAdditions(file.additions * 0.2)
    #                         self.fileDict[fileName][author].addDeletions(file.deletions * 0.2)
    #                         self.fileDict[fileName][author].addChanges(file.changes * 0.2)
    #                     else:
    #                         self.fileDict[fileName][author] = fileAuthorScore(file.additions * 0.2, file.deletions * 0.2, file.changes * 0.2)
    #                 else:
    #                     self.fileDict[fileName] = {}
    #                     self.fileDict[fileName][author] = fileAuthorScore(file.additions * 0.2, file.deletions * 0.2, file.changes * 0.2)
    #         else:
    #             for file in files:
    #                 fileName = file.filename
    #                 if fileName in self.fileDict.keys():
    #                     if author in self.fileDict[fileName].keys():
    #                         self.fileDict[fileName][author].addAdditions(file.additions)
    #                         self.fileDict[fileName][author].addDeletions(file.deletions)
    #                         self.fileDict[fileName][author].addChanges(file.changes)
    #                     else:
    #                         self.fileDict[fileName][author] = fileAuthorScore(file.additions, file.deletions, file.changes)
    #                 else:
    #                     self.fileDict[fileName] = {}
    #                     self.fileDict[fileName][author] = fileAuthorScore(file.additions, file.deletions, file.changes)
    #                 if file.status == "added" or file.status == "renamed":
    #                     self.fileDict[fileName][author].changeIsOriginalAuthor(True)
    #         #count = count + 1
    #         if count > 100:
    #             break

    # def authorsPerFilePulls(self):
    #     repo = self.g.get_repo(self.repoName)
    #     pulls = repo.get_pulls("all")
    #     for pull in pulls:
    #         files = pull.get_files()
    #         user = 0
    #         if pull.user is None:
    #             continue
    #         else:
    #             user = pull.user.login
    #         for file in files:
    #             fileName = file.filename
    #             if fileName in self.fileDict.keys():
    #                 if user in self.fileDict[fileName].keys():
    #                     self.fileDict[fileName][user].addAdditions(file.additions * 0.6)
    #                     self.fileDict[fileName][user].addDeletions(file.deletions * 0.6)
    #                     self.fileDict[fileName][user].addChanges(file.changes * 0.6)
    #                 else:
    #                     self.fileDict[fileName][user] = fileAuthorScore(file.additions * 0.6, file.deletions * 0.6, file.changes * 0.6)
    #             else:
    #                 self.fileDict[fileName] = {}
    #                 self.fileDict[fileName][user] = fileAuthorScore(file.additions * 0.6, file.deletions * 0.6, file.changes * 0.6)
    #             if file.status == "added" or file.status == "renamed":
    #                 self.fileDict[fileName][user].changeIsOriginalAuthor(True)

    def isGitHubAuthor(self, login):
        contributors = self.g.get_repo(self.repoName).get_contributors()
        for contributor in contributors:
            if contributor.login == login:
                return True
        return False

    def clearFileDict(self):
        self.fileDict = {}

    def pickledFileDict(self):
        return pickle.dumps(self.fileDict)
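A hedged driver sketch for the class above; the token and repository name are placeholders, and a valid GitHub token is required for the PyGithub calls to succeed:

# Sketch: token and repository name below are placeholders.
ga = GitAnalysis()
ga.setG("<github-token>")
ga.setRepoName("octocat/Hello-World")
ga.authorsPerFileCommits()
ga.authorsPerFilePulls()
blob = ga.pickledFileDict()  # pickled {file -> {author -> fileAuthorScore}}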
47.162162
143
0.543725
865
8,725
5.461272
0.10289
0.139712
0.19475
0.121084
0.837638
0.832769
0.806943
0.806943
0.806943
0.806943
0
0.014615
0.349112
8,725
185
144
47.162162
0.817221
0.401948
0
0.401961
0
0
0.005439
0
0
0
0
0
0
1
0.117647
false
0
0.078431
0.009804
0.27451
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
41d33b481868cbb1b76beaa263932f648eaeb744
213
py
Python
cs/hmac/__init__.py
splunk-soar-connectors/crowdstrike
cea9d92522ff14b14146d85be2cee5a90c85823f
[ "Apache-2.0" ]
1
2022-02-13T19:59:11.000Z
2022-02-13T19:59:11.000Z
cs/hmac/__init__.py
splunk-soar-connectors/crowdstrike
cea9d92522ff14b14146d85be2cee5a90c85823f
[ "Apache-2.0" ]
null
null
null
cs/hmac/__init__.py
splunk-soar-connectors/crowdstrike
cea9d92522ff14b14146d85be2cee5a90c85823f
[ "Apache-2.0" ]
null
null
null
try:
    from client import get, post, put, delete, head, patch, Auth
except ImportError:  # narrow the bare except: fall back to the package-relative import
    from .client import get, post, put, delete, head, patch, Auth

__all__ = ['get', 'post', 'put', 'delete', 'head', 'patch', 'Auth']
30.428571
67
0.633803
30
213
4.366667
0.433333
0.160305
0.229008
0.366412
0.908397
0.908397
0.908397
0.687023
0.687023
0.687023
0
0
0.187793
213
6
68
35.5
0.757225
0
0
0
0
0
0.13615
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
0
1
1
1
1
1
0
0
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
8
41fdb288ef495be2ff3607479135350b8cbf345c
39
py
Python
max_dump/__init__.py
mki/max_dump
99c5b180d9891349087f7a9d381b3aed1c78a5c3
[ "MIT" ]
3
2019-07-01T05:31:04.000Z
2019-12-16T10:32:38.000Z
max_dump/__init__.py
mki/max_dump
99c5b180d9891349087f7a9d381b3aed1c78a5c3
[ "MIT" ]
null
null
null
max_dump/__init__.py
mki/max_dump
99c5b180d9891349087f7a9d381b3aed1c78a5c3
[ "MIT" ]
2
2019-12-12T04:00:18.000Z
2019-12-13T01:20:19.000Z
from .dump_cameras import dump_cameras
19.5
38
0.871795
6
39
5.333333
0.666667
0.6875
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
5124e51ca292f29faf5e444361d737a29e40fa79
22
py
Python
flipper/utils.py
yukinarit/flipper
8c4c0ae94ff2113a6723658b951cbab2f4eafb1f
[ "Unlicense" ]
1
2020-10-24T14:17:41.000Z
2020-10-24T14:17:41.000Z
flipper/utils.py
yukinarit/flipper
8c4c0ae94ff2113a6723658b951cbab2f4eafb1f
[ "Unlicense" ]
null
null
null
flipper/utils.py
yukinarit/flipper
8c4c0ae94ff2113a6723658b951cbab2f4eafb1f
[ "Unlicense" ]
null
null
null
def print():
    pass
7.333333
12
0.545455
3
22
4
1
0
0
0
0
0
0
0
0
0
0
0
0.318182
22
2
13
11
0.8
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
1
0
7
51495acadeb50b6e2121cc123900265aa0dffcdc
11,136
py
Python
py3canvas/tests/outcome_groups.py
tylerclair/py3canvas
7485d458606b65200f0ffa5bbe597a9d0bee189f
[ "MIT" ]
null
null
null
py3canvas/tests/outcome_groups.py
tylerclair/py3canvas
7485d458606b65200f0ffa5bbe597a9d0bee189f
[ "MIT" ]
null
null
null
py3canvas/tests/outcome_groups.py
tylerclair/py3canvas
7485d458606b65200f0ffa5bbe597a9d0bee189f
[ "MIT" ]
null
null
null
"""OutcomeGroups API Tests for Version 1.0. This is a testing template for the generated OutcomeGroupsAPI Class. """ import unittest import requests import secrets from py3canvas.apis.outcome_groups import OutcomeGroupsAPI from py3canvas.apis.outcome_groups import Outcomegroup from py3canvas.apis.outcome_groups import Outcomelink class TestOutcomeGroupsAPI(unittest.TestCase): """Tests for the OutcomeGroupsAPI.""" def setUp(self): self.client = OutcomeGroupsAPI(secrets.instance_address, secrets.access_token) def test_redirect_to_root_outcome_group_for_context_global(self): """Integration test for the OutcomeGroupsAPI.redirect_to_root_outcome_group_for_context_global method.""" r = self.client.redirect_to_root_outcome_group_for_context_global() def test_redirect_to_root_outcome_group_for_context_accounts(self): """Integration test for the OutcomeGroupsAPI.redirect_to_root_outcome_group_for_context_accounts method.""" account_id = None # Change me!! r = self.client.redirect_to_root_outcome_group_for_context_accounts(account_id) def test_redirect_to_root_outcome_group_for_context_courses(self): """Integration test for the OutcomeGroupsAPI.redirect_to_root_outcome_group_for_context_courses method.""" course_id = None # Change me!! r = self.client.redirect_to_root_outcome_group_for_context_courses(course_id) def test_get_all_outcome_groups_for_context_accounts(self): """Integration test for the OutcomeGroupsAPI.get_all_outcome_groups_for_context_accounts method.""" account_id = None # Change me!! r = self.client.get_all_outcome_groups_for_context_accounts(account_id) def test_get_all_outcome_groups_for_context_courses(self): """Integration test for the OutcomeGroupsAPI.get_all_outcome_groups_for_context_courses method.""" course_id = None # Change me!! r = self.client.get_all_outcome_groups_for_context_courses(course_id) def test_get_all_outcome_links_for_context_accounts(self): """Integration test for the OutcomeGroupsAPI.get_all_outcome_links_for_context_accounts method.""" account_id = None # Change me!! r = self.client.get_all_outcome_links_for_context_accounts( account_id, outcome_group_style=None, outcome_style=None ) def test_get_all_outcome_links_for_context_courses(self): """Integration test for the OutcomeGroupsAPI.get_all_outcome_links_for_context_courses method.""" course_id = None # Change me!! r = self.client.get_all_outcome_links_for_context_courses( course_id, outcome_group_style=None, outcome_style=None ) def test_show_outcome_group_global(self): """Integration test for the OutcomeGroupsAPI.show_outcome_group_global method.""" id = None # Change me!! r = self.client.show_outcome_group_global(id) def test_show_outcome_group_accounts(self): """Integration test for the OutcomeGroupsAPI.show_outcome_group_accounts method.""" account_id = None # Change me!! id = None # Change me!! r = self.client.show_outcome_group_accounts(account_id, id) def test_show_outcome_group_courses(self): """Integration test for the OutcomeGroupsAPI.show_outcome_group_courses method.""" course_id = None # Change me!! id = None # Change me!! r = self.client.show_outcome_group_courses(course_id, id) def test_update_outcome_group_global(self): """Integration test for the OutcomeGroupsAPI.update_outcome_group_global method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. 
pass def test_update_outcome_group_accounts(self): """Integration test for the OutcomeGroupsAPI.update_outcome_group_accounts method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_update_outcome_group_courses(self): """Integration test for the OutcomeGroupsAPI.update_outcome_group_courses method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_delete_outcome_group_global(self): """Integration test for the OutcomeGroupsAPI.delete_outcome_group_global method.""" id = None # Change me!! r = self.client.delete_outcome_group_global(id) def test_delete_outcome_group_accounts(self): """Integration test for the OutcomeGroupsAPI.delete_outcome_group_accounts method.""" account_id = None # Change me!! id = None # Change me!! r = self.client.delete_outcome_group_accounts(account_id, id) def test_delete_outcome_group_courses(self): """Integration test for the OutcomeGroupsAPI.delete_outcome_group_courses method.""" course_id = None # Change me!! id = None # Change me!! r = self.client.delete_outcome_group_courses(course_id, id) def test_list_linked_outcomes_global(self): """Integration test for the OutcomeGroupsAPI.list_linked_outcomes_global method.""" id = None # Change me!! r = self.client.list_linked_outcomes_global(id, outcome_style=None) def test_list_linked_outcomes_accounts(self): """Integration test for the OutcomeGroupsAPI.list_linked_outcomes_accounts method.""" account_id = None # Change me!! id = None # Change me!! r = self.client.list_linked_outcomes_accounts( account_id, id, outcome_style=None ) def test_list_linked_outcomes_courses(self): """Integration test for the OutcomeGroupsAPI.list_linked_outcomes_courses method.""" course_id = None # Change me!! id = None # Change me!! r = self.client.list_linked_outcomes_courses(course_id, id, outcome_style=None) def test_create_link_outcome_global(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_global method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_link_outcome_global_outcome_id(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_global_outcome_id method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_link_outcome_accounts(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_accounts method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_link_outcome_accounts_outcome_id(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_accounts_outcome_id method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_link_outcome_courses(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_courses method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_link_outcome_courses_outcome_id(self): """Integration test for the OutcomeGroupsAPI.create_link_outcome_courses_outcome_id method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. 
pass def test_unlink_outcome_global(self): """Integration test for the OutcomeGroupsAPI.unlink_outcome_global method.""" id = None # Change me!! outcome_id = None # Change me!! r = self.client.unlink_outcome_global(id, outcome_id) def test_unlink_outcome_accounts(self): """Integration test for the OutcomeGroupsAPI.unlink_outcome_accounts method.""" account_id = None # Change me!! id = None # Change me!! outcome_id = None # Change me!! r = self.client.unlink_outcome_accounts(account_id, id, outcome_id) def test_unlink_outcome_courses(self): """Integration test for the OutcomeGroupsAPI.unlink_outcome_courses method.""" course_id = None # Change me!! id = None # Change me!! outcome_id = None # Change me!! r = self.client.unlink_outcome_courses(course_id, id, outcome_id) def test_list_subgroups_global(self): """Integration test for the OutcomeGroupsAPI.list_subgroups_global method.""" id = None # Change me!! r = self.client.list_subgroups_global(id) def test_list_subgroups_accounts(self): """Integration test for the OutcomeGroupsAPI.list_subgroups_accounts method.""" account_id = None # Change me!! id = None # Change me!! r = self.client.list_subgroups_accounts(account_id, id) def test_list_subgroups_courses(self): """Integration test for the OutcomeGroupsAPI.list_subgroups_courses method.""" course_id = None # Change me!! id = None # Change me!! r = self.client.list_subgroups_courses(course_id, id) def test_create_subgroup_global(self): """Integration test for the OutcomeGroupsAPI.create_subgroup_global method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_subgroup_accounts(self): """Integration test for the OutcomeGroupsAPI.create_subgroup_accounts method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_create_subgroup_courses(self): """Integration test for the OutcomeGroupsAPI.create_subgroup_courses method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_import_outcome_group_global(self): """Integration test for the OutcomeGroupsAPI.import_outcome_group_global method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_import_outcome_group_accounts(self): """Integration test for the OutcomeGroupsAPI.import_outcome_group_accounts method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_import_outcome_group_courses(self): """Integration test for the OutcomeGroupsAPI.import_outcome_group_courses method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass
45.82716
126
0.730154
1,423
11,136
5.406887
0.057625
0.063946
0.108656
0.105797
0.95308
0.933455
0.899922
0.884845
0.783858
0.621653
0
0.000562
0.201509
11,136
242
127
46.016529
0.86471
0.477281
0
0.398374
1
0
0
0
0
0
0
0
0
1
0.308943
false
0.121951
0.073171
0
0.390244
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
9
514e0ddb6d640d2271cde5f580e233e706cafa44
176
py
Python
gfdx/analysis/__init__.py
streamlit-badge-bot/gfdx
7dadc5240cd3be40aff458e227c02bfd3c5ecc12
[ "MIT" ]
null
null
null
gfdx/analysis/__init__.py
streamlit-badge-bot/gfdx
7dadc5240cd3be40aff458e227c02bfd3c5ecc12
[ "MIT" ]
null
null
null
gfdx/analysis/__init__.py
streamlit-badge-bot/gfdx
7dadc5240cd3be40aff458e227c02bfd3c5ecc12
[ "MIT" ]
null
null
null
from . import analysis_potential_nutrient_intake
from . import foundational_documents
from . import gfdx_redcap_algorithm
from . import gfdx_redcap_who
from . import monitoring
35.2
48
0.863636
23
176
6.26087
0.565217
0.347222
0.194444
0.277778
0
0
0
0
0
0
0
0
0.107955
176
5
49
35.2
0.917197
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
5aa881a7bc23e3fd575dc2d08e0691f37bfa9b84
42
py
Python
src/networks/__init__.py
claudius-kienle/self-supervised-depth-denoising
4dffb30e8ef5022ef665825d26f45f67bf712cfd
[ "MIT" ]
2
2021-12-02T15:06:28.000Z
2021-12-03T09:48:32.000Z
src/networks/__init__.py
claudius-kienle/self-supervised-depth-denoising
4dffb30e8ef5022ef665825d26f45f67bf712cfd
[ "MIT" ]
23
2022-02-24T09:17:03.000Z
2022-03-21T16:57:58.000Z
src/networks/__init__.py
alr-internship/self-supervised-depth-denoising
4dffb30e8ef5022ef665825d26f45f67bf712cfd
[ "MIT" ]
null
null
null
from . import UNet
from . import LSTMUNet
14
22
0.761905
6
42
5.333333
0.666667
0.625
0
0
0
0
0
0
0
0
0
0
0.190476
42
2
23
21
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
5ac34a87645091063036eed20f608783dcdcc2b0
167
py
Python
djangae/contrib/gauth/sql.py
ikedaosushi/djangae
5fd2f8d70699fbbf155740effe42a36b205a6540
[ "BSD-3-Clause" ]
null
null
null
djangae/contrib/gauth/sql.py
ikedaosushi/djangae
5fd2f8d70699fbbf155740effe42a36b205a6540
[ "BSD-3-Clause" ]
null
null
null
djangae/contrib/gauth/sql.py
ikedaosushi/djangae
5fd2f8d70699fbbf155740effe42a36b205a6540
[ "BSD-3-Clause" ]
null
null
null
import warnings

warnings.warn(
    "djangae.contrib.gauth.sql is deprecated, please use djangae.contrib.gauth_sql instead"
)

from djangae.contrib.gauth_sql import *
20.875
91
0.790419
23
167
5.652174
0.565217
0.323077
0.438462
0.507692
0
0
0
0
0
0
0
0
0.125749
167
7
92
23.857143
0.890411
0
0
0
0
0
0.508982
0.299401
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
5ac5ae904109ff40c0da14378fc8031e575f4e95
225
py
Python
tests/test_utils.py
sasikala-binary/python-deriv-api
b40fca1b5ba06a47579f87258f41a50de9fb55fc
[ "MIT" ]
2
2022-01-23T13:31:22.000Z
2022-03-04T23:26:41.000Z
tests/test_utils.py
sasikala-binary/python-deriv-api
b40fca1b5ba06a47579f87258f41a50de9fb55fc
[ "MIT" ]
1
2021-12-20T14:55:03.000Z
2021-12-22T03:00:53.000Z
tests/test_utils.py
sasikala-binary/python-deriv-api
b40fca1b5ba06a47579f87258f41a50de9fb55fc
[ "MIT" ]
4
2021-12-10T05:18:44.000Z
2022-03-07T20:06:11.000Z
from deriv_api.utils import dict_to_cache_key
import pickle


def test_dict_to_cache_key():
    assert pickle.loads(dict_to_cache_key(
        {"hello": "world", "subscribe": 1, "passthrough": 1, "req_id": 1}
    )) == {"hello": "world"}
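The test implies that dict_to_cache_key pickles the request dict with the volatile keys subscribe, passthrough, and req_id stripped, so requests differing only in those keys share a cache key. A hedged usage sketch, with an illustrative payload:

# Sketch: build a cache key for a deriv-api style request. Payload values are illustrative.
request = {"ticks": "R_50", "subscribe": 1, "req_id": 7}
key = dict_to_cache_key(request)
print(pickle.loads(key))  # per the test above, this should be {"ticks": "R_50"}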
32.142857
132
0.715556
35
225
4.257143
0.6
0.120805
0.221477
0.281879
0
0
0
0
0
0
0
0.015
0.111111
225
6
133
37.5
0.73
0
0
0
0
0
0.204444
0
0
0
0
0
0.25
1
0.25
true
0.25
0.5
0
0.75
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
1
0
0
0
0
7
5acca2bf3aa586de9d30b3feab67bfd874aa29da
39
py
Python
EC2/backup_ec2/tests/test_todo.py
kyhau/aws-lambdas
be2f4de47f20dbee2157245895832d59a1e19c00
[ "Unlicense" ]
null
null
null
EC2/backup_ec2/tests/test_todo.py
kyhau/aws-lambdas
be2f4de47f20dbee2157245895832d59a1e19c00
[ "Unlicense" ]
1
2020-09-25T09:14:42.000Z
2020-09-28T09:13:43.000Z
EC2/backup_ec2/tests/test_todo.py
kyhau/aws-lambdas
be2f4de47f20dbee2157245895832d59a1e19c00
[ "Unlicense" ]
2
2018-04-22T17:46:51.000Z
2021-09-25T05:28:31.000Z
def test_nothing():
    assert 2+2==2*2
19.5
19
0.641026
8
39
3
0.625
0.25
0.25
0
0
0
0
0
0
0
0
0.125
0.179487
39
2
20
19.5
0.625
0
0
0
0
0
0
0
0
0
0
0
0.5
1
0.5
true
0
0
0
0.5
0
1
1
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
8
5aef40b287e53d7f08ce0a9a3eebef068758c828
358
py
Python
test_get_speakable_weather.py
jluszcz/JakeSky
10a794817ff49773a57e6b900f74becf20613554
[ "MIT" ]
1
2018-01-11T16:36:57.000Z
2018-01-11T16:36:57.000Z
test_get_speakable_weather.py
jluszcz/JakeSky
10a794817ff49773a57e6b900f74becf20613554
[ "MIT" ]
3
2021-03-25T21:41:57.000Z
2021-06-01T21:49:14.000Z
test_get_speakable_weather.py
jluszcz/JakeSky
10a794817ff49773a57e6b900f74becf20613554
[ "MIT" ]
null
null
null
import jakesky


def test_get_speakable_weather_summary():
    assert 'Drizzling' == jakesky.get_speakable_weather_summary('Drizzle')
    assert 'Raining' == jakesky.get_speakable_weather_summary('Raining')


def test_get_speakable_weather():
    assert '65 and Sunny' == jakesky.get_speakable_weather(jakesky.Weather('2021-06-05T00:00:00Z', 'Sunny', 65.45))
35.8
115
0.77095
47
358
5.553191
0.446809
0.229885
0.363985
0.298851
0.452107
0
0
0
0
0
0
0.062305
0.103352
358
9
116
39.777778
0.750779
0
0
0
0
0
0.187151
0
0
0
0
0
0.5
1
0.333333
true
0
0.166667
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
7
5af0d2d65f9b829cadd51ff719c535a50adeddd2
199
py
Python
QFTSampler/transformers/BaseTransformer.py
IntenF/QFTSampler
5324e1a11ed77bfc67aaef0902da4b32543e96cc
[ "MIT" ]
2
2021-03-19T14:15:16.000Z
2022-02-13T14:34:52.000Z
QFTSampler/transformers/BaseTransformer.py
IntenF/QFTSampler
5324e1a11ed77bfc67aaef0902da4b32543e96cc
[ "MIT" ]
null
null
null
QFTSampler/transformers/BaseTransformer.py
IntenF/QFTSampler
5324e1a11ed77bfc67aaef0902da4b32543e96cc
[ "MIT" ]
1
2021-03-31T17:38:03.000Z
2021-03-31T17:38:03.000Z
class BaseTransformer:
    def __init__(self):
        pass

    def phi(self):
        raise NotImplementedError()

    def update(self):
        raise NotImplementedError()

    def clear(self):
        pass
19.9
35
0.592965
20
199
5.7
0.55
0.105263
0.45614
0.508772
0
0
0
0
0
0
0
0
0.321608
199
9
36
22.111111
0.844444
0
0
0.444444
0
0
0
0
0
0
0
0
0
1
0.444444
false
0.222222
0
0
0.555556
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
7
51cae440dc5bffecd46bcbcfeca2bc1e0d03377f
12,487
py
Python
lom/_numba/posterior_score_fcts.py
TammoR/LogicalFactorisationMachines
55bd94001f2852ea61f69cbb07a0cbdb41231028
[ "Apache-2.0" ]
19
2018-05-16T00:51:52.000Z
2022-02-02T10:04:13.000Z
lom/_numba/posterior_score_fcts.py
TammoR/LogicalOperatorMachines
55bd94001f2852ea61f69cbb07a0cbdb41231028
[ "Apache-2.0" ]
1
2018-07-20T01:46:25.000Z
2019-01-10T14:44:42.000Z
lom/_numba/posterior_score_fcts.py
TammoR/LogicalOperatorMachines
55bd94001f2852ea61f69cbb07a0cbdb41231028
[ "Apache-2.0" ]
6
2018-05-16T03:05:41.000Z
2020-10-08T06:34:07.000Z
#!/usr/bin/env python

"""
Posterior score functions for logical operator machines
"""

import numpy as np
from numba import jit
from numba.types import int64


# OR-AND
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_OR_AND_2D(Z_n, U, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: should this be given a signature?
    """
    D, L = U.shape
    score = 0
    for d in range(D):
        if U[d, l] != 1:  # AND
            continue
        alrdy_active = False
        for l_prime in range(L):
            if (Z_n[l_prime] == 1) and\
                    (U[d, l_prime] == 1) and\
                    (l_prime != l):
                alrdy_active = True  # OR
                break
        if alrdy_active is False:
            score += X_n[d]
    return score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_OR_AND_3D(Z_n, U, V, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: should this be given a signature?
    """
    D, L = U.shape
    M, _ = V.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if (U[d, l] != 1) or (V[m, l] != 1):  # AND
                continue
            alrdy_active = False
            for l_prime in range(L):
                if (Z_n[l_prime] == 1) and\
                        (U[d, l_prime] == 1) and\
                        (V[m, l_prime] == 1) and\
                        (l_prime != l):
                    alrdy_active = True  # OR
                    break
            if alrdy_active is False:
                score += X_n[d, m]
    return score


# XOR-AND
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_XOR_AND_2D(Z_n, U, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: should this be given a signature?
    """
    D, L = U.shape
    score = 0
    for d in range(D):
        if U[d, l] != 1:  # AND
            continue
        # compute deltaXOR-AND
        num_active = np.int8(0)
        for l_prime in range(L):
            if (Z_n[l_prime] == 1) and\
                    (U[d, l_prime] == 1) and\
                    (l_prime != l):
                num_active += 1
                if num_active > 1:
                    break
        if num_active == 0:
            score += X_n[d]
        elif num_active == 1:
            score -= X_n[d]
    return score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_XOR_AND_3D(Z_n, U, V, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: should this be given a signature?
    """
    D, L = U.shape
    M, _ = V.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if (U[d, l] != 1) or (V[m, l] != 1):  # AND
                continue
            # compute deltaXOR-AND
            num_active = np.int8(0)
            for l_prime in range(L):
                if (Z_n[l_prime] == 1) and\
                        (U[d, l_prime] == 1) and\
                        (V[m, l_prime] == 1) and\
                        (l_prime != l):
                    num_active += 1
                    if num_active > 1:
                        break
            if num_active == 0:
                score += X_n[d, m]
            elif num_active == 1:
                score -= X_n[d, m]
    return score


# XOR-NAND
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_XOR_NAND_2D(Z_n, U, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: should this be given a signature?
    """
    D, L = U.shape
    score = 0
    for d in range(D):
        if U[d, l] != 1:  # AND
            continue
        # compute deltaXOR-NAND
        num_active = np.int8(0)
        for l_prime in range(L):
            if ((Z_n[l_prime] != 1) or (U[d, l_prime] != 1)) and\
                    (l_prime != l):
                num_active += 1
                if num_active > 1:
                    break
        if num_active == 0:
            score += X_n[d]
        elif num_active == 1:
            score -= X_n[d]
    return -score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_XOR_NAND_3D(Z_n, U, V, X_n, l):
    D, L = U.shape
    M, _ = V.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if U[d, l] != 1 or V[m, l] != 1:  # AND
                continue
            # compute deltaXOR-NAND
            num_active = np.int8(0)
            for l_prime in range(L):
                if ((Z_n[l_prime] != 1) or
                        (U[d, l_prime] != 1) or
                        (V[m, l_prime] != 1)) and\
                        (l_prime != l):
                    num_active += 1
                    if num_active > 1:
                        break
            if num_active == 0:
                score += X_n[d, m]
            elif num_active == 1:
                score -= X_n[d, m]
    return -score
    raise NotImplementedError


# OR-NAND
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_OR_NAND_2D(Z_n, U, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: needs testing!
    """
    D, L = U.shape
    score = 0
    for d in range(D):
        if U[d, l] == -1:  # NAND
            continue
        alrdy_active = False
        for l_prime in range(L):
            if ((Z_n[l_prime] == -1) or (U[d, l_prime] == -1)) and\
                    (l_prime != l):
                alrdy_active = True  # OR
                break
        if alrdy_active is False:
            score += X_n[d]
    return -score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_OR_NAND_3D(Z_n, U, V, X_n, l):
    M, _ = V.shape
    D, L = U.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if (U[d, l] == -1) or (V[m, l] == -1):  # NAND
                continue
            alrdy_active = False
            for l_prime in range(L):
                if ((Z_n[l_prime] == -1) or
                        (U[d, l_prime] == -1) or
                        (V[m, l_prime] == -1)) and\
                        (l_prime != l):
                    alrdy_active = True  # OR
                    break
            if alrdy_active is False:
                score += X_n[d, m]
    return -score


# OR-XOR
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_OR_XOR_2D(Z_n, U, X_n, l):
    """
    Return count of correct/incorrect explanations caused by setting
    Z[n,l] to 1, respecting explaining away dependencies
    TODO: needs testing!
    """
    D, L = U.shape
    score = 0
    for d in range(D):
        explained_away = False
        for l_prime in range(L):
            if (Z_n[l_prime] != U[d, l_prime]) and (l_prime != l):
                explained_away = True
                break
        if explained_away is False:
            score += X_n[d] * U[d, l]
    return -score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_OR_XOR_3D(Z_n, U, V, X_n, l):
    D, L = U.shape
    M, _ = V.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if U[d, l] == 1 and V[m, l] == 1:  # XOR cant be changed by z_nl
                continue
            explained_away = False
            for l_prime in range(L):
                if (Z_n[l_prime] + U[d, l_prime] + V[m, l_prime] == -1) and\
                        (l_prime != l):
                    explained_away = True
                    break
            if explained_away is False:
                score += X_n[d, m] * U[d, l] * V[m, l]  # very elegant ;)
    return score


# NAND-XOR
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_NAND_XOR_2D(Z_n, U, X_n, l):
    D, L = U.shape
    score = 0
    for d in range(D):
        explained_away = False
        for l_prime in range(L):
            if (Z_n[l_prime] == U[d, l_prime]) and (l_prime != l):
                explained_away = True
                break
        if explained_away is False:
            score += X_n[d] * U[d, l]
    return score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_NAND_XOR_3D(Z_n, U, V, X_n, l):
    M, _ = V.shape
    D, L = U.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if U[d, l] == 1 and V[m, l] == 1:  # XOR cant be changed by z_nl
                continue
            explained_away = False
            for l_prime in range(L):
                if (Z_n[l_prime] + U[d, l_prime] + V[m, l_prime] != -1) and\
                        (l_prime != l):
                    explained_away = True
                    break
            if explained_away is False:
                score += X_n[d, m] * U[d, l] * V[m, l]
    return -score


# XOR-XOR
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_XOR_XOR_2D(Z_n, U, X_n, l):
    D, L = U.shape
    score = 0
    for d in range(D):
        num_active = np.int8(0)
        for l_prime in range(L):
            if (Z_n[l_prime] != U[d, l_prime]) and (l_prime != l):
                num_active += 1
                if num_active > 1:
                    break
        if num_active == 0:
            score -= X_n[d] * U[d, l]
        elif num_active == 1:
            score += X_n[d] * U[d, l]
    return score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_XOR_XOR_3D(Z_n, U, V, X_n, l):
    M, _ = V.shape
    D, L = U.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if U[d, l] == 1 and V[m, l] == 1:  # XOR cant be changed by z_nl
                continue
            num_active = np.int8(0)
            for l_prime in range(L):
                if (Z_n[l_prime] + U[d, l_prime] + V[m, l_prime] == -1) and\
                        (l_prime != l):
                    num_active += 1
                    if num_active > 1:
                        break
            if num_active == 0:
                score += X_n[d, m] * U[d, l] * V[m, l]
            elif num_active == 1:
                score -= X_n[d, m] * U[d, l] * V[m, l]
    return score


# XOR-NXOR
@jit('int16(int8[:], int8[:,:], int8[:], int16)', nopython=True, nogil=True)
def posterior_score_XOR_NXOR_2D(Z_n, U, X_n, l):
    D, L = U.shape
    score = 0
    for d in range(D):
        num_active = np.int8(0)
        for l_prime in range(L):
            if (U[d, l_prime] == Z_n[l_prime]) and (l_prime != l):
                num_active += 1
                if num_active > 1:
                    break
        if num_active == 0:
            score += X_n[d] * U[d, l]
        elif num_active == 1:
            score -= X_n[d] * U[d, l]
    return score


@jit('int64(int8[:], int8[:,:], int8[:,:], int8[:,:], int16)',
     nopython=True, nogil=True)
def posterior_score_XOR_NXOR_3D(Z_n, U, V, X_n, l):
    M, _ = V.shape
    D, L = U.shape
    score = int64(0)
    for d in range(D):
        for m in range(M):
            if U[d, l] == 1 and V[m, l] == 1:  # NXOR cant be changed by z_nl
                continue
            num_active = np.int8(0)
            for l_prime in range(L):
                if (U[d, l_prime] + Z_n[l_prime] + V[m, l_prime] != -1) and\
                        (l_prime != l):
                    num_active += 1
                    if num_active > 1:
                        break
            if num_active == 0:
                score -= X_n[d, m] * U[d, l] * V[m, l]
            elif num_active == 1:
                score += X_n[d, m] * U[d, l] * V[m, l]
    return score
27.204793
89
0.476816
1,821
12,487
3.113674
0.048874
0.07619
0.021164
0.033862
0.967196
0.967196
0.967196
0.967196
0.966138
0.966138
0
0.035026
0.38496
12,487
458
90
27.264192
0.703255
0.117722
0
0.876254
0
0
0.070429
0
0
0
0
0.015284
0
1
0.053512
false
0
0.010033
0
0.117057
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
8
cfe9d68eba7ade9416dbbb76c0477e9b7e0dbcba
1,923
py
Python
tests/errors/values_not_allowed_error.py
caputomarcos/mongorest
57d6b28d75e18afed5cef7160522958153b5be15
[ "BSD-3-Clause" ]
16
2015-04-18T02:51:09.000Z
2020-12-15T18:05:16.000Z
tests/errors/values_not_allowed_error.py
caputomarcos/mongorest
57d6b28d75e18afed5cef7160522958153b5be15
[ "BSD-3-Clause" ]
8
2015-11-24T23:06:03.000Z
2016-07-21T17:57:59.000Z
tests/errors/values_not_allowed_error.py
caputomarcos/mongorest
57d6b28d75e18afed5cef7160522958153b5be15
[ "BSD-3-Clause" ]
2
2015-12-04T13:45:32.000Z
2016-06-11T13:44:53.000Z
# -*- encoding: UTF-8 -*-

from __future__ import absolute_import, unicode_literals

from mongorest.errors import ValuesNotAllowedError
from mongorest.testcase import TestCase


class TestValuesNotAllowedError(TestCase):

    def test_values_not_allowed_error_sets_correct_fields_if_list(self):
        self.assertEqual(
            ValuesNotAllowedError('collection', 'field', ['values']),
            {
                'error_code': 31,
                'error_type': 'ValuesNotAllowedError',
                'error_message': 'Values: values; are not allowed for field '
                                 '\'field\' on collection \'collection\'.',
                'collection': 'collection',
                'field': 'field',
                'values': 'values'
            }
        )

    def test_values_not_allowed_error_sets_correct_fields_if_json_string(self):
        self.assertEqual(
            ValuesNotAllowedError('collection', 'field', '[\'values\']'),
            {
                'error_code': 31,
                'error_type': 'ValuesNotAllowedError',
                'error_message': 'Values: values; are not allowed for field '
                                 '\'field\' on collection \'collection\'.',
                'collection': 'collection',
                'field': 'field',
                'values': 'values'
            }
        )

    def test_values_not_allowed_error_sets_correct_fields_if_string(self):
        self.assertEqual(
            ValuesNotAllowedError('collection', 'field', 'values'),
            {
                'error_code': 31,
                'error_type': 'ValuesNotAllowedError',
                'error_message': 'Values: values; are not allowed for field '
                                 '\'field\' on collection \'collection\'.',
                'collection': 'collection',
                'field': 'field',
                'values': 'values'
            }
        )
37.705882
79
0.534581
153
1,923
6.437909
0.254902
0.182741
0.182741
0.048731
0.813198
0.813198
0.813198
0.813198
0.813198
0.813198
0
0.005618
0.352054
1,923
50
80
38.46
0.784912
0.01196
0
0.55814
0
0
0.278188
0.033193
0
0
0
0
0.069767
1
0.069767
false
0
0.069767
0
0.162791
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
321cbda9991956b3e819c4e9866e769b8380062e
248
py
Python
bitio/src/microbit/microbits.py
hungjuchen/Atmosmakers
4e8e64fba3d7a31840f69a5aa3823247aa5dca02
[ "MIT" ]
85
2017-06-09T20:53:46.000Z
2022-03-09T21:35:05.000Z
bitio/src/microbit/microbits.py
hungjuchen/Atmosmakers
4e8e64fba3d7a31840f69a5aa3823247aa5dca02
[ "MIT" ]
34
2017-06-09T20:52:05.000Z
2021-02-19T19:49:45.000Z
bitio/src/microbit/microbits.py
hungjuchen/Atmosmakers
4e8e64fba3d7a31840f69a5aa3823247aa5dca02
[ "MIT" ]
32
2017-06-09T10:15:19.000Z
2021-11-20T09:08:08.000Z
# microbits.py - provide non auto connect to 1 or more microbits

print("microbits imported")

#TODO: The idea of this is to have a microbit factory, that won't auto connect
#to a single microbit, and will allow multiple to be discovered and used.
35.428571
78
0.766129
43
248
4.418605
0.767442
0.115789
0.136842
0
0
0
0
0
0
0
0
0.004902
0.177419
248
6
79
41.333333
0.926471
0.850806
0
0
0
0
0.545455
0
0
0
0
0.166667
0
1
0
true
0
1
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
1
1
0
7
5c82deab0e930410243bb45349cd9d730edc3406
144
py
Python
SSD/ssd_model/__init__.py
erum-omdena/ai-challenge-mars
50fbfe1f478093aaba92e8c267548f64846c7846
[ "MIT" ]
3
2021-04-25T16:02:47.000Z
2021-04-26T07:30:07.000Z
SSD/ssd_model/__init__.py
erum-omdena/ai-challenge-mars
50fbfe1f478093aaba92e8c267548f64846c7846
[ "MIT" ]
39
2019-06-03T18:10:45.000Z
2022-02-10T11:11:51.000Z
SSD/ssd_model/__init__.py
erum-omdena/ai-challenge-mars
50fbfe1f478093aaba92e8c267548f64846c7846
[ "MIT" ]
12
2019-06-01T11:21:27.000Z
2021-12-29T15:18:42.000Z
from ssd_model import data_processor
from ssd_model import data_processor_utils
from ssd_model import model
from ssd_model import model_utils
36
43
0.875
24
144
4.916667
0.291667
0.237288
0.40678
0.610169
0.915254
0.525424
0
0
0
0
0
0
0.125
144
4
44
36
0.936508
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
8
5c88c2aa7fcd3336f8d63e2a779b630f0dc7454d
2,550
py
Python
trec2014/python/cuttsum/judgements.py
kedz/cuttsum
992c21192af03fd2ef863f5ab7d10752f75580fa
[ "Apache-2.0" ]
6
2015-09-10T02:22:21.000Z
2021-10-01T16:36:46.000Z
trec2014/python/cuttsum/judgements.py
kedz/cuttsum
992c21192af03fd2ef863f5ab7d10752f75580fa
[ "Apache-2.0" ]
null
null
null
trec2014/python/cuttsum/judgements.py
kedz/cuttsum
992c21192af03fd2ef863f5ab7d10752f75580fa
[ "Apache-2.0" ]
2
2018-04-04T10:44:32.000Z
2021-10-01T16:37:26.000Z
from datetime import datetime
from pkg_resources import resource_stream, resource_filename
import gzip
import pandas as pd
import os

#from pkg_resources import resource_filename

#def this_is_a_test():
#    print "High"

def convert_to_datetime(x):
    return datetime.utcfromtimestamp(int(x))

def get_2014_nuggets():
    nuggets_tsv = resource_filename(
        u'cuttsum', os.path.join(u'2014-data', u'nuggets.tsv.gz'))
    with gzip.open(nuggets_tsv, u'r') as f:
        df = pd.io.parsers.read_csv(
            f, sep='\t', quoting=3, header=0,
            converters={u'timestamp': convert_to_datetime},
            names=[u'query id', u'nugget id', u'timestamp',
                   u'important', u'length', 'text'])
    return df

def get_2013_nuggets():
    nuggets_tsv = resource_filename(
        u'cuttsum', os.path.join(u'2013-data', u'nuggets.tsv.gz'))
    with gzip.open(nuggets_tsv, u'r') as f:
        df = pd.io.parsers.read_csv(
            f, sep='\t', quoting=3, header=0,
            converters={u'timestamp': convert_to_datetime},
            names=[u'query id', u'nugget id', u'timestamp',
                   u'important', u'length', 'text'])
    return df

def get_2013_matches():
    matches_tsv = resource_filename(
        u'cuttsum', os.path.join(u'2013-data', u'matches.tsv.gz'))
    with gzip.open(matches_tsv, u'r') as f:
        df = pd.io.parsers.read_csv(
            f, sep='\t', quoting=3, header=0,
            dtype={u'match start': int, u'match end': int},
            names=[u'query id', u'update id', u'nugget id',
                   u'match start', u'match end', 'auto p'])
    return df

def get_2014_matches():
    matches_tsv = resource_filename(
        u'cuttsum', os.path.join(u'2014-data', u'matches.tsv.gz'))
    with gzip.open(matches_tsv, u'r') as f:
        df = pd.io.parsers.read_csv(
            f, sep='\t', quoting=3, header=0,
            dtype={u'match start': int, u'match end': int},
            names=[u'query id', u'update id', u'nugget id',
                   u'match start', u'match end', 'auto p'])
    return df

def get_mturk_matches():
    matches_tsv = resource_filename(
        u'cuttsum', os.path.join(u'2015-data', u'mturk-matches.tsv.gz'))
    with gzip.open(matches_tsv, u'r') as f:
        df = pd.io.parsers.read_csv(
            f, sep='\t', quoting=3, header=0,
            dtype={u'match start': int, u'match end': int},
            names=[u'query id', u'update id', u'nugget id',
                   u'match start', u'match end', 'auto p'])
    return df
36.956522
76
0.591765
385
2,550
3.805195
0.184416
0.026621
0.045051
0.068259
0.862799
0.821843
0.821843
0.821843
0.821843
0.821843
0
0.024287
0.257255
2,550
68
77
37.5
0.749208
0.031373
0
0.701754
0
0
0.200649
0
0
0
0
0
0
1
0.105263
false
0
0.122807
0.017544
0.333333
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5c9294612bef69498f10a1cc04234531da78b9a8
149
py
Python
plotly_express/__init__.py
gaofp/plotly_express
df55408739c88bf8a2249d79f2d1887d68b72af9
[ "MIT" ]
null
null
null
plotly_express/__init__.py
gaofp/plotly_express
df55408739c88bf8a2249d79f2d1887d68b72af9
[ "MIT" ]
null
null
null
plotly_express/__init__.py
gaofp/plotly_express
df55408739c88bf8a2249d79f2d1887d68b72af9
[ "MIT" ]
null
null
null
""" `plotly_express` is now an alias to `plotly.express` """ __version__ = "0.4.0" from plotly.express import * from plotly.express import line_3d
16.555556
52
0.724832
23
149
4.434783
0.608696
0.509804
0.333333
0.45098
0
0
0
0
0
0
0
0.031496
0.147651
149
8
53
18.625
0.771654
0.348993
0
0
0
0
0.05618
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
7
5ca35f2a8738cdf6000f49a46fef7fde9d283f16
24,794
py
Python
src/ebay_rest/api/buy_marketplace_insights/api/item_sales_api.py
gbm001/ebay_rest
077d3478423ccd80ff35e0361821d6a11180bc54
[ "MIT" ]
null
null
null
src/ebay_rest/api/buy_marketplace_insights/api/item_sales_api.py
gbm001/ebay_rest
077d3478423ccd80ff35e0361821d6a11180bc54
[ "MIT" ]
null
null
null
src/ebay_rest/api/buy_marketplace_insights/api/item_sales_api.py
gbm001/ebay_rest
077d3478423ccd80ff35e0361821d6a11180bc54
[ "MIT" ]
null
null
null
# coding: utf-8

"""
    Marketplace Insights API

    <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#Limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> The Marketplace Insights API provides the ability to search for sold items on eBay by keyword, GTIN, category, and product and returns the of sales history of those items.  # noqa: E501

    OpenAPI spec version: v1_beta.2.2

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import re  # noqa: F401

# python 2 and python 3 compatibility library
import six

from ...buy_marketplace_insights.api_client import ApiClient


class ItemSalesApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def search(self, **kwargs):  # noqa: E501
        """search  # noqa: E501

        (Limited Release) This method searches for sold eBay items by various URI query parameters and retrieves the sales history of the items for the last 90 days. You can search by keyword, category, eBay product ID (ePID), or GTIN, or a combination of these. This method also supports the following: Filtering by the value of one or multiple fields, such as listing format, item condition, price range, location, and more. For the fields supported by this method, see the filter parameter. Retrieving the refinements (metadata) of an item , such as item aspects (color, brand), condition, category, etc. using the fieldgroups parameter. Filtering by item aspects and other refinements using the aspect_filter parameter. Creating aspects histograms, which enables shoppers to drill down in each refinement narrowing the search results. For details and examples of these capabilities, see Browse API in the Buying Integration Guide. Pagination and sort controls There are pagination controls (limit and offset fields) and sort query parameters that control/sort the data that is returned. By default, the results are sorted by &quot;Best Match&quot;. For more information about Best Match, see the eBay help page Best Match. URLs for this method Production URL: https://api.ebay.com/buy/marketplace_insights/v1_beta/item_sales/search? Sandbox URL: https://api.sandbox.ebay.com/buy/marketplace_insights/v1_beta/item_sales/search? Request headers You will want to use the X-EBAY-C-ENDUSERCTX request header with this method. If you are an eBay Network Partner you must use affiliateCampaignId=ePNCampaignId,affiliateReferenceId=referenceId in the header in order to be paid for selling eBay items on your site . For details see, Request headers in the Buy APIs Overview. URL Encoding for Parameters Query parameter values need to be URL encoded. For details, see URL encoding query parameter values. Restrictions For a list of supported sites and other restrictions, see API Restrictions.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str aspect_filter: This field lets you filter by item aspects. The aspect name/value pairs and category, which is required, is used to limit the results to specific aspects of the item. For example, in a clothing category one aspect pair would be Color/Red. The results are returned in the refinement container. For example, the method below uses the category ID for Women's Clothing. This will return only sold items for a woman's red or blue shirt. /buy/marketplace_insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=15724&amp;aspect_filter=categoryId:15724,Color:{Red|Blue} To get a list of the aspects pairs and the category, which is returned in the dominantCategoryId field, set fieldgroups to ASPECT_REFINEMENTS. /buy/marketplace_insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=15724&amp;fieldgroups=ASPECT_REFINEMENTS Format: aspectName:{value1|value2} Required: The category ID is required twice; once as a URI parameter and as part of the aspect_filter parameter. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/gct:AspectFilter
        :param str category_ids: The category ID is required and is used to limit the results. For example, if you search for 'shirt' the result set will be very large. But if you also include the category ID 137084, the results will be limited to 'Men's Athletic Apparel'. For example: /buy/marketplace-insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=137084 The list of eBay category IDs is not published and category IDs are not the same across all the eBay marketplaces. You can use the following techniques to find a category by site: For the US marketplace, use the Category Changes page. Use the Taxonomy API. For details see Get Categories for Buy APIs. Usage: This field can have one category ID or a comma separated list of IDs. You can use category_ids by itself or use it with any combination of the gtin, epid, and q fields, which gives you additional control over the result set. Restrictions: Partners will be given a list of categories they can use. To use a top-level (L1) category, you must also include the q, or gtin, or epid query parameter. Maximum number of categories: 4
        :param str epid: The ePID is the eBay product identifier of a product from the eBay product catalog. This field limits the results to only items in the specified ePID. /buy/marketplace-insights/v1_beta/item_sales/search?epid=241986085&amp;category_ids=168058 You can use the product_summary/search method in the Catalog API to search for the ePID of the product. Required: At least 1 category_ids Maximum: 1 epid Optional: Any combination of epid, gtin, or q
        :param str fieldgroups: This field lets you control what is to be returned in the response and accepts a comma separated list of values. The default is MATCHING_ITEMS, which returns the items that match the keyword or category specified. The other values return data that can be used to create histograms. For code examples see, aspect_filter. Valid Values: ASPECT_REFINEMENTS - This returns the aspectDistributions container, which has the dominantCategoryId, matchCount, and refinementHref for the various aspects of the items found. For example, if you searched for 'Mustang', some of the aspect would be Model Year, Exterior Color, Vehicle Mileage, etc. Note: ASPECT_REFINEMENTS are category specific. BUYING_OPTION_REFINEMENTS - This returns the buyingOptionDistributions container, which has the matchCount and refinementHref for AUCTION and FIXED_PRICE (Buy It Now) items. Note: Classified items are not supported. CATEGORY_REFINEMENTS - This returns the categoryDistributions container, which has the categories that the item is in. CONDITION_REFINEMENTS - This returns the conditionDistributions container, such as NEW, USED, etc. Within these groups are multiple states of the condition. For example, New can be New without tag, New in box, New without box, etc. MATCHING_ITEMS - This is meant to be used with one or more of the refinement values above. You use this to return the specified refinements and all the matching items. FULL - This returns all the refinement containers and all the matching items. Code so that your app gracefully handles any future changes to this list. Default: MATCHING_ITEMS
        :param str filter: This field supports multiple field filters that can be used to limit/customize the result set. The following lists the supported filters. For details and examples for all the filters, see Buy API Field Filters. buyingOptions conditionIds conditions itemLocationCountry lastSoldDate price priceCurrency The following example filters the result set by price. Note: To filter by price, price and priceCurrency must always be used together. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone&amp;category_ids=15724&amp;filter=price:[50..500],priceCurrency:USD For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/cos:FilterField
        :param str gtin: This field lets you search by the Global Trade Item Number of the item as defined by https://www.gtin.info. This can be a UPC (Universal Product Code), EAN (European Article Number), or an ISBN (International Standard Book Number) value. /buy/marketplace-insights/v1_beta/item_sales/search?gtin=241986085&amp;category_ids=9355 Required: At least 1 category_ids Maximum: 1 gtin Optional: Any combination of epid, gtin, or q
        :param str limit: The number of items, from the result set, returned in a single page. Default: 50 Maximum number of items per page (limit): 200 Maximum number of items in a result set: 10,000
        :param str offset: Specifies the number of items to skip in the result set. This is used with the limit field to control the pagination of the output. If offset is 0 and limit is 10, the method will retrieve items 1-10 from the list of items returned, if offset is 10 and limit is 10, the method will retrieve items 11 thru 20 from the list of items returned. Valid Values: 0-10,000 (inclusive) Default: 0 Maximum number of items returned: 10,000
        :param str q: A string consisting of one or more keywords that are used to search for items on eBay. The keywords are handled as follows: If the keywords are separated by a comma, it is treated as an AND. In the following example, the query returns items that have iphone AND ipad. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone,ipad&amp;category_ids=15724 If the keywords are separated by a space, it is treated as an OR. In the following examples, the query returns items that have iphone OR ipad. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone&amp;category_ids=15724&nbsp;ipad /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone,&nbsp;ipad&amp;category_ids=15724 Restriction: The * wildcard character is not allowed in this field. Required: At least 1 category_ids Optional: Any combination of epid, gtin, or q
        :param str sort: This field specifies the order and the field name to use to sort the items. To sort in descending order use - before the field name. Currently, you can only sort by price (in ascending or descending order). If no sort parameter is submitted, the result set is sorted by &quot;Best Match&quot;. The following are examples of using the sort query parameter. Sort Result &amp;sort=price Sorts by price in ascending order (lowest price first) &amp;sort=-price Sorts by price in descending order (highest price first) Default: ascending For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/cos:SortField
        :return: SalesHistoryPagedCollection
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.search_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.search_with_http_info(**kwargs)  # noqa: E501
            return data

    def search_with_http_info(self, **kwargs):  # noqa: E501
        """search  # noqa: E501

        (Limited Release) This method searches for sold eBay items by various URI query parameters and retrieves the sales history of the items for the last 90 days. You can search by keyword, category, eBay product ID (ePID), or GTIN, or a combination of these. This method also supports the following: Filtering by the value of one or multiple fields, such as listing format, item condition, price range, location, and more. For the fields supported by this method, see the filter parameter. Retrieving the refinements (metadata) of an item , such as item aspects (color, brand), condition, category, etc. using the fieldgroups parameter. Filtering by item aspects and other refinements using the aspect_filter parameter. Creating aspects histograms, which enables shoppers to drill down in each refinement narrowing the search results. For details and examples of these capabilities, see Browse API in the Buying Integration Guide. Pagination and sort controls There are pagination controls (limit and offset fields) and sort query parameters that control/sort the data that is returned. By default, the results are sorted by &quot;Best Match&quot;. For more information about Best Match, see the eBay help page Best Match. URLs for this method Production URL: https://api.ebay.com/buy/marketplace_insights/v1_beta/item_sales/search? Sandbox URL: https://api.sandbox.ebay.com/buy/marketplace_insights/v1_beta/item_sales/search? Request headers You will want to use the X-EBAY-C-ENDUSERCTX request header with this method. If you are an eBay Network Partner you must use affiliateCampaignId=ePNCampaignId,affiliateReferenceId=referenceId in the header in order to be paid for selling eBay items on your site . For details see, Request headers in the Buy APIs Overview. URL Encoding for Parameters Query parameter values need to be URL encoded. For details, see URL encoding query parameter values. Restrictions For a list of supported sites and other restrictions, see API Restrictions.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str aspect_filter: This field lets you filter by item aspects. The aspect name/value pairs and category, which is required, is used to limit the results to specific aspects of the item. For example, in a clothing category one aspect pair would be Color/Red. The results are returned in the refinement container. For example, the method below uses the category ID for Women's Clothing. This will return only sold items for a woman's red or blue shirt. /buy/marketplace_insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=15724&amp;aspect_filter=categoryId:15724,Color:{Red|Blue} To get a list of the aspects pairs and the category, which is returned in the dominantCategoryId field, set fieldgroups to ASPECT_REFINEMENTS. /buy/marketplace_insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=15724&amp;fieldgroups=ASPECT_REFINEMENTS Format: aspectName:{value1|value2} Required: The category ID is required twice; once as a URI parameter and as part of the aspect_filter parameter. For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/gct:AspectFilter
        :param str category_ids: The category ID is required and is used to limit the results. For example, if you search for 'shirt' the result set will be very large. But if you also include the category ID 137084, the results will be limited to 'Men's Athletic Apparel'. For example: /buy/marketplace-insights/v1_beta/item_sales/search?q=shirt&amp;category_ids=137084 The list of eBay category IDs is not published and category IDs are not the same across all the eBay marketplaces. You can use the following techniques to find a category by site: For the US marketplace, use the Category Changes page. Use the Taxonomy API. For details see Get Categories for Buy APIs. Usage: This field can have one category ID or a comma separated list of IDs. You can use category_ids by itself or use it with any combination of the gtin, epid, and q fields, which gives you additional control over the result set. Restrictions: Partners will be given a list of categories they can use. To use a top-level (L1) category, you must also include the q, or gtin, or epid query parameter. Maximum number of categories: 4
        :param str epid: The ePID is the eBay product identifier of a product from the eBay product catalog. This field limits the results to only items in the specified ePID. /buy/marketplace-insights/v1_beta/item_sales/search?epid=241986085&amp;category_ids=168058 You can use the product_summary/search method in the Catalog API to search for the ePID of the product. Required: At least 1 category_ids Maximum: 1 epid Optional: Any combination of epid, gtin, or q
        :param str fieldgroups: This field lets you control what is to be returned in the response and accepts a comma separated list of values. The default is MATCHING_ITEMS, which returns the items that match the keyword or category specified. The other values return data that can be used to create histograms. For code examples see, aspect_filter. Valid Values: ASPECT_REFINEMENTS - This returns the aspectDistributions container, which has the dominantCategoryId, matchCount, and refinementHref for the various aspects of the items found. For example, if you searched for 'Mustang', some of the aspect would be Model Year, Exterior Color, Vehicle Mileage, etc. Note: ASPECT_REFINEMENTS are category specific. BUYING_OPTION_REFINEMENTS - This returns the buyingOptionDistributions container, which has the matchCount and refinementHref for AUCTION and FIXED_PRICE (Buy It Now) items. Note: Classified items are not supported. CATEGORY_REFINEMENTS - This returns the categoryDistributions container, which has the categories that the item is in. CONDITION_REFINEMENTS - This returns the conditionDistributions container, such as NEW, USED, etc. Within these groups are multiple states of the condition. For example, New can be New without tag, New in box, New without box, etc. MATCHING_ITEMS - This is meant to be used with one or more of the refinement values above. You use this to return the specified refinements and all the matching items. FULL - This returns all the refinement containers and all the matching items. Code so that your app gracefully handles any future changes to this list. Default: MATCHING_ITEMS
        :param str filter: This field supports multiple field filters that can be used to limit/customize the result set. The following lists the supported filters. For details and examples for all the filters, see Buy API Field Filters. buyingOptions conditionIds conditions itemLocationCountry lastSoldDate price priceCurrency The following example filters the result set by price. Note: To filter by price, price and priceCurrency must always be used together. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone&amp;category_ids=15724&amp;filter=price:[50..500],priceCurrency:USD For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/cos:FilterField
        :param str gtin: This field lets you search by the Global Trade Item Number of the item as defined by https://www.gtin.info. This can be a UPC (Universal Product Code), EAN (European Article Number), or an ISBN (International Standard Book Number) value. /buy/marketplace-insights/v1_beta/item_sales/search?gtin=241986085&amp;category_ids=9355 Required: At least 1 category_ids Maximum: 1 gtin Optional: Any combination of epid, gtin, or q
        :param str limit: The number of items, from the result set, returned in a single page. Default: 50 Maximum number of items per page (limit): 200 Maximum number of items in a result set: 10,000
        :param str offset: Specifies the number of items to skip in the result set. This is used with the limit field to control the pagination of the output. If offset is 0 and limit is 10, the method will retrieve items 1-10 from the list of items returned, if offset is 10 and limit is 10, the method will retrieve items 11 thru 20 from the list of items returned. Valid Values: 0-10,000 (inclusive) Default: 0 Maximum number of items returned: 10,000
        :param str q: A string consisting of one or more keywords that are used to search for items on eBay. The keywords are handled as follows: If the keywords are separated by a comma, it is treated as an AND. In the following example, the query returns items that have iphone AND ipad. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone,ipad&amp;category_ids=15724 If the keywords are separated by a space, it is treated as an OR. In the following examples, the query returns items that have iphone OR ipad. /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone&amp;category_ids=15724&nbsp;ipad /buy/marketplace-insights/v1_beta/item_sales/search?q=iphone,&nbsp;ipad&amp;category_ids=15724 Restriction: The * wildcard character is not allowed in this field. Required: At least 1 category_ids Optional: Any combination of epid, gtin, or q
        :param str sort: This field specifies the order and the field name to use to sort the items. To sort in descending order use - before the field name. Currently, you can only sort by price (in ascending or descending order). If no sort parameter is submitted, the result set is sorted by &quot;Best Match&quot;. The following are examples of using the sort query parameter. Sort Result &amp;sort=price Sorts by price in ascending order (lowest price first) &amp;sort=-price Sorts by price in descending order (highest price first) Default: ascending For implementation help, refer to eBay API documentation at https://developer.ebay.com/api-docs/buy/marketplace_insights/types/cos:SortField
        :return: SalesHistoryPagedCollection
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['aspect_filter', 'category_ids', 'epid', 'fieldgroups', 'filter', 'gtin', 'limit', 'offset', 'q', 'sort']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'aspect_filter' in params:
            query_params.append(('aspect_filter', params['aspect_filter']))  # noqa: E501
        if 'category_ids' in params:
            query_params.append(('category_ids', params['category_ids']))  # noqa: E501
        if 'epid' in params:
            query_params.append(('epid', params['epid']))  # noqa: E501
        if 'fieldgroups' in params:
            query_params.append(('fieldgroups', params['fieldgroups']))  # noqa: E501
        if 'filter' in params:
            query_params.append(('filter', params['filter']))  # noqa: E501
        if 'gtin' in params:
            query_params.append(('gtin', params['gtin']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501
        if 'q' in params:
            query_params.append(('q', params['q']))  # noqa: E501
        if 'sort' in params:
            query_params.append(('sort', params['sort']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/item_sales/search', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SalesHistoryPagedCollection',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
154
2,002
0.760547
3,827
24,794
4.863339
0.124902
0.021277
0.034279
0.028369
0.886525
0.875779
0.870943
0.86686
0.862992
0.862992
0
0.017335
0.176373
24,794
160
2,003
154.9625
0.89408
0.850165
0
0
0
0
0.171662
0.028156
0
0
0
0
0
1
0.039474
false
0
0.052632
0
0.144737
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
5cbc9522d9a5a86115abcf2c7bf63837e436bb7f
1,354
py
Python
tests/test_415.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
tests/test_415.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
tests/test_415.py
sungho-joo/leetcode2github
ce7730ef40f6051df23681dd3c0e1e657abba620
[ "MIT" ]
null
null
null
#!/usr/bin/env python
import pytest

"""
Test 415. Add Strings
"""


@pytest.fixture(scope="session")
def init_variables_415():
    from src.leetcode_415_add_strings import Solution

    solution = Solution()

    def _init_variables_415():
        return solution

    yield _init_variables_415


class TestClass415:
    def test_solution_0(self, init_variables_415):
        assert init_variables_415().addStrings("11", "123") == "134"

    def test_solution_1(self, init_variables_415):
        assert init_variables_415().addStrings("456", "77") == "533"

    def test_solution_2(self, init_variables_415):
        assert init_variables_415().addStrings("0", "0") == "0"


#!/usr/bin/env python
import pytest

"""
Test 415. Add Strings
"""


@pytest.fixture(scope="session")
def init_variables_415():
    from src.leetcode_415_add_strings import Solution

    solution = Solution()

    def _init_variables_415():
        return solution

    yield _init_variables_415


class TestClass415:
    def test_solution_0(self, init_variables_415):
        assert init_variables_415().addStrings("11", "123") == "134"

    def test_solution_1(self, init_variables_415):
        assert init_variables_415().addStrings("456", "77") == "533"

    def test_solution_2(self, init_variables_415):
        assert init_variables_415().addStrings("0", "0") == "0"
21.492063
68
0.692762
176
1,354
5
0.204545
0.265909
0.327273
0.136364
1
1
1
1
1
1
0
0.104882
0.183161
1,354
62
69
21.83871
0.690778
0.029542
0
1
0
0
0.041467
0
0
0
0
0
0.2
1
0.333333
false
0
0.133333
0.066667
0.6
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
11
5cbe959a079d0f720bf726d3eb9cb46a854cd58f
44
py
Python
generator/__init__.py
crabmandable/cereal-pack
22674cbb0369df786df858e247f50ab9edcfe8b2
[ "MIT" ]
null
null
null
generator/__init__.py
crabmandable/cereal-pack
22674cbb0369df786df858e247f50ab9edcfe8b2
[ "MIT" ]
null
null
null
generator/__init__.py
crabmandable/cereal-pack
22674cbb0369df786df858e247f50ab9edcfe8b2
[ "MIT" ]
null
null
null
from . import generate
from . import parser
14.666667
22
0.772727
6
44
5.666667
0.666667
0.588235
0
0
0
0
0
0
0
0
0
0
0.181818
44
2
23
22
0.944444
0
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
5ce53ef9d963f60e66cc61b7eb187ab29d0b9545
46
py
Python
python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportsInPackage/a.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportsInPackage/a.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportsInPackage/a.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def g(x):
    return x


def h(x):
    return x
9.2
12
0.521739
10
46
2.4
0.5
0.583333
0.666667
0
0
0
0
0
0
0
0
0
0.347826
46
5
13
9.2
0.8
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
1
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
7a426c37b275a5ddb3527f83269cd1955f361070
1,753
py
Python
python/euler/8.py
1m0r74l17y/random-files
46dd697b4748ba355f647f02d3625ebd3d2b3014
[ "MIT" ]
null
null
null
python/euler/8.py
1m0r74l17y/random-files
46dd697b4748ba355f647f02d3625ebd3d2b3014
[ "MIT" ]
null
null
null
python/euler/8.py
1m0r74l17y/random-files
46dd697b4748ba355f647f02d3625ebd3d2b3014
[ "MIT" ]
null
null
null
'''
Find the greatest product of thirteen consecutive digits in the 1000-digit number
'''
import time

start = time.time()

num = '7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450'

biggest = 0
i = 0
while i < len(num) - 12:
    one = int(num[i])
    two = int(num[i+1])
    thr = int(num[i+2])
    fou = int(num[i+3])
    fiv = int(num[i+4])
    six = int(num[i+5])
    sev = int(num[i+6])
    eig = int(num[i+7])
    nin = int(num[i+8])
    ten = int(num[i+9])
    ele = int(num[i+10])
    twe = int(num[i+11])
    thi = int(num[i+12])
    product = one*two*thr*fou*fiv*six*sev*eig*nin*ten*ele*twe*thi
    if product > biggest:
        biggest = product
    i = i + 1

print(biggest)

elapsed = (time.time() - start)
print("This code took: " + str(elapsed) + " seconds")
51.558824
1,009
0.801483
128
1,753
10.976563
0.4375
0.055516
0.064769
0
0
0
0
0
0
0
0
0.664504
0.120936
1,753
33
1,010
53.121212
0.247242
0.043925
0
0
0
0
0.6263
0.611621
0
1
0
0
0
1
0
false
0
0.038462
0
0.038462
0.076923
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
7
8fe7e8b720f671a78d7a8b3cca47f8294fc488d5
113
py
Python
handler.py
cfk1996/teacher-tree
dc03a464a96f7fbd8dc8b289507045a14ba73c4e
[ "MIT" ]
null
null
null
handler.py
cfk1996/teacher-tree
dc03a464a96f7fbd8dc8b289507045a14ba73c4e
[ "MIT" ]
null
null
null
handler.py
cfk1996/teacher-tree
dc03a464a96f7fbd8dc8b289507045a14ba73c4e
[ "MIT" ]
null
null
null
# -*-coding: utf-8 -*-
'''Request handler base class
'''

import tornado.web


class BaseHandler(tornado.web.RequestHandler):
    pass
11.3
46
0.654867
13
113
5.692308
0.846154
0.27027
0
0
0
0
0
0
0
0
0
0.010526
0.159292
113
9
47
12.555556
0.768421
0.230089
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
89166028cb6c2fee5c3c10de82c27e1c407ffb14
1,874
py
Python
recipes/stages/_base_/data/pipelines/selfsl.py
openvinotoolkit/model_preparation_algorithm
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
[ "Apache-2.0" ]
null
null
null
recipes/stages/_base_/data/pipelines/selfsl.py
openvinotoolkit/model_preparation_algorithm
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
[ "Apache-2.0" ]
null
null
null
recipes/stages/_base_/data/pipelines/selfsl.py
openvinotoolkit/model_preparation_algorithm
8d36bf5944837b7a3d22fc2c3a4cb93423619fc2
[ "Apache-2.0" ]
null
null
null
__img_norm_cfg = dict(mean=None, std=None)
__resize_target_size = -1

train_pipeline_v0 = [
    dict(type='RandomResizedCrop', size=__resize_target_size),
    dict(type='RandomHorizontalFlip'),
    dict(
        type='RandomAppliedTrans',
        transforms=[
            dict(
                type='ColorJitter',
                brightness=0.4,
                contrast=0.4,
                saturation=0.2,
                hue=0.1)
        ],
        p=0.8),
    dict(type='RandomGrayscale', p=0.2),
    dict(
        type='RandomAppliedTrans',
        transforms=[
            dict(
                type='GaussianBlur',
                sigma_min=0.1,
                sigma_max=2.0)
        ],
        p=1.),
    dict(type='RandomAppliedTrans', transforms=[dict(type='Solarization')], p=0.),
    dict(type='ToNumpy'),
    dict(type='Normalize', **__img_norm_cfg),
    dict(type='ImageToTensor', keys=['img']),
    dict(type='Collect', keys=['img'])
]

train_pipeline_v1 = [
    dict(type='RandomResizedCrop', size=__resize_target_size),
    dict(type='RandomHorizontalFlip'),
    dict(
        type='RandomAppliedTrans',
        transforms=[
            dict(
                type='ColorJitter',
                brightness=0.4,
                contrast=0.4,
                saturation=0.2,
                hue=0.1)
        ],
        p=0.8),
    dict(type='RandomGrayscale', p=0.2),
    dict(
        type='RandomAppliedTrans',
        transforms=[
            dict(
                type='GaussianBlur',
                sigma_min=0.1,
                sigma_max=2.0)
        ],
        p=0.1),
    dict(type='RandomAppliedTrans', transforms=[dict(type='Solarization')], p=0.2),
    dict(type='ToNumpy'),
    dict(type='Normalize', **__img_norm_cfg),
    dict(type='ImageToTensor', keys=['img']),
    dict(type='Collect', keys=['img'])
]
27.558824
62
0.511206
186
1,874
4.978495
0.22043
0.224622
0.168467
0.233261
0.916847
0.915767
0.915767
0.915767
0.915767
0.915767
0
0.033118
0.339381
1,874
68
63
27.558824
0.714863
0
0
0.848485
0
0
0.1952
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
64e6d49ae27896259a23034583b18c70b9c9f837
144
py
Python
nagios_registration/tests.py
k24dizzle/nagios_registration
be18dbadd2c08def81e795e4afe2fe2cf41775cf
[ "Apache-2.0" ]
1
2021-04-27T02:04:10.000Z
2021-04-27T02:04:10.000Z
nagios_registration/tests.py
k24dizzle/nagios_registration
be18dbadd2c08def81e795e4afe2fe2cf41775cf
[ "Apache-2.0" ]
null
null
null
nagios_registration/tests.py
k24dizzle/nagios_registration
be18dbadd2c08def81e795e4afe2fe2cf41775cf
[ "Apache-2.0" ]
null
null
null
from django.test import TestCase

from nagios_registration.test.file_output import TestFile
from nagios_registration.test.views import TestViews
36
57
0.881944
20
144
6.2
0.6
0.16129
0.354839
0.419355
0
0
0
0
0
0
0
0
0.083333
144
3
58
48
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
8f315544f58bfdc707f17efd003ad48ecf38364a
48
py
Python
Florence/BoundaryCondition/__init__.py
jdlaubrie/florence
830dca4a34be00d6e53cbec3007c10d438b27f57
[ "MIT" ]
65
2017-08-04T10:21:13.000Z
2022-02-21T21:45:09.000Z
Florence/BoundaryCondition/__init__.py
jdlaubrie/florence
830dca4a34be00d6e53cbec3007c10d438b27f57
[ "MIT" ]
6
2018-06-03T02:29:20.000Z
2022-01-18T02:30:22.000Z
Florence/BoundaryCondition/__init__.py
jdlaubrie/florence
830dca4a34be00d6e53cbec3007c10d438b27f57
[ "MIT" ]
10
2018-05-30T09:44:10.000Z
2021-05-18T08:06:51.000Z
from .BoundaryCondition import BoundaryCondition
48
48
0.916667
4
48
11
0.75
0
0
0
0
0
0
0
0
0
0
0
0.0625
48
1
48
48
0.977778
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
8f3812b48a1ad7271b831278d39fd2844b33b701
3,968
py
Python
apps/snippet/api/mock/mock_snippet_views.py
zavanton123/coderators
55f860689ad48409bb4a1460c10e33694fed1b8a
[ "MIT" ]
null
null
null
apps/snippet/api/mock/mock_snippet_views.py
zavanton123/coderators
55f860689ad48409bb4a1460c10e33694fed1b8a
[ "MIT" ]
null
null
null
apps/snippet/api/mock/mock_snippet_views.py
zavanton123/coderators
55f860689ad48409bb4a1460c10e33694fed1b8a
[ "MIT" ]
null
null
null
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView


class MockCategoriesApiView(APIView):

    def get(self, request, *args, **kwargs):
        cat1 = {
            'name': 'Some-category',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        cat2 = {
            'name': 'Some-category',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        data = [cat1, cat2]
        return Response(data)

    def post(self, request, *args, **kwargs):
        return Response(status=status.HTTP_201_CREATED)


class MockCategoryApiView(APIView):

    def get(self, request, *args, **kwargs):
        data = {
            'name': 'Some-category',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        return Response(data)

    def put(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)


class MockTagsApiView(APIView):

    def get(self, request, *args, **kwargs):
        tag1 = {
            'name': 'Some-tag',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        tag2 = {
            'name': 'Some-tag',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        data = [tag1, tag2]
        return Response(data)

    def post(self, request, *args, **kwargs):
        return Response(status=status.HTTP_201_CREATED)


class MockTagApiView(APIView):

    def get(self, request, *args, **kwargs):
        data = {
            'name': 'Some-tag',
            'slug': 'some-slug',
            'created-at': 'some-time',
            'updated-at': 'some-time'
        }
        return Response(data)

    def put(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)


class MockSnippetsApiView(APIView):

    def get(self, request, *args, **kwargs):
        snippet1 = {
            'title': 'Some Title',
            'content': 'Some Content',
            'category': 'Some Category',
            'tags': [
                'Tag One',
                'Tag Two',
            ],
            'author': 'https://127.0.0.1:9999?api/users/123',
            'published_at': 'some-time',
            'updated_at': 'some-time'
        }
        snippet2 = snippet1
        data = [snippet1, snippet2]
        return Response(data)

    def post(self, request, *args, **kwargs):
        return Response(status=status.HTTP_201_CREATED)


class MockSnippetApiView(APIView):

    def get(self, request, *args, **kwargs):
        snippet1 = {
            'title': 'Some Title',
            'content': 'Some Content',
            'category': 'Some Category',
            'tags': [
                'Tag One',
                'Tag Two',
            ],
            'author': 'https://127.0.0.1:9999?api/users/123',
            'published_at': 'some-time',
            'updated_at': 'some-time'
        }
        return Response(snippet1)

    def put(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def patch(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, *args, **kwargs):
        return Response(status=status.HTTP_204_NO_CONTENT)
29.61194
61
0.545363
421
3,968
5.045131
0.144893
0.09322
0.127119
0.177966
0.863465
0.863465
0.863465
0.83145
0.83145
0.824859
0
0.028154
0.310736
3,968
133
62
29.834586
0.748446
0
0
0.761468
0
0
0.177167
0
0
0
0
0
0
1
0.165138
false
0
0.027523
0.110092
0.412844
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
9
8f6f85951312c6e8a73447fd819c053b16dbf640
3,631
py
Python
spider.py
dlb-shy/baidu_hs
830d58879ddfb9014ff7783b0a97e019e22b1538
[ "Apache-2.0" ]
null
null
null
spider.py
dlb-shy/baidu_hs
830d58879ddfb9014ff7783b0a97e019e22b1538
[ "Apache-2.0" ]
null
null
null
spider.py
dlb-shy/baidu_hs
830d58879ddfb9014ff7783b0a97e019e22b1538
[ "Apache-2.0" ]
null
null
null
# # Note details
# import requests
#
# headers = {
#     'Host': 'www.xiaohongshu.com',
#     'asid': '202109064127e42cb0a201ebc4f9c00c',
#     'x-sign': 'X880f26f7d10297673687b8dd698c7831',
#     'x-b3-traceid': 'bf5b291fb5699ce9',
#     'referer': 'https://smartapps.cn/KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi/2.35.16/page-frame.html',
#     'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/76.0.3809.89 Mobile Safari/537.36 T7/12.23 swan/2.35.0 swan-baiduboxapp/12.23.0.11 baiduboxapp/12.23.0.11 (Baidu; P1 6.0)',
#     'x-bd-traceid': '03e47248d5104788a0a6d18015eacf28',
# }
#
# response = requests.get('https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/note/61331521000000002103405c', headers=headers)


# # Comments
# import requests
#
# headers = {
#     'Host': 'www.xiaohongshu.com',
#     'asid': '202109064127e42cb0a201ebc4f9c00c',
#     'x-sign': 'Xb080d995f49c0e3f4c81f6316548007b',
#     'x-b3-traceid': '9a09cf31a1029930',
#     'referer': 'https://smartapps.cn/KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi/2.35.16/page-frame.html',
#     'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/76.0.3809.89 Mobile Safari/537.36 T7/12.23 swan/2.35.0 swan-baiduboxapp/12.23.0.11 baiduboxapp/12.23.0.11 (Baidu; P1 6.0)',
#     'x-bd-traceid': 'd14f5b1407a74b7c8cb7020c9aefe637',
# }
#
# params = (
#     ('endId', ''),
#     ('hot', 'no'),
#     ('pageSize', '2'),
# )
#
# response = requests.get('https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/61331521000000002103405c/comments', headers=headers, params=params)
#
# # NB. Original query string below. It seems impossible to parse and
# # reproduce query strings 100% accurately so the one below is given
# # in case the reproduced version is not "correct".
# # response = requests.get('https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/61331521000000002103405c/comments?endId=&hot=no&pageSize=2', headers=headers)


# # User details
# import requests
#
# headers = {
#     'Host': 'www.xiaohongshu.com',
#     'asid': '202109064127e42cb0a201ebc4f9c00c',
#     'x-sign': 'Xb4850ed626be6848c03fe05d5d21a544',
#     'x-b3-traceid': '5011521b164a1228',
#     'referer': 'https://smartapps.cn/KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi/2.35.16/page-frame.html',
#     'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/76.0.3809.89 Mobile Safari/537.36 T7/12.23 swan/2.35.0 swan-baiduboxapp/12.23.0.11 baiduboxapp/12.23.0.11 (Baidu; P1 6.0)',
#     'x-bd-traceid': 'caf5a17134a3434ba3a9a8b06e7a41a1',
# }
#
# response = requests.get('https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/user/6004ddc5000000000101e58d', headers=headers)


# Note list
import requests

headers = {
    'Host': 'www.xiaohongshu.com',
    'asid': '202109064127e42cb0a201ebc4f9c00c',
    'x-sign': 'X8647f6658a6fd79a7a20ea6efe24b9e4',
    'x-b3-traceid': 'bc66794e90ad8627',
    'referer': 'https://smartapps.cn/KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi/2.35.16/page-frame.html',
    'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/76.0.3809.89 Mobile Safari/537.36 T7/12.23 swan/2.35.0 swan-baiduboxapp/12.23.0.11 baiduboxapp/12.23.0.11 (Baidu; P1 6.0)',
    'x-bd-traceid': 'c87537dcdc314c849b32e1a28eb4cd50',
}

response = requests.get('https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/user/6004ddc5000000000101e58d/notes?page=1&pageSize=10', headers=headers)
print(response.text)
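A hedged sketch of paginating the active notes endpoint with requests' params argument instead of a hand-built query string; the stop condition and the 'data' key are assumptions about the response payload, not verified against this API:

import requests

# Stub: in practice, copy the full headers dict from spider.py above.
headers = {'Host': 'www.xiaohongshu.com'}

base = 'https://www.xiaohongshu.com/fe_api/burdock/baidu/v2'
user_id = '6004ddc5000000000101e58d'

page = 1
while True:
    resp = requests.get(
        f'{base}/user/{user_id}/notes',
        headers=headers,
        params={'page': page, 'pageSize': 10},  # requests URL-encodes these
        timeout=10,
    )
    payload = resp.json()
    # Assumed stop condition: an empty or missing 'data' list ends the crawl.
    if not payload.get('data'):
        break
    print(page, len(payload['data']))
    page += 1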
44.82716
266
0.707519
481
3,631
5.330561
0.24948
0.018721
0.059672
0.049922
0.736349
0.721529
0.721529
0.721529
0.721529
0.721529
0
0.214576
0.119526
3,631
80
267
45.3875
0.587426
0.744148
0
0
0
0.166667
0.714452
0.170163
0
0
0
0
0
1
0
false
0
0.083333
0
0.083333
0.083333
0
0
0
null
0
0
0
0
1
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
71236f24d0ee0f9b56a272051dad27a0399904f8
15,360
py
Python
csdl/examples/ex_sum.py
LSDOlab/csdl
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
[ "MIT" ]
null
null
null
csdl/examples/ex_sum.py
LSDOlab/csdl
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
[ "MIT" ]
null
null
null
csdl/examples/ex_sum.py
LSDOlab/csdl
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
[ "MIT" ]
1
2021-10-04T19:40:32.000Z
2021-10-04T19:40:32.000Z
from csdl import Model
import csdl
import numpy as np


class ExampleSingleVector(Model):
    """
    :param var: v1
    :param var: single_vector_sum
    """
    def define(self):
        n = 3

        # Declare a vector of length 3 as input
        v1 = self.declare_variable('v1', val=np.arange(n))

        # Output the sum of all the elements of the vector v1
        self.register_output('single_vector_sum', csdl.sum(v1))


class ExampleSingleTensor(Model):
    """
    :param var: T1
    :param var: single_tensor_sum
    """
    def define(self):
        n = 3
        m = 4
        p = 5
        q = 6

        # Declare a tensor of shape 3x4x5x6 as input
        T1 = self.declare_variable('T1',
                                   val=np.arange(n * m * p * q).reshape(
                                       (n, m, p, q)))

        # Output the sum of all the elements of the tensor T1
        self.register_output('single_tensor_sum', csdl.sum(T1))


class ExampleSingleMatrix(Model):
    """
    :param var: M1
    :param var: single_matrix_sum
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Output the sum of all the elements of the matrix M1
        self.register_output('single_matrix_sum', csdl.sum(M1))


class ExampleMultipleVector(Model):
    """
    :param var: v1
    :param var: v2
    :param var: multiple_vector_sum
    """
    def define(self):
        n = 3

        # Declare a vector of length 3 as input
        v1 = self.declare_variable('v1', val=np.arange(n))

        # Declare another vector of length 3 as input
        v2 = self.declare_variable('v2', val=np.arange(n, 2 * n))

        # Output the elementwise sum of vectors v1 and v2
        self.register_output('multiple_vector_sum', csdl.sum(v1, v2))


class ExampleMultipleMatrix(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2',
                                   val=np.arange(n * m, 2 * n * m).reshape(
                                       (n, m)))

        # Output the elementwise sum of matrices M1 and M2
        self.register_output('multiple_matrix_sum', csdl.sum(M1, M2))


class ExampleMultipleTensor(Model):
    """
    :param var: T1
    :param var: T2
    :param var: multiple_tensor_sum
    """
    def define(self):
        n = 3
        m = 6
        p = 7
        q = 10

        # Declare a tensor of shape 3x6x7x10 as input
        T1 = self.declare_variable('T1',
                                   val=np.arange(n * m * p * q).reshape(
                                       (n, m, p, q)))

        # Declare another tensor of shape 3x6x7x10 as input
        T2 = self.declare_variable('T2',
                                   val=np.arange(n * m * p * q,
                                                 2 * n * m * p * q).reshape(
                                                     (n, m, p, q)))

        # Output the elementwise sum of tensors T1 and T2
        self.register_output('multiple_tensor_sum', csdl.sum(T1, T2))


class ExampleSingleMatrixAlong0(Model):
    """
    :param var: M1
    :param var: single_matrix_sum_along_0
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Output the axiswise sum of matrix M1 along axis 0 (one total per column)
        self.register_output('single_matrix_sum_along_0',
                             csdl.sum(M1, axes=(0, )))


class ExampleSingleMatrixAlong1(Model):
    """
    :param var: M1
    :param var: single_matrix_sum_along_1
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Output the axiswise sum of matrix M1 along axis 1 (one total per row)
        self.register_output('single_matrix_sum_along_1',
                             csdl.sum(M1, axes=(1, )))


class ExampleMultipleMatrixAlong0(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum_along_0
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2',
                                   val=np.arange(n * m, 2 * n * m).reshape(
                                       (n, m)))

        # Sum matrices M1 and M2 elementwise, then reduce along axis 0
        self.register_output('multiple_matrix_sum_along_0',
                             csdl.sum(M1, M2, axes=(0, )))


class ExampleMultipleMatrixAlong1(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum_along_1
    """
    def define(self):
        n = 3
        m = 6

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.arange(n * m).reshape((n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2',
                                   val=np.arange(n * m, 2 * n * m).reshape(
                                       (n, m)))

        # Sum matrices M1 and M2 elementwise, then reduce along axis 1
        self.register_output('multiple_matrix_sum_along_1',
                             csdl.sum(M1, M2, axes=(1, )))


class ExampleConcatenate(Model):
    """
    :param var: single_vector_sum_1a
    :param var: single_vector_sum_1b
    :param var: single_vector_sum_2
    :param var: single_vector_sum_3
    :param var: sum_vector
    """
    def define(self):
        n = 5

        # Declare input vectors of length n, n - 1, and n
        v1 = self.declare_variable('v1', val=np.arange(n))
        v2 = self.declare_variable('v2', val=np.arange(n - 1))
        v3 = self.declare_variable('v3', val=np.zeros(n))

        # Output the sum of all the elements of each vector
        single_vector_sum_1a = csdl.sum(v1, axes=(0, ))
        single_vector_sum_1b = csdl.sum(v1)
        self.register_output('single_vector_sum_1a', single_vector_sum_1a)
        self.register_output('single_vector_sum_1b', single_vector_sum_1b)

        single_vector_sum_2 = self.register_output(
            'single_vector_sum_2', csdl.sum(v2, axes=(0, )))

        single_vector_sum_3 = csdl.sum(v3)
        self.register_output('single_vector_sum_3', single_vector_sum_3)

        sum_vector = self.create_output(name='sum_vector', shape=(3, ))
        sum_vector[0] = single_vector_sum_1a
        sum_vector[1] = single_vector_sum_2
        sum_vector[2] = single_vector_sum_3


class ExampleSingleVectorRandom(Model):
    """
    :param var: v1
    :param var: single_vector_sum
    """
    def define(self):
        n = 3
        np.random.seed(0)

        # Declare a vector of length 3 as input
        v1 = self.declare_variable('v1', val=np.random.rand(n))

        # Output the sum of all the elements of the vector v1
        self.register_output('single_vector_sum', csdl.sum(v1))


class ExampleSingleTensorRandom(Model):
    """
    :param var: T1
    :param var: single_tensor_sum
    """
    def define(self):
        n = 3
        m = 4
        p = 5
        q = 6
        np.random.seed(0)

        # Declare a tensor of shape 3x4x5x6 as input
        T1 = self.declare_variable(
            'T1', val=np.random.rand(n * m * p * q).reshape((n, m, p, q)))

        # Output the sum of all the elements of the tensor T1
        self.register_output('single_tensor_sum', csdl.sum(T1))


class ExampleSingleMatrixRandom(Model):
    """
    :param var: M1
    :param var: single_matrix_sum
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Output the sum of all the elements of the matrix M1
        self.register_output('single_matrix_sum', csdl.sum(M1))


class ExampleMultipleVectorRandom(Model):
    """
    :param var: v1
    :param var: v2
    :param var: multiple_vector_sum
    """
    def define(self):
        n = 3
        np.random.seed(0)

        # Declare a vector of length 3 as input
        v1 = self.declare_variable('v1', val=np.random.rand(n))

        # Declare another vector of length 3 as input
        v2 = self.declare_variable('v2', val=np.random.rand(n))

        # Output the elementwise sum of vectors v1 and v2
        self.register_output('multiple_vector_sum', csdl.sum(v1, v2))


class ExampleMultipleMatrixRandom(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Output the elementwise sum of matrices M1 and M2
        self.register_output('multiple_matrix_sum', csdl.sum(M1, M2))


class ExampleMultipleTensorRandom(Model):
    """
    :param var: T1
    :param var: T2
    :param var: multiple_tensor_sum
    """
    def define(self):
        n = 3
        m = 6
        p = 7
        q = 10
        np.random.seed(0)

        # Declare a tensor of shape 3x6x7x10 as input
        T1 = self.declare_variable(
            'T1', val=np.random.rand(n * m * p * q).reshape((n, m, p, q)))

        # Declare another tensor of shape 3x6x7x10 as input
        T2 = self.declare_variable(
            'T2', val=np.random.rand(n * m * p * q).reshape((n, m, p, q)))

        # Output the elementwise sum of tensors T1 and T2
        self.register_output('multiple_tensor_sum', csdl.sum(T1, T2))


class ExampleSingleMatrixAlong0Random(Model):
    """
    :param var: M1
    :param var: single_matrix_sum_along_0
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Output the axiswise sum of matrix M1 along axis 0 (one total per column)
        self.register_output('single_matrix_sum_along_0',
                             csdl.sum(M1, axes=(0, )))


class ExampleSingleMatrixAlong1Random(Model):
    """
    :param var: M1
    :param var: single_matrix_sum_along_1
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Output the axiswise sum of matrix M1 along axis 1 (one total per row)
        self.register_output('single_matrix_sum_along_1',
                             csdl.sum(M1, axes=(1, )))


class ExampleMultipleMatrixAlong0Random(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum_along_0
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Sum matrices M1 and M2 elementwise, then reduce along axis 0
        self.register_output('multiple_matrix_sum_along_0',
                             csdl.sum(M1, M2, axes=(0, )))


class ExampleMultipleMatrixAlong1Random(Model):
    """
    :param var: M1
    :param var: M2
    :param var: multiple_matrix_sum_along_1
    """
    def define(self):
        n = 3
        m = 6
        np.random.seed(0)

        # Declare a matrix of shape 3x6 as input
        M1 = self.declare_variable('M1', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Declare another matrix of shape 3x6 as input
        M2 = self.declare_variable('M2', val=np.random.rand(n * m).reshape(
            (n, m)))

        # Sum matrices M1 and M2 elementwise, then reduce along axis 1
        self.register_output('multiple_matrix_sum_along_1',
                             csdl.sum(M1, M2, axes=(1, )))


class ExampleConcatenateRandom(Model):
    """
    :param var: single_vector_sum_1a
    :param var: single_vector_sum_1b
    :param var: single_vector_sum_2
    :param var: single_vector_sum_3
    :param var: sum_vector
    """
    def define(self):
        n = 5
        np.random.seed(0)

        # Declare input vectors of length n, n - 1, and n
        v1 = self.declare_variable('v1', val=np.random.rand(n))
        v2 = self.declare_variable('v2', val=np.random.rand(n - 1))
        v3 = self.declare_variable('v3', val=np.zeros(n))

        # Output the sum of all the elements of each vector
        single_vector_sum_1a = csdl.sum(v1, axes=(0, ))
        single_vector_sum_1b = csdl.sum(v1)
        self.register_output('single_vector_sum_1a', single_vector_sum_1a)
        self.register_output('single_vector_sum_1b', single_vector_sum_1b)

        single_vector_sum_2 = self.register_output(
            'single_vector_sum_2', csdl.sum(v2, axes=(0, )))

        single_vector_sum_3 = csdl.sum(v3)
        self.register_output('single_vector_sum_3', single_vector_sum_3)

        sum_vector = self.create_output(name='sum_vector', shape=(3, ))
        sum_vector[0] = single_vector_sum_1a
        sum_vector[1] = single_vector_sum_2
        sum_vector[2] = single_vector_sum_3
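csdl.sum is used above in three modes: one argument totals all elements, several arguments sum elementwise, and the axes keyword reduces along the given axes. A plain NumPy check of those semantics (standard NumPy only, not the csdl API):

import numpy as np

n, m = 3, 6
M1 = np.arange(n * m).reshape((n, m))
M2 = np.arange(n * m, 2 * n * m).reshape((n, m))

total = M1.sum()              # like csdl.sum(M1): scalar total, here 153
elementwise = M1 + M2         # like csdl.sum(M1, M2): shape (n, m)
along_0 = M1.sum(axis=0)      # like csdl.sum(M1, axes=(0,)): shape (m,)
both = (M1 + M2).sum(axis=0)  # like csdl.sum(M1, M2, axes=(0,)): shape (m,)

assert total == 153
assert elementwise.shape == (n, m)
assert along_0.shape == (m,) and both.shape == (m,)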
29.369025
95
0.534701
1,949
15,360
4.061057
0.049769
0.060644
0.075805
0.038913
0.923437
0.923437
0.923437
0.922173
0.922173
0.916993
0
0.040263
0.366146
15,360
522
96
29.425287
0.772699
0.263021
0
0.859504
0
0
0.062315
0.019288
0
0
0
0
0
1
0.090909
false
0
0.012397
0
0.194215
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
712527dbf0029ef4b6775dad7205dbb4a7d5e0c9
11,494
py
Python
S9/EVA4/Models/Cifar10.py
VijayPrakashReddy-k/EVA
fd78ff8bda4227aebd0f5db14865d3c5a47b19b0
[ "MIT" ]
null
null
null
S9/EVA4/Models/Cifar10.py
VijayPrakashReddy-k/EVA
fd78ff8bda4227aebd0f5db14865d3c5a47b19b0
[ "MIT" ]
null
null
null
S9/EVA4/Models/Cifar10.py
VijayPrakashReddy-k/EVA
fd78ff8bda4227aebd0f5db14865d3c5a47b19b0
[ "MIT" ]
null
null
null
import torch
import torch.nn as nn
import torch.nn.functional as F

from Net import Net


class Cifar10_net1(Net):
    def __init__(self, name="Model", dropout_value=0):
        super(Cifar10_net1, self).__init__(name)

        # Input Convolution: C0
        self.conv1 = self.create_conv2d(3, 32, dropout=dropout_value)    # IN 32x32x3, OUT 32x32x32, RF = 3
        self.conv2 = self.create_conv2d(32, 32, dropout=dropout_value)   # IN 32x32x32, OUT 32x32x32, RF = 5
        self.conv3 = self.create_conv2d(32, 32, dropout=dropout_value)   # IN 32x32x32, OUT 32x32x32, RF = 7

        # Transition 1
        self.pool1 = nn.MaxPool2d(2, 2)                                  # IN 32x32x32 OUT 16x16x32, RF = 8, jump = 2

        self.conv4 = self.create_conv2d(32, 64, dropout=dropout_value)   # IN 16x16x32, OUT 16x16x64, RF = 12
        self.conv5 = self.create_conv2d(64, 64, dropout=dropout_value)   # IN 16x16x64, OUT 16x16x64, RF = 16

        # Transition 2
        self.pool2 = nn.MaxPool2d(2, 2)                                  # IN 16x16x64 OUT 8x8x64, RF = 18, jump = 4

        self.dconv1 = self.create_conv2d(64, 128, dilation=2, padding=2) # IN 8x8x64, OUT 8x8x128
        self.conv6 = self.create_conv2d(64, 128, dropout=dropout_value)  # IN 8x8x64, OUT 8x8x128, RF = 26
        self.conv7 = self.create_conv2d(128, 128, dropout=dropout_value) # IN 8x8x128, OUT 8x8x128, RF = 34

        # Transition 3
        self.pool3 = nn.MaxPool2d(2, 2)                                  # IN 8x8x128 OUT 4x4x128, RF = 38, jump = 8

        self.conv8 = self.create_depthwise_conv2d(128, 256, dropout=dropout_value)  # IN 4x4x128, OUT 4x4x256, RF = 54
        self.conv9 = self.create_depthwise_conv2d(256, 256, dropout=dropout_value)  # IN 4x4x256, OUT 4x4x256, RF = 70

        # GAP + FC
        self.gap = nn.AvgPool2d(kernel_size=(4, 4))
        self.conv10 = self.create_conv2d(256, 10, kernel_size=(1, 1), padding=0, bn=False, relu=False)  # IN: 256 OUT: 10

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.pool1(x)
        x = self.conv4(x)
        x = self.conv5(x)
        x = self.pool2(x)
        x2 = self.dconv1(x)
        x = self.conv6(x)
        x = self.conv7(x)
        x = torch.add(x, x2)
        x = self.pool3(x)
        x = self.conv8(x)
        x = self.conv9(x)
        x = self.gap(x)
        x = self.conv10(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)


class Cifar10_net2(Net):
    def __init__(self, name="Model", dropout_value=0):
        super(Cifar10_net2, self).__init__(name)

        # Input Convolution: C0
        self.conv1 = self.create_conv2d(3, 16, dropout=dropout_value)    # IN 32x32x3, OUT 32x32x16, RF = 3
        self.conv2 = self.create_conv2d(16, 16, dropout=dropout_value)   # IN 32x32x16, OUT 32x32x16, RF = 5
        self.conv3 = self.create_conv2d(16, 16, dropout=dropout_value)   # IN 32x32x16, OUT 32x32x16, RF = 7

        # Transition 1
        self.pool1 = nn.MaxPool2d(2, 2)                                  # IN 32x32x16 OUT 16x16x16, RF = 8, jump = 2

        self.conv4 = self.create_conv2d(16, 32, dropout=dropout_value)   # IN 16x16x16, OUT 16x16x32, RF = 12
        self.conv5 = self.create_conv2d(32, 32, dropout=dropout_value)   # IN 16x16x32, OUT 16x16x32, RF = 16

        # Transition 2
        self.pool2 = nn.MaxPool2d(2, 2)                                  # IN 16x16x32 OUT 8x8x32, RF = 18, jump = 4

        self.dconv1 = self.create_conv2d(32, 64, dilation=2, padding=2)  # IN 8x8x32, OUT 8x8x64
        self.conv6 = self.create_conv2d(32, 64, dropout=dropout_value)   # IN 8x8x32, OUT 8x8x64, RF = 26
        self.conv7 = self.create_conv2d(64, 64, dropout=dropout_value)   # IN 8x8x64, OUT 8x8x64, RF = 34

        # Transition 3
        self.pool3 = nn.MaxPool2d(2, 2)                                  # IN 8x8x64 OUT 4x4x64, RF = 38, jump = 8

        # self.dconv2 = self.create_conv2d(64, 128, dilation=2, padding=2)  # IN 8x8x64, OUT 8x8x128
        self.conv8 = self.create_depthwise_conv2d(64, 128, dropout=dropout_value)   # IN 4x4x64, OUT 4x4x128, RF = 54
        self.conv9 = self.create_depthwise_conv2d(128, 128, dropout=dropout_value)  # IN 4x4x128, OUT 4x4x128, RF = 70

        # GAP + FC
        self.gap = nn.AvgPool2d(kernel_size=(4, 4))
        self.conv10 = self.create_conv2d(128, 10, kernel_size=(1, 1), padding=0, bn=False, relu=False)  # IN: 128 OUT: 10

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.pool1(x)
        x = self.conv4(x)
        x = self.conv5(x)
        x = self.pool2(x)
        x2 = self.dconv1(x)
        x = self.conv6(x)
        x = self.conv7(x)
        x = torch.add(x, x2)
        x = self.pool3(x)
        x = self.conv8(x)
        x = self.conv9(x)
        x = self.gap(x)
        x = self.conv10(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)


class Cifar10_net3(Net):
    def __init__(self, name="Cfar10Net3", dropout_value=0):
        super(Cifar10_net3, self).__init__(name)

        # Input Convolution: C0
        self.conv1 = self.create_depthwise_conv2d(3, 16, dropout=dropout_value)    # IN 32x32x3, OUT 32x32x16, RF = 3
        self.conv2 = self.create_depthwise_conv2d(16, 16, dropout=dropout_value)   # IN 32x32x16, OUT 32x32x16, RF = 5
        self.conv3 = self.create_depthwise_conv2d(16, 16, dropout=dropout_value)   # IN 32x32x16, OUT 32x32x16, RF = 7

        # Transition 1
        self.pool1 = nn.MaxPool2d(2, 2)                                            # IN 32x32x16 OUT 16x16x16, RF = 8, jump = 2

        self.conv4 = self.create_depthwise_conv2d(16, 32, dropout=dropout_value)   # IN 16x16x16, OUT 16x16x32, RF = 12
        self.conv5 = self.create_depthwise_conv2d(32, 32, dropout=dropout_value)   # IN 16x16x32, OUT 16x16x32, RF = 16

        # Transition 2
        self.pool2 = nn.MaxPool2d(2, 2)                                            # IN 16x16x32 OUT 8x8x32, RF = 18, jump = 4

        self.dconv1 = self.create_depthwise_conv2d(32, 64, dilation=2, padding=2)  # IN 8x8x32, OUT 8x8x64
        self.conv6 = self.create_depthwise_conv2d(32, 64, dropout=dropout_value)   # IN 8x8x32, OUT 8x8x64, RF = 26
        self.conv7 = self.create_depthwise_conv2d(64, 64, dropout=dropout_value)   # IN 8x8x64, OUT 8x8x64, RF = 34

        # Transition 3
        self.pool3 = nn.MaxPool2d(2, 2)                                            # IN 8x8x64 OUT 4x4x64, RF = 38, jump = 8

        # self.dconv2 = self.create_conv2d(64, 128, dilation=2, padding=2)  # IN 8x8x64, OUT 8x8x128
        self.conv8 = self.create_depthwise_conv2d(64, 128, dropout=dropout_value)   # IN 4x4x64, OUT 4x4x128, RF = 54
        self.conv9 = self.create_depthwise_conv2d(128, 128, dropout=dropout_value)  # IN 4x4x128, OUT 4x4x128, RF = 70

        # GAP + FC
        self.gap = nn.AvgPool2d(kernel_size=(4, 4))
        self.conv10 = self.create_conv2d(128, 10, kernel_size=(1, 1), padding=0, bn=False, relu=False)  # IN: 128 OUT: 10

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.pool1(x)
        x = self.conv4(x)
        x = self.conv5(x)
        x = self.pool2(x)
        x2 = self.dconv1(x)
        x = self.conv6(x)
        x = self.conv7(x)
        x = torch.add(x, x2)
        x = self.pool3(x)
        x = self.conv8(x)
        x = self.conv9(x)
        x = self.gap(x)
        x = self.conv10(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)


class Cifar10_net4(Net):
    def __init__(self, name="Cfar10Net4", dropout_value=0):
        super(Cifar10_net4, self).__init__(name)

        # Input Convolution: C0
        self.conv1 = self.create_conv2d(3, 16, dropout=dropout_value)    # IN 32x32x3, OUT 32x32x16, RF = 3
        self.conv2 = self.create_conv2d(16, 16, dropout=dropout_value, dilation=2, padding=2)  # IN 32x32x16, OUT 32x32x16, RF = 7

        # Transition 1
        self.pool1 = nn.MaxPool2d(2, 2)                                  # IN 32x32x16 OUT 16x16x16, RF = 8, jump = 2

        self.conv4 = self.create_conv2d(16, 32, dropout=dropout_value)   # IN 16x16x16, OUT 16x16x32, RF = 12
        self.conv5 = self.create_conv2d(32, 32, dropout=dropout_value)   # IN 16x16x32, OUT 16x16x32, RF = 16

        # Transition 2
        self.pool2 = nn.MaxPool2d(2, 2)                                  # IN 16x16x32 OUT 8x8x32, RF = 18, jump = 4

        self.conv6 = self.create_conv2d(32, 64, dropout=dropout_value)   # IN 8x8x32, OUT 8x8x64, RF = 26
        self.conv7 = self.create_conv2d(64, 64, dropout=dropout_value)   # IN 8x8x64, OUT 8x8x64, RF = 34

        # Transition 3
        self.pool3 = nn.MaxPool2d(2, 2)                                  # IN 8x8x64 OUT 4x4x64, RF = 38, jump = 8

        # self.dconv2 = self.create_conv2d(64, 128, dilation=2, padding=2)  # IN 8x8x64, OUT 8x8x128
        self.conv8 = self.create_depthwise_conv2d(64, 128, dropout=dropout_value)   # IN 4x4x64, OUT 4x4x128, RF = 54
        self.conv9 = self.create_depthwise_conv2d(128, 128, dropout=dropout_value)  # IN 4x4x128, OUT 4x4x128, RF = 70

        # GAP + FC
        self.gap = nn.AvgPool2d(kernel_size=(4, 4))
        self.conv10 = self.create_conv2d(128, 10, kernel_size=(1, 1), padding=0, bn=False, relu=False)  # IN: 128 OUT: 10

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.pool1(x)
        x = self.conv4(x)
        x = self.conv5(x)
        x = self.pool2(x)
        x = self.conv6(x)
        x = self.conv7(x)
        x = self.pool3(x)
        x = self.conv8(x)
        x = self.conv9(x)
        x = self.gap(x)
        x = self.conv10(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)


class Cifar10_net5(Net):
    def __init__(self, name="Cfar10Net5", dropout_value=0):
        super(Cifar10_net5, self).__init__(name)

        # Input Convolution: C0
        self.conv1 = self.create_conv2d(3, 16, dropout=dropout_value)    # IN 32x32x3, OUT 32x32x16, RF = 3
        self.conv2 = self.create_conv2d(16, 16, dropout=dropout_value, dilation=2, padding=2)  # IN 32x32x16, OUT 32x32x16, RF = 7

        # Transition 1
        self.pool1 = nn.MaxPool2d(2, 2)                                  # IN 32x32x16 OUT 16x16x16, RF = 8, jump = 2

        self.conv4 = self.create_conv2d(16, 32, dropout=dropout_value, dilation=2, padding=2)  # IN 16x16x16, OUT 16x16x32, RF = 16
        # self.conv5 = self.create_conv2d(32, 32, dropout=dropout_value)  # IN 16x16x32, OUT 16x16x32, RF = 16

        # Transition 2
        self.pool2 = nn.MaxPool2d(2, 2)                                  # IN 16x16x32 OUT 8x8x32, RF = 18, jump = 4

        self.conv6 = self.create_conv2d(32, 64, dropout=dropout_value, dilation=2, padding=2)  # IN 8x8x32, OUT 8x8x64, RF = 34
        # self.conv7 = self.create_conv2d(64, 64, dropout=dropout_value)  # IN 8x8x64, OUT 8x8x64, RF = 34

        # Transition 3
        self.pool3 = nn.MaxPool2d(2, 2)                                  # IN 8x8x64 OUT 4x4x64, RF = 38, jump = 8

        # self.dconv2 = self.create_conv2d(64, 128, dilation=2, padding=2)  # IN 8x8x64, OUT 8x8x128
        self.conv8 = self.create_depthwise_conv2d(64, 128, dropout=dropout_value)   # IN 4x4x64, OUT 4x4x128, RF = 70
        self.conv9 = self.create_depthwise_conv2d(128, 128, dropout=dropout_value)  # IN 4x4x128, OUT 4x4x128, RF = 86

        # GAP + FC
        self.gap = nn.AvgPool2d(kernel_size=(4, 4))
        self.conv10 = self.create_conv2d(128, 10, kernel_size=(1, 1), padding=0, bn=False, relu=False)  # IN: 128 OUT: 10

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.pool1(x)
        x = self.conv4(x)
        # x = self.conv5(x)
        x = self.pool2(x)
        x = self.conv6(x)
        # x = self.conv7(x)
        x = self.pool3(x)
        x = self.conv8(x)
        x = self.conv9(x)
        x = self.gap(x)
        x = self.conv10(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=-1)
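A quick shape check for any of these models, assuming the repository's Net base class (which supplies create_conv2d and create_depthwise_conv2d) is importable; the batch size and dropout value are arbitrary:

import torch

# Assumes Cifar10.py and its Net dependency from this repository are on the path.
from Cifar10 import Cifar10_net1

model = Cifar10_net1(name="smoke-test", dropout_value=0.05)
model.eval()

with torch.no_grad():
    x = torch.randn(2, 3, 32, 32)  # two CIFAR-10-sized RGB images
    out = model(x)

# forward() ends in x.view(-1, 10) followed by log_softmax, so each row
# holds log-probabilities over the 10 CIFAR-10 classes.
assert out.shape == (2, 10)
assert torch.allclose(out.exp().sum(dim=1), torch.ones(2), atol=1e-5)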
41.197133
130
0.61345
1,723
11,494
3.98433
0.061521
0.022724
0.05681
0.119301
0.962418
0.929497
0.910561
0.892644
0.879388
0.869774
0
0.169689
0.26118
11,494
279
131
41.197133
0.638719
0.266574
0
0.765714
0
0
0.004804
0
0
0
0
0
0
1
0.057143
false
0
0.022857
0
0.137143
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
714391328b292cb722ebb5e04f807e4e9895ffce
120
py
Python
batchout/indexes/__init__.py
ilia-khaustov/batchout
e916a1b0bfac771e6c96d0ff2478dc3f44804a94
[ "MIT" ]
8
2019-11-05T06:54:30.000Z
2021-12-14T14:52:24.000Z
batchout/indexes/__init__.py
ilia-khaustov/batchout
e916a1b0bfac771e6c96d0ff2478dc3f44804a94
[ "MIT" ]
null
null
null
batchout/indexes/__init__.py
ilia-khaustov/batchout
e916a1b0bfac771e6c96d0ff2478dc3f44804a94
[ "MIT" ]
1
2020-05-05T09:31:14.000Z
2020-05-05T09:31:14.000Z
from batchout.indexes.base import Index
from batchout.indexes.scalar import IndexForList, IndexForObject, IndexFromList
40
79
0.866667
14
120
7.428571
0.714286
0.230769
0.365385
0
0
0
0
0
0
0
0
0
0.083333
120
2
80
60
0.945455
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
856ac39882c7a22a209ea6377e3381a418887f14
22,043
py
Python
backend/api/v1/routers/poem.py
B3zaleel/Cartedepoezii
217050d5ea1203a11a5ba9a74b3d497b5120cb9a
[ "MIT" ]
4
2022-03-19T09:25:14.000Z
2022-03-31T21:51:30.000Z
backend/api/v1/routers/poem.py
B3zaleel/Cartedepoezii
217050d5ea1203a11a5ba9a74b3d497b5120cb9a
[ "MIT" ]
2
2022-03-24T01:02:13.000Z
2022-03-26T09:50:09.000Z
backend/api/v1/routers/poem.py
B3zaleel/Cartedepoezii
217050d5ea1203a11a5ba9a74b3d497b5120cb9a
[ "MIT" ]
null
null
null
#!/usr/bin/python3
'''The poem router's module.
'''
import json
import re
import uuid
from datetime import datetime

from fastapi import APIRouter
from sqlalchemy import and_

from ..form_types import (
    PoemAddForm,
    PoemUpdateForm,
    PoemLikeForm,
    PoemDeleteForm
)
from ..database import (
    get_session,
    User,
    Comment,
    Poem,
    PoemLike,
    UserFollowing
)
from ..utils.token_handlers import AuthToken
from ..utils.pagination import extract_page

router = APIRouter(prefix='/api/v1')


@router.get('/poem')
async def get_poem(id: str, token: str):
    '''Retrieves information about a given poem.
    '''
    response = {
        'success': False,
        'message': 'Failed to find poem.'
    }
    auth_token = AuthToken.decode(token)
    user_id = auth_token.user_id if auth_token is not None else None
    db_session = get_session()
    try:
        poem = db_session.query(Poem).filter(
            Poem.id == id
        ).first()
        if poem:
            # get the relevant information related to the poem
            user = db_session.query(User).filter(
                User.id == poem.user_id
            ).first()
            if not user:
                return response
            comments = db_session.query(Comment).filter(and_(
                Comment.poem_id == id,
                Comment.comment_id == None
            )).all()
            comments_count = len(comments) if comments else 0
            likes = db_session.query(PoemLike).filter(
                PoemLike.poem_id == id
            ).all()
            likes_count = len(likes) if likes else 0
            is_liked_by_user = False
            if user_id:
                # check the current user's reaction on this poem
                poem_interaction = db_session.query(PoemLike).filter(and_(
                    PoemLike.poem_id == id,
                    PoemLike.user_id == user_id
                )).first()
                if poem_interaction:
                    is_liked_by_user = True
            response = {
                'success': True,
                'data': {
                    'id': poem.id,
                    'user': {
                        'id': user.id,
                        'name': user.name,
                        'profilePhotoId': user.profile_photo_id
                    },
                    'title': poem.title,
                    'publishedOn': poem.created_on.isoformat(),
                    'verses': json.JSONDecoder().decode(poem.text),
                    'commentsCount': comments_count,
                    'likesCount': likes_count,
                    'isLiked': is_liked_by_user
                }
            }
    finally:
        db_session.close()
    return response


@router.post('/poem')
async def add_poem(body: PoemAddForm):
    '''Creates a new poem.
    '''
    response = {
        'success': False,
        'message': 'Failed to add poem.'
    }
    # validate body data
    auth_token = AuthToken.decode(body.authToken)
    if auth_token is None or auth_token.user_id != body.userId:
        response['message'] = 'Invalid authentication token.'
        return response
    if len(body.title) > 256:
        response['message'] = 'Title is too long.'
        return response
    if len(body.verses) < 1:
        response['message'] = 'Verses is too short.'
        return response
    if not all(list(map(lambda x: len(x.strip()) > 1, body.verses))):
        response['message'] = 'Verses is too short.'
        return response
    db_session = get_session()
    try:
        gen_id = str(uuid.uuid4())
        cur_time = datetime.utcnow()
        verses_txt = json.JSONEncoder().encode(body.verses)
        poem = Poem(
            id=gen_id,
            created_on=cur_time,
            updated_on=cur_time,
            user_id=body.userId,
            title=body.title,
            text=verses_txt
        )
        db_session.add(poem)
        db_session.commit()
        response = {
            'success': True,
            'data': {
                'id': gen_id,
                'createdOn': cur_time.isoformat(),
                'repliesCount': 0,
                'likesCount': 0
            }
        }
    except Exception as ex:
        print(ex.args[0])
        db_session.rollback()
    finally:
        db_session.close()
    return response


@router.put('/poem')
async def update_poem(body: PoemUpdateForm):
    '''Edits an existing poem.
    '''
    response = {
        'success': False,
        'message': 'Failed to update poem.'
    }
    # validate body data
    auth_token = AuthToken.decode(body.authToken)
    if auth_token is None or auth_token.user_id != body.userId:
        response['message'] = 'Invalid authentication token.'
        return response
    if len(body.title) > 256:
        response['message'] = 'Title is too long.'
        return response
    if len(body.verses) < 1:
        response['message'] = 'Verses is too short.'
        return response
    if not all(list(map(lambda x: len(x.strip()) > 1, body.verses))):
        response['message'] = 'Verses is too short.'
        return response
    db_session = get_session()
    try:
        cur_time = datetime.utcnow()
        verses_txt = json.JSONEncoder().encode(body.verses)
        db_session.query(Poem).filter(Poem.id == body.poemId).update(
            {
                Poem.title: body.title,
                Poem.updated_on: cur_time,
                Poem.text: verses_txt
            },
            synchronize_session=False
        )
        db_session.commit()
        response = {
            'success': True,
            'data': {}
        }
    except Exception as ex:
        print(ex.args[0])
        db_session.rollback()
    finally:
        db_session.close()
    return response


@router.delete('/poem')
async def remove_poem(body: PoemDeleteForm):
    '''Deletes a poem.
    '''
    response = {
        'success': False,
        'message': 'Failed to remove poem.'
    }
    auth_token = AuthToken.decode(body.authToken)
    if auth_token is None or auth_token.user_id != body.userId:
        response['message'] = 'Invalid authentication token.'
        return response
    db_session = get_session()
    try:
        poem = db_session.query(Poem).filter(and_(
            Poem.id == body.poemId,
            Poem.user_id == body.userId,
        )).first()
        if poem:
            db_session.query(PoemLike).filter(
                PoemLike.poem_id == body.poemId,
            ).delete(
                synchronize_session=False
            )
            db_session.query(Comment).filter(
                Comment.poem_id == body.poemId,
            ).delete(
                synchronize_session=False
            )
            db_session.query(Poem).filter(and_(
                Poem.id == body.poemId,
                Poem.user_id == body.userId,
            )).delete(
                synchronize_session=False
            )
            db_session.commit()
            response = {
                'success': True,
                'data': {}
            }
    finally:
        db_session.close()
    return response


@router.put('/like-poem')
async def like_poem(body: PoemLikeForm):
    '''Toggles a user's reaction on a poem.
    '''
    response = {
        'success': False,
        'message': 'Failed to like poem.'
    }
    auth_token = AuthToken.decode(body.authToken)
    if auth_token is None or auth_token.user_id != body.userId:
        response['message'] = 'Invalid authentication token.'
        return response
    db_session = get_session()
    try:
        cur_usr_fav = db_session.query(PoemLike).filter(and_(
            PoemLike.user_id == auth_token.user_id,
            PoemLike.poem_id == body.poemId,
        )).first()
        if cur_usr_fav:
            # dislike poem
            db_session.query(PoemLike).filter(and_(
                PoemLike.user_id == auth_token.user_id,
                PoemLike.poem_id == body.poemId,
            )).delete(
                synchronize_session=False
            )
            db_session.commit()
            response = {
                'success': True,
                'data': {'status': False}
            }
        else:
            # like poem
            new_favourite = PoemLike(
                id=str(uuid.uuid4()),
                created_on=datetime.utcnow(),
                user_id=body.userId,
                poem_id=body.poemId,
            )
            db_session.add(new_favourite)
            db_session.commit()
            response = {
                'success': True,
                'data': {'status': True}
            }
    except Exception as ex:
        print(ex.args[0])
        db_session.rollback()
    finally:
        db_session.close()
    return response


@router.get('/poems-user-created')
async def get_created_poems(userId, token='', span='', after='', before=''):
    '''Retrieves poems created by the current user.
    '''
    response = {
        'success': False,
        'message': 'Failed to find poems created by the user.'
    }
    if not userId:
        return response
    auth_token = AuthToken.decode(token)
    user_id = auth_token.user_id if auth_token is not None else None
    db_session = get_session()
    try:
        # sanitize span
        span = span.strip()
        if span and re.fullmatch(r'\d+', span) is None:
            response = {
                'success': False,
                'message': 'Invalid span type.'
            }
            db_session.close()
            return response
        span = int(span if span else '12')
        poems_created = db_session.query(Poem).filter(
            Poem.user_id == userId
        ).all()
        user_poems = []
        if poems_created:
            user = db_session.query(User).filter(
                User.id == userId
            ).first()
            if not user:
                return response
            for poem in poems_created:
                # retrieve information related to the current poem
                comments = db_session.query(Comment).filter(and_(
                    Comment.poem_id == poem.id,
                    Comment.comment_id == None
                )).all()
                comments_count = len(comments) if comments else 0
                likes = db_session.query(PoemLike).filter(
                    PoemLike.poem_id == poem.id
                ).all()
                likes_count = len(likes) if likes else 0
                is_liked_by_user = False
                if user_id:
                    poem_interaction = db_session.query(PoemLike).filter(and_(
                        PoemLike.poem_id == poem.id,
                        PoemLike.user_id == user_id
                    )).first()
                    if poem_interaction:
                        is_liked_by_user = True
                obj = {
                    'id': poem.id,
                    'user': {
                        'id': user.id,
                        'name': user.name,
                        'profilePhotoId': user.profile_photo_id,
                    },
                    'title': poem.title,
                    'publishedOn': poem.created_on.isoformat(),
                    'verses': json.JSONDecoder().decode(poem.text),
                    'commentsCount': comments_count,
                    'likesCount': likes_count,
                    'isLiked': is_liked_by_user
                }
                user_poems.append(obj)
        user_poems.sort(
            key=lambda x: datetime.fromisoformat(x['publishedOn']),
            reverse=True
        )
        response = {
            'success': True,
            'data': extract_page(
                user_poems,
                span,
                after,
                before,
                True,
                lambda x: x['id']
            )
        }
    finally:
        db_session.close()
    return response


@router.get('/poems-user-likes')
async def get_liked_poems(userId, token='', span='', after='', before=''):
    '''Retrieves poems liked by a given user.
    '''
    response = {
        'success': False,
        'message': 'Failed to find poems liked by the user.'
    }
    if not userId:
        return response
    auth_token = AuthToken.decode(token)
    user_id = auth_token.user_id if auth_token is not None else None
    db_session = get_session()
    try:
        # sanitize span
        span = span.strip()
        if span and re.fullmatch(r'\d+', span) is None:
            response = {
                'success': False,
                'message': 'Invalid span type.'
            }
            db_session.close()
            return response
        span = int(span if span else '12')
        likes = db_session.query(PoemLike).filter(
            PoemLike.user_id == userId
        ).all()
        user_poems_liked = []
        for poem_like in likes:
            # retrieve information related to the current reaction
            poem = db_session.query(Poem).filter(
                Poem.id == poem_like.poem_id
            ).first()
            user = db_session.query(User).filter(
                User.id == poem.user_id
            ).first()
            comments = db_session.query(Comment).filter(and_(
                Comment.poem_id == poem.id,
                Comment.comment_id == None
            )).all()
            comments_count = len(comments) if comments else 0
            likes = db_session.query(PoemLike).filter(
                PoemLike.poem_id == poem.id
            ).all()
            likes_count = len(likes) if likes else 0
            is_liked_by_user = False
            if user_id != userId:
                poem_interaction = db_session.query(PoemLike).filter(and_(
                    PoemLike.poem_id == poem.id,
                    PoemLike.user_id == user_id
                )).first()
                if poem_interaction:
                    is_liked_by_user = True
            else:
                is_liked_by_user = True
            obj = {
                'id': poem.id,
                'user': {
                    'id': user.id,
                    'name': user.name,
                    'profilePhotoId': user.profile_photo_id,
                },
                'title': poem.title,
                'publishedOn': poem.created_on.isoformat(),
                'verses': json.JSONDecoder().decode(poem.text),
                'commentsCount': comments_count,
                'likesCount': likes_count,
                'isLiked': is_liked_by_user
            }
            user_poems_liked.append(obj)
        user_poems_liked.sort(
            key=lambda x: datetime.fromisoformat(x['publishedOn'])
        )
        response = {
            'success': True,
            'data': extract_page(
                user_poems_liked,
                span,
                after,
                before,
                True,
                lambda x: x['id']
            )
        }
    finally:
        db_session.close()
    return response


@router.get('/poems-channel')
async def get_channel_poems(token, span='', after='', before=''):
    '''Retrieves poems for a user's timeline or home section.
    '''
    response = {
        'success': False,
        'message': 'Failed to find poems for the channel.'
    }
    auth_token = AuthToken.decode(token)
    if auth_token is None:
        response['message'] = 'Invalid authentication token.'
        return response
    user_id = auth_token.user_id
    db_session = get_session()
    try:
        # sanitize span
        span = span.strip()
        if span and re.fullmatch(r'\d+', span) is None:
            response = {
                'success': False,
                'message': 'Invalid span type.'
            }
            db_session.close()
            return response
        span = int(span if span else '12')
        followings = db_session.query(UserFollowing).filter(
            UserFollowing.follower_id == user_id
        ).all()
        max_size = 2**32 - 1
        followings_count = len(followings) + 1 if followings else 1
        poems_per_following = max_size // followings_count
        poem_users_ids = [user_id]
        if followings:
            poem_users_ids.extend(list(map(lambda x: x.following_id, followings)))
        users_poems = []
        for id in poem_users_ids:
            # fetch poems_per_following poems for each following
            poems = db_session.query(Poem).filter(
                Poem.user_id == id
            ).limit(poems_per_following).all()
            user = db_session.query(User).filter(
                User.id == id
            ).first()
            for poem in poems:
                # retrieve information related to the current poem
                comments = db_session.query(Comment).filter(and_(
                    Comment.poem_id == poem.id,
                    Comment.comment_id == None
                )).all()
                comments_count = len(comments) if comments else 0
                likes = db_session.query(PoemLike).filter(
                    PoemLike.poem_id == poem.id
                ).all()
                likes_count = len(likes) if likes else 0
                is_liked_by_user = False
                if user_id:
                    poem_interaction = db_session.query(PoemLike).filter(and_(
                        PoemLike.poem_id == poem.id,
                        PoemLike.user_id == user_id
                    )).first()
                    if poem_interaction:
                        is_liked_by_user = True
                obj = {
                    'id': poem.id,
                    'user': {
                        'id': user.id,
                        'name': user.name,
                        'profilePhotoId': user.profile_photo_id
                    },
                    'title': poem.title,
                    'publishedOn': poem.created_on.isoformat(),
                    'verses': json.JSONDecoder().decode(poem.text),
                    'commentsCount': comments_count,
                    'likesCount': likes_count,
                    'isLiked': is_liked_by_user
                }
                users_poems.append(obj)
        # stable sort based on creation time
        users_poems.sort(
            key=lambda x: datetime.fromisoformat(x['publishedOn']),
            reverse=True
        )
        response = {
            'success': True,
            'data': extract_page(
                users_poems,
                span,
                after,
                before,
                True,
                lambda x: x['id']
            )
        }
    finally:
        db_session.close()
    return response


@router.get('/poems-explore')
async def get_exploratory_poems(token, span='', after='', before=''):
    '''Retrieves poems a user can explore.
    '''
    response = {
        'success': False,
        'message': 'Failed to find poems for the user.'
    }
    auth_token = AuthToken.decode(token)
    if auth_token is None:
        response['message'] = 'Invalid authentication token.'
        return response
    user_id = auth_token.user_id if auth_token is not None else None
    db_session = get_session()
    try:
        # sanitize span
        span = span.strip()
        if span and re.fullmatch(r'\d+', span) is None:
            response = {
                'success': False,
                'message': 'Invalid span type.'
            }
            db_session.close()
            return response
        span = int(span if span else '12')
        followings = db_session.query(UserFollowing).filter(
            UserFollowing.follower_id == user_id
        ).all()
        max_poems_count = 48
        poem_users_ids = [user_id]
        if followings:
            poem_users_ids.extend(
                list(map(lambda x: x.following_id, followings))
            )
        explore_poems = []
        # fetch max_poems_count for the user from people
        # the user isn't following
        poems = db_session.query(Poem).filter(
            Poem.user_id.notin_(poem_users_ids)
        ).limit(max_poems_count).all()
        for poem in poems:
            # retrieve information related to the current poem
            user = db_session.query(User).filter(
                User.id == poem.user_id
            ).first()
            comments = db_session.query(Comment).filter(and_(
                Comment.poem_id == poem.id,
                Comment.comment_id == None
            )).all()
            comments_count = len(comments) if comments else 0
            likes = db_session.query(PoemLike).filter(
                PoemLike.poem_id == poem.id
            ).all()
            likes_count = len(likes) if likes else 0
            is_liked_by_user = False
            if user_id:
                poem_interaction = db_session.query(PoemLike).filter(and_(
                    PoemLike.poem_id == poem.id,
                    PoemLike.user_id == user_id
                )).first()
                if poem_interaction:
                    is_liked_by_user = True
            obj = {
                'id': poem.id,
                'user': {
                    'id': user.id,
                    'name': user.name,
                    'profilePhotoId': user.profile_photo_id
                },
                'title': poem.title,
                'publishedOn': poem.created_on.isoformat(),
                'verses': json.JSONDecoder().decode(poem.text),
                'commentsCount': comments_count,
                'likesCount': likes_count,
                'isLiked': is_liked_by_user
            }
            explore_poems.append(obj)
        explore_poems.sort(
            key=lambda x: x['likesCount'],
            reverse=True
        )
        response = {
            'success': True,
            'data': extract_page(
                explore_poems,
                span,
                after,
                before,
                True,
                lambda x: x['id']
            )
        }
    finally:
        db_session.close()
    return response
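A hedged sketch of exercising the GET /api/v1/poem route with FastAPI's TestClient, assuming a hypothetical app module that includes this router and a reachable database behind get_session; the import path, poem id, and token values are illustrative:

from fastapi import FastAPI
from fastapi.testclient import TestClient

# Hypothetical wiring: the repository's real app factory may differ.
from api.v1.routers.poem import router

app = FastAPI()
app.include_router(router)
client = TestClient(app)

# The route decodes the token itself and degrades to isLiked=False for
# anonymous callers, so an empty token is still an accepted request.
resp = client.get('/api/v1/poem', params={'id': 'some-poem-id', 'token': ''})
print(resp.status_code, resp.json())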
33.24736
82
0.508007
2,248
22,043
4.805605
0.092527
0.055818
0.045358
0.019254
0.819124
0.813478
0.800611
0.768305
0.714709
0.705822
0
0.003354
0.391326
22,043
662
83
33.297583
0.801819
0.027628
0
0.704319
0
0
0.076125
0
0
0
0
0
0
1
0
false
0
0.016611
0
0.064784
0.004983
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
8589d8733519ab58d88b816f23405403882994ef
108
py
Python
sites/one_drive/__init__.py
GeorgOhneH/ethz-document-fetcher
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
[ "MIT" ]
15
2020-03-17T15:43:46.000Z
2022-01-08T04:23:49.000Z
sites/one_drive/__init__.py
GeorgOhneH/ethz-document-fetcher
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
[ "MIT" ]
5
2020-03-12T10:05:27.000Z
2021-03-03T16:01:47.000Z
sites/one_drive/__init__.py
GeorgOhneH/ethz-document-fetcher
42921e5d71698a269eb54cf9d3979e4a7d88a9cf
[ "MIT" ]
2
2020-03-17T17:09:20.000Z
2020-12-28T22:59:17.000Z
from sites.one_drive.producer import producer, get_folder_name
from .get_website_url import get_website_url
36
62
0.87963
18
108
4.888889
0.611111
0.227273
0.295455
0
0
0
0
0
0
0
0
0
0.083333
108
2
63
54
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
85bc65c7d281b0d77a307384a41b5d7c31af1c21
137
py
Python
historia/pops/__init__.py
eranimo/historia
5e0b047d4bcdd534f48f8b9bf19d425b0b31a3fd
[ "MIT" ]
6
2016-04-26T18:39:36.000Z
2021-09-01T09:13:38.000Z
historia/pops/__init__.py
eranimo/historia
5e0b047d4bcdd534f48f8b9bf19d425b0b31a3fd
[ "MIT" ]
null
null
null
historia/pops/__init__.py
eranimo/historia
5e0b047d4bcdd534f48f8b9bf19d425b0b31a3fd
[ "MIT" ]
4
2016-04-10T23:47:23.000Z
2021-08-15T11:40:28.000Z
from historia.pops.enums import PopJob, PopClass
from historia.pops.models import Inventory, Pop
from historia.pops.pop_service import *
34.25
48
0.832117
20
137
5.65
0.55
0.318584
0.424779
0
0
0
0
0
0
0
0
0
0.10219
137
3
49
45.666667
0.918699
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
85bcf6935f35e48f9fd70b86e7f823623c63d66c
9,574
py
Python
prototyping/Python/test_SweepLine.py
pyvain/WebSight
d9b0201ef472c46020bb2fd75af6f867eaa66312
[ "MIT" ]
null
null
null
prototyping/Python/test_SweepLine.py
pyvain/WebSight
d9b0201ef472c46020bb2fd75af6f867eaa66312
[ "MIT" ]
null
null
null
prototyping/Python/test_SweepLine.py
pyvain/WebSight
d9b0201ef472c46020bb2fd75af6f867eaa66312
[ "MIT" ]
null
null
null
import unittest

from SweepLine import SweepLine
from ComparableSegment import ComparableSegment


class TestSweepLine(unittest.TestCase):
    # __init__
    def test__init__empty(self):
        line = SweepLine()
        self.assertTrue(line.isEmpty())

    # addSegment
    def test__addSegment__first(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        line.addSegment(s1)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [])

    def test__addSegment__equal(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(1, 1, 2, 2)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [])
        self.assertEqual(line.aboveSegments(s2), [])
        self.assertEqual(line.belowSegments(s2), [])

    def test__addSegment__above_different_y(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(1, 2, 2, 2)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s2), [])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])

    def test__addSegment__above_same_y_different_gradient(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(1, 1, 2, 3)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s2), [])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])

    def test__addSegment__below_different_y(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(1, 0, 2, 2)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [])

    def test__addSegment__below_same_y_different_gradient(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(1, 1, 2, 1)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [])

    # remove
    def test__remove__single(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        line.addSegment(s1)
        line.removeSegment(s1)
        # was `line.isEmpty` (a bound method, always truthy); call it instead
        self.assertTrue(line.isEmpty())

    def test__remove__multiple_not_equals(self):
        line = SweepLine()
        s1 = ComparableSegment(1, 2, 2, 2)
        s2 = ComparableSegment(0, 0, 2, 2)
        s3 = ComparableSegment(1, 0, 2, 2)
        line.addSegment(s2)
        line.addSegment(s1)
        line.addSegment(s3)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [s3])
        self.assertEqual(line.aboveSegments(s3), [s2])
        self.assertEqual(line.belowSegments(s3), [])
        line.removeSegment(s2)
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s3])
        self.assertEqual(line.aboveSegments(s3), [s1])
        self.assertEqual(line.belowSegments(s3), [])

    def test__remove__multiple_equals(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 3, 3)
        s2 = ComparableSegment(0, 0, 2, 2)
        s3 = ComparableSegment(1, 1, 3, 3)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        self.assertEqual(len(line.l), 3)
        self.assertTrue(s1 in line.l)
        self.assertTrue(s2 in line.l)
        self.assertTrue(s3 in line.l)
        line.removeSegment(s2)
        self.assertEqual(len(line.l), 2)
        self.assertTrue(s1 in line.l)
        self.assertFalse(s2 in line.l)
        self.assertTrue(s3 in line.l)

    # sameLevelAs
    def test__sameLevelAs__one_on_one(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        line.addSegment(s1)
        self.assertEqual(line.sameLevelAs(s1), [s1])

    # sameLevelAs
    def test__sameLevelAs__one_on_several(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 0, 1, 2)
        s3 = ComparableSegment(0, 0, 1, 3)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        self.assertEqual(line.sameLevelAs(s2), [s2])

    # sameLevelAs
    def test__sameLevelAs__all_on_several(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 0, 2, 2)
        s3 = ComparableSegment(0, 0, 3, 3)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        res = line.sameLevelAs(s2)
        self.assertEqual(len(res), 3)
        self.assertTrue(s1 in res and s2 in res and s3 in res)

    # sameLevelAs
    def test__sameLevelAs__several_on_several(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 0, 2, 2)
        s3 = ComparableSegment(0, 0, 3, 3)
        s4 = ComparableSegment(0, 0, 3, 2)
        s5 = ComparableSegment(0, 0, 2, 3)
        line.addSegment(s4)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        line.addSegment(s5)
        res = line.sameLevelAs(s2)
        self.assertEqual(len(res), 3)
        self.assertTrue(s1 in res and s2 in res and s3 in res)

    # betweenY
    def test__betweenY__empty(self):
        line = SweepLine()
        self.assertEqual(line.betweenY(0, 1, 0), [])

    def test__betweenY__all_in(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 2, 2)
        s2 = ComparableSegment(0, 1, 2, 3)
        s3 = ComparableSegment(0, 2, 2, 4)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        self.assertEqual(line.betweenY(1, 3, 1), [s1, s2, s3])

    def test_betweenY__few_in(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 0)
        s2 = ComparableSegment(0, 1, 1, 1)
        s3 = ComparableSegment(0, 2, 1, 2)
        s4 = ComparableSegment(0, 3, 1, 3)
        s5 = ComparableSegment(0, 4, 1, 4)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        line.addSegment(s4)
        line.addSegment(s5)
        self.assertEqual(line.betweenY(1, 3, 0), [s2, s3, s4])

    def test_betweenY__none_in(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 0)
        s2 = ComparableSegment(0, 1, 1, 1)
        s3 = ComparableSegment(0, 2, 1, 2)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        self.assertEqual(line.betweenY(1.5, 1.75, 0), [])

    # revertOrder
    def test__revertOrder_nothing_in_between(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 1, 1, 0)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s2), [])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])
        line.revertOrder(0.5, [s1, s2])
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [])

    def test__revertOrder__2_segments(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 1, 1, 0)
        line.addSegment(s1)
        line.addSegment(s2)
        self.assertEqual(line.aboveSegments(s2), [])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])
        line.revertOrder(0.5, [s1, s2])
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [])

    def test__revertOrder__3_segments(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 0.5, 1, 0.5)
        s3 = ComparableSegment(0, 1, 1, 0)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        self.assertEqual(line.aboveSegments(s3), [])
        self.assertEqual(line.belowSegments(s3), [s2])
        self.assertEqual(line.aboveSegments(s2), [s3])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])
        line.revertOrder(0.5, [s1, s2, s3])
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [s3])
        self.assertEqual(line.aboveSegments(s3), [s2])
        self.assertEqual(line.belowSegments(s3), [])

    def test__revertOrder__4_segments(self):
        line = SweepLine()
        s1 = ComparableSegment(0, 0, 1, 1)
        s2 = ComparableSegment(0, 0.25, 1, 0.75)
        s3 = ComparableSegment(0, 0.75, 1, 0.25)
        s4 = ComparableSegment(0, 1, 1, 0)
        line.addSegment(s1)
        line.addSegment(s2)
        line.addSegment(s3)
        line.addSegment(s4)
        self.assertEqual(line.aboveSegments(s4), [])
        self.assertEqual(line.belowSegments(s4), [s3])
        self.assertEqual(line.aboveSegments(s3), [s4])
        self.assertEqual(line.belowSegments(s3), [s2])
        self.assertEqual(line.aboveSegments(s2), [s3])
        self.assertEqual(line.belowSegments(s2), [s1])
        self.assertEqual(line.aboveSegments(s1), [s2])
        self.assertEqual(line.belowSegments(s1), [])
        line.revertOrder(0.5, [s1, s2, s3, s4])
        self.assertEqual(line.aboveSegments(s1), [])
        self.assertEqual(line.belowSegments(s1), [s2])
        self.assertEqual(line.aboveSegments(s2), [s1])
        self.assertEqual(line.belowSegments(s2), [s3])
        self.assertEqual(line.aboveSegments(s3), [s2])
        self.assertEqual(line.belowSegments(s3), [s4])
        self.assertEqual(line.aboveSegments(s4), [s3])
        self.assertEqual(line.belowSegments(s4), [])


if __name__ == '__main__':
    unittest.main()
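These tests already end in unittest.main(), so running "python test_SweepLine.py" executes everything; a standard-library sketch for running one named case, useful when iterating on a single SweepLine behavior:

import unittest

from test_SweepLine import TestSweepLine

# Build a suite holding one named test and run it with verbose output.
suite = unittest.TestSuite()
suite.addTest(TestSweepLine('test__addSegment__first'))
unittest.TextTestRunner(verbosity=2).run(suite)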
32.127517
61
0.709212
1,293
9,574
5.152359
0.052591
0.193636
0.233864
0.182528
0.880216
0.837887
0.794356
0.765836
0.758931
0.746022
0
0.061301
0.134427
9,574
298
62
32.127517
0.742609
0.009923
0
0.714286
0
0
0.000845
0
0
0
0
0
0.370656
1
0.084942
false
0
0.011583
0
0.100386
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
a42eb437c5e11ef3452517a9c0c39c2a9c406dbc
100
py
Python
psych_metric/__init__.py
prijatelj/bayesian_eval_ground_truth-free
c0e569c78d63beb79f5e1e727c322293c3584323
[ "MIT" ]
1
2021-12-26T05:55:46.000Z
2021-12-26T05:55:46.000Z
psych_metric/__init__.py
prijatelj/bayesian_eval_ground_truth-free
c0e569c78d63beb79f5e1e727c322293c3584323
[ "MIT" ]
null
null
null
psych_metric/__init__.py
prijatelj/bayesian_eval_ground_truth-free
c0e569c78d63beb79f5e1e727c322293c3584323
[ "MIT" ]
null
null
null
from psych_metric import datasets
from psych_metric import distrib
from psych_metric import metrics
25
33
0.88
15
100
5.666667
0.466667
0.317647
0.529412
0.741176
0
0
0
0
0
0
0
0
0.12
100
3
34
33.333333
0.965909
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
a45efaa6e9bca6ad4edcaf7a50a7ea5054045bb9
36,330
py
Python
models/inv.py
shuxiang/MT-WMS
38ef18baed6d9eddb88d43da2eeed55988410daf
[ "Apache-2.0" ]
1
2022-03-11T05:42:25.000Z
2022-03-11T05:42:25.000Z
models/inv.py
shuxiang/MT-WMS
38ef18baed6d9eddb88d43da2eeed55988410daf
[ "Apache-2.0" ]
null
null
null
models/inv.py
shuxiang/MT-WMS
38ef18baed6d9eddb88d43da2eeed55988410daf
[ "Apache-2.0" ]
null
null
null
#coding=utf8
__all__ = ['Inv', 'InvRfid', 'InvRfidTrans', 'InvTrans', 'Category', 'Good',
           'GoodMap', 'InvAdjust', 'InvMove', 'InvCount', 'InvWarn']

import os.path
import json

from sqlalchemy.sql import text
from uuid import uuid4
from datetime import datetime, timedelta
from sqlalchemy import Index, UniqueConstraint
from sqlalchemy import func, or_, and_
from werkzeug.utils import cached_property

from utils.upload import get_oss_image, save_inv_qrcode, save_inv_barcode
from utils.flask_tools import json_dump
from extensions.database import db
import settings


class Inv(db.Model):
    __tablename__ = 'inv'
    __table_args__ = (
        Index("ix_inv_sku", "sku", "location_code", "company_code"),
        Index("ix_inv_barcode", "barcode", "location_code", "company_code"),
        Index("ix_inv_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # # available stock, unavailable stock, restricted stock
    # state = db.Column(db.Enum('Y', 'N', 'L'), default='Y')
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))

    # location info
    location_code = db.Column(db.String(50))
    area_code = db.Column(db.String(50))
    workarea_code = db.Column(db.String(50))

    # goods info
    category_code = db.Column(db.String(50), default='')
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    name_en = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    brand = db.Column(db.String(20), server_default='')

    # qty = qty_alloc + qty_able;
    qty = db.Column(db.Integer, server_default='0', default=0)
    qty_alloc = db.Column(db.Integer, server_default='0', default=0)
    qty_able = db.Column(db.Integer, server_default='0', default=0)
    # frozen quantity
    qty_freeze = db.Column(db.Integer, server_default='0', default=0)

    stockin_date = db.Column(db.Date, default=db.func.current_date())
    partner_name = db.Column(db.String(50), server_default='')

    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    # inventory quality type (ZP=genuine; CC=defective; JS=machine-damaged; XS=carton-damaged; ZT=in transit; DJ=frozen)
    quality_type = db.Column(db.Enum('ZP', 'CC', 'DJ', 'ZT', 'JS', 'XS'), server_default='ZP')
    product_date = db.Column(db.Date)
    expire_date = db.Column(db.Date)
    batch_code = db.Column(db.String(50), server_default='')
    virtual_warehouse = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    # units
    unit = db.Column(db.String(20), server_default='')
    weight_unit = db.Column(db.String(10), server_default='')
    # container
    lpn = db.Column(db.String(50), server_default='', default='')

    # stockout_id referenced by the PICK of an outbound order
    refid = db.Column(db.Integer, default=0)
    refin_order_code = db.Column(db.String(50), server_default='', default='')
    # split-inventory mode; effective when inventory is split by `order_code`
    price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)

    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    @property
    def location(self):
        Location = db.M('Location')
        return Location.query.filter(and_(
            Location.code==self.location_code,
            Location.company_code==self.company_code,
            Location.warehouse_code==self.warehouse_code)).first()

    @property
    def company(self):
        Company = db.M('Company')
        return Company.query.filter_by(code=self.company_code).first()

    @property
    def warehouse(self):
        Warehouse = db.M('Warehouse')
        return Warehouse.query.filter(and_(
            Warehouse.code==self.warehouse_code,
            Warehouse.company_code==self.company_code)).first()

    @property
    def owner(self):
        Partner = db.M('Partner')
        return Partner.query.filter(and_(
            Partner.code==self.owner_code,
            Partner.company_code==self.company_code)).first()

    @property
    def area(self):
        Area = db.M('Area')
        return Area.query.filter(and_(
            Area.code==self.area_code,
            Area.company_code==self.company_code,
            Area.warehouse_code==self.warehouse_code)).first()

    @property
    def workarea(self):
        Workarea = db.M('Workarea')
        return Workarea.query.filter(and_(
            Workarea.code==self.workarea_code,
            Workarea.company_code==self.company_code,
            Workarea.warehouse_code==self.warehouse_code)).first()

    @property
    def category(self):
        Category = db.M('Category')
        return Category.query.filter(and_(
            Category.code==self.category_code,
            Category.owner_code==self.owner_code,
            Category.company_code==self.company_code)).first()

    @property
    def good(self):
        if getattr(self, '_good', None) is None:
            Good = db.M('Good')
            self._good = Good.query.filter(and_(
                Good.code==self.sku,
                Good.company_code==self.company_code,
                Good.owner_code==self.owner_code)).first()
        return self._good


class InvRfid(db.Model):
    __tablename__ = 'inv_rfid'
    __table_args__ = (
        Index("ix_invrfid_sku", "sku", "location_code", "company_code"),
        Index("ix_invrfid_rfid", "rfid", "location_code", "company_code"),
        Index("ix_invrfid_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    inv_id = db.Column(db.Integer)
    qty = db.Column(db.Integer, server_default='0', default=0)
    rfid = db.Column(db.String(255), server_default='', default='')

    # non-standard items may be measured by weight
    _weight = db.Column(db.Float(asdecimal=True, precision='15,4'), name='weight', server_default='0.00', default=0.00)
    _gross_weight = db.Column(db.Float(asdecimal=True, precision='15,4'), name='gross_weight', server_default='0.00', default=0.00)
    # non-standard items: inner quantity
    _qty_inner = db.Column(db.Integer, name='qty_inner', server_default='1', default=1)

    # pushed from upstream / generated by the system, created by users, imported by users
    source = db.Column(db.String(50), server_default='erp')
    printed = db.Column(db.Boolean, default=False, server_default='0')
    # in use 'on' / discarded 'off'
    state = db.Column(db.String(10), default='on', server_default='on')

    # location info
    location_code = db.Column(db.String(50))
    area_code = db.Column(db.String(50))
    workarea_code = db.Column(db.String(50))

    # goods info
    category_code = db.Column(db.String(50), default='')
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    name_en = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    brand = db.Column(db.String(20), server_default='')
    partner_name = db.Column(db.String(50), server_default='')

    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    quality_type = db.Column(db.Enum('ZP', 'CC', 'DJ', 'ZT', 'JS', 'XS'), server_default='ZP')
    product_date = db.Column(db.Date)
    expire_date = db.Column(db.Date)
    batch_code = db.Column(db.String(50), server_default='')
    virtual_warehouse = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    # units
    unit = db.Column(db.String(20), server_default='')
    weight_unit = db.Column(db.String(10), server_default='')
    # container
    lpn = db.Column(db.String(50), server_default='', default='')

    # inbound info ----------
    stockin_order_code = db.Column(db.String(50))  # erp_order_code
    stockin_date = db.Column(db.DateTime, default=db.func.current_timestamp())
    # system operator info
    in_user_code = db.Column(db.String(20))
    in_user_name = db.Column(db.String(20))
    # warehouse operator / material requisitioner info
    in_w_user_code = db.Column(db.String(20))
    in_w_user_name = db.Column(db.String(20))
    # end inbound info ----------

    # outbound info ----------
    stockout_order_code = db.Column(db.String(50))  # erp_order_code
    stockout_date = db.Column(db.DateTime)
    # system operator info
    out_user_code = db.Column(db.String(20))
    out_user_name = db.Column(db.String(20))
    # warehouse operator / material requisitioner info
    out_w_user_code = db.Column(db.String(20))
    out_w_user_name = db.Column(db.String(20))
    # end outbound info ----------

    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    __dump_prop__ = ('weight', 'gross_weight', 'qty_inner', )

    @property
    def inv(self):
        return Inv.query.filter(Inv.id==self.inv_id).first()

    @property
    def weight(self):
        return self._weight

    @property
    def gross_weight(self):
        return self._gross_weight

    @property
    def qty_inner(self):
        return self._qty_inner

    @weight.setter
    def weight(self, v):
        self._weight = v

    @gross_weight.setter
    def gross_weight(self, v):
        self._gross_weight = v

    @qty_inner.setter
    def qty_inner(self, v):
        self._qty_inner = v

    def get_barcode(self, company_id):
        if not os.path.exists(os.path.join(settings.UPLOAD_DIR, 'barcode', company_id, self.barcode)):
            _, path = save_inv_barcode(settings.UPLOAD_DIR, company_id, self.barcode)
        else:
            path = '/static/upload/barcode/%s/%s.png' % (company_id, self.barcode)
        return path

    def get_qrcode(self, company_id):
        if not os.path.exists(os.path.join(settings.UPLOAD_DIR, 'qrcode', company_id, self.rfid)):
            _, path = save_inv_qrcode(settings.UPLOAD_DIR, company_id, self.rfid)
        else:
            path = '/static/upload/qrcode/%s/%s.png' % (company_id, self.rfid)
        return path


# the unique-code (RFID) transaction log records inbound/outbound info only
class InvRfidTrans(db.Model):
    __tablename__ = 'inv_rfid_trans'
    __table_args__ = (
        Index("ix_invrfid_rfid", "company_code", 'warehouse_code', "owner_code", "rfid"),
        Index("ix_invrfid_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    rfid = db.Column(db.String(255), server_default='', default='')
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), server_default='')
    barcode = db.Column(db.String(50), server_default='')

    # system operator info
    user_code = db.Column(db.String(20))
    user_name = db.Column(db.String(20))
    # warehouse operator / material requisitioner info
    w_user_code = db.Column(db.String(20))
    w_user_name = db.Column(db.String(20))

    xtype = db.Column(db.String(20), default='in')  # in/out
    order_type = db.Column(db.String(20), default='produce')  # in.xtype/out.order_type
    order_code = db.Column(db.String(50), default='')

    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())


# inventory transaction log
class InvTrans(db.Model):
    __tablename__ = 'inv_trans'
    __table_args__ = (
        Index("ix_inv_trans_sku", "sku", "location_code", "company_code",),
        Index("ix_inv_trans_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))

    # location info
    location_code = db.Column(db.String(50))
    area_code = db.Column(db.String(50))

    # goods info
    category_code = db.Column(db.String(50), server_default='')
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), server_default='')
    barcode = db.Column(db.String(50), server_default='')

    before_qty = db.Column(db.Integer, server_default='0', default=0)
    change_qty = db.Column(db.Integer, server_default='0', default=0)
    after_qty = db.Column(db.Integer, server_default='0', default=0)
    price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    qty_able = db.Column(db.Integer, server_default='0', default=0)

    # stockin, stockout, move, adjustment and transfer orders
    # stockin stockout inv_move inv_adjust inv_transfer
    xtype = db.Column(db.Enum('stockin', 'stockout', 'inv_move', 'inv_adjust', 'inv_transfer'), server_default='stockout')
    # operation step
    xtype_opt = db.Column(db.Enum('alloc', 'pick', 'cancel', 'in', 'out'), default='in')

    # operation info
    order_code = db.Column(db.String(50))
    erp_order_code = db.Column(db.String(50))

    # system operator info
    user_code = db.Column(db.String(20), default='')
    user_name = db.Column(db.String(20), default='')

    # foreign key
    inventory_id = db.Column(db.Integer)

    remark = db.Column(db.String(200), server_default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())


# category table definition
class Category(db.Model):
    __tablename__ = 'inv_category'
    __table_args__ = (
        Index("ix_inv_category_code", 'code', "company_code",),
        Index("ix_inv_category_tenant", 'owner_code', "company_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    name = db.Column(db.String(50), default='')
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    remark = db.Column(db.String(200), server_default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    @property
    def company(self):
        Company = db.M('Company')
        return Company.query.filter_by(code=self.company_code).first()

    @property
    def owner(self):
        Partner = db.M('Partner')
        return Partner.query.filter(and_(
            Partner.code==self.owner_code,
            Partner.company_code==self.company_code)).first()


# goods table definition
class Good(db.Model):
    __tablename__ = 'inv_good'
    __table_args__ = (
        Index("ix_inv_good_code", "code", "company_code",),
        Index("ix_inv_good_tenant", 'owner_code', "company_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    name = db.Column(db.String(200), default='')
    name_en = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), default='')
    middle_code = db.Column(db.String(50), default='')

    # items that need production generate a production order; the rest generate a purchase order
    is_produce = db.Column(db.Boolean, default=False)
    # is_main
    has_subs = db.Column(db.Boolean, default=False)

    # specification
    spec = db.Column(db.String(100), default='', server_default='')
    # on-shelf 'on' / off-shelf 'down', deleted 'delete'
    state = db.Column(db.String(100), default='on', server_default='on')

    # length, width, height, volume (Qimen sends doubles; they are converted to strings here)
    length = db.Column(db.String(50), default='')
    width = db.Column(db.String(50), default='')
    height = db.Column(db.String(50), default='')
    volume = db.Column(db.String(50), default='')
    # net weight
    weight = db.Column(db.String(50), default='0', server_default='0')
    # gross weight
    gross_weight = db.Column(db.String(50), default='0', server_default='0')
    # weight unit kg/g
    weight_unit = db.Column(db.String(10), default='')

    index = db.Column(db.Integer, default=0, server_default='0')

    # alert thresholds: min stock, max stock
    min_qty = db.Column(db.Integer, default=0, server_default='0')
    max_qty = db.Column(db.Integer, default=0, server_default='0')

    # price; the retail price for now
    price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    # production cost
    cost_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    # most recent prices
    last_in_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    last_out_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)

    # source: erp, import, other (upstream company code)
    source = db.Column(db.String(50), server_default='erp')

    # laoa fields;
    # good id on the app side
    appid = db.Column(db.String(50), server_default='')
    # warranty period in months
    quality_month = db.Column(db.Integer, server_default='0', default=0)
    # mold amortization fee
    model_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    # shipping fee
    express_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    # tiered distributor prices
    lv1_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    lv2_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    lv3_price = db.Column(db.Float(asdecimal=True, precision='15,4'), server_default='0.00', default=0)
    # whether synced
    is_sync = db.Column(db.String(1), server_default='0')

    # image
    image_url = db.Column(db.String(255), server_default='')
    # images
    images = db.Column(db.String(1500), server_default='')
    ad_images = db.Column(db.String(1500), server_default='')

    # default storage area/location
    area_code = db.Column(db.String(50), server_default='')
    location_code = db.Column(db.String(50), server_default='')

    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    unit = db.Column(db.String(20), server_default='')

    # product parameters
    args = db.Column(db.String(500), server_default='')

    # Qimen item types: ZC=normal; FX=distribution; ZH=combo; ZP=gift; BC=packaging; HC=consumable; FL=auxiliary; XN=virtual; FS=accessory; CC=defective; OTHER=other
    item_type = db.Column(db.String(20), server_default='ZC', default='ZC')
    brand = db.Column(db.String(20), server_default='')
    category_code = db.Column(db.String(50), default='')
    # whether shelf-life management is enabled on/off
    is_shelf_life = db.Column(db.String(20), server_default='off', default='off')

    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    custom_uuid = db.Column(db.String(50), server_default='')
    version = db.Column(db.Integer)
    remark = db.Column(db.String(200), server_default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    __dump_prop__ = ('sku', 'is_main', 'images_list', 'ad_images_list', 'args_list',)

    @property
    def sku(self):
        return self.code

    @cached_property
    def company_id(self):
        return db.M('Company').query.filter_by(code=self.company_code).first().id

    @property
    def images_list(self):
        if self.images:
            return [img for img in self.images.split(',') if img]
        return []

    @property
    def ad_images_list(self):
        if self.ad_images:
            return [img for img in self.ad_images.split(',') if img]
        return []

    @property
    def args_list(self):
        return [a for a in self.args.split('\n') if a]

    @property
    def company(self):
        Company = db.M('Company')
        return Company.query.filter_by(code=self.company_code).first()

    @property
    def owner(self):
        Partner = db.M('Partner')
        return Partner.query.filter(and_(
            Partner.code==self.owner_code,
            Partner.company_code==self.company_code)).first()

    @property
    def category(self):
        Category = db.M('Category')
        return Category.query.filter(and_(
            Category.code==self.category_code,
            Category.owner_code==self.owner_code,
            Category.company_code==self.company_code)).first()

    # whether this is a main item
    @property
    def is_main(self):
        return self.has_subs
        # self.has_subs = GoodMap.query.filter(GoodMap.code==self.code, GoodMap.owner_code==self.owner_code, GoodMap.company_code==self.company_code).count() > 0
        # return self.has_subs

    # needs production
    @property
    def need_produce(self):
        return self.is_produce or self.is_main

    # whether this is a component (sub item)
    @property
    def is_sub(self):
        return GoodMap.query.filter(GoodMap.subcode==self.code, GoodMap.owner_code==self.owner_code, GoodMap.company_code==self.company_code).count()

    # component list
    @property
    def sub_goods(self):
        gm = GoodMap.query.filter(GoodMap.code==self.code, GoodMap.owner_code==self.owner_code, GoodMap.company_code==self.company_code).first()
        if gm:
            return gm.sub_goods
        return []

    @property
    def JSON(self):
        big = None
        if self.custom_uuid:
            big = db.M('Big').query.filter_by(code='JSON', subcode='inv_good__json', uuid=self.custom_uuid).first()
        return json.loads(big.blob) if big else {}

    @JSON.setter
    def JSON(self, obj):
        val = json_dump(obj)
        if self.custom_uuid:
            db.M('Big').query.filter_by(code='JSON', subcode='inv_good__json', uuid=self.custom_uuid).update({'blob': val})
        else:
            uuid = str(uuid4())
            big = db.M('Big')(company_code=self.company_code, code='JSON', subcode='inv_good__json', blob=val, uuid=uuid)
            db.session.add(big)
            self.custom_uuid = uuid

    # compute the component cost price -- for a sub item
    def calc_cost_price(self):
        return self.cost_price

    def calc_main_cost_price(self):
        gm = GoodMap.query.o_query.filter_by(code=self.code).first()
        if gm:
            self.cost_price = gm.main_cost_price
        return self.cost_price

    def get_barcode(self, company_id):
        if not os.path.exists(os.path.join(settings.UPLOAD_DIR, 'barcode', company_id, self.barcode)):
            _, path = save_inv_barcode(settings.UPLOAD_DIR, company_id, self.barcode)
        else:
            path = '/static/upload/barcode/%s/%s.png' % (company_id, self.barcode)
        return path

    def get_qrcode(self, company_id):
        if not os.path.exists(os.path.join(settings.UPLOAD_DIR, 'qrcode', company_id, self.sku)):
            _, path = save_inv_qrcode(settings.UPLOAD_DIR, company_id, self.sku)
        else:
            path = '/static/upload/qrcode/%s/%s.png' % (company_id, self.sku)
        return path


class GoodMap(db.Model):
    __tablename__ = 'inv_good_map'
    __table_args__ = (
        Index("ix_inv_good_map_code", "code", 'subcode', "company_code",),
        Index("ix_inv_good_map_tenant", 'owner_code', "company_code",),
    )
    # on import, delete the existing main/component relations first, then insert the new ones

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    name = db.Column(db.String(200), default='')
    name_en = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), default='')
    subcode = db.Column(db.String(50), default='')
    subname = db.Column(db.String(200), default='')
    subbarcode = db.Column(db.String(50), default='')
    qty = db.Column(db.Integer, server_default='1', default=1)
    owner_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    remark = db.Column(db.String(200), server_default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    @property
    def company(self):
        Company = db.M('Company')
        return Company.query.filter_by(code=self.company_code).first()

    @property
    def owner(self):
        Partner = db.M('Partner')
        return Partner.query.filter(and_(
            Partner.code==self.owner_code,
            Partner.company_code==self.company_code)).first()

    @property
    def good(self):
        return Good.query.filter_by(code=self.code, owner_code=self.owner_code, company_code=self.company_code).first()

    @property
    def sub_good(self):
        return Good.query.filter_by(code=self.subcode, owner_code=self.owner_code, company_code=self.company_code).first()

    @property
    def sub_goods(self):
        return Good.query.filter(
            Good.owner_code==self.owner_code,
            Good.company_code==self.company_code,
            Good.code==GoodMap.subcode,
            GoodMap.code==self.code,
            GoodMap.owner_code==self.owner_code,
            GoodMap.company_code==self.company_code).all()

    @property
    def main_cost_price(self):
        o = GoodMap.query.with_entities(func.sum(Good.cost_price*GoodMap.qty).label('cost_price')).filter(
            Good.owner_code==self.owner_code,
            Good.company_code==self.company_code,
            Good.code==GoodMap.subcode,
            GoodMap.code==self.code,
            GoodMap.owner_code==self.owner_code,
            GoodMap.company_code==self.company_code).first()
        return float(o.cost_price or 0) if o else 0

    @property
    def map_goods(self):
        return GoodMap.query.filter_by(code=self.code, owner_code=self.owner_code, company_code=self.company_code).all()


class InvAdjust(db.Model):
    __tablename__ = 'inv_adjust'
    __table_args__ = (
        Index("ix_inv_adjust_series", "company_code", "series_code"),
        Index("ix_inv_adjust_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    count_code = db.Column(db.String(50), default='')
    owner_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    location_code = db.Column(db.String(50))
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    qty_before = db.Column(db.Integer, default=0)  # quantity before adjustment
    qty_after = db.Column(db.Integer, default=0)  # quantity after adjustment
    qty_diff = db.Column(db.Integer, default=0)  # difference; qty_after - qty_before; qty_real - qty

    # one series can dispatch multiple adjust orders
    series_code = db.Column(db.String(50), default='')
    # count order number
    count_series_code = db.Column(db.String(50), default='')

    stockin_date = db.Column(db.Date)
    source = db.Column(db.String(50), server_default='erp')
    partner_name = db.Column(db.String(50), server_default='')

    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    quality_type = db.Column(db.Enum('ZP', 'CC', 'DJ', 'ZT', 'JS', 'XS'), server_default='ZP')
    product_date = db.Column(db.Date)
    expire_date = db.Column(db.Date)
    batch_code = db.Column(db.String(50), server_default='')
    virtual_warehouse = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    # container
    lpn = db.Column(db.String(50), server_default='', default='')

    state = db.Column(db.Enum('create', 'done', 'cancel'), server_default='create')
    user_code = db.Column(db.String(20), default='')
    user_name = db.Column(db.String(20), default='')
    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())

    @property
    def inv_count(self):
        InvCount = db.M('InvCount')
        return InvCount.query.filter(and_(
            InvCount.code==self.count_code,
            InvCount.company_code==self.company_code,
            InvCount.owner_code==self.owner_code,
            InvCount.warehouse_code==self.warehouse_code)).first()


class InvCount(db.Model):
    __tablename__ = 'inv_count'
    __table_args__ = (
        Index("ix_inv_count_series", "company_code", "series_code"),
        Index("ix_inv_count_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    owner_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    location_code = db.Column(db.String(50))
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    qty = db.Column(db.Integer, default=0)  # system quantity
    qty_real = db.Column(db.Integer, default=0)  # counted quantity
    qty_alloc = db.Column(db.Integer, default=0)  # allocated (locked) quantity

    # one series can dispatch multiple count orders
    series_code = db.Column(db.String(50), default='')
    adjust_series_code = db.Column(db.String(50), default='')

    stockin_date = db.Column(db.Date)
    source = db.Column(db.String(50), server_default='erp')
    partner_name = db.Column(db.String(50), server_default='')

    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    quality_type = db.Column(db.Enum('ZP', 'CC', 'DJ', 'ZT', 'JS', 'XS'), server_default='ZP')
    product_date = db.Column(db.Date)
    expire_date = db.Column(db.Date)
    batch_code = db.Column(db.String(50), server_default='')
    virtual_warehouse = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    # container
    lpn = db.Column(db.String(50), server_default='', default='')

    state = db.Column(db.Enum('create', 'done', 'cancel'), server_default='create')
    user_code = db.Column(db.String(20), default='')
    user_name = db.Column(db.String(20), default='')
    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())


class InvMove(db.Model):
    __tablename__ = 'inv_move'
    __table_args__ = (
        Index("ix_inv_move_series", "company_code", "series_code"),
        Index("ix_inv_move_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(50), default='')
    # move order converted from a stockin order
    stockin_order_code = db.Column(db.String(50), default='', server_default='')
    owner_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    dest_warehouse_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    location_code = db.Column(db.String(50))
    dest_location_code = db.Column(db.String(50))
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    qty = db.Column(db.Integer, default=0)
    # actual moved quantity
    qty_real = db.Column(db.Integer, default=0, server_default='0')

    # one series can dispatch multiple move orders
    series_code = db.Column(db.String(50), default='')
    # 'system' for system-generated move orders (which may move stock or pick),
    # 'user' for user-created ones, 'replenish' for replenishment moves, 'onshelf' for put-away
    move_type = db.Column(db.String(20), default='user')

    stockin_date = db.Column(db.Date)
    # 'erp', 'custom', 'import'
    source = db.Column(db.String(50), server_default='erp')
    partner_name = db.Column(db.String(50), server_default='')

    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    quality_type = db.Column(db.Enum('ZP', 'CC', 'DJ', 'ZT', 'JS', 'XS'), server_default='ZP')
    product_date = db.Column(db.Date)
    expire_date = db.Column(db.Date)
    batch_code = db.Column(db.String(50), server_default='')
    virtual_warehouse = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    # style/color/size codes
    style = db.Column(db.String(50), server_default='')
    color = db.Column(db.String(50), server_default='')
    size = db.Column(db.String(50), server_default='')
    # container
    lpn = db.Column(db.String(50), server_default='', default='')
    dest_lpn = db.Column(db.String(50), server_default='', default='')

    state = db.Column(db.Enum('create', 'done', 'doing', 'cancel'), server_default='create')
    user_code = db.Column(db.String(20), default='')
    user_name = db.Column(db.String(20), default='')
    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())


# location inventory alert
class InvWarn(db.Model):
    __tablename__ = 'inv_warn'
    __table_args__ = (
        Index("ix_inv_warn_tenant", "company_code", 'warehouse_code', "owner_code",),
    )

    id = db.Column(db.Integer, primary_key=True)
    owner_code = db.Column(db.String(50))
    warehouse_code = db.Column(db.String(50))
    company_code = db.Column(db.String(50))
    location_code = db.Column(db.String(50))
    sku = db.Column(db.String(50), server_default='')
    name = db.Column(db.String(200), default='')
    barcode = db.Column(db.String(50), server_default='')
    min_qty = db.Column(db.Integer, default=0)  # alert quantity
    max_qty = db.Column(db.Integer, default=0)  # maximum replenishment quantity
    # batch attributes
    supplier_code = db.Column(db.String(50), server_default='')
    spec = db.Column(db.String(50), server_default='')
    remark = db.Column(db.String(200), server_default='', default='')
    create_time = db.Column(db.DateTime, default=db.default_datetime())
    update_time = db.Column(db.DateTime, default=db.default_datetime(), onupdate=db.default_datetime())
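The `Inv` columns above document the invariant `qty = qty_alloc + qty_able`, and every stock movement is expected to leave a matching `InvTrans` row with before/change/after quantities. A minimal sketch of an allocation step that respects both conventions; the helper name `alloc_inv` and the session handling are illustrative assumptions, not part of the module above, and the `xtype`/`xtype_opt` values come from the enums defined on `InvTrans`:

```python
from extensions.database import db  # same import path as the models above


def alloc_inv(inv, order_code, qty):
    # Hypothetical helper: move `qty` units from available to allocated on an
    # Inv row, keeping qty = qty_alloc + qty_able, and log an InvTrans record.
    if qty <= 0 or qty > inv.qty_able:
        raise ValueError('not enough available quantity')
    inv.qty_alloc += qty
    inv.qty_able -= qty
    trans = InvTrans(
        owner_code=inv.owner_code, company_code=inv.company_code,
        warehouse_code=inv.warehouse_code, location_code=inv.location_code,
        sku=inv.sku, name=inv.name, barcode=inv.barcode,
        before_qty=inv.qty, change_qty=0, after_qty=inv.qty,  # total qty is unchanged by an allocation
        qty_able=inv.qty_able,
        xtype='stockout', xtype_opt='alloc',
        order_code=order_code, inventory_id=inv.id,
    )
    db.session.add(trans)
    db.session.commit()
    return trans
```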
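`GoodMap.main_cost_price` likewise rolls a bill of materials up by summing `Good.cost_price * GoodMap.qty` over a main item's component rows. A sketch of how a two-component mapping would resolve; the codes, prices and tenant values are made up, and an active application context with a bound session is assumed:

```python
# Hypothetical data: a main item 'DESK-1' built from four legs and one top.
db.session.add_all([
    Good(code='LEG-4', cost_price=8, owner_code='o1', company_code='c1'),
    Good(code='TOP-1', cost_price=30, owner_code='o1', company_code='c1'),
    Good(code='DESK-1', has_subs=True, owner_code='o1', company_code='c1'),
    GoodMap(code='DESK-1', subcode='LEG-4', qty=4, owner_code='o1', company_code='c1'),
    GoodMap(code='DESK-1', subcode='TOP-1', qty=1, owner_code='o1', company_code='c1'),
])
db.session.commit()

gm = GoodMap.query.filter_by(code='DESK-1').first()
print(gm.main_cost_price)  # 4*8 + 1*30 = 62.0
```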
38.041885
162
0.625736
4,825
36,330
4.516477
0.081244
0.121145
0.151432
0.165198
0.814152
0.776065
0.737794
0.712555
0.658453
0.628166
0
0.022541
0.22213
36,330
954
163
38.081761
0.748585
0.043105
0
0.56964
0
0
0.061994
0.005045
0
0
0
0
0
1
0.070423
false
0
0.018779
0.023474
0.729264
0.001565
0
0
0
null
0
0
1
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
7
2d35203f6c49c02d33f6d813348ecea7459d1525
43,299
py
Python
tb_rest_client/api/api_ce/tenant_profile_controller_api.py
samson0v/python_tb_rest_client
08ff7898740f7cec2170e85d5c3c89e222e967f7
[ "Apache-2.0" ]
30
2020-06-19T06:42:50.000Z
2021-08-23T21:16:36.000Z
tb_rest_client/api/api_ce/tenant_profile_controller_api.py
samson0v/python_tb_rest_client
08ff7898740f7cec2170e85d5c3c89e222e967f7
[ "Apache-2.0" ]
25
2021-08-30T01:17:27.000Z
2022-03-16T14:10:14.000Z
tb_rest_client/api/api_ce/tenant_profile_controller_api.py
samson0v/python_tb_rest_client
08ff7898740f7cec2170e85d5c3c89e222e967f7
[ "Apache-2.0" ]
23
2020-07-06T13:41:54.000Z
2021-08-23T21:04:50.000Z
# coding: utf-8 """ ThingsBoard REST API ThingsBoard open-source IoT platform REST API documentation. # noqa: E501 OpenAPI spec version: 3.3.3-SNAPSHOT Contact: info@thingsboard.io Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from tb_rest_client.api_client import ApiClient class TenantProfileControllerApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def delete_tenant_profile_using_delete(self, tenant_profile_id, **kwargs): # noqa: E501 """Delete Tenant Profile (deleteTenantProfile) # noqa: E501 Deletes the tenant profile. Referencing non-existing tenant profile Id will cause an error. Referencing profile that is used by the tenants will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_tenant_profile_using_delete(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_tenant_profile_using_delete_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 else: (data) = self.delete_tenant_profile_using_delete_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 return data def delete_tenant_profile_using_delete_with_http_info(self, tenant_profile_id, **kwargs): # noqa: E501 """Delete Tenant Profile (deleteTenantProfile) # noqa: E501 Deletes the tenant profile. Referencing non-existing tenant profile Id will cause an error. Referencing profile that is used by the tenants will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_tenant_profile_using_delete_with_http_info(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['tenant_profile_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_tenant_profile_using_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'tenant_profile_id' is set if ('tenant_profile_id' not in params or params['tenant_profile_id'] is None): raise ValueError("Missing the required parameter `tenant_profile_id` when calling `delete_tenant_profile_using_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'tenant_profile_id' in params: path_params['tenantProfileId'] = params['tenant_profile_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfile/{tenantProfileId}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_default_tenant_profile_info_using_get(self, **kwargs): # noqa: E501 """Get default Tenant Profile Info (getDefaultTenantProfileInfo) # noqa: E501 Fetch the default Tenant Profile Info object based. Tenant Profile Info is a lightweight object that contains only id and name of the profile. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_default_tenant_profile_info_using_get(async_req=True) >>> result = thread.get() :param async_req bool :return: EntityInfo If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_default_tenant_profile_info_using_get_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_default_tenant_profile_info_using_get_with_http_info(**kwargs) # noqa: E501 return data def get_default_tenant_profile_info_using_get_with_http_info(self, **kwargs): # noqa: E501 """Get default Tenant Profile Info (getDefaultTenantProfileInfo) # noqa: E501 Fetch the default Tenant Profile Info object based. Tenant Profile Info is a lightweight object that contains only id and name of the profile. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_default_tenant_profile_info_using_get_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: EntityInfo If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_default_tenant_profile_info_using_get" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfileInfo/default', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EntityInfo', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_profile_by_id_using_get(self, tenant_profile_id, **kwargs): # noqa: E501 """Get Tenant Profile (getTenantProfileById) # noqa: E501 Fetch the Tenant Profile object based on the provided Tenant Profile Id. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_by_id_using_get(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: TenantProfile If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_profile_by_id_using_get_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 else: (data) = self.get_tenant_profile_by_id_using_get_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 return data def get_tenant_profile_by_id_using_get_with_http_info(self, tenant_profile_id, **kwargs): # noqa: E501 """Get Tenant Profile (getTenantProfileById) # noqa: E501 Fetch the Tenant Profile object based on the provided Tenant Profile Id. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_by_id_using_get_with_http_info(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: TenantProfile If the method is called asynchronously, returns the request thread. 
""" all_params = ['tenant_profile_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_tenant_profile_by_id_using_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'tenant_profile_id' is set if ('tenant_profile_id' not in params or params['tenant_profile_id'] is None): raise ValueError("Missing the required parameter `tenant_profile_id` when calling `get_tenant_profile_by_id_using_get`") # noqa: E501 collection_formats = {} path_params = {} if 'tenant_profile_id' in params: path_params['tenantProfileId'] = params['tenant_profile_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfile/{tenantProfileId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TenantProfile', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_profile_info_by_id_using_get(self, tenant_profile_id, **kwargs): # noqa: E501 """Get Tenant Profile Info (getTenantProfileInfoById) # noqa: E501 Fetch the Tenant Profile Info object based on the provided Tenant Profile Id. Tenant Profile Info is a lightweight object that contains only id and name of the profile. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_info_by_id_using_get(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityInfo If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_profile_info_by_id_using_get_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 else: (data) = self.get_tenant_profile_info_by_id_using_get_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 return data def get_tenant_profile_info_by_id_using_get_with_http_info(self, tenant_profile_id, **kwargs): # noqa: E501 """Get Tenant Profile Info (getTenantProfileInfoById) # noqa: E501 Fetch the Tenant Profile Info object based on the provided Tenant Profile Id. Tenant Profile Info is a lightweight object that contains only id and name of the profile. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_info_by_id_using_get_with_http_info(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityInfo If the method is called asynchronously, returns the request thread. """ all_params = ['tenant_profile_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_tenant_profile_info_by_id_using_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'tenant_profile_id' is set if ('tenant_profile_id' not in params or params['tenant_profile_id'] is None): raise ValueError("Missing the required parameter `tenant_profile_id` when calling `get_tenant_profile_info_by_id_using_get`") # noqa: E501 collection_formats = {} path_params = {} if 'tenant_profile_id' in params: path_params['tenantProfileId'] = params['tenant_profile_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfileInfo/{tenantProfileId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EntityInfo', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_profile_infos_using_get(self, page_size, page, **kwargs): # noqa: E501 """Get Tenant Profiles Info (getTenantProfileInfos) # noqa: E501 Returns a page of tenant profile info objects registered in the platform. Tenant Profile Info is a lightweight object that contains only id and name of the profile. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_infos_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str text_search: The case insensitive 'startsWith' filter based on the tenant profile name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityInfo If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_profile_infos_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_tenant_profile_infos_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_tenant_profile_infos_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """Get Tenant Profiles Info (getTenantProfileInfos) # noqa: E501 Returns a page of tenant profile info objects registered in the platform. Tenant Profile Info is a lightweight object that contains only id and name of the profile. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profile_infos_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str text_search: The case insensitive 'startsWith' filter based on the tenant profile name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityInfo If the method is called asynchronously, returns the request thread. """ all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_tenant_profile_infos_using_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_profile_infos_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_tenant_profile_infos_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfileInfos{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET', path_params, query_params, 
header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataEntityInfo', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_profiles_using_get(self, page_size, page, **kwargs): # noqa: E501 """Get Tenant Profiles (getTenantProfiles) # noqa: E501 Returns a page of tenant profiles registered in the platform. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profiles_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str text_search: The case insensitive 'startsWith' filter based on the tenant profile name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataTenantProfile If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_profiles_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_tenant_profiles_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_tenant_profiles_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """Get Tenant Profiles (getTenantProfiles) # noqa: E501 Returns a page of tenant profiles registered in the platform. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_profiles_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str text_search: The case insensitive 'startsWith' filter based on the tenant profile name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataTenantProfile If the method is called asynchronously, returns the request thread. 
""" all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_tenant_profiles_using_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_profiles_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_tenant_profiles_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfiles{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataTenantProfile', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def save_tenant_profile_using_post(self, **kwargs): # noqa: E501 """Create Or update Tenant Profile (saveTenantProfile) # noqa: E501 Create or update the Tenant Profile. When creating tenant profile, platform generates Tenant Profile Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Tenant Profile Id will be present in the response. Specify existing Tenant Profile Id id to update the Tenant Profile. Referencing non-existing Tenant Profile Id will cause 'Not Found' error. Update of the tenant profile configuration will cause immediate recalculation of API limits for all affected Tenants. The **'profileData'** object is the part of Tenant Profile that defines API limits and Rate limits. You have an ability to define maximum number of devices ('maxDevice'), assets ('maxAssets') and other entities. You may also define maximum number of messages to be processed per month ('maxTransportMessages', 'maxREExecutions', etc). The '*RateLimit' defines the rate limits using simple syntax. For example, '1000:1,20000:60' means up to 1000 events per second but no more than 20000 event per minute. 
Let's review the example of tenant profile data below: ```json { \"name\": \"Default\", \"description\": \"Default tenant profile\", \"isolatedTbCore\": false, \"isolatedTbRuleEngine\": false, \"profileData\": { \"configuration\": { \"type\": \"DEFAULT\", \"maxDevices\": 0, \"maxAssets\": 0, \"maxCustomers\": 0, \"maxUsers\": 0, \"maxDashboards\": 0, \"maxRuleChains\": 0, \"maxResourcesInBytes\": 0, \"maxOtaPackagesInBytes\": 0, \"transportTenantMsgRateLimit\": \"1000:1,20000:60\", \"transportTenantTelemetryMsgRateLimit\": \"1000:1,20000:60\", \"transportTenantTelemetryDataPointsRateLimit\": \"1000:1,20000:60\", \"transportDeviceMsgRateLimit\": \"20:1,600:60\", \"transportDeviceTelemetryMsgRateLimit\": \"20:1,600:60\", \"transportDeviceTelemetryDataPointsRateLimit\": \"20:1,600:60\", \"maxTransportMessages\": 10000000, \"maxTransportDataPoints\": 10000000, \"maxREExecutions\": 4000000, \"maxJSExecutions\": 5000000, \"maxDPStorageDays\": 0, \"maxRuleNodeExecutionsPerMessage\": 50, \"maxEmails\": 0, \"maxSms\": 0, \"maxCreatedAlarms\": 1000, \"defaultStorageTtlDays\": 0, \"alarmsTtlDays\": 0, \"rpcTtlDays\": 0, \"warnThreshold\": 0 } }, \"default\": true } ``` Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.save_tenant_profile_using_post(async_req=True) >>> result = thread.get() :param async_req bool :param TenantProfile body: :return: TenantProfile If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.save_tenant_profile_using_post_with_http_info(**kwargs) # noqa: E501 else: (data) = self.save_tenant_profile_using_post_with_http_info(**kwargs) # noqa: E501 return data def save_tenant_profile_using_post_with_http_info(self, **kwargs): # noqa: E501 """Create Or update Tenant Profile (saveTenantProfile) # noqa: E501 Create or update the Tenant Profile. When creating tenant profile, platform generates Tenant Profile Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Tenant Profile Id will be present in the response. Specify existing Tenant Profile Id id to update the Tenant Profile. Referencing non-existing Tenant Profile Id will cause 'Not Found' error. Update of the tenant profile configuration will cause immediate recalculation of API limits for all affected Tenants. The **'profileData'** object is the part of Tenant Profile that defines API limits and Rate limits. You have an ability to define maximum number of devices ('maxDevice'), assets ('maxAssets') and other entities. You may also define maximum number of messages to be processed per month ('maxTransportMessages', 'maxREExecutions', etc). The '*RateLimit' defines the rate limits using simple syntax. For example, '1000:1,20000:60' means up to 1000 events per second but no more than 20000 event per minute. 
Let's review the example of tenant profile data below: ```json { \"name\": \"Default\", \"description\": \"Default tenant profile\", \"isolatedTbCore\": false, \"isolatedTbRuleEngine\": false, \"profileData\": { \"configuration\": { \"type\": \"DEFAULT\", \"maxDevices\": 0, \"maxAssets\": 0, \"maxCustomers\": 0, \"maxUsers\": 0, \"maxDashboards\": 0, \"maxRuleChains\": 0, \"maxResourcesInBytes\": 0, \"maxOtaPackagesInBytes\": 0, \"transportTenantMsgRateLimit\": \"1000:1,20000:60\", \"transportTenantTelemetryMsgRateLimit\": \"1000:1,20000:60\", \"transportTenantTelemetryDataPointsRateLimit\": \"1000:1,20000:60\", \"transportDeviceMsgRateLimit\": \"20:1,600:60\", \"transportDeviceTelemetryMsgRateLimit\": \"20:1,600:60\", \"transportDeviceTelemetryDataPointsRateLimit\": \"20:1,600:60\", \"maxTransportMessages\": 10000000, \"maxTransportDataPoints\": 10000000, \"maxREExecutions\": 4000000, \"maxJSExecutions\": 5000000, \"maxDPStorageDays\": 0, \"maxRuleNodeExecutionsPerMessage\": 50, \"maxEmails\": 0, \"maxSms\": 0, \"maxCreatedAlarms\": 1000, \"defaultStorageTtlDays\": 0, \"alarmsTtlDays\": 0, \"rpcTtlDays\": 0, \"warnThreshold\": 0 } }, \"default\": true } ``` Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.save_tenant_profile_using_post_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param TenantProfile body: :return: TenantProfile If the method is called asynchronously, returns the request thread. """ all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method save_tenant_profile_using_post" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfile', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TenantProfile', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def set_default_tenant_profile_using_post(self, tenant_profile_id, **kwargs): # noqa: E501 """Make tenant profile default (setDefaultTenantProfile) # noqa: E501 Makes specified tenant profile to be default. Referencing non-existing tenant profile Id will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.set_default_tenant_profile_using_post(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: TenantProfile If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.set_default_tenant_profile_using_post_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 else: (data) = self.set_default_tenant_profile_using_post_with_http_info(tenant_profile_id, **kwargs) # noqa: E501 return data def set_default_tenant_profile_using_post_with_http_info(self, tenant_profile_id, **kwargs): # noqa: E501 """Make tenant profile default (setDefaultTenantProfile) # noqa: E501 Makes specified tenant profile to be default. Referencing non-existing tenant profile Id will cause an error. Available for users with 'SYS_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.set_default_tenant_profile_using_post_with_http_info(tenant_profile_id, async_req=True) >>> result = thread.get() :param async_req bool :param str tenant_profile_id: A string value representing the tenant profile id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: TenantProfile If the method is called asynchronously, returns the request thread. """ all_params = ['tenant_profile_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_default_tenant_profile_using_post" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'tenant_profile_id' is set if ('tenant_profile_id' not in params or params['tenant_profile_id'] is None): raise ValueError("Missing the required parameter `tenant_profile_id` when calling `set_default_tenant_profile_using_post`") # noqa: E501 collection_formats = {} path_params = {} if 'tenant_profile_id' in params: path_params['tenantProfileId'] = params['tenant_profile_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenantProfile/{tenantProfileId}/default', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TenantProfile', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
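The '*RateLimit' strings described in the docstrings above use a compact 'count:window' syntax, with multiple windows separated by commas. Below is a minimal sketch of parsing that syntax, assuming only what the docstrings state ('1000:1,20000:60' means at most 1000 events per second and no more than 20000 per minute); `parse_rate_limit` is a hypothetical helper, not part of the generated client:

```python
# Hypothetical helper, not part of the generated client above. It parses the
# rate-limit syntax described in the tenant profile docstrings, where each
# comma-separated pair is "<max_events>:<window_seconds>".
def parse_rate_limit(spec):
    """Return a list of (max_events, window_seconds) tuples."""
    limits = []
    for pair in spec.split(','):
        count, seconds = pair.split(':')
        limits.append((int(count), int(seconds)))
    return limits


print(parse_rate_limit('1000:1,20000:60'))  # -> [(1000, 1), (20000, 60)]
```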
52.420097
2,502
0.653964
5,182
43,299
5.223852
0.068313
0.089804
0.046546
0.021278
0.974547
0.970964
0.966753
0.961692
0.957407
0.953195
0
0.025236
0.258713
43,299
825
2,503
52.483636
0.818145
0.465992
0
0.816327
0
0
0.20856
0.068071
0
0
0
0
0
1
0.038549
false
0
0.00907
0
0.104308
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
7436d72769d5af35535008d2a3fd4dd68372e08b
8,681
py
Python
2-cannab/code/zoo/models.py
remtav/SpaceNet7_Multi-Temporal_Solutions
ee535c61fc22bffa45331519239c6d1b044b1514
[ "Apache-2.0" ]
38
2021-02-18T07:04:54.000Z
2022-03-22T15:31:06.000Z
2-cannab/code/zoo/models.py
remtav/SpaceNet7_Multi-Temporal_Solutions
ee535c61fc22bffa45331519239c6d1b044b1514
[ "Apache-2.0" ]
2
2021-02-22T18:53:19.000Z
2021-06-22T20:28:06.000Z
2-cannab/code/zoo/models.py
remtav/SpaceNet7_Multi-Temporal_Solutions
ee535c61fc22bffa45331519239c6d1b044b1514
[ "Apache-2.0" ]
15
2021-02-25T17:25:40.000Z
2022-01-31T16:59:32.000Z
import numpy as np import torch from torch import nn import torch.nn.functional as F from efficientnet_pytorch import EfficientNet from efficientnet_pytorch.utils import Conv2dStaticSamePadding class ConvRelu(nn.Module): def __init__(self, in_channels, out_channels, kernel_size=3): super(ConvRelu, self).__init__() self.layer = nn.Sequential( nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, padding=1), nn.ReLU(inplace=True) ) def forward(self, x): return self.layer(x) class EfficientNet_Unet(nn.Module): def __init__(self, name='efficientnet-b0', pretrained=True, **kwargs): super(EfficientNet_Unet, self).__init__() enc_sizes = { 'efficientnet-b0': [16, 24, 40, 112, 1280], 'efficientnet-b1': [16, 24, 40, 112, 1280], 'efficientnet-b2': [16, 24, 48, 120, 1408], 'efficientnet-b3': [24, 32, 48, 136, 1536], 'efficientnet-b4': [24, 32, 56, 160, 1792], 'efficientnet-b5': [24, 40, 64, 176, 2048], 'efficientnet-b6': [32, 40, 72, 200, 2304], 'efficientnet-b7': [32, 48, 80, 224, 2560], 'efficientnet-b8': [32, 56, 88, 248, 2816] } encoder_filters = enc_sizes[name] decoder_filters = np.asarray([48, 64, 128, 160, 320]) self.conv6 = ConvRelu(encoder_filters[-1], decoder_filters[-1]) self.conv6_2 = ConvRelu(decoder_filters[-1] + encoder_filters[-2], decoder_filters[-1]) self.conv7 = ConvRelu(decoder_filters[-1], decoder_filters[-2]) self.conv7_2 = ConvRelu(decoder_filters[-2] + encoder_filters[-3], decoder_filters[-2]) self.conv8 = ConvRelu(decoder_filters[-2], decoder_filters[-3]) self.conv8_2 = ConvRelu(decoder_filters[-3] + encoder_filters[-4], decoder_filters[-3]) self.conv9 = ConvRelu(decoder_filters[-3], decoder_filters[-4]) self.conv9_2 = ConvRelu(decoder_filters[-4] + encoder_filters[-5], decoder_filters[-4]) self.conv10 = ConvRelu(decoder_filters[-4], decoder_filters[-5]) self.res = nn.Conv2d(decoder_filters[-5], 3, 1, stride=1, padding=0) self._initialize_weights() if pretrained: self.encoder = EfficientNet.from_pretrained(name) else: self.encoder = EfficientNet.from_name(name) def extract_features(self, inp): out = [] # Stem x = self.encoder._swish(self.encoder._bn0(self.encoder._conv_stem(inp))) # Blocks for idx, block in enumerate(self.encoder._blocks): drop_connect_rate = self.encoder._global_params.drop_connect_rate if drop_connect_rate: drop_connect_rate *= float(idx) / len(self.encoder._blocks) y = block(x, drop_connect_rate=drop_connect_rate) if y.size()[-1] != x.size()[-1]: out.append(x) x = y # Head x = self.encoder._swish(self.encoder._bn1(self.encoder._conv_head(x))) out.append(x) return out def forward(self, x): batch_size, C, H, W = x.shape enc1, enc2, enc3, enc4, enc5 = self.extract_features(x) dec6 = self.conv6(F.interpolate(enc5, scale_factor=2)) dec6 = self.conv6_2(torch.cat([dec6, enc4 ], 1)) dec7 = self.conv7(F.interpolate(dec6, scale_factor=2)) dec7 = self.conv7_2(torch.cat([dec7, enc3 ], 1)) dec8 = self.conv8(F.interpolate(dec7, scale_factor=2)) dec8 = self.conv8_2(torch.cat([dec8, enc2 ], 1)) dec9 = self.conv9(F.interpolate(dec8, scale_factor=2)) dec9 = self.conv9_2(torch.cat([dec9, enc1 ], 1)) dec10 = self.conv10(F.interpolate(dec9, scale_factor=2)) return self.res(dec10) def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear): m.weight.data = nn.init.kaiming_normal_(m.weight.data) if m.bias is not None: m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() class EfficientNet_Unet_Double(nn.Module): 
def __init__(self, name='efficientnet-b0', pretrained=True, **kwargs): super(EfficientNet_Unet_Double, self).__init__() enc_sizes = { 'efficientnet-b0': [16, 24, 40, 112, 1280], 'efficientnet-b1': [16, 24, 40, 112, 1280], 'efficientnet-b2': [16, 24, 48, 120, 1408], 'efficientnet-b3': [24, 32, 48, 136, 1536], 'efficientnet-b4': [24, 32, 56, 160, 1792], 'efficientnet-b5': [24, 40, 64, 176, 2048], 'efficientnet-b6': [32, 40, 72, 200, 2304], 'efficientnet-b7': [32, 48, 80, 224, 2560], 'efficientnet-b8': [32, 56, 88, 248, 2816] } encoder_filters = enc_sizes[name] decoder_filters = np.asarray([48, 64, 128, 160, 320]) self.conv6 = ConvRelu(encoder_filters[-1], decoder_filters[-1]) self.conv6_2 = ConvRelu(decoder_filters[-1] + encoder_filters[-2], decoder_filters[-1]) self.conv7 = ConvRelu(decoder_filters[-1], decoder_filters[-2]) self.conv7_2 = ConvRelu(decoder_filters[-2] + encoder_filters[-3], decoder_filters[-2]) self.conv8 = ConvRelu(decoder_filters[-2], decoder_filters[-3]) self.conv8_2 = ConvRelu(decoder_filters[-3] + encoder_filters[-4], decoder_filters[-3]) self.conv9 = ConvRelu(decoder_filters[-3], decoder_filters[-4]) self.conv9_2 = ConvRelu(decoder_filters[-4] + encoder_filters[-5], decoder_filters[-4]) self.conv10 = ConvRelu(decoder_filters[-4], decoder_filters[-5]) self.res = nn.Conv2d(decoder_filters[-5] * 2, 7, 1, stride=1, padding=0) self._initialize_weights() if pretrained: self.encoder = EfficientNet.from_pretrained(name) else: self.encoder = EfficientNet.from_name(name) def extract_features(self, inp): out = [] # Stem x = self.encoder._swish(self.encoder._bn0(self.encoder._conv_stem(inp))) # Blocks for idx, block in enumerate(self.encoder._blocks): drop_connect_rate = self.encoder._global_params.drop_connect_rate if drop_connect_rate: drop_connect_rate *= float(idx) / len(self.encoder._blocks) y = block(x, drop_connect_rate=drop_connect_rate) if y.size()[-1] != x.size()[-1]: out.append(x) x = y # Head x = self.encoder._swish(self.encoder._bn1(self.encoder._conv_head(x))) out.append(x) return out def forward1(self, x): batch_size, C, H, W = x.shape enc1, enc2, enc3, enc4, enc5 = self.extract_features(x) dec6 = self.conv6(F.interpolate(enc5, scale_factor=2)) dec6 = self.conv6_2(torch.cat([dec6, enc4 ], 1)) dec7 = self.conv7(F.interpolate(dec6, scale_factor=2)) dec7 = self.conv7_2(torch.cat([dec7, enc3 ], 1)) dec8 = self.conv8(F.interpolate(dec7, scale_factor=2)) dec8 = self.conv8_2(torch.cat([dec8, enc2 ], 1)) dec9 = self.conv9(F.interpolate(dec8, scale_factor=2)) dec9 = self.conv9_2(torch.cat([dec9, enc1 ], 1)) dec10 = self.conv10(F.interpolate(dec9, scale_factor=2)) return dec10 def forward(self, x): batch_size, C, H, W = x.shape dec10_0 = self.forward1(x[:, :3, :, :]) dec10_1 = self.forward1(x[:, 3:, :, :]) dec10 = torch.cat([dec10_0, dec10_1], 1) return self.res(dec10) def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear): m.weight.data = nn.init.kaiming_normal_(m.weight.data) if m.bias is not None: m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_()
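A minimal smoke test for the two decoders above, assuming `efficientnet_pytorch` is installed and the classes from this module are in scope. `pretrained=False` skips the ImageNet weight download, and the input height and width must be divisible by 32 because the encoder downsamples five times and the decoder upsamples five times:

```python
# Minimal sketch, assuming efficientnet_pytorch is installed and the
# EfficientNet_Unet classes above are importable.
import torch

model = EfficientNet_Unet(name='efficientnet-b0', pretrained=False)
x = torch.randn(1, 3, 256, 256)  # H and W must be divisible by 32
with torch.no_grad():
    y = model(x)
print(y.shape)  # torch.Size([1, 3, 256, 256])

# The *_Double variant consumes two stacked RGB images (6 channels) and
# predicts 7 output channels from the concatenated decoder features.
model2 = EfficientNet_Unet_Double(name='efficientnet-b0', pretrained=False)
x2 = torch.randn(1, 6, 256, 256)
with torch.no_grad():
    y2 = model2(x2)
print(y2.shape)  # torch.Size([1, 7, 256, 256])
```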
37.580087
111
0.569635
1,071
8,681
4.428571
0.151261
0.112165
0.074215
0.038794
0.911448
0.887202
0.887202
0.887202
0.887202
0.887202
0
0.08432
0.299159
8,681
231
112
37.580087
0.695266
0.003801
0
0.836364
0
0
0.035663
0
0
0
0
0
0
1
0.066667
false
0
0.036364
0.006061
0.157576
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
74434f7716b57e0bdb42a4996e3cd13dbf97a76f
229
py
Python
blog/views.py
AndrewJBateman/python-django-site
dd1aba6d36cfa10b6786d630263bf6fad23473f1
[ "CNRI-Python" ]
null
null
null
blog/views.py
AndrewJBateman/python-django-site
dd1aba6d36cfa10b6786d630263bf6fad23473f1
[ "CNRI-Python" ]
null
null
null
blog/views.py
AndrewJBateman/python-django-site
dd1aba6d36cfa10b6786d630263bf6fad23473f1
[ "CNRI-Python" ]
null
null
null
from django.shortcuts import render
from django.http import HttpResponse

# Create your views here.
def home(request):
    return HttpResponse('<h1>Helooooooo</h1>')

def members(request):
    return HttpResponse('<h1>Members</h1>')
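A hypothetical `blog/urls.py` (not part of this excerpt) showing the standard way these two views would be routed:

```python
# Hypothetical blog/urls.py -- not shown in the repo excerpt above, but the
# standard way to route requests to the home and members views.
from django.urls import path

from . import views

urlpatterns = [
    path('', views.home, name='home'),
    path('members/', views.members, name='members'),
]
```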
22.9
44
0.759825
30
229
5.8
0.6
0.114943
0.287356
0.310345
0
0
0
0
0
0
0
0.019802
0.117904
229
9
45
25.444444
0.841584
0.100437
0
0
0
0
0.171569
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
ae027aa3890bd9cd164f065b77d31697f4d4da9e
36
py
Python
fake_camera/__init__.py
fjolublar/fake_camera
05686f729f4514f4e7cf339de9b8b6f794c2fe44
[ "MIT" ]
null
null
null
fake_camera/__init__.py
fjolublar/fake_camera
05686f729f4514f4e7cf339de9b8b6f794c2fe44
[ "MIT" ]
1
2021-04-07T10:39:57.000Z
2021-04-07T10:42:23.000Z
fake_camera/__init__.py
fjolublar/fake_camera
05686f729f4514f4e7cf339de9b8b6f794c2fe44
[ "MIT" ]
null
null
null
from .fake_camera import Fake_Camera
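This one-line `__init__.py` re-exports the class so users of the package can import it from the top level; a sketch of the two equivalent import paths (the `Fake_Camera` API itself is defined in `fake_camera.py`, which is not shown here):

```python
# With the re-export in __init__.py above, both of these imports resolve
# to the same class; the first is the shorter path the package exposes.
from fake_camera import Fake_Camera
# from fake_camera.fake_camera import Fake_Camera  # equivalent, longer path
```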
36
36
0.888889
6
36
5
0.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0.083333
36
1
36
36
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
bb087ea7cb483328cfb8cdd8d03fa9e5b3031b34
16,012
py
Python
sorting/quicksort.py
travisariggs/Algorithms
0864880e6b193954a670073244bfd5de523b4e72
[ "MIT" ]
null
null
null
sorting/quicksort.py
travisariggs/Algorithms
0864880e6b193954a670073244bfd5de523b4e72
[ "MIT" ]
null
null
null
sorting/quicksort.py
travisariggs/Algorithms
0864880e6b193954a670073244bfd5de523b4e72
[ "MIT" ]
null
null
null
""" Quicksort Algorithm by Travis Riggs This module implements the quicksort algorithm. """ import copy import random def quick_sort_simple_first(aList, startIndex=0, endIndex=None, comparisons=False): """Sort a list from least to greatest using quicksort Returns a sorted list If 'comparisons' is set to True, it returns the sorted list and the number of comparisons It chooses the first element in the list as the pivot. """ if endIndex is None: endIndex = len(aList) # Base Case if endIndex - startIndex <= 1: if comparisons: return aList, 0 else: return aList # Select the first element as the pivot pivot = aList[startIndex] # Partition the list between elements greater than and less than # the pivot element p = startIndex + 1 # Partition index i = startIndex + 1 # Element index for elem in aList[startIndex+1:endIndex]: # Is this element less than our pivot? if elem < pivot: # Swap this element with the lowest item in the upper # partition. But only do that if we've created an upper # partition. if i != p: aList[i] = aList[p] aList[p] = elem # Move the partition index up to make room for the new # value. p += 1 # Track the index of the next list element i += 1 # Move the pivot element between the partitions aList[startIndex] = aList[p-1] aList[p-1] = pivot ## DEBUG #print(aList, aList[startIndex:endIndex], startIndex, endIndex) ## DEBUG #import ipdb; ipdb.set_trace() if comparisons: compares = len(aList[startIndex:endIndex]) - 1 # Rescursively call quick_sort on the upper and lower partitions aList, lowerCompares = quick_sort_simple_first(aList, startIndex, p-1, True) aList, upperCompares = quick_sort_simple_first(aList, p, endIndex, True) totalCompares = compares + lowerCompares + upperCompares return aList, totalCompares else: # Rescursively call quick_sort on the upper and lower partitions aList = quick_sort_simple_first(aList, startIndex, p-1) aList = quick_sort_simple_first(aList, p, endIndex) # Return the sorted list return aList def quick_sort_simple_last(aList, startIndex=0, endIndex=None, comparisons=False): """Sort a list from least to greatest using quicksort Returns a sorted list If 'comparisons' is set to True, it returns the sorted list and the number of comparisons It chooses the last element in the list as the pivot. """ if endIndex is None: endIndex = len(aList) # Base Case if endIndex - startIndex <= 1: if comparisons: return aList, 0 else: return aList # Select the last element as the pivot pivot = aList[endIndex-1] # Switch the last element with the first aList[endIndex-1] = aList[startIndex] aList[startIndex] = pivot # Partition the list between elements greater than and less than # the pivot element p = startIndex + 1 # Partition index i = startIndex + 1 # Element index for elem in aList[startIndex+1:endIndex]: # Is this element less than our pivot? if elem < pivot: # Swap this element with the lowest item in the upper # partition. But only do that if we've created an upper # partition. if i != p: aList[i] = aList[p] aList[p] = elem # Move the partition index up to make room for the new # value. 
p += 1 # Track the index of the next list element i += 1 # Move the pivot element between the partitions aList[startIndex] = aList[p-1] aList[p-1] = pivot ## DEBUG #print(aList, aList[startIndex:endIndex], startIndex, endIndex) ## DEBUG #import ipdb; ipdb.set_trace() if comparisons: compares = len(aList[startIndex:endIndex]) - 1 # Rescursively call quick_sort on the upper and lower partitions aList, lowerCompares = quick_sort_simple_last(aList, startIndex, p-1, True) aList, upperCompares = quick_sort_simple_last(aList, p, endIndex, True) totalCompares = compares + lowerCompares + upperCompares return aList, totalCompares else: # Rescursively call quick_sort on the upper and lower partitions aList = quick_sort_simple_last(aList, startIndex, p-1) aList = quick_sort_simple_last(aList, p, endIndex) # Return the sorted list return aList def quick_sort_median(aList, startIndex=0, endIndex=None, comparisons=False): """Sort a list from least to greatest using quicksort Returns a sorted list If 'comparisons' is set to True, it returns the sorted list and the number of comparisons It chooses the median of the first, middle and last element in the list as the pivot. """ if endIndex is None: endIndex = len(aList) # Base Case if endIndex - startIndex <= 1: if comparisons: return aList, 0 else: return aList ## DEBUG #import ipdb; ipdb.set_trace() #print(aList[startIndex:endIndex]) # Find the median of the first, middle and last elements first = aList[startIndex] if (endIndex - startIndex) % 2 == 0: middle = aList[startIndex + int((endIndex-startIndex)/2)-1] else: middle = aList[startIndex + int((endIndex-startIndex)/2)] last = aList[endIndex-1] # Is the first element the median of the three? if middle < first < last or last < first < middle: pivot = first # Is the middle element the median of the three? elif first < middle < last or last < middle < first: pivot = middle # Swap the middle with the first if (endIndex - startIndex) % 2 == 0: aList[startIndex + int((endIndex-startIndex)/2)-1] = aList[startIndex] else: aList[startIndex + int((endIndex-startIndex)/2)] = aList[startIndex] aList[startIndex] = pivot # The last element must be the median of the three... else: pivot = last # Switch the last element with the first aList[endIndex-1] = aList[startIndex] aList[startIndex] = pivot ## DEBUG #print(aList, aList[startIndex:endIndex], first, middle, last, pivot) # Partition the list between elements greater than and less than # the pivot element p = startIndex + 1 # Partition index i = startIndex + 1 # Element index for elem in aList[startIndex+1:endIndex]: # Is this element less than our pivot? if elem < pivot: # Swap this element with the lowest item in the upper # partition. But only do that if we've created an upper # partition. if i != p: aList[i] = aList[p] aList[p] = elem # Move the partition index up to make room for the new # value. 
p += 1 # Track the index of the next list element i += 1 # Move the pivot element between the partitions aList[startIndex] = aList[p-1] aList[p-1] = pivot ## DEBUG #import ipdb; ipdb.set_trace() if comparisons: compares = len(aList[startIndex:endIndex]) - 1 # Recursively call quick_sort on the upper and lower partitions aList, lowerCompares = quick_sort_median(aList, startIndex, p-1, True) aList, upperCompares = quick_sort_median(aList, p, endIndex, True) totalCompares = compares + lowerCompares + upperCompares return aList, totalCompares else: # Recursively call quick_sort on the upper and lower partitions aList = quick_sort_median(aList, startIndex, p-1) aList = quick_sort_median(aList, p, endIndex) # Return the sorted list return aList def quick_sort_random(aList, startIndex=0, endIndex=None, comparisons=False): """Sort a list from least to greatest using quicksort Returns a sorted list If 'comparisons' is set to True, it returns the sorted list and the number of comparisons It chooses a randomized pivot element """ if endIndex is None: endIndex = len(aList) # Base Case if endIndex - startIndex <= 1: if comparisons: return aList, 0 else: return aList ## DEBUG #import ipdb; ipdb.set_trace() #print(aList[startIndex:endIndex]) # Select a random element for the pivot pivotInd = random.randint(startIndex, endIndex-1) pivot = aList[pivotInd] # Switch the pivot element with the first aList[pivotInd] = aList[startIndex] aList[startIndex] = pivot ## DEBUG #print(aList, aList[startIndex:endIndex], first, middle, last, pivot) # Partition the list between elements greater than and less than # the pivot element p = startIndex + 1 # Partition index i = startIndex + 1 # Element index for elem in aList[startIndex+1:endIndex]: # Is this element less than our pivot? if elem < pivot: # Swap this element with the lowest item in the upper # partition. But only do that if we've created an upper # partition. if i != p: aList[i] = aList[p] aList[p] = elem # Move the partition index up to make room for the new # value. p += 1 # Track the index of the next list element i += 1 # Move the pivot element between the partitions aList[startIndex] = aList[p-1] aList[p-1] = pivot ## DEBUG #import ipdb; ipdb.set_trace() if comparisons: compares = len(aList[startIndex:endIndex]) - 1 # Recursively call quick_sort on the upper and lower partitions aList, lowerCompares = quick_sort_random(aList, startIndex, p-1, True) aList, upperCompares = quick_sort_random(aList, p, endIndex, True) totalCompares = compares + lowerCompares + upperCompares return aList, totalCompares else: # Recursively call quick_sort on the upper and lower partitions aList = quick_sort_random(aList, startIndex, p-1) aList = quick_sort_random(aList, p, endIndex) # Return the sorted list return aList def quick_sort_ideal(aList, startIndex=0, endIndex=None, comparisons=False): """Sort a list from least to greatest using an idealized quicksort This is not intended for actual use. It is only used to find the best possible sort for analysis of other variations of quick sort. Returns a sorted list If 'comparisons' is set to True, it returns the sorted list and the number of comparisons It chooses the perfect pivot element each time: the median. 
""" if endIndex is None: endIndex = len(aList) # Base Case if endIndex - startIndex <= 1: if comparisons: return aList, 0 else: return aList ## DEBUG #import ipdb; ipdb.set_trace() #print(aList[startIndex:endIndex]) temp = copy.copy(aList[startIndex:endIndex]) temp.sort() if len(temp) % 2 == 0: median = temp[int(len(temp)/2)-1] else: median = temp[int(len(temp)/2)] # Find the median value's index pivotInd = aList.index(median) pivot = aList[pivotInd] # Switch the pivot element with the first aList[pivotInd] = aList[startIndex] aList[startIndex] = pivot ## DEBUG #print(aList, aList[startIndex:endIndex], pivot) # Partition the list between elements greater than and less than # the pivot element p = startIndex + 1 # Partition index i = startIndex + 1 # Element index for elem in aList[startIndex+1:endIndex]: # Is this element less than our pivot? if elem < pivot: # Swap this element with the lowest item in the upper # partition. But only do that if we've created an upper # partition. if i != p: aList[i] = aList[p] aList[p] = elem # Move the partition index up to make room for the new # value. p += 1 # Track the index of the next list element i += 1 # Move the pivot element between the partitions aList[startIndex] = aList[p-1] aList[p-1] = pivot ## DEBUG #import ipdb; ipdb.set_trace() if comparisons: compares = len(aList[startIndex:endIndex]) - 1 # Rescursively call quick_sort on the upper and lower partitions aList, lowerCompares = quick_sort_ideal(aList, startIndex, p-1, True) aList, upperCompares = quick_sort_ideal(aList, p, endIndex, True) totalCompares = compares + lowerCompares + upperCompares return aList, totalCompares else: # Rescursively call quick_sort on the upper and lower partitions aList = quick_sort_ideal(aList, startIndex, p-1) aList = quick_sort_ideal(aList, p, endIndex) # Return the sorted list return aList if __name__ == '__main__': a = [4, 2, 234, 9, 1, 10, 2300, 3] b = [1, 2, 3, 4, 9, 10, 234, 2300] result, comparisons = quick_sort_simple_first(a, comparisons=True) print(result, comparisons) a = [4, 2, 234, 9, 1, 10, 2300, 3] b = [1, 2, 3, 4, 9, 10, 234, 2300] result, comparisons = quick_sort_simple_last(a, comparisons=True) print(result, comparisons) a = [4, 2, 234, 9, 1, 10, 2300, 3] b = [1, 2, 3, 4, 9, 10, 234, 2300] result, comparisons = quick_sort_median(a, comparisons=True) print(result, comparisons) a = [4, 2, 234, 9, 1, 10, 2300, 3] b = [1, 2, 3, 4, 9, 10, 234, 2300] result, comparisons = quick_sort_random(a, comparisons=True) print(result, comparisons) a = [4, 2, 234, 9, 1, 10, 2300, 3] b = [1, 2, 3, 4, 9, 10, 234, 2300] result, comparisons = quick_sort_ideal(a, comparisons=True) print(result, comparisons)
28.695341
82
0.556832
1,861
16,012
4.738313
0.075766
0.09526
0.036516
0.028351
0.911885
0.899524
0.869698
0.848832
0.831935
0.821501
0
0.022422
0.373283
16,012
557
83
28.746858
0.856303
0.342431
0
0.8125
0
0
0.000782
0
0
0
0
0
0
1
0.020833
false
0
0.008333
0
0.1125
0.020833
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
bb457b46936be0b55b310ed6152e1926baff42ad
99,061
py
Python
optimizer.py
shermanluo/personalDriving
5e80bb7248f8d91703050acde43b71291b8fa704
[ "MIT" ]
1
2019-08-31T15:13:58.000Z
2019-08-31T15:13:58.000Z
optimizer.py
shermanluo/personalDriving
5e80bb7248f8d91703050acde43b71291b8fa704
[ "MIT" ]
null
null
null
optimizer.py
shermanluo/personalDriving
5e80bb7248f8d91703050acde43b71291b8fa704
[ "MIT" ]
1
2019-08-30T21:08:11.000Z
2019-08-30T21:08:11.000Z
import itertools import pdb import time import numpy as np import numpy.linalg as nl import theano as th import theano.tensor as tt import theano.tensor.slinalg as ts import scipy.optimize import scipy.io import config import constants import opt_timeup import time_profile import utils from utils import shape, jacobian, hessian, grad import pdb as pdb import ilqgames.python.CarExample as unicycle #pdb.set_trace() #import ilqgames.python.two_player_unicycle_4d_example as unicycle class Maximizer(object): def __init__(self, f, vs, g={}, pre=None, gen=None, method='bfgs', eps=1, iters=100000, debug=False, inf_ignore=np.inf): self.inf_ignore = inf_ignore self.debug = debug self.iters = iters self.eps = eps self.method = method def one_gen(): yield self.gen = gen if self.gen is None: self.gen = one_gen self.pre = pre self.f = f self.vs = vs self.sz = [shape(v)[0] for v in self.vs] for i in range(1,len(self.sz)): self.sz[i] += self.sz[i-1] self.sz = [(0 if i==0 else self.sz[i-1], self.sz[i]) for i in range(len(self.sz))] if isinstance(g, dict): self.df = tt.concatenate([g[v] if v in g else grad(f, v) for v in self.vs]) else: self.df = g self.new_vs = [tt.vector() for v in self.vs] self.func = th.function(self.new_vs, [-self.f, -self.df], givens=zip(self.vs, self.new_vs)) def f_and_df(x0): if self.debug: print x0 s = None N = 0 for _ in self.gen(): if self.pre: for v, (a, b) in zip(self.vs, self.sz): v.set_value(x0[a:b]) self.pre() res = self.func(*[x0[a:b] for a, b in self.sz]) if np.isnan(res[0]).any() or np.isnan(res[1]).any() or (np.abs(res[0])>self.inf_ignore).any() or (np.abs(res[1])>self.inf_ignore).any(): continue if s is None: s = res N = 1 else: s[0] += res[0] s[1] += res[1] N += 1 s[0]/=N s[1]/=N return s self.f_and_df = f_and_df def argmax(self, vals={}, bounds={}): if not isinstance(bounds, dict): bounds = {v: bounds for v in self.vs} B = [] for v, (a, b) in zip(self.vs, self.sz): if v in bounds: B += bounds[v] else: B += [(None, None)]*(b-a) x0 = np.hstack([np.asarray(vals[v]) if v in vals else v.get_value() for v in self.vs]) if self.method=='bfgs': opt = scipy.optimize.fmin_l_bfgs_b(self.f_and_df, x0=x0, bounds=B)[0] elif self.method=='gd': opt = x0 for _ in range(self.iters): opt -= self.f_and_df(opt)[1]*self.eps else: opt = scipy.optimize.minimize(self.f_and_df, x0=x0, method=self.method, jac=True).x return opt def maximize(self, *args, **vargs): return self.argmax(*args, **vargs) class IteratedBestResponseMaximizer(object): def __init__(self, r_h, traj_h, r_r, traj_r, use_timeup=True, use_second_order=False, update_with_curr_plan_fn=None, init_plan_scheme='prev_opt', # num_optimizations_r=1, get_init_plan_r_fn=None, # num_optimizations_h=1, get_init_plan_h_fn=None, init_grads=True): """ Arguments: - r_h: the human tactical reward. - traj_h: the human trajectory. - r_r: the robot tactical reward. - traj_r: the robot trajectory. - update_with_curr_plan_fn: function to update any necessary information based on the current plan. This is only necessary for the HierarchicalMaximizer, not the NestedMaximizer. - init_plan_scheme: string specifying the plan initialization scheme. - num_optimizations_r: number of times to optimize the robot reward (the best result of these optimizations will be chosen). - get_init_plan_r_fn: function to return a function that initializes the robot's plan for optimization, based on the current optimization iteration. - num_optimizations_h: number of times to optimize the human reward (the best result of these optimizations will be chosen). 
- get_init_plan_h_fn: function to return a function that initializes the human's plan for optimization, based on the current optimization iteration. - init_grads: if True, initialize the gradients. This argument can be set to False if another function is meant to initialize the gradients. """ # --------------------------------------------------------------------------------------------------- # Basics. self.r_h = r_h self.r_r = r_r self.traj_h = traj_h self.traj_r = traj_r self.plan_h = traj_h.u_th # human plan (controls) self.plan_r = traj_r.u_th # robot plan (controls) # (start, end) indices for each control in the plan when it's represented # as a flattened array. Ex: [(0, 2), (2, 4), (4, 6), (6, 8), (8, 10)] self.control_indices_h = traj_h.control_indices self.control_indices_r = traj_r.control_indices # maximum time for optimization if use_timeup: self.timeup = config.OPT_TIMEOUT else: self.timeup = float('inf') self.use_second_order = use_second_order if update_with_curr_plan_fn is None: # no functionality necessary here update_with_curr_plan_fn = lambda: None self.update_with_curr_plan_fn = update_with_curr_plan_fn self.create_get_init_plans_fn(init_plan_scheme) self.maximizer_inner_iters_r = 0 # number of iterations of maximizer_inner self.maximizer_inner_iters_h = 0 if init_grads: # initialize the gradients self.init_grads() # self.nested = self.optimizer = NestedMaximizer( # r_h, traj_h, r_r, traj_r, # use_second_order=True, # init_plan_scheme=init_plan_scheme) def create_get_init_plans_fn(self, init_plan_scheme): """Create the functions that return the plan initialization functions for the robot and the human, depending on the optimization iteration. Also set the number of optimization loops for the robot and human. Arguments: - init_plan_scheme: string specifying the plan initialization scheme. 
""" assert init_plan_scheme in constants.INIT_PLAN_SCHEMES_OPTIONS self.get_init_plan_r_fn = eval('self.get_init_plan_r_fn_' + init_plan_scheme) self.get_init_plan_h_fn = eval('self.get_init_plan_h_fn_' + init_plan_scheme) self.num_optimizations_r = constants.INIT_PLAN_SCHEME_TO_NUM_OPTS_R[init_plan_scheme] self.num_optimizations_h = constants.INIT_PLAN_SCHEME_TO_NUM_OPTS_H[init_plan_scheme] def get_init_plan_r_fn_maintain_speed_lsr_and_prev_opt(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v ** 2 init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([v.get_value() for v in self.plan_r[:-1]] + [self.traj_r.default_control]) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_maintain_speed_lsr_and_prev_opt(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v ** 2 init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([v.get_value() for v in self.plan_h[:-1]] + [self.traj_h.default_control]) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_maintain_speed_lsr(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v ** 2 init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_r.horizon)) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_maintain_speed_lsr(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v ** 2 init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_h.horizon)) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_lsr(self, iter): # TODO: comment this init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], 0.] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., 0.] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], 0.] for _ in range(self.traj_r.horizon)) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_lsr(self, iter): # TODO: comment this init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], 0.] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., 0.] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], 0.] 
for _ in range(self.traj_h.horizon)) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_max_speed_prev_steer(self, iter): # TODO: comment this return lambda: np.hstack([[v.get_value()[0], constants.CAR_CONTROL_BOUNDS[1][1]] for v in self.plan_r[:-1]] + [ self.traj_r.default_control[0], constants.CAR_CONTROL_BOUNDS[1][1]]) def get_init_plan_h_fn_max_speed_prev_steer(self, iter): # TODO: comment this return lambda: np.hstack([[v.get_value()[0], constants.CAR_CONTROL_BOUNDS[1][1]] for v in self.plan_h[:-1]] + [ self.traj_h.default_control[0], constants.CAR_CONTROL_BOUNDS[1][1]]) def get_init_plan_r_fn_maintain_speed_prev_steer(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v ** 2 return lambda: np.hstack( [[v.get_value()[0], acc] for v in self.plan_r[:-1]] + [self.traj_r.default_control[0], acc]) def get_init_plan_h_fn_maintain_speed_prev_steer(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v ** 2 return lambda: np.hstack( [[v.get_value()[0], acc] for v in self.plan_h[:-1]] + [self.traj_h.default_control[0], acc]) def get_init_plan_r_fn_prev_opt(self, iter): # TODO: comment this """Initialize the robot plan using the default way.""" return lambda: np.hstack([v.get_value() for v in self.plan_r[:-1]] + [self.traj_r.default_control]) def get_init_plan_h_fn_prev_opt(self, iter): # TODO: comment this """Initialize the human's plan using the default way.""" return lambda: np.hstack([v.get_value() for v in self.plan_h[:-1]] + [self.traj_h.default_control]) def get_my_init(self, iter): return lambda: np.hstack((0, -2 * 0.0878) for _ in range(self.traj_h.horizon)) def get_my_init2(self, iter): return lambda: np.hstack((0, -8 * 0.0878) for _ in range(self.traj_h.horizon)) def init_grads(self): """Initialize the gradients based on the rewards. Precondition: the rewards (self.r_h and self.r_r) have already been initialized. """ # gradient of human reward wrt human controls self.dr_h = grad(self.r_h, self.plan_h) # negative human reward and its derivative self.func1 = th.function([], [-self.r_h, -self.dr_h]) def r_h_and_dr_h(plan_h_0): """Evaluate negative human reward and its derivative. - plan_h_0: initial value for human plan.""" start_time = time.time() # set plan_h to the given (initial) plan_h_0 for v, (a, b) in zip(self.plan_h, self.control_indices_h): v.set_value(plan_h_0[a:b]) # do any necessary updates based on the current plan # (this is necessary for the HierarchicalMaximizer, not the Nested Maximizer) self.update_with_curr_plan_fn() func1_val = self.func1() # negative human reward and its derivative end_time = time.time() time_profile.inner_loop_time_profile.update(start_time, end_time) return func1_val self.r_h_and_dr_h = r_h_and_dr_h # ------------------------------------------------------------------------------------------ # Robot's reward and its derivative. # ------------------------------------------------------------------------------------------ # OPTION 2: Partial derivative computation. 
FAST # (Only direct effect of robot action given current human action) # Below is the simplified derivative that neglects the second-order # effect through human (and therefore avoids the heavy Hessian # inversion) self.dr_r = grad(self.r_r, self.plan_r) # ------------------------------------------------------------------------------------------ # negative robot reward and its derivative self.func2 = th.function([], [-self.r_r, -self.dr_r]) def r_r_and_dr_r(plan_r_0): """Get optimal human response, and return negative robot reward and its derivative. - plan_r_0: initial value for robot plan.""" # set self.plan_r to the given (initial) plan_r_0 for v, (a, b) in zip(self.plan_r, self.control_indices_r): v.set_value(plan_r_0[a:b]) start_time = time.time() func2_val = self.func2() # negative robot reward and its derivative end_time = time.time() time_profile.func2_time_profile.update(start_time, end_time) return func2_val self.r_r_and_dr_r = r_r_and_dr_r def maximize_h(self, bounds={}, maxiter=config.NESTEDMAX_MAXITER_INNER): """Get optimal human response (controls). Arguments: - bounds: control bounds for the human. - maxiter: maximum number of iterations. """ start_time = time.time() bounds = constants.CAR_CONTROL_BOUNDS if not isinstance(bounds, dict): # convert bounds to dictionary bounds = {v: bounds for v in self.plan_h} B = [] # list of bounds for each control in the plan for v, (a, b) in zip(self.plan_h, self.control_indices_h): if v in bounds: B += bounds[v] else: B += [(None, None)] * (b - a) # TODO: can we replace the .get_value() approach with using the numpy # version because at this point the Theano and numpy plans are the same? # plan_h_0 = np.hstack(self.traj_h.u) # initial robot plan (numpy version) opt_h_list = [] for i in range(self.num_optimizations_h): self.init_plan_h = self.get_init_plan_h_fn(i) plan_h_0 = self.init_plan_h() opt_h = scipy.optimize.fmin_l_bfgs_b(self.r_h_and_dr_h, x0=plan_h_0, bounds=B) opt_h_list.append(opt_h) plan_h_0 = np.hstack([v.get_value() for v in self.plan_h]) opt_h = scipy.optimize.fmin_l_bfgs_b(self.r_h_and_dr_h, x0=plan_h_0, bounds=B) opt_h_list.append(opt_h) best_opt_h = min(opt_h_list, key=lambda opt: opt[1]) opt_plan_h= best_opt_h[0] # optimal robot control for v, (a, b) in zip(self.plan_h, self.control_indices_h): v.set_value(opt_plan_h[a:b]) # do any necessary updates based on the current plan # (this is necessary for the HierarchicalMaximizer, not the Nested Maximizer) self.update_with_curr_plan_fn() # increment the counter for the number of iterations of maximizer_inner self.maximizer_inner_iters_h += 1 end_time = time.time() time_profile.maximize_inner_time_profile.update(start_time, end_time) return opt_h def maximize_r(self, bounds={}, maxiter=config.NESTEDMAX_MAXITER_INNER): """Get optimal human response (controls). Arguments: - bounds: control bounds for the human. - maxiter: maximum number of iterations. 
""" start_time = time.time() bounds = constants.CAR_CONTROL_BOUNDS if not isinstance(bounds, dict): # convert bounds to dictionary bounds = {v: bounds for v in self.plan_r} B = [] # list of bounds for each control in the plan for v, (a, b) in zip(self.plan_r, self.control_indices_r): if v in bounds: B += bounds[v] else: B += [(None, None)] * (b - a) opt_r_list = [] for i in range(self.num_optimizations_r): self.init_plan_r = self.get_init_plan_r_fn(i) plan_r_0 = self.init_plan_r() opt_r = scipy.optimize.fmin_l_bfgs_b(self.r_r_and_dr_r, x0=plan_r_0, bounds=B) opt_r_list.append(opt_r) plan_r_0 = np.hstack([v.get_value() for v in self.plan_r]) opt_r = scipy.optimize.fmin_l_bfgs_b(self.r_r_and_dr_r, x0=plan_r_0, bounds=B) opt_r_list.append(opt_r) best_opt_r = min(opt_r_list, key=lambda opt: opt[1]) opt_plan_r = best_opt_r[0] # optimal robot control for v, (a, b) in zip(self.plan_r, self.control_indices_r): v.set_value(opt_plan_r[a:b]) # do any necessary updates based on the current plan # (this is necessary for the HierarchicalMaximizer, not the Nested Maximizer) self.update_with_curr_plan_fn() # increment the counter for the number of iterations of maximizer_inner self.maximizer_inner_iters_r += 1 end_time = time.time() time_profile.maximize_inner_time_profile.update(start_time, end_time) return opt_r def maximize(self, bounds={}, bounds_inner={}, maxiter_inner=config.NESTEDMAX_MAXITER_INNER): # Get optimal robot plan (controls) and human response using nested # optimization. start_time = time.time() #for r in range(self.num_optimizations_r): # for h in range(self.num_optimizations_h): #self.init_plan_r = self.get_my_init2(0) #self.init_plan_h = self.get_my_init(0) # # #pdb.set_trace() # opt_plan_r, opt_plan_h = self.nested.maximize(bounds=bounds, bounds_inner=bounds) # for v, (a, b) in zip(self.plan_r, self.control_indices_r): # v.set_value(opt_plan_r[a:b]) opt_plan_h = np.hstack((0, -8 * 0.0878) for _ in range(self.traj_h.horizon)) for v, (a, b) in zip(self.plan_h, self.control_indices_h): v.set_value(opt_plan_h[a:b]) #pdb.set_trace() for i in range(5): opt_r = self.maximize_r(bounds=bounds) opt_h = self.maximize_h(bounds=bounds) #print([x.get_value() for x in self.plan_r]) #print([x.get_value() for x in self.plan_h]) #pdb.set_trace() # time profile of HierarchicalMaximizer maximize_end_time = time.time() time_profile.maximizer_time_profile.update(start_time, maximize_end_time) return opt_r[0], opt_h[0] class NestedMaximizer(object): def __init__(self, r_h, traj_h, r_r, traj_r, use_timeup=True, use_second_order=False, update_with_curr_plan_fn=None, init_plan_scheme='prev_opt', # num_optimizations_r=1, get_init_plan_r_fn=None, # num_optimizations_h=1, get_init_plan_h_fn=None, init_grads=True): """ Arguments: - r_h: the human tactical reward. - traj_h: the human trajectory. - r_r: the robot tactical reward. - traj_r: the robot trajectory. - update_with_curr_plan_fn: function to update any necessary information based on the current plan. This is only necessary for the HierarchicalMaximizer, not the NestedMaximizer. - init_plan_scheme: string specifying the plan initialization scheme. - num_optimizations_r: number of times to optimize the robot reward (the best result of these optimizations will be chosen). - get_init_plan_r_fn: function to return a function that initializes the robot's plan for optimization, based on the current optimization iteration. - num_optimizations_h: number of times to optimize the human reward (the best result of these optimizations will be chosen). 
- get_init_plan_h_fn: function to return a function that initializes the human's plan for optimization, based on the current optimization iteration. - init_grads: if True, initialize the gradients. This argument can be set to False if another function is meant to initialize the gradients. """ # --------------------------------------------------------------------------------------------------- # Basics. self.r_h = r_h self.r_r = r_r self.traj_h = traj_h self.traj_r = traj_r self.plan_h = traj_h.u_th # human plan (controls) self.plan_r = traj_r.u_th # robot plan (controls) # (start, end) indices for each control in the plan when it's represented # as a flattened array. Ex: [(0, 2), (2, 4), (4, 6), (6, 8), (8, 10)] self.control_indices_h = traj_h.control_indices self.control_indices_r = traj_r.control_indices # maximum time for optimization if use_timeup: self.timeup = config.OPT_TIMEOUT else: self.timeup = float('inf') self.use_second_order = use_second_order if update_with_curr_plan_fn is None: # no functionality necessary here update_with_curr_plan_fn = lambda: None self.update_with_curr_plan_fn = update_with_curr_plan_fn # self.num_optimizations_r = num_optimizations_r # if get_init_plan_r_fn is None: # self.get_init_plan_r_fn = self.get_init_plan_r_fn_default # else: # self.get_init_plan_r_fn = get_init_plan_r_fn # self.num_optimizations_h = num_optimizations_h # if get_init_plan_h_fn is None: # self.get_init_plan_h_fn = self.get_init_plan_h_fn_default # else: # self.get_init_plan_h_fn = get_init_plan_h_fn self.create_get_init_plans_fn(init_plan_scheme) self.maximizer_inner_iters = 0 # number of iterations of maximizer_inner if init_grads: # initialize the gradients self.init_grads() def create_get_init_plans_fn(self, init_plan_scheme): """Create the functions that return the plan initialization functions for the robot and the human, depending on the optimization iteration. Also set the number of optimization loops for the robot and human. Arguments: - init_plan_scheme: string specifying the plan initialization scheme. 
""" assert init_plan_scheme in constants.INIT_PLAN_SCHEMES_OPTIONS self.get_init_plan_r_fn = eval('self.get_init_plan_r_fn_' + init_plan_scheme) self.get_init_plan_h_fn = eval('self.get_init_plan_h_fn_' + init_plan_scheme) self.num_optimizations_r = constants.INIT_PLAN_SCHEME_TO_NUM_OPTS_R[init_plan_scheme] self.num_optimizations_h = constants.INIT_PLAN_SCHEME_TO_NUM_OPTS_H[init_plan_scheme] def get_init_plan_r_fn_maintain_speed_lsr_and_prev_opt(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v**2 init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([v.get_value() for v in self.plan_r[:-1]] + [self.traj_r.default_control]) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_maintain_speed_lsr_and_prev_opt(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v**2 init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([v.get_value() for v in self.plan_h[:-1]] + [self.traj_h.default_control]) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_maintain_speed_lsr(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v**2 init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_r.horizon)) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_maintain_speed_lsr(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v**2 init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., acc] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], acc] for _ in range(self.traj_h.horizon)) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_lsr(self, iter): # TODO: comment this init_plan_r_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], 0.] for _ in range(self.traj_r.horizon)), lambda: np.hstack([0., 0.] for _ in range(self.traj_r.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], 0.] for _ in range(self.traj_r.horizon)) ] return init_plan_r_fn_list[iter] def get_init_plan_h_fn_lsr(self, iter): # TODO: comment this init_plan_h_fn_list = [ lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][0], 0.] for _ in range(self.traj_h.horizon)), lambda: np.hstack([0., 0.] for _ in range(self.traj_h.horizon)), lambda: np.hstack([constants.CAR_CONTROL_BOUNDS[0][1], 0.] 
for _ in range(self.traj_h.horizon)) ] return init_plan_h_fn_list[iter] def get_init_plan_r_fn_max_speed_prev_steer(self, iter): # TODO: comment this return lambda: np.hstack([[v.get_value()[0], constants.CAR_CONTROL_BOUNDS[1][1]] for v in self.plan_r[:-1]] + [self.traj_r.default_control[0], constants.CAR_CONTROL_BOUNDS[1][1]]) def get_init_plan_h_fn_max_speed_prev_steer(self, iter): # TODO: comment this return lambda: np.hstack([[v.get_value()[0], constants.CAR_CONTROL_BOUNDS[1][1]] for v in self.plan_h[:-1]] + [self.traj_h.default_control[0], constants.CAR_CONTROL_BOUNDS[1][1]]) def get_init_plan_r_fn_maintain_speed_prev_steer(self, iter): # TODO: comment this v = self.traj_r.x0[3] acc = constants.FRICTION * v**2 return lambda: np.hstack([[v.get_value()[0], acc] for v in self.plan_r[:-1]] + [self.traj_r.default_control[0], acc]) def get_init_plan_h_fn_maintain_speed_prev_steer(self, iter): # TODO: comment this v = self.traj_h.x0[3] acc = constants.FRICTION * v**2 return lambda: np.hstack([[v.get_value()[0], acc] for v in self.plan_h[:-1]] + [self.traj_h.default_control[0], acc]) def get_init_plan_r_fn_prev_opt(self, iter): # TODO: comment this """Initialize the robot plan using the default way.""" return lambda: np.hstack([v.get_value() for v in self.plan_r[:-1]] + [self.traj_r.default_control]) def get_init_plan_h_fn_prev_opt(self, iter): # TODO: comment this """Initialize the human's plan using the default way.""" return lambda: np.hstack([v.get_value() for v in self.plan_h[:-1]] + [self.traj_h.default_control]) def init_grads(self): """Initialize the gradients based on the rewards. Precondition: the rewards (self.r_h and self.r_r) have already been initialized. """ # gradient of human reward wrt human controls self.dr_h = grad(self.r_h, self.plan_h) # negative human reward and its derivative self.func1 = th.function([], [-self.r_h, -self.dr_h]) def r_h_and_dr_h(plan_h_0): """Evaluate negative human reward and its derivative. - plan_h_0: initial value for human plan.""" start_time = time.time() # set plan_h to the given (initial) plan_h_0 for v, (a, b) in zip(self.plan_h, self.control_indices_h): v.set_value(plan_h_0[a:b]) # do any necessary updates based on the current plan # (this is necessary for the HierarchicalMaximizer, not the Nested Maximizer) self.update_with_curr_plan_fn() func1_val = self.func1() # negative human reward and its derivative end_time = time.time() time_profile.inner_loop_time_profile.update(start_time, end_time) return func1_val self.r_h_and_dr_h = r_h_and_dr_h # ------------------------------------------------------------------------------------------ # Robot's reward and its derivative. # ------------------------------------------------------------------------------------------ if self.use_second_order: # OPTION 1: Full derivative computation with Hessian inversion. # SLOW, DEPRECATED # jacobian of (d human reward / d robot actions) w.r.t. human actions J = jacobian(grad(self.r_h, self.plan_r), self.plan_h) # hessian of human reward w.r.t. human actions H = hessian(self.r_h, self.plan_h) # d robot reward / d human actions g = grad(self.r_r, self.plan_h) # Below is the most time-consuming step (the solve(H,g)) self.dr_r = -tt.dot(J, ts.solve(H, g))+grad(self.r_r, self.plan_r) # ------------------------------------------------------------------------------------------ else: # OPTION 2: Partial derivative computation. 
FAST # (Only direct effect of robot action given current human action) # Below is the simplified derivative that neglects the second-order # effect through human (and therefore avoids the heavy Hessian # inversion) self.dr_r = grad(self.r_r, self.plan_r) # ------------------------------------------------------------------------------------------ # negative robot reward and its derivative self.func2 = th.function([], [-self.r_r, -self.dr_r]) def r_r_and_dr_r(plan_r_0): """Get optimal human response, and return negative robot reward and its derivative. - plan_r_0: initial value for robot plan.""" # set self.plan_r to the given (initial) plan_r_0 for v, (a, b) in zip(self.plan_r, self.control_indices_r): v.set_value(plan_r_0[a:b]) self.maximize_inner() # get optimal human response start_time = time.time() func2_val = self.func2() # negative robot reward and its derivative end_time = time.time() time_profile.func2_time_profile.update(start_time, end_time) return func2_val self.r_r_and_dr_r = r_r_and_dr_r def maximize_inner(self, bounds={}, maxiter=config.NESTEDMAX_MAXITER_INNER): """Get optimal human response (controls). Arguments: - bounds: control bounds for the human. - maxiter: maximum number of iterations. """ start_time = time.time() bounds = constants.HIERARCHICAL_HUMAN_CONTROL_BOUNDS if not isinstance(bounds, dict): # convert bounds to dictionary bounds = {v: bounds for v in self.plan_h} B = [] # list of bounds for each control in the plan for v, (a, b) in zip(self.plan_h, self.control_indices_h): if v in bounds: B += bounds[v] else: B += [(None, None)]*(b-a) # TODO: can we replace the .get_value() approach with using the numpy # version because at this point the Theano and numpy plans are the same? # plan_h_0 = np.hstack(self.traj_h.u) # initial robot plan (numpy version) if self.maximizer_inner_iters == 0: # initialize the robot's plan using the defined plan initialization scheme plan_h_0 = self.init_plan_h() else: # initialize human plan to previous optimal value plan_h_0 = np.hstack([v.get_value() for v in self.plan_h]) # optimal human response, value, etc. opt_h = scipy.optimize.fmin_l_bfgs_b(self.r_h_and_dr_h, x0=plan_h_0, bounds=B) opt_plan_h = opt_h[0] # optimal human response for v, (a, b) in zip(self.plan_h, self.control_indices_h): v.set_value(opt_plan_h[a:b]) # do any necessary updates based on the current plan # (this is necessary for the HierarchicalMaximizer, not the Nested Maximizer) self.update_with_curr_plan_fn() # increment the counter for the number of iterations of maximizer_inner self.maximizer_inner_iters += 1 end_time = time.time() time_profile.maximize_inner_time_profile.update(start_time, end_time) return opt_h def maximize(self, bounds={}, bounds_inner={}, maxiter_inner=config.NESTEDMAX_MAXITER_INNER): # Get optimal robot plan (controls) and human response using nested # optimization. start_time = time.time() if not isinstance(bounds, dict): # convert bounds to dictionary bounds = {v: bounds for v in self.plan_r} B = [] # list of bounds for each control in the plan for v, (a, b) in zip(self.plan_r, self.control_indices_r): if v in bounds: B += bounds[v] else: B += [(None, None)]*(b-a) opt_r_list = [] # list of optimization results for i in range(self.num_optimizations_r): # TODO: can we replace the .get_value() approach with using the numpy # version because at this point the Theano and numpy plans are the same? 


class HierarchicalMaximizer(NestedMaximizer):
    """Maximize the hierarchical game between the robot (leader) and the
    human (follower). The tactical rewards are given analytically as inputs,
    while the terminal rewards, given by the value functions of the strategic
    level, are loaded into the class as grids. Values are read from the value
    function grids using multilinear grid interpolation (see the code below
    for details).
    """

    def __init__(self, r_h, traj_h, r_r, traj_r, mat_name, n, proj,
                 traj_truck=None, use_timeup=True, use_second_order=False,
                 init_plan_scheme='prev_opt', disc_grid=None, step_grid=None,
                 vH_grid=None, vR_grid=None):
        """
        Arguments:
        - r_h: the human tactical reward.
        - traj_h: the human trajectory.
        - r_r: the robot tactical reward.
        - traj_r: the robot trajectory.
        - mat_name: the matlab file with the value function grids.
        - n: the number of dimensions of the strategic level.
        - proj: function specifying the projection from the tactical level
          to the strategic level.
        - traj_truck: the truck trajectory. If None, the truck is not used
          in the strategic value.
        """
        # ------------------------------------------------------------------
        # Basics.
        NestedMaximizer.__init__(self, r_h, traj_h, r_r, traj_r,
            use_timeup=use_timeup, use_second_order=use_second_order,
            update_with_curr_plan_fn=self.update_corners,
            init_plan_scheme=init_plan_scheme, init_grads=False)
        self.n = n  # dimension of strategic state
        self.x_tact_h = traj_h.x_th[-1]  # final human tactical state
        self.x_tact_r = traj_r.x_th[-1]  # final robot tactical state
        if traj_truck is not None:
            self.x_tact_truck = traj_truck.x_th[-1]  # final truck tactical state

        # ------------------------------------------------------------------
        # Shared variables.
        # Grid interpolation is done with the grid corners of the grid cell
        # that the current state is in. These grid corners are shared
        # variables which are updated as the state is updated. Since the grid
        # step lengths are given, only the lower corners (self.cell_corners
        # below) are needed as shared variables.
        self.cell_corners = th.shared(np.zeros(self.n))  # lower corners of the grid cell
        # corners for human value function
        self.vH_corners = th.shared(np.zeros([2 for i in range(n)]))
        # corners for robot value function
        self.vR_corners = th.shared(np.zeros([2 for i in range(n)]))

        # ------------------------------------------------------------------
        # Load grid data.
        if (disc_grid is None or step_grid is None or vH_grid is None or
                vR_grid is None):
            self.disc_grid, self.step_grid, self.vH_grid, self.vR_grid = (
                utils.load_grid_data(mat_name, n=self.n))
        else:
            self.disc_grid, self.step_grid, self.vH_grid, self.vR_grid = (
                disc_grid, step_grid, vH_grid, vR_grid)

        # ------------------------------------------------------------------
        # Value functions.
        # strategic state (projected using Theano)
        if traj_truck is not None:
            self.x_strat = proj(self.x_tact_r, self.x_tact_h, self.x_tact_truck)
        else:
            self.x_strat = proj(self.x_tact_r, self.x_tact_h)
        self.x_strat_func = th.function([], self.x_strat)

        def value_function(value_corners):
            return HierarchicalMaximizer.value_function_fn(self.x_strat,
                self.cell_corners, value_corners, self.step_grid, self.n)
        self.vR = value_function(self.vR_corners)  # robot value function
        self.vH = value_function(self.vH_corners)  # human value function

        # ------------------------------------------------------------------
        # Add the strategic value functions to the tactical rewards.
        self.r_h += config.STRATEGIC_VALUE_SCALE * self.vH
        self.r_r += config.STRATEGIC_VALUE_SCALE * self.vR

        # initialize the gradients
        NestedMaximizer.init_grads(self)

    @staticmethod
    def value_function_fn(x_strat, cell_corners, value_corners, step_grid, n):
        """Strategic value function computed using multilinear grid
        interpolation."""
        start_time = time.time()
        sumterms = []
        volume = step_grid.prod()
        for i in itertools.product(range(2), repeat=n):
            partial_volume = [((-1)**(i[j]+1) *
                               (x_strat[j] - cell_corners[j]) +
                               (1-i[j]) * step_grid[j]) for j in range(n)]
            partial_volume = np.asarray(partial_volume).prod()  # array, to use prod
            sumterm = value_corners[i]*partial_volume/volume
            sumterms.append(sumterm)
        sum_val = sum(sumterms)
        end_time = time.time()
        time_profile.value_function_time_profile.update(start_time, end_time)
        return sum_val

    def update_corners(self):
        """Update the corner values of the strategic grid cell by determining
        which grid cell the current strategic state belongs to."""
        cell_corners, vR_corners, vH_corners = (HierarchicalMaximizer
            .update_corners_fn(self.x_strat_func(), self.n,
                               self.disc_grid, self.vH_grid, self.vR_grid))
        self.cell_corners.set_value(cell_corners)
        self.vR_corners.set_value(vR_corners)
        self.vH_corners.set_value(vH_corners)

    @staticmethod
    def update_corners_fn(x_strat, n, disc_grid, vH_grid, vR_grid):
        """Determine which grid cell the given strategic state belongs to and
        return the cell's lower corners together with the corresponding value
        function corner values."""
        start_time = time.time()
        # outside (length n) has outside[i] True if the strategic state at
        # index i is outside the strategic domain. In that case we either
        # project back onto the strategic grid, or set the value function to 0
        # (i.e. just consider the tactical reward).
        outside = []
        inds = []
        cell_corners_new = []
        for i in range(n):
            if x_strat[i] < disc_grid[i][0]:
                ind = 0
                # only set value=0 if not projecting onto grid
                outside.append(True)
            elif x_strat[i] > disc_grid[i][-1]:
                ind = len(disc_grid[i]) - 1  # was - 2
                # only set value=0 if not projecting onto grid
                outside.append(True)
            else:
                ind = np.where(disc_grid[i] <= x_strat[i])[0][-1]
                outside.append(False)  # inside the strategic domain
            inds.append(ind)
            cell_corners_new.append(disc_grid[i][ind])
        cell_corners_new = np.array(cell_corners_new)
        vH_corners_new = np.zeros([2 for i in range(n)])
        vR_corners_new = np.zeros([2 for i in range(n)])
        if not any(outside) or config.PROJECT_ONTO_STRAT_GRID:
            # iterate through the unit vectors representing the corners of an
            # n-dimensional grid cell
            for i in itertools.product(range(2), repeat=n):
                gp_ind = []  # indices of this corner of the grid cell
                for j, (ind, direction, dimension) in enumerate(
                        zip(inds, list(i), vH_grid.shape)):
                    # ind: index of the grid point "smaller" than the strategic state
                    # direction: unit vector specifying the grid cell corner
                    # dimension: length of the grid in this dimension
                    if outside[j]:
                        # this state variable lies outside the strategic grid,
                        # so project it back onto the grid by using the
                        # clamped index (0 or dimension - 1, set above)
                        gp_ind.append(ind)
                    else:
                        # clip to dimension - 1 to avoid an index error
                        gp_ind.append(min(ind + direction, dimension - 1))
                gp_ind = tuple(gp_ind)  # tuple, for indexing into the value grids
                try:
                    vH_corners_new[i] = vH_grid[gp_ind]
                    vR_corners_new[i] = vR_grid[gp_ind]
                except Exception as e:
                    print(e)
                    pdb.set_trace()
        end_time = time.time()
        time_profile.update_corners_time_profile.update(start_time, end_time)
        return cell_corners_new, vR_corners_new, vH_corners_new
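

# A minimal numeric sketch (added for illustration; not part of the original
# pipeline) of the multilinear interpolation above, on a made-up 2-D grid
# cell with unit steps. With plain numpy inputs, value_function_fn reduces to
# standard bilinear interpolation: each corner value is weighted by the
# volume of the sub-cell opposite that corner, divided by the cell volume.
def _interp_demo():
    step_grid = np.array([1.0, 1.0])       # grid step length per dimension
    cell_corners = np.array([0.0, 0.0])    # lower corners of the current cell
    value_corners = np.array([[0.0, 1.0],  # value at corner (i, j)
                              [2.0, 4.0]])
    x_strat = np.array([0.5, 0.5])         # query point: the cell midpoint
    # at the midpoint every corner weight is 0.25, so the result is
    # (0.0 + 1.0 + 2.0 + 4.0) / 4 = 1.75
    return HierarchicalMaximizer.value_function_fn(
        x_strat, cell_corners, value_corners, step_grid, 2)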


class ILQRMaximizer(object):
    """Thin wrapper that delegates the optimization to unicycle.run (iLQR)."""

    def __init__(self, r_h, traj_h, r_r, traj, dyn):
        self.r_h = r_h
        self.traj_h = traj_h
        self.r_r = r_r
        self.traj = traj
        self.dyn = dyn

    def maximize(self, bounds={}):
        return unicycle.run(self.traj.x0, None, None, self.dyn, self.r_r,
                            self.r_h)


class PredictReactMaximizer(NestedMaximizer):
    def init_grads(self):
        """Initialize the gradients based on the rewards.
        Precondition: the rewards (self.r_h and self.r_r) have already been
        initialized.
        """
        # gradient of human reward w.r.t. human controls
        self.dr_h = grad(self.r_h, self.plan_h)
        # negative human reward and its derivative
        self.func1 = th.function([], [-self.r_h, -self.dr_h])

        def r_h_and_dr_h(plan_h_0):
            """Evaluate negative human reward and its derivative.
            - plan_h_0: initial value for human plan."""
            start_time = time.time()
            # set plan_h to the given (initial) plan_h_0
            for v, (a, b) in zip(self.plan_h, self.control_indices_h):
                v.set_value(plan_h_0[a:b])
            # do any necessary updates based on the current plan
            # (this is necessary when using a strategic value)
            self.update_with_curr_plan_fn()
            func1_val = self.func1()  # negative human reward and its derivative
            end_time = time.time()
            time_profile.inner_loop_time_profile.update(start_time, end_time)
            return func1_val
        self.r_h_and_dr_h = r_h_and_dr_h

        # ------------------------------------------------------------------
        # Robot's reward and its derivative.
        # ------------------------------------------------------------------
        if self.use_second_order:
            # OPTION 1: full derivative computation with Hessian inversion.
            # SLOW, DEPRECATED
            # jacobian of (d human reward / d robot actions) w.r.t. human actions
            J = jacobian(grad(self.r_h, self.plan_r), self.plan_h)
            # hessian of human reward w.r.t. human actions
            H = hessian(self.r_h, self.plan_h)
            # d robot reward / d human actions
            g = grad(self.r_r, self.plan_h)
            # the solve(H, g) below is the most time-consuming step
            self.dr_r = -tt.dot(J, ts.solve(H, g)) + grad(self.r_r, self.plan_r)
        else:
            # OPTION 2: partial derivative computation. FAST
            # (only the direct effect of the robot action given the current
            # human action). This simplified derivative neglects the
            # second-order effect through the human, and therefore avoids the
            # heavy Hessian inversion.
            self.dr_r = grad(self.r_r, self.plan_r)

        # negative robot reward and its derivative
        self.func2 = th.function([], [-self.r_r, -self.dr_r])

        def r_r_and_dr_r(plan_r_0):
            """Return negative robot reward and its derivative for the
            current (predicted) human plan.
            - plan_r_0: initial value for robot plan."""
            start_time = time.time()
            # set self.plan_r to the given (initial) plan_r_0
            for v, (a, b) in zip(self.plan_r, self.control_indices_r):
                v.set_value(plan_r_0[a:b])
            # do any necessary updates based on the current plan
            # (this is necessary when using a strategic value)
            self.update_with_curr_plan_fn()
            func2_val = self.func2()  # negative robot reward and its derivative
            end_time = time.time()
            time_profile.func2_time_profile.update(start_time, end_time)
            return func2_val
        self.r_r_and_dr_r = r_r_and_dr_r

    def maximize(self, bounds={}):
        """Optimize the robot's and human's plans with respect to their
        rewards using the predict-then-react scheme:
        1) "Predict" the human's plan by optimizing the human's plan w.r.t.
           its reward.
        2) Optimize the robot's plan w.r.t. its reward by treating the human
           as a moving obstacle following the "predicted" human plan.
        """
        start_time = time.time()
        if not isinstance(bounds, dict):  # convert bounds to dictionary
            bounds = {v: bounds for v in self.plan_r}
        B = []  # list of bounds for each control in the plan
        for v, (a, b) in zip(self.plan_r, self.control_indices_r):
            if v in bounds:
                B += bounds[v]
            else:
                B += [(None, None)]*(b-a)
        opt_robot_bounds = B
        # the human plan currently reuses the robot's control bounds
        opt_human_bounds = B

        opt_r_list = []  # list of robot optimization results
        opt_h_list = []  # list of human optimization results
        for i in range(self.num_optimizations_r):
            # TODO: can we replace the .get_value() approach with the numpy
            # version, since at this point the Theano and numpy plans are the same?
            # plan_r_0 = np.hstack(self.traj_r.u)  # initial robot plan (numpy version)
            plan_r_0 = self.get_init_plan_r_fn(i)()  # initialize the robot's plan
            for j in range(self.num_optimizations_h):
                # initialize the human's plan
                plan_h_0 = self.get_init_plan_h_fn(j)()
                # optimal human control, value, etc.
                opt_h = opt_timeup.fmin_l_bfgs_b_timeup(self.r_h_and_dr_h,
                    x0=plan_h_0, bounds=opt_human_bounds, t0=start_time,
                    timeup=self.timeup)
                opt_h_list.append(opt_h)
                opt_plan_h = opt_h[0]  # optimal human control
                # set the robot's belief of the human plan to the
                # predicted/planned human plan
                for v, (a, b) in zip(self.plan_h, self.control_indices_h):
                    v.set_value(opt_plan_h[a:b])
                # optimal robot control, value, etc.
                opt_r = opt_timeup.fmin_l_bfgs_b_timeup(self.r_r_and_dr_r,
                    x0=plan_r_0, bounds=opt_robot_bounds, t0=start_time,
                    timeup=self.timeup)
                opt_r_list.append(opt_r)

        # get the best robot plan based on its value, along with the human
        # plan that was predicted alongside it
        opt_r_vals = [opt[1] for opt in opt_r_list]
        best_opt_r_idx = np.argmin(opt_r_vals)
        opt_r = opt_r_list[best_opt_r_idx]
        opt_plan_r = opt_r[0]  # optimal robot control
        opt_h = opt_h_list[best_opt_r_idx]
        opt_plan_h = opt_h[0]  # human control corresponding to the optimal robot control

        # set the optimal plans in Theano
        for v, (a, b) in zip(self.plan_r, self.control_indices_r):
            v.set_value(opt_plan_r[a:b])
        for v, (a, b) in zip(self.plan_h, self.control_indices_h):
            v.set_value(opt_plan_h[a:b])

        # time profile of the maximizer
        maximize_end_time = time.time()
        time_profile.maximizer_time_profile.update(start_time, maximize_end_time)
        return opt_r, opt_h
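

# A minimal, self-contained sketch (added for illustration; not part of the
# original pipeline) of the predict-then-react scheme above on a toy 1-D
# problem, assuming the module-level numpy (np) and scipy.optimize imports:
# first optimize the human plan alone, then optimize the robot plan with the
# predicted human plan frozen, treating the human as a moving obstacle.
def _predict_react_demo():
    # step 1: "predict" the human, who simply prefers u_h = 0.3
    f_h = lambda u_h: ((u_h[0] - 0.3)**2, np.array([2.0 * (u_h[0] - 0.3)]))
    u_h = scipy.optimize.fmin_l_bfgs_b(f_h, x0=np.zeros(1))[0]

    # step 2: "react" -- the robot prefers u_r = 1 but pays a Gaussian bump
    # penalty for being close to the predicted human position
    def f_r(u_r):
        d = u_r[0] - u_h[0]
        val = (u_r[0] - 1.0)**2 + np.exp(-d**2)
        g = np.array([2.0 * (u_r[0] - 1.0) - 2.0 * d * np.exp(-d**2)])
        return val, g
    u_r = scipy.optimize.fmin_l_bfgs_b(f_r, x0=np.zeros(1),
                                       bounds=[(-2.0, 2.0)])[0]
    return u_r, u_h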


# TODO: make this more modular so there isn't so much copied code
class PredictReactHierarchicalMaximizer(PredictReactMaximizer):
    def __init__(self, r_h, traj_h, r_r, traj_r, mat_name, n, proj,
                 traj_truck=None, use_timeup=True, use_second_order=False,
                 init_plan_scheme='prev_opt', disc_grid=None, step_grid=None,
                 vH_grid=None, vR_grid=None):
        """
        Arguments:
        - r_h: the human tactical reward.
        - traj_h: the human trajectory.
        - r_r: the robot tactical reward.
        - traj_r: the robot trajectory.
        - mat_name: the matlab file with the value function grids.
        - n: the number of dimensions of the strategic level.
        - proj: function specifying the projection from the tactical level
          to the strategic level.
        - traj_truck: the truck trajectory. If None, the truck is not used
          in the strategic value.
        """
        # ------------------------------------------------------------------
        # Basics.
        PredictReactMaximizer.__init__(self, r_h, traj_h, r_r, traj_r,
            use_timeup=use_timeup, use_second_order=use_second_order,
            update_with_curr_plan_fn=self.update_corners,
            init_plan_scheme=init_plan_scheme, init_grads=False)
        self.n = n  # dimension of strategic state
        self.x_tact_h = traj_h.x_th[-1]  # final human tactical state
        self.x_tact_r = traj_r.x_th[-1]  # final robot tactical state
        if traj_truck is not None:
            self.x_tact_truck = traj_truck.x_th[-1]  # final truck tactical state

        # ------------------------------------------------------------------
        # Shared variables.
        # (see HierarchicalMaximizer: since the grid step lengths are given,
        # only the lower corners of the current grid cell are needed)
        self.cell_corners = th.shared(np.zeros(self.n))
        # corners for human value function
        self.vH_corners = th.shared(np.zeros([2 for i in range(n)]))
        # corners for robot value function
        self.vR_corners = th.shared(np.zeros([2 for i in range(n)]))

        # ------------------------------------------------------------------
        # Load grid data.
        if (disc_grid is None or step_grid is None or vH_grid is None or
                vR_grid is None):
            self.disc_grid, self.step_grid, self.vH_grid, self.vR_grid = (
                utils.load_grid_data(mat_name, n=self.n))
        else:
            self.disc_grid, self.step_grid, self.vH_grid, self.vR_grid = (
                disc_grid, step_grid, vH_grid, vR_grid)

        # ------------------------------------------------------------------
        # Value functions.
        # strategic state (projected using Theano)
        if traj_truck is not None:
            self.x_strat = proj(self.x_tact_r, self.x_tact_h, self.x_tact_truck)
        else:
            self.x_strat = proj(self.x_tact_r, self.x_tact_h)
        self.x_strat_func = th.function([], self.x_strat)

        def value_function(value_corners):
            return HierarchicalMaximizer.value_function_fn(self.x_strat,
                self.cell_corners, value_corners, self.step_grid, self.n)
        self.vR = value_function(self.vR_corners)  # robot value function
        self.vH = value_function(self.vH_corners)  # human value function

        # ------------------------------------------------------------------
        # Add the strategic value functions to the tactical rewards.
        self.r_h += config.STRATEGIC_VALUE_SCALE * self.vH
        self.r_r += config.STRATEGIC_VALUE_SCALE * self.vR

        # initialize the gradients
        PredictReactMaximizer.init_grads(self)

    # reuse the grid-interpolation helpers from HierarchicalMaximizer instead
    # of duplicating their bodies here (see the TODO above)
    value_function_fn = staticmethod(HierarchicalMaximizer.value_function_fn)
    update_corners_fn = staticmethod(HierarchicalMaximizer.update_corners_fn)

    def update_corners(self):
        """Update the corner values of the strategic grid cell by determining
        which grid cell the current strategic state belongs to."""
        cell_corners, vR_corners, vH_corners = (HierarchicalMaximizer
            .update_corners_fn(self.x_strat_func(), self.n,
                               self.disc_grid, self.vH_grid, self.vR_grid))
        self.cell_corners.set_value(cell_corners)
        self.vR_corners.set_value(vR_corners)
        self.vH_corners.set_value(vH_corners)
50.005553
187
0.585286
13,543
99,061
4.034335
0.034261
0.016655
0.017717
0.012482
0.941779
0.937057
0.932354
0.926643
0.922269
0.918297
0
0.008548
0.285491
99,061
1,980
188
50.030808
0.763379
0.493141
0
0.793963
0
0
0.003419
0.002279
0
0
0
0.00303
0.002625
0
null
null
0
0.023622
null
null
0.006562
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
246fd5db15818c7d7844a8460d9e88bae7dd0aaf
5,356
py
Python
day-1/captcha.py
michaelze/advent-of-code-2017
241d7a1e06f55b31b41ea61f6fe49839a7d14985
[ "MIT" ]
null
null
null
day-1/captcha.py
michaelze/advent-of-code-2017
241d7a1e06f55b31b41ea61f6fe49839a7d14985
[ "MIT" ]
null
null
null
day-1/captcha.py
michaelze/advent-of-code-2017
241d7a1e06f55b31b41ea61f6fe49839a7d14985
[ "MIT" ]
null
null
null
import unittest

# the author's puzzle input; the same string is used for both the default and
# the half-length step sizes below
PUZZLE_INPUT = '6592822488931338589815525425236818285229555616392928433262436847386544514648645288129834834862363847542262953164877694234514375164927616649264122487182321437459646851966649732474925353281699895326824852555747127547527163197544539468632369858413232684269835288817735678173986264554586412678364433327621627496939956645283712453265255261565511586373551439198276373843771249563722914847255524452675842558622845416218195374459386785618255129831539984559644185369543662821311686162137672168266152494656448824719791398797359326412235723234585539515385352426579831251943911197862994974133738196775618715739412713224837531544346114877971977411275354168752719858889347588136787894798476123335894514342411742111135337286449968879251481449757294167363867119927811513529711239534914119292833111624483472466781475951494348516125474142532923858941279569675445694654355314925386833175795464912974865287564866767924677333599828829875283753669783176288899797691713766199641716546284841387455733132519649365113182432238477673375234793394595435816924453585513973119548841577126141962776649294322189695375451743747581241922657947182232454611837512564776273929815169367899818698892234618847815155578736875295629917247977658723868641411493551796998791839776335793682643551875947346347344695869874564432566956882395424267187552799458352121248147371938943799995158617871393289534789214852747976587432857675156884837634687257363975437535621197887877326295229195663235129213398178282549432599455965759999159247295857366485345759516622427833518837458236123723353817444545271644684925297477149298484753858863551357266259935298184325926848958828192317538375317946457985874965434486829387647425222952585293626473351211161684297351932771462665621764392833122236577353669215833721772482863775629244619639234636853267934895783891823877845198326665728659328729472456175285229681244974389248235457688922179237895954959228638193933854787917647154837695422429184757725387589969781672596568421191236374563718951738499591454571728641951699981615249635314789251239677393251756396'


def captcha(input, stepSize=1):
    # the step size may be given either as a number or as a function of the input
    if callable(stepSize):
        useStepSize = stepSize(input)
    else:
        useStepSize = stepSize
    inputLength = len(input)
    sum = 0
    for index in range(0, inputLength):
        # compare each digit with the digit useStepSize positions ahead,
        # wrapping around to the start of the circular input
        compIndex = index + useStepSize
        if compIndex >= inputLength:
            compIndex = useStepSize - (inputLength - index)
        if input[index] == input[compIndex]:
            sum = sum + int(input[index])
    return sum


class TestCaptcha(unittest.TestCase):
    def testCaptcha(self):
        self.assertEqual(captcha('1122'), 3)
        self.assertEqual(captcha('1111'), 4)
        self.assertEqual(captcha('1234'), 0)
        self.assertEqual(captcha('91212129'), 9)
        self.assertEqual(captcha(PUZZLE_INPUT), 1029)
        calcStepSize = lambda i: int(len(i) / 2)
        self.assertEqual(captcha('1212', calcStepSize), 6)
        self.assertEqual(captcha('1221', calcStepSize), 0)
        self.assertEqual(captcha('123425', calcStepSize), 4)
        self.assertEqual(captcha('123123', calcStepSize), 12)
        self.assertEqual(captcha('12131415', calcStepSize), 4)
        self.assertEqual(captcha(PUZZLE_INPUT, calcStepSize), 1220)


if __name__ == '__main__':
    unittest.main()
137.333333
2,096
0.906087
129
5,356
37.55814
0.387597
0.034056
0.049948
0.014241
0.014448
0
0
0
0
0
0
0.82705
0.06404
5,356
38
2,097
140.947368
0.139437
0
0
0
0
0
0.776984
0.766454
0
1
0
0
0.354839
1
0.064516
false
0
0.032258
0
0.16129
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
8
707ccbba777c1fc9288acace107ac44e73fb8301
66
py
Python
stackapi/__init__.py
cgrtrifork/stackapi
146c2c5a201aa51dc8218a6e03d3e903b1d2c36d
[ "MIT" ]
56
2016-02-25T20:00:49.000Z
2022-03-07T23:27:18.000Z
stackapi/__init__.py
cgrtrifork/stackapi
146c2c5a201aa51dc8218a6e03d3e903b1d2c36d
[ "MIT" ]
42
2016-02-24T20:14:03.000Z
2021-12-05T21:36:29.000Z
stackapi/__init__.py
AWegnerGitHub/StackAPI
602d9ce6de4b8a3e0462576365536c99a5a14c71
[ "MIT" ]
19
2016-03-10T17:24:43.000Z
2022-01-31T18:22:29.000Z
from .stackapi import StackAPI
from .stackapi import StackAPIError
33
35
0.863636
8
66
7.125
0.5
0.421053
0.631579
0
0
0
0
0
0
0
0
0
0.106061
66
2
35
33
0.966102
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
562b4d6f4bd52a8f5b6c0f19685a6a5b23752271
44
py
Python
exercise/newfile42.py
LeeBeral/python
9f0d360d69ee5245e3ef13a9dc9fc666374587a4
[ "MIT" ]
null
null
null
exercise/newfile42.py
LeeBeral/python
9f0d360d69ee5245e3ef13a9dc9fc666374587a4
[ "MIT" ]
null
null
null
exercise/newfile42.py
LeeBeral/python
9f0d360d69ee5245e3ef13a9dc9fc666374587a4
[ "MIT" ]
null
null
null
a, b = 1, 2
print(a, b)
# tuple packing/unpacking swaps the two values without a temporary variable
a, b = b, a
print(a, b)
11
11
0.5
14
44
1.571429
0.357143
0.363636
0.636364
0
0
0
0
0
0
0
0
0.058824
0.227273
44
4
12
11
0.588235
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
1
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
8
564221225d9998566dce280423a3f022b29b041c
207
py
Python
src/test/test_sample.py
howaboutudance/pytomltemplate
41f375f75be378f3aa7749b03ca7819ab482c611
[ "Apache-2.0" ]
null
null
null
src/test/test_sample.py
howaboutudance/pytomltemplate
41f375f75be378f3aa7749b03ca7819ab482c611
[ "Apache-2.0" ]
2
2021-12-11T22:15:59.000Z
2021-12-11T22:31:40.000Z
src/test/test_sample.py
howaboutudance/pytomltemplate
41f375f75be378f3aa7749b03ca7819ab482c611
[ "Apache-2.0" ]
null
null
null
import pytest

from sample_module import sample


def test_helloworld():
    assert sample.helloworld() == "Hello World"


def test_helloworld_name():
    assert sample.helloworld(name="Flargen") == "Hello Flargen"
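
# Assuming sample_module is importable (e.g. installed or on PYTHONPATH),
# these tests can be run with: pytest src/test/test_sample.py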
25.875
61
0.768116
26
207
5.961538
0.5
0.090323
0.219355
0
0
0
0
0
0
0
0
0
0.120773
207
8
61
25.875
0.851648
0
0
0
0
0
0.149038
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7