hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
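The columns fall into three groups: repository and file metadata (hexsha through max_forks_repo_forks_event_max_datetime), the raw file (content) with simple surface statistics (avg_line_length, max_line_length, alphanum_fraction), and the qsc_* per-file quality signals. As a rough illustration of how the surface statistics relate to content, the sketch below recomputes a few of them; the exact line-splitting, tokenization, and rounding rules behind the stored values are assumptions here, so recomputed numbers may differ slightly from the stored ones.

# Minimal sketch (not the dataset's reference implementation) of how the
# simplest surface statistics could be recomputed from `content`.
# Assumptions: lines split on "\n", fractions taken over all characters,
# no special casing for empty files.

def basic_stats(content: str) -> dict:
    lines = content.split("\n")
    n_chars = len(content)
    return {
        "size": len(content.encode("utf-8")),             # stored `size` is bytes
        "avg_line_length": n_chars / len(lines),          # cf. `avg_line_length`
        "max_line_length": max(len(line) for line in lines),
        "alphanum_fraction": sum(c.isalnum() for c in content) / n_chars,
        "frac_chars_whitespace": sum(c.isspace() for c in content) / n_chars,
    }

if __name__ == "__main__":
    # Tiny smoke test on a two-line snippet.
    print(basic_stats("def f(x):\n    return x + 1\n"))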
ba1ef5d24ccbf2a971832aa246a86c2ee1b37525
176
py
Python
removecaractere.py
DiegoMGouveia/validadordecnpj
c982911cf00ba5c3202d4501a52c6d6e27d15c3a
[ "CC0-1.0" ]
null
null
null
removecaractere.py
DiegoMGouveia/validadordecnpj
c982911cf00ba5c3202d4501a52c6d6e27d15c3a
[ "CC0-1.0" ]
null
null
null
removecaractere.py
DiegoMGouveia/validadordecnpj
c982911cf00ba5c3202d4501a52c6d6e27d15c3a
[ "CC0-1.0" ]
null
null
null
def removcaract(cnpj):
    documento = []
    for x in cnpj:
        if x.isnumeric():
            documento.append(x)
    documento = ''.join(documento)
    return documento
19.555556
34
0.579545
19
176
5.368421
0.631579
0
0
0
0
0
0
0
0
0
0
0
0.306818
176
8
35
22
0.836066
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ba1fb220aa59dba58d3443a661c782258ca19b13
54
py
Python
src/python/WMCore/DQMCat/__init__.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
21
2015-11-19T16:18:45.000Z
2021-12-02T18:20:39.000Z
src/python/WMCore/DQMCat/__init__.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
5,671
2015-01-06T14:38:52.000Z
2022-03-31T22:11:14.000Z
src/python/WMCore/DQMCat/__init__.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
67
2015-01-21T15:55:38.000Z
2022-02-03T19:53:13.000Z
#!/usr/bin/env python
"""
_DQMCat_

"""

__all__ = []
6.75
21
0.555556
6
54
4
1
0
0
0
0
0
0
0
0
0
0
0
0.185185
54
7
22
7.714286
0.545455
0.537037
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ba26069bf439820986a841df4e9fba10364c2283
583
py
Python
python/API_test/test.py
GG-yuki/bugs
aabd576e9e57012a3390007af890b7c6ab6cdda8
[ "MIT" ]
null
null
null
python/API_test/test.py
GG-yuki/bugs
aabd576e9e57012a3390007af890b7c6ab6cdda8
[ "MIT" ]
null
null
null
python/API_test/test.py
GG-yuki/bugs
aabd576e9e57012a3390007af890b7c6ab6cdda8
[ "MIT" ]
null
null
null
# import numpy as np
# import matplotlib.pyplot as plt
#
# def f(t):
#     return np.exp(-t) * np.cos(2*np.pi*t)
#
# t1 = np.arange(0.0, 5.0, 0.1)
# t2 = np.arange(0.0, 5.0, 0.02)
#
# plt.figure("2suplot")
# plt.subplot(211)
# plt.plot(t1, f(t1), 'bo', t2, f(t2), 'k')
#
# plt.subplot(212)
# plt.plot(t2, np.cos(2*np.pi*t2), 'r--')
# plt.show()

# plt.figure("2suplot222")
# plt.subplot(211)
# plt.plot(t1, f(t1), 'bo', t2, f(t2), 'k')
#
# plt.subplot(212)
# plt.plot(t2, np.cos(2*np.pi*t2), 'r--')
# plt.show()

if __name__ == '__main__':
    print('程序自身在运行')
else:
    print('我来自另一模块')
21.592593
43
0.572899
110
583
2.963636
0.363636
0.02454
0.055215
0.07362
0.570552
0.539877
0.539877
0.460123
0.460123
0.460123
0
0.095142
0.152659
583
27
44
21.592593
0.564777
0.795883
0
0
0
0
0.229167
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
2
e83b2680f00ceeb1442b4300ebb740aea12fe54c
1,372
py
Python
tests/test_providers/test_food.py
chinghwayu/mimesis
3afcda8e68ee2f6feb61a5c7ca663328909828fa
[ "MIT" ]
2,619
2017-07-18T13:25:46.000Z
2022-03-31T17:52:53.000Z
tests/test_providers/test_food.py
chinghwayu/mimesis
3afcda8e68ee2f6feb61a5c7ca663328909828fa
[ "MIT" ]
947
2017-07-15T18:32:12.000Z
2022-03-28T10:04:15.000Z
tests/test_providers/test_food.py
chinghwayu/mimesis
3afcda8e68ee2f6feb61a5c7ca663328909828fa
[ "MIT" ]
328
2017-07-18T01:11:12.000Z
2022-03-30T09:20:48.000Z
# -*- coding: utf-8 -*-

import re

import pytest

from mimesis import Food

from . import patterns


class TestFood(object):
    def test_str(self, food):
        assert re.match(patterns.DATA_PROVIDER_STR_REGEX, str(food))

    def test_vegetable(self, food):
        result = food.vegetable()
        assert result in food._data["vegetables"]

    def test_fruit(self, food):
        result = food.fruit()
        assert result in food._data["fruits"]

    def test_dish(self, food):
        result = food.dish()
        assert result in food._data["dishes"]

    def test_drink(self, food):
        result = food.drink()
        assert result in food._data["drinks"]

    def test_spices(self, food):
        result = food.spices()
        assert result in food._data["spices"]


class TestSeededFood(object):
    @pytest.fixture
    def fd1(self, seed):
        return Food(seed=seed)

    @pytest.fixture
    def fd2(self, seed):
        return Food(seed=seed)

    def test_vegetable(self, fd1, fd2):
        assert fd1.vegetable() == fd2.vegetable()

    def test_fruit(self, fd1, fd2):
        assert fd1.fruit() == fd2.fruit()

    def test_dish(self, fd1, fd2):
        assert fd1.dish() == fd2.dish()

    def test_drink(self, fd1, fd2):
        assert fd1.drink() == fd2.drink()

    def test_spices(self, fd1, fd2):
        assert fd1.spices() == fd2.spices()
23.655172
68
0.617347
180
1,372
4.6
0.216667
0.092995
0.084541
0.108696
0.310386
0.062802
0
0
0
0
0
0.022505
0.255102
1,372
57
69
24.070175
0.787671
0.015306
0
0.102564
0
0
0.025204
0
0
0
0
0
0.282051
1
0.333333
false
0
0.102564
0.051282
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e842075d02d5122efe26798a667e10eb60633e5d
22,017
py
Python
AutomatedTesting/Gem/PythonTests/Atom/tests/hydra_AtomEditorComponents_DeferredFogAdded.py
BreakerOfThings/o3de
f4c59f868c726470ec910623facd836047d059c3
[ "Apache-2.0", "MIT" ]
1
2022-03-28T08:06:58.000Z
2022-03-28T08:06:58.000Z
AutomatedTesting/Gem/PythonTests/Atom/tests/hydra_AtomEditorComponents_DeferredFogAdded.py
BreakerOfThings/o3de
f4c59f868c726470ec910623facd836047d059c3
[ "Apache-2.0", "MIT" ]
null
null
null
AutomatedTesting/Gem/PythonTests/Atom/tests/hydra_AtomEditorComponents_DeferredFogAdded.py
BreakerOfThings/o3de
f4c59f868c726470ec910623facd836047d059c3
[ "Apache-2.0", "MIT" ]
null
null
null
""" Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution. SPDX-License-Identifier: Apache-2.0 OR MIT """ class Tests: deferred_fog_creation = ( "Deferred Fog Entity successfully created", "P0: Deferred Fog Entity failed to be created") deferred_fog_component = ( "Entity has a Deferred Fog component", "P0: Entity failed to find Deferred Fog component") deferred_fog_component_removal = ( "Deferred Fog component successfully removed", "P0: Deferred Fog component failed to be removed") removal_undo = ( "UNDO Deferred Fog component removal success", "P0: UNDO Deferred Fog component removal failed") deferred_fog_disabled = ( "Deferred Fog component disabled", "P0: Deferred Fog component was not disabled") postfx_layer_component = ( "Entity has a PostFX Layer component", "P0: Entity did not have an PostFX Layer component") deferred_fog_enabled = ( "Deferred Fog component enabled", "P0: Deferred Fog component was not enabled") enable_deferred_fog_parameter_enabled = ( "Enable Deferred Fog parameter enabled", "P0: Enable Deferred Fog parameter was not enabled") enable_deferred_fog_parameter_disabled = ( "Enable Deferred Fog parameter disabled", "P0: Enable Deferred Fog parameter was not disabled") enable_turbulence_properties_parameter_enabled = ( "Enable Turbulence Properties parameter enabled", "P1: Enable Turbulence Properties parameter was not enabled") enable_turbulence_properties_parameter_disabled = ( "Enable Turbulence Properties parameter disabled", "P1: Enable Turbulence Properties parameter was not disabled") enable_fog_layer_parameter_enabled = ( "Enable Fog Layer parameter enabled", "P1: Enable Fog Layer parameter was not enabled") enable_fog_layer_parameter_disabled = ( "Enable Fog Layer parameter disabled", "P1: Enable Fog Layer parameter was not disabled") edit_fog_color = ( "Fog Color parameter updated", "P1: Fog Color parameter failed to update") fog_start_min = ( "Fog Start Distance set to minimum value", "P1: Fog Start Distance failed to be set to minimum value") fog_start_max = ( "Fog Start Distance set to maximum value", "P1: Fog Start Distance failed to be set to maximum value") fog_end_min = ( "Fog End Distance set to minimum value", "P1: Fog End Distance failed to be set to minimum value") fog_end_max = ( "Fog End Distance set to maximum value", "P1: Fog End Distance failed to be set to maximum value") fog_bottom_height_min = ( "Fog Bottom Height set to minimum value", "P1: Fog Bottom Height failed to be set to minimum value") fog_bottom_height_max = ( "Fog Bottom Height set to maximum value", "P1: Fog Bottom Height failed to be set to maximum value") fog_max_height_min = ( "Fog Max Height set to minimum value", "P1: Fog Max Height failed to be set to minimum value") fog_max_height_max = ( "Fog Max Height set to maximum value", "P1: Fog Max Height failed to be set to maximum value") first_octave_scale = ( "Noise Texture First Octave Scale updated", "P1: Noise Texture First Octave Scale failed to be updated") first_octave_velocity = ( "Noise Texture First Octave Velocity updated", "P1: Noise Texture First Octave Velocity failed to be updated") second_octave_scale = ( "Noise Texture Second Octave Scale updated", "P1: Noise Texture Second Octave Scale failed to be updated") second_octave_velocity = ( "Noise Texture Second Octave Velocity updated", "P1: Noise Texture Second Octave Velocity failed to be updated") octaves_blend_factor_min = ( "Octaves Blend Factor set to minimum 
value", "P1: Octaves Blend Factort failed to be set to minimum value") octaves_blend_factor_max = ( "Octaves Blend Factor set to maximum value", "P1: Octaves Blend Factort failed to be set to maximum value") enter_game_mode = ( "Entered game mode", "P0: Failed to enter game mode") exit_game_mode = ( "Exited game mode", "P0: Couldn't exit game mode") is_visible = ( "Entity is visible", "P0: Entity was not visible") is_hidden = ( "Entity is hidden", "P0: Entity was not hidden") entity_deleted = ( "Entity deleted", "P0: Entity was not deleted") deletion_undo = ( "UNDO deletion success", "P0: UNDO deletion failed") deletion_redo = ( "REDO deletion success", "P0: REDO deletion failed") def AtomEditorComponents_DeferredFog_AddedToEntity(): """ Summary: Tests the Deferred Fog component can be added to an entity and has the expected functionality. Test setup: - Wait for Editor idle loop. - Open the "Base" level. Expected Behavior: The component can be added, used in game mode, hidden/shown, deleted, and has accurate required components. Creation and deletion undo/redo should also work. Test Steps: 1) Create an Deferred Fog entity with no components. 2) Add Deferred Fog component to Deferred Fog entity. 3) Remove the Deferred Fog component. 4) Undo Bloom component removal. 5) Verify Deferred Fog component not enabled. 6) Add PostFX Layer component since it is required by the Deferred Fog component. 7) Verify Deferred Fog component is enabled. 8) Enable/Disable the "Enable Deferred Fog" parameter. 9) Enable/Disable the Enable Turbulence Properties parameter. 10) Enable/Disable the Enable Fog Layer parameter. 11) Edit the Fog Color parameter. 12) Update the Fog Start Distance parameter to min/max values. 13) Update the Fog End Distance parameter to min/max values. 14) Update the Fog Bottom Height parameter to min/max values. 15) Update the Fog Max Height parameter to min/max values. 16) Edit the Noise Texture parameter. 17) Update the Noise Texture First Octave Scale parameter to low/high values. 18) Update the Noise Texture First Octave Velocity parameter to low/high values. 19) Update the Noise Texture Second Octave Scale parameter to low/high values. 20) Update the Noise Texture Second Octave Velocity parameter to low/high values. 21) Update the Octaves Blend Factor parameter to min/max values. 22 Enter/Exit game mode. 23) Test IsHidden. 24) Test IsVisible. 25) Delete Deferred Fog entity. 26) UNDO deletion. 27) REDO deletion. 28) Look for errors. :return: None """ import azlmbr.legacy.general as general import azlmbr.math as math from editor_python_test_tools.editor_entity_utils import EditorEntity from editor_python_test_tools.utils import Report, Tracer, TestHelper from Atom.atom_utils.atom_constants import AtomComponentProperties with Tracer() as error_tracer: # Test setup begins. # Setup: Wait for Editor idle loop before executing Python hydra scripts then open "Base" level. TestHelper.init_idle() TestHelper.open_level("Graphics", "base_empty") # Test steps begin. # 1. Create an Deferred Fog entity with no components. deferred_fog_entity = EditorEntity.create_editor_entity(AtomComponentProperties.deferred_fog()) Report.critical_result(Tests.deferred_fog_creation, deferred_fog_entity.exists()) # 2. Add Deferred Fog component to Deferred Fog entity. deferred_fog_component = deferred_fog_entity.add_component( AtomComponentProperties.deferred_fog()) Report.critical_result( Tests.deferred_fog_component, deferred_fog_entity.has_component(AtomComponentProperties.deferred_fog())) # 3. 
Remove the Deferred Fog component. deferred_fog_component.remove() general.idle_wait_frames(1) Report.critical_result(Tests.deferred_fog_component_removal, not deferred_fog_entity.has_component(AtomComponentProperties.deferred_fog())) # 4. Undo Bloom component removal. general.undo() general.idle_wait_frames(1) Report.result(Tests.removal_undo, deferred_fog_entity.has_component(AtomComponentProperties.deferred_fog())) # 5. Verify Deferred Fog component not enabled. Report.result(Tests.deferred_fog_disabled, not deferred_fog_component.is_enabled()) # 6. Add PostFX Layer component since it is required by the Deferred Fog component. deferred_fog_entity.add_component(AtomComponentProperties.postfx_layer()) general.idle_wait_frames(1) Report.result( Tests.postfx_layer_component, deferred_fog_entity.has_component(AtomComponentProperties.postfx_layer())) # 7. Verify Deferred Fog component is enabled. Report.result(Tests.deferred_fog_enabled, deferred_fog_component.is_enabled()) # 8. Enable/Disable the Enable Deferred Fog parameter. # Enable the Enable Deferred Fog parameter. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Deferred Fog'), True) Report.result(Tests.enable_deferred_fog_parameter_enabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Deferred Fog')) is True) # Disable the Enable Deferred Fog parameter. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Deferred Fog'), False) Report.result(Tests.enable_deferred_fog_parameter_disabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Deferred Fog')) is False) # Re-enable the Enable Deferred Fog parameter for game mode verification. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Deferred Fog'), True) general.idle_wait_frames(1) # 9. Enable/Disable the Enable Turbulence Properties parameter. # Enable the Enable Turbulence Properties parameter. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Turbulence Properties'), True) Report.result(Tests.enable_turbulence_properties_parameter_enabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Turbulence Properties')) is True) # Disable the Enable Turbulence Properties parameter. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Turbulence Properties'), False) Report.result(Tests.enable_turbulence_properties_parameter_disabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Turbulence Properties')) is False) # Re-enable the Enable Turbulence Properties parameter for game mode verification. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Turbulence Properties'), True) # 10. Enable/Disable the Enable Fog Layer parameter. # Enable the Enable Fog Layer parameter. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Fog Layer'), True) Report.result(Tests.enable_fog_layer_parameter_enabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Fog Layer')) is True) # Disable the Enable Fog Layer parameter. 
deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Fog Layer'), False) Report.result(Tests.enable_fog_layer_parameter_disabled, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Enable Fog Layer')) is False) # Re-enable the Enable Fog Layer parameter for game mode verification. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Enable Fog Layer'), True) general.idle_wait_frames(1) # 11. Edit the Fog Color parameter. violet_color_value = math.Vector3(0.498, 0.0, 1.0) deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Color'), violet_color_value) fog_color_value = deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Color')) Report.result(Tests.edit_fog_color, fog_color_value.IsClose(violet_color_value)) # 12. Update the Fog Start Distance parameter to min/max values. # Update the Fog Start Distance parameter to its minimum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Start Distance'), 0.0) Report.result(Tests.fog_start_min, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Start Distance')) == 0.0) # Update the Fog Start Distance parameter to its maximum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Start Distance'), 5000.0) Report.result(Tests.fog_start_max, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Start Distance')) == 5000.0) # 13. Update the Fog End Distance parameter to min/max values. # Update the Fog End Distance parameter to its maximum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog End Distance'), 5000.0) Report.result(Tests.fog_end_max, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog End Distance')) == 5000.0) # Update the Fog End Distance parameter to its minimum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog End Distance'), 0.0) Report.result(Tests.fog_end_min, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog End Distance')) == 0.0) # 14. Update the Fog Bottom Height parameter to min/max values. # Update the Fog Bottom Height parameter to its maximum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Bottom Height'), 5000.0) Report.result(Tests.fog_bottom_height_max, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Bottom Height')) == 5000.0) # Update the Fog Bottom Height parameter to its minimum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Bottom Height'), -5000.0) Report.result(Tests.fog_bottom_height_min, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Bottom Height')) == -5000.0) # 15. Update the Fog Max Height parameter to min/max values. # Update the Fog Max Height parameter to its minimum value. 
deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Max Height'), -5000.0) Report.result(Tests.fog_max_height_min, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Max Height')) == -5000.0) # Update the Fog Max Height parameter to its maximum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Fog Max Height'), 5000.0) Report.result(Tests.fog_max_height_max, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Fog Max Height')) == 5000.0) general.idle_wait_frames(1) # 16. Edit the Noise Texture parameter. # This field cannot currently be edited. It will be fixed in a future sprint. # Store Noise Texture First/Second Scale & Velocity value: set_octave = math.Vector2(-100.0, 100.0) # 17. Update the Noise Texture First Octave Scale parameter to low/high values. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture First Octave Scale'), set_octave) get_first_octave_scale = deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture First Octave Scale')) Report.result(Tests.first_octave_scale, get_first_octave_scale.IsClose(set_octave)) # 18. Update the Noise Texture First Octave Velocity parameter to low/high values. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture First Octave Velocity'), set_octave) get_first_octave_velocity = deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture First Octave Velocity')) Report.result(Tests.first_octave_velocity, get_first_octave_velocity.IsClose(set_octave)) # 19. Update the Noise Texture Second Octave Scale parameter to low/high values. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture Second Octave Scale'), set_octave) get_second_octave_scale = deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture Second Octave Scale')) Report.result(Tests.second_octave_scale, get_second_octave_scale.IsClose(set_octave)) # 20. Update the Noise Texture Second Octave Velocity parameter to low/high values. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture Second Octave Velocity'), set_octave) get_second_octave_velocity = deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Noise Texture Second Octave Velocity')) Report.result(Tests.second_octave_velocity, get_second_octave_velocity.IsClose(set_octave)) # 21. Update the Octaves Blend Factor parameter to min/max values. # Update the Octaves Blend Factor to its minimum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Octaves Blend Factor'), 0.0) Report.result(Tests.octaves_blend_factor_min, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Octaves Blend Factor')) == 0.0) # Update the Octave Blend Factor to its maximum value. deferred_fog_component.set_component_property_value( AtomComponentProperties.deferred_fog('Octaves Blend Factor'), 1.0) Report.result(Tests.octaves_blend_factor_max, deferred_fog_component.get_component_property_value( AtomComponentProperties.deferred_fog('Octaves Blend Factor')) == 1.0) # 22. Enter/Exit game mode. 
TestHelper.enter_game_mode(Tests.enter_game_mode) general.idle_wait_frames(1) TestHelper.exit_game_mode(Tests.exit_game_mode) # 23. Test IsHidden. deferred_fog_entity.set_visibility_state(False) Report.result(Tests.is_hidden, deferred_fog_entity.is_hidden() is True) # 24. Test IsVisible. deferred_fog_entity.set_visibility_state(True) general.idle_wait_frames(1) Report.result(Tests.is_visible, deferred_fog_entity.is_visible() is True) # 25. Delete Deferred Fog entity. deferred_fog_entity.delete() Report.result(Tests.entity_deleted, not deferred_fog_entity.exists()) # 26. UNDO deletion. general.undo() general.idle_wait_frames(1) Report.result(Tests.deletion_undo, deferred_fog_entity.exists()) # 27. REDO deletion. general.redo() general.idle_wait_frames(1) Report.result(Tests.deletion_redo, not deferred_fog_entity.exists()) # 28. Look for errors and asserts. TestHelper.wait_for_condition(lambda: error_tracer.has_errors or error_tracer.has_asserts, 1.0) for error_info in error_tracer.errors: Report.info(f"Error: {error_info.filename} {error_info.function} | {error_info.message}") for assert_info in error_tracer.asserts: Report.info(f"Assert: {assert_info.filename} {assert_info.function} | {assert_info.message}") if __name__ == "__main__": from editor_python_test_tools.utils import Report Report.start_test(AtomEditorComponents_DeferredFog_AddedToEntity)
51.441589
116
0.703774
2,641
22,017
5.634229
0.093147
0.127151
0.099462
0.136089
0.804839
0.716801
0.641196
0.565255
0.521169
0.480578
0
0.014071
0.231775
22,017
427
117
51.562061
0.865673
0.212699
0
0.210145
0
0
0.230774
0.006253
0
0
0
0
0.01087
1
0.003623
false
0
0.021739
0
0.155797
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e86b6d9c3698570962ab3ef870fc56b4ef2f8095
2,336
py
Python
setup.py
InsightSoftwareConsortium/ITKGenericLabelInterpolator
d804834d5af635241cb92ab0ef2197d6e4f4e50c
[ "Apache-2.0" ]
3
2019-09-09T03:16:26.000Z
2020-05-21T15:19:51.000Z
setup.py
InsightSoftwareConsortium/ITKGenericLabelInterpolator
d804834d5af635241cb92ab0ef2197d6e4f4e50c
[ "Apache-2.0" ]
12
2016-10-14T15:44:48.000Z
2020-06-17T02:33:46.000Z
setup.py
InsightSoftwareConsortium/ITKGenericLabelInterpolator
d804834d5af635241cb92ab0ef2197d6e4f4e50c
[ "Apache-2.0" ]
5
2018-01-17T15:08:24.000Z
2020-05-18T19:26:18.000Z
# -*- coding: utf-8 -*-
from __future__ import print_function
from os import sys

try:
    from skbuild import setup
except ImportError:
    print('scikit-build is required to build from source.', file=sys.stderr)
    print('Please run:', file=sys.stderr)
    print('', file=sys.stderr)
    print('  python -m pip install scikit-build')
    sys.exit(1)

setup(
    name='itk-genericlabelinterpolator',
    version='1.1.0',
    author='Joël Schaerer',
    author_email='joelthelion@laposte.net',
    packages=['itk'],
    package_dir={'itk': 'itk'},
    download_url=r'https://github.com/InsightSoftwareConsortium/ITKGenericLabelInterpolator',
    description=r'ITK classes for generic interpolation of label images.',
    long_description='itk-genericlabelinterpolator provides a generic '
                     'interpolator for label images to interpolate each '
                     'label with an ordinary image interpolator, and return '
                     'the label with the highest value.\n'
                     'Please refer to:\n'
                     'Schaerer J., Roche F., Belaroussi B., '
                     '"A generic interpolator for multi-label images.", '
                     'Insight Journal, January-December 2014, http://hdl.handle.net/10380/3506.',
    classifiers=[
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: C++",
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Healthcare Industry",
        "Intended Audience :: Science/Research",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Medical Science Apps.",
        "Topic :: Scientific/Engineering :: Information Analysis",
        "Topic :: Software Development :: Libraries",
        "Operating System :: Android",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Operating System :: Unix",
        "Operating System :: MacOS"
    ],
    license='Apache',
    keywords='ITK InsightToolkit Label-images Interpolation',
    url=r'https://github.com/InsightSoftwareConsortium/ITKGenericLabelInterpolator',
    install_requires=[
        r'itk>=5.2rc1'
    ]
)
40.982456
97
0.627997
234
2,336
6.226496
0.58547
0.051476
0.026767
0.037062
0.096088
0.096088
0.096088
0
0
0
0
0.012622
0.253853
2,336
56
98
41.714286
0.823293
0.00899
0
0
0
0
0.603978
0.062689
0
0
0
0
0
1
0
true
0
0.075472
0
0.075472
0.09434
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
e86eac78d1bfe7828b2d429f638b50510a0cf862
366
py
Python
OOP/first_steps_in_OOP_excercise/cup.py
petel3/Softuni_education
4fd80f8c6ce6c3d6a838edecdb091dda2ed1084c
[ "MIT" ]
2
2022-03-05T13:17:12.000Z
2022-03-05T13:17:16.000Z
first_steps_in_OOP_excercise/cup.py
petel3/Softuni_education
4fd80f8c6ce6c3d6a838edecdb091dda2ed1084c
[ "MIT" ]
null
null
null
first_steps_in_OOP_excercise/cup.py
petel3/Softuni_education
4fd80f8c6ce6c3d6a838edecdb091dda2ed1084c
[ "MIT" ]
null
null
null
class Cup:
    def __init__(self,size,quantity):
        self.size = size
        self.quantity = quantity

    def status(self):
        return self.size-self.quantity

    def fill(self, quantity):
        if self.quantity<=self.status():
            self.quantity+=quantity


cup = Cup(100, 50)
print(cup.status())
cup.fill(40)
cup.fill(20)
print(cup.status())
17.428571
40
0.617486
49
366
4.530612
0.326531
0.27027
0.144144
0
0
0
0
0
0
0
0
0.032609
0.245902
366
20
41
18.3
0.771739
0
0
0.142857
0
0
0
0
0
0
0
0
0
1
0.214286
false
0
0
0.071429
0.357143
0.142857
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
e87da2084cf844776c8fc20cab98cfd12d1d9d68
1,454
py
Python
apps/accounts/views.py
Chamane/django-starter-project
11ee773c3d105826d9d9b8ee492bb57252c2e75c
[ "MIT" ]
1
2019-10-20T05:11:33.000Z
2019-10-20T05:11:33.000Z
apps/accounts/views.py
Chamane/django-skeleton
11ee773c3d105826d9d9b8ee492bb57252c2e75c
[ "MIT" ]
4
2021-06-08T20:30:10.000Z
2022-03-12T00:02:36.000Z
apps/accounts/views.py
Chamane/django-foundation
11ee773c3d105826d9d9b8ee492bb57252c2e75c
[ "MIT" ]
1
2020-08-12T10:49:25.000Z
2020-08-12T10:49:25.000Z
from django.shortcuts import render, redirect
from django.contrib.auth import login as auth_login, authenticate, get_user_model
from django.urls import reverse

from .forms import LoginForm, RegisterForm


def login(request):
    # if request is a HTTP POST try to pull out the relevant information
    if request.method == 'POST':
        login_form = LoginForm(request.POST)
        if login_form.is_valid():
            email = login_form.cleaned_data['email']
            password = login_form.cleaned_data['password']
            user = authenticate(request, username=email, password=password)
            #if user is not None:
                #auth_login(request, user)
                #if user.is_activate:
                    #return redirect('doctor_dashboard', user_slug=user.slug)
            #else:
                #return redirect(reverse('home'))
    else:
        login_form = LoginForm()
    return render(request, 'accounts/login.html', {'login_form': login_form,})


def register(request):
    if request.method == 'POST':
        register_form = RegisterForm(request.POST)
        if register_form.is_valid():
            # TODO: add recaptcha validation
            user = register_form.save()
            auth_login(request, user)
            return redirect(reverse('home'))
    else:
        register_form = RegisterForm()
    return render(
        request,
        'accounts/register.html',
        {'register_form': register_form}
    )
33.813953
81
0.634801
165
1,454
5.442424
0.351515
0.070156
0.035635
0.042316
0.064588
0
0
0
0
0
0
0
0.269601
1,454
42
82
34.619048
0.845574
0.175378
0
0.148148
0
0
0.07479
0.018487
0
0
0
0.02381
0
1
0.074074
false
0.074074
0.148148
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
0
0
0
2
e898946db1164b75a4ff3727c9d47e0cde0c12ab
1,969
py
Python
oo2/modelo.py
DuduMontezuma/curso_python_poo
2163649741d14dfa1b7fb410f131971abb3858fe
[ "MIT" ]
null
null
null
oo2/modelo.py
DuduMontezuma/curso_python_poo
2163649741d14dfa1b7fb410f131971abb3858fe
[ "MIT" ]
null
null
null
oo2/modelo.py
DuduMontezuma/curso_python_poo
2163649741d14dfa1b7fb410f131971abb3858fe
[ "MIT" ]
null
null
null
class Programa:
    def __init__(self, nome, ano):
        self._nome = nome.title()
        self.ano = ano
        self._likes = 0

    @property
    def likes(self):
        return self._likes

    def dar_like(self):
        self._likes += 1

    @property
    def nome(self):
        return self._nome

    @nome.setter
    def nome(self, novo_nome):
        self._nome = novo_nome.title()

    def __str__(self):
        return f"{self.nome} - {self.ano} - {self.likes} Likes"


class Filme(Programa):
    def __init__(self, nome, ano, duracao):
        super().__init__(nome, ano)
        self.duracao = duracao

    def __str__(self):
        return f"{self.nome} - {self.ano} - {self.duracao} min - {self.likes} Likes"


class Serie(Programa):
    def __init__(self, nome, ano, temporadas):
        super().__init__(nome, ano)
        self.temporadas = temporadas

    def __str__(self):
        return f"{self.nome} - {self.ano} - {self.temporadas} temporadas - {self.likes} Likes"


class Playlist:
    def __init__(self, nome, programas):
        self.nome = nome
        self._programas = programas

    def __getitem__(self, item):
        return self._programas[item]

    @property
    def listagem(self):
        return self._programas

    def __len__(self):
        return len(self._programas)


vingadores = Filme("Vingadores - Guerra infinita", 2018, 160)
atlanta = Serie("Atlanta", 2018, 2)
tmep = Filme("Todo mundo em pânico", 1999, 100)
demolidor = Serie("Demolidor", 2016, 2)

vingadores.dar_like()
tmep.dar_like()
tmep.dar_like()
tmep.dar_like()
tmep.dar_like()
demolidor.dar_like()
demolidor.dar_like()
atlanta.dar_like()
atlanta.dar_like()
atlanta.dar_like()

filmes_e_series = [vingadores, atlanta, tmep]
playlist_fim_de_semana = Playlist("fim de semana", filmes_e_series)

print(f"Tamanho do Playlist: {len(playlist_fim_de_semana)}")
print(playlist_fim_de_semana[0])

for programa in playlist_fim_de_semana.listagem:
    print(programa)
22.632184
94
0.656171
257
1,969
4.70428
0.225681
0.072787
0.053763
0.078577
0.290323
0.236559
0.172043
0.172043
0.131514
0.131514
0
0.017544
0.218385
1,969
86
95
22.895349
0.768031
0
0
0.278689
0
0.032787
0.159472
0.014728
0
0
0
0
0
1
0.229508
false
0
0
0.131148
0.42623
0.04918
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
e8bf5045e9ca64b173db4de3074c8f6695d4a027
898
py
Python
reconsec.py
Corshine-Official/Eztools
9278ef4916172505895071c4fc82ee68f4b92e18
[ "MIT" ]
1
2020-05-02T17:14:46.000Z
2020-05-02T17:14:46.000Z
reconsec.py
Corshine-Official/Eztools
9278ef4916172505895071c4fc82ee68f4b92e18
[ "MIT" ]
null
null
null
reconsec.py
Corshine-Official/Eztools
9278ef4916172505895071c4fc82ee68f4b92e18
[ "MIT" ]
null
null
null
##Author: CORSHINE
##################################################
## / ___/ _ \| _ \/ ___|| | | |_ _| \ | | ____|##
##| |  | | | | |_) \___ \| |_| || || \| | _|  ##
##| |__| |_| | _ < ___) | _ || || |\ | |___  ##
## \____\___/|_| \_\____/|_| |_|___|_| \_|_____|##
##################################################

import sys
import requests
import socket
import json

if len(sys.argv) < 2:
    print("Usage: " + sys.argv[0] + "<url>")
    sys.exit(1)

req = requests.get("https://"+sys.argv[1])
print("\n"+str(req.headers))

gethostby_ = socket.gethostbyname(sys.argv[1])
print("\nThe IP address of "+sys.argv[1]+" is: "+gethostby_ + "\n")

#ipinfo.io
req_two = requests.get("https://ipinfo.io/"+gethostby_+"/json")
resp_ = json.loads(req_two.text)

print("Location: "+resp_["loc"])
print("Region: "+resp_["region"])
print("City: "+resp_["city"])
print("Country: "+resp_["country"])
27.212121
67
0.494432
86
898
4.395349
0.476744
0.092593
0.063492
0.068783
0
0
0
0
0
0
0
0.007833
0.146993
898
32
68
28.0625
0.48564
0.232739
0
0
0
0
0.219684
0
0
0
0
0
0
1
0
false
0
0.235294
0
0.235294
0.411765
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
e8d42944bf85008756e6f87a53f4073b97be7f98
244
py
Python
leetcode/python/504_base_7.py
VVKot/leetcode-solutions
7d6e599b223d89a7861929190be715d3b3604fa4
[ "MIT" ]
4
2019-04-22T11:57:36.000Z
2019-10-29T09:12:56.000Z
leetcode/python/504_base_7.py
VVKot/coding-competitions
7d6e599b223d89a7861929190be715d3b3604fa4
[ "MIT" ]
null
null
null
leetcode/python/504_base_7.py
VVKot/coding-competitions
7d6e599b223d89a7861929190be715d3b3604fa4
[ "MIT" ]
null
null
null
class Solution:
    def convertToBase7(self, num: int) -> str:
        result = ''
        n = abs(num)
        while n:
            n, curr = divmod(n, 7)
            result = str(curr) + result
        return '-' * (num < 0) + result or '0'
24.4
46
0.463115
29
244
3.896552
0.62069
0
0
0
0
0
0
0
0
0
0
0.027211
0.397541
244
9
47
27.111111
0.741497
0
0
0
0
0
0.008197
0
0
0
0
0
0
1
0.125
false
0
0
0
0.375
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2ce19310597369332c9e74b49bd510aea31275cc
434
py
Python
test/test_file.py
dawoodkhan82/Maniac-Bot-Test
536a37f9bde5ba97bf8b7ec450ec24dae33ab6cd
[ "0BSD" ]
null
null
null
test/test_file.py
dawoodkhan82/Maniac-Bot-Test
536a37f9bde5ba97bf8b7ec450ec24dae33ab6cd
[ "0BSD" ]
null
null
null
test/test_file.py
dawoodkhan82/Maniac-Bot-Test
536a37f9bde5ba97bf8b7ec450ec24dae33ab6cd
[ "0BSD" ]
null
null
null
""" test: """ def sum_function(a, b, c): """ returns sum of a and b :param a: array :param b: integer :return: sum of a and b change """ c = a return c def average_function(a, b): """ returns the average of the squares of a and b :param a: integer :param b: integer :return: average of a and b """ c = (a**2 + b**2) / 2 return c def no_docs(c): return c
14
49
0.525346
71
434
3.169014
0.295775
0.053333
0.106667
0.124444
0.173333
0.115556
0
0
0
0
0
0.010676
0.352535
434
30
50
14.466667
0.790036
0.467742
0
0.375
0
0
0
0
0
0
0
0
0
1
0.375
false
0
0
0.125
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
2ce886f7acef4d60af3f9bc82c962bf6389e0074
1,590
py
Python
python/lib/Lib/site-packages/django/views/static.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2019-03-10T03:34:14.000Z
2020-06-09T22:53:32.000Z
python/lib/Lib/site-packages/django/views/static.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
null
null
null
python/lib/Lib/site-packages/django/views/static.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
1
2018-10-03T12:35:06.000Z
2018-10-03T12:35:06.000Z
""" Views and functions for serving static files. These are only to be used during development, and SHOULD NOT be used in a production setting. """ import mimetypes import os import posixpath import re import stat import urllib import warnings from email.Utils import parsedate_tz, mktime_tz from django.template import loader from django.http import Http404, HttpResponse, HttpResponseRedirect, HttpResponseNotModified from django.template import Template, Context, TemplateDoesNotExist from django.utils.http import http_date from django.contrib.staticfiles.views import (directory_index, was_modified_since, serve as staticfiles_serve) def serve(request, path, document_root=None, show_indexes=False, insecure=False): """ Serve static files below a given point in the directory structure. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'}) in your URLconf. You must provide the ``document_root`` param. You may also set ``show_indexes`` to ``True`` if you'd like to serve a basic index of the directory. This index view will use the template hardcoded below, but if you'd like to override it, you can create a template called ``static/directory_index.html``. """ warnings.warn("The view at `django.views.static.serve` is deprecated; " "use the path `django.contrib.staticfiles.views.serve` " "instead.", PendingDeprecationWarning) return staticfiles_serve(request, path, document_root, show_indexes, insecure)
37.857143
98
0.740881
220
1,590
5.281818
0.504545
0.043029
0.030981
0.041308
0.068847
0
0
0
0
0
0
0.002285
0.174214
1,590
41
99
38.780488
0.882711
0.415094
0
0
0
0
0.133106
0.076223
0
0
0
0
0
1
0.052632
false
0
0.684211
0
0.789474
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2ce8c5d6bfb3409043fe9182f3a832f5b173e331
220
py
Python
test/util.py
pyrige/greedypacker
ca6a302af2b0275a52df91be01831c87ee428aa2
[ "Apache-2.0" ]
108
2017-11-04T02:26:44.000Z
2021-09-19T01:27:37.000Z
test/util.py
pyrige/greedypacker
ca6a302af2b0275a52df91be01831c87ee428aa2
[ "Apache-2.0" ]
10
2018-05-31T08:08:47.000Z
2021-06-01T22:02:19.000Z
test/util.py
pyrige/greedypacker
ca6a302af2b0275a52df91be01831c87ee428aa2
[ "Apache-2.0" ]
24
2017-12-01T04:22:09.000Z
2021-11-02T07:37:49.000Z
import sys
import contextlib


@contextlib.contextmanager
def stdout_redirect(stringIO):
    sys.stdout = stringIO
    try:
        yield stringIO
    finally:
        sys.stdout = sys.__stdout__
        stringIO.seek(0)
18.333333
35
0.681818
24
220
6.041667
0.541667
0.186207
0.234483
0
0
0
0
0
0
0
0
0.006061
0.25
220
11
36
20
0.872727
0
0
0
0
0
0
0
0
0
0
0
0
1
0.1
false
0
0.2
0
0.3
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
2cf099bfde81fc69bf5322d331fa296b309dfadf
95
py
Python
day09/test05.py
jaywoong/python
99daedd5a9418b72b2d5c3b800080e730eb9b3ea
[ "Apache-2.0" ]
null
null
null
day09/test05.py
jaywoong/python
99daedd5a9418b72b2d5c3b800080e730eb9b3ea
[ "Apache-2.0" ]
null
null
null
day09/test05.py
jaywoong/python
99daedd5a9418b72b2d5c3b800080e730eb9b3ea
[ "Apache-2.0" ]
null
null
null
import wx;

app = wx.App();
frame = wx.Frame(None,0,'wxPython');
frame.Show();
app.MainLoop();
13.571429
36
0.642105
15
95
4.066667
0.6
0.163934
0
0
0
0
0
0
0
0
0
0.011905
0.115789
95
6
37
15.833333
0.714286
0
0
0
0
0
0.084211
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa08e7120d71853d269a946a4c5a5a461272fdb7
2,466
py
Python
syncstream/webtools.py
cainmagi/sync-stream
8d50bd2c9f13071b057dd2583b80f63229f89b0e
[ "MIT" ]
null
null
null
syncstream/webtools.py
cainmagi/sync-stream
8d50bd2c9f13071b057dd2583b80f63229f89b0e
[ "MIT" ]
null
null
null
syncstream/webtools.py
cainmagi/sync-stream
8d50bd2c9f13071b057dd2583b80f63229f89b0e
[ "MIT" ]
null
null
null
#!python
# -*- coding: UTF-8 -*-
'''
################################################################
# Tools used for web connections and services.
# @ Sync-stream
# Produced by
# Yuchen Jin @ cainmagi@gmail.com,
#              yjin4@uh.edu.
# Requirements: (Pay attention to version)
#   python 3.6+
#   fasteners 0.16+
# This module contains the basic tools for the host module,
# and would be only used by the host module.
################################################################
'''

import sys
import types

import urllib3


class StdoutWrapper:
    '''A wrapper for ensuring that the stdout is always directed to
    the same position.
    '''
    def __init__(self):
        self.__stdout = sys.stdout
        self.__stderr = sys.stderr
        self.__stdout_ = None
        self.__stderr_ = None

    def __enter__(self):
        self.__stdout_ = sys.stdout
        self.__stderr_ = sys.stderr
        sys.stdout = self.__stdout
        sys.stderr = self.__stderr
        return

    def __exit__(self, exc_type: type, exc_value: Exception, exc_traceback: types.TracebackType) -> None:
        sys.stdout = self.__stdout_
        sys.stderr = self.__stderr_


class SafePoolManager(urllib3.PoolManager):
    '''A wrapped urllib3.PoolManager with context supported.
    This is a private class. Should not be used by users.
    '''
    def __enter__(self):
        return self

    def __exit__(self, exc_type: type, exc_value: Exception, exc_traceback: types.TracebackType) -> None:
        self.clear()


class SafeRequest:
    '''A wrapper for providing context for the urllib3.HTTPResponse.
    This is a private class. Should not be used by users.
    '''
    def __init__(self, request: urllib3.HTTPResponse) -> None:
        self.request = request

    def __enter__(self) -> urllib3.HTTPResponse:
        return self.request

    def __exit__(self, exc_type: type, exc_value: Exception, exc_traceback: types.TracebackType) -> None:
        self.request.release_conn()


def clean_http_manager(http: urllib3.HTTPSConnectionPool) -> None:
    '''A callback for the finializer, this function would be used for
    cleaning the http requests, if the connection does not need to exist.
    '''
    http.clear()


def close_request_session(sess: urllib3.PoolManager) -> None:
    '''A callback for the finializer, this function would be used for
    cleaning the requests session, if the connection does not need to exist.
    '''
    sess.close()
29.710843
105
0.643552
301
2,466
5.033223
0.352159
0.033003
0.034323
0.027723
0.444224
0.444224
0.444224
0.444224
0.350495
0.29505
0
0.007776
0.217762
2,466
82
106
30.073171
0.777605
0.428629
0
0.147059
0
0
0
0
0
0
0
0
0
1
0.294118
false
0
0.088235
0.058824
0.558824
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
fa097d090d49a098699f5d92400d2487139dd4c9
153
py
Python
emgen/storage/config.py
emgenio/emgen-site
76b958390fe9e81245e0b6af2dd7de63bf5dc857
[ "Artistic-2.0" ]
null
null
null
emgen/storage/config.py
emgenio/emgen-site
76b958390fe9e81245e0b6af2dd7de63bf5dc857
[ "Artistic-2.0" ]
null
null
null
emgen/storage/config.py
emgenio/emgen-site
76b958390fe9e81245e0b6af2dd7de63bf5dc857
[ "Artistic-2.0" ]
null
null
null
STORAGE_DSN="mongodb://mongo/emgenstore"
STORAGE_DB="emgenstore"
TTL=14400
#db.log_events.createIndex( { "createdAt": 1 }, { expireAfterSeconds: 3600 } )
38.25
78
0.75817
18
153
6.277778
0.833333
0
0
0
0
0
0
0
0
0
0
0.070922
0.078431
153
4
78
38.25
0.730496
0.503268
0
0
0
0
0.473684
0.342105
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa0d712343974ab9859f38111ecfd7a47a070be1
223
py
Python
check_eda.py
rblcoder/Analyzing-open-data-edd-ca
04dc0df7606eadd3a71317847237397b98aeabe1
[ "Apache-2.0" ]
null
null
null
check_eda.py
rblcoder/Analyzing-open-data-edd-ca
04dc0df7606eadd3a71317847237397b98aeabe1
[ "Apache-2.0" ]
null
null
null
check_eda.py
rblcoder/Analyzing-open-data-edd-ca
04dc0df7606eadd3a71317847237397b98aeabe1
[ "Apache-2.0" ]
null
null
null
from utils import eda, eda_plotting
import ipywidgets as widgets
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt


def check1():
    eda_iv = eda_plotting.Eda_Plotting()


check1()
20.272727
40
0.789238
36
223
4.777778
0.555556
0.19186
0
0
0
0
0
0
0
0
0
0.010695
0.161435
223
11
41
20.272727
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0.111111
false
0
0.666667
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
fa13bc00e8286199880821e6c04e099ce857be14
347
py
Python
examples/mnist_efficientnet/mnist_efficientnet/network.py
ar90n/kkt
e772860b20231e067973478350a4f0edb8bf5db1
[ "Apache-2.0" ]
1
2020-09-03T08:19:46.000Z
2020-09-03T08:19:46.000Z
examples/mnist_efficientnet/mnist_efficientnet/network.py
ar90n/kkt
e772860b20231e067973478350a4f0edb8bf5db1
[ "Apache-2.0" ]
null
null
null
examples/mnist_efficientnet/mnist_efficientnet/network.py
ar90n/kkt
e772860b20231e067973478350a4f0edb8bf5db1
[ "Apache-2.0" ]
null
null
null
import timm
import torch
import torch.nn as nn


class Network(nn.Module):
    def __init__(self):
        super(Network, self).__init__()
        self.net = timm.create_model("efficientnet_b0", num_classes=10)

    def forward(self, x):
        return self.net(x)


def extract_result(net_out):
    return (torch.max(net_out.data, 1)[1]).numpy()
20.411765
71
0.67147
52
347
4.211538
0.576923
0.100457
0
0
0
0
0
0
0
0
0
0.017986
0.198847
347
16
72
21.6875
0.769784
0
0
0
0
0
0.043228
0
0
0
0
0
0
1
0.272727
false
0
0.272727
0.181818
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
fa24c06f6cb71d0d132ad5eaf12a37f9f961d403
3,896
py
Python
examples/relationship/manytoonefield/views.py
zhengtong0898/django-decode
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
[ "MIT" ]
5
2020-07-14T07:48:10.000Z
2021-12-20T21:20:10.000Z
examples/relationship/manytoonefield/views.py
zhengtong0898/django-decode
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
[ "MIT" ]
7
2021-03-26T03:13:38.000Z
2022-03-12T00:42:03.000Z
examples/relationship/manytoonefield/views.py
zhengtong0898/django-decode
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
[ "MIT" ]
1
2021-02-16T07:04:25.000Z
2021-02-16T07:04:25.000Z
from django.shortcuts import render, HttpResponse
from .models import Reporter, Article
from datetime import date


def single_create(request):
    # 测试用例-1: 创建一条维表数据和一条主表数据.

    # 1. 创建维表数据.
    # INSERT INTO `manytoonefield_reporter` (`first_name`, `last_name`, `email`)
    # VALUES ('John', 'Smith', 'john@example.com')
    # RETURNING `manytoonefield_reporter`.`id`;
    r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
    r.save()

    # 2. 创建主表数据, 同时将维表数据作为参数
    # INSERT INTO `manytoonefield_article` (`headline`, `pub_date`, `reporter_id`)
    # VALUES ('This is a test', '2005-07-27', 1)
    # RETURNING `manytoonefield_article`.`id`;
    a = Article(headline="This is a test", pub_date=date(2005, 7, 27), reporter=r)
    a.save()

    # 3. 正向查询.
    # SELECT `manytoonefield_article`.`id`,
    #        `manytoonefield_article`.`headline`,
    #        `manytoonefield_article`.`pub_date`,
    #        `manytoonefield_article`.`reporter_id`
    # FROM `manytoonefield_article`
    # WHERE `manytoonefield_article`.`id` = 1 LIMIT 21;
    af = Article.objects.get(pk=1)
    # N+1 查询
    # SELECT `manytoonefield_reporter`.`id`,
    #        `manytoonefield_reporter`.`first_name`,
    #        `manytoonefield_reporter`.`last_name`,
    #        `manytoonefield_reporter`.`email`
    # FROM `manytoonefield_reporter`
    # WHERE `manytoonefield_reporter`.`id` = 1 LIMIT 21;
    print("af.reporter.id: ", af.reporter.id)

    # 4. 反向查询.
    # SELECT `manytoonefield_reporter`.`id`,
    #        `manytoonefield_reporter`.`first_name`,
    #        `manytoonefield_reporter`.`last_name`,
    #        `manytoonefield_reporter`.`email`
    # FROM `manytoonefield_reporter`
    # WHERE `manytoonefield_reporter`.`id` = 1 LIMIT 21;
    r = Reporter.objects.get(pk=1)
    # SELECT `manytoonefield_article`.`id`,
    #        `manytoonefield_article`.`headline`,
    #        `manytoonefield_article`.`pub_date`,
    #        `manytoonefield_article`.`reporter_id`
    # FROM `manytoonefield_article`
    # WHERE `manytoonefield_article`.`reporter_id` = 1 LIMIT 21;
    # TODO: 为什么 all 对应的是limit 21?
    print(r.article_set.all())

    return HttpResponse("view_create")


def multi_create(request):
    # 测试用例-2: 创建一条维表数据和多条主表数据.

    # 1. 创建维表数据.
    r = Reporter.objects.create(first_name='John', last_name='Smith', email='john@example.com')

    # 2. 创建30条主表数据, 同时将维表数据作为参数
    for i in range(30):
        Article.objects.create(headline=f"This is a test-{i}", pub_date=date(2005, 7, 27), reporter=r)

    # 3. 正向查询
    af = Article.objects.get(pk=1)             # Article 是 Many; Reporter 是 One;
    print("af.reporter.id: ", af.reporter.id)  # 触发N+1;

    # 4. 反向查询
    # SELECT `manytoonefield_reporter`.`id`,
    #        `manytoonefield_reporter`.`first_name`,
    #        `manytoonefield_reporter`.`last_name`,
    #        `manytoonefield_reporter`.`email`
    # FROM `manytoonefield_reporter`
    # WHERE `manytoonefield_reporter`.`id` = 1 LIMIT 21;
    r = Reporter.objects.get(pk=1)
    # SELECT `manytoonefield_article`.`id`,
    #        `manytoonefield_article`.`headline`,
    #        `manytoonefield_article`.`pub_date`,
    #        `manytoonefield_article`.`reporter_id`
    # FROM `manytoonefield_article`
    # WHERE `manytoonefield_article`.`reporter_id` = 1 LIMIT 21
    articles = r.article_set.all()
    print("articles: ", articles)
    # SELECT `manytoonefield_article`.`id`,
    #        `manytoonefield_article`.`headline`,
    #        `manytoonefield_article`.`pub_date`,
    #        `manytoonefield_article`.`reporter_id`
    # FROM `manytoonefield_article`
    # WHERE `manytoonefield_article`.`reporter_id` = 1;
    for article in articles:
        print("article.id: ", article.id)

    return HttpResponse("view_query")
42.813187
118
0.630903
416
3,896
5.71875
0.204327
0.229508
0.070618
0.091215
0.656578
0.656578
0.638083
0.613703
0.591005
0.591005
0
0.021398
0.23229
3,896
90
119
43.288889
0.773989
0.611653
0
0.24
0
0
0.108276
0
0
0
0
0.011111
0
1
0.08
false
0
0.12
0
0.28
0.2
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
fa3f5463140c204c3bff9a9b399127aa2571317e
604
py
Python
corehq/motech/repeaters/migrations/0004_attempt_strings.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
471
2015-01-10T02:55:01.000Z
2022-03-29T18:07:18.000Z
corehq/motech/repeaters/migrations/0004_attempt_strings.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
14,354
2015-01-01T07:38:23.000Z
2022-03-31T20:55:14.000Z
corehq/motech/repeaters/migrations/0004_attempt_strings.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
175
2015-01-06T07:16:47.000Z
2022-03-29T13:27:01.000Z
# Generated by Django 2.2.19 on 2021-04-10 14:10

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('repeaters', '0003_migrate_connectionsettings'),
    ]

    operations = [
        migrations.AlterField(
            model_name='sqlrepeatrecordattempt',
            name='message',
            field=models.TextField(blank=True, default=''),
        ),
        migrations.AlterField(
            model_name='sqlrepeatrecordattempt',
            name='traceback',
            field=models.TextField(blank=True, default=''),
        ),
    ]
25.166667
59
0.602649
54
604
6.666667
0.62963
0.111111
0.138889
0.161111
0.505556
0.505556
0
0
0
0
0
0.046083
0.281457
604
23
60
26.26087
0.78341
0.076159
0
0.470588
1
0
0.179856
0.134892
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa3facbe147c10169ce12c2ef4fdeb7436c59421
601
py
Python
errorScreen.py
Heliodex/HybridOS
27024daa336159d6a300b3463e8a0cbd98dee93e
[ "Unlicense" ]
2
2021-03-19T02:39:18.000Z
2021-07-31T18:05:28.000Z
errorScreen.py
HelioDex/HybridOS
4cf2fba325f18a86b7b0e744dbf5cf2cf7271d61
[ "Unlicense" ]
null
null
null
errorScreen.py
HelioDex/HybridOS
4cf2fba325f18a86b7b0e744dbf5cf2cf7271d61
[ "Unlicense" ]
null
null
null
def error():
    from colorama import Fore
    from os import system

    system("clear")
    print(Fore.BLUE + """
A problem has occurred and HybridOS needs to shut down.
We will shut down and display the error when you press enter.
We're sorry if we caused any inconvenience.

== TEST BEGIN ==
{
T0cgPSBUcnVlCk1PRCA9IElORE
VWCkVOQyA9IExld2luCkVESVQg
PSBUcnVlCgpWR2hwY3lCcGN5Q
mhiaUJ2Y21sCm5hVzVoYkNCMlp
YSnphVzl1SQpHOW1JRWg1WW5
KcFpFOVRMaUJKCmRDQm9ZWE
1nWVNCa2FYTjBjCm04Z2JHbGpa
VzV6WlM0PQo=
}
== TEST END ==

You weren't expected or supposed to understand that.
""")
    input()
    raise  # bare raise outside an except block aborts with RuntimeError
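The bracketed block in the banner is ordinary base64 split across lines. A minimal sketch of how one could decode it, for illustration only; the segments below are joined exactly as they appear in the banner, and the decoded payload is deliberately not asserted here:

import base64

encoded = (
    "T0cgPSBUcnVlCk1PRCA9IElORE"
    "VWCkVOQyA9IExld2luCkVESVQg"
    "PSBUcnVlCgpWR2hwY3lCcGN5Q"
    "mhiaUJ2Y21sCm5hVzVoYkNCMlp"
    "YSnphVzl1SQpHOW1JRWg1WW5"
    "KcFpFOVRMaUJKCmRDQm9ZWE"
    "1nWVNCa2FYTjBjCm04Z2JHbGpa"
    "VzV6WlM0PQo="
)

# The banner's own newlines would also be tolerated: b64decode discards
# non-alphabet characters by default (validate=False).
print(base64.b64decode(encoded).decode(errors="replace"))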
16.243243
62
0.758735
68
601
6.705882
0.808824
0.035088
0
0
0
0
0
0
0
0
0
0.04878
0.181364
601
37
63
16.243243
0.878049
0
0
0
0
0
0.800664
0.292359
0
0
0
0
0
1
0.045455
true
0
0.090909
0
0.136364
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
fa4de4dcfaefaff226e9de2f8d35f31a08f4c8e9
66,216
py
Python
analyze/check_analyze.py
JunboLu/CP2K_kit
0950f37f253c3f90d6a0539c57f1be1045e7317d
[ "Apache-2.0" ]
16
2021-04-19T03:40:32.000Z
2022-02-21T12:53:33.000Z
analyze/check_analyze.py
JunboLu/CP2K_kit
0950f37f253c3f90d6a0539c57f1be1045e7317d
[ "Apache-2.0" ]
null
null
null
analyze/check_analyze.py
JunboLu/CP2K_kit
0950f37f253c3f90d6a0539c57f1be1045e7317d
[ "Apache-2.0" ]
2
2021-11-28T02:55:31.000Z
2022-02-21T12:54:52.000Z
#!/usr/bin/env python import os import copy from collections import OrderedDict from CP2K_kit.tools import data_op from CP2K_kit.tools import log_info from CP2K_kit.tools import traj_info def check_step(init_step, end_step, start_frame_id, end_frame_id): ''' check_step: check the input step Args: init_step : int init_step is the initial frame id. end_step : int end_step is the ending frame id. start_frame_id: int start_frame_id is the starting frame id in the trajectory file. end_frame_id: int end_frame_id is the ending frame id in the trajectory file. Returns : none ''' if ( init_step > end_step ): log_info.log_error('Input error: the ending step is less than initial step, please check or reset init_step and end_step') exit() if ( init_step < start_frame_id ): log_info.log_error('Input error: the initial step is less than the starting step in trajectory, please check or reset init_step') exit() if ( end_step > end_frame_id ): log_info.log_error('Input error: the ending step is larger than the ending step in trajectory, please check or reset end_step') exit() def check_center_inp(center_dic): ''' check_center_inp: check the input file of center. Args: center_dic: dictionary center_dic contains the parameter for center. Returns: new_center_dic: dictionary new_center_dic is the revised center_dic ''' #As we use pop, we copy the dic. new_center_dic = copy.deepcopy(center_dic) if ( 'center_type' in new_center_dic.keys() ): center_type = new_center_dic['center_type'] if ( center_type == 'center_box' or center_type == 'center_image' ): pass else: log_info.log_error('Input error: only center_box and center_image are supported, please check or set analyze/center/type') exit() else: log_info.log_error('Input error: no center type, please set analyze/center/type') exit() if ( new_center_dic['center_type'] == 'center_image' ): if ( 'center_atom_id' in new_center_dic.keys() ): center_id = new_center_dic['center_atom_id'] if ( data_op.eval_str(center_id) == 1 ): new_center_dic['center_atom_id'] = int(center_id) else: log_info.log_error('Input error: center atom id should be integer, please check or set analyze/center/center_id') exit() else: log_info.log_error('Input error: no center atom id for center_image, please set analyze/center/center_id') exit() if ( 'traj_coord_file' in new_center_dic.keys() ): traj_coord_file = new_center_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): new_center_dic['traj_coord_file'] = os.path.abspath(traj_coord_file) else: log_info.log_error('%s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/center/traj_coord_file') exit() if ( 'box' in new_center_dic.keys() ): A_exist = 'A' in new_center_dic['box'].keys() B_exist = 'B' in new_center_dic['box'].keys() C_exist = 'C' in new_center_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/center/box') exit() if ( A_exist and B_exist and C_exist ): box_A = new_center_dic['box']['A'] box_B = new_center_dic['box']['B'] box_C = new_center_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/center/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): new_center_dic['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/center/box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i)
== 2 for i in box_B) ): new_center_dic['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/center/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): new_center_dic['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/center/box/C') exit() if ( 'connect0' in new_center_dic.keys() ): group_atom = [] atom_id = [] group_num = 0 for i in new_center_dic['connect0'].keys(): if ( 'group' in i ): group_num = group_num+1 if ( 'atom_id' in new_center_dic['connect0'][i].keys() ): atom_id_i = data_op.get_id_list(new_center_dic['connect0'][i]['atom_id']) atom_id.append(atom_id_i) else: log_info.log_error('Input error: no atom id, please set analyze/center/connect/group/atom_id') exit() if ( 'group_atom' in new_center_dic['connect0'][i].keys() ): group_atom_i = new_center_dic['connect0'][i]['group_atom'] if ( isinstance(group_atom_i, list)): if ( all(data_op.eval_str(x) == 0 for x in group_atom_i) ): group_atom.append(group_atom_i) else: log_info.log_error('Input error: group atoms wrong, please check or reset analyze/center/connect/group/group_atom') exit() else: group_atom.append([group_atom_i]) else: log_info.log_error('Input error: no group atoms, please set analyze/center/connect/group/group_atom') exit() for i in center_dic['connect0'].keys(): new_center_dic['connect0'].pop(i) new_center_dic['connect0']['atom_id'] = atom_id new_center_dic['connect0']['group_atom'] = group_atom return new_center_dic def check_diffusion_inp(diffusion_dic): ''' check_diffusion_inp: check the input of diffusion. Args: diffusion_dic: dictionary diffusion_dic contains parameters for diffusion Returns: diffusion_dic: dictionary diffusion_dic is the revised diffusion_dic ''' #new_diffusion_dic = copy.deepcopy(diffusion_dic) if ( 'method' in diffusion_dic.keys() ): method = diffusion_dic['method'] if ( method == 'einstein_sum' or method == 'green_kubo' ): pass else: log_info.log_error('Input error: only einstein_sum or green_kubo are supported for diffusion calculation') exit() else: diffusion_dic['method'] = 'einstein_sum' method = diffusion_dic['method'] if ( method == 'einstein_sum' ): if ( 'traj_coord_file' in diffusion_dic.keys() ): traj_coord_file = diffusion_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): diffusion_dic['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(traj_coord_file, 'coord_xyz') else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/diffusion/traj_coord_file') exit() if ( 'remove_com' in diffusion_dic.keys() ): remove_com = data_op.str_to_bool(diffusion_dic['remove_com']) if ( isinstance(remove_com, bool) ): diffusion_dic['remove_com'] = remove_com else: log_info.log_error('Input error: remove_com must be bool, please check or reset analyze/diffusion/remove_com') else: diffusion_dic['remove_com'] = True elif ( method == 'green_kubo' ): if ( 'traj_vel_file' in diffusion_dic.keys() ): traj_vel_file = diffusion_dic['traj_vel_file'] if ( os.path.exists(os.path.abspath(traj_vel_file)) ): diffusion_dic['traj_vel_file'] = os.path.abspath(traj_vel_file) atoms_num, pre_base_block, end_base_block, pre_base, 
frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(traj_vel_file, 'vel') else: log_info.log_error('Input error: %s file does not exist' %(traj_vel_file)) exit() else: log_info.log_error('Input error: no velocity trajectory file, please set analyze/diffusion/traj_vel_file') exit() if ( 'atom_id' in diffusion_dic.keys() ): atom_id = data_op.get_id_list(diffusion_dic['atom_id']) diffusion_dic['atom_id'] = atom_id else: log_info.log_error('Input error: no atom_id, please set analyze/diffusion/atom_id') exit() if ( 'init_step' in diffusion_dic.keys() ): init_step = diffusion_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): diffusion_dic['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step wrong, please check or set analyze/diffusion/init_step') exit() else: diffusion_dic['init_step'] = start_frame_id if ( 'end_step' in diffusion_dic.keys() ): end_step = diffusion_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): diffusion_dic['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step wrong, please check or set analyze/diffusion/end_step') exit() else: diffusion_dic['end_step'] = end_frame_id init_step = diffusion_dic['init_step'] end_step = diffusion_dic['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'max_frame_corr' in diffusion_dic.keys() ): max_frame_corr = diffusion_dic['max_frame_corr'] if ( data_op.eval_str(max_frame_corr) == 1 ): if ( int(max_frame_corr) > int(frames_num/2) ): log_info.log_error('Input error: max_frame_corr should be less than frames_num/2, please check or reset analyze/diffusion/max_frame_corr') exit() else: diffusion_dic['max_frame_corr'] = int(max_frame_corr) else: log_info.log_error('Input error: max_frame_corr should be integer, please check or set analyze/diffusion/max_frame_corr') exit() else: diffusion_dic['max_frame_corr'] = int(frames_num/2) return diffusion_dic def check_file_trans_inp(file_trans_dic): ''' check_file_trans_inp: check the input of file_trans. Args: file_trans_dic: dictionary file_trans_dic contains parameters for file_trans. Returns: file_trans_dic: dictionary file_trans_dic is the revised file_trans_dic ''' if ( 'transd_file' in file_trans_dic.keys() ): transd_file = file_trans_dic['transd_file'] if ( os.path.exists(os.path.abspath(transd_file)) ): file_trans_dic['transd_file'] = os.path.abspath(transd_file) else: log_info.log_error('Input error: %s does not exist' %(transd_file)) exit() else: log_info.log_error('Input error: no transfered file, please set analzye/file_trans/transd_file') exit() if ( 'trans_type' in file_trans_dic.keys() ): trans_type = file_trans_dic['trans_type'] if ( trans_type == 'pdb2xyz' or trans_type == 'xyz2pdb' ): pass else: log_info.log_error('Input error: only pbd2xyz and xyz2pdb are supported, please check or reset analyze/file_trans/trans_type') exit() else: log_info.log_error('Input error: no transfer type, please set analyze/file_trans/trans_type') exit() return file_trans_dic def check_geometry_inp(geometry_dic): ''' check_geometry_inp: check the input of geometry. Args: geometry_dic: dictionary geometry_dic contains parameters for geometry. 
Returns: geometry_dic: dictionary geometry_dic is the revised geometry_dic ''' if ( 'coord_num' in geometry_dic ): coord_num_dic = geometry_dic['coord_num'] if ( 'traj_coord_file' in coord_num_dic.keys() ): traj_coord_file = coord_num_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): geometry_dic['coord_num']['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_coord_file), 'coord_xyz') else: log_info.log_error('Input error: %s does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/geometry/coord_num/traj_coord_file') exit() if ( 'init_step' in coord_num_dic.keys() ): init_step = coord_num_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): geometry_dic['coord_num']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/geometry/coord_num/init_step') exit() else: geometry_dic['coord_num']['init_step'] = start_frame_id if ( 'end_step' in coord_num_dic.keys() ): end_step = coord_num_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): geometry_dic['coord_num']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/geometry/coord_num/end_step') exit() else: geometry_dic['coord_num']['end_step'] = end_frame_id init_step = geometry_dic['coord_num']['init_step'] end_step = geometry_dic['coord_num']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'r_cut' in coord_num_dic.keys() ): r_cut = coord_num_dic['r_cut'] if ( data_op.eval_str(r_cut) == 1 or data_op.eval_str(r_cut) ==2 ): geometry_dic['coord_num']['r_cut'] = float(r_cut) else: log_info.log_error('Input error: r_cut must be float, please check or reset analyze/geometry/coord_num/r_cut') else: geometry_dic['coord_num']['r_cut'] = 6.0 if ( 'box' in coord_num_dic.keys() ): A_exist = 'A' in coord_num_dic['box'].keys() B_exist = 'B' in coord_num_dic['box'].keys() C_exist = 'C' in coord_num_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/geometry/coord_num/box') exit() if ( A_exist and B_exist and C_exist ): box_A = coord_num_dic['box']['A'] box_B = coord_num_dic['box']['B'] box_C = coord_num_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/geometry/coord_num/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): geometry_dic['coord_num']['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/geometry/coord_num//box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): geometry_dic['coord_num']['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/geometry/coord_num/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): geometry_dic['coord_num']['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/geometry/coord_num/box/C') exit() return geometry_dic elif ( 'bond_length' in geometry_dic ): bond_length_dic = geometry_dic['bond_length'] if ( 
'traj_coord_file' in bond_length_dic.keys() ): traj_coord_file = bond_length_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): geometry_dic['bond_length']['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_coord_file), 'coord_xyz') else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/geometry/bond_length/traj_coord_file') exit() if ( 'atom_pair' in bond_length_dic.keys() ): atom_pair = bond_length_dic['atom_pair'] if ( len(atom_pair) == 2 and all(data_op.eval_str(x) == 1 for x in atom_pair) ): geometry_dic['bond_length']['atom_pair'] = [int (x) for x in atom_pair] else: log_info.log_error('Input error: atom_pair should be 2 integer, please check or reset analyze/geometry/bond_length/atom_pair') exit() else: log_info.log_error('Input error: no atom_pair, please set analyze/geometry/bond_length/atom_pair') exit() if ( 'init_step' in bond_length_dic.keys() ): init_step = bond_length_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): geometry_dic['bond_length']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/geometry/bond_length/init_step') exit() else: geometry_dic['bond_length']['init_step'] = start_frame_id if ( 'end_step' in bond_length_dic.keys() ): end_step = bond_length_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): geometry_dic['bond_length']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/geometry/bond_length/end_step') exit() else: geometry_dic['bond_length']['end_step'] = end_frame_id init_step = geometry_dic['bond_length']['init_step'] end_step = geometry_dic['bond_length']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'box' in bond_length_dic.keys() ): A_exist = 'A' in bond_length_dic['box'].keys() B_exist = 'B' in bond_length_dic['box'].keys() C_exist = 'C' in bond_length_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/geometry/bond_length/box') exit() if ( A_exist and B_exist and C_exist ): box_A = bond_length_dic['box']['A'] box_B = bond_length_dic['box']['B'] box_C = bond_length_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/geometry/bond_length/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): geometry_dic['bond_length']['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/geometry/bond_length//box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): geometry_dic['bond_length']['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/geometry/bond_length/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): geometry_dic['bond_length']['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/geometry/bond_length/box/C') exit() return geometry_dic elif ( 'bond_angle' in geometry_dic ): 
bond_angle_dic = geometry_dic['bond_angle'] if ( 'traj_coord_file' in bond_angle_dic.keys() ): traj_coord_file = bond_angle_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): geometry_dic['bond_angle']['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(traj_coord_file, 'coord_xyz') else: log_info.log_error('Input error: %s does not exist' %(traj_coord_file)) else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/geometry/bond_angle/traj_coord_file') exit() if ( 'atom_pair' in bond_angle_dic.keys() ): atom_pair = bond_angle_dic['atom_pair'] if ( len(atom_pair) == 3 and all(data_op.eval_str(x) == 1 for x in atom_pair) ): geometry_dic['bond_angle']['atom_pair'] = [int(x) for x in atom_pair] else: log_info.log_error('Input error: atom_pair should be 3 integers, please check or reset analyze/geometry/bond_angle/atom_pair') exit() else: log_info.log_error('Input error: no atom_pair, please set analyze/geometry/bond_angle/atom_pair') exit() if ( 'init_step' in bond_angle_dic.keys() ): init_step = bond_angle_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): geometry_dic['bond_angle']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step shoule be integer, please check or reset analyze/geometry/bond_angle/init_step') exit() else: geometry_dic['bond_angle']['init_step'] = start_frame_id if ( 'end_step' in bond_angle_dic.keys() ): end_step = bond_angle_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): geometry_dic['bond_angle']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step shoule be integer, please check or reset analyze/geometry/bond_angle/end_step') exit() else: geometry_dic['bond_angle']['end_step'] = end_frame_id init_step = geometry_dic['bond_angle']['init_step'] end_step = geometry_dic['bond_angle']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) return geometry_dic elif ( 'first_shell' in geometry_dic ): first_shell_dic = geometry_dic['first_shell'] if ( 'traj_coord_file' in first_shell_dic.keys() ): traj_coord_file = first_shell_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): geometry_dic['first_shell']['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_coord_file), 'coord_xyz') else: log_info.log_error('Input error: %s does not exist' %(traj_coord_file)) exit() if ( 'atom_type_pair' in first_shell_dic.keys() ): atom_type_pair = first_shell_dic['atom_type_pair'] if ( len(atom_type_pair) == 2 and all(data_op.eval_str(x) == 0 for x in atom_type_pair) ): pass else: log_info.log_error('Input error: atom_type_pair should be 2 string, please check or reset analyze/geometry/first_shell/atom_type_pair') exit() else: log_info.log_error('Input error: no atom_type_pair, please set analyze/geometry/first_shell/atom_type_pair') exit() if ( 'first_shell_dist' in first_shell_dic.keys() ): first_shell_dist = first_shell_dic['first_shell_dist'] if ( data_op.eval_str(first_shell_dist) == 2 ): geometry_dic['first_shell']['first_shell_dist'] = float(first_shell_dist) else: log_info.log_error('Input error: first_shell_dist should be float, please check or reset analyze/geometry/first_shell/first_shell_dist') exit() else: 
log_info.log_error('Input error: no first_shell_dist, please set analyze/geometry/first_shell/first_shell_dist') exit() if ( 'dist_conv' in first_shell_dic.keys() ): dist_conv = first_shell_dic['dist_conv'] if ( data_op.eval_str(dist_conv) == 2 ): geometry_dic['first_shell']['dist_conv'] = float(dist_conv) else: log_info.log_error('Input error: dist_conv should be float, please check or reset analyze/geometry/first_shell/dist_conv') exit() else: geometry_dic['first_shell']['dist_conv'] = 0.1 if ( 'init_step' in first_shell_dic.keys() ): init_step = first_shell_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): geometry_dic['first_shell']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/geometry/first_shell/init_step') else: geometry_dic['first_shell']['init_step'] = start_frame_id if ( 'end_step' in first_shell_dic.keys() ): end_step = first_shell_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): geometry_dic['first_shell']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/geometry/first_shell/end_step') else: geometry_dic['first_shell']['end_step'] = end_frame_id init_step = geometry_dic['first_shell']['init_step'] end_step = geometry_dic['first_shell']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'box' in first_shell_dic.keys() ): A_exist = 'A' in first_shell_dic['box'].keys() B_exist = 'B' in first_shell_dic['box'].keys() C_exist = 'C' in first_shell_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/geometry/first_shell/box') exit() if ( A_exist and B_exist and C_exist ): box_A = first_shell_dic['box']['A'] box_B = first_shell_dic['box']['B'] box_C = first_shell_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/geometry/first_shell/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): geometry_dic['first_shell']['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/geometry/first_shell//box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): geometry_dic['first_shell']['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/geometry/first_shell/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): geometry_dic['first_shell']['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/geometry/first_shell/box/C') exit() return geometry_dic elif ( 'choose_structure' in geometry_dic ): choose_str_dic = geometry_dic['choose_structure'] if ( 'file_type' in choose_str_dic.keys() ): file_type = choose_str_dic['file_type'] valid_file_type = ['coord_xyz', 'vel', 'frc'] if ( file_type in valid_file_type ): pass else: log_info.log_error('Input error: only coord, vel, and frc are supported for file_type,\ please check or reset analyze/geometry/choose_structure/file_type') exit() else: log_info.log_error('Input error: no file type, please set analyze/geometry/choose_structure/file_type') exit() file_type = choose_str_dic['file_type'] if ( 'traj_file' in choose_str_dic.keys() ): traj_file = choose_str_dic['traj_file'] if ( 
os.path.exists(os.path.abspath(traj_file)) ): geometry_dic['choose_structure']['traj_file'] = os.path.abspath(traj_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(traj_file, 'coord_xyz') else: log_info.log_error('Input error: %s file does not exist' %(traj_file)) exit() else: log_info.log_error('Input error: no trajectory file, please set analyze/geometry/choose_structure/traj_file') exit() if ( 'init_step' in choose_str_dic.keys() ): init_step = choose_str_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): geometry_dic['choose_structure']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/geometry/choose_structure/init_step') else: geometry_dic['choose_structure']['init_step'] = start_frame_id if ( 'end_step' in choose_str_dic.keys() ): end_step = choose_str_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): geometry_dic['choose_structure']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/geometry/choose_structure/end_step') else: geometry_dic['choose_structure']['end_step'] = end_frame_id init_step = geometry_dic['choose_structure']['init_step'] end_step = geometry_dic['choose_structure']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'atom_id' in choose_str_dic.keys() ): geometry_dic['choose_structure']['atom_id'] = data_op.get_id_list(choose_str_dic['atom_id']) else: log_info.log_error('Input error: no atom_id, please set analyze/geometry/choose_structure/atom_id') exit() return geometry_dic elif ( 'order_structure' in geometry_dic.keys() ): new_geometry_dic = copy.deepcopy(geometry_dic) order_str_dic = new_geometry_dic['order_structure'] if ( 'traj_coord_file' in order_str_dic.keys() ): traj_coord_file = order_str_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): new_geometry_dic['order_structure']['traj_coord_file'] = os.path.abspath(traj_coord_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/geometry/order_structure/traj_coord_file') exit() if ( 'box' in order_str_dic.keys() ): A_exist = 'A' in order_str_dic['box'].keys() B_exist = 'B' in order_str_dic['box'].keys() C_exist = 'C' in order_str_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/geometry/order_structure/box') exit() if ( A_exist and B_exist and C_exist ): box_A = order_str_dic['box']['A'] box_B = order_str_dic['box']['B'] box_C = order_str_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/geometry/order_structure/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): new_geometry_dic['order_structure']['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/geometry/order_structure//box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): new_geometry_dic['order_structure']['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/geometry/order_structure/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or 
data_op.eval_str(i) == 2 for i in box_C) ): new_geometry_dic['order_structure']['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/geometry/order_structure/box/C') exit() group_atom = [] atom_id = [] group_num = 0 for i in order_str_dic.keys(): if ( 'connect' in i ): group_num = group_num+1 if ( 'atom_id' in order_str_dic[i].keys() ): atom_id_i = data_op.get_id_list(order_str_dic[i]['atom_id']) atom_id.append(atom_id_i) else: log_info.log_error('Input error: no atom_id, please set analyze/geometry/order_structure/connect/atom_id') exit() if ( 'group_atom' in order_str_dic[i].keys() ): group_atom_i = order_str_dic[i]['group_atom'] if ( isinstance(group_atom_i, list)): if ( all(data_op.eval_str(x) == 0 for x in group_atom_i) ): group_atom.append(group_atom_i) else: log_info.log_error('Input error: group_atom wrong, please check or reset analyze/geometry/order_structure/connect/group_atom') exit() else: group_atom.append([group_atom_i]) else: log_info.log_error('Input error: no group_atom, please set analyze/geometry/order_structure/connect/group_atom') exit() if ( group_num == 0 ): log_info.log_error('Input error: no connect information, please check or reset analyze/geometry/order_structure/connect') exit() else: for i in geometry_dic['order_structure'].keys(): if ( 'connect' in i ): new_geometry_dic['order_structure'].pop(i) new_geometry_dic['order_structure']['atom_id'] = atom_id new_geometry_dic['order_structure']['group_atom'] = group_atom return new_geometry_dic def check_lmp2cp2k_inp(lmp2cp2k_dic): ''' check_lmp2cp2k_inp: check the input of lmp2cp2k. Args: lmp2cp2k_dic: dictionary lmp2cp2k_dic contains parameters for lmp2cp2k. Returns: lmp2cp2k_dic: dictionary lmp2cp2k_dic is the revised lmp2cp2k_dic. 
''' lmp2cp2k_dic = copy.deepcopy(lmp2cp2k_dic) if ( 'lmp_log_file' in lmp2cp2k_dic.keys() ): lmp_log_file = lmp2cp2k_dic['lmp_log_file'] if ( os.path.exists(os.path.abspath(lmp_log_file)) ): lmp2cp2k_dic['lmp_log_file'] = os.path.abspath(lmp_log_file) else: log_info.log_error('Input error: %s file does not exist' %(lmp_log_file)) exit() else: log_info.log_error('Input error: no lmp_log_file, please set analyze/lmp2cp2k/lmp_log_file') exit() if ( 'lmp_traj_file' in lmp2cp2k_dic.keys() ): lmp_traj_file = lmp2cp2k_dic['lmp_traj_file'] if ( os.path.exists(os.path.abspath(lmp_traj_file)) ): lmp2cp2k_dic['lmp_traj_file'] = os.path.abspath(lmp_traj_file) else: log_info.log_error('Input error: %s file does not exist' %(lmp_traj_file)) exit() else: log_info.log_error('Input error: no lmp_traj_file, please set analyze/lmp2cp2k/lmp_traj_file') exit() if ( 'atom_label' in lmp2cp2k_dic.keys() ): atom_label = lmp2cp2k_dic['atom_label'] atom_label_dic = OrderedDict() for i in range (len(atom_label)): label_split = data_op.split_str(atom_label[i], ':') atom_label_dic[int(label_split[0])] = label_split[1] lmp2cp2k_dic['atom_label'] = atom_label_dic else: log_info.log_error('Input error: no atom_label, please set analyze/lmp2cp2k/atom_label') exit() if ( 'time_step' in lmp2cp2k_dic.keys() ): time_step = lmp2cp2k_dic['time_step'] if ( data_op.eval_str(time_step) == 1 or data_op.eval_str(time_step) == 2 ): lmp2cp2k_dic['time_step'] = float(time_step) else: log_info.log_error('Input error: time_step should float, please set analyze/lmp2cp2k/time_step') exit() else: log_info.log_error('Input error: no time_step, please set analyze/lmp2cp2k/time_step') exit() valid_unit = ['metal', 'real'] if ( 'lmp_unit' in lmp2cp2k_dic.keys() ): lmp_unit = lmp2cp2k_dic['lmp_unit'] if ( lmp_unit in valid_unit ): pass else: log_info.log_error('Input error: %s is not supported, please check or reset analyze/lmp2cp2k/lmp_unit') exit() else: log_info.log_error('Input error: no lmp_unit, please set analyze/lmp2cp2k/lmp_unit') exit() if ( 'unwrap' in lmp2cp2k_dic.keys() ): unwrap = data_op.str_to_bool(lmp2cp2k_dic['unwrap']) if ( isinstance(unwrap, bool) ): lmp2cp2k_dic['unwrap'] = unwrap else: log_info.log_error('Input error: unwrap should be bool, please set analyze/lmp2cp2k/unwrap') exit() else: lmp2cp2k_dic['unwrap'] = False if ( 'box' in lmp2cp2k_dic.keys() ): A_exist = 'A' in lmp2cp2k_dic['box'].keys() B_exist = 'B' in lmp2cp2k_dic['box'].keys() C_exist = 'C' in lmp2cp2k_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/lmp2cp2k/box') exit() if ( A_exist and B_exist and C_exist ): box_A = lmp2cp2k_dic['box']['A'] box_B = lmp2cp2k_dic['box']['B'] box_C = lmp2cp2k_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/lmp2cp2k/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): lmp2cp2k_dic['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/lmp2cp2k/box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): lmp2cp2k_dic['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/lmp2cp2k/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): lmp2cp2k_dic['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box 
wrong, please check analyze/lmp2cp2k/box/C') exit() return lmp2cp2k_dic def check_rdf_inp(rdf_dic): ''' check_lmp2cp2k_inp: check the input of rdf. Args: rdf_dic: dictionary rdf_dic contains parameters for rdf. Returns: rdf_dic: dictionary rdf_dic is the revised rdf_dic. ''' rdf_dic = copy.deepcopy(rdf_dic) if ( 'traj_coord_file' in rdf_dic.keys() ): traj_coord_file = rdf_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): rdf_dic['traj_coord_file'] = os.path.abspath(traj_coord_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_coord_file), 'coord_xyz') else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectroy file, please set analyze/rdf/traj_coord_file') exit() if ( 'atom_type_pair' in rdf_dic.keys() ): atom_type_pair = rdf_dic['atom_type_pair'] if ( len(atom_type_pair) == 2 and all(data_op.eval_str(x) == 0 for x in atom_type_pair) ): pass else: log_info.log_error('Input error: atom_type_pair should be 2 string, please check or reset analyze/rdf/atom_type_pair') exit() else: log_info.log_error('Input error: no atom type, please set analyze/rdf/atom_type_pair') exit() if ( 'init_step' in rdf_dic.keys() ): init_step = rdf_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): rdf_dic['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/rdf/init_step') else: rdf_dic['init_step'] = start_frame_id if ( 'end_step' in rdf_dic.keys() ): end_step = rdf_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): rdf_dic['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/rdf/end_step') else: rdf_dic['end_step'] = end_frame_id init_step = rdf_dic['init_step'] end_step = rdf_dic['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'r_increment' in rdf_dic.keys() ): r_increment = rdf_dic['r_increment'] if ( data_op.eval_str(r_increment) == 2 ): rdf_dic['r_increment'] = float(r_increment) else: log_info.log_error('Input error: r_increment should be float, please check or reset analyze/rdf/r_increment') else: rdf_dic['r_increment'] = 0.1 if ( 'box' in rdf_dic.keys() ): A_exist = 'A' in rdf_dic['box'].keys() B_exist = 'B' in rdf_dic['box'].keys() C_exist = 'C' in rdf_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/rdf/box') exit() if ( A_exist and B_exist and C_exist ): box_A = rdf_dic['box']['A'] box_B = rdf_dic['box']['B'] box_C = rdf_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/rdf/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): rdf_dic['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/rdf/box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): rdf_dic['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/rdf/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): rdf_dic['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check 
analyze/rdf/box/C') exit() return rdf_dic def check_spectrum_inp(spectrum_dic): ''' check_spectrum_inp: check the input of spectrum. Args: spectrum_dic: dictionary spectrum_dic contains parameters for spectrum. Returns: spectrum_dic: dictionary spectrum_dic is the revised spectrum_dic. ''' valid_type = ['general', 'water_mode', 'hydration_mode'] if ( 'type' in spectrum_dic.keys() ): spec_type = spectrum_dic['type'] if ( spec_type in valid_type ): pass else: log_info.log_error('Input error: %s is not supported, please check or reset analyze/power_spectrum/type' %(spec_type)) exit() else: log_info.log_error('Input error: no type, please set analyze/power_spectrum/type') exit() if ( 'traj_vel_file' in spectrum_dic.keys() ): traj_vel_file = spectrum_dic['traj_vel_file'] if ( os.path.exists(os.path.abspath(traj_vel_file)) ): spectrum_dic['traj_vel_file'] = os.path.abspath(traj_vel_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_vel_file), 'vel') else: log_info.log_error('Input error: %s file does not exist' %(traj_vel_file)) exit() else: log_info.log_error('Input error: no traj_vel_file, please set analyze/power_spectrum/traj_vel_file') exit() spec_type = spectrum_dic['type'] if ( spec_type == 'water_mode' or spec_type == 'hydration_mode' ): if ( 'traj_coord_file' in spectrum_dic.keys() ): traj_coord_file = spectrum_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): spectrum_dic['traj_coord_file'] = os.path.abspath(traj_coord_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no traj_coord_file, please set analyze/power_spectrum/traj_coord_file') exit() if ( 'init_step' in spectrum_dic.keys() ): init_step = spectrum_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): spectrum_dic['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/power_spectrum/init_step') exit() else: spectrum_dic['init_step'] = start_frame_id if ( 'end_step' in spectrum_dic.keys() ): end_step = spectrum_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): spectrum_dic['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/power_spectrum/end_step') exit() else: spectrum_dic['end_step'] = end_frame_id init_step = spectrum_dic['init_step'] end_step = spectrum_dic['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'max_frame_corr' in spectrum_dic.keys() ): max_frame_corr = spectrum_dic['max_frame_corr'] if ( data_op.eval_str(max_frame_corr) == 1 ): spectrum_dic['max_frame_corr'] = int(max_frame_corr) else: log_info.log_error('Input error: max_frame_corr should be integer, please check or reset analyze/power_spectrum/max_frame_corr') exit() else: spectrum_dic['max_frame_corr'] = int(frames_num/3) if ( 'start_wave' in spectrum_dic.keys() ): start_wave = spectrum_dic['start_wave'] if ( data_op.eval_str(start_wave) == 1 or data_op.eval_str(start_wave) == 2 ): spectrum_dic['start_wave'] = float(start_wave) else: log_info.log_error('Input error: start_wave should be float, please check or reset analyze/power_spectrum/start_wave') exit() else: spectrum_dic['start_wave'] = 0 if ( 'end_wave' in spectrum_dic.keys() ): end_wave = spectrum_dic['end_wave'] if ( data_op.eval_str(end_wave) == 1 or data_op.eval_str(end_wave) == 2 ):
spectrum_dic['end_wave'] = float(end_wave) else: log_info.log_error('Input error: end_wave should be float, please check or reset analyze/power_spectrum/end_wave') exit() else: spectrum_dic['end_wave'] = 0 if ( 'normalize' in spectrum_dic.keys() ): normalize = spectrum_dic['normalize'] if ( data_op.eval_str(normalize) == 1 ): if ( int(normalize) == 0 or int(normalize) == 1 ): spectrum_dic['normalize'] = int(normalize) else: log_info.log_error('Input error: normalize should be 0 or 1, please check or reset analyze/power_spectrum/normalize') exit() else: log_info.log_error('Input error: normalize should be 0 or 1, please check or reset analyze/power_spectrum/normalize') exit() else: spectrum_dic['normalize'] = 1 if ( spec_type == 'general' or spec_type == 'water_mode' ): if ( 'atom_id' in spectrum_dic.keys() ): atom_id_list = data_op.get_id_list(spectrum_dic['atom_id']) spectrum_dic['atom_id'] = atom_id_list else: log_info.log_error('Input error: no atom_id, please set analyze/power_spectrum/atom_id') exit() else: if ( 'box' in spectrum_dic.keys() ): A_exist = 'A' in spectrum_dic['box'].keys() B_exist = 'B' in spectrum_dic['box'].keys() C_exist = 'C' in spectrum_dic['box'].keys() else: log_info.log_error('Input error: no box, please set analyze/power_spectrum/box') exit() if ( A_exist and B_exist and C_exist ): box_A = spectrum_dic['box']['A'] box_B = spectrum_dic['box']['B'] box_C = spectrum_dic['box']['C'] else: log_info.log_error('Input error: box setting error, please check analyze/power_sepctrum/box') exit() if ( len(box_A) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_A) ): spectrum_dic['box']['A'] = [float(x) for x in box_A] else: log_info.log_error('Input error: A vector of box wrong, please check analyze/power_spectrum/box/A') exit() if ( len(box_B) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_B) ): spectrum_dic['box']['B'] = [float(x) for x in box_B] else: log_info.log_error('Input error: B vector of box wrong, please check analyze/power_spectrum/box/B') exit() if ( len(box_C) == 3 and all(data_op.eval_str(i) == 1 or data_op.eval_str(i) == 2 for i in box_C) ): spectrum_dic['box']['C'] = [float(x) for x in box_C] else: log_info.log_error('Input error: C vector of box wrong, please check analyze/power_spectrum/box/C') exit() if ( spec_type == 'hydration_mode' ): if ( 'hyd_shell_dist' in spectrum_dic.keys() ): hyd_shell_dist = spectrum_dic['hyd_shell_dist'] if ( data_op.eval_str(hyd_shell_dist) == 1 or data_op.eval_str(hyd_shell_dist) == 2 ): spectrum_dic['hyd_shell_dist'] = float(hyd_shell_dist) else: log_info.log_error('Input error: hyd_shell_dist should be float, please check or reset analyze/power_spectrum/hyd_shell_dist') exit() else: log_info.log_error('Input error: no hyd_shell_dist, please set analyze/power_spectrum/hyd_shell_dist') exit() if ( 'dist_conv' in spectrum_dic.keys() ): dist_conv = spectrum_dic['dist_conv'] if ( data_op.eval_str(dist_conv) == 1 or data_op.eval_str(dist_conv) == 2 ): spectrum_dic['dist_conv'] = float(dist_conv) else: log_info.log_error('Input error: dist_conv should be float, please check or reset analyze/power_spectrum/dist_conv') exit() else: spectrum_dic['dist_conv'] = 0.3 if ( 'atom_type_pair' in spectrum_dic.keys() ): atom_type_pair = spectrum_dic['atom_type_pair'] if ( len(atom_type_pair) == 2 and all(data_op.eval_str(x) == 0 for x in atom_type_pair) ): pass else: log_info.log_error('Input error: atom_type_pair should be 2 string, please check or reset 
analyze/power_spectrum/atom_type_pair') exit() else: log_info.log_error('Input error: no atom_type_pair, please set analyze/power_spectrum/atom_type_pair') exit() return spectrum_dic def check_v_hartree_inp(v_hartree_dic): ''' check_v_hartree_inp: check the input of v_hartree. Args: v_hartree_dic: dictionary v_hartree_dic contains parameters for v_hartree. Returns: v_hartree_dic: dictionary v_hartree_dic is the revised v_hartree_dic. ''' v_hartree_dic = copy.deepcopy(v_hartree_dic) if ( 'cube_file' in v_hartree_dic.keys() ): cube_file = v_hartree_dic['cube_file'] if ( os.path.exists(os.path.abspath(cube_file)) ): v_hartree_dic['cube_file'] = os.path.abspath(cube_file) else: log_info.log_error('Input error: %s file does not exist' %(cube_file)) exit() else: log_info.log_error('Input error: no charge cube file, please set analyze/v_hartree/cube_file') exit() if ( 'surface' in v_hartree_dic.keys() ): surface = v_hartree_dic['surface'] if ( len(surface) == 3 and all(data_op.eval_str(x) == 1 for x in surface) ): v_hartree_dic['surface'] = [int(x) for x in surface] else: log_info.log_error('Input error: surface should be 3 integer, please check or set analyze/v_hartree/surface') exit() else: log_info.log_error('Input error: no surface, please set analyze/v_hartree/surface') exit() return v_hartree_dic def check_arrange_data_inp(arrange_data_dic): ''' check_arrange_data_inp: check the input of arrange_data. Args: arrange_data_dic: dictionary arrange_data_dic contains parameters for arrange_data. Returns: arrange_data_dic: dictionary arrange_data_dic is the revised arrange_data_dic. ''' if ( 'temperature' in arrange_data_dic): temp_dic = arrange_data_dic['temperature'] if ( 'traj_ener_file' in temp_dic.keys() ): traj_ener_file = temp_dic['traj_ener_file'] if ( os.path.exists(os.path.abspath(traj_ener_file)) ): arrange_data_dic['temperature']['traj_ener_file'] = os.path.abspath(traj_ener_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_ener_file)) exit() else: log_info.log_error('Input error: no energy trajectory file, please set analyze/arrange_data/temperature/traj_ener_file') exit() #arrange potential energy elif ( 'potential' in arrange_data_dic ): pot_dic = arrange_data_dic['potential'] if ( 'traj_ener_file' in pot_dic.keys() ): traj_ener_file = pot_dic['traj_ener_file'] if ( os.path.exists(os.path.abspath(traj_ener_file)) ): arrange_data_dic['potential']['traj_ener_file'] = os.path.abspath(traj_ener_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_ener_file)) exit() else: log_info.log_error('Input error: no energy trajectory file, please set analyze/arrange_data/potential/traj_ener_file') exit() #arrange mulliken charge elif ( 'mulliken' in arrange_data_dic ): mulliken_dic = arrange_data_dic['mulliken'] if ( 'traj_mul_file' in mulliken_dic.keys() ): traj_mul_file = mulliken_dic['traj_mul_file'] if ( os.path.exists(os.path.abspath(traj_mul_file)) ): arrange_data_dic['mulliken']['traj_mul_file'] = os.path.abspath(traj_mul_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_mul_file)) exit() else: log_info.log_error('Input error: no mulliken trajectory file, please set analyze/arrange_data/mulliken/traj_mul_file') exit() if ( 'atom_id' in mulliken_dic.keys() ): arrange_data_dic['mulliken']['atom_id'] = data_op.get_id_list(mulliken_dic['atom_id']) else: log_info.log_error('Input error: no atom id, please set analyze/arrange_data/mulliken/atom_id') exit() if ( 'time_step' in mulliken_dic.keys() ): time_step =
mulliken_dic['time_step'] if ( data_op.eval_str(time_step) == 2 ): arrange_data_dic['mulliken']['time_step'] = float(time_step) else: log_info.log_error('Input error: time step should be float, please check or set analyze/arrange_data/mulliken/time_step') exit() else: arrange_data_dic['mulliken']['time_step'] = 0.5 if ( 'each' in mulliken_dic.keys() ): each = mulliken_dic['each'] if ( data_op.eval_str(each) == 1 ): arrange_data_dic['mulliken']['each'] = int(each) else: log_info.log_error('Input error: each should be integer, please check or set analyze/arrange_data/mulliken/each') exit() else: arrange_data_dic['mulliken']['each'] = 1 #arrange vertical energy elif ( 'vertical_energy' in arrange_data_dic ): vert_ene_dic = arrange_data_dic['vertical_energy'] if ( 'traj_mix_ener_file' in vert_ene_dic.keys() ): traj_mix_ener_file = vert_ene_dic['traj_mix_ener_file'] if ( os.path.exists(os.path.abspath(traj_mix_ener_file)) ): arrange_data_dic['vertical_energy']['traj_mix_ener_file'] = os.path.abspath(traj_mix_ener_file) blocks_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_mix_ener_file), 'mix_ener') else: log_info.log_error('Input error: %s file does not exist' %(traj_mix_ener_file)) exit() else: log_info.log_error('Input error: no mix energy trajectory file, please set analyze/arrange_data/vertical_energy/traj_mix_ener_file') exit() if ( 'row_ox' in vert_ene_dic.keys() ): row_ox = vert_ene_dic['row_ox'] if ( data_op.eval_str(row_ox) == 1 ): arrange_data_dic['vertical_energy']['row_ox'] = int(row_ox) else: log_info.log_error('Input error: row_ox should be integer, please check or reset analyze/arrange_data/vertical_energy/row_ox') exit() else: log_info.log_error('Input error: no row_ox, please set analyze/arrange_data/vertical_energy/row_ox') exit() if ( 'row_red' in vert_ene_dic.keys() ): row_red = vert_ene_dic['row_red'] if ( data_op.eval_str(row_red) == 1 ): arrange_data_dic['vertical_energy']['row_red'] = int(row_red) else: log_info.log_error('Input error: row_red should be integer, please check or reset analyze/arrange_data/vertical_energy/row_red') exit() else: log_info.log_error('Input error: no row_red, please set analyze/arrange_data/vertical_energy/row_red') exit() if ( 'redox_type' in vert_ene_dic.keys() ): redox_type = vert_ene_dic['redox_type'] if ( redox_type == 'oxidation' or redox_type == 'reduction' ): pass else: log_info.log_error('Input error: only oxidation and reduction are supported for redox_type') exit() else: log_info.log_error('Input error: no redox_type, please set analyze/arrange_data/vertical_energy/redox_type') exit() if ( 'slow_growth' in vert_ene_dic.keys() ): slow_growth = vert_ene_dic['slow_growth'] if ( data_op.eval_str(slow_growth) == 1 ): if ( int(slow_growth) == 0 or int(slow_growth) == 1 ): arrange_data_dic['vertical_energy']['slow_growth'] = int(slow_growth) else: log_info.log_error('Input error: slow_growth should be 0 or 1, please check or reset analyze/arrange_data/vertical_energy/slow_growth') exit() else: log_info.log_error('Input error: slow_growth should be integer, please set analyze/arrange_data/vertical_energy/slow_growth') exit() else: arrange_data_dic['vertical_energy']['slow_growth'] = 0 #0 means no slow_growth slow_growth = arrange_data_dic['vertical_energy']['slow_growth'] if ( slow_growth == 1 ): if ( 'increment' in vert_ene_dic.keys() ): increment = vert_ene_dic['increment'] if ( data_op.eval_str(increment) == 2 ):
arrange_data_dic['vertical_energy']['increment'] = float(increment) else: log_info.log_error('Input error: increment should be float, please check or reset analyze/arrange_data/vertical_energy/increment') exit() else: log_info.log_error('Input error: no increment, please set analyze/arrange_data/vertical_energy/increment') exit() if ( 'init_step' in vert_ene_dic.keys() ): init_step = vert_ene_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): arrange_data_dic['vertical_energy']['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/arrange_data/vertical_energy/init_step') exit() else: arrange_data_dic['vertical_energy']['init_step'] = start_frame_id if ( 'end_step' in vert_ene_dic.keys() ): end_step = vert_ene_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): arrange_data_dic['vertical_energy']['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/arrange_data/vertical_energy/end_step') exit() else: arrange_data_dic['vertical_energy']['end_step'] = end_frame_id init_step = arrange_data_dic['vertical_energy']['init_step'] end_step = arrange_data_dic['vertical_energy']['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'final_time_unit' in vert_ene_dic.keys() ): final_time_unit = vert_ene_dic['final_time_unit'] if ( final_time_unit in ['fs', 'ps', 'ns'] ): pass else: log_info.log_error('Input error: final_time_unit wrong, please check or reset analyze/arrange_data/vertical_energy/final_time_unit') exit() else: arrange_data_dic['vertical_energy']['final_time_unit'] = 'fs' elif ( 'ti_force' in arrange_data_dic ): ti_force_dic = arrange_data_dic['ti_force'] if ( 'traj_lag_file' in ti_force_dic.keys() ): traj_lag_file = ti_force_dic['traj_lag_file'] if ( os.path.exists(os.path.abspath(traj_lag_file)) ): arrange_data_dic['ti_force']['traj_lag_file'] = os.path.abspath(traj_lag_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_lag_file)) exit() else: log_info.log_error('Input error: no lagrange trajectory file, please set analyze/arrange_data/ti_force/traj_lag_file') exit() if ( 'stat_num' in ti_force_dic.keys() ): stat_num = ti_force_dic['stat_num'] if ( data_op.eval_str(stat_num) == 1 ): arrange_data_dic['ti_force']['stat_num'] = int(stat_num) else: log_info.log_error('Input error: stat_num should be integer, please check or reset analyze/arrange_data/ti_force/stat_num') exit() else: arrange_data_dic['ti_force']['stat_num'] = 1 return arrange_data_dic def check_free_energy_inp(free_energy_dic): ''' check_free_energy_inp: check the input of free_energy. Args: free_energy_dic: dictionary free_energy_dic contains parameters for free_energy. Returns: free_energy_dic: dictionary free_energy_dic is the revised free_energy_dic.
''' free_energy_dic = copy.deepcopy(free_energy_dic) if ( 'method' in free_energy_dic.keys() ): method = free_energy_dic['method'] else: log_info.log_error('Input error: no method, please set analyze/free_energy/method') exit() if ( method == 'ti' ): if ( 'ti_file' in free_energy_dic.keys() ): ti_file = free_energy_dic['ti_file'] if ( os.path.exists(os.path.abspath(ti_file)) ): free_energy_dic['ti_file'] = os.path.abspath(ti_file) else: log_info.log_error('Input error: %s does not exist' %(ti_file)) exit() else: log_info.log_error('Input error: no ti_file, please set analyze/free_energy/ti_file') exit() return free_energy_dic def check_rmsd_inp(rmsd_dic): ''' check_rmsd_inp: check the input of rmsd. Args: rmsd_dic: dictionary rmsd_dic contains parameters for rmsd. Returns: rmsd_dic: dictionary rmsd_dic is the revised rmsd_dic. ''' if ( 'traj_coord_file' in rmsd_dic.keys() ): traj_coord_file = rmsd_dic['traj_coord_file'] if ( os.path.exists(os.path.abspath(traj_coord_file)) ): rmsd_dic['traj_coord_file'] = os.path.abspath(traj_coord_file) else: log_info.log_error('Input error: %s file does not exist' %(traj_coord_file)) exit() else: log_info.log_error('Input error: no coordination trajectory file, please set analyze/rmsd/traj_coord_file') exit() if ( 'atom_id' in rmsd_dic.keys() ): rmsd_dic['atom_id'] = data_op.get_id_list(rmsd_dic['atom_id']) else: log_info.log_error('Input error: no atom id, please set analyze/rmsd/atom_id') exit() if ( 'ref_frame' in rmsd_dic.keys() ): ref_frame = rmsd_dic['ref_frame'] if ( data_op.eval_str(ref_frame) == 1 ): rmsd_dic['ref_frame'] = int(ref_frame) else: log_info.log_error('Input error: ref_frame should be integer, please check or reset analyze/rmsd/ref_frame') exit() else: log_info.log_error('Input error: no reference frame, please set analyze/rmsd/ref_frame') exit() if ( 'compare_frame' in rmsd_dic.keys() ): compare_frame = rmsd_dic['compare_frame'] rmsd_dic['compare_frame'] = data_op.get_id_list(compare_frame) else: log_info.log_error('Input error: no compare frame, please set analyze/rmsd/compare_frame') exit() return rmsd_dic def check_time_correlation_inp(time_corr_dic): ''' check_time_correlation_inp: check the input of time_correlation. Args: time_corr_dic: dictionary time_corr_dic contains parameters for time_corr. Returns: time_corr_dic: dictionary time_corr_dic is the revised time_corr_dic.
''' if ( 'traj_file' in time_corr_dic.keys() ): traj_file = time_corr_dic['traj_file'] if ( os.path.exists(os.path.abspath(traj_file)) ): time_corr_dic['traj_file'] = os.path.abspath(traj_file) atoms_num, pre_base_block, end_base_block, pre_base, frames_num, each, start_frame_id, end_frame_id, time_step = \ traj_info.get_traj_info(os.path.abspath(traj_file), 'coord_xyz') else: log_info.log_error('Input error: %s file does not exist' %(traj_file)) exit() else: log_info.log_error('Input error: no trajectory file, please set analyze/time_correlation/traj_file') exit() if ( 'atom_id' in time_corr_dic.keys() ): atom_id = time_corr_dic['atom_id'] time_corr_dic['atom_id'] = data_op.get_id_list(atom_id) else: log_info.log_error('Input error: no atom_id, please set analyze/time_correlation/atom_id') exit() if ( 'max_frame_corr' in time_corr_dic.keys() ): max_frame_corr = time_corr_dic['max_frame_corr'] if ( data_op.eval_str(max_frame_corr) == 1 ): time_corr_dic['max_frame_corr'] = int(max_frame_corr) else: log_info.log_error('Input error: max_frame_corr should be integer, please check or reset analyze/time_correlation/max_frame_corr') exit() else: time_corr_dic['max_frame_corr'] = int(frames_num/3) if ( 'init_step' in time_corr_dic.keys() ): init_step = time_corr_dic['init_step'] if ( data_op.eval_str(init_step) == 1 ): time_corr_dic['init_step'] = int(init_step) else: log_info.log_error('Input error: init_step should be integer, please check or reset analyze/time_correlation/init_step') exit() else: time_corr_dic['init_step'] = start_frame_id if ( 'end_step' in time_corr_dic.keys() ): end_step = time_corr_dic['end_step'] if ( data_op.eval_str(end_step) == 1 ): time_corr_dic['end_step'] = int(end_step) else: log_info.log_error('Input error: end_step should be integer, please check or reset analyze/time_correlation/end_step') exit() else: time_corr_dic['end_step'] = end_frame_id init_step = time_corr_dic['init_step'] end_step = time_corr_dic['end_step'] check_step(init_step, end_step, start_frame_id, end_frame_id) if ( 'normalize' in time_corr_dic.keys() ): normalize = time_corr_dic['normalize'] if ( data_op.eval_str(normalize) == 1 ): if ( int(normalize) == 0 or int(normalize) == 1 ): time_corr_dic['normalize'] = int(normalize) else: log_info.log_error('Input error: only 0 and 1 are supported for normalize, please check or reset analyze/time_correlation/normalize') exit() else: log_info.log_error('Input error: normalize should be integer, please check or reset analyze/time_correlation/normalize') exit() else: time_corr_dic['normalize'] = 1 return time_corr_dic
40.400244
146
0.677978
10,353
66,216
4.012267
0.023375
0.032018
0.045259
0.067888
0.823299
0.727894
0.647705
0.568911
0.513204
0.477527
0
0.005909
0.200042
66,216
1,638
147
40.424908
0.77829
0.047859
0
0.453344
0
0.002333
0.320304
0.079403
0
0
0
0
0
1
0.010109
false
0.008554
0.004666
0
0.027994
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa4fcf11c3e0d7848affc502abed9001f5ba7a11
276
py
Python
probability/measure/algebra/borel.py
jedhsu/probability
cd94726315cd6d9e537925cdd2f13cf7911dbb92
[ "Apache-2.0" ]
null
null
null
probability/measure/algebra/borel.py
jedhsu/probability
cd94726315cd6d9e537925cdd2f13cf7911dbb92
[ "Apache-2.0" ]
null
null
null
probability/measure/algebra/borel.py
jedhsu/probability
cd94726315cd6d9e537925cdd2f13cf7911dbb92
[ "Apache-2.0" ]
null
null
null
# just do power set for now. ez __all__ = ["BorelAlgebra"] from .sigma import SigmaAlgebra # this step is not super explicitly necessary as long as you handle the borel mechanism implicitly class BorelAlgebra(SigmaAlgebra): symbol = "B" class BorelSpace: pass
17.25
98
0.742754
37
276
5.432432
0.918919
0
0
0
0
0
0
0
0
0
0
0
0.199275
276
15
99
18.4
0.909502
0.456522
0
0
0
0
0.088435
0
0
0
0
0
0
1
0
false
0.166667
0.166667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
fa5083b2cae596caa53eabcc4d2defeeab957579
362
py
Python
copulas/multivariate/__init__.py
pvk-developer/Copulas
4de54e946ecb1e2bf831874e6a00a7d04d302804
[ "MIT" ]
71
2018-06-20T12:07:34.000Z
2020-01-03T21:43:01.000Z
copulas/multivariate/__init__.py
Hooddi/Copulas
86dc1304fe4ffb51302fc37801d7f18c4ab4d88d
[ "MIT" ]
75
2018-06-20T09:46:07.000Z
2019-12-23T15:04:19.000Z
copulas/multivariate/__init__.py
Hooddi/Copulas
86dc1304fe4ffb51302fc37801d7f18c4ab4d88d
[ "MIT" ]
25
2018-06-24T18:01:11.000Z
2020-01-02T14:30:09.000Z
"""Multivariate copulas module.""" from copulas.multivariate.base import Multivariate from copulas.multivariate.gaussian import GaussianMultivariate from copulas.multivariate.tree import Tree, TreeTypes from copulas.multivariate.vine import VineCopula __all__ = ( 'Multivariate', 'GaussianMultivariate', 'VineCopula', 'Tree', 'TreeTypes' )
24.133333
62
0.767956
34
362
8.058824
0.382353
0.160584
0.335766
0
0
0
0
0
0
0
0
0
0.140884
362
14
63
25.857143
0.881029
0.077348
0
0
0
0
0.167683
0
0
0
0
0
0
1
0
false
0
0.363636
0
0.363636
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
fa583b36546ffd32a550057f809cc32efb9ae476
3,929
py
Python
venv/Include/Tuple.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
1
2021-06-03T11:34:15.000Z
2021-06-03T11:34:15.000Z
venv/Include/Tuple.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
null
null
null
venv/Include/Tuple.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
null
null
null
from dataclasses import dataclass from enum import Enum import math import sys EPSILON = 0.00001 def isEqual(a, b): return True if abs(a - b) < EPSILON else False # Tuple types enum class TupleTypes(Enum): VECTOR = 0 POINT = 1 # Tuple struct # Implement tuple as numpy array of 4 elements with properties for x, y, z and w class Tuple: def __init__(self, x: float, y: float, z: float, w: float): self.x = x self.y = y self.z = z self.w = w def magnitude(self): return math.sqrt(pow(self.x, 2) + pow(self.y, 2) + pow(self.z, 2) + pow(self.w, 2)) def normalize(self): magnitude = self.magnitude() return Tuple(self.x / magnitude, self.y / magnitude, self.z / magnitude, self.w / magnitude) def dot(self, other): return (self.x * other.x + self.y * other.y + self.z * other.z + self.w * other.w) def cross(self, other): return vector(self.y * other.z - self.z * other.y, self.z * other.x - self.x * other.z, self.x * other.y - self.y * other.x) def reflect(self, normal): return self - normal * 2 * self.dot(normal) def get_type(self): if (self.w == 0): return TupleTypes.VECTOR elif (self.w == 1): return TupleTypes.POINT def __eq__(self, other): return True if isEqual(self.x, other.x) and isEqual(self.y, other.y) and isEqual(self.z, other.z) and isEqual(self.w, other.w) else False def __add__(self, other): return Tuple((self.x + other.x), (self.y + other.y), (self.z + other.z), (self.w + other.w)) def __sub__(self, other): return Tuple((self.x - other.x), (self.y - other.y), (self.z - other.z), (self.w - other.w)) def __neg__(self): return Tuple(-self.x, -self.y, -self.z, -self.w) def __mul__(self, other): return Tuple((self.x * other), (self.y * other), (self.z * other), (self.w * other)) def __truediv__(self, other): return Tuple((self.x / other), (self.y / other), (self.z / other), (self.w / other)) def __abs__(self): return Tuple(abs(self.x), abs(self.y), abs(self.z), abs(self.w)) def __str__(self): return f'{self.x, self.y, self.z, self.w}' class Color: def __init__(self, red: float, green: float, blue: float): self.red = red self.green = green self.blue = blue def hadamard_product(self, other): return Color((self.red * other.red), (self.green * other.green), (self.blue * other.blue)) def __eq__(self, other): return True if isEqual(self.red, other.red) and isEqual(self.green, other.green) and isEqual(self.blue, other.blue) else False def __add__(self, other): return Color((self.red + other.red), (self.green + other.green), (self.blue + other.blue)) def __sub__(self, other): return Color((self.red - other.red), (self.green - other.green), (self.blue - other.blue)) def __neg__(self): return Color(-self.red, -self.green, -self.blue) def __mul__(self, other): return self.hadamard_product(other) if type(other) is Color else Color((self.red * other), (self.green * other), (self.blue * other)) def __truediv__(self, other: float): return Color((self.red / other), (self.green / other), (self.blue / other)) def __abs__(self): return Color(abs(self.red), abs(self.green), abs(self.blue)) def __str__(self): return f'{self.red, self.green, self.blue}' # Functions to instantiate Tuples as different types def point(x, y, z): return Tuple(x, y, z, TupleTypes.POINT.value) def vector(x, y, z): return Tuple(x, y, z, TupleTypes.VECTOR.value)
31.943089
145
0.576992
569
3,929
3.852373
0.140598
0.034215
0.082117
0.043796
0.458029
0.410584
0.373175
0.359489
0.341241
0.284215
0
0.00566
0.280478
3,929
122
146
32.204918
0.769721
0.040468
0
0.162791
0
0
0.017273
0
0
0
0
0
0
1
0.325581
false
0
0.046512
0.27907
0.744186
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
fa6dbab6702b162536c6542a80ba754c79e3c51e
4,065
py
Python
scripts/clean_dataset.py
bonaventuredossou/fakenews_detection
062d4348ab76860a48f745cda883db8ab416c250
[ "MIT" ]
null
null
null
scripts/clean_dataset.py
bonaventuredossou/fakenews_detection
062d4348ab76860a48f745cda883db8ab416c250
[ "MIT" ]
null
null
null
scripts/clean_dataset.py
bonaventuredossou/fakenews_detection
062d4348ab76860a48f745cda883db8ab416c250
[ "MIT" ]
1
2022-01-10T02:45:30.000Z
2022-01-10T02:45:30.000Z
# Bonaventure F. P. Dossou - MSc in Data Engineering # Project: RL Approach in Fake News Detection # Task at hand in this script: Clean a (.csv) dataframe (dataset) # imports import pandas as pd import string import re import unicodedata from nltk import word_tokenize from nltk.corpus import stopwords # from geograpy import places true_file_name = "../dataset/True.csv" fake_file_name = "../dataset/Fake.csv" true_dataset = pd.read_csv(true_file_name) true_dataset.dropna(axis=0, how='any', inplace=True) fake_dataset = pd.read_csv(fake_file_name) fake_dataset.dropna(axis=0, how='any', inplace=True) punctuations = string.punctuation punctuations = punctuations.replace("-", "") def explode_characters(article): """ Task: Explode abbreviations Args: article: an article returns: the article with the abbreviations expanded in a proper form """ article = re.sub('<u>', '', article) article = re.sub('</u>', '', article) article = re.sub('\[', '', article) article = re.sub('\]', '', article) article = re.sub('\^', '', article) article = re.sub('\#', '', article) article = re.sub('\$', '', article) article = re.sub('\*', '', article) article = re.sub(" ’ ", "'", article) article = re.sub(r"i'm", "i am", article) article = re.sub(r"he's", "he is", article) article = re.sub(r"she's", "she is", article) article = re.sub(r"it's", "it is", article) article = re.sub(r"that's", "that is", article) article = re.sub(r"there's", "there is", article) article = re.sub(r"what's", "what is", article) article = re.sub(r"where's", "where is", article) article = re.sub(r"how's", "how is", article) article = re.sub(r"\'ll", " will", article) article = re.sub(r"\'ve", " have", article) article = re.sub(r"\'re", " are", article) article = re.sub(r"\'d", " would", article) article = re.sub(r"won't", "will not", article) article = re.sub(r"can't", "cannot", article) article = re.sub(r"n't", " not", article) article = re.sub(r"n'", "ing", article) article = re.sub(r"'bout", "about", article) article = re.sub(r"'til", "until", article) article = re.sub(r"[()\"#/@;:<>{}`+=~|.!?,]", "", article) return article def unicodeToAscii(s): return ''.join( c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn') def preprocess_sentence(article_): """ Task: preprocess the exploded article by removing punctuations, non-ascii characters etc.
Args: article exploded from explode_characters() return: cleaned and preprocessed article """ article_ = article_.lower().strip() article_ = explode_characters(article_) article_ = re.sub(r"([?.!,¿])", r" \1 ", article_) article_ = re.sub(r"[^a-zA-Z.!?]+", r" ", article_) article_ = re.sub(r"\s+", r" ", article_).strip() article_ = re.sub(r'[" "]+', " ", article_) article_ = ' '.join([word for word in article_.split() if word.isalpha()]) # remove stop words stop_words = set(stopwords.words('english')) word_tokens = word_tokenize(article_) article_ = ' '.join(w.strip() for w in word_tokens if w not in stop_words) return article_ # Before pre-processing print("Real articles dataset before cleaning") print(true_dataset['text'].head(5)) print("\n") print("Fake articles dataset before cleaning") print(fake_dataset['text'].head(5)) print("\n") true_dataset['text'] = true_dataset['text'].apply(preprocess_sentence) fake_dataset['text'] = fake_dataset['text'].apply(preprocess_sentence) # After pre-processing print("Real articles dataset after cleaning") print(true_dataset['text'].head(5)) print("\n") print("Fake articles dataset after cleaning") print(fake_dataset['text'].head(5)) print("\n") # saving the entire preprocessed dataset true_dataset.to_csv("../dataset/preprocessed_true.csv", index=False) fake_dataset.to_csv("../dataset/preprocessed_fake.csv", index=False)
35.043103
95
0.647724
567
4,065
4.54321
0.268078
0.201087
0.163043
0.243401
0.480978
0.356755
0.233307
0.233307
0.193323
0.163043
0
0.002074
0.169742
4,065
116
96
35.043103
0.760889
0.147847
0
0.153846
0
0
0.171848
0.018768
0
0
0
0
0
1
0.038462
false
0
0.064103
0.012821
0.141026
0.153846
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
fa7022cd3a8c7679fcdb3a3b88b0a0f8f90cf327
628
py
Python
AzureConnect.py
BK-Uni-Projects/IoT_Python
7eda60d83ae2d184555f4be5f59e23c8e0bcc2d4
[ "MIT" ]
null
null
null
AzureConnect.py
BK-Uni-Projects/IoT_Python
7eda60d83ae2d184555f4be5f59e23c8e0bcc2d4
[ "MIT" ]
null
null
null
AzureConnect.py
BK-Uni-Projects/IoT_Python
7eda60d83ae2d184555f4be5f59e23c8e0bcc2d4
[ "MIT" ]
null
null
null
import requests import json def mysensor(sensor): senseid = {"sensorid": sensor} return senseid def AddData(id, type, value): payload = {"sensorid":id, "sensortype":type, "value":value} return payload def getSensorData(reqsensor): RespGet = requests.get(MainURL+GetURL, params=mysensor(reqsensor)) return RespGet MainURL='http://bksiotworkshop.azurewebsites.net/index.php' PostURL='/sensors/postsensordata' GetURL='/sensors/getsensordata' def UploadData(sensorid, sensortype, sensorvalue): RespPost = requests.post(MainURL+PostURL, params=AddData(sensorid, sensortype, sensorvalue)) return RespPost
23.259259
94
0.737261
67
628
6.910448
0.522388
0.038877
0.12527
0
0
0
0
0
0
0
0
0
0.14172
628
26
95
24.153846
0.858998
0
0
0
0
0
0.20903
0.075251
0
0
0
0
0
1
0.25
false
0
0.125
0
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
3afa9521abc326b66cde24e738f4278d45952338
946
py
Python
src/modax/training/losses/.ipynb_checkpoints/utils-checkpoint.py
GJBoth/modax
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
[ "MIT" ]
2
2021-12-10T14:36:37.000Z
2022-02-10T11:47:03.000Z
src/modax/training/losses/.ipynb_checkpoints/utils-checkpoint.py
GJBoth/modax
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
[ "MIT" ]
null
null
null
src/modax/training/losses/.ipynb_checkpoints/utils-checkpoint.py
GJBoth/modax
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
[ "MIT" ]
2
2020-12-22T14:49:13.000Z
2021-04-09T08:52:08.000Z
import jax import jax.numpy as jnp from jax.scipy.stats import norm def mse(y_pred, y): """Helper function to calculate MSE. """ def squared_error(y, y_pred): return jnp.inner(y - y_pred, y - y_pred) return jnp.mean(jax.vmap(squared_error)(y, y_pred), axis=0) def normal_LL(x, mu, tau): # tau = 1 / sigma**2, for numerical reasons. n_samples = x.shape[0] mse = jnp.mean((x - mu) ** 2) # 2 before tau to compensate for 1/2 p = -n_samples / 2 * (tau * mse - jnp.log(tau) + jnp.log(2 * jnp.pi)) return p, mse def gamma_LL(x, alpha, beta): # log pdf of gamma dist, dropped constant factors since it can be improper. p = (alpha - 1) * jnp.log(x) - beta * x return p def precision(y, x, alpha, beta): # calculates precision parameter with a gamma prior n_samples = y.shape[0] tau = (n_samples + 2 * (alpha - 1)) / (jnp.linalg.norm(y - x) ** 2 + 2 * beta) return tau
25.567568
82
0.613108
162
946
3.5
0.401235
0.044092
0.042328
0.049383
0.10582
0
0
0
0
0
0
0.022567
0.250529
946
36
83
26.277778
0.777151
0.2537
0
0
0
0
0
0
0
0
0
0
0
1
0.263158
false
0
0.157895
0.052632
0.684211
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d704cb054c560656e280923fda0403a0e8c5ab58
598
py
Python
tests/test_datasets.py
v715/popcon
b0874d68c0051cae6971710521a4031c16c54b12
[ "Apache-2.0" ]
1
2020-08-06T17:49:26.000Z
2020-08-06T17:49:26.000Z
tests/test_datasets.py
v715/popcon
b0874d68c0051cae6971710521a4031c16c54b12
[ "Apache-2.0" ]
null
null
null
tests/test_datasets.py
v715/popcon
b0874d68c0051cae6971710521a4031c16c54b12
[ "Apache-2.0" ]
null
null
null
import pytest from popcon.datasets import load_mice def test_mice(): # Load the mouse dataset mice = load_mice() # Split population by genotype _, btbr = mice.multigraph.query("genotype == 'BTBR'") _, b6 = mice.multigraph.query("genotype == 'B6'") _, cast = mice.multigraph.query("genotype == 'CAST'") _, dba2 = mice.multigraph.query("genotype == 'DBA2'") assert mice.multigraph.participants.shape == (32, 3) assert mice.metrics.shape == (32 * 333, 11) assert mice.blocks.shape == (14, 4) assert len(btbr) == len(b6) == len(cast) == len(dba2) == 8
28.47619
62
0.635452
76
598
4.907895
0.460526
0.187668
0.203753
0.289544
0
0
0
0
0
0
0
0.042017
0.204013
598
20
63
29.9
0.741597
0.085284
0
0
0
0
0.128676
0
0
0
0
0
0.333333
1
0.083333
false
0
0.166667
0
0.25
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d713b3463e8b0d9c675736b4c1100aa9e514d3c9
2,183
py
Python
tests/test_compress.py
elkinsd/couchapp
944797b03e4dfb2a5fbd0b5710568104e3775b31
[ "Apache-2.0" ]
224
2015-01-03T13:35:56.000Z
2022-02-13T14:27:17.000Z
tests/test_compress.py
elkinsd/couchapp
944797b03e4dfb2a5fbd0b5710568104e3775b31
[ "Apache-2.0" ]
79
2015-03-11T15:28:00.000Z
2022-02-17T13:53:46.000Z
tests/test_compress.py
elkinsd/couchapp
944797b03e4dfb2a5fbd0b5710568104e3775b31
[ "Apache-2.0" ]
56
2015-01-03T13:37:22.000Z
2022-01-11T08:33:38.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # # This software is licensed as described in the file LICENSE, which # you should have received as part of this distribution. import unittest2 as unittest import mock import os class CompressTest(unittest.TestCase): def test_compress_js(self): from couchapp.config import Config config = Config() config.conf['compress'] = {'js': {'foo':['shows/example-show.js']}} with mock.patch('couchapp.hooks.compress.default.compress', return_value='foo') as mock_compress: from couchapp.hooks.compress import Compress compress = Compress(os.path.join(os.path.dirname(__file__), 'testapp')) compress.conf = config with mock.patch('couchapp.util.write'): compress.run() self.assertTrue(mock_compress.called, 'Default compressor has been called') def test_our_jsmin_loading(self): orig_import = __import__ def import_mock(name, *args): if name == 'jsmin': raise ImportError() return orig_import(name, *args) with mock.patch('__builtin__.__import__', side_effect=import_mock): with mock.patch('couchapp.hooks.compress.jsmin.jsmin', return_value='foo'): from couchapp.hooks.compress import default result = default.compress('bar') self.assertEqual(result, 'foo', 'Our module is called when it is not installed in the system') def test_system_jsmin_loading(self): orig_import = __import__ def import_mock(name, *args): if name == 'couchapp.hooks.compress.jsmin': raise ImportError() return orig_import(name, *args) with mock.patch('__builtin__.__import__', side_effect=import_mock): with mock.patch('jsmin.jsmin', return_value='foo'): from couchapp.hooks.compress import default result = default.compress('bar') self.assertEqual(result, 'foo', 'The system module is called when it is installed') if __name__ == "__main__": # import sys;sys.argv = ['', 'Test.testName'] unittest.main()
38.298246
105
0.640861
259
2,183
5.173745
0.343629
0.035821
0.058209
0.047015
0.504478
0.481343
0.407463
0.407463
0.407463
0.407463
0
0.001218
0.247824
2,183
56
106
38.982143
0.81486
0.094824
0
0.358974
0
0
0.2
0.085787
0
0
0
0
0.076923
1
0.128205
false
0
0.435897
0
0.641026
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d71c31b14c19cfcb4d90c252a380c9ffe98fe7af
8,517
py
Python
src/the_tale/the_tale/game/bills/tests/test_road_change.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
85
2017-11-21T12:22:02.000Z
2022-03-27T23:07:17.000Z
src/the_tale/the_tale/game/bills/tests/test_road_change.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
545
2017-11-04T14:15:04.000Z
2022-03-27T14:19:27.000Z
src/the_tale/the_tale/game/bills/tests/test_road_change.py
Alacrate/the-tale
43b211f3a99e93964e95abc20a8ed649a205ffcf
[ "BSD-3-Clause" ]
45
2017-11-11T12:36:30.000Z
2022-02-25T06:10:44.000Z
import smart_imports smart_imports.all() class RoadChangeTests(helpers.BaseTestPrototypes): def setUp(self): super().setUp() self.new_path = 'rddr' self.old_road = roads_logic.road_between_places(self.place1, self.place2) self.assertNotEqual(self.new_path, self.old_road.path) self.bill_data = bills.road_change.RoadChange(place_1_id=self.place1.id, place_2_id=self.place2.id, path=self.new_path) self.bill = prototypes.BillPrototype.create(self.account1, 'bill-1-caption', self.bill_data, chronicle_on_accepted='chronicle-on-accepted') def test_create(self): self.assertEqual(self.bill.data.place_1_id, self.place1.id) self.assertEqual(self.bill.data.place_2_id, self.place2.id) self.assertEqual(self.bill.data.path, self.new_path) self.assertEqual(self.bill.data.old_place_1_name_forms, self.place1.utg_name) self.assertEqual(self.bill.data.old_place_2_name_forms, self.place2.utg_name) self.assertEqual(self.bill.data.old_path, self.old_road.path) self.assertEqual(self.bill.data.place_1.id, self.place1.id) self.assertEqual(self.bill.data.place_2.id, self.place2.id) self.assertEqual(self.bill.data.old_place_1_name, self.place1.utg_name.normal_form()) self.assertEqual(self.bill.data.old_place_2_name, self.place2.utg_name.normal_form()) self.assertFalse(self.bill.data.place_1_name_changed) self.assertFalse(self.bill.data.place_2_name_changed) def test_user_form_initials(self): self.assertEqual(self.bill.data.user_form_initials(), {'place_1': self.bill.data.place_1_id, 'place_2': self.bill.data.place_2_id, 'path': self.bill.data.path}) def test_actors(self): self.assertEqual(set(id(a) for a in self.bill_data.actors), set([id(self.place1), id(self.place2)])) def test_update(self): form = self.bill.data.get_user_form_update(post={'caption': 'new-caption', 'chronicle_on_accepted': 'chronicle-on-accepted-2', 'place_1': self.place2.id, 'place_2': self.place3.id, 'path': 'luld'}) self.assertTrue(form.is_valid()) self.bill.update(form) self.bill = prototypes.BillPrototype.get_by_id(self.bill.id) old_road = roads_logic.road_between_places(self.place2, self.place3) self.assertEqual(self.bill.data.place_1_id, self.place2.id) self.assertEqual(self.bill.data.place_2_id, self.place3.id) self.assertEqual(self.bill.data.path, 'luld') self.assertEqual(self.bill.data.old_place_1_name_forms, self.place2.utg_name) self.assertEqual(self.bill.data.old_place_2_name_forms, self.place3.utg_name) self.assertEqual(self.bill.data.old_path, old_road.path) self.assertEqual(self.bill.data.place_1.id, self.place2.id) self.assertEqual(self.bill.data.place_2.id, self.place3.id) self.assertEqual(self.bill.data.old_place_1_name, self.place2.utg_name.normal_form()) self.assertEqual(self.bill.data.old_place_2_name, self.place3.utg_name.normal_form()) self.assertFalse(self.bill.data.place_2_name_changed) self.assertFalse(self.bill.data.place_1_name_changed) def test_form_validation__success(self): form = self.bill.data.get_user_form_update(post={'caption': 'long caption', 'chronicle_on_accepted': 'chronicle-on-accepted', 'place_1': self.place1.id, 'place_2': self.place2.id, 'path': self.new_path}) self.assertTrue(form.is_valid()) def test_form_validation__wrong_end_place(self): form = self.bill.data.get_user_form_update(post={'caption': 'long caption', 'chronicle_on_accepted': 'chronicle-on-accepted', 'place_1': self.place1.id, 'place_2': self.place3.id, 'path': 'drrd'}) self.assertFalse(form.is_valid()) def test_user_form_validation__not_exists(self): self.assertEqual(roads_logic.road_between_places(self.place1, self.place3), None) form = 
self.bill.data.get_user_form_update(post={'caption': 'long caption', 'chronicle_on_accepted': 'chronicle-on-accepted', 'place_1': self.place1.id, 'place_2': self.place3.id, 'path': 'rdrd'}) self.assertFalse(form.is_valid()) @mock.patch('the_tale.game.roads.logic.is_path_suitable_for_road', lambda **kwargs: roads_relations.ROAD_PATH_ERRORS.random(exclude=[roads_relations.ROAD_PATH_ERRORS.NO_ERRORS])) def test_user_form_validation__bad_path(self): form = self.bill.data.get_user_form_update(post={'caption': 'long caption', 'chronicle_on_accepted': 'chronicle-on-accepted', 'place_1': self.place1.id, 'place_2': self.place2.id, 'path': self.new_path}) self.assertFalse(form.is_valid()) @mock.patch('the_tale.game.bills.conf.settings.MIN_VOTES_PERCENT', 0.6) @mock.patch('the_tale.game.bills.prototypes.BillPrototype.time_before_voting_end', datetime.timedelta(seconds=0)) def apply_bill(self): prototypes.VotePrototype.create(self.account2, self.bill, relations.VOTE_TYPE.AGAINST) prototypes.VotePrototype.create(self.account3, self.bill, relations.VOTE_TYPE.FOR) data = self.bill.user_form_initials data['approved'] = True form = self.bill.data.get_moderator_form_update(data) self.assertTrue(form.is_valid()) self.bill.update_by_moderator(form, self.account1) self.assertTrue(self.bill.apply()) def test_apply(self): old_storage_version = roads_storage.roads._version with self.check_not_changed(lambda: len(roads_storage.roads.all())): self.apply_bill() self.assertNotEqual(old_storage_version, roads_storage.roads._version) bill = prototypes.BillPrototype.get_by_id(self.bill.id) self.assertTrue(bill.state.is_ACCEPTED) road = roads_logic.road_between_places(self.place1, self.place2) self.assertEqual(road.path, self.new_path) def test_has_meaning__not_exists(self): bill_data = bills.road_change.RoadChange(place_1_id=self.place1.id, place_2_id=self.place3.id, path='rdrd') bill = prototypes.BillPrototype.create(self.account1, 'bill-1-caption', bill_data, chronicle_on_accepted='chronicle-on-accepted') self.assertFalse(bill.has_meaning()) @mock.patch('the_tale.game.roads.logic.is_path_suitable_for_road', lambda **kwargs: roads_relations.ROAD_PATH_ERRORS.random(exclude=[roads_relations.ROAD_PATH_ERRORS.NO_ERRORS])) def test_has_meaning__wrong_path(self): bill_data = bills.road_change.RoadChange(place_1_id=self.place1.id, place_2_id=self.place2.id, path=self.new_path) bill = prototypes.BillPrototype.create(self.account1, 'bill-1-caption', bill_data, chronicle_on_accepted='chronicle-on-accepted') self.assertFalse(bill.has_meaning())
49.807018
127
0.581073
985
8,517
4.751269
0.122843
0.083761
0.1
0.103205
0.782051
0.738675
0.709615
0.675641
0.628205
0.563675
0
0.01627
0.31443
8,517
170
128
50.1
0.785237
0
0
0.373984
0
0
0.090535
0.058126
0
0
0
0
0.325203
1
0.105691
false
0
0.01626
0
0.130081
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d71cb68f9bdc1071b4aafeec40b87a85c10fe5e7
383
py
Python
custom_layers/pad_layer.py
bobenxia/Centripetal-SGD
e33350464504085a641a2a969d3c06e3d5cc4765
[ "Apache-2.0" ]
767
2019-10-08T01:32:47.000Z
2022-03-27T14:39:44.000Z
custom_layers/pad_layer.py
ShawnDing1994/ACNet
9586a269d7065805aafb8f1d69d425e84cec55f1
[ "MIT" ]
48
2019-11-04T12:05:15.000Z
2021-11-28T06:50:30.000Z
custom_layers/pad_layer.py
ShawnDing1994/ACNet
9586a269d7065805aafb8f1d69d425e84cec55f1
[ "MIT" ]
140
2019-10-29T07:49:24.000Z
2022-03-27T13:01:22.000Z
import torch.nn as nn import torch.nn.functional as F class PadLayer(nn.Module): # E.g., (-1, 0) means this layer should crop the first and last rows of the feature map. And (0, -1) crops the first and last columns def __init__(self, pad): super(PadLayer, self).__init__() self.pad = pad def forward(self, input): return F.pad(input, [self.pad] * 4)
31.916667
139
0.644909
63
383
3.793651
0.571429
0.087866
0.108787
0.125523
0
0
0
0
0
0
0
0.017182
0.240209
383
12
140
31.916667
0.804124
0.342037
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d721feae5478cacb7636788cfeec491dc6623a53
711
py
Python
groundstation/broadcast_discoverer.py
richo/groundstation
7ed48dd355051ee6b71164fc801e3893c09d11db
[ "MIT" ]
26
2015-06-18T20:17:07.000Z
2019-09-26T09:55:35.000Z
groundstation/broadcast_discoverer.py
richo/groundstation
7ed48dd355051ee6b71164fc801e3893c09d11db
[ "MIT" ]
null
null
null
groundstation/broadcast_discoverer.py
richo/groundstation
7ed48dd355051ee6b71164fc801e3893c09d11db
[ "MIT" ]
5
2015-07-20T01:52:47.000Z
2017-01-08T09:54:07.000Z
import socket from sockets.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) class BroadcastDiscoverer(BroadcastSocket): def __init__(self, port): super(BroadcastDiscoverer, self).__init__() self.socket.bind(('0.0.0.0', port)) def __del__(self): "Shutdown and close the underlying socket." self._sock.close() @property def timeout(self): 'Receive timeout' return self._sock.gettimeout() @timeout.setter def timeout(self, value): self._sock.settimeout(value) def recv(self, size): "Receive a broadcast through the underlying socket." return self._sock.recvfrom(size)
23.7
60
0.673699
81
711
5.654321
0.481481
0.069869
0.0131
0
0
0
0
0
0
0
0
0.007273
0.226442
711
29
61
24.517241
0.825455
0.151899
0
0
0
0
0.159155
0
0
0
0
0
0
1
0.238095
false
0
0.142857
0
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d722485ae10f9a522da2ef41afd138479c7a923b
219
py
Python
falkon/ooc_ops/__init__.py
fwilliams/falkon
26deb0aeeaa867ac370b9316bdfa4f4a1160b3e6
[ "MIT" ]
1
2021-11-10T16:50:53.000Z
2021-11-10T16:50:53.000Z
falkon/ooc_ops/__init__.py
fwilliams/falkon
26deb0aeeaa867ac370b9316bdfa4f4a1160b3e6
[ "MIT" ]
null
null
null
falkon/ooc_ops/__init__.py
fwilliams/falkon
26deb0aeeaa867ac370b9316bdfa4f4a1160b3e6
[ "MIT" ]
null
null
null
try: from .ooc_lauum import gpu_lauum from .ooc_potrf import gpu_cholesky except (OSError, ModuleNotFoundError): # No GPU gpu_lauum = None gpu_cholesky = None __all__ = ("gpu_lauum", "gpu_cholesky")
24.333333
39
0.712329
29
219
4.965517
0.482759
0.166667
0
0
0
0
0
0
0
0
0
0
0.205479
219
9
40
24.333333
0.827586
0.027397
0
0
0
0
0.099057
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d7389fa33af8ef8bf4ec941edbc596f33da32e3b
1,569
py
Python
gap.py
BakaBBQ/gap
b133cb0d57a04d8750d66e5c46125fe7d9edc4bf
[ "MIT" ]
null
null
null
gap.py
BakaBBQ/gap
b133cb0d57a04d8750d66e5c46125fe7d9edc4bf
[ "MIT" ]
null
null
null
gap.py
BakaBBQ/gap
b133cb0d57a04d8750d66e5c46125fe7d9edc4bf
[ "MIT" ]
null
null
null
#!/usr/bin/env python import urllib import urllib2 import argparse import spur import os parser = argparse.ArgumentParser(description="retrieve a file from a server") parser.add_argument('server', metavar='server', help='server in favor of user@host') parser.add_argument('file_url', metavar='file_url') parser.add_argument('-n', metavar='filename', default='',help='the final filename of the downloaded file') parser.add_argument('-p', metavar='password', help='ssh password', required=True) args = parser.parse_args() host = args.server.split('@')[1] user = args.server.split('@')[0] if '' == args.n: args.n = args.file_url.split('/')[-1] def remote_retrieve_file(): shell = spur.SshShell(hostname=host, username=user, password=args.p) with shell: com = """import urllib urllib.urlretrieve('{url}', '{file}') print 'finished' """ com = com.format(url=args.file_url, file=args.n) print com result = shell.run(["python", '-c', com]) print result.output def curl_file(): shell = spur.LocalShell() cmd = "curl -o {filename} sftp://{user}:{password}@{host}/~/{filename}".format(filename=args.n, user=user, host=host, password=args.p) os.system(cmd) print cmd def clean_trash(): shell = spur.SshShell(hostname=host, username=user, password=args.p) with shell: result = shell.run(["rm", args.n]) print result.output #the server will fetch the file remote_retrieve_file() #then your computer gets it from the server curl_file() #server trash cleanup clean_trash()
28.527273
138
0.681326
220
1,569
4.781818
0.354545
0.023764
0.064639
0.047529
0.119772
0.119772
0.119772
0.119772
0.119772
0.119772
0
0.00304
0.161249
1,569
54
139
29.055556
0.796353
0.071383
0
0.153846
0
0
0.21404
0.048864
0
0
0
0
0
0
null
null
0.102564
0.153846
null
null
0.128205
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
d739ec32c1366663f8e0014aff0d0aab644da947
611
py
Python
src/server/master/master_manager/change_password.py
dpaola2/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
15
2015-02-14T02:39:04.000Z
2021-12-13T14:17:15.000Z
src/server/master/master_manager/change_password.py
ojengwa/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
null
null
null
src/server/master/master_manager/change_password.py
ojengwa/djangy
4b10e681cb49e5c16aba4429dfbfadfd9b512463
[ "NCSA" ]
11
2015-08-07T11:47:02.000Z
2021-04-29T08:08:24.000Z
import sys from hashlib import md5 from management_database import User def hash_password(email, password): return md5("%s:%s" % (email, password)).hexdigest() def main(email, password): try: user = User.get_by_email(email) user.passwd = hash_password(email, password) user.save() except Exception as e: print "Exception: %s" % e print "Success." if __name__ == '__main__': if len(sys.argv) < 3: print "Usage: python change_password.py <email> <new_password>" sys.exit(1) email = str(sys.argv[1]) password = str(sys.argv[2]) main(email, password)
25.458333
71
0.646481
82
611
4.634146
0.487805
0.171053
0.089474
0.131579
0
0
0
0
0
0
0
0.010593
0.227496
611
23
72
26.565217
0.794492
0
0
0
0
0
0.145663
0
0
0
0
0
0
0
null
null
0.368421
0.157895
null
null
0.157895
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
d74c61016501e6ac4b0e3407fce3c76d6a388c16
90
py
Python
output/models/ms_data/regex/myanmar_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/ms_data/regex/myanmar_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/ms_data/regex/myanmar_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from output.models.ms_data.regex.myanmar_xsd.myanmar import Doc __all__ = [ "Doc", ]
15
63
0.722222
13
90
4.538462
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.155556
90
5
64
18
0.776316
0
0
0
0
0
0.033333
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d77fe952058c784f76973734ce34ffc2765b9d43
117
py
Python
aiourlshortener/__init__.py
das7pad/aiourlshortener
457e6ceb2d2674a60dbca455e214b52c0a3ed484
[ "MIT" ]
3
2018-02-02T07:08:59.000Z
2021-01-23T17:01:19.000Z
aiourlshortener/__init__.py
das7pad/aiourlshortener
457e6ceb2d2674a60dbca455e214b52c0a3ed484
[ "MIT" ]
7
2018-01-28T15:38:55.000Z
2018-03-31T12:34:44.000Z
aiourlshortener/__init__.py
das7pad/aiourlshortener
457e6ceb2d2674a60dbca455e214b52c0a3ed484
[ "MIT" ]
1
2018-01-27T13:21:13.000Z
2018-01-27T13:21:13.000Z
from .shorteners import Shortener, Shorteners __version__ = '0.0.3' __author__ = 'Chirag Patel' __license__ = 'MIT'
19.5
45
0.752137
14
117
5.428571
0.857143
0
0
0
0
0
0
0
0
0
0
0.029703
0.136752
117
5
46
23.4
0.722772
0
0
0
0
0
0.17094
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d78b711b403b547eedf363063cb0c6ac808aa5f2
135
py
Python
output/models/ms_data/identity_constraint/id_g001_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/ms_data/identity_constraint/id_g001_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/ms_data/identity_constraint/id_g001_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from output.models.ms_data.identity_constraint.id_g001_xsd.id_g001 import ( Root, Uid, ) __all__ = [ "Root", "Uid", ]
13.5
75
0.644444
18
135
4.333333
0.777778
0.153846
0
0
0
0
0
0
0
0
0
0.057143
0.222222
135
9
76
15
0.685714
0
0
0
0
0
0.051852
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d79167e89e926007816c6a460257c0bed5504a94
5,829
py
Python
tests/test_distance.py
rionbr/distanceclosure
f773af27e11112fc2a71fe7eaed7f56d8835ed9d
[ "MIT" ]
9
2016-02-12T22:09:47.000Z
2022-02-17T17:02:37.000Z
tests/test_distance.py
rionbr/distanceclosure
f773af27e11112fc2a71fe7eaed7f56d8835ed9d
[ "MIT" ]
3
2022-01-13T15:09:35.000Z
2022-02-14T13:50:28.000Z
tests/test_distance.py
rionbr/distanceclosure
f773af27e11112fc2a71fe7eaed7f56d8835ed9d
[ "MIT" ]
3
2017-10-27T16:42:41.000Z
2022-01-20T08:54:51.000Z
from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy import numpy as np from scipy.sparse import csr_matrix B = np.array([ [1, 1, 1, 1], [1, 1, 1, 0], [1, 1, 0, 0], [1, 0, 0, 0], ]) N = np.array([ [2, 3, 4, 2], [2, 3, 4, 2], [2, 3, 3, 2], [2, 1, 3, 4] ]) W = np.array([ [4, 3, 2, 1], [3, 2, 1, 0], [2, 1, 0, 0], [1, 0, 0, 0], ]) def test_jaccard_scipy(): """ Test Jaccard: scipy.spatial.dist.jaccard """ u = np.array([2, 3, 4, 5]) v = np.array([2, 3, 4, 2]) d = _jaccard_coef_scipy(u, v, min_support=1) assert (d == 0.75) def test_jaccard_binary(): """ Test Jaccard: binary (bitwise) coef """ u = np.array([1, 1, 1, 1]) v = np.array([1, 1, 1, 0]) d = _jaccard_coef_binary(u, v, min_support=1) assert (d == 0.75) def test_jaccard_set(): """ Test Jaccard: set coef """ u = np.array([4, 3, 2, 1]) v = np.array([3, 2, 1, 0]) d = _jaccard_coef_set(u, v, min_support=1) assert (d == 0.6) def test_jaccard_weighted(): """ Test Jaccard: weighted coef """ u = np.array([4, 3, 2, 1]) v = np.array([3, 2, 1, 0]) d = _jaccard_coef_weighted_numpy(u, v, min_support=1) assert (d == 0.6) def test_pairwise_distance_numpy_scipy(): """ Test pairwise distance: using the Numpy (dense matrix) implementation for numeric jaccard (scipy) coef """ D = pairwise_proximity(N, metric='jaccard') true = np.array([ [1., 1., 0.75, 0.25], [1., 1., 0.75, 0.25], [0.75, 0.75, 1., 0.5], [0.25, 0.25, 0.5, 1.]], dtype=float) assert np.isclose(D, true).all() def test_pairwise_distance_numpy_binary(): """ Test pairwise distance: using the Numpy (dense matrix) implementation for jaccard binary coef """ D = pairwise_proximity(B, metric='jaccard_binary', min_support=1, verbose=True) true = np.array([ [1., 0.75, 0.5, 0.25], [0.75, 1., 0.66666667, 0.33333333], [0.5, 0.66666667, 1., 0.5], [0.25, 0.33333333, 0.5, 1.]], dtype=float) assert np.isclose(D, true).all() def test_pairwise_distance_numpy_set(): """ Test pairwise distance: using the Numpy (dense matrix) implementation for jaccard set coef """ D = pairwise_proximity(W, metric='jaccard_set', min_support=1) true = np.array([ [1., 0.6, 0.4, 0.2], [0.6, 1., 0.75, 0.5], [0.4, 0.75, 1., 0.66666667], [0.2, 0.5, 0.66666667, 1.]], dtype=float) assert np.isclose(D, true).all() def test_pairwise_distance_numpy_weighted(): """ Test pairwise distance: using Numpy (dense matrix) using weighted jaccard """ D = pairwise_proximity(W, metric='weighted_jaccard', min_support=10) true = np.array([ [1., 0.6, 0.3, 0.1], [0.6, 1., 0., 0.], [0.3, 0., 1., 0.], [0.1, 0., 0., 1.]], dtype=float) assert np.isclose(D, true).all() def test_pairwise_distance_sparse_scipy(): """ Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard scipy coef """ N_sparse = csr_matrix(N) D = pairwise_proximity(N_sparse, metric='jaccard', min_support=1) true = np.array([ [1., 1., 0.75, 0.25], [1., 1., 0.75, 0.25], [0.75, 0.75, 1., 0.5], [0.25, 0.25, 0.5, 1.]], dtype=float) assert np.isclose(D.todense(), true).
all() def test_pairwise_distance_sparse_binary(): """ Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard bitwise coef """ B_sparse = csr_matrix(B) D = pairwise_proximity(B_sparse, metric='jaccard_binary', min_support=1) true = np.array([ [1., 0.75, 0.5, 0.25], [0.75, 1., 0.66666667, 0.33333333], [0.5, 0.66666667, 1., 0.5], [0.25, 0.33333333, 0.5, 1.]], dtype=float) assert np.isclose(D.todense(), true).all() def test_pairwise_distance_sparse_set(): """ Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard set coef """ W_sparse = csr_matrix(W) D = pairwise_proximity(W_sparse, metric='jaccard_set', min_support=1) true = np.array([ [1., 0.75, 0.5, 0.25], [0.75, 1., 0.66666667, 0.33333333], [0.5, 0.66666667, 1., 0.5], [0.25, 0.33333333, 0.5, 1.]], dtype=float) assert np.isclose(D.todense(), true).all() def test_pairwise_distance_sparse_weighted(): """ Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard weighted coef """ W_sparse = csr_matrix(W) D = pairwise_proximity(W_sparse, metric='jaccard_weighted', min_support=1) true = np.array([ [1., 0.6, 0.3, 0.1], [0.6, 1., 0., 0.], [0.3, 0., 1., 0.], [0.1, 0., 0., 1.]], dtype=float) assert np.isclose(D.todense(), true).all() def test_pairwise_distance_dense_my_own_metric(): """ Test pairwise distance: using the numpy (dense matrix) implementation and my own metric function """ def my_coef(u, v): return 0.25 D = pairwise_proximity(W, metric=my_coef, verbose=True) true = np.array([ [1., .25, .25, .25], [.25, 1., .25, .25], [.25, .25, 1., .25], [.25, .25, .25, 1.]], dtype=float) assert np.isclose(D, true).all() def test_pairwise_distance_sparse_my_own_metric(): """ Test pairwise distance: using the Scipy (sparse matrix) implementation and my own metric function """ def my_coef(u, v): return 0.25 W_sparse = csr_matrix(W) D = pairwise_proximity(W_sparse, metric=('indices', my_coef), verbose=True) true = np.array([ [1., .25, .25, .25], [.25, 1., .25, .25], [.25, .25, 1., .25], [.25, .25, .25, 1.]], dtype=float) assert np.isclose(D.todense(), true).all()
32.383333
147
0.587236
921
5,829
3.579805
0.071661
0.022445
0.121322
0.06976
0.786473
0.733394
0.677889
0.670306
0.636639
0.613285
0
0.111807
0.232802
5,829
179
148
32.564246
0.625447
0.184594
0
0.6
0
0
0.02207
0
0
0
0
0
0.107692
1
0.123077
false
0
0.023077
0.015385
0.161538
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad11fd7e89e6fdb4e2f78714134c097905da987e
1,124
py
Python
testes/teste_pontos.py
nanapereira/python-introducao
092fdf775c680149df3e329d832b7c4d120b19be
[ "MIT" ]
null
null
null
testes/teste_pontos.py
nanapereira/python-introducao
092fdf775c680149df3e329d832b7c4d120b19be
[ "MIT" ]
null
null
null
testes/teste_pontos.py
nanapereira/python-introducao
092fdf775c680149df3e329d832b7c4d120b19be
[ "MIT" ]
null
null
null
import unittest def remove_pontos(palavra): resultado = palavra.split('.') return "".join(resultado) def adiciona_pontos(palavra): resultado = list(palavra) return ".".join(resultado) class RemovePontosTest(unittest.TestCase): def test_com_pontos(self): esperado = "teste" resultado = remove_pontos("t.e.s.t.e") self.assertEqual(esperado, resultado) def test_com_outros_pontos(self): esperado = "virginia" resultado = remove_pontos("v.i.r.g.i.n.i.a") self.assertEqual(esperado, resultado) def test_sem_pontos(self): esperado = "nana" resultado = remove_pontos("nana") self.assertEqual(esperado, resultado) class AdicionaPontosTest(unittest.TestCase): def test_com_pontos(self): esperado = "t.e.s.t.e" resultado = adiciona_pontos("teste") self.assertEqual(esperado, resultado) def test_com_outros_pontos(self): esperado = "d.o.u.g.l.a.s" resultado = adiciona_pontos("douglas") self.assertEqual(esperado, resultado) if __name__ == '__main__': unittest.main()
28.820513
52
0.661032
133
1,124
5.383459
0.323308
0.048883
0.125698
0.223464
0.375698
0.361732
0.307263
0.307263
0.184358
0.184358
0
0
0.215302
1,124
39
53
28.820513
0.811791
0
0
0.290323
0
0
0.079111
0
0
0
0
0
0.16129
1
0.225806
false
0
0.032258
0
0.387097
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
ad1bfb7a43f3d893acaa48b26237faea58b95632
644
py
Python
{{cookiecutter.project_slug}}/run.py
bwrsandman/cookiecutter-tornado
94f9ce23de3f65c84bd20ca1bff3f775ae8880b2
[ "MIT" ]
null
null
null
{{cookiecutter.project_slug}}/run.py
bwrsandman/cookiecutter-tornado
94f9ce23de3f65c84bd20ca1bff3f775ae8880b2
[ "MIT" ]
null
null
null
{{cookiecutter.project_slug}}/run.py
bwrsandman/cookiecutter-tornado
94f9ce23de3f65c84bd20ca1bff3f775ae8880b2
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- """Basic run script""" import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web import tornado.autoreload from tornado.options import options from settings import settings from {{ cookiecutter.project_slug }}.urls import url_patterns class TornadoApplication(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings) def main(): app = TornadoApplication() app.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
20.125
72
0.73913
78
644
5.858974
0.5
0.170678
0.087527
0.100656
0
0
0
0
0
0
0
0.001815
0.14441
644
31
73
20.774194
0.827586
0.065217
0
0.111111
0
0
0.013841
0
0
0
0
0
0
0
null
null
0
0.5
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
ad20020ffdc1081bd8832fee449e6a1bc337c282
471
py
Python
login.py
nishthapant/cgi-lab
3fed251f8b62e6e307f3fc7836e1a1a0c8caf861
[ "Apache-2.0" ]
null
null
null
login.py
nishthapant/cgi-lab
3fed251f8b62e6e307f3fc7836e1a1a0c8caf861
[ "Apache-2.0" ]
null
null
null
login.py
nishthapant/cgi-lab
3fed251f8b62e6e307f3fc7836e1a1a0c8caf861
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 import cgi, cgitb import secret from templates import secret_page # instance of FieldStorage form = cgi.FieldStorage() # get data from fields username = form.getvalue('username') password = form.getvalue('password') if(username == secret.username and password == secret.password): print("Set-Cookie: username=%s\r\nSet-Cookie: password=%s\r\nContent-type: text/html\r\n\r\n" % (username, password)) print(secret_page(username, password))
31.4
121
0.745223
66
471
5.287879
0.515152
0.137536
0
0
0
0
0
0
0
0
0
0.002404
0.116773
471
15
122
31.4
0.836538
0.142251
0
0
0
0.111111
0.251244
0.134328
0
0
0
0
0
1
0
false
0.444444
0.333333
0
0.333333
0.222222
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
ad2d0a2470fb330580fcc10da01db5d19640c734
775
py
Python
data.py
gsel9/ConvOpt02953
78ee69b1b857a24fb3effdb6283ec2b5e013c33e
[ "MIT" ]
null
null
null
data.py
gsel9/ConvOpt02953
78ee69b1b857a24fb3effdb6283ec2b5e013c33e
[ "MIT" ]
null
null
null
data.py
gsel9/ConvOpt02953
78ee69b1b857a24fb3effdb6283ec2b5e013c33e
[ "MIT" ]
null
null
null
import numpy as np import matplotlib.pyplot as plt from scipy.stats import bernoulli def eval_loss(M, M_hat): return np.linalg.norm(M - M_hat) ** 2 / np.linalg.norm(M) ** 2 def mask_matrix(m=200, n=20, prob_masked=0.5, seed=42): """ Generate a binary mask for m users and n movies. Note that 1 denotes observed, and 0 denotes unobserved. """ np.random.seed(seed) return 1 - bernoulli.rvs(p=prob_masked, size=(m, n)) def data_matrix(m=200, n=20, k=15, seed=42): """ Generate non-noisy data for m users and n movies with k latent factors. Draws factors U, V from Gaussian noise and returns U Vᵀ. """ np.random.seed(seed) U = np.random.randn(m, k) V = np.random.randn(n, k) return np.dot(U, V.T)
22.794118
75
0.64129
137
775
3.576642
0.481752
0.065306
0.020408
0.053061
0.130612
0.077551
0
0
0
0
0
0.038851
0.236129
775
33
76
23.484848
0.788851
0.300645
0
0.153846
0
0
0
0
0
0
0
0
0
1
0.230769
false
0
0.230769
0.076923
0.692308
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
ad31ea07ac42dffdd82b4812a5ba7cad9eae68cf
54
py
Python
lines/__init__.py
AlexTaguchi/lines
d091d52350d0bedc3c8af0aa5438b6a1da95151d
[ "MIT" ]
null
null
null
lines/__init__.py
AlexTaguchi/lines
d091d52350d0bedc3c8af0aa5438b6a1da95151d
[ "MIT" ]
null
null
null
lines/__init__.py
AlexTaguchi/lines
d091d52350d0bedc3c8af0aa5438b6a1da95151d
[ "MIT" ]
null
null
null
__all__ = ['Lines', 'Segment', 'Point'] version = 0.1
18
39
0.611111
7
54
4.142857
1
0
0
0
0
0
0
0
0
0
0
0.043478
0.148148
54
2
40
27
0.586957
0
0
0
0
0
0.314815
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad45adb3476ffe351aba1b2cf066867c4bbeaa29
232
py
Python
saleor/core/utils/date_time.py
eanknd/saleor
08aa724176be00d7aaf654f14e9ae99dd4327f97
[ "CC-BY-4.0" ]
1,392
2021-10-06T15:54:28.000Z
2022-03-31T20:50:55.000Z
saleor/core/utils/date_time.py
eanknd/saleor
08aa724176be00d7aaf654f14e9ae99dd4327f97
[ "CC-BY-4.0" ]
888
2021-10-06T10:48:54.000Z
2022-03-31T11:00:30.000Z
saleor/core/utils/date_time.py
eanknd/saleor
08aa724176be00d7aaf654f14e9ae99dd4327f97
[ "CC-BY-4.0" ]
538
2021-10-07T16:21:27.000Z
2022-03-31T22:58:57.000Z
from datetime import datetime import pytz def convert_to_utc_date_time(date): """Convert date into utc date time.""" if date is None: return return datetime.combine(date, datetime.min.time(), tzinfo=pytz.UTC)
21.090909
71
0.706897
34
232
4.705882
0.529412
0.175
0.1375
0
0
0
0
0
0
0
0
0
0.198276
232
10
72
23.2
0.860215
0.137931
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
ad4bb834c7ac1f8085f612a3214ab44b6466cca2
546
py
Python
pyedflib/data/_readers.py
dthkao/pyedflib
0f787fc1202b84a6f30d098296acf72666eaeeb4
[ "BSD-2-Clause" ]
7
2018-11-07T14:40:13.000Z
2019-11-03T20:38:52.000Z
pyedflib/data/_readers.py
dthkao/pyedflib
0f787fc1202b84a6f30d098296acf72666eaeeb4
[ "BSD-2-Clause" ]
null
null
null
pyedflib/data/_readers.py
dthkao/pyedflib
0f787fc1202b84a6f30d098296acf72666eaeeb4
[ "BSD-2-Clause" ]
1
2019-06-02T07:50:41.000Z
2019-06-02T07:50:41.000Z
import os import numpy as np import pyedflib def test_generator(): """ Get a sample EDF-file Parameters ---------- None Returns ------- f : EdfReader object object containing the handle to the file Examples -------- >>> import pyedflib.data >>> f = pyedflib.data.test_generator() >>> f.signals_in_file == 11 True >>> f._close() >>> del f """ fname = os.path.join(os.path.dirname(__file__), 'test_generator.edf') f = pyedflib.EdfReader(fname) return f
16.545455
73
0.571429
65
546
4.646154
0.569231
0.129139
0
0
0
0
0
0
0
0
0
0.005102
0.282051
546
32
74
17.0625
0.765306
0.5
0
0
0
0
0.091837
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
ad5c23841d93fc85b5c13254ebddc0e6601f78eb
196
py
Python
settings.py
stshrewsburyDev/raspberrypi_status_website
282498cb89ddc3c1a9c6538500b7bab5f3ab26f7
[ "MIT" ]
null
null
null
settings.py
stshrewsburyDev/raspberrypi_status_website
282498cb89ddc3c1a9c6538500b7bab5f3ab26f7
[ "MIT" ]
null
null
null
settings.py
stshrewsburyDev/raspberrypi_status_website
282498cb89ddc3c1a9c6538500b7bab5f3ab26f7
[ "MIT" ]
null
null
null
from os.path import join, dirname from dotenv import load_dotenv # Get .env file path path = join(dirname(__file__), ".env") # Load .env vars and delete path variable load_dotenv(path) del path
19.6
41
0.755102
32
196
4.4375
0.5
0.15493
0
0
0
0
0
0
0
0
0
0
0.158163
196
9
42
21.777778
0.860606
0.295918
0
0
0
0
0.02963
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
ad5f1ec88a7313c3654db98f06bb05369b2fe550
662
py
Python
generate_config.py
shawn-bluce/server_tag
9d99ec44803c550046418d6d440b7e96ce3a74a4
[ "MIT" ]
16
2020-08-19T15:16:55.000Z
2020-12-02T06:01:37.000Z
generate_config.py
shawn-bluce/server_tag
9d99ec44803c550046418d6d440b7e96ce3a74a4
[ "MIT" ]
1
2020-08-30T04:51:27.000Z
2021-02-17T14:46:39.000Z
generate_config.py
shawn-bluce/server_tag
9d99ec44803c550046418d6d440b7e96ce3a74a4
[ "MIT" ]
1
2020-08-24T11:07:05.000Z
2020-08-24T11:07:05.000Z
#!/usr/bin/env python3 # generate template file config.json, when it does not exist. # run this command: `python3 generate_config.py` import os import json from utils import color_text from settings import config_file_name from template_config import config as template_config if __name__ == '__main__': file_content = json.dumps(template_config, indent=4) if os.path.exists(config_file_name): color_text.error('[error] {} already exists, not generating.'.format(config_file_name)) exit(1) else: open(config_file_name, 'w').write(file_content) color_text.success('[success] {} is generated.'.format(config_file_name))
33.1
96
0.73565
95
662
4.842105
0.473684
0.108696
0.152174
0.086957
0
0
0
0
0
0
0
0.007207
0.161631
662
19
97
34.842105
0.821622
0.187311
0
0
1
0
0.145794
0
0
0
0
0
0
1
0
false
0
0.384615
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
ad6189192d0343724fb83243fe236fb1261af574
1,151
py
Python
ansible_runner/tests/test_filter_ansibleprojects.py
smk4664/nautobot-plugin-ansible-runner
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
[ "Apache-2.0" ]
null
null
null
ansible_runner/tests/test_filter_ansibleprojects.py
smk4664/nautobot-plugin-ansible-runner
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
[ "Apache-2.0" ]
null
null
null
ansible_runner/tests/test_filter_ansibleprojects.py
smk4664/nautobot-plugin-ansible-runner
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
[ "Apache-2.0" ]
null
null
null
"""Test AnsibleProjects Filter.""" from django.test import TestCase from ansible_runner import filters from ansible_runner import models from ansible_runner.tests import fixtures class AnsibleProjectsFilterTestCase(TestCase): """AnsibleProjects Filter Test Case.""" queryset = models.AnsibleProjects.objects.all() filterset = filters.AnsibleProjectsFilterSet @classmethod def setUpTestData(cls): """Setup test data for AnsibleProjects Model.""" fixtures.create_ansibleprojects() def test_q_search_name(self): """Test using Q search with name of AnsibleProjects.""" params = {"q": "Test One"} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_q_search_slug(self): """Test using Q search with slug of AnsibleProjects.""" params = {"q": "test-one"} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1) def test_q_invalid(self): """Test using invalid Q search for AnsibleProjects.""" params = {"q": "test-five"} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 0)
34.878788
77
0.691573
134
1,151
5.850746
0.350746
0.044643
0.065051
0.09949
0.366071
0.366071
0.304847
0.304847
0.304847
0.237245
0
0.003219
0.190269
1,151
32
78
35.96875
0.837983
0.220678
0
0.105263
0
0
0.032333
0
0
0
0
0
0.157895
1
0.210526
false
0
0.210526
0
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
ad64d77e24b1cc4b06e2178562d1de481f016083
30,055
py
Python
tests/features/test_plinkio.py
cnr-ibba/SMARTER-database
837f7d514c33e458ad0e39e26784c761df29e004
[ "MIT" ]
null
null
null
tests/features/test_plinkio.py
cnr-ibba/SMARTER-database
837f7d514c33e458ad0e39e26784c761df29e004
[ "MIT" ]
44
2021-05-25T16:00:34.000Z
2022-03-12T01:12:45.000Z
tests/features/test_plinkio.py
cnr-ibba/SMARTER-database
837f7d514c33e458ad0e39e26784c761df29e004
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 9 17:42:03 2021

@author: Paolo Cozzi <paolo.cozzi@ibba.cnr.it>
"""

import types
import unittest
import pathlib
import tempfile

from copy import deepcopy

from src.features.smarterdb import (
    VariantSheep, Location, Breed, Dataset, SampleSheep, SEX)
from src.features.plinkio import (
    TextPlinkIO, MapRecord, CodingException, IlluminaReportIO, BinaryPlinkIO,
    AffyPlinkIO)

from ..common import (
    MongoMockMixin, SmarterIDMixin, VariantsMixin, SupportedChipMixin)

# set data dir
DATA_DIR = pathlib.Path(__file__).parent / "data"


class TextPlinkIOMap(VariantsMixin, MongoMockMixin, unittest.TestCase):
    def setUp(self):
        super().setUp()

        self.plinkio = TextPlinkIO(
            prefix=str(DATA_DIR / "plinktest"),
            species="Sheep")

    def test_read_mapfile(self):
        self.plinkio.read_mapfile()
        self.assertIsInstance(self.plinkio.mapdata, list)
        self.assertEqual(len(self.plinkio.mapdata), 4)
        for record in self.plinkio.mapdata:
            self.assertIsInstance(record, MapRecord)

    def test_fetch_coordinates(self):
        self.plinkio.read_mapfile()
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1", imported_from="SNPchiMp v.3")

        self.assertIsInstance(self.plinkio.locations, list)
        self.assertEqual(len(self.plinkio.locations), 4)
        self.assertIsInstance(self.plinkio.filtered, set)
        self.assertEqual(len(self.plinkio.filtered), 1)

        # assert filtered items
        self.assertIn(3, self.plinkio.filtered)

        for idx, record in enumerate(self.plinkio.locations):
            if idx in self.plinkio.filtered:
                self.assertIsNone(record)
            else:
                self.assertIsInstance(record, Location)

    def test_update_mapfile(self):
        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "plinktest_updated.map"
            self.plinkio.read_mapfile()
            self.plinkio.fetch_coordinates(
                version="Oar_v3.1", imported_from="SNPchiMp v.3")
            self.plinkio.update_mapfile(str(outfile))

            # now open outputfile and test stuff
            test = TextPlinkIO(
                mapfile=str(outfile),
                pedfile=str(DATA_DIR / "plinktest.ped"))
            test.read_mapfile()

            # one snp cannot be mapped
            self.assertEqual(len(test.mapdata), 3)

            for record in test.mapdata:
                variant = VariantSheep.objects(name=record.name).get()
                location = variant.get_location(
                    version="Oar_v3.1", imported_from="SNPchiMp v.3")
                self.assertEqual(location.chrom, record.chrom)
                self.assertEqual(location.position, record.position)


class TextPlinkIOPed(
        VariantsMixin, SmarterIDMixin, MongoMockMixin, unittest.TestCase):

    def setUp(self):
        super().setUp()

        self.plinkio = TextPlinkIO(
            prefix=str(DATA_DIR / "plinktest"),
            species="Sheep")

        # read info from map
        self.plinkio.read_mapfile()
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1", imported_from="SNPchiMp v.3")

        # read the ped file lines
        self.lines = list(self.plinkio.read_pedfile())

        # get a dataset
        self.dataset = Dataset.objects(file="test.zip").get()

    def test_read_pedfile(self):
        test = self.plinkio.read_pedfile()
        self.assertIsInstance(test, types.GeneratorType)

        # consume data and count rows
        test = list(test)
        self.assertEqual(len(test), 2)

    def test_process_genotypes_top(self):
        # first record is in top coordinates
        line = self.lines[0]
        test = self.plinkio._process_genotypes(line, 'top')

        # a genotype already in top coordinates isn't modified
        self.assertEqual(line, test)

        # asking for forward coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina forward format",
            self.plinkio._process_genotypes,
            line,
            "forward"
        )

        # asking for ab coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina ab format",
            self.plinkio._process_genotypes,
            line,
            "ab"
        )

    def test_process_genotypes_half_missing(self):
        # read a file with a half-missing genotype
        self.plinkio.pedfile = str(DATA_DIR / "plinktest_half-missing.ped")
        half_missing = next(self.plinkio.read_pedfile())

        # process the genotype as top
        test = self.plinkio._process_genotypes(half_missing, 'top')

        # a half-missing genotype should be set as MISSING
        reference = [
            'TEX_IT', '1', '0', '0', '0', '-9',
            '0', '0', 'A', 'G', '0', '0', '0', '0']
        self.assertEqual(reference, test)

    def test_process_genotypes_forward(self):
        # read a file in forward coordinates
        self.plinkio.pedfile = str(DATA_DIR / "plinktest_forward.ped")
        forward = next(self.plinkio.read_pedfile())

        # asking for top coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina top format",
            self.plinkio._process_genotypes,
            forward,
            "top"
        )

        # asking for ab coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina ab format",
            self.plinkio._process_genotypes,
            forward,
            "ab"
        )

        test = self.plinkio._process_genotypes(forward, 'forward')

        # a genotype in forward coordinates is returned in top
        reference = self.lines[0]
        self.assertEqual(reference, test)

    def test_process_genotypes_ab(self):
        # read a file in ab coordinates
        self.plinkio.pedfile = str(DATA_DIR / "plinktest_ab.ped")
        ab = next(self.plinkio.read_pedfile())

        # asking for top coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina top format",
            self.plinkio._process_genotypes,
            ab,
            "top"
        )

        # asking for forward coordinates throws an exception
        self.assertRaisesRegex(
            CodingException,
            "Not illumina forward format",
            self.plinkio._process_genotypes,
            ab,
            "forward"
        )

        test = self.plinkio._process_genotypes(ab, 'ab')

        # a genotype in ab coordinates is returned in top
        reference = self.lines[0]
        self.assertEqual(reference, test)

    def test_get_or_create_sample(self):
        # get a sample line
        line = self.lines[0]

        # get a breed
        breed = Breed.objects(
            aliases__match={'fid': line[0], 'dataset': self.dataset}).get()

        # no individuals for such a breed yet
        self.assertEqual(breed.n_individuals, 0)
        self.assertEqual(SampleSheep.objects.count(), 0)

        # call the method under test and collect the sample
        reference = self.plinkio.get_or_create_sample(
            line, self.dataset, breed)
        self.assertIsInstance(reference, SampleSheep)

        # assert one element in database
        self.assertEqual(SampleSheep.objects.count(), 1)

        # check individuals updated
        breed.reload()
        self.assertEqual(breed.n_individuals, 1)

        # calling this function twice returns the same individual
        test = self.plinkio.get_or_create_sample(line, self.dataset, breed)
        self.assertIsInstance(test, SampleSheep)

        # assert still one element in database
        self.assertEqual(SampleSheep.objects.count(), 1)

        # check individuals count unchanged
        breed.reload()
        self.assertEqual(breed.n_individuals, 1)

        self.assertEqual(reference, test)

    def test_get_sample(self):
        """Get a sample without creating it"""

        # get a sample line
        line = self.lines[0]

        # get a breed
        breed = Breed.objects(
            aliases__match={'fid': line[0], 'dataset': self.dataset}).get()

        # no individuals for such a breed yet
        self.assertEqual(breed.n_individuals, 0)
        self.assertEqual(SampleSheep.objects.count(), 0)

        # call the method under test and collect the sample
        reference = self.plinkio.get_sample(line, self.dataset)

        # there are no samples in the database, so get_sample returns None
        self.assertIsNone(reference)

        # assert no elements in database
        self.assertEqual(SampleSheep.objects.count(), 0)

        # check individuals not updated
        breed.reload()
        self.assertEqual(breed.n_individuals, 0)

        # call get_or_create to insert this sample into the database
        test = self.plinkio.get_or_create_sample(line, self.dataset, breed)
        self.assertIsInstance(test, SampleSheep)

        # assert one element in database
        self.assertEqual(SampleSheep.objects.count(), 1)

        # check individuals updated
        breed.reload()
        self.assertEqual(breed.n_individuals, 1)

        # call get_sample again to collect the sample
        reference = self.plinkio.get_sample(line, self.dataset)

        # now reference is a sample
        self.assertIsInstance(reference, SampleSheep)

        # check how many samples I have
        breed.reload()
        self.assertEqual(breed.n_individuals, 1)
        self.assertEqual(SampleSheep.objects.count(), 1)

        # check that objects are the same
        self.assertEqual(reference, test)

    def test_get_sample_by_alias(self):
        """Get a sample by alias without creating it"""

        # get a sample line
        line = self.lines[0].copy()

        # get a breed
        breed = Breed.objects(
            aliases__match={'fid': line[0], 'dataset': self.dataset}).get()

        # call get_or_create to insert this sample into the database
        test = self.plinkio.get_or_create_sample(line, self.dataset, breed)

        # add an alias to this sample
        test.alias = "alias-1"
        test.save()

        # replace sample name with alias
        line[1] = "alias-1"

        # call get_sample again to collect the sample
        reference = self.plinkio.get_sample(
            line, self.dataset, sample_field='alias')

        # now reference is a sample
        self.assertIsInstance(reference, SampleSheep)

        # check how many samples I have
        breed.reload()
        self.assertEqual(breed.n_individuals, 1)
        self.assertEqual(SampleSheep.objects.count(), 1)

        # check that objects are the same
        self.assertEqual(reference, test)

    def test_sample_relies_dataset(self):
        """Getting two samples with the same original id is not a problem"""

        # get a sample line
        line = self.lines[0]

        # get a breed
        breed = Breed.objects(
            aliases__match={'fid': line[0], 'dataset': self.dataset}).get()

        # create a copy of dataset
        new_dataset = deepcopy(self.dataset)
        new_dataset.file = "test2.zip"
        new_dataset.id = None
        new_dataset.save()

        # ok, create two samplesheep objects with the same original_id
        first = self.plinkio.get_or_create_sample(line, self.dataset, breed)
        second = self.plinkio.get_or_create_sample(line, new_dataset, breed)

        self.assertEqual(SampleSheep.objects.count(), 2)
        self.assertEqual(first.original_id, second.original_id)

        # need to delete second sample in order to remove the new dataset
        # (mongoengine.DENY behaviour for deleting samples)
        second.delete()
        first.delete()

        # reset database to original state
        new_dataset.delete()

    def test_process_pedline(self):
        # get a sample line
        line = self.lines[0]
        test = self.plinkio._process_pedline(line, self.dataset, 'top', True)

        # define reference
        reference = line.copy()
        reference[0], reference[1] = ['TEX', 'ITOA-TEX-000000001']

        # throw away the last snps (not found in database)
        del reference[-2:]

        self.assertEqual(reference, test)

    def get_relationships(self):
        """Helper function to define fake relationships"""

        # get a sample line
        line = self.lines[0]

        # make a copy and change some values
        father = deepcopy(line)
        mother = deepcopy(line)
        child = deepcopy(line)

        # let's start with the father: change id and sex column
        father[1], father[4] = "1", "1"

        # now the mother (same columns)
        mother[1], mother[4] = "2", "2"

        # the last one is the child: set id and parent ids (unknown sex)
        child[1], child[2], child[3] = "3", "1", "2"

        return father, mother, child

    def test_process_pedline_relationship(self):
        """Test a pedline with father or mother ids"""

        father, mother, child = self.get_relationships()

        # process data and insert records
        for i, item in enumerate([father, mother, child]):
            test = self.plinkio._process_pedline(
                item, self.dataset, 'top', True)

            # define smarter_id
            smarter_id = f"ITOA-TEX-00000000{i+1}"

            # test ped line items
            self.assertEqual(test[0], "TEX")
            self.assertEqual(test[1], smarter_id)
            self.assertEqual(test[4], item[4])

            # assert database objects
            sample = SampleSheep.objects(smarter_id=smarter_id).get()

            # special child case
            if item == child:
                sample_father = SampleSheep.objects(
                    original_id=item[2], dataset=self.dataset).get()
                sample_mother = SampleSheep.objects(
                    original_id=item[3], dataset=self.dataset).get()

                self.assertIsNone(sample.sex)
                self.assertEqual(sample.father_id, sample_father)
                self.assertEqual(sample.mother_id, sample_mother)
                self.assertEqual(test[2], sample_father.smarter_id)
                self.assertEqual(test[3], sample_mother.smarter_id)
            else:
                self.assertEqual(sample.sex, SEX(int(item[4])))

    def test_update_relationship(self):
        """Test the possibility to update a sample relationship"""

        # ped lines could be in the wrong order: a child sample could be
        # written before its parents, and so its ped line can't be written
        # correctly on the first pass

        # define fake relationships
        father, mother, child = self.get_relationships()

        # process data and insert records
        for i, item in enumerate([child, father, mother]):
            self.plinkio._process_pedline(item, self.dataset, 'top', True)

            # special child case
            if item == child:
                # define smarter_id
                smarter_id = f"ITOA-TEX-00000000{i+1}"

                # assert database objects
                sample_child = SampleSheep.objects(
                    smarter_id=smarter_id).get()

                # assert child has no relationship
                self.assertIsNone(sample_child.father_id)
                self.assertIsNone(sample_child.mother_id)
                self.assertIsNone(sample_child.sex)

        # ok now try to process child again
        test = self.plinkio._process_pedline(child, self.dataset, 'top', True)

        # refresh database object
        sample_child.reload()
        sample_father = sample_child.father_id.fetch()
        sample_mother = sample_child.mother_id.fetch()

        self.assertEqual(sample_child.smarter_id, test[1])
        self.assertEqual(sample_father.smarter_id, test[2])
        self.assertEqual(sample_mother.smarter_id, test[3])

    def test_unmanaged_relationship(self):
        """Test unsetting ped columns if relationship can't be derived
        from data"""

        # define fake relationships
        child = self.get_relationships()[-1]

        # insert child without parents
        test = self.plinkio._process_pedline(child, self.dataset, 'top', True)

        # define smarter_id
        smarter_id = "ITOA-TEX-000000001"
        self.assertEqual(test[1], smarter_id)
        self.assertEqual(test[2], "0")
        self.assertEqual(test[3], "0")

    def test_update_pedfile(self):
        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "plinktest_updated.ped"
            self.plinkio.update_pedfile(
                str(outfile), self.dataset, 'top', True)

            # now open outputfile and test stuff
            test = TextPlinkIO(
                mapfile=str(DATA_DIR / "plinktest.map"),
                pedfile=str(outfile))

            # assert two records written
            self.assertEqual(len(list(test.read_pedfile())), 2)

    def test_update_pedfile_no_insert(self):
        """Test that no samples are created while processing genotypes"""

        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "plinktest_updated.ped"
            self.plinkio.update_pedfile(
                str(outfile), self.dataset, 'top', False)

            with open(outfile) as handle:
                data = handle.read()

            # outfile is empty
            self.assertEqual(data, '')

            # no sample created
            self.assertEqual(SampleSheep.objects.count(), 0)


class BinaryPlinkIOTest(
        VariantsMixin, SmarterIDMixin, MongoMockMixin, unittest.TestCase):

    def setUp(self):
        super().setUp()

        self.plinkio = BinaryPlinkIO(
            prefix=str(DATA_DIR / "plinktest"),
            species="Sheep")

        # read info from map
        self.plinkio.read_mapfile()
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1", imported_from="SNPchiMp v.3")

        # read the ped file lines
        self.lines = list(self.plinkio.read_pedfile())

    def test_read_mapfile(self):
        self.assertIsInstance(self.plinkio.mapdata, list)
        self.assertEqual(len(self.plinkio.mapdata), 4)
        for record in self.plinkio.mapdata:
            self.assertIsInstance(record, MapRecord)

    def test_read_pedfile(self):
        test = self.plinkio.read_pedfile()
        self.assertIsInstance(test, types.GeneratorType)

        # consume data and count rows
        test = list(test)
        self.assertEqual(len(test), 2)

    def test_process_pedline(self):
        # define reference
        reference = [
            'TEX', 'ITOA-TEX-000000001', '0', '0', '0', -9,
            'A', 'A', 'A', 'G', 'G', 'G']

        # get a line for testing
        line = self.lines[0]

        # get a dataset
        dataset = Dataset.objects(file="test.zip").get()

        test = self.plinkio._process_pedline(line, dataset, 'top', True)

        self.assertEqual(reference, test)


class IlluminaReportIOMap(VariantsMixin, MongoMockMixin, unittest.TestCase):
    def setUp(self):
        super().setUp()

        self.plinkio = IlluminaReportIO(
            snpfile=str(DATA_DIR / "snplist.txt"),
            report=str(DATA_DIR / "finalreport.txt"),
            species="Sheep")

        self.plinkio.read_snpfile()

    def test_read_snpfile(self):
        self.assertIsInstance(self.plinkio.mapdata, list)
        self.assertEqual(len(self.plinkio.mapdata), 2)
        for record in self.plinkio.mapdata:
            self.assertIsInstance(record, tuple)

    def test_fetch_coordinates(self):
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1", imported_from="SNPchiMp v.3")

        self.assertIsInstance(self.plinkio.locations, list)
        self.assertEqual(len(self.plinkio.locations), 2)
        self.assertIsInstance(self.plinkio.filtered, set)
        self.assertEqual(len(self.plinkio.filtered), 0)

        for record in self.plinkio.locations:
            self.assertIsInstance(record, Location)

    def test_update_mapfile(self):
        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            # this is the temporary output file
            outfile = pathlib.Path(tmpdirname) / "plinktest_updated.map"

            self.plinkio.fetch_coordinates(
                version="Oar_v3.1", imported_from="SNPchiMp v.3")
            self.plinkio.update_mapfile(str(outfile))

            # now open outputfile and test stuff
            test = TextPlinkIO(mapfile=str(outfile))
            test.read_mapfile()

            # there were only two snps in the dataset
            # HINT: could the final report contain SNPs not included in
            # the database?
            self.assertEqual(len(test.mapdata), 2)

            for record in test.mapdata:
                variant = VariantSheep.objects(name=record.name).get()
                location = variant.get_location(
                    version="Oar_v3.1", imported_from="SNPchiMp v.3")
                self.assertEqual(location.chrom, record.chrom)
                self.assertEqual(location.position, record.position)


class IlluminaReportIOPed(
        VariantsMixin, SmarterIDMixin, SupportedChipMixin, MongoMockMixin,
        unittest.TestCase):

    def setUp(self):
        super().setUp()

        self.dataset = Dataset.objects.get(file="test.zip")

        self.plinkio = IlluminaReportIO(
            snpfile=str(DATA_DIR / "snplist.txt"),
            report=str(DATA_DIR / "finalreport.txt"),
            species="Sheep")

        # read info from map
        self.plinkio.read_snpfile()
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1", imported_from="SNPchiMp v.3")

        # read the report file lines
        self.lines = list(self.plinkio.read_reportfile(fid="TEX"))

    def test_read_reportfile(self):
        test = self.plinkio.read_reportfile(fid="TEX")
        self.assertIsInstance(test, types.GeneratorType)

        # consume data and count rows
        test = list(test)
        self.assertEqual(len(test), 2)

    def test_read_reportfile_no_fid(self):
        """Try to determine fid from database"""

        # create two fake samples to collect fid relying on the database
        for i in range(2):
            sample = SampleSheep(
                original_id=f"{i+1}",
                country="Italy",
                breed="Texel",
                breed_code="TEX",
                species="Sheep",
                dataset=self.dataset,
                type_="background",
                chip_name=self.chip_name
            )
            sample.save()

        test = self.plinkio.read_reportfile(dataset=self.dataset)
        self.assertIsInstance(test, types.GeneratorType)

        # consume data and count rows
        test = list(test)
        self.assertEqual(len(test), 2)

    def test_process_pedline(self):
        # define reference
        reference = [
            'TEX', 'ITOA-TEX-000000001', '0', '0', '0', -9,
            'A', 'A', 'G', 'G']

        # get a line for testing
        line = self.lines[0]

        # get a dataset
        dataset = Dataset.objects(file="test.zip").get()

        test = self.plinkio._process_pedline(line, dataset, 'ab', True)

        self.assertEqual(reference, test)

    def test_update_pedfile(self):
        # get a dataset
        dataset = Dataset.objects(file="test.zip").get()

        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "plinktest_updated.ped"
            self.plinkio.update_pedfile(
                str(outfile), dataset, 'ab', fid="TEX", create_samples=True)

            # now open outputfile and test stuff
            test = TextPlinkIO(
                mapfile=str(DATA_DIR / "plinktest.map"),
                pedfile=str(outfile))

            # assert two records written
            self.assertEqual(len(list(test.read_pedfile())), 2)


class AffyPlinkIOMapTest(VariantsMixin, MongoMockMixin, unittest.TestCase):
    # load a custom fixture for this class
    variant_fixture = "affy_variants.json"

    def setUp(self):
        super().setUp()

        self.plinkio = AffyPlinkIO(
            prefix=str(DATA_DIR / "affytest"),
            species="Sheep",
            chip_name="AffymetrixAxiomOviCan"
        )

    def test_read_mapfile(self):
        self.plinkio.read_mapfile()
        self.assertIsInstance(self.plinkio.mapdata, list)
        self.assertEqual(len(self.plinkio.mapdata), 4)
        for record in self.plinkio.mapdata:
            self.assertIsInstance(record, MapRecord)

    def test_fetch_coordinates(self):
        self.plinkio.read_mapfile()
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1",
            imported_from="SNPchiMp v.3",
            search_field='probeset_id'
        )

        self.assertIsInstance(self.plinkio.locations, list)
        self.assertEqual(len(self.plinkio.locations), 4)
        self.assertIsInstance(self.plinkio.filtered, set)
        self.assertEqual(len(self.plinkio.filtered), 2)

        # assert filtered items
        self.assertIn(2, self.plinkio.filtered)
        self.assertIn(3, self.plinkio.filtered)

        for idx, record in enumerate(self.plinkio.locations):
            if idx in self.plinkio.filtered:
                self.assertIsNone(record)
            else:
                self.assertIsInstance(record, Location)

    def test_update_mapfile(self):
        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "affytest_updated.map"
            self.plinkio.read_mapfile()
            self.plinkio.fetch_coordinates(
                version="Oar_v3.1",
                imported_from="SNPchiMp v.3",
                search_field='probeset_id'
            )
            self.plinkio.update_mapfile(str(outfile))

            # now open outputfile and test stuff
            test = TextPlinkIO(mapfile=str(outfile))
            test.read_mapfile()

            # two snps cannot be mapped
            self.assertEqual(len(test.mapdata), 2)

            for record in test.mapdata:
                variant = VariantSheep.objects(name=record.name).get()
                location = variant.get_location(
                    version="Oar_v3.1", imported_from="SNPchiMp v.3")
                self.assertEqual(location.chrom, record.chrom)
                self.assertEqual(location.position, record.position)


class AffyPlinkIOPedTest(
        VariantsMixin, SmarterIDMixin, MongoMockMixin, unittest.TestCase):

    # load a custom fixture for this class
    variant_fixture = "affy_variants.json"

    def setUp(self):
        super().setUp()

        self.plinkio = AffyPlinkIO(
            prefix=str(DATA_DIR / "affytest"),
            species="Sheep",
            chip_name="AffymetrixAxiomOviCan"
        )

        # read info from map
        self.plinkio.read_mapfile()

        # need to read the destination coordinates once, to determine which
        # SNPs don't have a position on the destination assembly
        self.plinkio.fetch_coordinates(
            version="Oar_v3.1",
            imported_from="SNPchiMp v.3",
            search_field='probeset_id'
        )

        # need to track filtered SNPs
        self.filtered_snps = self.plinkio.filtered

        # now read the original coordinates
        self.plinkio.fetch_coordinates(
            version="Oar_v4.0",
            imported_from="affymetrix",
            search_field='probeset_id'
        )

        # then merge in the SNPs filtered with the desired coordinate system
        self.plinkio.filtered.update(self.filtered_snps)

        # read ped files
        self.lines = list(self.plinkio.read_pedfile(fid="TEX"))

    def test_assert_filtered(self):
        self.assertEqual(self.plinkio.filtered, {2, 3})

    def test_read_pedfile(self):
        test = self.plinkio.read_pedfile(fid="TEX")
        self.assertIsInstance(test, types.GeneratorType)

        # consume data and count rows
        test = list(test)
        self.assertEqual(len(test), 2)

    def test_process_pedline(self):
        # define reference
        reference = [
            'TEX', 'ITOA-TEX-000000001', '0', '0', '0', -9,
            'A', 'G', 'A', 'A']

        # get a line for testing
        line = self.lines[0]

        # get a dataset
        dataset = Dataset.objects(file="test.zip").get()

        test = self.plinkio._process_pedline(line, dataset, 'affymetrix', True)

        self.assertEqual(reference, test)

        for sample in SampleSheep.objects.all():
            print(sample, sample.original_id)

    def test_update_pedfile(self):
        # get a dataset
        dataset = Dataset.objects(file="test.zip").get()

        # create a temporary directory using the context manager
        with tempfile.TemporaryDirectory() as tmpdirname:
            outfile = pathlib.Path(tmpdirname) / "affytest_updated.ped"
            self.plinkio.update_pedfile(
                str(outfile), dataset, 'affymetrix', fid="TEX",
                create_samples=True)

            # now open outputfile and test stuff
            test = TextPlinkIO(
                mapfile=str(DATA_DIR / "plinktest.map"),
                pedfile=str(outfile))

            # assert two records written
            self.assertEqual(len(list(test.read_pedfile())), 2)


if __name__ == '__main__':
    unittest.main()
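The tests above revolve around PLINK text files. As a quick orientation, here is a minimal, standalone sketch of the .map parsing they exercise; it is not the project's src.features.plinkio implementation, and MapRecordSketch / read_mapfile_sketch are hypothetical names. It only assumes the standard four-column PLINK .map layout (chromosome, SNP name, genetic distance in cM, base-pair position).

# Minimal sketch of PLINK .map parsing (assumption: standard 4-column layout)
from collections import namedtuple

MapRecordSketch = namedtuple(
    "MapRecordSketch", ["chrom", "name", "cm", "position"])


def read_mapfile_sketch(path):
    """Yield one MapRecordSketch per line of a whitespace-separated .map."""
    with open(path) as handle:
        for line in handle:
            chrom, name, cm, position = line.split()
            yield MapRecordSketch(chrom, name, float(cm), int(position))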
33.173289
79
0.611745
3,347
30,055
5.383926
0.106961
0.068979
0.019145
0.009378
0.749445
0.709767
0.695228
0.683574
0.672253
0.639789
0
0.011316
0.291366
30,055
905
80
33.209945
0.834773
0.170388
0
0.642722
0
0
0.059445
0.009592
0
0
0
0
0.221172
1
0.081285
false
0
0.041588
0
0.141777
0.00189
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad818906801685ab3f1a9a9809e540bf3af04839
1,637
py
Python
sklearn/gaussian_process/tests/_mini_sequence_kernel.py
MaiRajborirug/scikit-learn
c18d015372f7041099d19c215cd4c36ffd6fe5c5
[ "BSD-3-Clause" ]
50,961
2015-01-01T06:06:31.000Z
2022-03-31T23:40:12.000Z
sklearn/gaussian_process/tests/_mini_sequence_kernel.py
MaiRajborirug/scikit-learn
c18d015372f7041099d19c215cd4c36ffd6fe5c5
[ "BSD-3-Clause" ]
17,065
2015-01-01T02:01:58.000Z
2022-03-31T23:48:34.000Z
sklearn/gaussian_process/tests/_mini_sequence_kernel.py
MaiRajborirug/scikit-learn
c18d015372f7041099d19c215cd4c36ffd6fe5c5
[ "BSD-3-Clause" ]
26,886
2015-01-01T00:59:27.000Z
2022-03-31T18:03:23.000Z
from sklearn.gaussian_process.kernels import Kernel, Hyperparameter
from sklearn.gaussian_process.kernels import GenericKernelMixin
from sklearn.gaussian_process.kernels import StationaryKernelMixin
import numpy as np
from sklearn.base import clone


class MiniSeqKernel(GenericKernelMixin, StationaryKernelMixin, Kernel):
    """
    A minimal (but valid) convolutional kernel for sequences of variable
    length.
    """

    def __init__(self, baseline_similarity=0.5,
                 baseline_similarity_bounds=(1e-5, 1)):
        self.baseline_similarity = baseline_similarity
        self.baseline_similarity_bounds = baseline_similarity_bounds

    @property
    def hyperparameter_baseline_similarity(self):
        return Hyperparameter(
            "baseline_similarity", "numeric", self.baseline_similarity_bounds
        )

    def _f(self, s1, s2):
        return sum(
            [1.0 if c1 == c2 else self.baseline_similarity
             for c1 in s1 for c2 in s2]
        )

    def _g(self, s1, s2):
        return sum([0.0 if c1 == c2 else 1.0 for c1 in s1 for c2 in s2])

    def __call__(self, X, Y=None, eval_gradient=False):
        if Y is None:
            Y = X

        if eval_gradient:
            return (
                np.array([[self._f(x, y) for y in Y] for x in X]),
                np.array([[[self._g(x, y)] for y in Y] for x in X]),
            )
        else:
            return np.array([[self._f(x, y) for y in Y] for x in X])

    def diag(self, X):
        return np.array([self._f(x, x) for x in X])

    def clone_with_theta(self, theta):
        cloned = clone(self)
        cloned.theta = theta
        return cloned
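A brief usage sketch for the MiniSeqKernel defined above; nothing beyond the class itself and numpy is assumed. It builds a Gram matrix over variable-length strings and checks that diag() matches the Gram diagonal.

if __name__ == "__main__":
    X = ["AGCT", "AGC", "TTG"]
    kernel = MiniSeqKernel(baseline_similarity=0.5)

    K = kernel(X)                  # 3x3 Gram matrix between sequences
    d = kernel.diag(X)             # per-sequence self-similarities
    _, K_grad = kernel(X, eval_gradient=True)  # gradient w.r.t. theta

    assert np.allclose(np.diag(K), d)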
32.098039
86
0.637141
226
1,637
4.455752
0.278761
0.178749
0.109235
0.027805
0.328699
0.260179
0.125124
0.125124
0.125124
0.083416
0
0.022632
0.271228
1,637
50
87
32.74
0.821459
0.046426
0
0
0
0
0.016872
0
0
0
0
0
0
1
0.194444
false
0
0.138889
0.111111
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
ad85fccfc5759ceb3c3b82da18eb50b3ebeb4483
1,576
py
Python
setup.py
vvanholl/estop
d82537014e545cd154ef4f8686dbe557f8cc99d7
[ "Apache-2.0" ]
9
2018-10-02T11:37:46.000Z
2019-08-26T04:22:24.000Z
setup.py
vvanholl/estop
d82537014e545cd154ef4f8686dbe557f8cc99d7
[ "Apache-2.0" ]
null
null
null
setup.py
vvanholl/estop
d82537014e545cd154ef4f8686dbe557f8cc99d7
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python

from setuptools import setup, find_packages

with open('VERSION') as version_file:
    version = version_file.read()

with open('README.rst') as readme_file:
    long_description = readme_file.read()

with open('requirements.txt') as requirements_file:
    install_requires = requirements_file.read().splitlines()

classifiers = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: System Administrators',
    'License :: OSI Approved :: Apache Software License',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.0',
    'Programming Language :: Python :: 3.1',
    'Programming Language :: Python :: 3.2',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Topic :: System :: Clustering',
    'Topic :: System :: Networking',
    'Topic :: System :: Networking :: Monitoring',
    'Topic :: Utilities',
]

scripts = [
    'bin/estop'
]

setup(
    name='estop',
    version=version,
    description='Monitor and control Elasticsearch tasks',
    long_description=long_description,
    url='https://github.com/vvanholl/estop',
    author='Vincent Van Hollebeke',
    author_email='vincent@compuscene.org',
    license='Apache License, Version 2.0',
    install_requires=install_requires,
    classifiers=classifiers,
    packages=find_packages(exclude=('tests',)),
    scripts=scripts
)
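One small caveat about the dynamic metadata above: read() keeps the trailing newline from the VERSION file, so it ends up in the version string. A common hardening, shown here as a hedged suggestion rather than something this setup.py does, is to strip whitespace when reading single-value files:

with open('VERSION') as version_file:
    version = version_file.read().strip()  # drop the trailing newline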
30.307692
60
0.670051
171
1,576
6.087719
0.438596
0.182517
0.240154
0.199808
0.051873
0
0
0
0
0
0
0.015625
0.187817
1,576
51
61
30.901961
0.797656
0.01269
0
0
0
0
0.51254
0.014148
0
0
0
0
0
1
0
false
0
0.023256
0
0.023256
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad8eb06c4b7702a0f38236141b9ecfe474b64c86
5,233
py
Python
solution/LP_MDS.py
Pro7ech/CryptoChallenge
6ba33e2c9479ce2cefc36c6e0d0b45ddca05dfc9
[ "Apache-2.0" ]
4
2021-01-11T13:02:12.000Z
2022-01-01T13:45:53.000Z
solution/LP_MDS.py
Pro7ech/CryptoChallenge
6ba33e2c9479ce2cefc36c6e0d0b45ddca05dfc9
[ "Apache-2.0" ]
null
null
null
solution/LP_MDS.py
Pro7ech/CryptoChallenge
6ba33e2c9479ce2cefc36c6e0d0b45ddca05dfc9
[ "Apache-2.0" ]
null
null
null
from random import randrange

# Precomputed parity S-BOX from 0 to 255
P = [0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,
     1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,
     1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,
     0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,
     1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,
     0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,
     0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,
     1,0,0,1,0,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0]


# Computes the parity of a 128 bit unsigned integer
def parity(x):
    x ^= x >> 64
    x ^= x >> 32
    x ^= x >> 16
    x ^= x >> 8
    return P[x & 0xFF]


def M(x, m, s):
    """
    INPUT : x = vector (int), m = matrix (as list of vectors),
            s = vectors bit length
    OUTPUT : vector (int), result of m*x
    """
    z = 0
    for i in range(s):
        z <<= 1
        z |= parity(x & m[i])
    return z


def transpose(x, s):
    """
    INPUT : x = matrix of masks (int), s = mask bit length
    OUTPUT : transposed masks
    """
    # Converts the masks into binary lists
    m = []
    for mask in x:
        tmp = []
        for i in range(s):
            tmp += [(mask >> (s - i - 1)) & 1]
        m += [tmp]

    # Transposes
    t = [[m[j][i] for j in range(len(m))] for i in range(len(m[0]))]

    # Converts the binary lists back into masks
    m = []
    for x in t:
        mask = 0
        for bit in x:
            mask <<= 1
            mask |= bit
        m += [mask]
    return m


def expand(x):
    return [(x >> (120 - (i << 3)) & 0xFF) for i in range(16)]


def squeeze(w):
    x = 0
    for i in w:
        x <<= 8
        x |= i
    return x


def mds(x):
    w = expand(x)
    (z0, z1, z2, z3, z4, z5, z6, z7,
     z8, z9, z10, z11, z12, z13, z14, z15) = w
    w0 = z2 ^ z3 ^ z4 ^ z6 ^ z7
    w1 = z0 ^ z1 ^ z3 ^ z4 ^ z7
    w2 = z0 ^ z1 ^ z4 ^ z5 ^ z6
    w3 = z1 ^ z2 ^ z3 ^ z5 ^ z6
    w8 = z0 ^ z2 ^ z3 ^ z6 ^ z7
    w9 = z0 ^ z3 ^ z4 ^ z5 ^ z7
    w10 = z0 ^ z1 ^ z2 ^ z4 ^ z5
    w11 = z1 ^ z2 ^ z5 ^ z6 ^ z7
    w4 = z10 ^ z11 ^ z12 ^ z14 ^ z15
    w5 = z8 ^ z9 ^ z11 ^ z12 ^ z15
    w6 = z8 ^ z9 ^ z12 ^ z13 ^ z14
    w7 = z9 ^ z10 ^ z11 ^ z13 ^ z14
    w12 = z8 ^ z10 ^ z11 ^ z14 ^ z15
    w13 = z8 ^ z11 ^ z12 ^ z13 ^ z15
    w14 = z8 ^ z9 ^ z10 ^ z12 ^ z13
    w15 = z9 ^ z10 ^ z13 ^ z14 ^ z15
    return squeeze((w0, w1, w2, w3, w4, w5, w6, w7,
                    w8, w9, w10, w11, w12, w13, w14, w15))


def mds_t(x):
    w = expand(x)
    (z0, z1, z2, z3, z8, z9, z10, z11,
     z4, z5, z6, z7, z12, z13, z14, z15) = w
    w0 = z1 ^ z2 ^ z4 ^ z5 ^ z6
    w1 = z1 ^ z2 ^ z3 ^ z6 ^ z7
    w2 = z0 ^ z3 ^ z4 ^ z6 ^ z7
    w3 = z0 ^ z1 ^ z3 ^ z4 ^ z5
    w4 = z0 ^ z1 ^ z2 ^ z5 ^ z6
    w5 = z2 ^ z3 ^ z5 ^ z6 ^ z7
    w6 = z0 ^ z2 ^ z3 ^ z4 ^ z7
    w7 = z0 ^ z1 ^ z4 ^ z5 ^ z7
    w8 = z9 ^ z10 ^ z12 ^ z13 ^ z14
    w9 = z9 ^ z10 ^ z11 ^ z14 ^ z15
    w10 = z8 ^ z11 ^ z12 ^ z14 ^ z15
    w11 = z8 ^ z9 ^ z11 ^ z12 ^ z13
    w12 = z8 ^ z9 ^ z10 ^ z13 ^ z14
    w13 = z10 ^ z11 ^ z13 ^ z14 ^ z15
    w14 = z8 ^ z10 ^ z11 ^ z12 ^ z15
    w15 = z8 ^ z9 ^ z12 ^ z13 ^ z15
    return squeeze((w0, w1, w2, w3, w4, w5, w6, w7,
                    w8, w9, w10, w11, w12, w13, w14, w15))


def mds_t_inv(x):
    w = expand(x)
    (z0, z1, z2, z3, z4, z5, z6, z7,
     z8, z9, z10, z11, z12, z13, z14, z15) = w
    w0 = z1 ^ z2 ^ z4 ^ z5 ^ z6
    w1 = z1 ^ z2 ^ z3 ^ z6 ^ z7
    w2 = z0 ^ z3 ^ z4 ^ z6 ^ z7
    w3 = z0 ^ z1 ^ z3 ^ z4 ^ z5
    w8 = z0 ^ z1 ^ z2 ^ z5 ^ z6
    w9 = z2 ^ z3 ^ z5 ^ z6 ^ z7
    w10 = z0 ^ z2 ^ z3 ^ z4 ^ z7
    w11 = z0 ^ z1 ^ z4 ^ z5 ^ z7
    w4 = z9 ^ z10 ^ z12 ^ z13 ^ z14
    w5 = z9 ^ z10 ^ z11 ^ z14 ^ z15
    w6 = z8 ^ z11 ^ z12 ^ z14 ^ z15
    w7 = z8 ^ z9 ^ z11 ^ z12 ^ z13
    w12 = z8 ^ z9 ^ z10 ^ z13 ^ z14
    w13 = z10 ^ z11 ^ z13 ^ z14 ^ z15
    w14 = z8 ^ z10 ^ z11 ^ z12 ^ z15
    w15 = z8 ^ z9 ^ z12 ^ z13 ^ z15
    return squeeze((w0, w1, w2, w3, w4, w5, w6, w7,
                    w8, w9, w10, w11, w12, w13, w14, w15))


def test():
    state = True
    for i in range(2560):
        x = randrange(1 << 128)
        mask = randrange(1 << 128)
        if parity((x & mds_t(mask)) ^ (mds(x) & mask)):
            state = False
            break
        if parity((x & mask) ^ (mds(x) & mds_t_inv(mask))):
            state = False
            break
        if not state:
            break
    if state:
        print('Success')

# test()
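test() checks that mds_t really behaves as the transpose of mds with respect to the GF(2) inner product <u, v> = parity(u & v), i.e. <a, M(x)> == <M^T(a), x>. Below is a standalone sketch of that identity on a tiny 4-bit matrix, reusing the transpose() helper above; dot_gf2, rows and apply_masks are illustrative names, not part of the original file.

def dot_gf2(u, v):
    # GF(2) inner product: parity of the AND of two bit vectors
    return bin(u & v).count("1") & 1


def apply_masks(masks, x, s=4):
    # multiply the matrix given as row masks by the bit vector x
    z = 0
    for i in range(s):
        z = (z << 1) | dot_gf2(x, masks[i])
    return z


rows = [0b1100, 0b0110, 0b0011, 0b1001]  # a 4x4 matrix as row masks
cols = transpose(rows, 4)                # its transpose, via the helper above

for x in range(16):
    for a in range(16):
        assert dot_gf2(a, apply_masks(rows, x)) == \
            dot_gf2(apply_masks(cols, a), x)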
23.786364
77
0.411236
1,012
5,233
2.120553
0.118577
0.079217
0.060112
0.080149
0.530755
0.423579
0.423579
0.423579
0.423579
0.423579
0
0.287556
0.407223
5,233
219
78
23.894977
0.404255
0.077776
0
0.469512
0
0
0.001543
0
0
0
0.001763
0
0
1
0.054878
false
0
0.006098
0.006098
0.109756
0.006098
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad9479590c17eb2a528c60aa5a52ebaceddfe0da
36,853
py
Python
pysnmp/SMON2-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
11
2021-02-02T16:27:16.000Z
2021-08-31T06:22:49.000Z
pysnmp/SMON2-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
75
2021-02-24T17:30:31.000Z
2021-12-08T00:01:18.000Z
pysnmp/SMON2-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
#
# PySNMP MIB module SMON2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SMON2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:59:43 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
smon, = mibBuilder.importSymbols("APPLIC-MIB", "smon")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint")
OwnerString, = mibBuilder.importSymbols("RMON-MIB", "OwnerString")
DataSource, LastCreateTime, TimeFilter, hlMatrixControlIndex, ZeroBasedCounter32, protocolDirLocalIndex = mibBuilder.importSymbols("RMON2-MIB", "DataSource", "LastCreateTime", "TimeFilter", "hlMatrixControlIndex", "ZeroBasedCounter32", "protocolDirLocalIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Unsigned32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, MibIdentifier, Integer32, TimeTicks, iso, NotificationType, Counter64, Bits, ModuleIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Unsigned32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "MibIdentifier", "Integer32", "TimeTicks", "iso", "NotificationType", "Counter64", "Bits", "ModuleIdentity", "Gauge32")
TimeStamp, TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "TextualConvention", "RowStatus", "DisplayString")
xsSmon = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 2))
xsSmonResourceAllocation = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSmonResourceAllocation.setStatus('current')
xsHostTopN = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 2, 2))
xsHostTopNControlTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1), )
if mibBuilder.loadTexts: xsHostTopNControlTable.setStatus('current')
xsHostTopNControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1), ).setIndexNames((0, "SMON2-MIB", "xsHostTopNControlIndex"))
if mibBuilder.loadTexts: xsHostTopNControlEntry.setStatus('current')
xsHostTopNControlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: xsHostTopNControlIndex.setStatus('current')
xsHostTopNControlHostIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlHostIndex.setStatus('current')
xsHostTopNControlRateBase = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("xsHostTopNInPkts", 1), ("xsHostTopNOutPkts", 2), ("xsHostTopNInOctets", 3), ("xsHostTopNOutOctets", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlRateBase.setStatus('current')
xsHostTopNControlTimeRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlTimeRemaining.setStatus('current')
xsHostTopNControlDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNControlDuration.setStatus('current')
xsHostTopNControlRequestedSize = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(150)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlRequestedSize.setStatus('current')
xsHostTopNControlGrantedSize = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNControlGrantedSize.setStatus('current')
xsHostTopNControlStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 8), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNControlStartTime.setStatus('current')
xsHostTopNControlOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 9), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlOwner.setStatus('current')
xsHostTopNControlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 1, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsHostTopNControlStatus.setStatus('current')
xsHostTopNTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2), )
if mibBuilder.loadTexts: xsHostTopNTable.setStatus('current')
xsHostTopNEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2, 1), ).setIndexNames((0, "SMON2-MIB", "xsHostTopNControlIndex"), (0, "SMON2-MIB", "xsHostTopNIndex"))
if mibBuilder.loadTexts: xsHostTopNEntry.setStatus('current')
xsHostTopNIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: xsHostTopNIndex.setStatus('current')
xsHostTopNProtocolDirLocalIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNProtocolDirLocalIndex.setStatus('current')
xsHostTopNNlAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNNlAddress.setStatus('current')
xsHostTopNRate = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 2, 2, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsHostTopNRate.setStatus('current')
xsFilter = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 2, 3))
xsHostFilterTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1), )
if mibBuilder.loadTexts: xsHostFilterTable.setStatus('current')
xsHostFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1), ).setIndexNames((0, "SMON2-MIB", "xsHostFilterIpAddress"))
if mibBuilder.loadTexts: xsHostFilterEntry.setStatus('current')
xsHostFilterType = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ipHost", 1), ("ipSubnet", 2), ("ipxNet", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterType.setStatus('current')
xsHostFilterIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterIpAddress.setStatus('current')
xsHostFilterIpSubnet = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterIpSubnet.setStatus('current')
xsHostFilterIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterIpMask.setStatus('current')
xsHostFilterIpxAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterIpxAddress.setStatus('current')
xsHostFilterStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("valid", 1), ("invalid", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterStatus.setStatus('current')
xsHostFilterTableClear = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("idle", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsHostFilterTableClear.setStatus('current')
xsSubnet = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 2, 4))
xsSubnetControlTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1), )
if mibBuilder.loadTexts: xsSubnetControlTable.setStatus('current')
xsSubnetControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1), ).setIndexNames((0, "SMON2-MIB", "xsSubnetControlIndex"))
if mibBuilder.loadTexts: xsSubnetControlEntry.setStatus('current')
xsSubnetControlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: xsSubnetControlIndex.setStatus('current')
xsSubnetControlDataSource = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 2), DataSource()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetControlDataSource.setStatus('current')
xsSubnetControlInserts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetControlInserts.setStatus('current')
xsSubnetControlDeletes = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetControlDeletes.setStatus('current')
xsSubnetControlMaxDesiredEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetControlMaxDesiredEntries.setStatus('current')
xsSubnetControlOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 6), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetControlOwner.setStatus('current')
xsSubnetControlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 1, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetControlStatus.setStatus('current')
xsSubnetTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2), )
if mibBuilder.loadTexts: xsSubnetTable.setStatus('current')
xsSubnetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1), ).setIndexNames((0, "SMON2-MIB", "xsSubnetControlIndex"), (0, "SMON2-MIB", "xsSubnetTimeMark"), (0, "RMON2-MIB", "protocolDirLocalIndex"), (0, "SMON2-MIB", "xsSubnetAddress"), (0, "SMON2-MIB", "xsSubnetMask"), (0, "RMON2-MIB", "protocolDirLocalIndex"))
if mibBuilder.loadTexts: xsSubnetEntry.setStatus('current')
xsSubnetTimeMark = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 1), TimeFilter())
if mibBuilder.loadTexts: xsSubnetTimeMark.setStatus('current')
xsSubnetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 2), OctetString())
if mibBuilder.loadTexts: xsSubnetAddress.setStatus('current')
xsSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 3), OctetString())
if mibBuilder.loadTexts: xsSubnetMask.setStatus('current')
xsSubnetInPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 4), ZeroBasedCounter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetInPkts.setStatus('current')
xsSubnetOutPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 5), ZeroBasedCounter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetOutPkts.setStatus('current')
xsSubnetCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 2, 1, 6), LastCreateTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetCreateTime.setStatus('current')
xsSubnetMatrixControlTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3), )
if mibBuilder.loadTexts: xsSubnetMatrixControlTable.setStatus('current')
xsSubnetMatrixControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1), ).setIndexNames((0, "SMON2-MIB", "xsSubnetMatrixControlIndex"))
if mibBuilder.loadTexts: xsSubnetMatrixControlEntry.setStatus('current')
xsSubnetMatrixControlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: xsSubnetMatrixControlIndex.setStatus('current')
xsSubnetMatrixControlDataSource = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 2), DataSource()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetMatrixControlDataSource.setStatus('current')
xsSubnetMatrixControlInserts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixControlInserts.setStatus('current')
xsSubnetMatrixControlDeletes = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixControlDeletes.setStatus('current')
xsSubnetMatrixControlMaxDesiredEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetMatrixControlMaxDesiredEntries.setStatus('current')
xsSubnetMatrixControlOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 7), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetMatrixControlOwner.setStatus('current')
xsSubnetMatrixControlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 3, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: xsSubnetMatrixControlStatus.setStatus('current')
xsSubnetMatrixSDTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4), )
if mibBuilder.loadTexts: xsSubnetMatrixSDTable.setStatus('current')
xsSubnetMatrixSDEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1), ).setIndexNames((0, "SMON2-MIB", "xsSubnetMatrixControlIndex"), (0, "SMON2-MIB", "xsSubnetMatrixSDTimeMark"), (0, "RMON2-MIB", "protocolDirLocalIndex"), (0, "SMON2-MIB", "xsSubnetMatrixSDSourceAddress"), (0, "SMON2-MIB", "xsSubnetMatrixSDSourceMask"), (0, "SMON2-MIB", "xsSubnetMatrixSDDestAddress"), (0, "SMON2-MIB", "xsSubnetMatrixSDDestMask"), (0, "RMON2-MIB", "protocolDirLocalIndex"))
if mibBuilder.loadTexts: xsSubnetMatrixSDEntry.setStatus('current')
xsSubnetMatrixSDTimeMark = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 1), TimeFilter())
if mibBuilder.loadTexts: xsSubnetMatrixSDTimeMark.setStatus('current')
xsSubnetMatrixSDSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 2), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixSDSourceAddress.setStatus('current')
xsSubnetMatrixSDSourceMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 3), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixSDSourceMask.setStatus('current')
xsSubnetMatrixSDDestAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 4), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixSDDestAddress.setStatus('current')
xsSubnetMatrixSDDestMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 5), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixSDDestMask.setStatus('current')
xsSubnetMatrixSDPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 6), ZeroBasedCounter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixSDPkts.setStatus('current')
xsSubnetMatrixSDCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 4, 1, 7), LastCreateTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixSDCreateTime.setStatus('current')
xsSubnetMatrixDSTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5), )
if mibBuilder.loadTexts: xsSubnetMatrixDSTable.setStatus('current')
xsSubnetMatrixDSEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1), ).setIndexNames((0, "RMON2-MIB", "hlMatrixControlIndex"), (0, "SMON2-MIB", "xsSubnetMatrixDSTimeMark"), (0, "RMON2-MIB", "protocolDirLocalIndex"), (0, "SMON2-MIB", "xsSubnetMatrixDSDestAddress"), (0, "SMON2-MIB", "xsSubnetMatrixDSDestMask"), (0, "SMON2-MIB", "xsSubnetMatrixDSSourceAddress"), (0, "SMON2-MIB", "xsSubnetMatrixDSSourceMask"), (0, "RMON2-MIB", "protocolDirLocalIndex"))
if mibBuilder.loadTexts: xsSubnetMatrixDSEntry.setStatus('current')
xsSubnetMatrixDSTimeMark = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 1), TimeFilter())
if mibBuilder.loadTexts: xsSubnetMatrixDSTimeMark.setStatus('current')
xsSubnetMatrixDSSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 2), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixDSSourceAddress.setStatus('current')
xsSubnetMatrixDSSourceMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 3), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixDSSourceMask.setStatus('current')
xsSubnetMatrixDSDestAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 4), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixDSDestAddress.setStatus('current')
xsSubnetMatrixDSDestMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 5), OctetString())
if mibBuilder.loadTexts: xsSubnetMatrixDSDestMask.setStatus('current')
xsSubnetMatrixDSPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 6), ZeroBasedCounter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixDSPkts.setStatus('current')
xsSubnetMatrixDSCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 4, 5, 1, 7), LastCreateTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetMatrixDSCreateTime.setStatus('current')
xsNumberOfProtocols = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsNumberOfProtocols.setStatus('current')
xsProtocolDistStatsTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsProtocolDistStatsTimeStamp.setStatus('current')
xsNlHostTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsNlHostTimeStamp.setStatus('current')
xsSubnetStatsTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 8), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsSubnetStatsTimeStamp.setStatus('current')
xsActiveApplications = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 2, 9))
xsActiveApplicationsBitMask = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 9, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(128, 128)).setFixedLength(128)).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsActiveApplicationsBitMask.setStatus('current')
xsActiveApplicationsTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 2, 9, 2), )
if mibBuilder.loadTexts: xsActiveApplicationsTable.setStatus('current')
xsActiveApplicationsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 2, 9, 2, 1), ).setIndexNames((0, "SMON2-MIB", "xsActiveApplicationsIndex"))
if mibBuilder.loadTexts: xsActiveApplicationsEntry.setStatus('current')
xsActiveApplicationsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 9, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: xsActiveApplicationsIndex.setStatus('current')
xsActiveApplicationsPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 2, 9, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: xsActiveApplicationsPkts.setStatus('current')
xsSmonStatus = MibScalar((1, 3, 6, 1, 4, 1, 81, 30, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("operate", 1), ("paused", 2))).clone('paused')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xsSmonStatus.setStatus('current')
drSmon = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 4))
drSmonConfiguration = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 4, 1))
drSmonControlTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1), )
if mibBuilder.loadTexts: drSmonControlTable.setStatus('current')
drSmonControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonControlModuleID"))
if mibBuilder.loadTexts: drSmonControlEntry.setStatus('current')
drSmonControlModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonControlModuleID.setStatus('current')
drSmonControlRowAddressAutoLearnMode = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("notSupported", 255)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: drSmonControlRowAddressAutoLearnMode.setStatus('current')
drSmonControlRoutedPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonControlRoutedPackets.setStatus('current')
drSmonControlProtocolDistStatsTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonControlProtocolDistStatsTimeStamp.setStatus('current')
drSmonControlMatrixRows = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonControlMatrixRows.setStatus('current')
drSmonControlMatrixCols = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonControlMatrixCols.setStatus('current')
drSmonEntityPlacementTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2), )
if mibBuilder.loadTexts: drSmonEntityPlacementTable.setStatus('current')
drSmonEntityPlacementEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonEntityPlacementModuleID"), (0, "SMON2-MIB", "drSmonEntityPlacementIndex"))
if mibBuilder.loadTexts: drSmonEntityPlacementEntry.setStatus('current')
drSmonEntityPlacementModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonEntityPlacementModuleID.setStatus('current')
drSmonEntityPlacementIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: drSmonEntityPlacementIndex.setStatus('current')
drSmonEntityPlacementAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonEntityPlacementAddress.setStatus('current')
drSmonEntityPlacementMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonEntityPlacementMask.setStatus('current')
drSmonEntityPlacementType = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("empty", 1), ("autoLearn", 2), ("filter", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonEntityPlacementType.setStatus('current')
drSmonProtocolDir = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 4, 2))
drSmonProtocolDirLCTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 1), )
if mibBuilder.loadTexts: drSmonProtocolDirLCTable.setStatus('current')
drSmonProtocolDirLCEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 1, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonProtocolDirLCModuleID"))
if mibBuilder.loadTexts: drSmonProtocolDirLCEntry.setStatus('current')
drSmonProtocolDirLCModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonProtocolDirLCModuleID.setStatus('current')
drSmonProtocolDirLCLastChange = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 1, 1, 2), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonProtocolDirLCLastChange.setStatus('current')
drSmonProtocolDirTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2), )
if mibBuilder.loadTexts: drSmonProtocolDirTable.setStatus('current')
drSmonProtocolDirEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonProtocolDirModuleID"), (0, "SMON2-MIB", "drSmonProtocolDirID"), (0, "SMON2-MIB", "drSmonProtocolDirParameters"))
if mibBuilder.loadTexts: drSmonProtocolDirEntry.setStatus('current')
drSmonProtocolDirModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonProtocolDirModuleID.setStatus('current')
drSmonProtocolDirID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 2), OctetString())
if mibBuilder.loadTexts: drSmonProtocolDirID.setStatus('current')
drSmonProtocolDirParameters = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 3), OctetString())
if mibBuilder.loadTexts: drSmonProtocolDirParameters.setStatus('current')
drSmonProtocolDirLocalIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonProtocolDirLocalIndex.setStatus('current')
drSmonProtocolDirDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirDescr.setStatus('current')
drSmonProtocolDirType = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 6), Bits().clone(namedValues=NamedValues(("extensible", 0), ("addressRecognitionCapable", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonProtocolDirType.setStatus('current')
drSmonProtocolDirAddressMapConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSupported", 1), ("supportedOff", 2), ("supportedOn", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirAddressMapConfig.setStatus('current')
drSmonProtocolDirHostConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSupported", 1), ("supportedOff", 2), ("supportedOn", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirHostConfig.setStatus('current')
drSmonProtocolDirMatrixConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSupported", 1), ("supportedOff", 2), ("supportedOn", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirMatrixConfig.setStatus('current')
drSmonProtocolDirOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 10), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirOwner.setStatus('current')
drSmonProtocolDirStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 2, 2, 1, 11), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonProtocolDirStatus.setStatus('current')
drSmonFilter = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 4, 3))
drSmonFilterTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1), )
if mibBuilder.loadTexts: drSmonFilterTable.setStatus('current')
drSmonFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonFilterModuleID"), (0, "SMON2-MIB", "drSmonFilterIndex"))
if mibBuilder.loadTexts: drSmonFilterEntry.setStatus('current')
drSmonFilterModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonFilterModuleID.setStatus('current')
drSmonFilterIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: drSmonFilterIndex.setStatus('current')
drSmonFilterAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1, 3), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonFilterAddress.setStatus('current')
drSmonFilterMask = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonFilterMask.setStatus('current')
drSmonFilterStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 3, 1, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: drSmonFilterStatus.setStatus('current')
drSmonActiveApplications = MibIdentifier((1, 3, 6, 1, 4, 1, 81, 30, 4, 4))
drSmonActiveApplicationsTable = MibTable((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1), )
if mibBuilder.loadTexts: drSmonActiveApplicationsTable.setStatus('current')
drSmonActiveApplicationsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1, 1), ).setIndexNames((0, "SMON2-MIB", "drSmonActiveApplicationsModuleID"), (0, "SMON2-MIB", "drSmonActiveApplicationsType"), (0, "SMON2-MIB", "drSmonActiveApplicationsSubType"))
if mibBuilder.loadTexts: drSmonActiveApplicationsEntry.setStatus('current')
drSmonActiveApplicationsModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: drSmonActiveApplicationsModuleID.setStatus('current')
drSmonActiveApplicationsType = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ethertype", 1), ("ipProtocol", 2), ("udpProtocol", 3), ("tcpProtocol", 4))))
if mibBuilder.loadTexts: drSmonActiveApplicationsType.setStatus('current')
drSmonActiveApplicationsSubType = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1, 1, 3), Integer32())
if mibBuilder.loadTexts: drSmonActiveApplicationsSubType.setStatus('current')
drSmonActiveApplicationsPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 81, 30, 4, 4, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: drSmonActiveApplicationsPkts.setStatus('current')
mibBuilder.exportSymbols("SMON2-MIB", xsSubnetMatrixDSSourceAddress=xsSubnetMatrixDSSourceAddress, xsActiveApplications=xsActiveApplications, drSmonProtocolDir=drSmonProtocolDir, drSmonControlRoutedPackets=drSmonControlRoutedPackets, xsFilter=xsFilter, xsHostTopNControlStatus=xsHostTopNControlStatus, xsSmon=xsSmon, drSmonProtocolDirHostConfig=drSmonProtocolDirHostConfig, drSmonFilter=drSmonFilter, xsSubnetMatrixControlMaxDesiredEntries=xsSubnetMatrixControlMaxDesiredEntries, xsSubnetMatrixDSSourceMask=xsSubnetMatrixDSSourceMask, xsSubnetControlEntry=xsSubnetControlEntry, xsSubnetEntry=xsSubnetEntry, xsSubnetMatrixSDTable=xsSubnetMatrixSDTable, drSmonConfiguration=drSmonConfiguration, drSmonControlMatrixRows=drSmonControlMatrixRows, drSmonProtocolDirStatus=drSmonProtocolDirStatus, xsHostFilterEntry=xsHostFilterEntry, drSmonControlModuleID=drSmonControlModuleID, drSmonEntityPlacementEntry=drSmonEntityPlacementEntry, xsSubnetControlTable=xsSubnetControlTable, drSmonProtocolDirAddressMapConfig=drSmonProtocolDirAddressMapConfig, drSmonActiveApplications=drSmonActiveApplications, drSmonEntityPlacementAddress=drSmonEntityPlacementAddress, xsSubnetControlMaxDesiredEntries=xsSubnetControlMaxDesiredEntries, xsSubnetAddress=xsSubnetAddress, xsSubnetMask=xsSubnetMask, drSmonProtocolDirID=drSmonProtocolDirID, drSmonProtocolDirModuleID=drSmonProtocolDirModuleID, drSmonControlEntry=drSmonControlEntry, drSmonActiveApplicationsSubType=drSmonActiveApplicationsSubType, drSmonEntityPlacementIndex=drSmonEntityPlacementIndex, drSmonProtocolDirMatrixConfig=drSmonProtocolDirMatrixConfig, xsHostTopNControlEntry=xsHostTopNControlEntry, drSmonActiveApplicationsEntry=drSmonActiveApplicationsEntry, drSmonProtocolDirParameters=drSmonProtocolDirParameters, xsSubnetControlOwner=xsSubnetControlOwner, xsSubnetMatrixSDPkts=xsSubnetMatrixSDPkts, drSmonProtocolDirLCLastChange=drSmonProtocolDirLCLastChange, drSmonEntityPlacementTable=drSmonEntityPlacementTable, drSmonControlProtocolDistStatsTimeStamp=drSmonControlProtocolDistStatsTimeStamp, drSmonEntityPlacementModuleID=drSmonEntityPlacementModuleID, xsHostTopNControlTimeRemaining=xsHostTopNControlTimeRemaining, drSmonFilterTable=drSmonFilterTable, drSmonEntityPlacementType=drSmonEntityPlacementType, xsSubnetMatrixControlInserts=xsSubnetMatrixControlInserts, xsHostFilterIpSubnet=xsHostFilterIpSubnet, xsHostTopNControlHostIndex=xsHostTopNControlHostIndex, xsSubnetControlInserts=xsSubnetControlInserts, xsSubnetMatrixSDTimeMark=xsSubnetMatrixSDTimeMark, xsHostFilterTableClear=xsHostFilterTableClear, xsSubnetInPkts=xsSubnetInPkts, xsHostFilterType=xsHostFilterType, drSmon=drSmon, xsHostTopNControlGrantedSize=xsHostTopNControlGrantedSize, xsHostTopNControlRequestedSize=xsHostTopNControlRequestedSize, xsActiveApplicationsEntry=xsActiveApplicationsEntry, drSmonActiveApplicationsTable=drSmonActiveApplicationsTable, drSmonFilterAddress=drSmonFilterAddress, xsHostTopNProtocolDirLocalIndex=xsHostTopNProtocolDirLocalIndex, xsProtocolDistStatsTimeStamp=xsProtocolDistStatsTimeStamp, drSmonFilterModuleID=drSmonFilterModuleID, drSmonControlMatrixCols=drSmonControlMatrixCols, xsSubnetMatrixDSTable=xsSubnetMatrixDSTable, xsHostTopNControlIndex=xsHostTopNControlIndex, xsSubnet=xsSubnet, xsHostTopNControlOwner=xsHostTopNControlOwner, xsSubnetMatrixDSCreateTime=xsSubnetMatrixDSCreateTime, xsHostTopNIndex=xsHostTopNIndex, xsSubnetTimeMark=xsSubnetTimeMark, xsSubnetMatrixDSPkts=xsSubnetMatrixDSPkts, drSmonProtocolDirLCModuleID=drSmonProtocolDirLCModuleID, xsSubnetMatrixSDSourceMask=xsSubnetMatrixSDSourceMask, drSmonProtocolDirLCEntry=drSmonProtocolDirLCEntry, xsHostTopNControlDuration=xsHostTopNControlDuration, drSmonControlRowAddressAutoLearnMode=drSmonControlRowAddressAutoLearnMode, xsSubnetMatrixSDDestAddress=xsSubnetMatrixSDDestAddress, xsSubnetMatrixSDDestMask=xsSubnetMatrixSDDestMask, xsHostTopNNlAddress=xsHostTopNNlAddress, xsSubnetMatrixDSTimeMark=xsSubnetMatrixDSTimeMark, drSmonActiveApplicationsPkts=drSmonActiveApplicationsPkts, drSmonProtocolDirDescr=drSmonProtocolDirDescr, xsHostFilterIpMask=xsHostFilterIpMask, drSmonProtocolDirLocalIndex=drSmonProtocolDirLocalIndex, xsHostFilterStatus=xsHostFilterStatus, xsSubnetMatrixControlEntry=xsSubnetMatrixControlEntry, drSmonEntityPlacementMask=drSmonEntityPlacementMask, xsHostFilterIpxAddress=xsHostFilterIpxAddress, drSmonActiveApplicationsType=drSmonActiveApplicationsType, xsNlHostTimeStamp=xsNlHostTimeStamp, xsSubnetMatrixControlStatus=xsSubnetMatrixControlStatus, xsSubnetMatrixControlDataSource=xsSubnetMatrixControlDataSource, xsHostTopNControlStartTime=xsHostTopNControlStartTime, xsSubnetMatrixControlIndex=xsSubnetMatrixControlIndex, xsSubnetMatrixDSDestMask=xsSubnetMatrixDSDestMask, xsNumberOfProtocols=xsNumberOfProtocols, xsActiveApplicationsBitMask=xsActiveApplicationsBitMask, xsActiveApplicationsIndex=xsActiveApplicationsIndex, xsHostTopNEntry=xsHostTopNEntry, drSmonProtocolDirTable=drSmonProtocolDirTable, xsSubnetControlStatus=xsSubnetControlStatus, xsSubnetMatrixControlOwner=xsSubnetMatrixControlOwner, xsSubnetMatrixControlTable=xsSubnetMatrixControlTable, xsSmonStatus=xsSmonStatus, xsSubnetControlIndex=xsSubnetControlIndex,
drSmonFilterEntry=drSmonFilterEntry, drSmonProtocolDirEntry=drSmonProtocolDirEntry, drSmonFilterStatus=drSmonFilterStatus, xsHostTopN=xsHostTopN, xsSubnetControlDataSource=xsSubnetControlDataSource, xsSmonResourceAllocation=xsSmonResourceAllocation, drSmonProtocolDirLCTable=drSmonProtocolDirLCTable, drSmonFilterIndex=drSmonFilterIndex, xsSubnetMatrixSDSourceAddress=xsSubnetMatrixSDSourceAddress, xsSubnetMatrixSDCreateTime=xsSubnetMatrixSDCreateTime, xsHostTopNRate=xsHostTopNRate, xsHostFilterIpAddress=xsHostFilterIpAddress, xsSubnetOutPkts=xsSubnetOutPkts, xsSubnetMatrixControlDeletes=xsSubnetMatrixControlDeletes, drSmonProtocolDirOwner=drSmonProtocolDirOwner, xsSubnetMatrixDSDestAddress=xsSubnetMatrixDSDestAddress, xsSubnetControlDeletes=xsSubnetControlDeletes, xsSubnetCreateTime=xsSubnetCreateTime, xsActiveApplicationsTable=xsActiveApplicationsTable, drSmonControlTable=drSmonControlTable, xsHostTopNTable=xsHostTopNTable, drSmonProtocolDirType=drSmonProtocolDirType, drSmonActiveApplicationsModuleID=drSmonActiveApplicationsModuleID, xsSubnetTable=xsSubnetTable, drSmonFilterMask=drSmonFilterMask, xsActiveApplicationsPkts=xsActiveApplicationsPkts, xsSubnetStatsTimeStamp=xsSubnetStatsTimeStamp, xsSubnetMatrixDSEntry=xsSubnetMatrixDSEntry, xsHostTopNControlRateBase=xsHostTopNControlRateBase, xsSubnetMatrixSDEntry=xsSubnetMatrixSDEntry, xsHostFilterTable=xsHostFilterTable, xsHostTopNControlTable=xsHostTopNControlTable)
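A minimal sketch, not part of the generated module above: a pysnmp-compiled MIB like this SMON2-MIB file is normally loaded through pysnmp's MibBuilder, with the directory holding the compiled .py modules added to the source path (the path below is purely illustrative).

from pysnmp.smi import builder

mibBuilder = builder.MibBuilder()
# point the builder at the directory containing the compiled SMON2-MIB.py
mibBuilder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))
mibBuilder.loadModules('SMON2-MIB')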
130.684397
6,612
0.775324
3,899
36,853
7.328289
0.074891
0.012319
0.014384
0.019179
0.407308
0.368565
0.285129
0.258776
0.243342
0.237637
0
0.073163
0.080211
36,853
281
6,613
131.149466
0.769773
0.008466
0
0
0
0
0.102896
0.023651
0
0
0
0
0
1
0
false
0
0.032847
0
0.032847
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ad9a70fef5b367a97d03392ad628fb8575a45853
31,137
py
Python
tensorlayer/googlenet.py
yellowei/AI
594826348dfdfdde523622bb037ff6800b7b2367
[ "MIT" ]
null
null
null
tensorlayer/googlenet.py
yellowei/AI
594826348dfdfdde523622bb037ff6800b7b2367
[ "MIT" ]
null
null
null
tensorlayer/googlenet.py
yellowei/AI
594826348dfdfdde523622bb037ff6800b7b2367
[ "MIT" ]
null
null
null
""" GoogLeNet. Applying 'GoogLeNet' to Oxford's 17 Category Flower Dataset classification task. References: - Szegedy, Christian, et al. Going deeper with convolutions. - 17 Category Flower Dataset. Maria-Elena Nilsback and Andrew Zisserman. Links: - [GoogLeNet Paper](http://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Szegedy_Going_Deeper_With_2015_CVPR_paper.pdf) - [Flower Dataset (17)](http://www.robots.ox.ac.uk/~vgg/data/flowers/17/) """ from __future__ import division, print_function, absolute_import import tensorflow as tf import tensorlayer as tl import os from skimage import io, transform import numpy as np from log import * def rgb2gray(im): #rgb图转灰度图 if im.ndim == 2: return im return np.uint8(np.dot(im[..., :3], [0.299, 0.587, 0.114])) def ListFiles(dir,extension): file_list = [] for path, subdirs, files in os.walk(dir): for name in files: if name.endswith(extension): #将jpg图片文件全部全部存入file_list列表 file_list.extend([os.path.join(path, name)]) return file_list def LoadImageData(folder, extension, size): #将folder中后缀名为extension的图片文件转成大小为size的正方形矩阵 Log().info("getData, folder:" + folder) file_list = [] for path, subdirs, files in os.walk(folder): for name in files: if name.endswith(extension): #将jpg图片文件全部全部存入file_list列表 file_list.extend([os.path.join(path, name)]) #len(a):列表a长度 num = len(file_list) data = np.zeros([num, size, size,1]) for i in range(0,num): im = io.imread(file_list[i]) size0 = np.max(im.shape) scale = (size-1)/size0 #print(i," , ",file_list[i],", ndim: " + str(im.ndim)) x = rgb2gray(im) im = transform.rescale(x, scale) data[i, 0:im.shape[0], 0:im.shape[1],0] = im[:, :] if i%20 == 0: Log().info("getData, file index:" + str(i) + ",total:" + str(num)) return data def PicClassModel(x,inputSize): #图像分类的神经网络模型 # Define the neural network structure with tf.variable_scope("googleNet_1"): network = tl.layers.InputLayer(x, name='input_layer') conv1_7_7 = tl.layers.Conv2dLayer(network, act = tf.nn.relu, shape = [7, 7, 1, 64], strides = [1,2,2,1], padding='SAME', name='conv1_7_7_s2') pool1_3_3 = tl.layers.PoolLayer(conv1_7_7, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', pool=tf.nn.max_pool, name='pool1_3_3_m') pool1_3_3 = tl.layers.LocalResponseNormLayer(pool1_3_3,depth_radius=5, bias=1.0,alpha=0.0001, beta=0.75,name='pool1_3_3') conv2_3_3_reduce = tl.layers.Conv2dLayer(pool1_3_3, act = tf.nn.relu, shape = [1, 1, 64, 1], strides = [1,1,1,1], padding='SAME', name='conv2_3_3_reduce') conv2_3_3 = tl.layers.Conv2dLayer(conv2_3_3_reduce, act = tf.nn.relu, shape = [3, 3, 1, 192], strides = [1,1,1,1], padding='SAME', name='conv2_3_3') norm2_3_3 = tl.layers.LocalResponseNormLayer(conv2_3_3,depth_radius=5, bias=1.0,alpha=0.0001, beta=0.75,name='norm2_3_3') pool2_3_3 = tl.layers.PoolLayer(conv2_3_3, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', pool=tf.nn.max_pool, name='pool2_3_3_s2') inception_3a_1_1 = tl.layers.Conv2dLayer(pool2_3_3, act = tf.nn.relu, shape = [1, 1, 192, 64], strides = [1,1,1,1], padding='SAME', name='inception_3a_1_1') inception_3a_3_3_reduce = tl.layers.Conv2dLayer(pool2_3_3, act = tf.nn.relu, shape = [1, 1, 192, 96], strides = [1,1,1,1], padding='SAME', name='inception_3a_3_3_reduce') inception_3a_3_3 = tl.layers.Conv2dLayer(inception_3a_3_3_reduce, act = tf.nn.relu, shape = [3,3,96,128], strides = [1,1,1,1], padding='SAME', name='inception_3a_3_3') inception_3a_5_5_reduce = tl.layers.Conv2dLayer(pool2_3_3, act = tf.nn.relu, shape = [16,16,192,1], strides = [1,1,1,1], padding='SAME', name='inception_3a_5_5_reduce') inception_3a_5_5 
= tl.layers.Conv2dLayer(inception_3a_5_5_reduce, act = tf.nn.relu, shape = [32,32,1,5], strides = [1,1,1,1], padding='SAME', name='inception_3a_5_5') inception_3a_pool = tl.layers.PoolLayer(pool2_3_3, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_3a_pool') inception_3a_pool_1_1 = tl.layers.Conv2dLayer(inception_3a_pool, act = tf.nn.relu, shape = [32,32,192,1], strides = [1,1,1,1], padding='SAME', name='inception_3a_pool_1_1') # merge the inception_3a__ inception_3a_output = tl.layers.ConcatLayer([inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1],concat_dim=3) inception_3b_1_1 = tl.layers.Conv2dLayer(inception_3a_output, act = tf.nn.relu, shape = [128,128,198,1], strides = [1,1,1,1], padding='SAME', name='inception_3b_1_1') inception_3b_3_3_reduce = tl.layers.Conv2dLayer(inception_3a_output, act = tf.nn.relu, shape = [128,128,198,1], strides = [1,1,1,1], padding='SAME', name='inception_3b_3_3_reduce') inception_3b_3_3 = tl.layers.Conv2dLayer(inception_3b_3_3_reduce, act = tf.nn.relu, shape = [192,192,1,3], strides = [1,1,1,1], padding='SAME', name='inception_3b_3_3') inception_3b_5_5_reduce = tl.layers.Conv2dLayer(inception_3b_3_3_reduce, act = tf.nn.relu, shape = [32,32,1,1], strides = [1,1,1,1], padding='SAME', name='inception_3b_5_5_reduce') inception_3b_5_5 = tl.layers.Conv2dLayer(inception_3b_3_3_reduce, act = tf.nn.relu, shape = [96,96,1,5], strides = [1,1,1,1], padding='SAME', name='inception_3b_5_5') inception_3b_pool = tl.layers.PoolLayer(inception_3a_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_3b_pool') inception_3b_pool_1_1 = tl.layers.Conv2dLayer(inception_3b_pool, act = tf.nn.relu, shape = [64,64,198,1], strides = [1,1,1,1], padding='SAME', name='inception_3b_pool_1_1') #merge the inception_3b_* inception_3b_output = tl.layers.ConcatLayer([inception_3b_1_1, inception_3b_3_3, inception_3b_5_5, inception_3b_pool_1_1],concat_dim=3,name='inception_3b_output') pool3_3_3 = tl.layers.PoolLayer(inception_3b_output, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', pool=tf.nn.max_pool, name='pool3_3_3') inception_4a_1_1 = tl.layers.Conv2dLayer(pool3_3_3, act = tf.nn.relu, shape = [192,192,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4a_1_1') inception_4a_3_3_reduce = tl.layers.Conv2dLayer(pool3_3_3, act = tf.nn.relu, shape = [96,96,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4a_3_3_reduce') inception_4a_3_3 = tl.layers.Conv2dLayer(inception_4a_3_3_reduce, act = tf.nn.relu, shape = [208,208,1,3], strides = [1,1,1,1], padding='SAME', name='inception_4a_3_3') inception_4a_5_5_reduce = tl.layers.Conv2dLayer(pool3_3_3, act = tf.nn.relu, shape = [16,16,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4a_5_5_reduce') inception_4a_5_5 = tl.layers.Conv2dLayer(inception_4a_5_5_reduce, act = tf.nn.relu, shape = [48,48,1,5], strides = [1,1,1,1], padding='SAME', name='inception_4a_5_5') inception_4a_pool = tl.layers.PoolLayer(pool3_3_3, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_4a_pool') inception_4a_pool_1_1 = tl.layers.Conv2dLayer(inception_4a_pool, act = tf.nn.relu, shape = [64,64,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4a_pool_1_1') inception_4a_output = tl.layers.ConcatLayer([inception_4a_1_1, inception_4a_3_3, inception_4a_5_5, inception_4a_pool_1_1],concat_dim=3,name='inception_4a_output') inception_4b_1_1 = 
tl.layers.Conv2dLayer(inception_4a_output, act = tf.nn.relu, shape = [160,160,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4b_1_1') inception_4b_3_3_reduce = tl.layers.Conv2dLayer(inception_4a_output, act = tf.nn.relu, shape = [112,112,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4b_3_3_reduce') inception_4b_3_3 = tl.layers.Conv2dLayer(inception_4b_3_3_reduce, act = tf.nn.relu, shape = [224,224,1,3], strides = [1,1,1,1], padding='SAME', name='inception_4b_3_3') inception_4b_5_5_reduce = tl.layers.Conv2dLayer(inception_4a_output, act = tf.nn.relu, shape = [24,24,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4b_5_5_reduce') inception_4b_5_5 = tl.layers.Conv2dLayer(inception_4b_5_5_reduce, act = tf.nn.relu, shape = [64,64,1,5], strides = [1,1,1,1], padding='SAME', name='inception_4b_5_5') inception_4b_pool = tl.layers.PoolLayer(inception_4a_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_4b_pool') inception_4b_pool_1_1 = tl.layers.Conv2dLayer(inception_4b_pool, act = tf.nn.relu, shape = [64,64,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4b_pool_1_1') inception_4b_output = tl.layers.ConcatLayer([inception_4b_1_1, inception_4b_3_3, inception_4b_5_5, inception_4b_pool_1_1],concat_dim=3,name='inception_4b_output') inception_4c_1_1 = tl.layers.Conv2dLayer(inception_4b_output, act = tf.nn.relu, shape = [128,128,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4c_1_1') inception_4c_3_3_reduce = tl.layers.Conv2dLayer(inception_4b_output, act = tf.nn.relu, shape = [128,128,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4c_3_3_reduce') inception_4c_3_3 = tl.layers.Conv2dLayer(inception_4c_3_3_reduce, act = tf.nn.relu, shape = [256,256,1,3], strides = [1,1,1,1], padding='SAME', name='inception_4c_3_3') inception_4c_5_5_reduce = tl.layers.Conv2dLayer(inception_4b_output, act = tf.nn.relu, shape = [24,24,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4c_5_5_reduce') inception_4c_5_5 = tl.layers.Conv2dLayer(inception_4c_5_5_reduce, act = tf.nn.relu, shape = [64,64,1,5], strides = [1,1,1,1], padding='SAME', name='inception_4c_5_5') inception_4c_pool = tl.layers.PoolLayer(inception_4b_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_4c_pool') inception_4c_pool_1_1 = tl.layers.Conv2dLayer(inception_4c_pool, act = tf.nn.relu, shape = [64,64,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4c_pool_1_1') inception_4c_output = tl.layers.ConcatLayer([inception_4c_1_1, inception_4c_3_3, inception_4c_5_5, inception_4c_pool_1_1],concat_dim=3,name='inception_4c_output') inception_4d_1_1 = tl.layers.Conv2dLayer(inception_4c_output, act = tf.nn.relu, shape = [112,112,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4d_1_1') inception_4d_3_3_reduce = tl.layers.Conv2dLayer(inception_4c_output, act = tf.nn.relu, shape = [144,144,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4d_3_3_reduce') inception_4d_3_3 = tl.layers.Conv2dLayer(inception_4d_3_3_reduce, act = tf.nn.relu, shape = [288,288,1,3], strides = [1,1,1,1], padding='SAME', name='inception_4d_3_3') inception_4d_5_5_reduce = tl.layers.Conv2dLayer(inception_4c_output, act = tf.nn.relu, shape = [32,32,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4d_5_5_reduce') inception_4d_5_5 = tl.layers.Conv2dLayer(inception_4d_5_5_reduce, act = tf.nn.relu, shape = [64,64,1,5], strides = [1,1,1,1], padding='SAME', 
name='inception_4d_5_5') inception_4d_pool = tl.layers.PoolLayer(inception_4c_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_4d_pool') inception_4d_pool_1_1 = tl.layers.Conv2dLayer(inception_4d_pool, act = tf.nn.relu, shape = [64,64,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4d_pool_1_1') inception_4d_output = tl.layers.ConcatLayer([inception_4d_1_1, inception_4d_3_3, inception_4d_5_5, inception_4d_pool_1_1],concat_dim=3,name='inception_4d_output') inception_4e_1_1 = tl.layers.Conv2dLayer(inception_4d_output, act = tf.nn.relu, shape = [256,256,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4e_1_1') inception_4e_3_3_reduce = tl.layers.Conv2dLayer(inception_4d_output, act = tf.nn.relu, shape = [160,160,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4e_3_3_reduce') inception_4e_3_3 = tl.layers.Conv2dLayer(inception_4e_3_3_reduce, act = tf.nn.relu, shape = [320,320,1,3], strides = [1,1,1,1], padding='SAME', name='inception_4e_3_3') inception_4e_5_5_reduce = tl.layers.Conv2dLayer(inception_4d_output, act = tf.nn.relu, shape = [32,32,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4e_5_5_reduce') inception_4e_5_5 = tl.layers.Conv2dLayer(inception_4e_5_5_reduce, act = tf.nn.relu, shape = [128,128,1,5], strides = [1,1,1,1], padding='SAME', name='inception_4e_5_5') inception_4e_pool = tl.layers.PoolLayer(inception_4d_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_4e_pool') inception_4e_pool_1_1 = tl.layers.Conv2dLayer(inception_4e_pool, act = tf.nn.relu, shape = [128,128,10,1], strides = [1,1,1,1], padding='SAME', name='inception_4e_pool_1_1') inception_4e_output = tl.layers.ConcatLayer([inception_4e_1_1, inception_4e_3_3, inception_4e_5_5, inception_4e_pool_1_1],concat_dim=3,name='inception_4e_output') pool4_3_3 = tl.layers.PoolLayer(inception_4e_output, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', pool=tf.nn.max_pool, name='pool4_3_3') inception_5a_1_1 = tl.layers.Conv2dLayer(pool4_3_3, act = tf.nn.relu, shape = [256,256,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5a_1_1') inception_5a_3_3_reduce = tl.layers.Conv2dLayer(pool4_3_3, act = tf.nn.relu, shape = [160,160,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5a_3_3_reduce') inception_5a_3_3 = tl.layers.Conv2dLayer(inception_5a_3_3_reduce, act = tf.nn.relu, shape = [320,320,1,3], strides = [1,1,1,1], padding='SAME', name='inception_5a_3_3') inception_5a_5_5_reduce = tl.layers.Conv2dLayer(pool4_3_3, act = tf.nn.relu, shape = [32,32,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5a_5_5_reduce') inception_5a_5_5 = tl.layers.Conv2dLayer(inception_5a_5_5_reduce, act = tf.nn.relu, shape = [128,128,1,5], strides = [1,1,1,1], padding='SAME', name='inception_5a_5_5') inception_5a_pool = tl.layers.PoolLayer(pool4_3_3, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_5a_pool') inception_5a_pool_1_1 = tl.layers.Conv2dLayer(inception_5a_pool, act = tf.nn.relu, shape = [128,128,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5a_pool_1_1') inception_5a_output = tl.layers.ConcatLayer([inception_5a_1_1, inception_5a_3_3, inception_5a_5_5, inception_5a_pool_1_1],concat_dim=3,name='inception_5a_output') inception_5b_1_1 = tl.layers.Conv2dLayer(inception_5a_output, act = tf.nn.relu, shape = [384,384,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5b_1_1') 
inception_5b_3_3_reduce = tl.layers.Conv2dLayer(inception_5a_output, act = tf.nn.relu, shape = [192,192,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5b_3_3_reduce') inception_5b_3_3 = tl.layers.Conv2dLayer(inception_5b_3_3_reduce, act = tf.nn.relu, shape = [384,384,1,3], strides = [1,1,1,1], padding='SAME', name='inception_5b_3_3') inception_5b_5_5_reduce = tl.layers.Conv2dLayer(inception_5a_output, act = tf.nn.relu, shape = [48,48,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5b_5_5_reduce') inception_5b_5_5 = tl.layers.Conv2dLayer(inception_5b_5_5_reduce, act = tf.nn.relu, shape = [128,128,1,5], strides = [1,1,1,1], padding='SAME', name='inception_5b_5_5') inception_5b_pool = tl.layers.PoolLayer(inception_5a_output, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.max_pool, name='inception_5b_pool') inception_5b_pool_1_1 = tl.layers.Conv2dLayer(inception_5b_pool, act = tf.nn.relu, shape = [128,128,10,1], strides = [1,1,1,1], padding='SAME', name='inception_5b_pool_1_1') inception_5b_output = tl.layers.ConcatLayer([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1],concat_dim=3,name='inception_5b_output') pool5_7_7 = tl.layers.PoolLayer(inception_5b_output, ksize=[1, 7, 7, 1], strides=[1, 1, 1, 1], padding='SAME', pool=tf.nn.avg_pool, name='pool5_7_7') #防止CNN过拟合 #当迭代次数增多的时候,可能出现网络对训练集拟合的很好(在训练集上loss很小),但是对验证集的拟合程度很差的情况 gnet = tl.layers.DropoutLayer(pool5_7_7, keep =0.4,name='dropout5_7_7') print(gnet.outputs._shape) #全连接 负责对网络最终输出的特征进行分类预测,得出分类结果 gnet = tl.layers.FlattenLayer(gnet, name='flatten_layer') print(gnet.outputs._shape) gnet = tl.layers.DenseLayer(gnet,n_units=4,act = tf.identity,name='output_layer') print(gnet.outputs._shape) return gnet
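A hedged usage sketch, not part of the original file: wiring the helpers above together under the TensorFlow 1.x / TensorLayer 1.x APIs the file already uses. The folder path and image size are illustrative assumptions.

# illustrative only: build the model on a grayscale 224x224 input batch
x = tf.placeholder(tf.float32, shape=[None, 224, 224, 1], name='x')
net = PicClassModel(x, inputSize=224)
# load training images from a hypothetical dataset folder
data = LoadImageData('17flowers/', '.jpg', 224)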
60.343023
170
0.381026
2,955
31,137
3.723858
0.075127
0.046529
0.035987
0.059978
0.795165
0.715649
0.658488
0.598328
0.506997
0.495002
0
0.109657
0.531394
31,137
516
171
60.343023
0.644507
0.026464
0
0.555785
0
0
0.060441
0.019905
0
0
0
0
0
1
0.008264
false
0
0.014463
0
0.033058
0.008264
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
a8de9a4d1a76af0c606999504bb3f8bc1cc7ddbd
868
py
Python
club/middleware.py
dimabory/vas3k.club
178154a8d6d2925fb392599d65da3e60082c8f37
[ "MIT" ]
1
2021-04-12T13:38:41.000Z
2021-04-12T13:38:41.000Z
club/middleware.py
dimabory/vas3k.club
178154a8d6d2925fb392599d65da3e60082c8f37
[ "MIT" ]
null
null
null
club/middleware.py
dimabory/vas3k.club
178154a8d6d2925fb392599d65da3e60082c8f37
[ "MIT" ]
null
null
null
from django.shortcuts import render

from auth.helpers import authorized_user_with_session
from club.exceptions import ClubException


def me(get_response):
    def middleware(request):
        request.me, request.my_session = authorized_user_with_session(request)
        response = get_response(request)
        return response

    return middleware


class ExceptionMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        return response

    def process_exception(self, request, exception):
        if isinstance(exception, ClubException):
            return render(
                request,
                "error.html",
                {"title": exception.title, "message": exception.message},
                status=400,
            )
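A minimal sketch, not from the original file: Django activates middleware like the `me` factory and `ExceptionMiddleware` above by listing their dotted paths in settings.MIDDLEWARE; the paths below assume the module lives at club/middleware.py.

MIDDLEWARE = [
    "django.middleware.common.CommonMiddleware",
    "club.middleware.me",                   # function-style middleware factory
    "club.middleware.ExceptionMiddleware",  # class-style middleware
]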
25.529412
78
0.657834
89
868
6.168539
0.41573
0.120219
0.081967
0.091075
0.116576
0
0
0
0
0
0
0.004732
0.269585
868
33
79
26.30303
0.861199
0
0
0.086957
0
0
0.025346
0
0
0
0
0
0
1
0.217391
false
0
0.130435
0
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
a8e38ded37ea0162533cc3250c84122b3d0b316f
600
py
Python
applications/qa/search.py
aliyun/alibabacloud-AnalyticDB-python-demo-AI
84ac16aea20841561375cff982d4eb7e2fd2fb04
[ "MIT" ]
42
2020-06-28T03:51:14.000Z
2021-10-22T03:04:11.000Z
applications/qa/search.py
aliyun/alibabacloud-AnalyticDB-python-demo-AI-
84ac16aea20841561375cff982d4eb7e2fd2fb04
[ "MIT" ]
9
2020-07-13T10:53:51.000Z
2022-03-12T00:45:59.000Z
applications/qa/search.py
aliyun/alibabacloud-AnalyticDB-python-demo-AI-
84ac16aea20841561375cff982d4eb7e2fd2fb04
[ "MIT" ]
8
2020-06-28T05:50:30.000Z
2022-02-11T06:41:19.000Z
# coding: utf-8
import requests
import json
import base64
import os
import random
import uuid
import multiprocessing
import traceback


def search(q, url):
    try:
        data = {
            'question': q
        }
        response = requests.post(url, data)
        for r in json.loads(response.content)['result']:
            print(r[0], r[2])
    except:
        traceback.print_exc()


image_list_path = 'qa_data.json'
url = 'http://0.0.0.0:8004/qa/search'
q = u'如何提升网络带宽'  # sample query: "How do I increase network bandwidth?"
search(q, url)
response = requests.get('http://0.0.0.0:8004/qa/get_all_questions')
22.222222
67
0.61
86
600
4.186047
0.511628
0.033333
0.033333
0.038889
0.077778
0.077778
0.077778
0
0
0
0
0.049327
0.256667
600
26
68
23.076923
0.757848
0.021667
0
0
0
0
0.184615
0
0
0
0
0
0
0
null
null
0
0.347826
null
null
0.086957
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
a8ec7e2486df413a0bcdcab5a1da0476776ad5e5
5,867
py
Python
cellular-automata/cellular_automata/automata_recorder.py
enthought/ets-examples
3b2894bfe25c9f57f76996bd5818d1f0cdc77e8a
[ "Apache-2.0", "BSD-3-Clause" ]
3
2017-07-22T11:04:20.000Z
2018-10-01T15:30:16.000Z
cellular-automata/cellular_automata/automata_recorder.py
enthought/ets-examples
3b2894bfe25c9f57f76996bd5818d1f0cdc77e8a
[ "Apache-2.0", "BSD-3-Clause" ]
11
2018-01-18T17:06:59.000Z
2019-12-17T12:08:42.000Z
cellular-automata/cellular_automata/automata_recorder.py
enthought/ets-examples
3b2894bfe25c9f57f76996bd5818d1f0cdc77e8a
[ "Apache-2.0", "BSD-3-Clause" ]
1
2018-12-28T14:15:15.000Z
2018-12-28T14:15:15.000Z
# Copyright (c) 2017, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!

""" This module provides tools for recording information about a cellular
automata over time as it evolves. This includes the
:py:class:`AutomataRecorder` class which performs the actual recording of
values, and a collection of callables that transform states in useful ways.
"""

from functools import wraps

import numpy as np
from traits.api import Callable, HasStrictTraits, Instance, List, on_trait_change

from .cellular_automaton import CellularAutomaton


class AutomataRecorder(HasStrictTraits):
    """ An object that records changes to the states of a cellular automata.

    An optional :py:attr:`transform` function can be provided that will be
    used to compute derived values from the states (such as counts of
    different states) or only recording on certain time ticks.

    Recording happens on changes to the :py:attr:`automaton.ticks` value.
    If the :py:attr:`transform` trait is :py:obj:`None`, then the current
    value of the automaton's states will be added to the record. If the
    :py:attr:`transform` trait is not :py:obj:`None` then that will be called
    with the automaton passed to it as the only argument, and any
    non-:py:obj:`None` value that is returned will be added to the
    :py:attr:`record` list.
    """

    #: The CellularAutomaton to record.
    automaton = Instance(CellularAutomaton)

    #: The record of states.
    record = List

    #: A function to call to compute the value to record. This should accept
    #: a single CellularAutomaton as an argument and return an arbitrary value.
    transform = Callable

    # ------------------------------------------------------------------------
    # AutomataRecorder interface
    # ------------------------------------------------------------------------

    def as_array(self):
        """ Return the record as a single stacked array.

        This presumes that the recorded values are all arrays with compatible
        shapes to be stacked.
        """
        return np.stack(self.record)

    # ------------------------------------------------------------------------
    # object interface
    # ------------------------------------------------------------------------

    def __init__(self, automaton=None, **traits):
        super(AutomataRecorder, self).__init__(**traits)
        # ensure that the automaton is set _after_ everything is set up
        # this means in particular that we get first state if it is not None.
        if automaton is not None:
            self.automaton = automaton

    # ------------------------------------------------------------------------
    # Private interface
    # ------------------------------------------------------------------------

    def _record(self):
        """ Record the (possibly transformed) states.

        If the :py:attr:`transform` trait is not :py:obj:`None` then that
        will be called with the automaton passed to it as the only argument,
        and any non-:py:obj:`None` value that is returned will be added to
        the :py:attr:`record` list.

        Subclasses that want to do something more sophisticated can override
        this method.
        """
        if self.automaton is None or self.automaton.states is None:
            return
        if self.transform is not None:
            value = self.transform(self.automaton)
        else:
            value = self.automaton.states
        if value is not None:
            self.record.append(value)

    # Trait change handlers --------------------------------------------------

    @on_trait_change('automaton:tick')
    def _time_updated(self):
        if self.automaton.tick == -1:
            # automaton was reset, dump
            self.record = []
        else:
            self._record()

    @on_trait_change('automaton')
    def _automaton_updated(self, automaton):
        # reset the record for the new automaton
        self.record = []
        self._record()


def count_states(automaton):
    """ A function that counts the unique states of the automata.

    This is suitable for use as the :py:attr:`transform` of an
    :py:class:`AutomataRecorder`.

    Parameters
    ----------
    automaton : CellularAutomaton
        The cellular automaton being analyzed.

    Returns
    -------
    counts : array
        A 1D array of size 256 containing the counts of each value.
    """
    states = automaton.states
    uniques, counts = np.unique(states, return_counts=True)
    full_counts = np.zeros(256, dtype=int)
    full_counts[uniques] = counts
    return full_counts


def call_if(test):
    """ Decorator factory that records automaton state only if test is True.

    Parameters
    ----------
    test : callable
        A callable that takes an automaton as input and returns a bool.

    Returns
    -------
    decorator : function
        The decorator that wraps the function with the test.
    """
    def decorator(fn):
        """ Decorator that records automaton state only if test is True. """
        @wraps(fn)
        def f(automaton):
            if test(automaton):
                return fn(automaton)
            return None
        return f
    return decorator


def every_nth(n):
    """ Decorator factory that records automaton state every nth tick. """
    def is_nth(automaton):
        return automaton.tick % n == 0
    return call_if(is_nth)
32.960674
82
0.605761
708
5,867
4.970339
0.298023
0.011367
0.017903
0.02046
0.151748
0.128161
0.104859
0.097187
0.097187
0.097187
0
0.002923
0.241861
5,867
177
83
33.146893
0.788219
0.618885
0
0.115385
0
0
0.011985
0
0
0
0
0
0
1
0.211538
false
0
0.076923
0.019231
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d103f2911af32ac4be2501dfd6e7d61b700508dc
295
py
Python
fermentor/src/WebService/FermentorsWebService.py
vortexau/OpenBrew-Py
c6ab11ed5bc45fc2776ffbfc140bf3a6113c9445
[ "Beerware" ]
1
2015-11-23T11:25:03.000Z
2015-11-23T11:25:03.000Z
fermentor/src/WebService/FermentorsWebService.py
vortexau/OpenBrew-Py
c6ab11ed5bc45fc2776ffbfc140bf3a6113c9445
[ "Beerware" ]
null
null
null
fermentor/src/WebService/FermentorsWebService.py
vortexau/OpenBrew-Py
c6ab11ed5bc45fc2776ffbfc140bf3a6113c9445
[ "Beerware" ]
null
null
null
import cherrypy


class FermentorsWebService:
    def __init__(self, dbconn):
        self.dbconn = dbconn

    @cherrypy.expose
    @cherrypy.tools.accept(media='application/json')
    @cherrypy.tools.json_out()
    def index(self, fermentor=0):
        return 'Data for fermentor', fermentor
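A minimal sketch, assuming the standard CherryPy entry point (not in the original file): mounting the service at the site root; passing None for dbconn is purely illustrative.

if __name__ == '__main__':
    # serve FermentorsWebService.index at GET /
    cherrypy.quickstart(FermentorsWebService(dbconn=None), '/')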
21.071429
52
0.688136
33
295
6
0.636364
0.10101
0
0
0
0
0
0
0
0
0
0.004274
0.20678
295
13
53
22.692308
0.84188
0
0
0
0
0
0.115646
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0.111111
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
d1103caab3a22733b2f1fc56312a71ece7fa4617
281
py
Python
main.py
daniilpeshkov/OPI_labs
42ab08def8195c6ff76f845d783aa13ef10158fb
[ "MIT" ]
null
null
null
main.py
daniilpeshkov/OPI_labs
42ab08def8195c6ff76f845d783aa13ef10158fb
[ "MIT" ]
1
2022-03-30T12:49:33.000Z
2022-03-30T13:00:00.000Z
main.py
daniilpeshkov/OPI_labs
42ab08def8195c6ff76f845d783aa13ef10158fb
[ "MIT" ]
null
null
null
import requests
import time


def get_usd_exchange():
    data = requests.get('https://www.cbr-xml-daily.ru/daily_json.js').json()
    return float(data['Valute']['USD']['Value'])


def main():
    print('Dollar exchange rate:', get_usd_exchange())


if __name__ == '__main__':
    main()
18.733333
76
0.658363
39
281
4.410256
0.666667
0.069767
0.162791
0
0
0
0
0
0
0
0
0
0.153025
281
14
77
20.071429
0.722689
0
0
0
0
0
0.274021
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.555556
0.111111
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d1311da645ec9e5a43667e92b197fce085cbb3b1
2,680
py
Python
braille_experience/braille_translators/mapBrailleToAlpha.py
firekim2/testserver
0ebd7be0e254fa825dad8be314fbfcb7b03f7a30
[ "MIT" ]
null
null
null
braille_experience/braille_translators/mapBrailleToAlpha.py
firekim2/testserver
0ebd7be0e254fa825dad8be314fbfcb7b03f7a30
[ "MIT" ]
null
null
null
braille_experience/braille_translators/mapBrailleToAlpha.py
firekim2/testserver
0ebd7be0e254fa825dad8be314fbfcb7b03f7a30
[ "MIT" ]
null
null
null
# Contains dictionaries that map braille to English letters.

letters = {chr(10241): 'a', chr(10243): 'b', chr(10249): 'c', chr(10265): 'd', chr(10257): 'e',
           chr(10251): 'f', chr(10267): 'g', chr(10259): 'h', chr(10250): 'i', chr(10266): 'j',
           chr(10245): 'k', chr(10247): 'l', chr(10253): 'm', chr(10269): 'n', chr(10261): 'o',
           chr(10255): 'p', chr(10271): 'q', chr(10263): 'r', chr(10254): 's', chr(10270): 't',
           chr(10277): 'u', chr(10279): 'v', chr(10298): 'w', chr(10285): 'x', chr(10301): 'y',
           chr(10293): 'z'}

contractions = {chr(10243): 'but', chr(10249): 'can', chr(10265): 'do', chr(10257): 'every',
                chr(10251): 'from', chr(10267): 'go', chr(10259): 'have', chr(10266): 'just',
                chr(10280): 'knowledge', chr(10296): 'like', chr(10253): 'more', chr(10269): 'not',
                chr(10255): 'people', chr(10271): 'quite', chr(10263): 'rather', chr(10254): 'so',
                chr(10270): 'that', chr(10277): 'us', chr(10279): 'very', chr(10285): 'it',
                chr(10301): 'you', chr(10293): 'as', chr(10287): 'and', chr(10303): 'for',
                chr(10295): 'of', chr(10286): 'the', chr(10302): 'with', chr(10298): 'will',
                chr(10278): 'his', chr(10260): 'in', chr(10292): 'was', chr(10262): 'to'}

punctuation = {chr(10242): ',', chr(10246): ';', chr(10258): ':', chr(10290): '.', chr(10262): '!',
               chr(10294): '()', chr(10278): '“', chr(10292): '”', chr(10252): '/', chr(10300): '#',
               chr(10244): '\'', chr(10276): '-'}

numbers = {chr(10241): '1', chr(10243): '2', chr(10249): '3', chr(10265): '4', chr(10257): '5',
           chr(10251): '6', chr(10267): '7', chr(10259): '8', chr(10250): '9', chr(10266): '0'}
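An illustrative example, not part of the original module: decoding a braille string cell-by-cell with the `letters` dictionary above.

word = chr(10249) + chr(10241) + chr(10243)     # braille cells for c, a, b
print(''.join(letters[cell] for cell in word))  # -> "cab"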
31.162791
61
0.326493
240
2,680
3.645833
0.516667
0.027429
0
0
0
0
0
0
0
0
0
0.297317
0.485448
2,680
85
62
31.529412
0.337201
0.021642
0
0
0
0
0.063511
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d13745c8ebc99cb12665ec5074d1140a16727787
346
py
Python
tds/request/sql_batch.py
by46/geek
04b08d0dff80c524bd471ead3fe524423eebf123
[ "MIT" ]
null
null
null
tds/request/sql_batch.py
by46/geek
04b08d0dff80c524bd471ead3fe524423eebf123
[ "MIT" ]
null
null
null
tds/request/sql_batch.py
by46/geek
04b08d0dff80c524bd471ead3fe524423eebf123
[ "MIT" ]
null
null
null
from io import BytesIO

from tds.tokens import SQLBatchStream
from .base import Request


class SQLBatchRequest(Request):
    def __init__(self, buf):
        """
        :param BytesIO buf:
        """
        super(SQLBatchRequest, self).__init__()
        self.stream = stream = SQLBatchStream()
        stream.unmarshal(buf)
21.625
48
0.615607
34
346
6.029412
0.558824
0.078049
0
0
0
0
0
0
0
0
0
0
0.294798
346
15
49
23.066667
0.840164
0.054913
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d14d2f478c64d7d2310b6766069f4de62f52485f
54
py
Python
src/tensor/tensor/shape/__init__.py
jedhsu/tensor
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
[ "Apache-2.0" ]
null
null
null
src/tensor/tensor/shape/__init__.py
jedhsu/tensor
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
[ "Apache-2.0" ]
null
null
null
src/tensor/tensor/shape/__init__.py
jedhsu/tensor
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
[ "Apache-2.0" ]
null
null
null
from ._shape import Shape

__all__ = [
    "Shape",
]
9
25
0.611111
6
54
4.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.259259
54
5
26
10.8
0.7
0
0
0
0
0
0.092593
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d173091ef1fe16743c30cf15562c759ebcada11a
2,205
py
Python
personal/models.py
JamesMutahi/personal-gallery
05ddec214c397cfdd16e5b029c315690cc4e9257
[ "MIT" ]
1
2019-07-18T11:03:08.000Z
2019-07-18T11:03:08.000Z
personal/models.py
JamesMutahi/personal-gallery
05ddec214c397cfdd16e5b029c315690cc4e9257
[ "MIT" ]
1
2021-06-10T21:44:49.000Z
2021-06-10T21:44:49.000Z
personal/models.py
JamesMutahi/personal-gallery
05ddec214c397cfdd16e5b029c315690cc4e9257
[ "MIT" ]
3
2019-08-26T07:00:05.000Z
2020-10-09T11:22:23.000Z
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models

Deafult_desc = '''Lorem ipsum dolor sit amet, agam probatus indoctum cu quo. Est eu quod rationibus, nam platonem sententiae no. Eu mel vero oporteat elaboraret.'''


class Category(models.Model):
    Name = models.CharField(max_length=30)

    @property
    def preview(self):
        first = self.images.last()
        return first.image.url

    def __str__(self):
        return self.Name


# Create your models here.
class Location(models.Model):
    location_name = models.CharField(max_length=255)

    def __str__(self):
        return self.location_name

    def save_location(self):
        self.save()

    def delete_location(self):
        self.delete()

    @classmethod
    def update_location(cls, id, value):
        cls.objects.filter(id=id).update(location_name=value)


class Image(models.Model):
    Name = models.CharField(max_length=60)
    description = models.TextField(default=Deafult_desc)
    category = models.ForeignKey(Category, related_name="images")
    location = models.ForeignKey(Location)
    submited = models.DateTimeField(auto_now_add=True)
    image = models.ImageField(upload_to='images/')

    @property
    def allinfo(self):
        info = {
            'name': self.Name,
            'desc': self.description,
            'image': self.image.url,
            'id': self.id,
        }
        return str(info)

    def save_image(self):
        self.save()

    def delete_image(self):
        self.delete()

    def update_image(self, Name=None, category=None):
        self.Name = Name if Name else self.Name
        self.category = category if category else self.category
        self.save()

    @classmethod
    def get_image_by_id(cls, id):
        return cls.objects.get(pk=id)

    @classmethod
    def search_by_category(cls, search_term):
        images = cls.objects.filter(category__icontains=search_term)
        return images

    @classmethod
    def filter_by_location(cls, search_term):
        locations = cls.objects.filter(location__location_name__icontains=search_term)
        return locations

    class Meta:
        ordering = ['submited']
25.056818
86
0.661678
271
2,205
5.202952
0.357934
0.028369
0.040426
0.046809
0.133333
0.055319
0.055319
0
0
0
0
0.004756
0.237188
2,205
87
87
25.344828
0.833532
0.020862
0
0.206349
0
0
0.083024
0
0
0
0
0
0
1
0.206349
false
0
0.031746
0.047619
0.539683
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
66faccf952a064caf3fc48f3ca13aedaf3e49932
212
py
Python
setup.py
MalteHB/stedsans
73c2bc9fe77038c80e196d2bb1d91e38c8f02e4f
[ "Apache-2.0" ]
2
2021-06-07T04:22:42.000Z
2021-06-07T06:46:10.000Z
setup.py
MalteHB/stedsans
73c2bc9fe77038c80e196d2bb1d91e38c8f02e4f
[ "Apache-2.0" ]
null
null
null
setup.py
MalteHB/stedsans
73c2bc9fe77038c80e196d2bb1d91e38c8f02e4f
[ "Apache-2.0" ]
null
null
null
import setuptools

with open("requirements.txt", encoding='utf-8') as f:
    requirements = f.read()

setuptools.setup(
    install_requires=requirements,
    keywords="Geospatial Analysis NLP Danish English",
)
21.2
54
0.735849
25
212
6.2
0.84
0
0
0
0
0
0
0
0
0
0
0.005556
0.150943
212
9
55
23.555556
0.855556
0
0
0
0
0
0.278302
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0f01c352df4624444a8fea82fcaa3e2a1fb58a3f
99
py
Python
Aug - Dec 2021 JOC Course/Week 5/New Week 5 Programming Assignment 2.py
Slow-Rain/NPTEL-The-Joy-of-Computing-using-Python
e4bd830ef7a3f171a14a88f94df626c766a7649b
[ "MIT" ]
29
2021-01-25T16:13:17.000Z
2022-03-23T16:34:06.000Z
Aug - Dec 2021 JOC Course/Week 5/New Week 5 Programming Assignment 2.py
Slow-Rain/NPTEL-The-Joy-of-Computing-using-Python
e4bd830ef7a3f171a14a88f94df626c766a7649b
[ "MIT" ]
12
2021-02-11T13:47:07.000Z
2021-10-10T04:26:45.000Z
Aug - Dec 2021 JOC Course/Week 5/New Week 5 Programming Assignment 2.py
Slow-Rain/NPTEL-The-Joy-of-Computing-using-Python
e4bd830ef7a3f171a14a88f94df626c766a7649b
[ "MIT" ]
30
2021-02-10T05:54:31.000Z
2022-02-25T11:24:36.000Z
lst = [int(x) for x in input().split()]
k = int(input())
lst.sort()
print(lst[-k], end="")
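A worked example, illustrative only: after an ascending sort, the k-th largest element sits at index -k.

vals = [3, 1, 4, 1, 5]
vals.sort()          # -> [1, 1, 3, 4, 5]
print(vals[-2])      # -> 4, the 2nd largest value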
14.142857
40
0.515152
17
99
3
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.212121
99
7
41
14.142857
0.653846
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0f04a8f3b4b121d4cb8248675c3d35cf8402c50d
148
py
Python
BOJ2559.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
2
2019-03-05T15:42:46.000Z
2019-07-24T15:52:36.000Z
BOJ2559.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
null
null
null
BOJ2559.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
null
null
null
# maximum sum over any window of k consecutive values (sliding window)
n, k = map(int, input().split())
a = [int(i) for i in input().split()]
m = sum(a[:k])  # sum of the first window
s = m
for i in range(k, n):
    s += (a[i] - a[i-k])  # slide the window: add the new element, drop the oldest
    if s > m:
        m = s
print(m)
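A worked example, illustrative only: with n=10, k=2 and a = [3, -2, -4, -9, 0, 3, 7, 13, 8, -3], the length-2 windows have sums 1, -6, -13, -9, 3, 10, 20, 21, 5, so the loop above prints 21 (the window [13, 8]).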
24.666667
81
0.527027
39
148
2
0.410256
0.25641
0.153846
0
0
0
0
0
0
0
0
0
0.141892
148
5
82
29.6
0.614173
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0f1b1ef095407f7b2c83d4abc6cd4dc671eb3f2c
1,349
py
Python
code/descriptor_track/train_eval/preprocess/make_external_simpt_debug.py
seungkee/2nd-place-solution-to-facebook-image-similarity-matching-track
716667bf416239f448e4ea2730a2cc5146536719
[ "Apache-2.0" ]
13
2021-12-07T17:15:10.000Z
2022-02-23T08:45:58.000Z
code/descriptor_track/train_eval/preprocess/make_external_simpt_debug.py
seungkee/2nd-place-solution-to-facebook-image-similarity-matching-track
716667bf416239f448e4ea2730a2cc5146536719
[ "Apache-2.0" ]
2
2021-12-16T14:33:10.000Z
2021-12-28T07:15:33.000Z
code/descriptor_track/train_eval/preprocess/make_external_simpt_debug.py
seungkee/2nd-place-solution-to-facebook-image-similarity-matching-track
716667bf416239f448e4ea2730a2cc5146536719
[ "Apache-2.0" ]
4
2021-12-08T07:52:28.000Z
2022-03-29T05:50:38.000Z
import torch
from tqdm import tqdm
import os
import numpy as np

train_features = torch.load('train_features.pt')
n = len(list(np.load('/facebook/data/images/train_imlist.npy')))
print(n)

# for each image, compute similarity to all train features and keep the
# indices of the 256 most similar images via np.argpartition (top-k, unsorted)
os.makedirs('/siim/sim_pt_256', exist_ok=True)
for i in tqdm(range(n)):
    a = torch.mm(train_features[i:i+1], train_features.t())
    torch.save(torch.tensor(np.argpartition(np.array(a), -256)[0][-256:]),
               os.path.join('/siim/sim_pt_256', f'{i}_sim256.pt'))

# same again for the 512 nearest neighbours; ensure the output dir exists
# (the original only created /siim/sim_pt_256 before writing here)
os.makedirs('/siim/sim_pt', exist_ok=True)
for i in tqdm(range(n)):
    a = torch.mm(train_features[i:i+1], train_features.t())
    torch.save(torch.tensor(np.argpartition(np.array(a), -512)[0][-512:]),
               os.path.join('/siim/sim_pt', f'{i}_sim512.pt'))

os.makedirs('/storage1/sim_pt', exist_ok=True)
if n < 65746:
    for i in tqdm(range(n)):
        a = torch.mm(train_features[i:i+1], train_features.t())
        torch.save(torch.argsort(a, descending=True)[0][:300],
                   os.path.join('/storage1/sim_pt', f'{i}_sim2000.pt'))
else:
    for i in tqdm(range(65746)):
        a = torch.mm(train_features[i:i+1], train_features.t())
        torch.save(torch.argsort(a, descending=True)[0][:300],
                   os.path.join('/storage1/sim_pt', f'{i}_sim2000.pt'))
    for i in tqdm(range(65746, 1000000)):
        a = torch.mm(train_features[i:i+1], train_features.t())
        torch.save(torch.tensor(np.argpartition(np.array(a), -24)[0][-24:]),
                   os.path.join('/storage1/sim_pt', f'{i}_sim2000.pt'))
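A side note, illustrative and not from the file: np.argpartition with a negative kth gathers the indices of the k largest values without paying for a full sort, which is why the loops above use it instead of argsort where ordering is not needed.

import numpy as np

scores = np.array([0.1, 0.9, 0.4, 0.7])
top2 = np.argpartition(scores, -2)[-2:]   # indices of the 2 largest, unordered
print(sorted(top2.tolist()))              # -> [1, 3]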
48.178571
127
0.683469
244
1,349
3.659836
0.233607
0.174692
0.033595
0.055991
0.701008
0.701008
0.611422
0.611422
0.611422
0.611422
0
0.067545
0.100074
1,349
27
128
49.962963
0.66804
0
0
0.384615
0
0
0.171238
0.028169
0
0
0
0
0
1
0
false
0
0.153846
0
0.153846
0.038462
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0f2dd70f3a13a641369f0d289155bb97f0366583
455
py
Python
quantity/digger/widgets/qtwidgets/techwidget.py
wyjcpu/quantity
a53126a430f12b5bac81a52b2fe749cc497faf36
[ "MIT" ]
null
null
null
quantity/digger/widgets/qtwidgets/techwidget.py
wyjcpu/quantity
a53126a430f12b5bac81a52b2fe749cc497faf36
[ "MIT" ]
null
null
null
quantity/digger/widgets/qtwidgets/techwidget.py
wyjcpu/quantity
a53126a430f12b5bac81a52b2fe749cc497faf36
[ "MIT" ]
1
2021-05-11T09:33:59.000Z
2021-05-11T09:33:59.000Z
# -*- coding: utf-8 -*-
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg
from quantity.digger.widgets.mplotwidgets.widgets import MultiWidgets
import matplotlib.pyplot as plt


class TechWidget(MultiWidgets, FigureCanvasQTAgg):

    def __init__(self, parent=None, *args):
        self.fig = plt.figure()
        FigureCanvasQTAgg.__init__(self, self.fig)
        MultiWidgets.__init__(self, self.fig, *args)
        self.setParent(parent)
35
69
0.740659
51
455
6.352941
0.568627
0.074074
0.074074
0.092593
0
0
0
0
0
0
0
0.005208
0.156044
455
12
70
37.916667
0.838542
0.046154
0
0
0
0
0
0
0
0
0
0
0
1
0.111111
false
0
0.333333
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
0f2f421bc7034f1c189702148daf7d137eb011eb
707
py
Python
scripts/addons/render_batcher/helper.py
Tilapiatsu/blender-custom_conf
05592fedf74e4b7075a6228b8448a5cda10f7753
[ "MIT" ]
2
2020-04-16T22:12:40.000Z
2022-01-22T17:18:45.000Z
scripts/addons/render_batcher/helper.py
Tilapiatsu/blender-custom_conf
05592fedf74e4b7075a6228b8448a5cda10f7753
[ "MIT" ]
null
null
null
scripts/addons/render_batcher/helper.py
Tilapiatsu/blender-custom_conf
05592fedf74e4b7075a6228b8448a5cda10f7753
[ "MIT" ]
2
2019-05-16T04:01:09.000Z
2020-08-25T11:42:26.000Z
import bpy, os, shutil, stat


def create_folder_if_neeed(path):
    if not os.path.exists(path):
        os.makedirs(path)


def delete_folder_if_exist(path):
    if os.path.exists(path):
        shutil.rmtree(path, onerror=file_acces_handler)


def file_acces_handler(func, path, exc_info):
    print('Handling Error for file ', path)
    print(exc_info)
    # Check if file access issue
    if not os.access(path, os.W_OK):
        # Try to change the permision of file
        os.chmod(path, stat.S_IWUSR)
        # call the calling function again
        func(path)


def get_current_frame_range(context):
    return context.scene.frame_end + 1 - context.scene.frame_start


def get_curr_render_extension(context):
    return '.png'
27.192308
63
0.722772
112
707
4.366071
0.535714
0.03272
0.02863
0.06544
0
0
0
0
0
0
0
0.001721
0.178218
707
25
64
28.28
0.839931
0.132956
0
0
0
0
0.045977
0
0
0
0
0
0
1
0.294118
false
0
0.058824
0.117647
0.470588
0.117647
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
0f2f92cec8069a2ac369d54a8a325970f90747a5
731
py
Python
src/myblog/tests/test_views.py
kangheeyong/TEST-sanic-blog
cfd9993868de8675133324ebd410fcafa41adc19
[ "MIT" ]
null
null
null
src/myblog/tests/test_views.py
kangheeyong/TEST-sanic-blog
cfd9993868de8675133324ebd410fcafa41adc19
[ "MIT" ]
null
null
null
src/myblog/tests/test_views.py
kangheeyong/TEST-sanic-blog
cfd9993868de8675133324ebd410fcafa41adc19
[ "MIT" ]
null
null
null
from .. import app


def test_url_for():
    assert app.url_for('index') == '/'
    assert app.url_for('about') == '/about'
    assert app.url_for('static', filename='layout.css', name='css') == '/css/layout.css'
    assert app.url_for('static', filename='default.png', name='profile_imgs') == '/profile_imgs/default.png'


def test_response_200():
    request, response = app.test_client.get('/')
    assert response.status == 200
    request, response = app.test_client.get('/about')
    assert response.status == 200
    request, response = app.test_client.get('/css/layout.css')
    assert response.status == 200
    request, response = app.test_client.get('/profile_imgs/default.png')
    assert response.status == 200
27.074074
108
0.670315
98
731
4.846939
0.255102
0.063158
0.101053
0.126316
0.534737
0.534737
0.412632
0.341053
0.341053
0.341053
0
0.02451
0.162791
731
26
109
28.115385
0.751634
0
0
0.266667
0
0
0.208791
0.068681
0
0
0
0
0.533333
1
0.133333
true
0
0.066667
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
2
0f4865755087766deb6030c1567368a888b5b5f2
24,407
py
Python
feedbag/day2/2.passwords.py
chadnetzer/advent2020
b992eb202ff9dd5cc353914a136337412c8bd074
[ "MIT" ]
null
null
null
feedbag/day2/2.passwords.py
chadnetzer/advent2020
b992eb202ff9dd5cc353914a136337412c8bd074
[ "MIT" ]
1
2020-12-06T07:51:48.000Z
2020-12-08T05:03:11.000Z
feedbag/day2/2.passwords.py
chadnetzer/advent2020
b992eb202ff9dd5cc353914a136337412c8bd074
[ "MIT" ]
8
2020-12-01T21:29:21.000Z
2020-12-09T23:55:15.000Z
# original sample data
# policy = [
#     "1-3 a: abcde",
#     "1-3 b: cdefg",
#     "2-9 c: ccccccccc"
# ]

# Given puzzle data
policy = [
"3-5 f: fgfff", "6-20 n: qlzsnnnndwnlhwnxhvjn", "6-7 j: jjjjjwrj", "8-10 g: gggggggggg", "5-6 t: ttttttft", "6-11 h: khmchszhmzm", "4-6 q: qqbjqqqj", "6-8 j: gtkwwjjj", "13-14 w: lwgwrwczwwhkww", "2-4 p: vcnptzdppvpdzp", "4-6 n: pmlnzwrnbnxnpmwmhbnz", "4-9 w: wwprdwwwx", "13-20 b: rbbcbbbbbbbbsbbbbbmj", "1-3 r: zwhrnvr", "4-6 f: fmfgfcf", "13-15 z: zxzzzzzzzzvxkplvzzz", "12-18 r: phjqccgmgzmdwxgtrtb", "5-19 s: ssssssssssssssssssss", "6-7 d: pddqmdd", "7-9 p: pwrlpvlhpfpppzp", "4-5 m: mpmmmm", "9-10 s: ssxsssssrj", "6-9 p: lpjrvpppp", "6-13 r: rrrzvtrgrhqxqrvrvwzr", "1-3 p: dpjp", "6-7 w: wkjnmww", "1-5 r: cfqhhr", "5-9 m: wmmlpgmqmmrm", "12-13 x: sxgnwxxkjwrxr", "6-8 n: lnnrnnnnn", "10-11 q: kqqqzqqfqqqqq", "12-17 k: kkkkkkkkkkpkkhgvnjhk", "12-13 v: vvvvvvvvvvvvvv", "12-15 h: hhhhhhhjkhhhhhth", "6-7 l: hmqtlllnllnlmtqnn", "6-9 m: mcmmmdnmh", "3-5 k: kkhtn", "2-4 n: tnfjmnnn", "5-6 j: cjwlmn", "4-11 b: bcbbkrlgcbbc", "5-10 r: rcrcgrrrzwrrxgpzwrcm", "3-4 n: znnn", "11-14 s: sssjsssssssssss", "6-7 d: rwdddddmdddddkdd", "1-4 s: gssssskssss", "4-5 j: jjjjzj", "11-12 j: cjjjgpjxnjjk", "3-6 z: zzkdzzz", "7-9 d: mkddddqdhddgdd", "1-9 d: rxwqcdrswdg", "2-4 m: gmmmn", "2-4 k: kkkmk", "15-16 r: rrrrrrrrlrrrrrrr", "1-11 g: grgggcgggcggp", "7-8 q: qqqjqqgq", "13-14 c: cxccccgccvcpccccc", "2-5 j: txvwlls", "3-13 w: plwqwhbwdgxcwfmwjl", "9-11 g: ggggggggggggggggdgg", "3-12 g: ggsggggggpgm", "3-11 g: bcgqgxmbjpwm", "8-9 z: zxvzrzzzzzdvzzgz", "17-18 z: zzzxzzzzzzzzzzzzsw", "9-10 t: ftrmtttktttttdtmdk", "15-17 r: rjrrprrrrrrrrrrfrrrr", "7-8 k: kkkkkkkkk", "6-13 n: nnnvnfgntnnnjnnxh", "17-19 l: llllflllllllllllqlrl", "11-13 d: hdlddddddhxddddkd", "13-17 h: hhhhhhhhhhhhnhhhjhhh", "1-12 c: ncccczwcnctcwcc", "4-5 w: wgwkl", "12-13 w: wwwwwwwwwwwwfwww", "3-12 s: scssgwshsspsss", "15-17 w: wwwwwcwxmhwwwwwwwww", "1-3 x: xxxxw", "8-9 p: mpppppppzptp", "11-12 n: nntnbnnnnnmx", "2-10 x: xrspbmkcthqsdxrdxwx", "13-14 l: lllllllpllllll", "5-10 x: xxxxjxjdrx", "7-8 k: kkkkkkjk", "9-11 k: vkkkkkkvkkzddbzr", "3-8 r: lbrrcwbdf", "6-14 l: lgwllrgllllllgllll", "6-9 q: nqqkhqqtqgqc", "2-4 g: ggxggggggg", "12-13 k: kkkkkjkkkkkvf", "5-9 b: scvbbpzbbbzbfb", "16-17 d: mdddddddddddddcttd", "4-10 l: lllflllllllllllllll", "3-6 q: qqqqqtqqqq", "1-3 n: pblsghl", "9-11 v: bvvvmvfvvgtvfpv", "4-5 s: ssshq", "5-8 g: ggwgnggp", "1-4 f: ffmr", "4-5 g: ggwzs", "4-5 h: fhhqs", "10-17 l: clhlllhslxpgljpvlrkl", "7-9 m: pnswhtmvmsrmjwrbfz", "6-16 v: vvvvqcvvvdzvjvldvv", "5-6 c: cntcrl", "12-13 n: nnnnnnnnnnnnln", "2-4 w: wwww", "2-13 w: swrqssmmwrxtw", "15-16 z: zzzzzzzzzzzzzzgqzzz", "6-8 c: cctvkbdcwcbvhc", "1-2 w: wwzdwjtm", "1-3 z: mzzh", "6-13 t: fttttdwtttttkht", "3-9 g: cggfjgqngwmj", "1-5 k: pkkkkkkrk", "1-5 q: qknqd", "7-11 t: tttttckttkb", "6-7 q: qqqqqbl", "16-17 c: ccccccccccccccctc", "2-5 v: vfcnvlvvvxvrnvvvvvv", "1-5 w: nwtwnnwwhtwwdwwww", "2-10 v: ftvxxbjzlqctp", "5-7 d: nddddgddjdbk", "8-11 s: wxqjwkcsflssm", "17-20 v: vvvvvvvvpvgvvvvvvvvv", "6-8 w: wwlwwwqfw", "13-14 r: rrrrgrrrrrrrrr", "5-6 t: ttttzq", "13-14 c: cccccccccccccc", "11-12 k: kwzkkkbdkjkk", "10-11 t: ttttpttttrwwtttt", "3-4 w: ljwwww", "7-8 q: ggqzdvqqw", "1-3 z: zzsg", "1-2 c: crjvdhgwckszmzpcjmr", "10-11 n: nngnntnnnqn", "7-16 w: bwdgwwwgwwnnqtcwc", "4-16 v: vfkvqvvsvnjhfvvv", "10-11 p: fgbhpzqvkmn", "10-11 w: wswgwqwszwwhwnwww", "15-16 t: stwtfxttthptttttt", "1-3 v: vlvsdmrv", "5-8 z: zzzzfzzz", "14-15 t: ttpvtrlqtlcdrlv", "2-6 b: qwfnbkc",
"6-9 p: jppjpspplm", "3-4 s: brgqssz", "2-6 q: cqwqlq", "13-14 d: rrdljqdddddcxzdc", "8-13 s: tsnssssssssrcssrssq", "7-8 d: dwddddgl", "9-15 b: gspmhlgbbzbbbrbt", "3-4 h: khhh", "3-4 c: nkccpmcct", "6-7 g: ggggggg", "17-18 p: ppppppppppppppppjnp", "2-6 f: rfjxtff", "3-4 g: ggvg", "7-9 k: kkkkkkkkkkk", "11-16 f: fjdffrtfffjfdffcln", "4-8 w: lhpdwbnfssswwfswwwrw", "5-9 l: lvlllflll", "5-6 t: wtrtlr", "4-8 m: mnmmjgqtgs", "5-11 n: nnrngbnntnk", "5-8 s: ssssfssss", "7-14 m: hrnbhsfcvdmxbmvzfvnq", "1-5 x: xxxxxxxxwxxdg", "1-10 v: vcjvvjvvvv", "5-10 l: vvdmhskmprszklvvl", "3-17 d: dsddkddddddqddddfjdd", "9-13 p: pppppppptpppqp", "12-13 h: hhhhhhhhhhjhh", "1-4 z: mrvlzlzhzlp", "3-4 h: hhmc", "7-17 w: wwwwwwwwwwwwwwwwww", "6-8 z: cgzxzznzzdhcvwh", "4-5 g: mgggg", "7-8 t: ttrthtdkjtgtts", "8-9 w: wwwwwwwpw", "9-15 k: kkkkkkkkkkkkkkk", "8-14 q: qqqqqqqqnqqqqq", "14-19 w: jkwbwwlwdcmwmmwwrwr", "4-5 m: hsxgkdmz", "2-4 v: vsvqvcv", "4-9 h: grhnhhmhhxhlh", "6-14 t: tmttrjbtjtfttc", "1-4 d: pjdbrb", "15-16 r: rrrrrblrrrrrrrxwr", "16-18 p: pppjpppppptpplpppppp", "4-7 x: mxtngxnpsx", "9-10 b: bbbdbqvmrgbzbbbb", "4-5 w: wwwwj", "1-2 d: dgws", "3-10 l: lglsphlbgfmgdtw", "10-11 h: hhhhhfhhhpdhh", "5-9 x: xxxxqgxxjf", "11-19 m: bfmxmmfgzzhqwsmgwmlr", "10-14 x: xxxrxkxxqrlxmpxbdx", "3-8 l: lxlxvwlllznlptfv", "5-9 h: hhthhhhshdq", "1-5 q: sqqqqqqq", "9-14 j: jxfcjpjfjjgjjjjjjjhj", "1-7 g: gvdgjgsvvgnd", "2-5 t: tmttpttttt", "17-19 c: ccccclcccccccccccccc", "10-13 d: mdvddtsddrxtx", "11-12 g: gggggggwggzgp", "3-5 k: hzxnpkq", "9-11 q: qqqqqsqqqqq", "8-9 x: btrkbxngx", "3-5 g: jpkchcxcxhzfhsggqkg", "7-13 s: ssssssssscssssn", "13-15 p: pppppppnppppppp", "5-15 f: ffffjfffffffffgf", "2-4 p: zqphp", "1-4 r: wrrr", "3-9 b: jsjcbrkkczkzmjbg", "1-2 k: kkkkl", "2-3 n: dlnxjwzstsdxns", "7-14 v: fvvvvzhhvpsvvckdvv", "2-8 r: rrrrwrhrrrr", "5-8 n: nnxdnnnnnn", "5-12 c: scmcdscccccmcc", "9-10 v: vvvvvvvvjm", "14-15 w: lwppdwwwwdmrrww", "6-7 c: qcgcvxdrcccpxchrnlq", "8-10 x: hbrqsksxwxxttz", "18-19 w: jnftgwpwmwfdgrcpkww", "3-4 k: ffkj", "5-9 f: flffsvffsff", "3-4 k: kkkkk", "1-3 l: dlglkll", "10-16 f: bfffjzfffnfffffxhfff", "11-20 d: ddbdqsddddddddddfddd", "6-20 l: vmjltmkclbmqrflzgdzl", "5-19 f: ffffffffffffffffffff", "5-10 n: nnnnmnnfnz", "4-13 f: kmrfrfkvjxdbftvhnsdm", "10-11 k: kkkkkkkkskk", "12-13 k: kkkkkkkkkkkhj", "7-8 p: ppppppnhw", "5-8 l: gtpllwklh", "13-14 h: hhhhhhhhzhhwsrh", "7-15 h: hhhhhhthhhhhthqgfh", "1-3 f: mfdmhv", "15-16 f: fffffflfffffffbqf", "12-13 c: cccmcccccccxn", "2-12 q: nqltxqfdcrxqvt", "8-10 d: dddddddddvdddd", "2-9 g: mdrwnvtsd", "12-13 l: lllllllllllll", "2-3 m: mnrgmdm", "6-9 v: hvvvvvvvv", "4-7 q: bqqqrqcqqqq", "8-11 v: vvvvdvvvvvr", "3-4 n: cnnw", "2-12 r: rrrklfrrrksrr", "2-12 p: pxppklhsppwdxwcpzvm", "10-11 n: nnnnnnnnnnn", "5-6 w: wfwwww", "3-5 p: pppppp", "3-8 g: ggggggggggggggg", "1-4 x: xxxxx", "11-16 d: ngcdkglddtppbddgdrd", "11-12 k: kkkkkkkkkkhr", "12-13 c: hgxxchcvxpdlsrt", "1-3 m: vmmm", "4-6 z: kvzgzzzp", "11-12 d: dzddzlcbvdxk", "10-11 g: ggsgggggggggdz", "6-8 q: xfhgpqltbfbdzqg", "4-7 v: vvvlvcxmvvxq", "5-7 v: vkkgvgvnpvvlpgkv", "6-7 b: bbkvnwbqblbvbb", "2-4 l: wmll", "6-9 j: djqjcxxljm", "3-4 j: nmzcsnlnjjjdms", "3-8 w: wnwwsvww", "9-20 m: wbntxzztwmblxmsmltmg", "7-8 r: rprrrtrf", "1-3 r: lnrfxfswmhgvf", "5-12 k: zlkdlktsrqjt", "13-14 v: vvvvvvvvvvvvvbv", "11-12 d: ddddddddddgc", "9-11 c: mdwpjjcdcrc", "5-6 m: mmmmnl", "5-6 l: lblldn", "13-14 c: sccfcxsnsclccc", "4-5 q: qqkkq", "5-8 m: mmmmmmmm", "5-8 z: tzhzzttzvwfcv", "12-15 b: bbbbbbbbbbbtdfb", "7-10 b: bbnplbbxxbh", "1-7 x: 
zxxxxxvx", "6-17 b: kbnbbbbbptbbrbbbb", "1-3 l: llllll", "13-15 c: ccccccccccccqcz", "1-8 s: sssssssb", "12-19 x: xxxxxxxxpxxcxxxxxxq", "12-13 x: xxxxxxxxxxxxxx", "14-18 w: wwwwwwwwwwwwwlwmbk", "7-8 q: mzwqblqqz", "1-7 p: hrppppgdzp", "2-7 n: qmvpmsjncgkgpbb", "8-10 x: xxxxxxxwxx", "2-4 v: rfcpmpvsswsrjkxpdrxh", "14-16 k: kkkkkkkkkkkhkkskkt", "14-15 d: ddddddkdddddrqdd", "4-16 m: njmqmmmjfmmxhmwgdbc", "12-13 h: hdmdfhhhzhkdhr", "3-14 n: nbntrsmnmrcwbf", "6-8 h: hhwhhlhnhh", "18-19 v: vvvvvvvvvvvvjvvvvvp", "11-20 l: ldllllbvllmqllmlllfl", "2-3 d: dcddd", "4-6 n: bsnncn", "3-6 f: dwlfqfzgs", "8-12 d: xdddddfhnddfddd", "5-13 l: llllqlllllmllll", "10-14 m: mnmzrmmmmwmmqm", "3-12 c: qjchcclnbccccpc", "7-16 j: jjjjjjwjjjjjjrjnjj", "1-10 v: svvdvvgmgpkfkvhvv", "2-4 b: gbpbczblbbv", "7-9 w: drwwbwtgwswwww", "2-8 h: fhkprhplz", "1-6 m: mmmmwm", "3-4 g: gggp", "1-3 q: qqqqj", "10-11 h: htfrdwrnkzntnfpdmkhq", "5-6 x: nxxxxxxxj", "7-8 w: wwwcwwww", "1-5 h: lhhhhh", "7-8 f: ffjfftff", "3-5 g: kdgfgjgbghggp", "2-11 p: flgpvfbhpzxmmntw", "12-13 z: zzzzzzhzzzzgzzqz", "12-13 j: ttmhmdfjpdpnj", "5-7 h: hhhhvhh", "9-13 x: xhxbxxsxxqwxxvxxxx", "1-3 c: ccchcc", "2-5 h: hhrsh", "1-3 q: rqqq", "2-8 x: sxrxbcxx", "12-15 b: bbbbnbbrbcbtbbb", "14-18 m: hmmmmmmmkmmmmmmmmrmm", "1-4 v: vvvv", "5-11 h: brnzvqhrrwh", "4-10 f: dbtfdvlfnffqmwp", "3-16 r: krrrvmrwrrrtrrrrkrwr", "11-15 c: gxbccccccccccbh", "13-18 z: zzrzzzzzzzzzzzzzzzz", "1-3 l: lkvxtrthfvmdzn", "7-9 p: vpppmpppppp", "3-4 w: wwwwwwww", "8-11 z: vlzzzlzvzzmzzz", "2-4 v: vqfv", "10-11 s: sssssslssgss", "4-5 z: zzzww", "11-19 p: zpbpdjfjpdfgnpphhpg", "1-8 f: fftlxmfl", "9-10 p: phlpqzppxpxp", "5-7 q: kbqjqqq", "3-6 m: jcblmv", "9-10 l: llllllllll", "13-14 w: wbwhwwwwhwqwmswwmjwf", "6-11 j: jjjjvtjwjbdjj", "4-8 j: jljjjvgj", "3-4 r: lrhvrzrr", "2-4 p: plpfkphpq", "2-6 b: bbbbrbvwbbgh", "2-11 t: mmjmcwcdcbtztxfbtst", "7-15 r: grsrrjrrtrgvrtrrr", "6-17 d: dddddgddpddzddddddd", "3-10 t: qpnbrnmmjtjxtdg", "6-7 v: vvvvvhb", "5-20 j: vxmkjnssjdtldsdwltlg", "7-10 r: rrrrrrrrrrr", "2-4 r: rrpl", "3-4 m: mmzg", "1-8 h: fshhjrhm", "5-8 m: mmmvmmvmmnmdxnjcjpn", "8-10 m: mmmmmmmlmzmm", "14-15 q: qqqqqqqqqqqqqqb", "6-16 q: qhkdtqnzqqqjgjrqrkbq", "3-9 b: gtbbfgfhbs", "14-16 h: hhhhdhhhmhhhhmhhh", "9-10 n: nnnndnnnvjnl", "11-12 p: pqpppppppppppp", "7-9 d: ddpwfdtdd", "6-10 z: qzzzzjzzmz", "9-14 q: qqdjqdqqrqqwqnqqp", "6-7 k: kkkkkbkk", "7-14 j: hjvjmzkhgltffljjvjj", "4-5 z: znhzz", "11-16 r: rrrrbxdrrqrrsrrb", "5-8 v: vvhvvvvvwvlvhlqg", "4-10 n: wvnnjnndcnk", "5-6 g: ggkggk", "8-9 w: wwwwwwwww", "5-9 r: mrwrrrrrx", "4-6 r: jcfrjrrrg", "6-9 w: wkwwsxwgfcwqb", "5-6 x: grxjxnrb", "2-7 s: nsmldnsszdk", "5-9 j: gjjzkjvjwjcjbj", "6-11 r: jtrgcvwccrrrfmj", "7-12 j: jfjjmjjjjxjqzdjjxq", "1-2 n: nnwqcnr", "5-16 l: lllpclllllllblllll", "5-6 j: wjgdjcdpjxfjxnjj", "3-16 s: klstwqksfclmcbbcn", "17-18 t: ttttdtttttthtttttttt", "9-13 p: ppppppvpvhhqp", "1-2 n: tsnnn", "3-4 p: jpqpjmpplpw", "4-6 t: dtfthsxtfpd", "2-6 m: zhszctkm", "1-13 p: dppmkpbprgppxcpvhrpz", "2-7 m: gvmmflp", "9-16 c: rcdsppjbcwdxnfbc", "5-15 p: pppxpppqnhpptpppz", "10-13 q: xfhqhkqkjqjwqqcfrgl", "14-15 t: tttttttttttttdvtttt", "5-8 q: qqqbqvqzqqn", "1-7 v: cvgvvbcgjv", "6-9 r: rrrrrrrdr", "2-11 j: wjjgxngswkj", "11-13 d: dddddddddddddm", "5-8 p: pzppppppkjp", "15-16 c: cccccccccccccctc", "1-7 c: vsvkccbc", "2-5 h: hhhhchhhhhhhhhh", "9-11 n: nnnnnnnnnnnnn", "11-12 q: mdmfmkxcxzjq", "6-11 m: mmkgnjdhfbc", "1-4 d: dgrdwbdfdkdrmthsj", "12-14 k: kkkkkkkkkrjkkkkk", "2-4 d: hdrvdzd", "2-3 s: sxmsssssssssssssssss", "13-15 z: 
zzzzzzlzzlzztzzz", "3-8 r: lxrrrnvrtrgnmkrr", "6-7 w: wwlbrwwww", "4-7 r: zlnmmkpnrkkcrrxrmfq", "2-6 v: zgvvjjvvjhjv", "8-11 l: llllllllllcl", "8-14 m: mlmldmmgnqzmmmm", "7-10 z: wzzzzhtzdqzznzz", "4-7 k: mvgwkkk", "5-9 p: pdpltvdpptpppg", "4-15 q: qqqfqbzqlqqqqqpqfqs", "5-7 q: qqqcxqqm", "14-17 x: rltjxxbxxxllsxwcx", "1-13 w: gwggwwwwwwbsbwkwwd", "8-14 k: kkbkkkkhkkkxjzblgk", "5-8 p: cppnpprp", "6-7 k: jrnvhkkgkkb", "12-14 h: hhhhhhhhhhhhhn", "10-20 j: gjwjjjpjjljjjjjsjjjj", "16-17 n: nnnnnnnnnnnnknnnnn", "5-6 f: cdpppf", "12-13 z: bvpzqzzlwzccfszt", "1-9 k: wdkkvtrprltkkltz", "9-16 x: xxxjxdxhtzjxgxsbx", "17-18 j: jjjjjjjjjjjjjjjjlrxj", "2-5 x: xkknmxxxn", "3-9 f: tfffhfffffff", "11-16 r: rrlgrrqmrrprcrrrvjr", "9-12 m: nrlmgmxvgkmpmqvntx", "4-7 r: hrrrzrrnm", "3-15 g: stgjgbgggvdljcgdgfg", "6-9 j: jjjzbpjjjjjj", "2-3 p: ppjp", "9-10 t: tttttttttt", "10-12 w: wwwfwvwvnkwdwww", "11-14 d: nvljddddftrndzx", "6-7 r: rlrrrbqcrqr", "2-4 c: xbdl", "11-18 c: ccccccccccvcccccccc", "5-6 j: gjjjjjj", "3-7 w: xwhnqlhnpfrvlkqqrp", "6-19 q: qqqfxdglhqqkqqqjnhdq", "8-9 d: ddsdddqsv", "14-16 g: gggggggggggggggt", "3-5 b: bbbnh", "11-14 t: mtttwttfttttltq", "8-10 x: xxxcxxxfxq", "8-11 l: lfgvnlzrfcllnxss", "1-3 t: gtttttttwgtptt", "1-20 j: jjgjlqjjcjcjjjsjgjlj", "8-14 v: dqgmngvvqnswvvrrvc", "2-8 m: gmpkmmsmmmmm", "5-6 m: mmmmtm", "6-7 z: zfzrxzxz", "4-15 k: wmwkhhskgwzqpnk", "2-6 d: hddddfzdsdtvg", "9-10 g: gggsggggggrgggqg", "11-12 m: mmmmmmmmmmmm", "5-7 w: wwhqqhwwwwd", "4-9 k: gtkrkkkdfk", "1-4 g: kgtvg", "7-11 r: rhdbzjrwrcr", "9-18 f: fjffffffdffkfffffvf", "3-5 t: ttttt", "13-14 q: qqqqqqqqqqqqcqqq", "1-8 n: vnnnnwnqlbln", "3-5 s: ssmss", "2-4 q: qqqbkvqxh", "3-4 n: nshscq", "10-18 s: sssssssssnsssssssgs", "9-10 v: vvvvvglvzddvvjv", "5-16 s: sspssssrssssslss", "4-7 t: ttptxtt", "2-4 r: rrrrrxdrr", "8-9 b: xqvbbbbbh", "3-4 b: bbbbc", "1-13 v: qhvvvvlvvvvvdv", "4-8 l: lhkllblwllll", "3-4 q: qhjgq", "8-9 p: rpbpppshdpp", "13-18 k: kkkkkkkkkkkkkkkkkx", "8-9 r: rrrrrrrrr", "5-10 f: bjfwwqvjwffpnl", "1-3 l: pbhl", "4-6 g: mggfgv", "1-4 t: tttttt", "5-10 t: llqrtccxtttntxjcdczk", "13-14 q: qqqqmqqqdqqfqgqqqq", "6-10 w: wpwmwwrgwwk", "13-16 q: qqshgkqkqvscqtqq", "4-5 t: qttdt", "2-12 h: fpbbxgpskzjpkvtr", "10-12 z: zrszvzzzzrzkznpzw", "1-4 z: tgzz", "3-6 c: cdwccvgm", "1-15 q: jqqxkqrqmtfqpvbpq", "4-6 v: vjvlvvv", "1-7 l: hjlllwj", "6-13 s: lrspslfwsgjks", "3-4 p: dpvk", "4-8 h: hsjjhhff", "1-18 k: pkkkkkkkkkkkkkkkkkk", "6-8 n: nvcnxqnnrnnn", "7-8 p: ppppppvzp", "12-17 h: hhhhhzhhhhhkhhhhph", "13-17 p: ppmpppnppppplpbvcppp", "8-14 m: tmmcjvmmjgcfmmnj", "1-3 t: tttth", "3-4 x: zxxx", "1-3 d: kpwhxpctcgdbdkb", "5-6 z: knnlzzzzxjrghzb", "9-10 w: wwmwwwwxgww", "9-11 b: bqbbfbbwbbqbb", "2-3 d: ddxp", "10-12 g: gggvgsgtgggggbg", "3-4 b: gbbb", "8-20 c: jccqcxjcqfncfcbccgcc", "4-9 z: zzzztzzzzzb", "2-12 x: gmfmzzrxsqbx", "10-12 x: xxxzbxxjxxxkxb", "3-4 v: szpvbvlvr", "2-5 r: grqqr", "4-8 q: mqqnvqvqqq", "6-10 v: vdbvnvvxvvvnvvvvv", "4-10 d: dddtdddddjd", "3-4 l: klmc", "10-13 k: kkbkkkvkhkvkkkkkl", "1-3 v: vvkz", "13-14 h: pkgffgcszgsghbcdtpm", "11-16 c: dgccccccctchxbfm", "8-9 b: bbbbbbbbbtb", "1-9 h: hrnhhlphhh", "17-18 s: ssssssssssssssssst", "5-10 h: hhhhhzhhhhhx", "2-7 k: dlrncbkkwp", "3-5 k: kfqkk", "4-8 d: ddddddddd", "10-15 m: mmmmrmmmlpmmmrgmkmx", "2-4 g: gsgggkm", "7-9 l: llsllllvrzlxlrgglk", "2-9 f: fzfbvfkff", "13-14 p: pxppppppppppvf", "15-17 z: nbkxkjtszptcndzdl", "3-6 z: zzxzzhz", "12-13 t: tttttttttttttt", "5-10 s: pssbfsbsssnssvsb", "9-18 c: ccxthchscjjdccvcncl", "3-5 f: fndfqffffff", "2-4 m: 
clwvcdjmz", "7-8 b: bbbpbjbb", "2-3 r: rbxx", "3-8 j: jjjjjjjjj", "5-10 v: zrwnvbrvmm", "4-6 h: hhghhhhh", "6-8 f: fnffzfwfgffpfc", "5-6 q: qvqqqqkqqq", "3-5 v: vgwvrgqc", "9-10 p: pppphbbpphppb", "10-12 l: rdjrcxkgflll", "9-14 h: qwwxnhfhnfhhbhhjr", "9-10 d: ddddpddddd", "4-13 w: wwwwwwwwwwwwwwww", "8-16 x: xrxxxxxxxdlxxxxxxx", "6-10 p: pplppkpzwb", "5-7 j: wpjjjgjjvjqjrgj", "4-13 w: cwwswwwwwjwwdvw", "5-6 j: jjjsjjj", "4-5 c: ccfccqdqctllbm", "7-13 c: bcccccccccccccc", "4-5 p: pgptw", "1-4 p: qpwmp", "3-5 f: fftfff", "2-6 m: mmhbpmjlx", "9-13 r: zrrczrlrzqxrszrrlrr", "2-9 w: xrszwwwdkww", "6-14 g: qlwswsgjdlgmzvwg", "6-8 z: zzzzzzfz", "6-7 w: xwzsjjr", "2-13 f: fxmfdnfffkpggz", "1-7 d: rdnlqdplfddjdd", "11-14 j: kjjvjjjxpjjjjjzjjjz", "12-14 p: gpppwvmqpfpptpfppkpc", "6-11 k: kkkkkhkknkkkkkgk", "2-6 m: swpgmf", "1-3 x: sxzqnxxv", "12-18 n: nnnnlnnnnnnnnnnjnxnn", "3-4 p: pppq", "8-9 v: vvvvvvvpv", "6-9 l: ltsldwtlhll", "11-14 x: xxxxlxxhdtxxxxxsxx", "4-5 q: qsqvqvq", "5-6 b: sbbbbt", "10-13 l: lllllslllrllll", "1-5 m: zmmmmmmm", "2-10 s: ssssssssss", "5-8 r: qqcsbjcrljvksc", "11-15 n: bndcnknnxnnnnnbn", "14-15 v: tvvvvvvvvhqwvvv", "2-3 s: jsvcsb", "12-15 d: mddddfddddlxddddd", "12-18 w: wwznwcgwwwwwwhwwgwgw", "2-9 v: dxlvvlvqg", "2-5 h: njpsxr", "11-13 m: rmmmmmmqmmzmtmm", "4-8 m: smmmmmmm", "3-10 j: ngwtkjfrjjtrxfnvj", "2-3 p: ppwwpp", "3-5 c: cbmcncnqc", "11-13 m: mmmmmmmmmmcmm", "4-14 g: gggggggggggggxg", "2-4 b: bbfb", "16-19 p: ppppppppcppdpppmppwp", "10-12 g: gghgxgkvggrggrg", "1-8 b: wjjqktjbbfkdz", "8-11 c: cqccdcncfcccnc", "2-10 z: qzvlqltzns", "3-7 p: kgpmpzpbx", "1-6 m: mxmdmlmmzmmmmmmmmm", "1-7 r: rrrrrrrrrrrc", "8-10 h: whhhqhhhjjhnch", "6-10 x: xwxxxxkpxdkdnxvh", "10-12 z: zzzzzzkzzpzzzzz", "1-12 s: dssssssssssds", "4-7 f: fvzbfcf", "5-6 w: fwwxfwmswxkd", "14-15 g: gggggggzgggggfhg", "6-7 j: jjjjjnb", "1-6 v: vcgksvnrbgsvkrwsc", "6-15 w: wkvwwwpqwwwbtwxvhw", "8-9 m: smmmmmmmmm", "14-15 r: rrjrlrrrrrrrrzbr", "3-8 w: tjqncxsnqkdpwzm", "4-5 k: kkkkk", "8-11 z: zzzzzzpzzxzzkzglzrz", "5-6 k: kkkktzk", "18-20 w: wwwscwwwwwwwwwwwwgws", "1-12 z: dsfxzctdzzjxbt", "14-15 q: qqqqqdqqqqqqqqq", "7-10 r: rrxrrqrrxcbr", "1-13 d: fmgjzgqhdhddd", "4-6 g: mnggxttn", "6-7 v: gwtmscl", "11-14 g: gggnmgggggjggcgggrgg", "17-18 k: gcslskskkkwkwkklkk", "14-16 r: rrrrrrrrrrrrrbbrr", "13-15 s: sjsxszssgfssdgwsbbsp", "1-9 d: dddddddddddd", "1-13 j: mnjjzckxzjjdj", "1-2 j: hwrqjxxcj", "6-7 z: fzzzwxnnrg", "2-10 x: wxnqjddpxxpxw", "3-10 s: srvvssdcvzfn", "3-11 w: gwwwmwvwwww", "3-4 q: qnqs", "11-12 d: dddqdddzdrddddd", "2-15 l: wldlltxhrwzzlhz", "4-7 g: vgsgkfgmc", "7-9 t: jdsltcgttdttd", "9-10 q: xvqjqqqwqqqqqpqq", "6-9 b: bbkbnbgbbb", "2-5 t: ttcjtbkb", "7-8 r: rdrrqrnrrrrrvp", "2-4 f: pnfqfffvpfffffhbffv", "10-12 z: kzzzzzzzzrzpzszz", "4-5 m: tmmgxlm", "3-4 k: kwhhk", "1-11 h: hhhndhhfhvhchmmqhz", "6-9 m: lmmmmmmmtmmm", "1-5 d: dwdkd", "5-6 d: dddddq", "7-9 j: jjjvgvmjdjbjj", "3-6 w: gwzhpjwkg", "10-11 h: hhlmftlhkhhnh", "8-20 h: hhgnchpthzhhhhqqwhcz", "6-7 s: sssssgfs", "3-11 g: jfgqgbvqppjn", "5-6 d: ddqkmbd", "14-15 h: hhhhhhhhhhhhhphhh", "12-13 l: fwkwphdlwmfwg", "3-4 z: pmbncwz", "3-4 w: lgwv", "3-4 s: sshs", "2-7 z: zzflznzzzlzl", "1-2 k: klbjvbwpkkptb", "17-18 r: rrrrrrrrrrrrrrrrlrr", "7-8 l: mlqgltml", "9-14 l: llljlllxllllvl", "5-13 b: bbbbtbbbbbbbfbbbg", "7-10 d: cddlddgdqkld", "4-6 q: kqqqqq", "5-11 r: rrrvrrrsrjrr", "7-10 t: tttgtltttbt", "13-14 h: hhhhhqhhhhhhndh", "3-4 s: dsss", "6-12 w: jwwtxwwwkkwbw", "7-8 v: cvvgnvvv", "12-16 q: qqqqqqqqxvqqqrqqqq", "6-11 z: zzkvzvzzzxzzzz", "4-9 s: 
lcrvsssmfs", "6-13 c: ccccpdcbcbcgdcccgdcc", "7-9 q: qpqqqqqqhq", "1-10 k: kwfkkxrkwhtmkwknkx", "9-20 x: dxzrxxxsxxxjxxsxdczx", "15-16 c: ckcccdccccccgccdcccn", "2-7 s: sscxswskxhchxpfs", "2-3 h: hhlh", "2-6 p: ppppppp", "6-8 r: ztmrsjqtrk", "1-2 r: drsdvpmzdn", "6-7 v: vqvvvgp", "5-6 g: gggggg", "3-7 d: tfkddmjzjgdxzlkd", "16-17 b: bbwbbbbbbbbbbbwbt", "2-11 b: hbrwqqhkcrbbvmx", "5-11 f: ffffqfffffffff", "2-5 n: nnwhn", "8-11 j: jxjsjjmjkjkjhjx", "1-3 h: hhhh", "5-12 t: pttttszttwvlt", "7-8 q: mqqqqqqqtkq", "7-10 z: xzzzzzdzzzzzzzz", "6-7 l: ljzqklhmm", "3-5 k: bkkkrk", "6-8 x: xxxxxlxcxxxbx", "6-7 j: jjqjcpjfjj", "8-9 l: cwmllhslwv", "14-15 z: zzzzzzzzrzzzzcz", "11-12 f: sfccmxxffsfg", "5-6 q: qqqqqqqq", "12-15 t: tttttttttttttttt", "7-12 q: pqwsnhqfzdtqxd", "4-5 b: fbcbb", "5-7 t: tttstft", "2-4 h: vjmr", "4-6 r: rrfrprr", "18-19 x: xxkxxxxxxxxxxxxxxxxx", "2-4 k: qklkc", "3-6 s: svjsrs", "14-15 j: pjjjhjjjjjjdjgj", "2-15 l: lhllllllllllllwlll", "2-7 q: qvqqgsq", "2-6 z: zzlzzzn", "8-15 p: pppppppspppzppgp", "3-4 l: kfzbl", "9-12 w: hqjwwtdwwlwwmw", "3-4 k: lghgg", "3-5 k: khgzkk", "2-10 r: rrsjlmrsrr", "2-5 j: jjjjj", "10-12 j: jfgzbdbjwhjjjf", "1-4 f: ffff", "9-10 j: jjjwjjwzgtcjzj", "1-6 m: mmmmmq", "5-6 h: hhwhph", "3-6 w: wwpwwz", "5-6 q: qqqkcb", "8-9 f: ffffffxrff", "13-14 m: lqvpbrhgmxmkgj", "4-6 q: pqqxqk", "7-11 b: mbbnsvbbjbbmb", "17-18 q: qqqqqqqqqqhqqqqqqqq", "3-12 w: qzcwxswwzvwzb", "10-11 l: llllllllxxs", "1-4 c: bvcch", "11-13 s: svssqfvssshst", "6-7 l: lllllrl", "5-8 n: nhqkfnggnpkbhpgntlht", "1-14 g: pgrdgjggggpggc", "12-16 n: hlnhkxpnnsfctsnq", "6-8 c: cccftqcfqc", "1-5 f: fbfff", "1-15 t: mttptftctttptttt", "6-11 t: cxtttnnfttt", "7-11 h: hsrfjswhhhhhhhhc", "12-13 b: bbbbbbbbbbbnfbvb", "12-13 x: lzvxjxxxcxxxq", "12-13 p: ltwhhhqvdjptbsw", "7-9 z: zzzzzztzn", "4-5 g: ggqlgg", "8-10 r: rbqxwzfhlzt", "3-5 g: grhgk", "14-19 z: zrzzbzztszpzvzbdfzm", "7-11 d: tdjbcdpdddb", "3-7 j: jpqjjjj", "1-2 m: mcxklcmmws", "7-8 r: drqlcrfqrgrrxrrg", "1-5 g: kgngw", "8-10 r: wrrbfrrrpntjrrrgrr", "1-4 s: sstf", "3-7 b: fnjnpmfjlwb", "2-5 s: pwjkj", "9-11 t: tbtbtrsrfzntwqq", "13-16 m: mmmmmmmmmmmmmmmmm", "13-18 b: bbbbbbbpqlbbbtbtpxb", "9-11 w: wclwwwrwwwkhww", "7-13 b: bbbbbbbwbbbbb", "8-12 t: sxlwtttttnhn", "4-7 q: kmdqxcqswqw", "3-4 r: rjrr", "4-6 m: mdmdmmmm", "6-17 h: kjrkchhnfhqqcrmhhhdh", "6-9 j: vjjgcjjjpdj", "7-9 x: xxxxsxxxxx", "8-16 j: jjjjjgjnjjjjjjjczd", "6-8 t: tttttttd", "7-17 s: ssmsspvsssspssssfssl", "3-10 f: cgffsqxhdmrvdf", "1-3 f: wfpf", "11-12 k: kkkkkkkkkkfqk", "12-15 s: cxghswzsbttmvpscr", "3-4 n: nnkn", "7-17 w: wwlwnxwxwwswwwwwg", "3-10 h: hhvhhhrhhhfghhcn", "10-11 l: lllxlllllllsl", "5-7 d: ddbddtddv", "5-15 g: ggtggqggggzggggwggv", "2-10 r: mrpdmtrzsrngstsr", "9-15 t: bsbptttttqwzmmtgbgm", "8-10 q: qqqqqqqqqqq", "6-9 h: hlrbcsnhh", "12-17 f: ffkjfffffffnftfmf", "11-12 s: ssssssssssss", "9-15 k: hpkskfkkfkkkkbkk", "8-18 b: bbbbbbdbbbbbbjbbjw", "2-14 p: nppppkkppppppw", "11-12 s: sxhtlrtnbgss", "8-10 q: lnwqqqqqqsqhgqgbq", "2-3 l: llll", "14-17 d: gdddtlfmrdscgsqcmkm", "5-8 w: tmmskwpq", "16-17 b: cbbbpzbbrrbnkbbbbgj", "14-17 b: bbbjbbbbbbbxbvbdszbb", "9-10 q: qqqqqtqqqqr", "5-6 t: ctgttztrtn", "2-3 f: gcfgw", "3-8 d: ddzddddsddddddd", "15-16 g: ggggggjggggggggg", "4-10 m: mcmmmmmmmmmvmmmm", "11-15 p: ptppxjppppmkgpvdp", "12-13 n: nnnnnnnfnnnrn", "1-7 t: tnbwtttmtt", "1-14 c: ndncvjbkcmcjplcp", "1-2 f: fffh", "14-17 t: llzkwltkmdntwndqs", "4-7 w: bffzzwdwtkwwv", "4-8 p: zpbpqpppxpb", "8-14 n: fdnnznhnnlhmtn", "7-14 l: vltrjnzhplkshlt", "1-2 d: ddbhsjtv", "2-10 d: 
dddddbzrdrdd", "4-10 p: pkppshsfmp", "10-11 g: gmgggzvzmgjgggzqxggd", "2-4 j: fjvjjqf", "5-19 h: cstkhhrctkxhhsfzpph", "6-16 s: sssssssssssssssks", "17-18 v: vvvvvvvvsvvvvvvvtv", "2-5 j: zjjjjj", "2-3 r: xrrrqlxl", "17-19 c: cccrcccgcwcchccccdcc", "7-14 l: lllllllllllllcsl", "12-14 p: jlmhpjgdzjkpnbmnbk", "17-18 n: nnnnnnnnnnnnnnnnqv", "8-10 b: lqzrtbzbrb", "8-11 x: xgxxfxxxxkxx", "5-9 x: xxxsnxnxk", "2-11 t: stxbxwssmgthttx", "6-13 t: ktbktrtqcnlrgtvtwt", "10-12 q: qqqqqqqqqrqq", "14-15 q: znptzmgllrfrqrf", "2-13 f: ffffffcffffff", "9-10 h: hrbhnhhhhhhhh", "8-14 k: lskskkkfzkkkkmkkm", "1-3 g: kbmswhmghvwvnwxzzk", "2-4 v: vttv", "2-3 h: qhhmhghbh", "6-13 t: ttgtttttktttdttt", "7-9 v: vvrrvvvvvv", "5-6 z: hcfwdr", "5-13 g: sgmgggcggggggjrggggk", "10-14 k: kzkkkkkkdskkktkkk", "1-2 j: ptjj", "4-11 b: xnbbbbnbbbb", "14-16 b: bbbbbbbbbbbbbkbc", "4-12 w: mwrwrcpwkpzwxrj", "5-6 l: qlndzlcvw", "1-2 s: ksxk", "4-5 p: spgwql", "7-10 j: ljjjjvxjhzjjjjrj", "6-14 f: ffffffffffffff", "11-15 d: ddddddddddzdpdn", "3-5 t: ttmtz", "6-7 x: kxvxxxx", "3-6 m: mhmklm", "4-8 h: bnhjhphgqxphpnthh", "3-4 w: gwpq", "10-13 t: ttvttjhttcttrtthtb", "1-5 f: ffffg", "6-14 v: rtjjvpbvnvjwlvktvvf", "8-9 n: nnknnnxmnnn", "9-10 v: vvsvvsvvqcvvpp", "9-11 x: xxxxxxxxxwl", "3-6 h: bwqxnrhkhhwhhh", "4-8 l: lvllrlcf", "1-2 m: gtrm", "2-4 z: zzzz", "6-7 j: jjjjjjj", "15-16 g: sggggggggqgclgggmggw", "7-9 v: dvmskjsczkvjcpsv", "3-11 d: hmdrdfdqrddzdddjdd", "7-9 m: mmmwfmmmtmm", "6-8 x: lxsbxmlx", "3-5 q: qqqxq", "5-10 n: nnnnznnnnrn", "4-6 m: zmmbzg", "10-11 c: cpdndvlfltc", "2-4 k: kzkkk", "1-14 m: jmmmmmzmqtmsmmnmm", "3-13 h: gwhhrlbstldmh", "2-5 l: rrlhdqkzst", "1-6 s: sqgsjs", "3-4 h: hhgpb", "11-16 l: llllllllllllllwkllll", "7-11 j: gjjjtvhjjpj", "14-15 h: hjjvmbhfjhhxkbh", "1-2 t: rwxgz", "13-18 p: pppppppppppppppppppp", "1-14 x: xxrxxjxhxxxxxxbxxx", "4-6 b: fbzsbpbbb", "12-16 j: fjjgmnjwbvhjzzzs", "6-7 m: mmmmmtq", "12-16 n: nnmnnpnnnnnknqxkjnjn", "5-8 w: wmwwwwpwlwllzwkcw", "7-10 q: gbpsnqwqfm", "10-13 x: vxxxxxxxxxdqnxx", "7-12 q: qqqqqqqqqqqqqqq", "1-2 n: snnn", "6-10 l: lljllklllwp", "3-4 b: bsnjzbb", "13-14 g: tggztfgdggngmglgg", "6-16 l: bkcwjlwcnfwthlll", "8-11 s: sssssssvsss", "3-4 k: xmthtrcx", "7-8 h: wpmjhbgg", "4-5 p: pcpkvp", "14-15 s: ssrssssssssssjs", "14-15 x: xxxzxxgxxtxxxxqx", "6-7 m: nwmfmxmm", "6-8 k: hsknkmvhkgkkfzkjf", "4-5 s: sssms", "7-9 g: gfgqldxgxdjzglcgg", "10-11 k: ckkthkzpdrfv", "1-14 p: jptppkcppjpppppppp", "10-11 x: pxkccxpxdsq", "2-8 x: xsgxxxxvgxxk", "6-14 j: jjjjjzjjjjjjjtjjjj", "7-10 x: rxxnxrzgxxd", "6-12 g: dmgggpgggwczggghggm", "3-6 h: hdhjhhhhchh", "11-12 r: zrrkcrrrrrlh", "7-9 v: vhqvlvwvzqwqvrxvjnf", "1-5 r: rvmjr" ] valid_pass = 0 for line in (policy): num,name,letters = line.split() pos1,pos2 = num.split("-") value = name[0] pos1 = int(pos1) - 1 pos2 = int(pos2) - 1 if letters[pos1] == value and letters[pos2] == value : pass elif letters[pos1] != value and letters[pos2] != value: pass elif letters[pos1] == value or letters[pos2] == value: valid_pass += 1 print valid_pass
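The record above ends with a Python 2-style snippet (print valid_pass; the record's Python AST quality signals below are null, consistent with a Python 3 parse failure). A minimal Python 3 sketch of the same two-position check, assuming the entries are collected in a list named policy (truncated here to two samples):

# Python 3 sketch of the validator closing the record above.
# `policy` stands in for the full list of "lo-hi letter: password" entries.
policy = ["6-9 p: jppjpspplm", "3-4 s: brgqssz"]  # truncated sample

valid_pass = 0
for line in policy:
    positions, letter, password = line.split()
    pos1, pos2 = (int(n) - 1 for n in positions.split("-"))  # 1-based -> 0-based
    target = letter[0]  # drop the trailing ':'
    # valid when the letter appears at exactly one of the two positions
    if (password[pos1] == target) != (password[pos2] == target):
        valid_pass += 1
print(valid_pass)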
23.765336
59
0.663007
4,079
24,407
3.966413
0.261829
0.004821
0.000927
0.002349
0.006304
0.006304
0.006304
0.006304
0.006304
0.006304
0
0.128891
0.128693
24,407
1,026
60
23.788499
0.631901
0.003974
0
0.001969
0
0
0.81734
0
0
0
0
0
0
0
null
null
0.004921
0
null
null
0.000984
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
0f5166632f87139e788fa82b75befacc268c0b0f
3,129
py
Python
Pandas_test/test1.py
Grade-pan/python-base
bcb69052a6e9299dee45b91468e30b1305d82db3
[ "MulanPSL-1.0" ]
3
2021-12-05T02:57:21.000Z
2022-01-02T01:23:41.000Z
Pandas_test/test1.py
Grade-pan/python-base
bcb69052a6e9299dee45b91468e30b1305d82db3
[ "MulanPSL-1.0" ]
null
null
null
Pandas_test/test1.py
Grade-pan/python-base
bcb69052a6e9299dee45b91468e30b1305d82db3
[ "MulanPSL-1.0" ]
null
null
null
import pandas as pd import numpy as np # s = pd.Series([1, 2, 3, np.nan], index=['A', 'B', 'C', 'D']) # print(s) # dates = pd.date_range('20191029', periods=5) # print(dates) # df = pd.DataFrame(np.random.rand(5, 5), columns=['a', 'b', 'c', 'd', 'e']) # print(df) # df1 = pd.DataFrame({ # 'A': 1, # 'B': pd.date_range('20191029', periods=4), # 'C': pd.Series(np.arange(4)), # 'D': pd.Categorical(['test', 'train', 'test', 'train']) # }) # print(df1) # print(df1.dtypes) # print(df1.index) # print(df1.columns) # print(df1.values) # noteSeries = pd.Series(['C', 'D', 'E', 'F', 'G', 'A', 'B']) # index = [1, 2, 3, 4, 5, 6, 7] # weekdaySeries = pd.Series(['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']) # index = [1, 2, 3, 4, 5, 6, 7] # df2 = pd.DataFrame([noteSeries, weekdaySeries]) # print(df2) # # df2["No."] = pd.Series([1, 2, 3, 4, 5, 6, 7]) # # print('df2:\n{}\n'.format(df2)) # # df3 = pd.DataFrame({"note": ["C", "D", "E", "F", "G", "A", "B"], # "weekday": ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]}) # print("df3:\n{}\n".format(df3)) # del df3["note"] # print('df3:\n{}\n'.format(df3)) # df1 = pd.DataFrame(np.arange(400).reshape(20, 20)) # df1.columns = ['column1', 'column2', 'column3', 'column4', 'column5', 'column6', 'column7', 'column8', 'column9', # 'column10', 'column10', 'column12', 'column13', 'column14', 'column15', 'column16', 'column17', # 'column18', # 'column19', 'column20'] # df1.index = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T'] # print(df1) # print(df1.index) # print(df1.columns) # print(df1.iloc[[1], 0]) # print(df1.loc[['B'], 'column1']) # df1 = pd.read_excel('D:\\China_weather\\db_weather.xlsx') # print(df1) # print(df1.iloc[[128], np.arange(9)]) # print(df1.describe()) # 数字总结 # print(df1.T) # 数据翻转 # url = 'http://www.weather.com.cn/textFC/xn.shtml' # dfs = pd.read_html(url) # print(dfs) from PIL._imaging import display # print(7 in [1, 2, 3, 4, 5]) # print(set([1, 2, 3]) & set([2, 3, 4])) # 交集 # print(set([1, 2, 3]) | set([2, 3, 4])) # 并集 # number = pd.read_csv('H:\\pandas中文参考手册 (cookbook翻译版) 完整版PDF\\pandas-cookbook-code-notes_jb51\\t1.csv') # # A = number['A'] # # print(A) # # A = A.isnull().sum() # # print(A) # print(number.tail()) # print(number.columns) # print(number.count()) # print(number.describe()) # print(number.isnull().any().any()) # print(number != np.nan) # print(number.shape) # print(number.T) # print(number.info()) # print(number.describe(include=[np.number], percentiles=[.01, .05, .10, .25, .5, .75, .9, .95, .99]).T) # A = number.memory_usage(deep=True) # print(A) # # 改变数据类型 由int64变为int8 # # 任何数值类型的列,只要有一个缺失值,就会成为浮点型;这列中的任何整数都会强 制成为浮点型 # number['A'] = number['A'].astype(np.int8) # number['B'] = number['B'].astype(np.int8) # number['C'] = number['C'].astype(np.int8) # number['1'] = number['1'].astype(np.int8) # A = number.memory_usage(deep=True) # print(A) # print(number.dtypes) data_url = "https://kyfw.12306.cn/otn/leftTicket/init?linktypeid=dc" # 填写url读取 df = pd.read_html(data_url) print(df)
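The only statements that actually run in the record above are the final pd.read_html(data_url) and print(df); everything else is commented out. Worth noting: pandas.read_html returns a list of DataFrames, one per <table> element in the page, and requires an HTML parser such as lxml or html5lib to be installed. A hedged sketch (the URL is illustrative and network access is assumed):

import pandas as pd

# read_html scrapes every <table> element and returns a list of DataFrames
tables = pd.read_html("https://en.wikipedia.org/wiki/Python_(programming_language)")
print(len(tables))       # how many tables were found
print(tables[0].head())  # first rows of the first table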
35.556818
115
0.569831
475
3,129
3.724211
0.364211
0.063313
0.011871
0.009045
0.224421
0.166761
0.14528
0.109666
0.01922
0
0
0.064783
0.151486
3,129
87
116
35.965517
0.601507
0.886545
0
0
0
0
0.209924
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
0f58e88aaabefc0b08a28dda07a7046980d8c0bd
262
py
Python
src/iert_news/migrations/0005_auto_20190211_1640.py
MetricsGroup/IERT-Webapp
9e43f1775767412898f9340b9cc84196eb4abfdb
[ "MIT" ]
3
2019-04-25T11:19:22.000Z
2020-05-10T20:41:12.000Z
src/iert_news/migrations/0005_auto_20190211_1640.py
MetricsGroup/IERT-Webapp
9e43f1775767412898f9340b9cc84196eb4abfdb
[ "MIT" ]
5
2020-06-17T05:16:27.000Z
2022-01-13T02:15:56.000Z
src/iert_news/migrations/0005_auto_20190211_1640.py
MetricsGroup/IERT-Webapp
9e43f1775767412898f9340b9cc84196eb4abfdb
[ "MIT" ]
3
2020-06-13T10:40:27.000Z
2021-10-13T15:45:50.000Z
# Generated by Django 2.1.5 on 2019-02-11 16:40 from django.db import migrations class Migration(migrations.Migration): dependencies = [("iert_news", "0004_auto_20190211_1631")] operations = [migrations.RenameModel(old_name="news", new_name="new")]
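A migration like the one above is normally applied with manage.py migrate; a minimal sketch of driving it programmatically instead (assumes DJANGO_SETTINGS_MODULE already points at the project's settings):

# Hedged sketch: applying the RenameModel migration above from Python.
import django
from django.core.management import call_command

django.setup()  # requires DJANGO_SETTINGS_MODULE to be configured
call_command("migrate", "iert_news", "0005_auto_20190211_1640")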
23.818182
74
0.736641
37
262
5.054054
0.810811
0
0
0
0
0
0
0
0
0
0
0.136564
0.133588
262
10
75
26.2
0.687225
0.171756
0
0
1
0
0.181395
0.106977
0
0
0
0
0
1
0
false
0
0.25
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
0f78901716c866eb2c049c99786575c2fe82ea8b
1,172
py
Python
setup.py
andrewdodd/docopt_uc
2540ce6121c37f59e2c82cd5dfc59f50784f40cb
[ "MIT" ]
6
2019-08-25T16:14:41.000Z
2022-01-24T02:24:19.000Z
setup.py
andrewdodd/docopt_uc
2540ce6121c37f59e2c82cd5dfc59f50784f40cb
[ "MIT" ]
null
null
null
setup.py
andrewdodd/docopt_uc
2540ce6121c37f59e2c82cd5dfc59f50784f40cb
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages setup( name="docopt-uc", description="A docopt library suitable for microcontrollers.", license="MIT", version="1.0.2", author="Andrew Dodd", author_email="andrew.john.dodd@gmail.com", maintainer="Andrew Dodd", maintainer_email="andrew.john.dodd@gmail.com", keywords=["docopt", "microcontroller", "cli"], packages=find_packages(where="src"), package_dir={"": "src"}, package_data={'docopt_uc': ['templates/*.c', 'templates/*.h']}, zip_safe=False, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ], install_requires=["docopt", "jinja2"], entry_points={'console_scripts': [ 'docopt-uc = docopt_uc.docopt_uc:main', ]})
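The entry_points block above wires a docopt-uc console script to docopt_uc.docopt_uc:main, so installing the package yields a docopt-uc executable on the PATH. A hedged sketch of the equivalent direct invocation (the import path is taken verbatim from the entry point, not verified against the package):

# What the installed `docopt-uc` console script boils down to.
from docopt_uc.docopt_uc import main  # path assumed from the setup() entry point

if __name__ == "__main__":
    main()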
33.485714
67
0.612628
128
1,172
5.507813
0.585938
0.056738
0.177305
0.184397
0.153191
0.076596
0
0
0
0
0
0.016322
0.21587
1,172
34
68
34.470588
0.750816
0.035836
0
0
0
0
0.510638
0.067376
0
0
0
0
0
1
0
true
0
0.033333
0
0.033333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
7e1f2dd8db4f77b587d25ee0a26186e9bb8f397a
418
py
Python
appserver/datetimefield/admin.py
sinag/SWE574-Horuscope
9725dd356cbfd19f0ce88d4a208c872be765bd88
[ "MIT" ]
null
null
null
appserver/datetimefield/admin.py
sinag/SWE574-Horuscope
9725dd356cbfd19f0ce88d4a208c872be765bd88
[ "MIT" ]
null
null
null
appserver/datetimefield/admin.py
sinag/SWE574-Horuscope
9725dd356cbfd19f0ce88d4a208c872be765bd88
[ "MIT" ]
1
2020-08-07T12:54:51.000Z
2020-08-07T12:54:51.000Z
from django.contrib import admin from datetimefield.models import DateTimeField class DateTimeFieldAdmin(admin.ModelAdmin): fieldsets = [ (None, {'fields': ['instance', 'property', 'value']}), ] list_display = ('id', 'instance', 'property', 'value') list_filter = ['instance', 'property'] search_fields = ['instance', 'property'] admin.site.register(DateTimeField, DateTimeFieldAdmin)
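The same registration can be written with Django's admin.register decorator, which replaces the trailing admin.site.register call; a short sketch:

# Decorator form of the registration in the record above.
from django.contrib import admin
from datetimefield.models import DateTimeField

@admin.register(DateTimeField)
class DateTimeFieldAdmin(admin.ModelAdmin):
    list_display = ("id", "instance", "property", "value")
    list_filter = ["instance", "property"]
    search_fields = ["instance", "property"]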
26.125
62
0.686603
39
418
7.282051
0.589744
0.225352
0.15493
0.176056
0
0
0
0
0
0
0
0
0.162679
418
15
63
27.866667
0.811429
0
0
0
0
0
0.196172
0
0
0
0
0
0
1
0
false
0
0.2
0
0.7
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7e254c7c7fbd33244e8e6021609ebce5b6dca24b
654
py
Python
supriya/ugens/Dbufwr.py
deeuu/supriya
14fcb5316eccb4dafbe498932ceff56e1abb9d27
[ "MIT" ]
null
null
null
supriya/ugens/Dbufwr.py
deeuu/supriya
14fcb5316eccb4dafbe498932ceff56e1abb9d27
[ "MIT" ]
null
null
null
supriya/ugens/Dbufwr.py
deeuu/supriya
14fcb5316eccb4dafbe498932ceff56e1abb9d27
[ "MIT" ]
null
null
null
import collections from supriya import CalculationRate from supriya.ugens.DUGen import DUGen class Dbufwr(DUGen): """ A buffer-writing demand-rate UGen. :: >>> dbufwr = supriya.ugens.Dbufwr( ... buffer_id=0, ... source=0, ... loop=1, ... phase=0, ... ) >>> dbufwr Dbufwr() """ ### CLASS VARIABLES ### __documentation_section__ = "Demand UGens" _ordered_input_names = collections.OrderedDict( [("source", 0.0), ("buffer_id", 0.0), ("phase", 0.0), ("loop", 1.0)] ) _valid_calculation_rates = (CalculationRate.DEMAND,)
19.818182
76
0.547401
65
654
5.307692
0.476923
0.017391
0.052174
0
0
0
0
0
0
0
0
0.026258
0.301223
654
32
77
20.4375
0.728665
0.344037
0
0
0
0
0.097297
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
7e2a07fc37e09f74448ba47450f84919292d9d08
360
py
Python
creational/prototype.py
kimgea/design_patterns
def656f50c4f26e25f8a0d2920041c2f002dc2d0
[ "MIT" ]
null
null
null
creational/prototype.py
kimgea/design_patterns
def656f50c4f26e25f8a0d2920041c2f002dc2d0
[ "MIT" ]
null
null
null
creational/prototype.py
kimgea/design_patterns
def656f50c4f26e25f8a0d2920041c2f002dc2d0
[ "MIT" ]
null
null
null
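The next record (creational/prototype.py) implements the prototype pattern by cloning with copy.deepcopy. A short sketch of why deepcopy, rather than copy.copy, is the safe default when the prototype holds mutable state:

import copy

original = {"tags": ["a", "b"]}
shallow = copy.copy(original)    # new dict, but the inner list is shared
deep = copy.deepcopy(original)   # fully independent clone

original["tags"].append("c")
print(shallow["tags"])  # ['a', 'b', 'c'] -- mutated through the shared list
print(deep["tags"])     # ['a', 'b']      -- unaffected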
""" Lol, only need to use copy.deepcopy in python """ import copy class A(object): def __init__(self,text="test"): self.text = text def __str__(self): return self.text def clone(self): return copy.deepcopy(self) a = A("test2") b = copy.deepcopy(a) c = a.clone() print (a) print (b) print (c)
13.846154
49
0.558333
51
360
3.784314
0.490196
0.186529
0
0
0
0
0
0
0
0
0
0.004
0.305556
360
26
50
13.846154
0.768
0.125
0
0
0
0
0.029605
0
0
0
0
0
0
1
0.214286
false
0
0.071429
0.142857
0.5
0.214286
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
7e2da4166b578747734c00a74db2a1363372b6bd
1,985
py
Python
com/wy/study/S_Test.py
mygodness100/Python
bc3f979156d5de5e698371cc23885323cbc32d9d
[ "Apache-2.0" ]
null
null
null
com/wy/study/S_Test.py
mygodness100/Python
bc3f979156d5de5e698371cc23885323cbc32d9d
[ "Apache-2.0" ]
null
null
null
com/wy/study/S_Test.py
mygodness100/Python
bc3f979156d5de5e698371cc23885323cbc32d9d
[ "Apache-2.0" ]
null
null
null
""" Created on 2018-05-13 @author: wanyang The twisted framework does not support Python 3 yet (it simply has not been ported; whether it is supported by now can be checked on the official site); it is similar to httpclient. Data structures: frozenset (immutable set), deque (double-ended queue), queue (single-ended queue). When a Python file is executed directly, __name__ read inside that file is the string "__main__"; when the file is called by another py file, __name__ is that file's full path. Deep vs. shallow copy: a deep copy allocates a fresh block of memory for the new variable, while a shallow copy keeps the original variable's address. Exceptions: try: except Exception, e: finally: Wherever a method or other block needs an operation but you do not want to perform one, use the pass keyword; the block cannot be left completely empty or an error is raised. Exceptions and logging ship with Python; whether there are dedicated frameworks is left to check. zip: forcibly merges two tuples into a single map-like sequence of pairs; it can be cast to a list and iterated with enumerate. Iterating an array, list, or map with enumerate returns a map-like object per element: the first item is the element's position in the sequence, the second is the element's value. For tuples merged with zip, if the inputs differ in length the shorter one wins and the surplus elements are discarded. __init__: the __init__ file automatically generated under each package lets every file in that package be imported and used by other packages; without this file, other packages cannot use the files under it. Every file also has a built-in __name__: when the file is run directly, __name__ == "__main__"; when it is called by another file, __name__ equals that file's name. __all__: written in the auto-generated __init__ file as a tuple of importable names; anything not in that tuple cannot be used even if imported, including the methods inside its classes. Building your own module that installs into the system: look it up online; first build an installer package, then package and install it, after which it can be imported with import just like a built-in package. Inside a function you cannot modify a global variable of a basic type (including str) without first declaring it with global, e.g. global a; only then can it be modified. Otherwise it can only be referenced, although objects can still be modified directly. When such a basic-type value is passed into a method as a parameter it can be modified, but the change only holds inside the method and does not affect the global. Inside a method, a += a and a = a + a do not give the same result for a parameter: the latter first defines a new variable, which no longer holds the address of the passed-in argument. This is very unlike Java, where tuples and the like cannot use the += operator; in Python every type can, which makes things highly mutable. """ # !/usr/bin/env python from functools import reduce # public import path; the original used the CPython-internal _functools # Python source files use the .py format; compiled files are .pyc and optimized files are .pyo # compile # import py_compile # py_compile("study") # optimize # python -O -m py_compile study.py # '''tab characters may be used inside triple single or double quotes print("""d, fdsfd dsfdwe """) str1 = input("Enter a parameter:") print(str1) print(__name__) # declare a matching number of variables to unpack a tuple or list directly into them aa, bb, cc = ["fdsf", "fdsgfd", "gfdg"] # a lambda expression must use the lambda keyword and be assigned to a variable, similar to a function-valued variable in js l = lambda x, y: x * y # call the lambda function l(5, 6) listArr = [1, 2, 34, 56, 6] tupl1=(1,2,34,56,6) # reduce works like recursion: its first argument can be a function, its second the list the function folds over # the function first takes values from the list as its arguments, then keeps feeding each return value together with the next element back in, until the end of the list n=reduce(lambda x,y: x*y, range(1,9)) print(n)
25.779221
75
0.711335
257
1,985
5.447471
0.661479
0.005714
0.02
0.012857
0.024286
0
0
0
0
0
0.022166
0.018278
0.145592
1,985
76
76
26.118421
0.78125
0.831738
0
0
0
0
0.162939
0
0
0
0
0
0
1
0
false
0
0.066667
0
0.066667
0.266667
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7e5a52d8c0a3d15fabe0b6b863a2f4eae377fcd8
4,001
py
Python
test/test_links_generator.py
cuamckuu/tg-inviter
80b8d4664d1e2628b46ac1a6d58f8495c408d4b4
[ "MIT" ]
20
2020-08-24T19:11:38.000Z
2022-03-17T19:24:50.000Z
test/test_links_generator.py
bequirky12/tg-inviter
5cad1bc1afce101be03a2cee805931e77b7f6842
[ "MIT" ]
null
null
null
test/test_links_generator.py
bequirky12/tg-inviter
5cad1bc1afce101be03a2cee805931e77b7f6842
[ "MIT" ]
8
2021-02-05T11:51:21.000Z
2022-03-22T08:48:44.000Z
import unittest from tginviter import generate_invite_link, get_random_token, \ generate_joinchat_link class TestLinksGeneration(unittest.TestCase): @classmethod def setUpClass(cls): cls.bot_name = "test_bot" cls.token = get_random_token() def test_keywords_param_only(self): with self.assertRaises(TypeError): generate_invite_link(self.bot_name, self.token, 2) generate_invite_link(self.bot_name, max_uses=2) def test_proto_exist(self): with self.assertRaises(ValueError): generate_invite_link(self.bot_name, proto="qwe") generate_invite_link(self.bot_name, token=self.token, proto="tg") generate_invite_link(self.bot_name, token=self.token, proto="http") generate_invite_link(self.bot_name, token=self.token, proto="https") def test_return_token(self): link, token = generate_invite_link(self.bot_name) self.assertTrue(link.endswith(token)) def test_generate_full_deeplink(self): http = f"http://telegram.me/{self.bot_name}?start={self.token}" https = f"https://telegram.me/{self.bot_name}?start={self.token}" link, _ = generate_invite_link(self.bot_name, token=self.token, short=False) self.assertEqual(link, https) link, _ = generate_invite_link( self.bot_name, token=self.token, short=False, proto="https" ) self.assertEqual(link, https) link, _ = generate_invite_link( self.bot_name, token=self.token, short=False, proto="http" ) self.assertEqual(link, http) def test_generate_short_deeplink(self): http = f"http://t.me/{self.bot_name}?start={self.token}" https = f"https://t.me/{self.bot_name}?start={self.token}" link, _ = generate_invite_link(self.bot_name, token=self.token) self.assertEqual(link, https) link, _ = generate_invite_link(self.bot_name, token=self.token, short=True) self.assertEqual(link, https) link, _ = generate_invite_link(self.bot_name, token=self.token, proto="http") self.assertEqual(link, http) link, _ = generate_invite_link( self.bot_name, token=self.token, proto="https" ) self.assertEqual(link, https) def test_random_tokens(self): token1 = get_random_token() token2 = get_random_token() self.assertNotEqual(token1, token2) def test_random_deeplinks(self): https = f"https://t.me/{self.bot_name}?start=" link1, token1 = generate_invite_link(self.bot_name) self.assertTrue(link1.startswith(https)) link2, token2 = generate_invite_link(self.bot_name) self.assertTrue(link1.startswith(https)) self.assertNotEqual(token1, token2) self.assertNotEqual(link1, link2) def test_generate_tg_proto_deeplink(self): tg = f"tg://resolve?domain={self.bot_name}&start={self.token}" link, _ = generate_invite_link(self.bot_name, token=self.token, proto="tg") self.assertEqual(link, tg) link, _ = generate_invite_link( self.bot_name, token=self.token, proto="tg", short=True ) self.assertEqual(link, tg) link, _ = generate_invite_link( self.bot_name, token=self.token, proto="tg", short=False ) self.assertEqual(link, tg) def test_joinchat_short_link(self): https = f"https://t.me/joinchat/{self.token}" http = f"http://t.me/joinchat/{self.token}" link = generate_joinchat_link(self.token) self.assertEqual(link, https) link = generate_joinchat_link(self.token, short=True) self.assertEqual(link, https) def test_joinchat_full_link(self): https = f"https://telegram.me/joinchat/{self.token}" http = f"http://telegram.me/joinchat/{self.token}" link = generate_joinchat_link(self.token, short=False) self.assertEqual(link, https)
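The assertions above pin down the exact URL shapes tginviter is expected to produce. A hedged re-implementation for illustration only (sketch_invite_link is a hypothetical stand-in; the real generate_invite_link lives in the tginviter package):

import secrets

def sketch_invite_link(bot_name, token, proto="https", short=True):
    # Mirrors the URL shapes asserted in the tests above; illustrative only.
    if proto == "tg":
        return f"tg://resolve?domain={bot_name}&start={token}"
    host = "t.me" if short else "telegram.me"
    return f"{proto}://{host}/{bot_name}?start={token}"

print(sketch_invite_link("test_bot", secrets.token_urlsafe(16)))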
33.90678
85
0.658585
511
4,001
4.919765
0.121331
0.072395
0.109387
0.166269
0.728719
0.684964
0.616945
0.57677
0.506762
0.494431
0
0.005128
0.220195
4,001
117
86
34.196581
0.800641
0
0
0.26506
1
0
0.12072
0.013497
0
0
0
0
0.253012
1
0.13253
false
0
0.024096
0
0.168675
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7e677acbbb11f5ed4eec843f7cf6ab3defcf50c6
1,583
py
Python
VGOS/make_ready_message.py
varenius/oso
eda6247b55796b3e82be6214acd55daf81676836
[ "MIT" ]
null
null
null
VGOS/make_ready_message.py
varenius/oso
eda6247b55796b3e82be6214acd55daf81676836
[ "MIT" ]
null
null
null
VGOS/make_ready_message.py
varenius/oso
eda6247b55796b3e82be6214acd55daf81676836
[ "MIT" ]
null
null
null
#!/usr/bin/env python import sys logfile = "/usr2/log/"+sys.argv[1]+".log" lines = [] for line in open(logfile): lines.append(line) def get_lines(log,pattern,nlines): revlog = list(reversed(log)) ans = [] # Loop through logfile backwards to find the latest entry for i,d in enumerate(revlog): if pattern in d: for n in range(0,nlines): ans.append(revlog[i-n]) break return ans def print_lines(ls): for l in ls: print str(l), print print "READY MESSAGE DATA FROM LOGFILE ", logfile print print "DBBC3 timing:" time = get_lines(lines, "#dbbcn#dbbc3/time/",26) print_lines(time) print print "Pointing:" point = get_lines(lines, "#fivpt#xoffset",1) print_lines(point) print print "CDMS:" cdms = get_lines(lines, "/CDMS/",1) print_lines(cdms) print print "Weather:" wx = get_lines(lines, "/wx/",1) print_lines(wx) print print "SEFD:" header = get_lines(lines, "Center Comp Tsys SEFD ",1) print_lines(header) sefda = get_lines(lines, "l 3432.40",1) print_lines(sefda) sefdb = get_lines(lines, "r 3432.40",1) print_lines(sefdb) sefdc = get_lines(lines, "l 5672.40",1) print_lines(sefdc) sefdd = get_lines(lines, "r 5672.40",1) print_lines(sefdd) sefde = get_lines(lines, "l 6824.40",1) print_lines(sefde) sefdf = get_lines(lines, "r 6824.40",1) print_lines(sefdf) sefdg = get_lines(lines, "l 10664.40",1) print_lines(sefdg) sefdh = get_lines(lines, "r 10664.40",1) print_lines(sefdh) print print "First source:" wx = get_lines(lines, "/source/",1) print_lines(wx) print
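The script above is Python 2 (bare print statements; the record's Python AST quality signals below are null). A Python 3 sketch of its two helpers, preserving the backwards-scan semantics:

# Python 3 port sketch of the helpers in the record above.
def get_lines(log, pattern, nlines):
    revlog = list(reversed(log))
    ans = []
    for i, entry in enumerate(revlog):  # walk backwards to the latest match
        if pattern in entry:
            ans.extend(revlog[i - n] for n in range(nlines))
            break
    return ans

def print_lines(lines):
    for line in lines:
        print(str(line), end="")  # log entries keep their trailing newlines
    print()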
21.391892
61
0.67151
254
1,583
4.066929
0.326772
0.116167
0.176186
0.100678
0.168441
0
0
0
0
0
0
0.054096
0.182565
1,583
73
62
21.684932
0.744204
0.048642
0
0.166667
0
0
0.175532
0
0
0
0
0
0
0
null
null
0
0.016667
null
null
0.516667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
7e7e4fdebc925dc06d87c1a0fb80f0ae90da5a87
15,495
py
Python
nibbler/tests/test_nibbler.py
rrodakowski/nibbler
833b82b6eee3a118808886a8165def519dbab04f
[ "MIT" ]
null
null
null
nibbler/tests/test_nibbler.py
rrodakowski/nibbler
833b82b6eee3a118808886a8165def519dbab04f
[ "MIT" ]
null
null
null
nibbler/tests/test_nibbler.py
rrodakowski/nibbler
833b82b6eee3a118808886a8165def519dbab04f
[ "MIT" ]
null
null
null
# python3 library import unittest from unittest.mock import Mock, patch import logging # dependency imports import feedparser from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText # nibbler imports from nibbler.nibbler import NibblerConfig from nibbler.nibbler import HTMLNormalizer #from nibbler.nibbler import FeedAcquirer #from nibbler.nibbler import DatabaseAccess import nibbler.nibbler class NibblerTestCase(unittest.TestCase): """Base class for all Nibbler tests.""" arguments = {'from_email': 'randall.rodakowski@gmail.com', 'to_email': 'randall.rodakowski@gmail.com', 'log_dir': '/app-data/logs/nibbler-logs', 'sub_dir': '/app-bin', 'smtp_ini': './nibbler/tests/smtp_testdata.ini'} def assertCostEqual(self, p, cost): """Custom assert here: `p`'s cost is equal to `cost`.""" self.assertEqual(p.cost(), cost) class TestNibblerConfig(NibblerTestCase): def setUp(self): self.config = NibblerConfig(**self.arguments) def test_smtp_ini_with_values(self): smtp_config = self.config.get_smtp_config() self.assertEqual(smtp_config['username'], "sample_username") self.assertEqual(smtp_config['password'], "Sample_Password/") self.assertEqual(smtp_config['host'], "hostname.test.com") self.assertEqual(smtp_config['port'], '587') def test_smtp_ini_without_values(self): arguments = {'from_email': 'randall.rodakowski@gmail.com', 'to_email': 'randall.rodakowski@gmail.com', 'sub_dir': '/app-bin'} no_smtp_config = NibblerConfig(**arguments) self.assertEqual(no_smtp_config.get_smtp_config(), None) def test_log_dir(self): self.assertEqual(self.config.get_log_dir(), "/app-data/logs/nibbler-logs") def test_database_connection(self): self.assertEqual(self.config.get_database_connection(), "sqlite:///nibbler.db") def test_email_image_styles(self): values = self.config.get_email_image_styles() self.assertEqual(values["width"], 480) self.assertEqual(values["height"], 320) self.assertEqual(values["border"], 0) def tearDown(self): pass class TestHTMLNormalizer(NibblerTestCase): def setUp(self): self.normalizer = HTMLNormalizer(NibblerConfig(**self.arguments)) def test_clean_html(self): input_html = '<p>I <br/><span id="id-styles-text-to-remove">added some text </span><a class="my-class-to-remove" href="http://www.jamesaltucher.com/2017/03/matt-mullenweg/">joined in for the James Altucher<img width="480" id="id-to-remove" class=" size-full wp-image-47258 aligncenter" src="https://i1.wp.com/ma.tt/files/2017/04/ultralight.gif?resize=500%2C288&amp;ssl=1" alt="ultralight.gif"> podcast in an episode that covered a lot of ground</a>. It just needs to be two-way.</p>' clean_html = '<p>I added some text <a href="http://www.jamesaltucher.com/2017/03/matt-mullenweg/">joined in for the James Altucher<img src="https://i1.wp.com/ma.tt/files/2017/04/ultralight.gif?resize=500%2C288&amp;ssl=1" alt="ultralight.gif"> podcast in an episode that covered a lot of ground</a>. It just needs to be two-way.</p>' self.assertEqual(clean_html, self.normalizer.clean_html(input_html)) def test_add_email_markup(self): clean_html = '<p>I added some text <a href="http://www.jamesaltucher.com/2017/03/matt-mullenweg/">joined in for the James Altucher<img src="https://i1.wp.com/ma.tt/files/2017/04/ultralight.gif?resize=500%2C288&amp;ssl=1" alt="ultralight.gif"> podcast in an episode that covered a lot of ground</a>. 
It just needs to be two-way.</p>' email_html = '<p>I added some text <a href="http://www.jamesaltucher.com/2017/03/matt-mullenweg/">joined in for the James Altucher<img src="https://i1.wp.com/ma.tt/files/2017/04/ultralight.gif?resize=500%2C288&amp;ssl=1" alt="ultralight.gif" width="480" height="320" border="0"> podcast in an episode that covered a lot of ground</a>. It just needs to be two-way.</p>' self.assertEqual(email_html, self.normalizer.add_email_markup(clean_html)) def test_add_full_image_path(self): link = 'https://kottke.org/18/06/the-problem-with-action-scenes-in-dc-movies' input_html= '<p><img src="/plus/misc/images/ai-image-iso-02.jpg" alt="AI image in the dark"></p>' email_html = '<p><img src="https://kottke.org/plus/misc/images/ai-image-iso-02.jpg" alt="AI image in the dark"></p>' self.assertEqual(email_html, self.normalizer.add_full_image_path(input_html, link)) def tearDown(self): pass class TestFeedAcquirer(NibblerTestCase): def setUp(self): self.dal = Mock() self.feedacquirer = nibbler.nibbler.FeedAcquirer(self.dal, NibblerConfig(**self.arguments)) def test_parse_rss_post_no_title(self): test_feed = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <id>tag:daringfireball.net,2018:/linked//6.35263</id> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" rss_feed = feedparser.parse(test_feed) title = 'https://secure.actblue.com/donate/great_slate' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(title, article.title, msg='{}, {}'.format(title, article.title)) def test_parse_rss_post_with_title(self): test_feed = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <title>Daring Fireball post</title> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <id>tag:daringfireball.net,2018:/linked//6.35263</id> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. 
Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" rss_feed = feedparser.parse(test_feed) title = 'Daring Fireball post' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(title, article.title, msg='{}, {}'.format(title, article.title)) def test_parse_rss_post_no_guid(self): test_feed = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <title>Daring Fireball post</title> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" rss_feed = feedparser.parse(test_feed) guid = 'Daring Fireball post' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(guid, article.guid, msg='{}, {}'.format(guid, article.guid)) def test_parse_rss_post_with_guid(self): test_feed = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <id>tag:daringfireball.net,2018:/linked//6.35263</id> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. 
Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" rss_feed = feedparser.parse(test_feed) guid = 'tag:daringfireball.net,2018:/linked//6.35263' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(guid, article.guid, msg='{}, {}'.format(guid, article.guid)) def test_parse_rss_post_with_pub_date(self): test_feed = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <id>tag:daringfireball.net,2018:/linked//6.35263</id> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" rss_feed = feedparser.parse(test_feed) pub_date = '2018-10-26T03:59:00Z' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(pub_date, article.pub_date, msg='{}, {}'.format(pub_date, article.pub_date)) def test_parse_rss_post_it_should_return_boilerplate_if_empty_content(self): test_feed = """ <channel> <atom:link href="https://unchained.libsyn.com/unchained" rel="self" type="application/rss+xml"/> <title>Unchained</title> <item> <title>This Noble Family's Art Was Taken by Nazis, But Is Being Saved by NFTs - Ep.300</title> <itunes:title>This Noble Family's Art Was Taken by Nazis, But Is Being Saved by NFTs</itunes:title> <pubDate>Tue, 21 Dec 2021 08:30:00 +0000</pubDate> <guid isPermaLink="false"><![CDATA[8d3ec14d-16e6-4f62-8847-917fe13a8b7c]]></guid> <link><![CDATA[https://unchainedpodcast.com/this-noble-familys-art-was-taken-by-nazis-but-is-being-saved-by-nfts/]]></link> <itunes:image href="https://ssl-static.libsyn.com/p/assets/5/e/5/0/5e507f50ba05203140be95ea3302a6a1/Unchained-Podcast-Artwork-2000x2000.png" /> <description><![CDATA[]]></description> <content:encoded><![CDATA[]]></content:encoded> <enclosure length="33490781" type="audio/mpeg" url="https://traffic.libsyn.com/secure/unchained/Unchained_-_Ep.300_-_This_Noble_Familys_Art_Was_Taken_by_Nazis_But_Is_Being_Saved_by_NFTs.mp3?dest-id=619174" /> <itunes:duration>01:07:16</itunes:duration> <itunes:explicit>clean</itunes:explicit> <itunes:keywords /> <itunes:subtitle><![CDATA[]]></itunes:subtitle> <itunes:episode>300</itunes:episode> <itunes:episodeType>full</itunes:episodeType> </item> """ rss_feed = feedparser.parse(test_feed) article_text = '<p>No Content Provided in this article.</p>' for entry in rss_feed.entries: article = self.feedacquirer.parse_rss_post(entry) self.assertEqual(article_text, article.article_text, msg='{}, {}'.format(article_text, article.article_text)) #@patch('nibbler.nibbler.DatabaseAccess.is_post_in_db') #@patch('nibbler.nibbler.DatabaseAccess.store_post') @patch('nibbler.nibbler.DatabaseAccess') def test_store_new_content(self, mock_dal): 
feed = Mock() feed.xmlUrl = """ <feed xmlns="http://www.w3.org/2005/Atom"> <title>Daring Fireball</title> <entry> <title>Daring post</title> <link rel="alternate" type="text/html" href="https://secure.actblue.com/donate/great_slate"/> <link rel="shorturl" type="text/html" href="http://df4.us/r7j"/> <link rel="related" type="text/html" href="https://daringfireball.net/linked/2018/10/25/donate-to-the-great-slate"/> <id>tag:daringfireball.net,2018:/linked//6.35263</id> <published>2018-10-26T03:59:00Z</published> <updated>2018-10-26T04:30:33Z</updated> <content type="html" xml:base="https://daringfireball.net/linked/" xml:lang="en"> <![CDATA[ <p>The Great Slate:</p> <blockquote> <p>Tech Solidarity is endorsing thirteen candidates for Congress. Each of them is a first-time progressive candidate with no ties to the political establishment, an excellent campaign team, and a clear path to victory in a poor, rural district that is being i ]]> </content> </entry>""" feed.feed_id = 1 mock_dal.is_post_in_db.return_value = False mock_dal.store_post.return_value = True #mock_is_post_in_db.return_value.is_post_in_db.return_value = False # means we will add it #mock_store_post.return_value = True feedacquirer = nibbler.nibbler.FeedAcquirer(mock_dal, NibblerConfig(**self.arguments)) articles_stored = feedacquirer.store_new_content(feed) self.assertEqual("tag:daringfireball.net,2018:/linked//6.35263", articles_stored[0]) def tearDown(self): pass if __name__ == '__main__': unittest.main()
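test_store_new_content above hands a mocked DatabaseAccess to FeedAcquirer and asserts against the stored GUID. The underlying pattern in isolation, as a hedged sketch (Repo and store_new are illustrative stand-ins, not nibbler APIs):

from unittest.mock import Mock

class Repo:  # stand-in for a data-access layer such as DatabaseAccess
    def is_post_in_db(self, guid): ...
    def store_post(self, guid): ...

def store_new(repo, guid):
    if not repo.is_post_in_db(guid):
        repo.store_post(guid)
        return True
    return False

repo = Mock(spec=Repo)
repo.is_post_in_db.return_value = False   # force the "new post" branch
assert store_new(repo, "guid-1")
repo.store_post.assert_called_once_with("guid-1")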
56.345455
491
0.68009
2,167
15,495
4.76096
0.155053
0.030532
0.020936
0.027915
0.717166
0.674809
0.661045
0.640012
0.633518
0.633518
0
0.043848
0.17283
15,495
274
492
56.551095
0.761099
0.028783
0
0.570175
0
0.188596
0.665181
0.110786
0
0
0
0
0.096491
1
0.096491
false
0.017544
0.039474
0
0.157895
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7e7f51520c0ad9de41b4c59131f95b28303ac34a
1,542
py
Python
Sketches/MPS/BugReports/FixTests/Kamaelia/Examples/TCP_Systems/FortuneCookie_ServerClient.py
sparkslabs/kamaelia_orig
24b5f855a63421a1f7c6c7a35a7f4629ed955316
[ "Apache-2.0" ]
12
2015-10-20T10:22:01.000Z
2021-07-19T10:09:44.000Z
Sketches/MPS/BugReports/FixTests/Kamaelia/Examples/TCP_Systems/FortuneCookie_ServerClient.py
sparkslabs/kamaelia_orig
24b5f855a63421a1f7c6c7a35a7f4629ed955316
[ "Apache-2.0" ]
2
2015-10-20T10:22:55.000Z
2017-02-13T11:05:25.000Z
Sketches/MPS/BugReports/FixTests/Kamaelia/Examples/TCP_Systems/FortuneCookie_ServerClient.py
sparkslabs/kamaelia_orig
24b5f855a63421a1f7c6c7a35a7f4629ed955316
[ "Apache-2.0" ]
6
2015-03-09T12:51:59.000Z
2020-03-01T13:06:21.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://www.kamaelia.org/AUTHORS - please extend this file, # not this notice. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------- # # Simple test harness for integrating TCP clients and servers in one system, sharing selector components etc. # # from Kamaelia.Protocol.FortuneCookieProtocol import FortuneCookieProtocol from Kamaelia.Chassis.ConnectedServer import SimpleServer from Kamaelia.Internet.TCPClient import TCPClient from Kamaelia.Util.Console import ConsoleEchoer from Kamaelia.Chassis.Pipeline import Pipeline import random clientServerTestPort=random.randint(1500,1599) SimpleServer(protocol=FortuneCookieProtocol, port=clientServerTestPort).activate() Pipeline(TCPClient("127.0.0.1",clientServerTestPort), ConsoleEchoer() ).run()
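The example above binds the server to random.randint(1500, 1599), which can still collide with a port already in use. A hedged, standard-library alternative is to let the OS pick a free ephemeral port:

import socket

# Bind to port 0 and the OS assigns a free port; read it back for reuse.
with socket.socket() as s:
    s.bind(("127.0.0.1", 0))
    free_port = s.getsockname()[1]
print(free_port)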
36.714286
109
0.738651
193
1,542
5.901554
0.595855
0.052678
0.015803
0.028095
0
0
0
0
0
0
0
0.018839
0.139429
1,542
41
110
37.609756
0.839488
0.629702
0
0
0
0
0.016514
0
0
0
0
0
0
1
0
false
0
0.545455
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
7eac2cdbbfc7e81bb6e3f6db4a35a537f92f27b4
28,193
py
Python
python/sandbox/property.py
geometer/sandbox
373ec96e69df76744a19b51f7caa865cbc6b58cd
[ "Apache-2.0" ]
6
2020-04-19T11:26:18.000Z
2021-06-21T18:42:51.000Z
python/sandbox/property.py
geometer/sandbox
373ec96e69df76744a19b51f7caa865cbc6b58cd
[ "Apache-2.0" ]
31
2020-04-21T17:24:39.000Z
2020-08-27T15:59:12.000Z
python/sandbox/property.py
geometer/sandbox
373ec96e69df76744a19b51f7caa865cbc6b58cd
[ "Apache-2.0" ]
null
null
null
from enum import Enum, auto
import itertools

from .figure import Figure, Circle
from .scene import Scene
from .util import Comment, divide, normalize_number, keys_for_triangle


class Property:
    def __init__(self, property_key, point_set):
        self.implications = []
        self.property_key = property_key
        self.point_set = point_set
        self.__hash = None
        self.__reason = None

    @property
    def reason(self):
        return self.__reason

    @reason.setter
    def reason(self, value):
        if self.__reason:
            for pre in self.__reason.premises:
                pre.implications = [p for p in pre.implications if p is not self]
        while self in value.all_premises:
            # TODO: select the best variant
            for prop in value.all_premises:
                if prop == self:
                    value = prop.reason
        self.__reason = value
        for pre in self.__reason.premises:
            pre.implications.append(self)
        self.fire_premises_change()

    @property
    def priority(self):
        if not hasattr(self, 'rule'):
            return self.__priority__ * 2
        else:
            return self.__priority__ * self.rule.priority()

    @property
    def __priority__(self):
        return 3

    def fire_premises_change(self):
        self.reason.reset_premises()
        for impl in self.implications:
            impl.fire_premises_change()

    def keys(self):
        return []

    def stringify(self, printer):
        return self.description.stringify(printer)

    def compare_values(self, other):
        return True

    def __str__(self):
        return str(self.description)

    def __eq__(self, other):
        return type(self) == type(other) and self.property_key == other.property_key

    def __hash__(self):
        if self.__hash is None:
            self.__hash = hash(type(self)) + hash(self.property_key)
        return self.__hash


class PointAndCircleProperty(Property):
    """Point location relative to circle"""

    class Kind(Enum):
        inside = auto()
        on = auto()
        outside = auto()

        def __str__(self):
            return self.name

    @staticmethod
    def unique_key(point, cpoints_set):
        return (point, cpoints_set)

    def __init__(self, point, cpoint0, cpoint1, cpoint2, location):
        self.point = point
        self.circle_key = frozenset((cpoint0, cpoint1, cpoint2))
        self.location = location
        super().__init__(
            PointAndCircleProperty.unique_key(self.point, self.circle_key),
            {point, cpoint0, cpoint1, cpoint2}
        )

    def keys(self):
        return self.property_key

    @property
    def description(self):
        if self.location == PointAndCircleProperty.Kind.inside:
            pattern = '$%{point:pt}$ lies inside $%{circle:circ}$'
        elif self.location == PointAndCircleProperty.Kind.outside:
            pattern = '$%{point:pt}$ lies outside of $%{circle:circ}$'
        elif self.location == PointAndCircleProperty.Kind.on:
            pattern = '$%{point:pt}$ lies on $%{circle:circ}$'
        return Comment(pattern, {'pt': self.point, 'circ': Circle(*self.circle_key)})

    def compare_values(self, other):
        return self.location == other.location


class CircleCoincidenceProperty(Property):
    """Two circles (defined by triples of points) are [not] coincident"""

    def __init__(self, triple0, triple1, coincident):
        self.circle_keys = (frozenset(triple0), frozenset(triple1))
        super().__init__(frozenset(self.circle_keys), {*triple0, *triple1})
        self.coincident = coincident

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        if self.coincident:
            pattern = '$%{circle:c0}$ coincides with $%{circle:c1}$'
        else:
            pattern = '$%{circle:c0}$ and $%{circle:c1}$ differ'
        return Comment(
            pattern,
            {'c0': Circle(*self.circle_keys[0]), 'c1': Circle(*self.circle_keys[1])}
        )

    def compare_values(self, other):
        return self.coincident == other.coincident


class ConcyclicPointsProperty(Property):
    """Concyclic points"""

    def __init__(self, *points):
        assert len(points) == 4
        self.points = points
        super().__init__(frozenset(self.points), set(points))

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment(
            'Points $%{point:pt0}$, $%{point:pt1}$, $%{point:pt2}$, and $%{point:pt3}$ are concyclic',
            dict(('pt%d' % index, pt) for index, pt in enumerate(self.points))
        )


class PointOnLineProperty(Property):
    """A point lies [not] on a line"""

    def __init__(self, point, segment, on_line):
        super().__init__((point, segment), {point, *segment.points})
        self.point = point
        self.segment = segment
        self.on_line = on_line

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        if self.on_line:
            pattern = '$%{point:point}$ lies on line $%{line:line}$'
        else:
            pattern = '$%{point:point}$ does not lie on line $%{line:line}$'
        return Comment(pattern, {'point': self.point, 'line': self.segment})

    def compare_values(self, other):
        return self.on_line == other.on_line


class LinesCoincidenceProperty(Property):
    """Two lines (defined by segments) are [not] coincident"""

    def __init__(self, segment0, segment1, coincident):
        self.segments = (segment0, segment1)
        super().__init__(frozenset(self.segments), {*segment0.points, *segment1.points})
        self.coincident = coincident

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        if self.coincident:
            pattern = '$%{line:line0}$ is the same line as $%{line:line1}$'
        else:
            pattern = '$%{line:line0}$ and $%{line:line1}$ are different lines'
        return Comment(pattern, {'line0': self.segments[0], 'line1': self.segments[1]})

    def compare_values(self, other):
        return self.coincident == other.coincident


class PointsCollinearityProperty(Property):
    """[Not] collinear points"""

    def __init__(self, point0, point1, point2, collinear):
        self.points = (point0, point1, point2)
        super().__init__(frozenset(self.points), {point0, point1, point2})
        self.collinear = collinear

    @property
    def __priority__(self):
        return 1

    def keys(self, lengths=None):
        return keys_for_triangle(Scene.Triangle(*self.points), lengths)

    @property
    def description(self):
        if self.collinear:
            pattern = 'Points $%{point:pt0}$, $%{point:pt1}$, and $%{point:pt2}$ are collinear'
        else:
            pattern = 'Points $%{point:pt0}$, $%{point:pt1}$, and $%{point:pt2}$ are not collinear'
        return Comment(pattern, {'pt0': self.points[0], 'pt1': self.points[1], 'pt2': self.points[2]})

    def compare_values(self, other):
        return self.collinear == other.collinear


class ParallelVectorsProperty(Property):
    """Two vectors are parallel (or at least one of them has zero length)"""

    def __init__(self, vector0, vector1):
        self.vectors = (vector0, vector1)
        super().__init__(frozenset(self.vectors), {*vector0.points, *vector1.points})

    def keys(self):
        return [self.vectors[0].as_segment, self.vectors[1].as_segment]

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment(
            '$%{vector:vec0} \\uparrow\\!\\!\\!\\uparrow %{vector:vec1}$',
            {'vec0': self.vectors[0], 'vec1': self.vectors[1]}
        )


class ParallelSegmentsProperty(Property):
    """Two segments are parallel (or at least one of them has zero length)"""

    def __init__(self, segment0, segment1):
        self.segments = (segment0, segment1)
        super().__init__(frozenset(self.segments), {*segment0.points, *segment1.points})

    def keys(self):
        return self.segments

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment(
            '$%{segment:seg0} \\,\\|\\, %{segment:seg1}$',
            {'seg0': self.segments[0], 'seg1': self.segments[1]}
        )


class PerpendicularSegmentsProperty(Property):
    """Two segments are perpendicular (or at least one of them has zero length)"""

    def __init__(self, segment0, segment1):
        self.segments = (segment0, segment1)
        super().__init__(frozenset(self.segments), {*segment0.points, *segment1.points})

    def keys(self):
        return self.segments

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment(
            '$%{segment:seg0} \\perp %{segment:seg1}$',
            {'seg0': self.segments[0], 'seg1': self.segments[1]}
        )


class PointsCoincidenceProperty(Property):
    """[Not] coincident points"""

    def __init__(self, point0, point1, coincident):
        assert isinstance(point0, Scene.Point)
        assert isinstance(point1, Scene.Point)
        assert point0 != point1
        self.points = [point0, point1]
        super().__init__(frozenset(self.points), {point0, point1})
        self.coincident = coincident

    @property
    def __priority__(self):
        return 3 if self.coincident else 1

    def keys(self):
        return [self.points[0].segment(self.points[1]), *self.points]

    @property
    def description(self):
        if self.coincident:
            pattern = 'Points $%{point:pt0}$ and $%{point:pt1}$ are coincident'
        else:
            pattern = 'Points $%{point:pt0}$ and $%{point:pt1}$ are not coincident'
        return Comment(pattern, {'pt0': self.points[0], 'pt1': self.points[1]})

    def compare_values(self, other):
        return self.coincident == other.coincident


class SameOrOppositeSideProperty(Property):
    """Two points on opposite/same sides of a line"""

    @staticmethod
    def unique_key(segment, point0, point1):
        return frozenset([segment, point0, point1])

    def __init__(self, segment, point0, point1, same):
        self.segment = segment
        self.points = (point0, point1)
        self.same = same
        super().__init__(
            SameOrOppositeSideProperty.unique_key(segment, point0, point1),
            {point0, point1, *segment.points}
        )

    @property
    def __priority__(self):
        return 1

    def keys(self):
        return [self.segment]

    @property
    def description(self):
        if self.same:
            pattern = '$%{point:pt0}$, $%{point:pt1}$ located on the same side of line $%{line:line}$'
        else:
            pattern = '$%{point:pt0}$, $%{point:pt1}$ located on opposite sides of line $%{line:line}$'
        return Comment(pattern, {'pt0': self.points[0], 'pt1': self.points[1], 'line': self.segment})

    def compare_values(self, other):
        return self.same == other.same


class PointInsideAngleProperty(Property):
    """A point lies inside an angle"""

    def __init__(self, point, angle):
        self.point = point
        self.angle = angle
        super().__init__((point, angle), {point, *angle.point_set})

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment('$%{point:pt}$ lies inside $%{angle:angle}$', {'pt': self.point, 'angle': self.angle})

    def keys(self):
        return [self.point, self.angle]


class EquilateralTriangleProperty(Property):
    """Equilateral triangle"""

    def __init__(self, points):
        self.triangle = points if isinstance(points, Scene.Triangle) else Scene.Triangle(*points)
        super().__init__(frozenset(self.triangle.points), {*self.triangle.points})

    def keys(self, lengths=None):
        return keys_for_triangle(self.triangle, lengths)

    @property
    def __priority__(self):
        return 4.5

    @property
    def description(self):
        return Comment('$%{triangle:triangle}$ is equilateral', {'triangle': self.triangle})


class SquareProperty(Property):
    """Square"""

    @staticmethod
    def unique_key(four_points):
        def perms(four):
            return [four, (*four[1:], four[0]), (*four[2:], *four[:2]), (four[3], *four[:3])]
        return frozenset(perms(four_points) + perms(tuple(reversed(four_points))))

    def __init__(self, square):
        assert len(square.points) == 4
        self.square = square
        super().__init__(SquareProperty.unique_key(square.points), {*square.points})

    @property
    def __priority__(self):
        return 4

    @property
    def description(self):
        return Comment('$%{polygon:square}$ is a square', {'square': self.square})


class NondegenerateSquareProperty(Property):
    """Non-degenerate square"""

    def __init__(self, square):
        assert len(square.points) == 4
        self.square = square
        super().__init__(SquareProperty.unique_key(square.points), {*square.points})

    @property
    def __priority__(self):
        return 4.5

    @property
    def description(self):
        return Comment('$%{polygon:square}$ is a non-degenerate square', {'square': self.square})


class CentreOfEquilateralTriangleProperty(Property):
    """A point is the centre of equilateral triangle"""

    def __init__(self, centre, triangle):
        self.centre = centre
        self.triangle = triangle
        super().__init__((centre, frozenset(triangle.points)), {centre, *self.triangle.points})

    @property
    def __priority__(self):
        return 4.5

    @property
    def description(self):
        return Comment(
            '$%{point:centre}$ is the centre of equilateral $%{triangle:triangle}$',
            {'centre': self.centre, 'triangle': self.triangle}
        )


class AngleKindProperty(Property):
    """An angle is acute/obtuse/right"""

    class Kind(Enum):
        acute = auto()
        right = auto()
        obtuse = auto()

        def __str__(self):
            return self.name

    def __init__(self, angle, kind):
        self.angle = angle
        self.kind = kind
        super().__init__(angle, self.angle.point_set)

    def keys(self):
        return [self.angle]

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        if self.kind == AngleKindProperty.Kind.acute:
            pattern = '$%{angle:angle}$ is acute'
        elif self.kind == AngleKindProperty.Kind.obtuse:
            pattern = '$%{angle:angle}$ is obtuse'
        else:
            pattern = '$%{angle:angle}$ is right'
        return Comment(pattern, {'angle': self.angle})

    def compare_values(self, other):
        return self.kind == other.kind


class AngleValueProperty(Property):
    """Angle value"""

    @staticmethod
    def generate(vector0, vector1, value):
        def rev(first, second):
            vec0 = vector0.reversed if first else vector0
            vec1 = vector1.reversed if second else vector1
            return vec0.angle(vec1)

        if vector0.start == vector1.start:
            angles = [(rev(False, False), False)]
        elif vector0.start == vector1.end:
            angles = [(rev(False, True), True)]
        elif vector0.end == vector1.start:
            angles = [(rev(True, False), True)]
        elif vector0.end == vector1.end:
            angles = [(rev(True, True), False)]
        else:
            angles = [
                (rev(False, False), False),
                (rev(False, True), True),
                (rev(True, False), True),
                (rev(True, True), False),
            ]
        for ngl, supplementary in angles:
            yield AngleValueProperty(ngl, 180 - value if supplementary else value)

    def __init__(self, angle, degree):
        assert isinstance(angle, Scene.Angle)
        self.angle = angle
        self.degree = normalize_number(degree)
        super().__init__(angle, self.angle.point_set)

    @property
    def __priority__(self):
        return 1 if self.degree in (0, 90, 180) else 3.5

    def keys(self):
        return [self.angle]

    @property
    def description(self):
        if self.angle.vertex:
            if self.degree == 0:
                return Comment(
                    '$%{point:pt0}$, $%{point:pt1}$ in the same direction from $%{point:vertex}$',
                    {'pt0': self.angle.vectors[0].end, 'pt1': self.angle.vectors[1].end, 'vertex': self.angle.vertex}
                )
            if self.degree == 180:
                return Comment(
                    '$%{point:pt}$ lies inside segment $%{segment:seg}$',
                    {'pt': self.angle.vertex, 'seg': self.angle.vectors[0].end.segment(self.angle.vectors[1].end)}
                )
        return Comment('$%{anglemeasure:ang} = %{degree:deg}$', {'ang': self.angle, 'deg': self.degree})

    def compare_values(self, other):
        return self.degree == other.degree


class MiddleOfSegmentProperty(Property):
    """A point is the middle of segment"""

    def __init__(self, point, segment):
        self.point = point
        self.segment = segment
        super().__init__((point, segment), {point, *segment.points})

    @property
    def __priority__(self):
        return 2

    @property
    def description(self):
        return Comment(
            '$%{point:point}$ is the middle of $%{segment:segment}$',
            {'point': self.point, 'segment': self.segment}
        )


class AngleRatioProperty(Property):
    """Two angle values ratio"""

    def __init__(self, angle0, angle1, ratio, same=False):
        assert isinstance(angle0, Scene.Angle)
        assert isinstance(angle1, Scene.Angle)
        # angle0 / angle1 = ratio
        if ratio >= 1:
            self.angle0 = angle0
            self.angle1 = angle1
            self.value = normalize_number(ratio)
        else:
            self.angle0 = angle1
            self.angle1 = angle0
            self.value = divide(1, ratio)
        self.same = same
        super().__init__(frozenset([angle0, angle1]), {*self.angle0.point_set, *self.angle1.point_set})

    def keys(self):
        return [self.angle0, self.angle1]

    @property
    def __priority__(self):
        return 1 if self.same else 3

    @property
    def description(self):
        params = {'angle0': self.angle0, 'angle1': self.angle1, 'ratio': self.value}
        if self.same:
            pattern = '$%{anglemeasure:angle0} \\equiv %{anglemeasure:angle1}$'
        elif self.value == 1:
            pattern = '$%{anglemeasure:angle0} = %{anglemeasure:angle1}$'
        else:
            pattern = '$%{anglemeasure:angle0} = %{multiplier:ratio}\\,%{anglemeasure:angle1}$'
        return Comment(pattern, params)

    def compare_values(self, other):
        return self.value == other.value


class SumOfThreeAnglesProperty(Property):
    """Sum of three angles is equal to degree"""

    def __init__(self, angle0, angle1, angle2, degree):
        self.angles = (angle0, angle1, angle2)
        self.degree = degree
        super().__init__(frozenset(self.angles), {*angle0.point_set, *angle1.point_set, *angle2.point_set})

    def keys(self):
        return self.angles

    @property
    def description(self):
        return Comment(
            '$%{anglemeasure:a0} + %{anglemeasure:a1} + %{anglemeasure:a2} = %{degree:value}$',
            {'a0': self.angles[0], 'a1': self.angles[1], 'a2': self.angles[2], 'value': self.degree}
        )

    def compare_values(self, other):
        return self.degree == other.degree


class SumOfTwoAnglesProperty(Property):
    """Sum of two angles is equal to degree"""

    def __init__(self, angle0, angle1, degree):
        self.angles = (angle0, angle1)
        self.degree = degree
        super().__init__(frozenset([angle0, angle1]), {*angle0.point_set, *angle1.point_set})

    def keys(self):
        return self.angles

    @property
    def __priority__(self):
        return 1 if self.degree == 180 else 3

    @property
    def description(self):
        return Comment(
            '$%{anglemeasure:a0} + %{anglemeasure:a1} = %{degree:value}$',
            {'a0': self.angles[0], 'a1': self.angles[1], 'value': self.degree}
        )

    def compare_values(self, other):
        return self.degree == other.degree


class LengthRatioProperty(Property):
    """Two non-zero segment lengths ratio"""

    def __init__(self, segment0, segment1, ratio):
        if ratio >= 1:
            self.segment0 = segment0
            self.segment1 = segment1
            self.value = normalize_number(ratio)
        else:
            self.segment0 = segment1
            self.segment1 = segment0
            self.value = divide(1, ratio)
        super().__init__(frozenset([segment0, segment1]), {*self.segment0.points, *self.segment1.points})

    def keys(self):
        return [self.segment0, self.segment1]

    @property
    def description(self):
        return Comment(
            '$|%{segment:seg0}| / |%{segment:seg1}| = %{number:value}$',
            {'seg0': self.segment0, 'seg1': self.segment1, 'value': self.value}
        )

    def compare_values(self, other):
        return self.value == other.value


class ProportionalLengthsProperty(Property):
    """Two segment lengths ratio"""

    def __init__(self, segment0, segment1, ratio):
        if ratio >= 1:
            self.segment0 = segment0
            self.segment1 = segment1
            self.value = normalize_number(ratio)
        else:
            self.segment0 = segment1
            self.segment1 = segment0
            self.value = divide(1, ratio)
        super().__init__(frozenset([segment0, segment1]), {*self.segment0.points, *self.segment1.points})

    def keys(self):
        return [self.segment0, self.segment1]

    @property
    def description(self):
        if self.value == 1:
            return Comment('$|%{segment:seg0}| = |%{segment:seg1}|$', {
                'seg0': self.segment0, 'seg1': self.segment1
            })
        return Comment('$|%{segment:seg0}| = %{multiplier:value}|%{segment:seg1}|$', {
            'seg0': self.segment0, 'seg1': self.segment1, 'value': self.value
        })

    def compare_values(self, other):
        return self.value == other.value


class EqualLengthProductsProperty(Property):
    """Two segment lengths products are equal"""

    @staticmethod
    def unique_key(segment0, segment1, segment2, segment3):
        return frozenset([
            frozenset([segment0, segment3]),
            frozenset([segment1, segment2])
        ])

    def __init__(self, segment0, segment1, segment2, segment3):
        """|segment0| * |segment3| == |segment1| * |segment2|"""
        self.segments = (segment0, segment1, segment2, segment3)
        super().__init__(
            EqualLengthProductsProperty.unique_key(segment0, segment1, segment2, segment3),
            {*segment0.points, *segment1.points, *segment2.points, *segment3.points}
        )

    @property
    def description(self):
        return Comment(
            '$|%{segment:seg0}| * |%{segment:seg3}| = |%{segment:seg1}| * |%{segment:seg2}|$',
            dict(('seg%d' % index, segment) for index, segment in enumerate(self.segments))
        )


class EqualLengthRatiosProperty(Property):
    """Two segment lengths ratios are equal"""

    @staticmethod
    def unique_key(segment0, segment1, segment2, segment3):
        return frozenset([
            (segment0, segment1),
            (segment2, segment3)
        ])

    def __init__(self, segment0, segment1, segment2, segment3):
        """|segment0| / |segment1| == |segment2| / |segment3|"""
        self.segments = (segment0, segment1, segment2, segment3)
        super().__init__(
            EqualLengthRatiosProperty.unique_key(segment0, segment1, segment2, segment3),
            {*segment0.points, *segment1.points, *segment2.points, *segment3.points}
        )

    @property
    def description(self):
        return Comment(
            '$|%{segment:seg0}| / |%{segment:seg1}| = |%{segment:seg2}| / |%{segment:seg3}|$',
            dict(('seg%d' % index, segment) for index, segment in enumerate(self.segments))
        )


class SimilarTrianglesProperty(Property):
    """Two triangles are similar"""

    def __init__(self, points0, points1):
        self.triangle0 = points0 if isinstance(points0, Scene.Triangle) else Scene.Triangle(*points0)
        self.triangle1 = points1 if isinstance(points1, Scene.Triangle) else Scene.Triangle(*points1)
        pairs = [frozenset(perms) for perms in zip(self.triangle0.permutations, self.triangle1.permutations)]
        super().__init__(frozenset(pairs), {*self.triangle0.points, *self.triangle1.points})

    def keys(self, lengths=None):
        return keys_for_triangle(self.triangle0, lengths) + keys_for_triangle(self.triangle1, lengths)

    @property
    def __priority__(self):
        return 5

    @property
    def description(self):
        return Comment('$%{triangle:t0} \\sim %{triangle:t1}$', {'t0': self.triangle0, 't1': self.triangle1})


class CongruentTrianglesProperty(Property):
    """Two triangles are congruent"""

    def __init__(self, points0, points1):
        self.triangle0 = points0 if isinstance(points0, Scene.Triangle) else Scene.Triangle(*points0)
        self.triangle1 = points1 if isinstance(points1, Scene.Triangle) else Scene.Triangle(*points1)
        pairs = [frozenset(perms) for perms in zip(self.triangle0.permutations, self.triangle1.permutations)]
        super().__init__(frozenset(pairs), {*self.triangle0.points, *self.triangle1.points})

    def keys(self, lengths=None):
        return keys_for_triangle(self.triangle0, lengths) + keys_for_triangle(self.triangle1, lengths)

    @property
    def __priority__(self):
        return 5

    @property
    def description(self):
        return Comment('$%{triangle:t0} \\cong %{triangle:t1}$', {'t0': self.triangle0, 't1': self.triangle1})


class IsoscelesTriangleProperty(Property):
    """Isosceles triangle"""

    def __init__(self, apex, base):
        self.apex = apex
        self.base = base
        self.triangle = Scene.Triangle(apex, *base.points)
        super().__init__((apex, base), {apex, *base.points})

    def keys(self, lengths=None):
        return keys_for_triangle(self.triangle, lengths)

    @property
    def __priority__(self):
        return 4

    @property
    def description(self):
        return Comment(
            '$%{triangle:isosceles}$ is isosceles with apex $%{point:apex}$',
            {'isosceles': self.triangle, 'apex': self.apex}
        )


class Cycle(Figure):
    def __init__(self, pt0, pt1, pt2):
        self.points = (pt0, pt1, pt2)
        self.__key = frozenset([(pt0, pt1, pt2), (pt1, pt2, pt0), (pt2, pt0, pt1)])
        self.__reversed = None

    @property
    def reversed(self):
        if self.__reversed is None:
            self.__reversed = Cycle(*reversed(self.points))
            self.__reversed.__reversed = self
        return self.__reversed

    def __str__(self):
        return '\\circlearrowleft %s %s %s' % self.points

    def __eq__(self, other):
        return self.__key == other.__key

    def __hash__(self):
        return hash(self.__key)


class SameCyclicOrderProperty(Property):
    """Two triples of points have the same cyclic order"""

    def __init__(self, cycle0, cycle1):
        self.cycle0 = cycle0
        self.cycle1 = cycle1
        super().__init__(frozenset([cycle0, cycle1]), {*cycle0.points, *cycle1.points})

    @property
    def __priority__(self):
        return 1

    @property
    def description(self):
        return Comment(
            '$%{cycle:cycle0}$ and $%{cycle:cycle1}$ have the same order',
            {'cycle0': self.cycle0, 'cycle1': self.cycle1}
        )
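A brief, hypothetical usage sketch of the property classes above; pt_a, pt_b, and pt_c stand in for Scene.Point objects created elsewhere, since scene construction is not part of this module:

# pt_a, pt_b, pt_c are assumed Scene.Point instances obtained from a Scene
# object elsewhere; this module only defines the property wrappers.
prop = PointsCollinearityProperty(pt_a, pt_b, pt_c, False)
print(prop.priority)              # 2: base __priority__ of 1, doubled when no rule is attached
print(prop.compare_values(prop))  # True: identical collinearity flags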
31.713161
178
0.604654
3,036
28,193
5.442358
0.09025
0.039944
0.021304
0.047207
0.557768
0.502088
0.450039
0.409066
0.367851
0.349513
0
0.023331
0.264179
28,193
888
179
31.748874
0.77315
0.042954
0
0.5117
0
0.00936
0.102099
0.010988
0
0
0
0.001126
0.014041
1
0.224649
false
0
0.0078
0.145086
0.461778
0.00312
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
7eb2db3dec520e7305b4a8da4402eff2e207cc78
310
py
Python
hpc-historias-clinicas/inter_consultas/templatetags/inter_consultas_tags.py
btenaglia/hpc-historias-clinicas
649d8660381381b1c591667760c122d73071d5ec
[ "BSD-3-Clause" ]
null
null
null
hpc-historias-clinicas/inter_consultas/templatetags/inter_consultas_tags.py
btenaglia/hpc-historias-clinicas
649d8660381381b1c591667760c122d73071d5ec
[ "BSD-3-Clause" ]
null
null
null
hpc-historias-clinicas/inter_consultas/templatetags/inter_consultas_tags.py
btenaglia/hpc-historias-clinicas
649d8660381381b1c591667760c122d73071d5ec
[ "BSD-3-Clause" ]
null
null
null
from django import template

from ..models import InterConsultas

register = template.Library()


@register.simple_tag
def total_inter_consultas(historia_id):
    """Return the total number of interconsultations for a clinical history."""
    return InterConsultas.objects.filter(historia=historia_id).count()
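A sketch of how this tag might be pulled into a template; the tag-library name inter_consultas_tags is inferred from the file path of this record:

# In a Django template (shown here as comments; names inferred from the path):
#   {% load inter_consultas_tags %}
#   <span>{% total_inter_consultas historia.id %}</span>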
28.181818
70
0.774194
38
310
6.184211
0.684211
0.119149
0
0
0
0
0
0
0
0
0
0
0.145161
310
11
70
28.181818
0.886792
0.2
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
0e1bbb0df00b6e82654050d4532839d24b905f49
1,956
py
Python
pyrrigate/controllers.py
hstefan/pyrrigate
8e83a401b88044a51e4dfdc74500c3f93552a1a3
[ "MIT" ]
1
2017-05-19T03:41:14.000Z
2017-05-19T03:41:14.000Z
pyrrigate/controllers.py
hstefan/pyrrigate
8e83a401b88044a51e4dfdc74500c3f93552a1a3
[ "MIT" ]
null
null
null
pyrrigate/controllers.py
hstefan/pyrrigate
8e83a401b88044a51e4dfdc74500c3f93552a1a3
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import abc
import logging

try:
    import wiringpi
except ModuleNotFoundError:
    logging.warning('wiringpi is not installed! This should only be used with --use-dummy-gpio.')

from pyrrigate.PyrrigateConfig_pb2 import ControllerConf


class Controller(metaclass=abc.ABCMeta):
    @staticmethod
    def from_config(conf: ControllerConf, dummy=False):
        """Creates a controller object for the given configuration."""
        # TODO: more controller types
        assert conf.type == ControllerConf.CONTROLLER_DIGITAL_PIN
        return DigitalPinController(conf.id, conf.pinNumber, conf.reversed, dummy)

    def __init__(self, controller_id):
        self.controller_id = controller_id

    @abc.abstractclassmethod
    def activate(self):
        raise NotImplementedError

    @abc.abstractclassmethod
    def deactivate(self):
        raise NotImplementedError

    @abc.abstractclassmethod
    def configure(self):
        raise NotImplementedError


class DigitalPinController(Controller):
    def __init__(self, controller_id: str, pin: int, reverse: bool = False, dummy=False):
        super().__init__(controller_id)
        self.pin = pin
        self.reverse = reverse
        self.dummy = dummy

    def activate(self):
        logging.info('Activating controller "%s".', self.controller_id)
        self._digital_write(not self.reverse)

    def deactivate(self):
        logging.info('Deactivating controller "%s".', self.controller_id)
        self._digital_write(self.reverse)

    def _digital_write(self, value: bool):
        if self.dummy:
            logging.info('digital_write(%d, %r)', self.pin, value)
        else:
            wiringpi.digitalWrite(self.pin, wiringpi.HIGH if value else wiringpi.LOW)

    def configure(self):
        if self.dummy:
            logging.info('pin_mode(%d, OUTPUT)', self.pin)
        else:
            wiringpi.pinMode(self.pin, wiringpi.OUTPUT)
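A quick sketch exercising the dummy mode of the class above, which only logs instead of driving GPIO; the controller id and pin number are made up for illustration:

ctrl = DigitalPinController('pump-1', 17, dummy=True)  # illustrative id and pin
ctrl.configure()   # logs pin_mode(17, OUTPUT)
ctrl.activate()    # logs digital_write(17, True), since reverse defaults to False
ctrl.deactivate()  # logs digital_write(17, False)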
31.047619
97
0.680982
222
1,956
5.86036
0.400901
0.064566
0.061491
0.046118
0.216756
0.147579
0.066103
0.066103
0
0
0
0.001965
0.219325
1,956
62
98
31.548387
0.850033
0.06544
0
0.355556
0
0
0.093956
0
0
0
0
0.016129
0.022222
1
0.222222
false
0
0.088889
0
0.377778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
2
0e359457160d6667e3a0621da01b28b0e49843f4
4,186
py
Python
library/pyvim_umount_cd.py
Rthur/alt-pyvim
9d4dc8be79949972ae80c5ee79ffbc4d70aacb54
[ "Apache-2.0" ]
null
null
null
library/pyvim_umount_cd.py
Rthur/alt-pyvim
9d4dc8be79949972ae80c5ee79ffbc4d70aacb54
[ "Apache-2.0" ]
null
null
null
library/pyvim_umount_cd.py
Rthur/alt-pyvim
9d4dc8be79949972ae80c5ee79ffbc4d70aacb54
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python

DOCUMENTATION = '''
---
author: Arthur Reyes
module: pyvim_umount_cd
description:
    - unmount an iso on Virtual Machine
version_added: "0.1"
requirements:
    - pyVim
notes:
    - This module disables SSL Security and warnings for invalid certificates.
    - Tested with Ansible 2.0.1.0
options:
    host:
        description:
            - The vSphere server that manages the cluster where the guest is located on.
        required: true
        aliases: ['vsphere']
    login:
        description:
            - A login name which can authenticate to the vSphere cluster.
        required: true
        aliases: ['admin']
    password:
        description:
            - The password used to authenticate to the vSphere cluster.
        required: true
        aliases: ['secret']
    port:
        description:
            - The port the vSphere listens on.
        required: false
        default: 443
    uuid:
        description:
            - the instanceUuid of the guest. Useful to identify a unique guest
              when multiple virtual machines with the same name exist across clusters.
        required: true
    device:
        description:
            - the virtual device name where media will be mounded.
        required: true
'''

import atexit
import sys
import requests

try:
    from pyVim import connect
    from pyVmomi import vmodl
    from pyVmomi import vim
    from tools import cli
    from tools import tasks
except ImportError:
    print "failed=True msg='pyvmoni python module unavailable'"
    sys.exit(1)


def main():
    module = AnsibleModule(
        argument_spec = dict(
            host = dict(required=True, aliases=['vsphere']),
            port = dict(required=False, default=443),
            login = dict(required=True, aliases=['admin']),
            password = dict(required=True, aliases=['secret']),
            uuid = dict(required=False, default=None),
            device = dict(required=True),
        )
    )

    host = module.params.get('host')
    port = module.params.get('port')
    login = module.params.get('login')
    password = module.params.get('password')
    uuid = module.params.get('uuid')
    device = module.params.get('device')

    context = connect.ssl.SSLContext(connect.ssl.PROTOCOL_TLSv1)
    context.verify_mode = connect.ssl.CERT_NONE
    requests.packages.urllib3.disable_warnings()

    try:
        si = connect.SmartConnect(host=host, port=int(port), user=login,
                                  pwd=password, sslContext=context)
    except Exception, e:
        module.fail_json(msg='Failed to connect to %s: %s' % (host, e))
    atexit.register(connect.Disconnect, si)

    content = si.RetrieveContent()
    container = content.viewManager.CreateContainerView(content.rootFolder,
                                                        [vim.VirtualMachine], True)

    target = None
    children = container.view
    for child in children:
        if uuid and child.summary.config.instanceUuid == uuid:
            target = child
    if not target:
        module.fail_json(msg='guest machine not found: %s' % (uuid))

    for dev in target.config.hardware.device:
        if isinstance(dev, vim.vm.device.VirtualCdrom) \
                and dev.deviceInfo.label == device:
            cdrom = dev
    if not cdrom:
        module.fail_json(msg='virtual device not found: %s' % (device))

    if hasattr(cdrom.backing, 'RemotePassthroughBackingInfo'):
        module.exit_json(changed=False)

    cdrom_spec = vim.vm.device.VirtualDeviceSpec()
    cdrom_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
    cdrom_spec.device = vim.vm.device.VirtualCdrom()
    cdrom_spec.device.controllerKey = cdrom.controllerKey
    cdrom_spec.device.key = cdrom.key
    cdrom_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
    cdrom_spec.device.backing = \
        vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo()
    cdrom_spec.device.connectable.allowGuestControl = True

    container.Destroy()

    dev_changes = []
    dev_changes.append(cdrom_spec)
    spec = vim.vm.ConfigSpec()
    spec.deviceChange = dev_changes
    task = target.ReconfigVM_Task(spec=spec)
    tasks.wait_for_tasks(si, [task])

    if str(dev_changes[0]).find('vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo') > 0:
        module.exit_json(changed=True)
    else:
        module.fail_json(msg='Device in unknown state: %s, %s' % (device, dev_changes[0]))

#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
main()
28.868966
93
0.701147
522
4,186
5.557471
0.3659
0.037229
0.026543
0.02344
0.085143
0.034471
0.034471
0.034471
0
0
0
0.005325
0.192547
4,186
144
94
29.069444
0.852959
0.011706
0
0.114754
0
0
0.343009
0.020077
0
0
0
0
0
0
null
null
0.065574
0.07377
null
null
0.008197
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
0e36cdaf39db8bd29d0f785719fbaa8ca3114a95
1,480
py
Python
stdplugins/follow.py
PratikGoswamiPM/BotHub
d02f9773418d2651d85e6cc3d797e78a3afef325
[ "Apache-2.0" ]
1
2021-07-18T06:57:27.000Z
2021-07-18T06:57:27.000Z
stdplugins/follow.py
PratikGoswamiPM/BotHub
d02f9773418d2651d85e6cc3d797e78a3afef325
[ "Apache-2.0" ]
null
null
null
stdplugins/follow.py
PratikGoswamiPM/BotHub
d02f9773418d2651d85e6cc3d797e78a3afef325
[ "Apache-2.0" ]
null
null
null
""" Userbot module for getting information about the social media. """ from asyncio import create_subprocess_shell as asyncrunapp from asyncio.subprocess import PIPE as asyncPIPE from platform import python_version, uname from shutil import which from os import remove from telethon import version from random import randint from asyncio import sleep from os import execl import sys import os import io import sys import json from sample_config import Config from uniborg.util import admin_cmd import uniborg # ================= CONSTANT ================= DEFAULTUSER = Config.ALIVE_NAME if Config.ALIVE_NAME else uname().node # ============================================ @borg.on(admin_cmd(pattern="follow ?(.*)")) async def follow(follow): """ For .follow command, check if the bot is running. """ await follow.edit( f"`FOLLOW {DEFAULTUSER} ON` \n\n" f"[Instagram](https://www.instagram.com/pm_the_angry) \n\n" f"[Telegram](https://www.instagram.com/PM_The_Angry) \n\n" f"[Messenger](https://m.me/pratikgoswami9141) \n\n" f"[GitHub](https://github.com/puribapu9141) \n\n" f"[Facebook](https://www.facebook.com/pratikgoswami9141) \n\n" f"[Twitter](https://twitter.com/PM_The_Angry) \n\n" f"[LinkedIn](https://www.linkedin.com/in/pratik-goswami-pm-94122415b) \n\n" )
36.097561
96
0.618243
187
1,480
4.818182
0.44385
0.017758
0.023307
0.043285
0.09101
0.09101
0.09101
0.073252
0.073252
0.073252
0
0.017437
0.225
1,480
40
97
37
0.768091
0
0
0.066667
0
0.033333
0.338364
0
0
0
0
0
0
0
null
null
0
0.566667
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
0e3c2c0f177e307b955c9b1a44dda9edb96e7f53
551
py
Python
gpt2_bot/__init__.py
chenyaoBOYqu/albino
8c43893b8d8dc3f92a01c2a6f8f66f2bd123b333
[ "CC0-1.0" ]
8
2020-06-30T20:16:14.000Z
2021-01-26T00:51:27.000Z
gpt2_bot/__init__.py
chenyaoBOYqu/albino
8c43893b8d8dc3f92a01c2a6f8f66f2bd123b333
[ "CC0-1.0" ]
null
null
null
gpt2_bot/__init__.py
chenyaoBOYqu/albino
8c43893b8d8dc3f92a01c2a6f8f66f2bd123b333
[ "CC0-1.0" ]
3
2020-09-02T23:03:04.000Z
2021-03-21T23:47:46.000Z
import gpt2_bot.irc, gpt2_bot.commands, gpt2_bot.gpt2


def assemble_bot(config_path):
    ircbot = gpt2_bot.irc.IRCBot(config_path)
    ircbot.register_command("ping", gpt2_bot.commands.ping_command)
    ircbot.register_command("ignore", gpt2_bot.commands.ignore_command)
    ircbot.register_command("unignore", gpt2_bot.commands.unignore_command)
    ircbot.register_command("temp", gpt2_bot.commands.temp_command)
    ircbot.register_command("shitposting", gpt2_bot.commands.shitposting_command)
    gpt2_bot.gpt2.init(ircbot)
    return ircbot
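A minimal sketch of wiring the factory above into a program; the config path and the run() entry point are assumptions, since the IRCBot API is not shown in this record:

from gpt2_bot import assemble_bot

bot = assemble_bot("config.ini")  # the path is an assumption
bot.run()  # hypothetical entry point; IRCBot's interface is defined elsewhere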
34.4375
81
0.789474
74
551
5.567568
0.256757
0.169903
0.218447
0.271845
0
0
0
0
0
0
0
0.02439
0.107078
551
15
82
36.733333
0.813008
0
0
0
0
0
0.059891
0
0
0
0
0
0
1
0.1
false
0
0.1
0
0.3
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0e484b20f678e870513027b30be016da2c77c8b9
11,218
py
Python
insights/tests/client/test_archive.py
TZ3070/insights-core
13f4fc6bfcb89d76f0255c6259902360a298d619
[ "Apache-2.0" ]
null
null
null
insights/tests/client/test_archive.py
TZ3070/insights-core
13f4fc6bfcb89d76f0255c6259902360a298d619
[ "Apache-2.0" ]
null
null
null
insights/tests/client/test_archive.py
TZ3070/insights-core
13f4fc6bfcb89d76f0255c6259902360a298d619
[ "Apache-2.0" ]
null
null
null
from insights.client.archive import InsightsArchive
from mock.mock import patch, Mock, call
from unittest import TestCase
from pytest import raises

test_timestamp = '000000'
test_hostname = 'testhostname'
test_archive_name = 'insights-testhostname-000000'
test_archive_dir = '/var/tmp/test/insights-testhostname-000000'
test_obfuscated_archive_dir = '/var/tmp/test/insights-localhost-000000'
test_cmd_dir = '/var/tmp/test/insights-testhostname-000000/insights_commands'
test_tmp_dir = '/var/tmp/insights-archive-000000'


@patch('insights.client.archive.time.strftime', Mock(return_value=test_timestamp))
@patch('insights.client.archive.determine_hostname', Mock(return_value=test_hostname))
@patch('insights.client.archive.tempfile.mkdtemp')
@patch('insights.client.archive.atexit.register')
class TestInsightsArchive(TestCase):
    @patch('insights.client.archive.InsightsArchive.cleanup_previous_archive')
    def test_init_archive(self, cleanup, register, mkdtemp):
        '''
        Verify archive is created with default parameters
        '''
        config = Mock()
        config.obfuscate_hostname = False
        archive = InsightsArchive(config)

        assert archive.config == config
        assert archive.tmp_dir
        assert archive.archive_dir is None
        assert archive.cmd_dir is None
        assert archive.compressor == config.compressor
        assert archive.archive_name == test_archive_name

        cleanup.assert_called_once()
        mkdtemp.assert_has_calls([call(dir='/var/tmp/', prefix='insights-archive-')])
        register.assert_called_once()

    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', Mock(return_value=False))
    def test_create_archive_dir_default(self, makedirs, _, __):
        '''
        Verify archive_dir is created when it does not already exist
        '''
        config = Mock()
        config.obfuscate_hostname = False
        archive = InsightsArchive(config)
        # give this a discrete value so we can check the results
        archive.tmp_dir = '/var/tmp/test'
        result = archive.create_archive_dir()
        makedirs.assert_called_once_with(test_archive_dir, 0o700)
        # ensure the archive_dir is returned from the function
        assert result == test_archive_dir
        # ensure the class attr is set
        assert archive.archive_dir == test_archive_dir
        # ensure the retval and attr are the same
        assert result == archive.archive_dir

    @patch('insights.client.archive.glob.glob', return_value=[])
    @patch('insights.client.archive.shutil.rmtree')
    def test_tmp_directory_no_cleanup(self, rmtree, glob, _, __):
        InsightsArchive(Mock())
        glob.assert_called_with('/var/tmp/insights-archive-*')
        rmtree.assert_not_called()

    @patch('insights.client.archive.glob.glob', return_value=[test_tmp_dir])
    @patch('insights.client.archive.shutil.rmtree')
    def test_tmp_directory_cleanup(self, rmtree, glob, _, __):
        InsightsArchive(Mock())
        glob.assert_called_with('/var/tmp/insights-archive-*')
        rmtree.assert_called_with(test_tmp_dir, True)

    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', Mock(return_value=False))
    def test_create_archive_dir_obfuscated(self, makedirs, _, __):
        '''
        Verify archive_dir is created when it does not already exist
        '''
        config = Mock()
        config.obfuscate_hostname = True
        archive = InsightsArchive(config)
        # give this a discrete value so we can check the results
        archive.tmp_dir = '/var/tmp/test'
        result = archive.create_archive_dir()
        makedirs.assert_called_once_with(test_obfuscated_archive_dir, 0o700)
        # ensure the archive_dir is returned from the function
        assert result == test_obfuscated_archive_dir
        # ensure the class attr is set
        assert archive.archive_dir == test_obfuscated_archive_dir
        # ensure the retval and attr are the same
        assert result == archive.archive_dir

    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', return_value=False)
    def test_create_archive_dir_defined_path_DNE(self, exists, makedirs, _, __):
        '''
        Verify archive_dir is created when the attr is defined
        but the path does not exist
        '''
        config = Mock()
        config.obfuscate_hostname = False
        archive = InsightsArchive(config)
        # give this a discrete value so we can check the results
        archive.tmp_dir = '/var/tmp/test'
        archive.archive_dir = test_archive_dir
        result = archive.create_archive_dir()
        exists.assert_has_calls([call(archive.archive_dir), call(test_archive_dir)])
        makedirs.assert_called_once_with(test_archive_dir, 0o700)
        # ensure the archive_dir is returned from the function
        assert result == test_archive_dir
        # ensure the class attr is set
        assert archive.archive_dir == test_archive_dir
        # ensure the retval and attr are the same
        assert result == archive.archive_dir

    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', return_value=True)
    def test_create_archive_dir_undef_path_exists(self, exists, makedirs, _, __):
        '''
        Verify archive_dir is not re-created when the attr is undefined
        but the path exists
        '''
        config = Mock()
        config.obfuscate_hostname = False
        archive = InsightsArchive(config)
        # give this a discrete value so we can check the results
        archive.tmp_dir = '/var/tmp/test'
        result = archive.create_archive_dir()
        makedirs.assert_not_called()
        exists.assert_called_once_with(test_archive_dir)
        # ensure the archive_dir is returned from the function
        assert result == test_archive_dir
        # ensure the class attr is set
        assert archive.archive_dir == test_archive_dir
        # ensure the retval and attr are the same
        assert result == archive.archive_dir

    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', return_value=True)
    def test_create_archive_dir_defined_path_exists(self, exists, makedirs, _, __):
        '''
        When archive_dir is defined and exists, simply return the class attr
        and do not attempt to create it
        '''
        archive = InsightsArchive(Mock())
        # give this a discrete value so we can check the results
        archive.tmp_dir = '/var/tmp/test'
        archive.archive_dir = test_archive_dir
        result = archive.create_archive_dir()
        makedirs.assert_not_called()
        exists.assert_called_once_with(archive.archive_dir)
        # ensure the archive_dir is returned from the function
        assert result == test_archive_dir
        # ensure the class attr is set
        assert archive.archive_dir == test_archive_dir
        # ensure the retval and attr are the same
        assert result == archive.archive_dir

    @patch('insights.client.archive.InsightsArchive.create_archive_dir', return_value=test_archive_dir)
    @patch('insights.client.archive.os.makedirs')
    @patch('insights.client.archive.os.path.exists', return_value=False)
    def test_create_command_dir(self, exists, makedirs, create_archive_dir, _, __):
        '''
        Verify insights_commands dir is created
        '''
        archive = InsightsArchive(Mock())
        archive.archive_dir = test_archive_dir
        result = archive.create_command_dir()
        create_archive_dir.assert_called_once()
        makedirs.assert_called_once_with(test_cmd_dir, 0o700)
        # ensure the cmd_dir is returned from the function
        assert result == test_cmd_dir
        # ensure the class attr is set
        assert archive.cmd_dir == test_cmd_dir
        # ensure the retval and attr are the same
        assert result == archive.cmd_dir

    @patch('insights.client.archive.InsightsArchive.create_archive_dir', return_value=test_archive_dir)
    @patch('insights.client.archive.os.path.join', Mock())
    def test_get_full_archive_path(self, create_archive_dir, _, __):
        '''
        Verify create_archive_dir is called when calling get_full_archive_path
        '''
        archive = InsightsArchive(Mock())
        archive.get_full_archive_path('test')
        create_archive_dir.assert_called_once()

    @patch('insights.client.archive.InsightsArchive.create_archive_dir', return_value=test_archive_dir)
    @patch('insights.client.archive.os.path.join', Mock())
    @patch('insights.client.archive.os.path.isdir', Mock())
    @patch('insights.client.archive.shutil.copytree', Mock())
    def test_copy_dir(self, create_archive_dir, _, __):
        '''
        Verify create_archive_dir is called when calling copy_dir
        '''
        archive = InsightsArchive(Mock())
        archive.copy_dir('test')
        create_archive_dir.assert_called_once()

    @patch('insights.client.archive.shutil.copyfile')
    @patch('insights.client.archive.os.path.isdir', Mock())
    @patch('insights.client.archive.os.path.exists', return_value=True)
    def test_keep_archive(self, path_exists, copyfile, _, __):
        archive = InsightsArchive(Mock())
        archive.tar_file = '/var/tmp/insights-archive-test.tar.gz'
        archive.keep_archive_dir = '/var/tmp/test-archive'
        archive.storing_archive()
        copyfile.assert_called_once_with(archive.tar_file, '/var/tmp/test-archive/insights-archive-test.tar.gz')

    @patch('insights.client.archive.shutil.copyfile', side_effect=OSError)
    @patch('insights.client.archive.os.path.join', Mock())
    @patch('insights.client.archive.os.path.isdir', Mock())
    @patch('insights.client.archive.os.path.basename', Mock())
    @patch('insights.client.archive.logger')
    @patch('insights.client.archive.os.path.exists', return_value=True)
    def test_keep_archive_err_during_copy(self, path_exists, logger, copyfile, _, __):
        archive = InsightsArchive(Mock())
        archive.archive_stored = '/var/tmp/test-archive/test-store-archive'
        archive.keep_archive_dir = '/var/tmp/test-archive'
        with raises(Exception):
            archive.storing_archive()
        logger.error.assert_called_once_with('ERROR: Could not stored archive to %s', archive.archive_stored)

    @patch('insights.client.archive.os.makedirs', side_effect=OSError)
    @patch('insights.client.archive.os.path.exists', return_value=False)
    @patch('insights.client.archive.os.path.join', Mock())
    @patch('insights.client.archive.os.path.isdir', Mock())
    @patch('insights.client.archive.os.path.basename', Mock())
    @patch('insights.client.archive.logger')
    def test_keep_arhive_err_creating_directory(self, logger, path_exists, mkdir, _, __):
        archive = InsightsArchive(Mock())
        archive.keep_archive_dir = '/var/tmp/test-archive'
        with raises(Exception):
            archive.storing_archive()
        logger.error.assert_called_once_with('ERROR: Could not create %s', archive.keep_archive_dir)
46.937238
112
0.698431
1,410
11,218
5.312057
0.097163
0.098798
0.123364
0.149266
0.769426
0.717356
0.688518
0.661415
0.646462
0.615354
0
0.005798
0.200571
11,218
238
113
47.134454
0.829393
0.143698
0
0.590361
0
0
0.245616
0.226369
0
0
0
0
0.277108
1
0.084337
false
0
0.024096
0
0.114458
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0e5a1ff8837a7f80b75f3c34fcfe94fefd792abd
778
py
Python
proxyid/exceptions.py
felipepaes/proxyid
122112f5495c5075765c1dd5669fd34be5f2de3a
[ "BSD-2-Clause" ]
null
null
null
proxyid/exceptions.py
felipepaes/proxyid
122112f5495c5075765c1dd5669fd34be5f2de3a
[ "BSD-2-Clause" ]
null
null
null
proxyid/exceptions.py
felipepaes/proxyid
122112f5495c5075765c1dd5669fd34be5f2de3a
[ "BSD-2-Clause" ]
null
null
null
from typing import Union


class UnkownProxiedValueError(Exception):
    """Exception for unkown values given to be decoded"""

    def __init__(self, value: str):
        self.value = value

    def __str__(self) -> str:
        return (f"Proxied value {self.value} is unkown. "
                "The value is wrong or was hashed with a different salt.")


class ProxyidConfigurationError(Exception):
    """Exception for missing configuration"""

    def __init__(self, value: Union[str, None] = None):
        self.value = value

    def __str__(self) -> str:
        if self.value is not None:
            return (f"PROXYID[{self.value}] is missing, "
                    "check settings.py")
        else:
            return "PROXYID dict could not be found in settings.py"
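A short, self-contained sketch of how these exceptions render when raised; the proxied value and config key are illustrative:

from proxyid.exceptions import UnkownProxiedValueError, ProxyidConfigurationError

try:
    raise UnkownProxiedValueError("abc123")  # illustrative proxied value
except UnkownProxiedValueError as err:
    print(err)  # Proxied value abc123 is unkown. The value is wrong or ...

print(ProxyidConfigurationError("SALT"))  # PROXYID[SALT] is missing, check settings.py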
28.814815
74
0.623393
95
778
4.936842
0.494737
0.134328
0.070362
0.06823
0.115139
0.115139
0.115139
0
0
0
0
0
0.27892
778
26
75
29.923077
0.836007
0.106684
0
0.25
0
0
0.277778
0.030702
0
0
0
0
0
1
0.25
false
0
0.0625
0.0625
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0e94b7f304aadae2a7fece27e15ff19ea3cef175
1,006
py
Python
poetry/utils/toml_file.py
radek-sprta/poetry
c57e1ddfd50da4e4ec60c3c27152811e07bdba2a
[ "MIT" ]
null
null
null
poetry/utils/toml_file.py
radek-sprta/poetry
c57e1ddfd50da4e4ec60c3c27152811e07bdba2a
[ "MIT" ]
null
null
null
poetry/utils/toml_file.py
radek-sprta/poetry
c57e1ddfd50da4e4ec60c3c27152811e07bdba2a
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
import pytoml as toml

from poetry.toml import dumps
from poetry.toml import loads
from poetry.toml import TOMLFile

from ._compat import Path


class TomlFile:
    def __init__(self, path):
        self._path = Path(path)

    @property
    def path(self):
        return self._path

    def read(self, raw=False):  # type: (bool) -> dict
        with self._path.open(encoding="utf-8") as f:
            if raw:
                return toml.loads(f.read())
            return loads(f.read())

    def dumps(self, data, sort=False):  # type: (...) -> str
        if not isinstance(data, TOMLFile):
            data = toml.dumps(data, sort_keys=sort)
        else:
            data = dumps(data)
        return data

    def write(self, data, sort=False):  # type: (...) -> None
        data = self.dumps(data, sort=sort)
        with self._path.open("w", encoding="utf-8") as f:
            f.write(data)

    def __getattr__(self, item):
        return getattr(self._path, item)
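A minimal usage sketch of the wrapper above; the pyproject.toml path is an assumption for illustration:

toml_file = TomlFile("pyproject.toml")  # assumed path
data = toml_file.read()              # parsed via poetry's loads()
raw_data = toml_file.read(raw=True)  # plain pytoml dict instead
toml_file.write(data)                # round-trip back to disk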
23.952381
61
0.575547
133
1,006
4.240602
0.323308
0.085106
0.074468
0.106383
0.12766
0
0
0
0
0
0
0.004219
0.293241
1,006
41
62
24.536585
0.78903
0.080517
0
0
0
0
0.011957
0
0
0
0
0
0
1
0.214286
false
0
0.178571
0.071429
0.607143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0ea17ccfe6e690af5fcca0d6e2be9635465f9c66
2,502
py
Python
termicoder/models/Judge.py
diveshuttam/termicoder_beta
fe8f8dcaf1696630380c803d404cc370278b2e77
[ "MIT" ]
null
null
null
termicoder/models/Judge.py
diveshuttam/termicoder_beta
fe8f8dcaf1696630380c803d404cc370278b2e77
[ "MIT" ]
null
null
null
termicoder/models/Judge.py
diveshuttam/termicoder_beta
fe8f8dcaf1696630380c803d404cc370278b2e77
[ "MIT" ]
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-

# ABC is the AbstractBaseClass in python
from abc import ABC, abstractmethod


# Judge is an abstract class to be subclassed and implemented
# by Judge developers.
# Judge class kind of doubles up for login-logout as well as a Factory
# for the contest and problem classes for the particular judge
class Judge(ABC):
    @abstractmethod
    def __init__(self, session_data=None):
        # Init should not have any network requests
        # do them in login, logout, check_running_contest
        self.session_data = session_data

    @abstractmethod
    def check_login(self):
        pass

    @abstractmethod
    def login(self):
        # login also controls all the messages being displayed to the user
        pass

    @abstractmethod
    def logout(self):
        # logout also controls all the messages displayed to the user
        pass

    @abstractmethod
    def get_running_contests(self):
        # return a string of running contest, do it in form of a table.
        pass

    # This method serves both as a problem getter as well as kind of factory
    # for problem
    @abstractmethod
    def get_problem(self, problem_code, contest_code):
        # Method should call the respective Problem.__init__ method to create a
        # problem instance and return it
        pass

    @abstractmethod
    def get_contest(self, contest_code):
        # Method should call the respective Problem.__init__ method to create a
        # contest instance with all its problems and return it
        pass

    @abstractmethod
    def get_problem_url(self, problem_code, contest_code):
        # Method should return the url used by judge for a particular problem
        pass

    @abstractmethod
    def get_contest_url(self, contest_code):
        # Method should return the url used by judge for a particular contest
        pass

    @abstractmethod
    def get_contests_list_url(self):
        # Method should return the url used by judge for listing contest
        pass

    @abstractmethod
    def submit(self, problem, code_text, extension):
        # problem is an instance of judge's problem class
        # code test is the code to be submitted
        # extension is the extension of the code file to determine
        # language of submission
        pass

    @abstractmethod
    def get_testcase(self, inp, ans, code):
        # returns the testcase with inp, ans and code
        # used by termicoder test to output diff
        pass
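A minimal sketch of a concrete subclass of the ABC above; all names and URLs below are illustrative rather than part of termicoder, and the point is simply that every abstract method must be overridden before instantiation:

# Illustrative subclass: termicoder does not ship a "DummyJudge".
class DummyJudge(Judge):
    def __init__(self, session_data=None):
        super().__init__(session_data)

    def check_login(self):
        return False

    def login(self):
        pass

    def logout(self):
        pass

    def get_running_contests(self):
        return 'no running contests'

    def get_problem(self, problem_code, contest_code):
        return None

    def get_contest(self, contest_code):
        return None

    def get_problem_url(self, problem_code, contest_code):
        return 'https://example.com/%s/%s' % (contest_code, problem_code)

    def get_contest_url(self, contest_code):
        return 'https://example.com/%s' % contest_code

    def get_contests_list_url(self):
        return 'https://example.com/contests'

    def submit(self, problem, code_text, extension):
        pass

    def get_testcase(self, inp, ans, code):
        return (inp, ans, code)

judge = DummyJudge()  # instantiation works once nothing is left abstract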
30.888889
79
0.682254
337
2,502
4.95549
0.311573
0.122156
0.113174
0.086228
0.338922
0.28024
0.28024
0.173653
0.173653
0.150898
0
0.000548
0.270983
2,502
80
80
31.275
0.915022
0.527578
0
0.605263
0
0
0
0
0
0
0
0
0
1
0.315789
false
0.289474
0.026316
0
0.368421
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
7eb7ce4862b50c32cfc7e1509cea7e7c1c736b99
612
py
Python
tests/utils.py
SmartDataAnalytics/KEEN-Model-Zoo
11856a2828c0010e8955a555730c7ba1a009399e
[ "MIT" ]
2
2020-03-01T08:45:49.000Z
2020-03-11T02:40:39.000Z
tests/utils.py
SmartDataAnalytics/KEEN-Model-Zoo
11856a2828c0010e8955a555730c7ba1a009399e
[ "MIT" ]
null
null
null
tests/utils.py
SmartDataAnalytics/KEEN-Model-Zoo
11856a2828c0010e8955a555730c7ba1a009399e
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

"""Utilities for test cases."""

import json
import os

import torch
from pykeen.kge_models import get_kge_model
from torch.nn import Module


def load_model(model_directory: str) -> Module:
    """Load trained KGE model."""
    # Load configuration file
    with open(os.path.join(model_directory, 'configuration.json')) as f:
        config = json.load(f)

    trained_kge_model: Module = get_kge_model(config=config)
    path_to_model = os.path.join(model_directory, 'trained_model.pkl')
    trained_kge_model.load_state_dict(torch.load(path_to_model))
    return trained_kge_model
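A usage sketch; the directory path is hypothetical and just needs to contain the configuration.json and trained_model.pkl files read above:

model = load_model('tests/resources/trained_model')  # hypothetical path
model.eval()  # standard torch.nn.Module call before running inference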
24.48
72
0.728758
90
612
4.711111
0.422222
0.113208
0.141509
0.089623
0.113208
0
0
0
0
0
0
0.001949
0.161765
612
24
73
25.5
0.824561
0.156863
0
0
0
0
0.069444
0
0
0
0
0
0
1
0.083333
false
0
0.416667
0
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
7ec9bd3b77169671912625d50d256d936158668c
4,635
py
Python
backend/models.py
aj212/group-expense-manager
001c0d413b24a4eb124a0a6d540a2b81654bc01a
[ "MIT" ]
2
2020-05-03T15:12:01.000Z
2021-05-17T14:11:25.000Z
backend/models.py
aj212/group-expense-manager
001c0d413b24a4eb124a0a6d540a2b81654bc01a
[ "MIT" ]
5
2020-09-06T18:37:23.000Z
2022-02-18T06:52:08.000Z
backend/models.py
akashvermaofskt/group-expense-manager
001c0d413b24a4eb124a0a6d540a2b81654bc01a
[ "MIT" ]
4
2019-08-06T10:44:20.000Z
2021-08-23T17:37:54.000Z
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import create_engine
from passlib.apps import custom_app_context as pwd_context
import random, string
from itsdangerous import (TimedJSONWebSignatureSerializer as Serializer,
                          BadSignature, SignatureExpired)
import datetime

Base = declarative_base()
secret_key = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(32))


class UserInfo(Base):
    __tablename__ = "UserData"

    # attributes for userdata table
    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    email = Column(String(120), unique=True)
    password_hash = Column(String(100), nullable=False)
    status = Column(String(10), nullable=False)  # for verify email
    phone_number = Column(Integer, nullable=True)
    created_on = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    updated_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)

    def hash_password(self, password):
        self.password_hash = pwd_context.encrypt(password)

    def verify_password(self, password):
        return pwd_context.verify(password, self.password_hash)

    def generate_auth_token(self, expiration=600000):
        s = Serializer(secret_key, expires_in=expiration)
        return s.dumps({'email': self.email})

    @staticmethod
    def verify_auth_token(token):
        s = Serializer(secret_key)
        try:
            data = s.loads(token)
        except SignatureExpired:
            # Valid Token, but expired
            return None
        except BadSignature:
            # Invalid Token
            return None
        email = data['email']
        return email

    def __init__(self, name, email):
        self.name = name
        self.email = email
        self.status = "Not Verified"

    def toJSON(self):
        return {
            "login_details": {
                "email": self.email,
                "name": self.name,
                "status": self.status
            }
        }


class GroupInfo(Base):
    __tablename__ = "GroupData"

    # attributes for userdata table
    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
    owner = Column(String(120), nullable=False)
    status = Column(String(10), nullable=False)  # active or not
    created_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)
    updated_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)

    def __init__(self, name, owner):
        self.name = name
        self.owner = owner
        self.status = "Active"

    def toJSON(self):
        return {
            "Group Details": {
                "owner": self.owner,
                "name": self.name,
                "status": self.status
            }
        }


class GroupMapping(Base):
    __tablename__ = "GroupMapping"

    # attributes for userdata table
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, nullable=False)
    group_id = Column(Integer, nullable=False)
    spent = Column(Integer, nullable=False)
    paid = Column(Integer, nullable=False)
    created_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)
    updated_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)

    def __init__(self, user_id, group_id):
        self.user_id = user_id
        self.group_id = group_id
        self.spent = 0
        self.paid = 0

    def toJSON(self):
        return {
            "Group Mapping Details": {
                "Group": self.group_id,
                "User": self.user_id
            }
        }


class FriendMapping(Base):
    __tablename__ = "Friends"

    # attributes for userdata table
    user_id = Column(Integer, nullable=False, primary_key=True)
    friend_id = Column(Integer, nullable=False, primary_key=True)
    created_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)
    updated_on = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)

    def __init__(self, user_id, friend_id):
        self.user_id = user_id
        self.friend_id = friend_id

    def toJSON(self):
        return {
            "Friend Mapping Details": {
                "User_id": self.user_id,
                "Friend_id": self.friend_id
            }
        }


# sqlite://<nohostname>/<path>
# where <path> is relative:
engine = create_engine('sqlite:///version1.db', connect_args={'check_same_thread': False})
Base.metadata.create_all(engine)
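A short usage sketch against the engine declared at the bottom of the module; the user values are illustrative:

Session = sessionmaker(bind=engine)  # sessionmaker is imported above
session = Session()

user = UserInfo(name="Alice", email="alice@example.com")  # illustrative values
user.hash_password("s3cret")  # stores a passlib hash, never the plain password
session.add(user)
session.commit()
print(user.toJSON())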
34.849624
103
0.654585
529
4,635
5.553875
0.238185
0.088496
0.043567
0.065351
0.422737
0.391763
0.36998
0.332539
0.273996
0.273996
0
0.008547
0.242718
4,635
132
104
35.113636
0.82849
0.051133
0
0.266667
0
0
0.051504
0.004786
0
0
0
0
0
1
0.114286
false
0.057143
0.07619
0.047619
0.590476
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
7ecb31f73d3266cb79c9e59df821ed3f9582be9a
1,389
py
Python
tempest/services/compute/v3/xml/version_client.py
BeenzSyed/tempest
7a64ee1216d844f6b99928b53f5c665b84cb8719
[ "Apache-2.0" ]
null
null
null
tempest/services/compute/v3/xml/version_client.py
BeenzSyed/tempest
7a64ee1216d844f6b99928b53f5c665b84cb8719
[ "Apache-2.0" ]
null
null
null
tempest/services/compute/v3/xml/version_client.py
BeenzSyed/tempest
7a64ee1216d844f6b99928b53f5c665b84cb8719
[ "Apache-2.0" ]
null
null
null
# Copyright 2014 NEC Corporation
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from lxml import etree

from tempest.common import rest_client
from tempest.services.compute.xml import common


class VersionV3ClientXML(rest_client.RestClientXML):

    def __init__(self, config, username, password, auth_url, tenant_name=None):
        super(VersionV3ClientXML, self).__init__(config, username, password,
                                                 auth_url, tenant_name)
        self.service = self.config.compute.catalog_v3_type

    def _parse_array(self, node):
        json = common.xml_to_json(node)
        return json

    def get_version(self):
        resp, body = self.get('', self.headers)
        body = self._parse_array(etree.fromstring(body))
        return resp, body
36.552632
79
0.673146
178
1,389
5.123596
0.595506
0.065789
0.028509
0.035088
0.085526
0.085526
0.085526
0
0
0
0
0.010638
0.25558
1,389
37
80
37.540541
0.871373
0.432685
0
0
0
0
0
0
0
0
0
0
0
1
0.1875
false
0.125
0.1875
0
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
7ed0ea9fc79c14ad82d0621372bfaf4de4ad04da
729
py
Python
conjur/resource.py
jdumas/conjur-api-python3
96e438765d580722357a070507305feafcb59d2a
[ "Apache-2.0" ]
16
2019-05-17T15:34:59.000Z
2021-11-08T10:30:21.000Z
conjur/resource.py
jdumas/conjur-api-python3
96e438765d580722357a070507305feafcb59d2a
[ "Apache-2.0" ]
301
2019-05-07T18:27:10.000Z
2022-01-26T13:03:49.000Z
conjur/resource.py
jdumas/conjur-api-python3
96e438765d580722357a070507305feafcb59d2a
[ "Apache-2.0" ]
10
2019-07-30T17:00:13.000Z
2022-01-20T17:00:34.000Z
# -*- coding: utf-8 -*-

"""
Resource module
"""

# pylint: disable=too-few-public-methods
class Resource:
    """
    Used for representing Conjur resources
    """
    def __init__(self, type_: type, name: str):
        self.type = type_
        self.name = name

    def full_id(self):
        """
        Method for building the full resource ID in the format
        'user:someuser' for example
        """
        return f"{self.type}:{self.name}"

    def __eq__(self, other):
        """
        Method for comparing resources by their values and not by reference
        """
        return self.type == other.type and self.name == other.name

    def __repr__(self):
        return f"'type': '{self.type}', 'name': '{self.name}'"
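A short, runnable sketch of value-based comparison on the class above; the resource kind and name are illustrative:

alice = Resource("user", "alice")
print(alice.full_id())                     # user:alice
print(alice == Resource("user", "alice"))  # True: compared by values, not identity
print(repr(alice))                         # 'type': 'user', 'name': 'alice'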
24.3
90
0.585734
91
729
4.527473
0.494505
0.097087
0.058252
0
0
0
0
0
0
0
0
0.001898
0.277092
729
29
91
25.137931
0.779886
0.366255
0
0
0
0
0.170918
0.058673
0
0
0
0
0
1
0.4
false
0
0
0.1
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
7ed1e1c13fe9f63fc269a82e6ba0d76bf3493edb
5,131
py
Python
winregeditor.py
U-23/UinstallTool
d2066a583c15666e6f377ae63cc613b4c96663e2
[ "MIT" ]
1
2019-08-07T08:32:11.000Z
2019-08-07T08:32:11.000Z
winregeditor.py
U-23/UinstallTool
d2066a583c15666e6f377ae63cc613b4c96663e2
[ "MIT" ]
3
2021-03-19T02:36:27.000Z
2022-01-13T01:30:07.000Z
winregeditor.py
U-23/UinstallTool
d2066a583c15666e6f377ae63cc613b4c96663e2
[ "MIT" ]
null
null
null
import winreg
import re


# Registry operations
class winregeditor:
    dicList = {}

    def orderDict(self, numkey, DisplayName, DisplayIcon, UninstallString, KeyPath, InstallLocation):
        self.dicList[numkey] = {'DisplayName': DisplayName,
                                'DisplayIcon': DisplayIcon,
                                'UninstallString': UninstallString,
                                'KeyPath': KeyPath,
                                'InstallLocation': InstallLocation}
        exeIcon = re.compile('.*DLL|.*exe', re.I)
        match = exeIcon.match(DisplayIcon)
        if match:
            # Matched an exe, so it can be opened directly
            self.dicList[numkey]['exe'] = match.group()
            # Remove double quotes
            self.dicList[numkey]['exe'] = self.dicList[numkey]['exe'].replace("\"", "")
        else:
            # No exe; the icon may be an .ico file
            self.dicList[numkey]['icon'] = DisplayIcon
        return self.dicList

    def getwinreg(self):
        software_name = list()
        result = {}
        try:
            # Define the registry locations to check
            sub_key = [r'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall',
                       r'SOFTWARE\Wow6432Node\Microsoft\Windows\CurrentVersion\Uninstall']
            q = 0
            for i in sub_key:
                key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, i, 0, winreg.KEY_ALL_ACCESS)
                h = winreg.QueryInfoKey(key)[0]
                for j in range(0, winreg.QueryInfoKey(key)[0] - 1):
                    DisplayName = ''
                    DisplayIcon = ''
                    UninstallString = ''
                    InstallLocation = ''
                    try:
                        key_name = winreg.EnumKey(key, j)
                        key_path = i + '\\' + key_name
                        KeyPath = 'HKEY_LOCAL_MACHINE\\' + key_path
                        each_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_path, 0, winreg.KEY_ALL_ACCESS)
                        DisplayName, REG_SZ = winreg.QueryValueEx(each_key, 'DisplayName')
                        DisplayName = DisplayName.encode('utf-8')
                        try:
                            DisplayIcon, REG_SZ = winreg.QueryValueEx(each_key, "DisplayIcon")
                            DisplayIcon = DisplayIcon.encode('utf-8')
                            UninstallString, REG_SZ = winreg.QueryValueEx(each_key, "UninstallString")
                            UninstallString = UninstallString.encode('utf-8')
                            InstallLocation, REG_SZ = winreg.QueryValueEx(each_key, "InstallLocation")
                            InstallLocation = InstallLocation.encode('utf-8')
                        except WindowsError:
                            pass
                        # Keep registry entries that provide both DisplayName and DisplayIcon
                        if DisplayName and DisplayIcon:
                            software_name.append(str(DisplayName, encoding='utf-8'))
                            result = self.orderDict(str(q + j),
                                                    str(DisplayName, encoding='utf-8'),
                                                    str(DisplayIcon, encoding='utf-8'),
                                                    UninstallString, KeyPath, InstallLocation)
                    except WindowsError:
                        pass
                # Total number of entries obtained so far
                k = q + h
                # Reassign the obtained count for the next subtree
                q = h
        except IOError:
            pass
        else:
            # Define the per-user registry location to check
            i = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall'
            key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, i, 0, winreg.KEY_ALL_ACCESS)
            for j in range(0, winreg.QueryInfoKey(key)[0] - 1):
                DisplayName = ''
                DisplayIcon = ''
                UninstallString = ''
                InstallLocation = ''
                try:
                    key_name = winreg.EnumKey(key, j)
                    key_path = i + '\\' + key_name
                    KeyPath = 'HKEY_CURRENT_USER\\' + key_path
                    each_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, key_path, 0, winreg.KEY_ALL_ACCESS)
                    DisplayName, REG_SZ = winreg.QueryValueEx(each_key, 'DisplayName')
                    DisplayName = DisplayName.encode('utf-8')
                    try:
                        DisplayIcon, REG_SZ = winreg.QueryValueEx(each_key, "DisplayIcon")
                        DisplayIcon = DisplayIcon.encode('utf-8')
                        UninstallString, REG_SZ = winreg.QueryValueEx(each_key, "UninstallString")
                        UninstallString = UninstallString.encode('utf-8')
                        InstallLocation, REG_SZ = winreg.QueryValueEx(each_key, "InstallLocation")
                        InstallLocation = InstallLocation.encode('utf-8')
                    except WindowsError:
                        pass
                    # Keep registry entries that provide both DisplayName and DisplayIcon
                    if DisplayName and DisplayIcon:
                        software_name.append(str(DisplayName, encoding='utf-8'))
                        result = self.orderDict(str(q + j),
                                                str(DisplayName, encoding='utf-8'),
                                                str(DisplayIcon, encoding='utf-8'),
                                                UninstallString, KeyPath, InstallLocation)
                except WindowsError:
                    pass
        software_name = list(set(software_name))
        software_name = sorted(software_name)
        return software_name, result
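A minimal usage sketch for the class above; it assumes a Windows host, the class in scope, and enough privilege for the KEY_ALL_ACCESS opens the code requests:

# Windows-only sketch; HKLM enumeration may need elevated privileges.
editor = winregeditor()
names, details = editor.getwinreg()

print(len(names), 'installed programs found')
for numkey, info in list(details.items())[:3]:
    # Each entry carries the registry path plus either an 'exe' or an 'icon'.
    print(numkey, info['DisplayName'], info.get('exe', info.get('icon')))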
50.80198
173
0.536348
449
5,131
6
0.200445
0.020787
0.032665
0.0683
0.713808
0.705271
0.654788
0.618411
0.590943
0.590943
0
0.009259
0.368544
5,131
101
174
50.80198
0.822222
0.02748
0
0.604651
0
0.011628
0.093976
0.033133
0
0
0
0
0
1
0.023256
false
0.05814
0.023256
0
0.093023
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2