hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0857d32f63007eae2f73b44b39cd06f0e51d9f6d
| 26
|
py
|
Python
|
gpalu/models/__init__.py
|
Obito1903/gpa.lu
|
45db1a53a0818b73485e6862e3ce18daefcc7f72
|
[
"MIT"
] | null | null | null |
gpalu/models/__init__.py
|
Obito1903/gpa.lu
|
45db1a53a0818b73485e6862e3ce18daefcc7f72
|
[
"MIT"
] | null | null | null |
gpalu/models/__init__.py
|
Obito1903/gpa.lu
|
45db1a53a0818b73485e6862e3ce18daefcc7f72
|
[
"MIT"
] | null | null | null |
from .email import Email
| 13
| 25
| 0.769231
| 4
| 26
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 26
| 1
| 26
| 26
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4b6b746f98ee9e30a7aa5b377c0e947983be359c
| 70
|
py
|
Python
|
flypy/exceptions.py
|
token631/fly.py
|
e37ea1f63aaedafeb462249dafa6dc97200cb856
|
[
"MIT"
] | null | null | null |
flypy/exceptions.py
|
token631/fly.py
|
e37ea1f63aaedafeb462249dafa6dc97200cb856
|
[
"MIT"
] | null | null | null |
flypy/exceptions.py
|
token631/fly.py
|
e37ea1f63aaedafeb462249dafa6dc97200cb856
|
[
"MIT"
] | null | null | null |
class NotFound(Exception):
pass
class HTTPError(Exception):
pass
| 11.666667
| 27
| 0.757143
| 8
| 70
| 6.625
| 0.625
| 0.490566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 5
| 28
| 14
| 0.898305
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4b76ae8a0c1ccf523e01cbfa2e7b57b23de82a38
| 150
|
py
|
Python
|
src/cogs/ideas/alert.py
|
jmuilwijk/DiscordBot
|
63cbb3e96d473b24ecc2928fd5e65ab0e4fec4a8
|
[
"MIT"
] | 3
|
2018-11-29T23:31:22.000Z
|
2019-05-15T12:13:05.000Z
|
src/cogs/ideas/alert.py
|
jmuilwijk/DiscordBot
|
63cbb3e96d473b24ecc2928fd5e65ab0e4fec4a8
|
[
"MIT"
] | 1
|
2018-12-13T21:07:13.000Z
|
2018-12-13T21:07:13.000Z
|
src/cogs/ideas/alert.py
|
jmuilwijk/DiscordBot
|
63cbb3e96d473b24ecc2928fd5e65ab0e4fec4a8
|
[
"MIT"
] | 2
|
2019-05-26T15:49:24.000Z
|
2019-07-31T21:13:49.000Z
|
# TODO: add the option to send a notification to all servers, takes input, owner only.
# TODO: allow for customizable alert channel based on server.
| 37.5
| 86
| 0.766667
| 24
| 150
| 4.791667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18
| 150
| 3
| 87
| 50
| 0.934959
| 0.96
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.333333
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4bbabee3369dc810e5cf0ad7813506bbeae07e5b
| 9,399
|
py
|
Python
|
TEST3D/GUI/0010002_page_micro/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 31
|
2015-04-01T15:59:36.000Z
|
2022-03-18T20:21:47.000Z
|
TEST3D/GUI/0010002_page_micro/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 3
|
2015-02-06T19:30:24.000Z
|
2017-05-25T14:14:31.000Z
|
TEST3D/GUI/0010002_page_micro/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 7
|
2015-01-23T15:19:22.000Z
|
2021-06-09T09:03:59.000Z
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
# We test the basical ways of creating a microstructure and handling their voxels groups.
# And also saving a Python Log.
import tests
findWidget('OOF3D').resize(550, 350)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
# create a microstructure named test
findWidget('OOF3D:Microstructure Page:New').clicked()
checkpoint toplevel widget mapped Dialog-Create Microstructure
findWidget('Dialog-Create Microstructure').resize(315, 199)
findWidget('Dialog-Create Microstructure:name:Auto').clicked()
findWidget('Dialog-Create Microstructure:name:Text').set_text('t')
findWidget('Dialog-Create Microstructure:name:Text').set_text('te')
findWidget('Dialog-Create Microstructure:name:Text').set_text('tes')
findWidget('Dialog-Create Microstructure:name:Text').set_text('test')
findWidget('Dialog-Create Microstructure:gtk-ok').clicked()
findWidget('OOF3D:Microstructure Page:Pane').set_position(159)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint Field page sensitized
checkpoint meshable button set
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.Microstructure.New
# create a voxels selection named a
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new voxel group
findWidget('Dialog-Create new voxel group').resize(246, 67)
findWidget('Dialog-Create new voxel group:name:Auto').clicked()
findWidget('Dialog-Create new voxel group:name:Text').set_text('a')
findWidget('Dialog-Create new voxel group:gtk-ok').clicked()
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
checkpoint microstructure page sensitized
checkpoint meshable button set
# create a voxels selection with a generated name
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new voxel group
findWidget('Dialog-Create new voxel group').resize(246, 67)
findWidget('Dialog-Create new voxel group:name:Auto').clicked()
findWidget('Dialog-Create new voxel group:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
# create another voxels selection with a generated name
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new voxel group
findWidget('Dialog-Create new voxel group').resize(246, 67)
findWidget('Dialog-Create new voxel group:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
# create a microstructure with loaded files
findWidget('OOF3D:Microstructure Page:NewFromFile').clicked()
checkpoint toplevel widget mapped Dialog-Load Image and create Microstructure
findWidget('Dialog-Load Image and create Microstructure').resize(401, 215)
findWidget('Dialog-Load Image and create Microstructure:filenames:Entire Directory:directory').set_text('TEST_DATA/5color')
findWidget('Dialog-Load Image and create Microstructure:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(603, 200)
findWidget('OOF3D:Microstructure Page:Pane').set_position(159)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint OOF.Microstructure.Create_From_ImageFile
# check the microstructures state
assert tests.sensitization4()
assert tests.chooserCheck('OOF3D:Microstructure Page:Microstructure', ['test', '5color'])
assert tests.chooserStateCheck('OOF3D:Microstructure Page:Microstructure', '5color')
assert tests.chooserCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', [])
assert tests.chooserListStateCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', [])
# select the microstructure test
setComboBox(findWidget('OOF3D:Microstructure Page:Microstructure'), 'test')
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint microstructure page sensitized
# check the current microstructure voxels selection groups
assert tests.sensitization5()
assert tests.chooserCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', ['a (0 voxels, meshable)', 'pixelgroup (0 voxels, meshable)', 'pixelgroup<2> (0 voxels, meshable)'])
assert tests.chooserListStateCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', [])
# delete the microstructure test
findWidget('OOF3D:Microstructure Page:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(190, 89)
findWidget('Questioner:gtk-yes').clicked()
checkpoint pixel page updated
checkpoint active area status updated
checkpoint Materials page updated
findWidget('OOF3D:Microstructure Page:Pane').set_position(159)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint Field page sensitized
##checkpoint skeleton page sensitized
checkpoint meshable button set
checkpoint OOF.Microstructure.Delete
# check that the microstructures list contain jusr 5color
assert tests.sensitization4()
assert tests.chooserCheck('OOF3D:Microstructure Page:Microstructure', ['5color'])
assert tests.chooserStateCheck('OOF3D:Microstructure Page:Microstructure', '5color')
assert tests.chooserCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', [])
assert tests.chooserListStateCheck('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList', [])
# delete the microstructure 5color too
findWidget('OOF3D:Microstructure Page:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(190, 89)
findWidget('Questioner:gtk-yes').clicked()
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint active area status updated
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
findWidget('OOF3D:Microstructure Page:Pane').set_position(156)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint Field page sensitized
checkpoint microstructure page sensitized
##checkpoint skeleton page sensitized
checkpoint OOF.Microstructure.Delete
checkpoint meshable button set
# quit OOF3D and create a saved python log file
findMenu(findWidget('OOF3D:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(190, 95)
findWidget('Dialog-Python_Log:filename').set_text('micro2.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('micro2.log')
widget_0=findWidget('OOF3D')
handled_0=widget_0.event(event(gtk.gdk.DELETE,window=widget_0.window))
| 45.84878
| 198
| 0.836685
| 1,142
| 9,399
| 6.861646
| 0.158494
| 0.122511
| 0.085758
| 0.072358
| 0.796452
| 0.767611
| 0.748469
| 0.698826
| 0.647779
| 0.616514
| 0
| 0.014842
| 0.089584
| 9,399
| 204
| 199
| 46.073529
| 0.9009
| 0.118736
| 0
| 0.77439
| 0
| 0
| 0.267805
| 0.075703
| 0
| 0
| 0
| 0
| 0.085366
| 0
| null | null | 0
| 0.006098
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4bbd57af7ef94a3d5738ca3bb43d41d9124747c4
| 119
|
py
|
Python
|
qgis/qgisCtrl.py
|
1cgeo/ControleAcervo
|
e843d9f814489359e5ab2da25f6fc28cf5c7cd56
|
[
"MIT"
] | null | null | null |
qgis/qgisCtrl.py
|
1cgeo/ControleAcervo
|
e843d9f814489359e5ab2da25f6fc28cf5c7cd56
|
[
"MIT"
] | null | null | null |
qgis/qgisCtrl.py
|
1cgeo/ControleAcervo
|
e843d9f814489359e5ab2da25f6fc28cf5c7cd56
|
[
"MIT"
] | null | null | null |
class QgisCtrl():
def __init__(self, iface):
super(QgisCtrl, self).__init__()
self.iface = iface
| 17
| 40
| 0.605042
| 13
| 119
| 4.923077
| 0.538462
| 0.25
| 0.40625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268908
| 119
| 7
| 41
| 17
| 0.735632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
298cfe574862d940d8508d2ae50f88f7d0c05001
| 6,356
|
py
|
Python
|
z2/part2/interactive/jm/random_normal_1/463878308.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_normal_1/463878308.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_normal_1/463878308.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 463878308
"""
"""
random actions, total chaos
"""
board = gamma_new(6, 6, 6, 3)
assert board is not None
assert gamma_move(board, 1, 1, 2) == 1
assert gamma_move(board, 2, 3, 2) == 1
assert gamma_move(board, 3, 2, 5) == 1
assert gamma_move(board, 4, 0, 1) == 1
assert gamma_move(board, 5, 2, 5) == 0
assert gamma_move(board, 5, 2, 0) == 1
assert gamma_move(board, 6, 3, 2) == 0
assert gamma_move(board, 1, 5, 1) == 1
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_free_fields(board, 3) == 29
assert gamma_move(board, 4, 1, 4) == 1
assert gamma_free_fields(board, 4) == 28
assert gamma_move(board, 5, 4, 1) == 1
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 6, 5, 5) == 1
assert gamma_move(board, 1, 5, 1) == 0
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_move(board, 3, 1, 3) == 1
assert gamma_busy_fields(board, 3) == 2
assert gamma_move(board, 5, 3, 0) == 1
assert gamma_move(board, 5, 4, 2) == 1
assert gamma_busy_fields(board, 5) == 4
assert gamma_move(board, 6, 3, 3) == 1
assert gamma_move(board, 6, 2, 4) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 2, 3, 4) == 1
assert gamma_move(board, 3, 5, 0) == 1
assert gamma_move(board, 4, 3, 0) == 0
assert gamma_move(board, 5, 4, 1) == 0
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_free_fields(board, 5) == 17
assert gamma_move(board, 6, 3, 0) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 2, 2, 5) == 0
board964821874 = gamma_board(board)
assert board964821874 is not None
assert board964821874 == ("..3.16\n"
".4622.\n"
".3.6..\n"
".1.25.\n"
"42..51\n"
"..55.3\n")
del board964821874
board964821874 = None
assert gamma_move(board, 3, 5, 1) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 6, 3, 2) == 0
assert gamma_busy_fields(board, 6) == 3
assert gamma_move(board, 1, 3, 5) == 1
assert gamma_golden_move(board, 2, 5, 3) == 0
assert gamma_move(board, 3, 4, 3) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 5, 0, 0) == 1
assert gamma_move(board, 6, 1, 2) == 0
assert gamma_move(board, 1, 3, 0) == 0
assert gamma_move(board, 2, 3, 1) == 1
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_move(board, 3, 5, 5) == 0
assert gamma_busy_fields(board, 3) == 3
assert gamma_move(board, 4, 0, 4) == 1
assert gamma_move(board, 4, 3, 4) == 0
assert gamma_move(board, 5, 2, 3) == 0
assert gamma_move(board, 6, 2, 2) == 0
assert gamma_move(board, 6, 2, 3) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 2, 5, 1) == 0
assert gamma_move(board, 2, 4, 0) == 0
board455698278 = gamma_board(board)
assert board455698278 is not None
assert board455698278 == ("..3116\n"
"44622.\n"
".366..\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board455698278
board455698278 = None
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 3, 0, 2) == 0
board995577683 = gamma_board(board)
assert board995577683 is not None
assert board995577683 == ("..3116\n"
"44622.\n"
".366..\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board995577683
board995577683 = None
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 4, 5, 3) == 1
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 0, 1) == 0
assert gamma_move(board, 5, 1, 5) == 0
assert gamma_move(board, 6, 1, 4) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 3, 0) == 0
assert gamma_busy_fields(board, 2) == 5
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_move(board, 3, 3, 5) == 0
assert gamma_move(board, 4, 2, 2) == 0
assert gamma_move(board, 5, 3, 4) == 0
assert gamma_free_fields(board, 5) == 5
board670804334 = gamma_board(board)
assert board670804334 is not None
assert board670804334 == ("..3116\n"
"44622.\n"
".366.4\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board670804334
board670804334 = None
assert gamma_move(board, 6, 2, 0) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_busy_fields(board, 1) == 4
assert gamma_move(board, 3, 2, 5) == 0
assert gamma_move(board, 3, 4, 4) == 0
assert gamma_move(board, 4, 1, 2) == 0
assert gamma_move(board, 4, 4, 3) == 1
board397825260 = gamma_board(board)
assert board397825260 is not None
assert board397825260 == ("..3116\n"
"44622.\n"
".36644\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board397825260
board397825260 = None
assert gamma_move(board, 5, 4, 5) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 6, 5, 0) == 0
assert gamma_move(board, 6, 3, 4) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_free_fields(board, 1) == 3
assert gamma_move(board, 2, 2, 2) == 1
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_free_fields(board, 2) == 3
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_golden_move(board, 3, 5, 5) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 5, 3, 2) == 0
assert gamma_move(board, 5, 2, 1) == 1
assert gamma_free_fields(board, 5) == 3
assert gamma_move(board, 6, 5, 5) == 0
assert gamma_move(board, 6, 3, 4) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 1, 5, 0) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_move(board, 2, 5, 0) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_move(board, 5, 0, 5) == 0
board495675822 = gamma_board(board)
assert board495675822 is not None
assert board495675822 == ("..3116\n"
"44622.\n"
".36644\n"
".1225.\n"
"425251\n"
"5.55.3\n")
del board495675822
board495675822 = None
assert gamma_move(board, 6, 1, 0) == 1
assert gamma_move(board, 6, 4, 5) == 0
gamma_delete(board)
| 29.425926
| 46
| 0.657174
| 1,163
| 6,356
| 3.454858
| 0.048151
| 0.323046
| 0.369587
| 0.492782
| 0.766302
| 0.735938
| 0.516924
| 0.326033
| 0.214783
| 0.189647
| 0
| 0.166603
| 0.17747
| 6,356
| 215
| 47
| 29.562791
| 0.601951
| 0
| 0
| 0.209424
| 0
| 0
| 0.045948
| 0
| 0
| 0
| 0
| 0
| 0.685864
| 1
| 0
| false
| 0
| 0.005236
| 0
| 0.005236
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
29a1f31133d4341cb89f278f61c56d2d7e276ec8
| 147
|
py
|
Python
|
app/schemas/console.py
|
cPoolChia/ChiaAutoplotter-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | 7
|
2021-06-01T09:20:34.000Z
|
2021-10-12T07:24:04.000Z
|
app/schemas/console.py
|
cPoolChia/ChiaFarmerManager-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | null | null | null |
app/schemas/console.py
|
cPoolChia/ChiaFarmerManager-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | 1
|
2021-05-31T13:08:14.000Z
|
2021-05-31T13:08:14.000Z
|
from pydantic import BaseModel
import time
class ConsoleLog(BaseModel):
command: str = ""
stdout: str = ""
time: float = time.time()
| 16.333333
| 30
| 0.659864
| 17
| 147
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231293
| 147
| 9
| 31
| 16.333333
| 0.858407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
29beae4ff296f1930eb080c3d16565548b22a6bc
| 23
|
py
|
Python
|
pmanagement/items/models/__init__.py
|
Ferald89/projectmanagemen
|
88d96f2f59daca0029b69a60e69c0118dacc30ea
|
[
"MIT"
] | null | null | null |
pmanagement/items/models/__init__.py
|
Ferald89/projectmanagemen
|
88d96f2f59daca0029b69a60e69c0118dacc30ea
|
[
"MIT"
] | 2
|
2020-06-15T17:13:04.000Z
|
2020-06-15T17:13:05.000Z
|
pmanagement/items/models/__init__.py
|
Ferald89/projectmanagemen
|
88d96f2f59daca0029b69a60e69c0118dacc30ea
|
[
"MIT"
] | null | null | null |
from .items import Item
| 23
| 23
| 0.826087
| 4
| 23
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
29e744c6a5499d82c14a383b37e3b966a5023bcb
| 45
|
py
|
Python
|
testsuite/modulegraph-dir/import_sys_star.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 9
|
2020-03-22T14:48:01.000Z
|
2021-05-30T12:18:12.000Z
|
testsuite/modulegraph-dir/import_sys_star.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 15
|
2020-01-06T10:02:32.000Z
|
2021-05-28T12:22:44.000Z
|
testsuite/modulegraph-dir/import_sys_star.py
|
ronaldoussoren/modulegraph2
|
b6ab1766b0098651b51083235ff8a18a5639128b
|
[
"MIT"
] | 4
|
2020-05-10T18:51:41.000Z
|
2021-04-07T14:03:12.000Z
|
if __name__ == "main":
from sys import *
| 15
| 22
| 0.6
| 6
| 45
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 45
| 2
| 23
| 22.5
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d99c035add957bd3ea6309a4538931c1cc89751e
| 132
|
py
|
Python
|
Code/test2.py
|
Tim-eyes/Beamer-Template-LaTex
|
194d46a98205d89fe018a71030f8d6a2fc57ea52
|
[
"MIT"
] | 1
|
2022-01-30T14:48:46.000Z
|
2022-01-30T14:48:46.000Z
|
Code/test2.py
|
Tim-eyes/Beamer-Template-LaTex
|
194d46a98205d89fe018a71030f8d6a2fc57ea52
|
[
"MIT"
] | null | null | null |
Code/test2.py
|
Tim-eyes/Beamer-Template-LaTex
|
194d46a98205d89fe018a71030f8d6a2fc57ea52
|
[
"MIT"
] | null | null | null |
class A:
def func():
pass
class B:
def func():
pass
class C(B,A):
pass
c=C()
c.func()
| 10.153846
| 15
| 0.401515
| 19
| 132
| 2.789474
| 0.368421
| 0.264151
| 0.415094
| 0.603774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.462121
| 132
| 13
| 16
| 10.153846
| 0.746479
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.3
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d9b597da6a80d86b05ca27f0cbe21831eb36c9ab
| 53
|
py
|
Python
|
neorl/rl/baselines/acer/__init__.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 20
|
2021-04-20T19:15:33.000Z
|
2022-03-19T17:00:12.000Z
|
neorl/rl/baselines/acer/__init__.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 17
|
2021-04-07T21:52:41.000Z
|
2022-03-06T16:05:31.000Z
|
neorl/rl/baselines/acer/__init__.py
|
evdcush/neorl
|
a1af069072e752ab79e7279a88ad95d195a81821
|
[
"MIT"
] | 8
|
2021-05-07T03:36:30.000Z
|
2021-12-15T03:41:41.000Z
|
from neorl.rl.baselines.acer.acer_simple import ACER
| 26.5
| 52
| 0.849057
| 9
| 53
| 4.888889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 53
| 1
| 53
| 53
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a31aa3ebf895a31cf37950cfacc8ea7ae526c0d
| 23
|
py
|
Python
|
advanced/part10-12_course_records/src/course_records.py
|
Hannah-Abi/python-pro-21
|
2ce32c4bf118054329d19afdf83c50561be1ada8
|
[
"MIT"
] | null | null | null |
advanced/part10-12_course_records/src/course_records.py
|
Hannah-Abi/python-pro-21
|
2ce32c4bf118054329d19afdf83c50561be1ada8
|
[
"MIT"
] | null | null | null |
advanced/part10-12_course_records/src/course_records.py
|
Hannah-Abi/python-pro-21
|
2ce32c4bf118054329d19afdf83c50561be1ada8
|
[
"MIT"
] | null | null | null |
# tee ratkaisusi tänne
| 11.5
| 22
| 0.782609
| 3
| 23
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0.869565
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a36814d7fa4dcb0b8ef3a9b3f22c1ec07527e7f
| 231
|
py
|
Python
|
jetbrains-academy/Numeric Matrix Processor/Problems/Hexagon/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
jetbrains-academy/Numeric Matrix Processor/Problems/Hexagon/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
jetbrains-academy/Numeric Matrix Processor/Problems/Hexagon/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
import math
class Hexagon:
def __init__(self, side_length):
self.side_length = side_length
# create get_area here
def get_area(self):
return round((3 * math.sqrt(3) * (self.side_length ** 2)) / 2, 3)
| 21
| 73
| 0.632035
| 34
| 231
| 4
| 0.529412
| 0.294118
| 0.308824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028902
| 0.251082
| 231
| 10
| 74
| 23.1
| 0.757225
| 0.08658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
8a3f055b431aafd4e425dbe79f38d205bce2d414
| 2,142
|
py
|
Python
|
screenplay/actions/if_value_of.py
|
byran/ScreenPlay
|
b26bbf125c224453cc1d09933e84b31374b32a2a
|
[
"MIT"
] | null | null | null |
screenplay/actions/if_value_of.py
|
byran/ScreenPlay
|
b26bbf125c224453cc1d09933e84b31374b32a2a
|
[
"MIT"
] | null | null | null |
screenplay/actions/if_value_of.py
|
byran/ScreenPlay
|
b26bbf125c224453cc1d09933e84b31374b32a2a
|
[
"MIT"
] | 2
|
2020-03-22T18:12:45.000Z
|
2020-06-15T14:57:00.000Z
|
from screenplay import Action, Actor
class _equals_if_value_of(Action):
def __init__(self, id: str):
super().__init__()
self._id = id
self._values = []
self._actions = []
def then(self, *actions):
self._actions.extend(actions)
return self
def is_None(self):
self._values.append(None)
return self
def equals(self, *values):
self._values.extend(values)
return self
def perform_as(self, actor: Actor):
if self._id is not None:
value = actor.state[self._id].value
for required_value in self._values:
if value == required_value:
actor.attempts_to(*self._actions)
class _not_equals_if_value_of(Action):
def __init__(self, id: str):
super().__init__()
self._id = id
self._values = []
self._actions = []
def then(self, *actions):
self._actions.extend(actions)
return self
def is_not_None(self):
self._values.append(None)
return self
def does_not_equal_any_of(self, *values):
self._values.extend(values)
return self
def perform_as(self, actor: Actor):
if self._id is not None:
value = actor.state[self._id].value
matched = False
for required_value in self._values:
if value == required_value:
matched = True
if matched is False:
actor.attempts_to(*self._actions)
class if_value_of():
def __init__(self, id: str):
super().__init__()
self._id = id
def is_None(self):
return _equals_if_value_of(self._id).is_None()
def equals(self, *values):
return _equals_if_value_of(self._id).equals(*values)
def is_not_None(self):
return _not_equals_if_value_of(self._id).is_not_None()
def does_not_equal_any_of(self, *values):
return _not_equals_if_value_of(self._id).does_not_equal_any_of(*values)
def perform_as(self, actor: Actor):
assert False, "No values specified in if_value_of task"
| 26.775
| 79
| 0.603641
| 279
| 2,142
| 4.240143
| 0.154122
| 0.071006
| 0.060862
| 0.076078
| 0.841927
| 0.803043
| 0.728656
| 0.679628
| 0.58918
| 0.519865
| 0
| 0
| 0.297852
| 2,142
| 79
| 80
| 27.113924
| 0.786569
| 0
| 0
| 0.8
| 0
| 0
| 0.018207
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 1
| 0.266667
| false
| 0
| 0.016667
| 0.066667
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a5bcc522c69e3151207fb3733801f0a08ab153c
| 128
|
py
|
Python
|
Backend/ecom/api/user/admin.py
|
parthparikh02/Ecommerce-Store
|
1b1e52b37c0fa60290ab23f357f4a86d63f67504
|
[
"MIT"
] | null | null | null |
Backend/ecom/api/user/admin.py
|
parthparikh02/Ecommerce-Store
|
1b1e52b37c0fa60290ab23f357f4a86d63f67504
|
[
"MIT"
] | null | null | null |
Backend/ecom/api/user/admin.py
|
parthparikh02/Ecommerce-Store
|
1b1e52b37c0fa60290ab23f357f4a86d63f67504
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import CustomeUser
# Register your models here.
admin.site.register(CustomeUser)
| 21.333333
| 32
| 0.820313
| 17
| 128
| 6.176471
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117188
| 128
| 5
| 33
| 25.6
| 0.929204
| 0.203125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a615622d8c68a67983be45a5d71e0e5c9b15d90
| 74
|
py
|
Python
|
web/olga/functional_tests/__init__.py
|
raccoongang/acceptor
|
fdc1504912b502c8d789d5478eba8cc1a491934b
|
[
"Apache-2.0"
] | 5
|
2017-10-20T05:52:59.000Z
|
2020-02-25T10:46:33.000Z
|
web/olga/functional_tests/__init__.py
|
raccoongang/OLGA
|
fdc1504912b502c8d789d5478eba8cc1a491934b
|
[
"Apache-2.0"
] | 233
|
2017-08-14T10:56:16.000Z
|
2021-04-07T01:09:17.000Z
|
web/olga/functional_tests/__init__.py
|
raccoongang/acceptor
|
fdc1504912b502c8d789d5478eba8cc1a491934b
|
[
"Apache-2.0"
] | 2
|
2018-03-16T22:22:57.000Z
|
2018-06-15T20:02:56.000Z
|
# pylint: disable-all
# flake8: noqa
from olga.functional_tests import *
| 14.8
| 35
| 0.756757
| 10
| 74
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.148649
| 74
| 4
| 36
| 18.5
| 0.857143
| 0.432432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a66819032fa67d2e5befedfedc06b5ef87d78de
| 37
|
py
|
Python
|
bd_tools/bin/__init__.py
|
berkeleyopenarms/bldc-controller
|
0b15539071baa13dc392181cbcdeb1692f1d3745
|
[
"MIT"
] | 14
|
2019-04-10T07:42:13.000Z
|
2019-12-11T08:58:39.000Z
|
bd_tools/bin/__init__.py
|
OpenRTA/bldc-controller
|
0b15539071baa13dc392181cbcdeb1692f1d3745
|
[
"MIT"
] | 5
|
2019-04-09T08:18:57.000Z
|
2019-08-22T15:31:08.000Z
|
bd_tools/bin/__init__.py
|
berkeleyopenarms/bldc-controller
|
0b15539071baa13dc392181cbcdeb1692f1d3745
|
[
"MIT"
] | 3
|
2019-04-10T23:01:58.000Z
|
2019-11-02T06:43:11.000Z
|
"""Executable tools for BetzDrive"""
| 18.5
| 36
| 0.72973
| 4
| 37
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.818182
| 0.810811
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a79745e415f19a865d6ce1652d3d0c517dd88cf
| 252
|
py
|
Python
|
features_extractor/__init__.py
|
omarsou/MIL-lymphocytosis
|
0971e1cfc9e033ec6af6bc3e214a9eb4bb9f326a
|
[
"MIT"
] | 3
|
2021-02-16T15:27:28.000Z
|
2021-03-26T08:40:08.000Z
|
features_extractor/__init__.py
|
omarsou/MIL-lymphocytosis
|
0971e1cfc9e033ec6af6bc3e214a9eb4bb9f326a
|
[
"MIT"
] | null | null | null |
features_extractor/__init__.py
|
omarsou/MIL-lymphocytosis
|
0971e1cfc9e033ec6af6bc3e214a9eb4bb9f326a
|
[
"MIT"
] | 1
|
2021-02-28T11:19:15.000Z
|
2021-02-28T11:19:15.000Z
|
from .dataloader import LymphoDataset, InferLymphoDataset
from .net import LymphoAutoEncoder
from .utils import save, train, MyRotateTransform
__all__ = ['LymphoDataset', 'InferLymphoDataset', 'LymphoAutoEncoder', 'save', 'train', 'MyRotateTransform']
| 50.4
| 108
| 0.805556
| 22
| 252
| 9.045455
| 0.545455
| 0.311558
| 0.261307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09127
| 252
| 5
| 108
| 50.4
| 0.868996
| 0
| 0
| 0
| 0
| 0
| 0.29249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8a7fa7d8cfb96a3bc8b8f8083a18b9a402ff08cc
| 45
|
py
|
Python
|
data_loaders/__init__.py
|
neomatrix369/embeddings-for-trees
|
022fe30df4ece0162c16aaa42a59c93240eae180
|
[
"MIT"
] | 1
|
2020-12-12T18:10:58.000Z
|
2020-12-12T18:10:58.000Z
|
data_loaders/__init__.py
|
neomatrix369/embeddings-for-trees
|
022fe30df4ece0162c16aaa42a59c93240eae180
|
[
"MIT"
] | null | null | null |
data_loaders/__init__.py
|
neomatrix369/embeddings-for-trees
|
022fe30df4ece0162c16aaa42a59c93240eae180
|
[
"MIT"
] | null | null | null |
from .tree_dgl_dataset import TreeDGLDataset
| 22.5
| 44
| 0.888889
| 6
| 45
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8a9686ecff066bb09369c8550f20f9de978d0f3c
| 915
|
py
|
Python
|
pirates/leveleditor/worldData/peridida_island_tunnel_jungle_2.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/peridida_island_tunnel_jungle_2.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/peridida_island_tunnel_jungle_2.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Objects': {'1172209713.31sdnaik': {'Type': 'Connector Tunnel','Name': 'peridida_island_tunnel_jungle_2','AdditionalData': ['tunnel_jungle'],'Objects': {'1178816407.44kmuller': {'Type': 'Locator Node','Name': 'portal_connector_1','Hpr': VBase3(-90.189, 0.0, 0.0),'Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1178816407.45kmuller': {'Type': 'Locator Node','Name': 'portal_connector_2','Hpr': VBase3(88.723, 0.0, 0.0),'Pos': Point3(-94.898, 150.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)}},'Visual': {'Model': 'models/tunnels/tunnel_jungle'}}},'Node Links': [],'Layers': {},'ObjectIds': {'1172209713.31sdnaik': '["Objects"]["1172209713.31sdnaik"]','1178816407.44kmuller': '["Objects"]["1172209713.31sdnaik"]["Objects"]["1178816407.44kmuller"]','1178816407.45kmuller': '["Objects"]["1172209713.31sdnaik"]["Objects"]["1178816407.45kmuller"]'}}
| 457.5
| 868
| 0.684153
| 120
| 915
| 5.133333
| 0.366667
| 0.042208
| 0.043831
| 0.032468
| 0.358766
| 0.217532
| 0.064935
| 0.064935
| 0.064935
| 0.064935
| 0
| 0.2194
| 0.053552
| 915
| 2
| 868
| 457.5
| 0.491917
| 0
| 0
| 0
| 0
| 0
| 0.598253
| 0.252183
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8ab2ba4aba9a88e74cc3d3cd49b5009f86ac569b
| 1,760
|
py
|
Python
|
scripts/field/hayatoJobChange.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/field/hayatoJobChange.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/field/hayatoJobChange.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# Created by MechAviv
# Map ID :: 807040000
# Momijigaoka : Unfamiliar Hillside
if "1" not in sm.getQRValue(57375) and sm.getChr().getJob() == 4001:
sm.curNodeEventEnd(True)
sm.setTemporarySkillSet(0)
sm.setInGameDirectionMode(True, True, False, False)
sm.sendDelay(1000)
sm.levelUntil(10)
sm.createQuestWithQRValue(57375, "1")
sm.removeSkill(40010001)
sm.setJob(4100)
sm.resetStats()
# Unhandled Stat Changed [HP] Packet: 00 00 00 04 00 00 00 00 00 00 CB 00 00 00 FF 00 00 00 00
# Unhandled Stat Changed [MHP] Packet: 00 00 00 08 00 00 00 00 00 00 C2 00 00 00 FF 00 00 00 00
# Unhandled Stat Changed [MMP] Packet: 00 00 00 20 00 00 00 00 00 00 71 00 00 00 FF 00 00 00 00
sm.addSP(6, True)
# Unhandled Stat Changed [MHP] Packet: 00 00 00 08 00 00 00 00 00 00 BC 01 00 00 FF 00 00 00 00
# Unhandled Stat Changed [MMP] Packet: 00 00 00 20 00 00 00 00 00 00 D5 00 00 00 FF 00 00 00 00
# [INVENTORY_GROW] [01 1C ]
# [INVENTORY_GROW] [02 1C ]
# [INVENTORY_GROW] [03 1C ]
# [INVENTORY_GROW] [04 1C ]
sm.giveSkill(40010000, 1, 1)
sm.giveSkill(40010067, 1, 1)
sm.giveSkill(40011288, 1, 1)
sm.giveSkill(40011289, 1, 1)
sm.removeSkill(40011227)
sm.giveSkill(40011227, 1, 1)
# Unhandled Stat Changed [HP] Packet: 00 00 00 04 00 00 00 00 00 00 D2 01 00 00 FF 00 00 00 00
# Unhandled Stat Changed [MP] Packet: 00 00 00 10 00 00 00 00 00 00 DF 00 00 00 FF 00 00 00 00
# Unhandled Stat Changed [WILL_EXP] Packet: 00 00 00 00 40 00 00 00 00 00 20 2B 00 00 FF 00 00 00 00
# Unhandled Message [INC_NON_COMBAT_STAT_EXP_MESSAGE] Packet: 14 00 00 40 00 00 00 00 00 20 2B 00 00
sm.setTemporarySkillSet(0)
sm.setInGameDirectionMode(False, True, False, False)
| 48.888889
| 104
| 0.675568
| 323
| 1,760
| 3.650155
| 0.244582
| 0.335878
| 0.3257
| 0.230704
| 0.523325
| 0.443596
| 0.433418
| 0.433418
| 0.390161
| 0.390161
| 0
| 0.31292
| 0.239205
| 1,760
| 36
| 105
| 48.888889
| 0.567588
| 0.585227
| 0
| 0.105263
| 0
| 0
| 0.002801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8aba92aaac14809e5d4ac985da785ad705c6c3aa
| 683
|
py
|
Python
|
python-flask-server/swagger_server/models/__init__.py
|
broadinstitute/sharpener-bicluster-expander
|
8c09e32b8bfbebd5e33c274a5a1a93a550e5aeef
|
[
"MIT"
] | null | null | null |
python-flask-server/swagger_server/models/__init__.py
|
broadinstitute/sharpener-bicluster-expander
|
8c09e32b8bfbebd5e33c274a5a1a93a550e5aeef
|
[
"MIT"
] | null | null | null |
python-flask-server/swagger_server/models/__init__.py
|
broadinstitute/sharpener-bicluster-expander
|
8c09e32b8bfbebd5e33c274a5a1a93a550e5aeef
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.attribute import Attribute
from swagger_server.models.error_msg import ErrorMsg
from swagger_server.models.gene_info import GeneInfo
from swagger_server.models.gene_info_identifiers import GeneInfoIdentifiers
from swagger_server.models.model_property import ModelProperty
from swagger_server.models.parameter import Parameter
from swagger_server.models.transformer_info import TransformerInfo
from swagger_server.models.transformer_info_properties import TransformerInfoProperties
from swagger_server.models.transformer_query import TransformerQuery
| 45.533333
| 87
| 0.887262
| 87
| 683
| 6.701149
| 0.37931
| 0.169811
| 0.262436
| 0.35506
| 0.295026
| 0.236707
| 0
| 0
| 0
| 0
| 0
| 0.003175
| 0.077599
| 683
| 14
| 88
| 48.785714
| 0.922222
| 0.086384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8ad281bdb07e3b654aafd59e253319bfa02054d9
| 8,049
|
py
|
Python
|
tests/test_response.py
|
dennyx/ApiTestEngine
|
18a0daa8a64a961cb47b15e846243b2f538c6ebe
|
[
"MIT"
] | null | null | null |
tests/test_response.py
|
dennyx/ApiTestEngine
|
18a0daa8a64a961cb47b15e846243b2f538c6ebe
|
[
"MIT"
] | null | null | null |
tests/test_response.py
|
dennyx/ApiTestEngine
|
18a0daa8a64a961cb47b15e846243b2f538c6ebe
|
[
"MIT"
] | 1
|
2019-05-02T09:56:37.000Z
|
2019-05-02T09:56:37.000Z
|
import requests
from ate import response, exception
from tests.base import ApiServerUnittest
class TestResponse(ApiServerUnittest):
def test_parse_response_object_json(self):
url = "http://127.0.0.1:5000/api/users"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
parsed_dict = resp_obj.parsed_dict()
self.assertIn('status_code', parsed_dict)
self.assertIn('headers', parsed_dict)
self.assertIn('body', parsed_dict)
self.assertIn('Content-Type', parsed_dict['headers'])
self.assertIn('Content-Length', parsed_dict['headers'])
self.assertIn('success', parsed_dict['body'])
def test_parse_response_object_text(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
parsed_dict = resp_obj.parsed_dict()
self.assertIn('status_code', parsed_dict)
self.assertIn('headers', parsed_dict)
self.assertIn('body', parsed_dict)
self.assertIn('Content-Type', parsed_dict['headers'])
self.assertIn('Content-Length', parsed_dict['headers'])
self.assertTrue(str, type(parsed_dict['body']))
def test_extract_response_json(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': {
'success': False,
"person": {
"name": {
"first_name": "Leo",
"last_name": "Lee",
},
"age": 29,
"cities": ["Guangzhou", "Shenzhen"]
}
}
}
)
extract_binds_list = [
{"resp_status_code": "status_code"},
{"resp_headers_content_type": "headers.content-type"},
{"resp_content_body_success": "body.success"},
{"resp_content_content_success": "content.success"},
{"resp_content_text_success": "text.success"},
{"resp_content_person_first_name": "content.person.name.first_name"},
{"resp_content_cities_1": "content.person.cities.1"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_status_code"],
200
)
self.assertEqual(
extract_binds_dict_list[1]["resp_headers_content_type"],
"application/json"
)
self.assertEqual(
extract_binds_dict_list[2]["resp_content_body_success"],
False
)
self.assertEqual(
extract_binds_dict_list[3]["resp_content_content_success"],
False
)
self.assertEqual(
extract_binds_dict_list[4]["resp_content_text_success"],
False
)
self.assertEqual(
extract_binds_dict_list[5]["resp_content_person_first_name"],
"Leo"
)
self.assertEqual(
extract_binds_dict_list[6]["resp_content_cities_1"],
"Shenzhen"
)
def test_extract_response_fail(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': {
'success': False,
"person": {
"name": {
"first_name": "Leo",
"last_name": "Lee",
},
"age": 29,
"cities": ["Guangzhou", "Shenzhen"]
}
}
}
)
extract_binds_list = [
{"resp_content_dict_key_error": "content.not_exist"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ParseResponseError):
resp_obj.extract_response(extract_binds_list)
extract_binds_list = [
{"resp_content_list_index_error": "content.person.cities.3"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ParseResponseError):
resp_obj.extract_response(extract_binds_list)
def test_extract_response_json_string(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': "abc"
}
)
extract_binds_list = [
{"resp_content_body": "content"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_content_body"],
"abc"
)
def test_extract_response_empty(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': ""
}
)
extract_binds_list = [
{"resp_content_body": "content"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_content_body"],
""
)
extract_binds_list = [
{"resp_content_body": "content.abc"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ResponseError):
resp_obj.extract_response(extract_binds_list)
def test_validate(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "resp_body_success", "comparator": "eq", "expected": True}
]
variables_mapping = {
"resp_status_code": 200,
"resp_body_success": True
}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "resp_body_success", "comparator": "eq", "expected": True}
]
variables_mapping = {
"resp_status_code": 201,
"resp_body_success": True
}
self.assertTrue(resp_obj.validate(validators, variables_mapping))
def test_validate_exception(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
# expected value missed in validators
validators = [
{"check": "status_code", "comparator": "eq", "expected": 201},
{"check": "body_success", "comparator": "eq"}
]
variables_mapping = {}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
# expected value missed in variables mapping
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "body_success", "comparator": "eq"}
]
variables_mapping = {
"resp_status_code": 200
}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
| 34.693966
| 81
| 0.546031
| 763
| 8,049
| 5.461337
| 0.124509
| 0.069114
| 0.046076
| 0.057595
| 0.813055
| 0.75258
| 0.712743
| 0.712743
| 0.664747
| 0.637389
| 0
| 0.022672
| 0.336936
| 8,049
| 231
| 82
| 34.844156
| 0.758104
| 0.009691
| 0
| 0.573529
| 0
| 0
| 0.222515
| 0.055221
| 0
| 0
| 0
| 0
| 0.137255
| 1
| 0.039216
| false
| 0
| 0.014706
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
76d7badbde9cb039178a67c10fd464749e3189ad
| 92
|
py
|
Python
|
pycelsiusnetwork/utils/__init__.py
|
eitchtee/pyCelsiusNetwork
|
7aa36687334c43989ff3318bde336d0ec663eb9c
|
[
"MIT"
] | 4
|
2020-09-17T18:30:08.000Z
|
2021-03-15T19:28:13.000Z
|
pycelsiusnetwork/utils/__init__.py
|
eitchtee/pyCelsiusNetwork
|
7aa36687334c43989ff3318bde336d0ec663eb9c
|
[
"MIT"
] | null | null | null |
pycelsiusnetwork/utils/__init__.py
|
eitchtee/pyCelsiusNetwork
|
7aa36687334c43989ff3318bde336d0ec663eb9c
|
[
"MIT"
] | 1
|
2020-09-17T18:30:12.000Z
|
2020-09-17T18:30:12.000Z
|
from .abstraction import get_key, filter_transactions
from .time import convert_to_datetime
| 30.666667
| 53
| 0.869565
| 13
| 92
| 5.846154
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097826
| 92
| 2
| 54
| 46
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76e1e121ababe021cf338e8e705f7f64d4b8dc85
| 89
|
py
|
Python
|
enthought/mayavi/core/registry.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/mayavi/core/registry.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/mayavi/core/registry.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from mayavi.core.registry import *
| 22.25
| 38
| 0.831461
| 12
| 89
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123596
| 89
| 3
| 39
| 29.666667
| 0.884615
| 0.134831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76ef8636b72f6fba55dc8b5f0c63daf8f57d5a18
| 55
|
py
|
Python
|
vnpy/api/nst/__init__.py
|
funrunskypalace/vnpy
|
2d87aede685fa46278d8d3392432cc127b797926
|
[
"MIT"
] | 19,529
|
2015-03-02T12:17:35.000Z
|
2022-03-31T17:18:27.000Z
|
vnpy/api/nst/__init__.py
|
funrunskypalace/vnpy
|
2d87aede685fa46278d8d3392432cc127b797926
|
[
"MIT"
] | 2,186
|
2015-03-04T23:16:33.000Z
|
2022-03-31T03:44:01.000Z
|
vnpy/api/nst/__init__.py
|
funrunskypalace/vnpy
|
2d87aede685fa46278d8d3392432cc127b797926
|
[
"MIT"
] | 8,276
|
2015-03-02T05:21:04.000Z
|
2022-03-31T13:13:13.000Z
|
from .vnnsttd import TdApi
from .nst_constant import *
| 18.333333
| 27
| 0.8
| 8
| 55
| 5.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 55
| 2
| 28
| 27.5
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0a13fbf2282be380bd70fa7d8ebb7661bbc4bf2a
| 130
|
py
|
Python
|
solution/v3/core/Service.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | 1
|
2020-01-01T22:28:00.000Z
|
2020-01-01T22:28:00.000Z
|
solution/v3/core/Service.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | null | null | null |
solution/v3/core/Service.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | 4
|
2018-01-23T15:09:04.000Z
|
2020-02-11T20:15:34.000Z
|
class Service:
def __init__(self):
self.ver = '/api/nutanix/v3/'
@property
def name(self):
return self.ver
| 14.444444
| 35
| 0.6
| 17
| 130
| 4.352941
| 0.705882
| 0.189189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.269231
| 130
| 8
| 36
| 16.25
| 0.768421
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
0a4bd642cf8c4921ee0b588b0a79c33d8d1783c7
| 245
|
py
|
Python
|
Lesson8_2/levels.py
|
shinkai-tester/python_beginner
|
a934328c9a50241cc3f02a423060e16aab53b425
|
[
"Apache-2.0"
] | 2
|
2021-06-01T13:24:04.000Z
|
2021-06-01T13:27:47.000Z
|
Lesson8_2/levels.py
|
shinkai-tester/python_beginner
|
a934328c9a50241cc3f02a423060e16aab53b425
|
[
"Apache-2.0"
] | null | null | null |
Lesson8_2/levels.py
|
shinkai-tester/python_beginner
|
a934328c9a50241cc3f02a423060e16aab53b425
|
[
"Apache-2.0"
] | null | null | null |
def labyrinth(levels, orcs_first, L):
if levels == 1:
return orcs_first
return min(labyrinth(levels - 1, orcs_first, L) ** 2, labyrinth(levels - 1, orcs_first, L) + L)
print(labyrinth(int(input()), int(input()), int(input())))
| 30.625
| 99
| 0.640816
| 36
| 245
| 4.25
| 0.388889
| 0.235294
| 0.196078
| 0.261438
| 0.339869
| 0.339869
| 0
| 0
| 0
| 0
| 0
| 0.020101
| 0.187755
| 245
| 7
| 100
| 35
| 0.748744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.6
| 0.2
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0a5f7d200f4d33f3652e61be0dcaf00b13c08d06
| 155
|
py
|
Python
|
python/testData/quickFixes/PyRenameElementQuickFixTest/protectedMember_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/quickFixes/PyRenameElementQuickFixTest/protectedMember_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/quickFixes/PyRenameElementQuickFixTest/protectedMember_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class A:
def __init__(self):
self.A_NEW_NAME = 1
def _foo(self):
pass
a_class = A()
a_class._foo()
print(a_class.A_NEW_NAME)
| 10.333333
| 27
| 0.593548
| 26
| 155
| 3.038462
| 0.423077
| 0.227848
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009091
| 0.290323
| 155
| 14
| 28
| 11.071429
| 0.709091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.125
| 0
| 0
| 0.375
| 0.125
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6a571f7a5a2f387feeb87e1b7a61c9bba742ded4
| 204
|
py
|
Python
|
networktables/networktable.py
|
TheTripleV/pynetworktables
|
07c0b51949ff497cfed0c588c7bd5825f9dc3d56
|
[
"BSD-3-Clause"
] | 56
|
2015-01-05T00:59:55.000Z
|
2022-03-30T22:40:09.000Z
|
networktables/networktable.py
|
TheTripleV/pynetworktables
|
07c0b51949ff497cfed0c588c7bd5825f9dc3d56
|
[
"BSD-3-Clause"
] | 71
|
2015-01-17T23:17:03.000Z
|
2021-12-28T18:44:44.000Z
|
networktables/networktable.py
|
TheTripleV/pynetworktables
|
07c0b51949ff497cfed0c588c7bd5825f9dc3d56
|
[
"BSD-3-Clause"
] | 36
|
2015-01-26T15:00:04.000Z
|
2022-03-02T23:02:22.000Z
|
import warnings
from . import NetworkTable # noqa
warnings.warn(
"networktables.networktable is deprecated, import networktables.NetworkTable directly",
DeprecationWarning,
stacklevel=2,
)
| 20.4
| 91
| 0.769608
| 19
| 204
| 8.263158
| 0.684211
| 0.318471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005848
| 0.161765
| 204
| 9
| 92
| 22.666667
| 0.912281
| 0.019608
| 0
| 0
| 0
| 0
| 0.424242
| 0.262626
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6a5c4307c9200273085bcdf74165cecf5a6d69e8
| 91
|
py
|
Python
|
book/scripts/chapter0/part2.py
|
shoaibkhanz/Scikit-Learn-a-complete-machine-learning-book
|
bf5ec510da3cdb29c91fff541674fba9e4a05893
|
[
"MIT"
] | null | null | null |
book/scripts/chapter0/part2.py
|
shoaibkhanz/Scikit-Learn-a-complete-machine-learning-book
|
bf5ec510da3cdb29c91fff541674fba9e4a05893
|
[
"MIT"
] | null | null | null |
book/scripts/chapter0/part2.py
|
shoaibkhanz/Scikit-Learn-a-complete-machine-learning-book
|
bf5ec510da3cdb29c91fff541674fba9e4a05893
|
[
"MIT"
] | null | null | null |
#%% [markdown]
# # When do we use machine learning?
# ## Machine learning examples
# %%
| 13
| 36
| 0.626374
| 10
| 91
| 5.7
| 0.8
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21978
| 91
| 6
| 37
| 15.166667
| 0.802817
| 0.857143
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6ab2af2f07e2dc6bc2cee14798e227b480a90ea1
| 1,044
|
py
|
Python
|
stubs/ev3_pybricks_v1_0_0/pybricks/display.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/pybricks/display.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/pybricks/display.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'pybricks.display' on LEGO EV3 v1.0.0
"""
# MCU: sysname=ev3, nodename=ev3, release=('v1.0.0',), version=('0.0.0',), machine=ev3
# Stubber: 1.3.2 - updated
from typing import Any
class Align:
""""""
BOTTOM = 2
BOTTOM_LEFT = 1
BOTTOM_RIGHT = 3
CENTER = 5
LEFT = 4
RIGHT = 6
TOP = 8
TOP_LEFT = 7
TOP_RIGHT = 9
class Display:
""""""
_font_height = 8
def _next_line(self, *argv) -> Any:
pass
def _reset_text_history(self, *argv) -> Any:
pass
_valid_devices = None
def clear(self, *argv) -> Any:
pass
def image(self, *argv) -> Any:
pass
def text(self, *argv) -> Any:
pass
class Ev3devDisplay:
""""""
def image(self, *argv) -> Any:
pass
def reset_screen(self, *argv) -> Any:
pass
def scroll(self, *argv) -> Any:
pass
def text_grid(self, *argv) -> Any:
pass
def text_pixels(self, *argv) -> Any:
pass
class ImageFile:
""""""
path = None
| 14.914286
| 86
| 0.533525
| 137
| 1,044
| 3.948905
| 0.430657
| 0.147874
| 0.203327
| 0.277264
| 0.371534
| 0.236599
| 0.096118
| 0
| 0
| 0
| 0
| 0.038082
| 0.320881
| 1,044
| 69
| 87
| 15.130435
| 0.724965
| 0.149425
| 0
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.27027
| false
| 0.27027
| 0.027027
| 0
| 0.702703
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
6add1cc345b66b092f84462630b56540d1397a8c
| 5,133
|
py
|
Python
|
tests/test_0007-single-chunk-interface.py
|
ryuwd/uproot4
|
20d8575e941c32559c7b5e62b0ed5f92bc4927d0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0007-single-chunk-interface.py
|
ryuwd/uproot4
|
20d8575e941c32559c7b5e62b0ed5f92bc4927d0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0007-single-chunk-interface.py
|
ryuwd/uproot4
|
20d8575e941c32559c7b5e62b0ed5f92bc4927d0
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
from __future__ import absolute_import
import sys
import os
import numpy
import pytest
import uproot
def tobytes(x):
if hasattr(x, "tobytes"):
return x.tobytes()
else:
return x.tostring()
def test_file(tmpdir):
filename = os.path.join(str(tmpdir), "tmp.raw")
with open(filename, "wb") as tmp:
tmp.write(b"****** ...+++++++!!!!!@@@@@")
expected = [
b"******",
b" ",
b"...",
b"+++++++",
b"!!!!!",
b"@@@@@",
]
for num_workers in [1, 2]:
with uproot.source.file.MultithreadedFileSource(
filename, num_workers=num_workers
) as source:
for i, (start, stop) in enumerate(
[(0, 6), (6, 10), (10, 13), (13, 20), (20, 25), (25, 30)]
):
chunk = source.chunk(start, stop)
assert tobytes(chunk.raw_data) == expected[i]
with pytest.raises(Exception):
uproot.source.file.MultithreadedFileSource(
filename + "-does-not-exist", num_workers=num_workers
)
def test_memmap(tmpdir):
filename = os.path.join(str(tmpdir), "tmp.raw")
with open(filename, "wb") as tmp:
tmp.write(b"****** ...+++++++!!!!!@@@@@")
expected = [
b"******",
b" ",
b"...",
b"+++++++",
b"!!!!!",
b"@@@@@",
]
with uproot.source.file.MemmapSource(filename, num_fallback_workers=1) as source:
for i, (start, stop) in enumerate(
[(0, 6), (6, 10), (10, 13), (13, 20), (20, 25), (25, 30)]
):
chunk = source.chunk(start, stop)
assert tobytes(chunk.raw_data) == expected[i]
with pytest.raises(Exception):
uproot.source.file.MemmapSource(
filename + "-does-not-exist", num_fallback_workers=1
)
@pytest.mark.network
def test_http():
for num_workers in [1, 2]:
with uproot.source.http.MultithreadedHTTPSource(
"https://example.com", num_workers=num_workers, timeout=10
) as source:
for start, stop in [(0, 100), (50, 55), (200, 400)]:
chunk = source.chunk(start, stop)
assert len(tobytes(chunk.raw_data)) == stop - start
with pytest.raises(Exception):
with uproot.source.http.MultithreadedHTTPSource(
"https://wonky.cern/does-not-exist",
num_workers=num_workers,
timeout=0.1,
) as source:
source.chunk(0, 100)
@pytest.mark.network
def test_http_multipart():
with uproot.source.http.HTTPSource(
"https://example.com", timeout=10, num_fallback_workers=1
) as source:
for start, stop in [(0, 100), (50, 55), (200, 400)]:
chunk = source.chunk(start, stop)
assert len(tobytes(chunk.raw_data)) == stop - start
with pytest.raises(Exception):
with uproot.source.http.HTTPSource(
"https://wonky.cern/does-not-exist", timeout=0.1, num_fallback_workers=1
) as source:
tobytes(source.chunk(0, 100).raw_data)
@pytest.mark.network
@pytest.mark.xrootd
def test_xrootd():
pytest.importorskip("XRootD")
with uproot.source.xrootd.MultithreadedXRootDSource(
"root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/Run2012B_DoubleMuParked.root",
num_workers=1,
timeout=10,
) as source:
one = tobytes(source.chunk(0, 100).raw_data)
assert len(one) == 100
two = tobytes(source.chunk(50, 55).raw_data)
assert len(two) == 5
three = tobytes(source.chunk(200, 400).raw_data)
assert len(three) == 200
assert one[:4] == b"root"
@pytest.mark.network
@pytest.mark.xrootd
def test_xrootd_worker():
pytest.importorskip("XRootD")
with uproot.source.xrootd.MultithreadedXRootDSource(
"root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/Run2012B_DoubleMuParked.root",
num_workers=5,
timeout=10,
) as source:
one = tobytes(source.chunk(0, 100).raw_data)
assert len(one) == 100
two = tobytes(source.chunk(50, 55).raw_data)
assert len(two) == 5
three = tobytes(source.chunk(200, 400).raw_data)
assert len(three) == 200
assert one[:4] == b"root"
@pytest.mark.network
@pytest.mark.xrootd
def test_xrootd_vectorread():
pytest.importorskip("XRootD")
with uproot.source.xrootd.XRootDSource(
"root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/Run2012B_DoubleMuParked.root",
timeout=10,
max_num_elements=None,
num_workers=1,
) as source:
one = tobytes(source.chunk(0, 100).raw_data)
assert len(one) == 100
two = tobytes(source.chunk(50, 55).raw_data)
assert len(two) == 5
three = tobytes(source.chunk(200, 400).raw_data)
assert len(three) == 200
assert one[:4] == b"root"
| 30.921687
| 104
| 0.570816
| 620
| 5,133
| 4.622581
| 0.187097
| 0.057572
| 0.062805
| 0.050244
| 0.855548
| 0.795883
| 0.718074
| 0.656315
| 0.656315
| 0.617237
| 0
| 0.052575
| 0.277421
| 5,133
| 165
| 105
| 31.109091
| 0.72014
| 0.01578
| 0
| 0.705882
| 0
| 0
| 0.116436
| 0.055248
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.058824
| false
| 0
| 0.066176
| 0
| 0.139706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0a7bf22472abf54fe7186271f10a8eb043d57697
| 91
|
py
|
Python
|
mums/web/__init__.py
|
franramirez688/mums-app
|
30d6e7d6ada767f31b351b38bfee70e1bdefc504
|
[
"MIT"
] | null | null | null |
mums/web/__init__.py
|
franramirez688/mums-app
|
30d6e7d6ada767f31b351b38bfee70e1bdefc504
|
[
"MIT"
] | null | null | null |
mums/web/__init__.py
|
franramirez688/mums-app
|
30d6e7d6ada767f31b351b38bfee70e1bdefc504
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
web = Blueprint('web', __name__)
from . import views, errors
| 15.166667
| 32
| 0.747253
| 12
| 91
| 5.333333
| 0.666667
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 5
| 33
| 18.2
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.032967
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
0a90b3a600e56a09076d32b4ae8787f9510244a4
| 72
|
py
|
Python
|
python/testData/resolve/multiFile/transitiveImport/TransitiveImport.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/resolve/multiFile/transitiveImport/TransitiveImport.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/resolve/multiFile/transitiveImport/TransitiveImport.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from channel import * # token must pass through
print(token)
# <ref>
| 24
| 47
| 0.694444
| 10
| 72
| 5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 72
| 3
| 48
| 24
| 0.877193
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
0acdd4949e43021d5c3ab6d22c74f4d93993e533
| 52
|
py
|
Python
|
main.py
|
marksikaundi/Computer-Programming-with-Python
|
136fec0196a246eb47e3802337fb3fad38d441bb
|
[
"Intel"
] | 2
|
2022-01-18T09:13:36.000Z
|
2022-01-18T09:41:05.000Z
|
main.py
|
marksikaundi/Computer-Programming-with-Python
|
136fec0196a246eb47e3802337fb3fad38d441bb
|
[
"Intel"
] | null | null | null |
main.py
|
marksikaundi/Computer-Programming-with-Python
|
136fec0196a246eb47e3802337fb3fad38d441bb
|
[
"Intel"
] | null | null | null |
print("welcome to computer programming with python")
| 52
| 52
| 0.826923
| 7
| 52
| 6.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 52
| 1
| 52
| 52
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0afdb8374b6787aa8b5b7be69e7c8e20c3e64bb5
| 218
|
py
|
Python
|
rllib/agents/a3c/__init__.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | 1
|
2022-03-07T06:40:06.000Z
|
2022-03-07T06:40:06.000Z
|
rllib/agents/a3c/__init__.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | 73
|
2021-09-25T07:11:39.000Z
|
2022-03-26T07:10:59.000Z
|
rllib/agents/a3c/__init__.py
|
mgelbart/ray
|
4cec2286572e368a4bd64aae467751a384eff62d
|
[
"Apache-2.0"
] | null | null | null |
from ray.rllib.agents.a3c.a3c import A3CConfig, A3CTrainer, DEFAULT_CONFIG
from ray.rllib.agents.a3c.a2c import A2CConfig, A2CTrainer
__all__ = ["A2CConfig", "A2CTrainer", "A3CConfig", "A3CTrainer", "DEFAULT_CONFIG"]
| 43.6
| 82
| 0.784404
| 27
| 218
| 6.111111
| 0.518519
| 0.084848
| 0.145455
| 0.218182
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060302
| 0.087156
| 218
| 4
| 83
| 54.5
| 0.768844
| 0
| 0
| 0
| 0
| 0
| 0.238532
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7c3ee8682bc33941678e0d8fb23a5f85fbfb86ba
| 81
|
py
|
Python
|
pyvb/__init__.py
|
lucidfrontier45/PyVB
|
5218fbcf9ffa106a644e318b03a8005f3daf9f57
|
[
"BSD-3-Clause"
] | 13
|
2015-07-23T07:43:35.000Z
|
2021-04-05T06:04:55.000Z
|
pyvb/__init__.py
|
lucidfrontier45/PyVB
|
5218fbcf9ffa106a644e318b03a8005f3daf9f57
|
[
"BSD-3-Clause"
] | null | null | null |
pyvb/__init__.py
|
lucidfrontier45/PyVB
|
5218fbcf9ffa106a644e318b03a8005f3daf9f57
|
[
"BSD-3-Clause"
] | 6
|
2015-11-17T21:32:55.000Z
|
2020-06-17T15:59:50.000Z
|
__version__ = "1.5"
from core import *
from vbgmm import VBGMM
import hmm, vbhmm
| 16.2
| 23
| 0.753086
| 13
| 81
| 4.384615
| 0.692308
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029851
| 0.17284
| 81
| 4
| 24
| 20.25
| 0.820896
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7c504a4ef1cbe04b0e2edc5bb499c8e70ff52d14
| 81
|
py
|
Python
|
bitmovin_api_sdk/encoding/encodings/streams/input/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/encodings/streams/input/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/encodings/streams/input/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.encodings.streams.input.input_api import InputApi
| 40.5
| 80
| 0.888889
| 12
| 81
| 5.75
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049383
| 81
| 1
| 81
| 81
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7c76028e389212c4c7cfbe672d0d5a85a9aea186
| 235
|
py
|
Python
|
resource/welcome.py
|
TintypeMolly/Yuzuki
|
94dc874c4000ac918f0b52846927311b3f25ce2c
|
[
"MIT"
] | 6
|
2015-01-09T06:32:15.000Z
|
2015-08-15T13:23:34.000Z
|
resource/welcome.py
|
TintypeMolly/Yuzuki
|
94dc874c4000ac918f0b52846927311b3f25ce2c
|
[
"MIT"
] | 73
|
2015-01-08T11:38:34.000Z
|
2015-09-10T09:55:08.000Z
|
resource/welcome.py
|
TintypeMolly/Yuzuki
|
94dc874c4000ac918f0b52846927311b3f25ce2c
|
[
"MIT"
] | 11
|
2015-01-09T06:26:12.000Z
|
2015-03-26T13:16:19.000Z
|
# -*- coding: utf-8 -*-
from helper.resource import YuzukiResource
from helper.template import render_template
class Welcome(YuzukiResource):
def render_GET(self, request):
return render_template("welcome.html", request)
| 26.111111
| 55
| 0.748936
| 28
| 235
| 6.178571
| 0.642857
| 0.115607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005
| 0.148936
| 235
| 8
| 56
| 29.375
| 0.86
| 0.089362
| 0
| 0
| 0
| 0
| 0.056604
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
7ca0cc7ef33abaa5a8fa6b8e95235775524801c0
| 2,145
|
py
|
Python
|
demo.py
|
HuAndHe/HumanManagerBattle
|
9dc5e20691775ce3546056c04b8b18afab152b44
|
[
"Apache-2.0"
] | 1
|
2022-01-02T23:14:03.000Z
|
2022-01-02T23:14:03.000Z
|
demo.py
|
HuAndHe/HumanManagerBattle
|
9dc5e20691775ce3546056c04b8b18afab152b44
|
[
"Apache-2.0"
] | null | null | null |
demo.py
|
HuAndHe/HumanManagerBattle
|
9dc5e20691775ce3546056c04b8b18afab152b44
|
[
"Apache-2.0"
] | 3
|
2019-12-12T06:24:27.000Z
|
2020-03-25T12:55:37.000Z
|
#_*_coding:utf-8 _*_
import pygame
from pygame.locals import *
from sys import exit
pygame.init()
screen=pygame.display.set_mode((800,800),0,32)
pygame.display.set_caption("中国AI象棋")
screen.fill((255,255,255))
screen.set_clip(0, 0, 600, 650)
#竖线
pygame.draw.line(screen, (222, 125, 44), (75, 0), (75, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (150, 0), (150, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (225, 0), (225, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (300, 0), (300, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (375, 0), (375, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (450, 0), (450, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (525, 0), (525, 280), 1)
pygame.draw.line(screen, (222, 125, 44), (75, 370), (75, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (150, 370), (150, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (225, 370), (225, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (300, 370), (300, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (375, 370), (375, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (450, 370), (450, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (525, 370), (525, 650), 1)
#横线
pygame.draw.line(screen, (0, 0, 255), (0, 70), (600, 70), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 140), (600, 140), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 210), (600, 210), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 280), (600, 280), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 370), (600, 370), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 440), (600, 440), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 510), (600, 510), 1)
pygame.draw.line(screen, (0, 0, 255), (0, 580), (600, 580), 1)
#斜线
pygame.draw.line(screen, (222, 125, 44), (225, 0), (375, 140), 1)
pygame.draw.line(screen, (222, 125, 44), (375, 0), (225, 140), 1)
pygame.draw.line(screen, (222, 125, 44), (225, 510), (375, 650), 1)
pygame.draw.line(screen, (222, 125, 44), (375, 510), (225, 650), 1)
#边框
pygame.draw.rect(screen,(0,0,0),((0,0),(600,650)),3)
while True:
for event in pygame.event.get():
if event.type==QUIT:
pygame.display.quit()
exit()
pygame.display.update()
| 43.77551
| 67
| 0.611655
| 388
| 2,145
| 3.363402
| 0.157216
| 0.206897
| 0.278927
| 0.398467
| 0.652107
| 0.652107
| 0.64751
| 0.64751
| 0.603065
| 0.133333
| 0
| 0.279656
| 0.131469
| 2,145
| 49
| 68
| 43.77551
| 0.420827
| 0.012587
| 0
| 0
| 0
| 0
| 0.002838
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.073171
| 0
| 0.073171
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7ca18e5a6f3a971ebd4675c1d007ab269fed1fd3
| 5,444
|
py
|
Python
|
projects/HighRoadside/highroadside_model/highroadside_dataset.py
|
taofuyu/detectron2
|
cadc0ea99394aa9a62a61bf4231bfaf93d0df7a5
|
[
"Apache-2.0"
] | null | null | null |
projects/HighRoadside/highroadside_model/highroadside_dataset.py
|
taofuyu/detectron2
|
cadc0ea99394aa9a62a61bf4231bfaf93d0df7a5
|
[
"Apache-2.0"
] | null | null | null |
projects/HighRoadside/highroadside_model/highroadside_dataset.py
|
taofuyu/detectron2
|
cadc0ea99394aa9a62a61bf4231bfaf93d0df7a5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
@author: taofuyu
@description: the dataset func for high_roadside project
@note:
The img list should be like:
.../path/img.jpg xmin ymin xmax ymax label xmin ymin xmax ymax label ...
eg: .../path/any.jpg 22 34 89 135 1 78 45 564 348 3 ...
'''
import os
import logging
from PIL import Image
import cv2
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.structures.boxes import BoxMode
def check_gt_box(annos, file_name):
for anno in annos:
#zero
if anno["bbox"][0] == 0 and anno["bbox"][1] == 0 and anno["bbox"][2] == 0 and anno["bbox"][3] == 0:
print("{}, in-valid gt box, check it".format(file_name))
print(anno["bbox"])
assert(False)
#neg
if anno["bbox"][0] < 0 or anno["bbox"][1] < 0 or anno["bbox"][2] < 0 or anno["bbox"][3] < 0:
print("{}, in-valid gt box, check it".format(file_name))
print(anno["bbox"])
assert(False)
#big small
if anno["bbox"][0] > anno["bbox"][2] or anno["bbox"][1] > anno["bbox"][3]:
print("{}, in-valid gt box, check it".format(file_name))
print(anno["bbox"])
assert(False)
#same box
def highroadside_dataset_function(img_list):
'''
Use img_list_file to generate a stdandard detectron2 data list
'''
logger = logging.getLogger("detectron2")
#read img list file
assert(os.path.exists(img_list)), 'assert failed, {} not exists !'.format(img_list)
with open(img_list, 'r') as f:
lines = f.readlines()
num_imgs = len(lines)
logger.info('total {} imgs to train'.format(num_imgs))
#cvt img list to detectron2 standard format:
det2_dataset = []
img_id = 0
for line in lines: #one line contains all info about an img
splited = line.strip('\n').strip(' ').split(' ')
file_name = splited[0]
img = Image.open(file_name)
img_w, img_h = img.size
img_dict = {}
annotations = []
num_boxes = ( len(splited)-1 ) // 5
if num_boxes == 0:
continue
for i in range(num_boxes): #one object
anno = {}
x_min = splited[1 + 5*i]
y_min = splited[2 + 5*i]
x_max = splited[3 + 5*i]
y_max = splited[4 + 5*i]
label = splited[5 + 5*i]
anno['bbox'] = [float(x_min), float(y_min), float(x_max), float(y_max)]
anno['bbox_mode'] = BoxMode.XYXY_ABS
anno['category_id'] = int(label)
annotations.append(anno)
check_gt_box(annotations, file_name)
img_dict['file_name'] = file_name
img_dict['width'] = img_w
img_dict['height'] = img_h
img_dict['image_id'] = img_id
img_dict['annotations'] = annotations
img_id += 1
det2_dataset.append(img_dict)
return det2_dataset
def mix_dataset_function(img_list):
'''
Use img_list_file to generate a stdandard detectron2 data list
'''
logger = logging.getLogger("detectron2")
#read img list file
assert(os.path.exists(img_list)), 'assert failed, {} not exists !'.format(img_list)
with open(img_list, 'r', encoding="utf-8") as f:
lines = f.readlines()
num_imgs = len(lines)
logger.info('total {} imgs to train'.format(num_imgs))
#cvt img list to detectron2 standard format:
det2_dataset = []
img_id = 0
for line in lines: #one line contains all info about an img
splited = line.strip('\n').strip(' ').split(' ')
file_name = splited[0]
# img = Image.open(file_name)
# img_w, img_h = img.size
img = cv2.imread(file_name)
img_h, img_w = img.shape[:2]
img_dict = {}
annotations = []
num_boxes = ( len(splited)-1 ) // 5
if num_boxes == 0:
continue
for i in range(num_boxes): #one object
anno = {}
x_min = splited[1 + 5*i]
y_min = splited[2 + 5*i]
x_max = splited[3 + 5*i]
y_max = splited[4 + 5*i]
label = splited[5 + 5*i]
anno['bbox'] = [float(x_min), float(y_min), float(x_max), float(y_max)]
anno['bbox_mode'] = BoxMode.XYXY_ABS
anno['category_id'] = int(label)
annotations.append(anno)
img_dict['file_name'] = file_name
img_dict['width'] = img_w
img_dict['height'] = img_h
img_dict['image_id'] = img_id
img_dict['annotations'] = annotations
img_id += 1
det2_dataset.append(img_dict)
return det2_dataset
for phase in ['train', 'val']:
DatasetCatalog.register('highroadside_dataset_' + phase, lambda phase=phase: highroadside_dataset_function('/detectron2/datasets/txt/high_roadside/'+phase+'_high_roadisde_imglist.txt'))
MetadataCatalog.get('highroadside_dataset_' + phase).set(thing_classes=['plate','head','tail','car','side_win','win'], thing_colors=['b', 'g', 'r', 'c', 'm', 'y'])
DatasetCatalog.register('mix_dataset_' + phase, lambda phase=phase: mix_dataset_function('/detectron2/datasets/txt/high_roadside/'+phase+'_mix_imglist.txt'))
MetadataCatalog.get('mix_dataset_' + phase).set(thing_classes=['plate','head','tail','car','side_win','win'], thing_colors=['b', 'g', 'r', 'c', 'm', 'y'])
| 34.897436
| 189
| 0.580088
| 744
| 5,444
| 4.069892
| 0.228495
| 0.050198
| 0.021797
| 0.010898
| 0.764531
| 0.72424
| 0.72424
| 0.72424
| 0.689234
| 0.689234
| 0
| 0.023852
| 0.276084
| 5,444
| 155
| 190
| 35.122581
| 0.744481
| 0.126929
| 0
| 0.754902
| 0
| 0
| 0.142948
| 0.031057
| 0
| 0
| 0
| 0
| 0.04902
| 1
| 0.029412
| false
| 0
| 0.058824
| 0
| 0.107843
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7cfa6c4eeef3780ab28bfc338afc0306e3f885ee
| 201
|
py
|
Python
|
einops/__init__.py
|
Naereen/einops
|
9579de6604f0309e5106d6184a5c350c193ba781
|
[
"MIT"
] | null | null | null |
einops/__init__.py
|
Naereen/einops
|
9579de6604f0309e5106d6184a5c350c193ba781
|
[
"MIT"
] | null | null | null |
einops/__init__.py
|
Naereen/einops
|
9579de6604f0309e5106d6184a5c350c193ba781
|
[
"MIT"
] | null | null | null |
__author__ = 'Alex Rogozhnikov'
__version__ = '0.1'
__all__ = ['rearrange', 'reduce', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, parse_shape, asnumpy, EinopsError
| 33.5
| 74
| 0.741294
| 22
| 201
| 6.136364
| 0.727273
| 0.222222
| 0.296296
| 0.37037
| 0.637037
| 0.637037
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.114428
| 201
| 5
| 75
| 40.2
| 0.747191
| 0
| 0
| 0
| 0
| 0
| 0.313433
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6b0b3e254ee9c7b29c3aecebb9a01d2b0376e157
| 207
|
py
|
Python
|
tests/test_flow.py
|
biglocalnews/prefect-flow-template
|
9ce6e5470ea1e8267b37fb6344389e678ff1e499
|
[
"Apache-2.0"
] | 5
|
2022-03-29T19:27:45.000Z
|
2022-03-31T18:13:07.000Z
|
tests/test_flow.py
|
biglocalnews/prefect-flow-template
|
9ce6e5470ea1e8267b37fb6344389e678ff1e499
|
[
"Apache-2.0"
] | null | null | null |
tests/test_flow.py
|
biglocalnews/prefect-flow-template
|
9ce6e5470ea1e8267b37fb6344389e678ff1e499
|
[
"Apache-2.0"
] | 1
|
2022-03-31T18:13:11.000Z
|
2022-03-31T18:13:11.000Z
|
import pytest
from prefect.utilities.storage import extract_flow_from_file
@pytest.mark.vcr()
def test_flow():
"""Test scrape tasks."""
flow = extract_flow_from_file("flow.py")
flow.validate()
| 20.7
| 60
| 0.729469
| 29
| 207
| 4.965517
| 0.586207
| 0.152778
| 0.208333
| 0.263889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144928
| 207
| 9
| 61
| 23
| 0.813559
| 0.086957
| 0
| 0
| 0
| 0
| 0.038251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6b29075fdae4e579e8d699bee5e2e865c3028794
| 94
|
py
|
Python
|
priorities/admin.py
|
itmonitoringcommunity/backend
|
3107cceecc29bbc94ba692bd67922f679a61d2fd
|
[
"MIT"
] | null | null | null |
priorities/admin.py
|
itmonitoringcommunity/backend
|
3107cceecc29bbc94ba692bd67922f679a61d2fd
|
[
"MIT"
] | 2
|
2021-03-19T09:46:09.000Z
|
2021-06-04T23:34:54.000Z
|
priorities/admin.py
|
smiley-py/backend
|
3107cceecc29bbc94ba692bd67922f679a61d2fd
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Priority
admin.site.register(Priority)
| 15.666667
| 32
| 0.819149
| 13
| 94
| 5.923077
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117021
| 94
| 5
| 33
| 18.8
| 0.927711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8617baa81eae1f818bc7eea1824b950e7ad308d8
| 193
|
py
|
Python
|
src/user.py
|
b-wu8/License_Recog
|
57bf2c76379c106e6572d805225dbf4eed0d6b07
|
[
"MIT"
] | 2
|
2019-09-29T18:14:29.000Z
|
2019-11-22T15:41:47.000Z
|
src/user.py
|
b-wu8/License_Recog
|
57bf2c76379c106e6572d805225dbf4eed0d6b07
|
[
"MIT"
] | 2
|
2019-12-05T21:15:40.000Z
|
2019-12-05T21:17:02.000Z
|
src/user.py
|
b-wu8/License_Recog
|
57bf2c76379c106e6572d805225dbf4eed0d6b07
|
[
"MIT"
] | null | null | null |
class User:
def __init__(self, username, password):
self.name = username
self.password = password
def login(self):
return
def logout(self):
return
| 17.545455
| 43
| 0.585492
| 21
| 193
| 5.190476
| 0.52381
| 0.183486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.331606
| 193
| 10
| 44
| 19.3
| 0.844961
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.25
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
8632c6cd399dd5f6d110d57332a5ae636971f818
| 486
|
py
|
Python
|
sample_config.py
|
navodpro/TelegramFiletoCloud
|
d6f0e829c4481b400407bd069852d16c83b9a9b2
|
[
"Unlicense"
] | 1
|
2021-03-15T13:36:33.000Z
|
2021-03-15T13:36:33.000Z
|
sample_config.py
|
navodpro/Public_Cloud_Upload
|
d8275233ef3aa0cbf577e35fcc8f626f0cb4a5d5
|
[
"Unlicense"
] | null | null | null |
sample_config.py
|
navodpro/Public_Cloud_Upload
|
d8275233ef3aa0cbf577e35fcc8f626f0cb4a5d5
|
[
"Unlicense"
] | null | null | null |
class Config:
BOT_TOKEN = '' # from @botfather
APP_ID = '' # from https://my.telegram.org/apps
API_HASH = '' # from https://my.telegram.org/apps
API_KEY = '' # from https://mixdrop.co
API_EMAIL = '' # from https://mixdrop.co
AUTH_USERS = [694380168] # ADD YOUR USER ID
| 54
| 94
| 0.376543
| 41
| 486
| 4.317073
| 0.609756
| 0.20339
| 0.124294
| 0.214689
| 0.327684
| 0.327684
| 0.327684
| 0
| 0
| 0
| 0
| 0.039301
| 0.528807
| 486
| 8
| 95
| 60.75
| 0.733624
| 0.304527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
86457886fabc3b256f7acda562fc48c2580cc53c
| 134
|
py
|
Python
|
Core/context_proccesor.py
|
Firexd2/control-money
|
c72626f057c39766f8d750fc96e5f7accf1f5810
|
[
"MIT"
] | null | null | null |
Core/context_proccesor.py
|
Firexd2/control-money
|
c72626f057c39766f8d750fc96e5f7accf1f5810
|
[
"MIT"
] | null | null | null |
Core/context_proccesor.py
|
Firexd2/control-money
|
c72626f057c39766f8d750fc96e5f7accf1f5810
|
[
"MIT"
] | null | null | null |
def get_last_version(request):
from Core.models import VersionControl
return {'version': VersionControl.objects.all().last()}
| 33.5
| 59
| 0.753731
| 16
| 134
| 6.1875
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126866
| 134
| 3
| 60
| 44.666667
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
86541ea87e9d03357224a176234e019fcd918d99
| 100
|
py
|
Python
|
dashboard/admin.py
|
marissapang/covid19-Django
|
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
|
[
"Apache-2.0"
] | null | null | null |
dashboard/admin.py
|
marissapang/covid19-Django
|
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
|
[
"Apache-2.0"
] | 7
|
2020-04-12T22:42:55.000Z
|
2021-09-22T18:48:51.000Z
|
dashboard/admin.py
|
marissapang/covid19-Django
|
d29cd1f751dd8d0914492c2bfa1310ab8275cde0
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from dashboard.models import Profile
# Make the Profile model manageable through the Django admin site (default ModelAdmin).
admin.site.register(Profile)
| 20
| 36
| 0.84
| 14
| 100
| 6
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 100
| 4
| 37
| 25
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
865b0f7d145e2ddac6c8c616760edaddec2ca1f6
| 62
|
py
|
Python
|
django_laravel_validator/__init__.py
|
youngershen/django-laravel-validator
|
bb893beff9e7ba6b5da4a7d193a080369172d1bb
|
[
"MIT"
] | 14
|
2015-03-10T03:06:26.000Z
|
2021-08-16T19:30:31.000Z
|
django_laravel_validator/__init__.py
|
youngershen/django-laravel-validator
|
bb893beff9e7ba6b5da4a7d193a080369172d1bb
|
[
"MIT"
] | 1
|
2017-06-27T21:45:53.000Z
|
2017-06-27T21:45:53.000Z
|
django_laravel_validator/__init__.py
|
youngershen/django-laravel-validator
|
bb893beff9e7ba6b5da4a7d193a080369172d1bb
|
[
"MIT"
] | 5
|
2015-04-02T10:20:36.000Z
|
2017-01-10T04:20:35.000Z
|
#
# TODO: i18n
# TODO: python3 adaptation
# TODO: custom rules
| 15.5
| 26
| 0.709677
| 8
| 62
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.177419
| 62
| 4
| 27
| 15.5
| 0.803922
| 0.870968
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.25
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
86cc95300cc9bed5ab8a13b52f810205de3e564d
| 192
|
py
|
Python
|
Submissions/Easy/00067_Add_Binary.py
|
abhisekjuneja/LeetCode
|
bfcdefe81b34e0de969b96c694cc74c9e241f15f
|
[
"MIT"
] | null | null | null |
Submissions/Easy/00067_Add_Binary.py
|
abhisekjuneja/LeetCode
|
bfcdefe81b34e0de969b96c694cc74c9e241f15f
|
[
"MIT"
] | null | null | null |
Submissions/Easy/00067_Add_Binary.py
|
abhisekjuneja/LeetCode
|
bfcdefe81b34e0de969b96c694cc74c9e241f15f
|
[
"MIT"
] | null | null | null |
# Difficulty: Easy
# Problem Statement: https://leetcode.com/problems/add-binary/
class Solution:
    def addBinary(self, a: str, b: str) -> str:
        """Return the binary-string sum of two binary strings *a* and *b*."""
        # Parse both operands as base-2 integers, add, then render back to binary.
        total = int(a, 2) + int(b, 2)
        return format(total, "b")
| 32
| 62
| 0.640625
| 29
| 192
| 4.241379
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0.182292
| 192
| 6
| 63
| 32
| 0.764331
| 0.401042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
86e4664143a9b3275cbc025ae27a0076589dd8aa
| 216
|
py
|
Python
|
generated-libraries/python/netapp/aggr/aggrhapolicy.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/aggr/aggrhapolicy.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/aggr/aggrhapolicy.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
# NOTE(review): `basestring` exists only on Python 2 — this class will raise NameError on Python 3.
class Aggrhapolicy(basestring):
    """
    sfo|cfo
    Possible values:
    <ul>
    <li> "cfo" ,
    <li> "sfo"
    </ul>
    """
    @staticmethod
    def get_api_name():
        # Field name used for this enum-like string type in the NetApp ONTAP API.
        return "aggrhapolicy"
| 14.4
| 31
| 0.490741
| 20
| 216
| 5.2
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.365741
| 216
| 14
| 32
| 15.428571
| 0.759124
| 0.273148
| 0
| 0
| 0
| 0
| 0.099174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
86f56c86edf67fd02021d8f7c4e582880592c7f6
| 925
|
py
|
Python
|
test/test_general_api.py
|
mksoska/openapi-client-netdisco
|
d6444505307e4897a9fef1ded60a180eb764d4b8
|
[
"MIT"
] | null | null | null |
test/test_general_api.py
|
mksoska/openapi-client-netdisco
|
d6444505307e4897a9fef1ded60a180eb764d4b8
|
[
"MIT"
] | null | null | null |
test/test_general_api.py
|
mksoska/openapi-client-netdisco
|
d6444505307e4897a9fef1ded60a180eb764d4b8
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
App::Netdisco
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 2.050003
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_netdisco
from openapi_netdisco.api.general_api import GeneralApi # noqa: E501
from openapi_netdisco.rest import ApiException
class TestGeneralApi(unittest.TestCase):
    """Auto-generated unit test stubs for the GeneralApi client; bodies are TODO."""
    def setUp(self):
        # Fresh generated API client for each test case.
        self.api = openapi_netdisco.api.general_api.GeneralApi()  # noqa: E501
    def tearDown(self):
        pass
    def test_login_post(self):
        """Test case for login_post — stub, not yet implemented.
        """
        pass
    def test_logout_get(self):
        """Test case for logout_get — stub, not yet implemented.
        """
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 20.108696
| 124
| 0.682162
| 112
| 925
| 5.410714
| 0.491071
| 0.09901
| 0.062706
| 0.082508
| 0.092409
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02371
| 0.224865
| 925
| 45
| 125
| 20.555556
| 0.821478
| 0.39027
| 0
| 0.1875
| 1
| 0
| 0.015296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.1875
| 0.3125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
8110d5952a700a0da7a7a70d153c1a9faabe468d
| 54
|
py
|
Python
|
tools/__init__.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
tools/__init__.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 7,642
|
2018-05-28T09:38:03.000Z
|
2022-03-31T20:55:48.000Z
|
tools/__init__.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
from . import localpaths as _localpaths # noqa: F401
| 27
| 53
| 0.759259
| 7
| 54
| 5.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 0.185185
| 54
| 1
| 54
| 54
| 0.840909
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d49604fe748ecde1a3e9ddb7b5f57cecf1b7a62d
| 37
|
py
|
Python
|
mothermayi/errors.py
|
EliRibble/mothermayi
|
5a5e1b528265eb7b7d901566d9afcba52a4cec78
|
[
"MIT"
] | 3
|
2015-09-30T20:17:43.000Z
|
2017-07-10T16:05:24.000Z
|
mothermayi/errors.py
|
EliRibble/mothermayi
|
5a5e1b528265eb7b7d901566d9afcba52a4cec78
|
[
"MIT"
] | 1
|
2021-03-25T21:27:48.000Z
|
2021-03-25T21:27:48.000Z
|
mothermayi/errors.py
|
EliRibble/mothermayi
|
5a5e1b528265eb7b7d901566d9afcba52a4cec78
|
[
"MIT"
] | null | null | null |
class FailHook(Exception):
    """Exception raised to signal that a hook has failed."""
| 9.25
| 26
| 0.702703
| 4
| 37
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 3
| 27
| 12.333333
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d4eb1186cf4657eeffbb2f073d233fa29e19dd71
| 108
|
py
|
Python
|
Jumpscale/clients/gitea/client/GiteaBase.py
|
threefoldtech/JumpscaleX
|
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
|
[
"Apache-2.0"
] | 2
|
2019-05-09T07:21:25.000Z
|
2019-08-05T06:37:53.000Z
|
Jumpscale/clients/gitea/client/GiteaBase.py
|
threefoldtech/JumpscaleX
|
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
|
[
"Apache-2.0"
] | 664
|
2018-12-19T12:43:44.000Z
|
2019-08-23T04:24:42.000Z
|
Jumpscale/clients/gitea/client/GiteaBase.py
|
threefoldtech/jumpscale10
|
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
|
[
"Apache-2.0"
] | 7
|
2019-05-03T07:14:37.000Z
|
2019-08-05T12:36:52.000Z
|
import json
from Jumpscale import j
# Convenience alias for the Jumpscale base class used by client classes in this package.
JSBASE = j.application.JSBaseClass
class GiteaBase(object):
    """Placeholder base class for Gitea client objects; defines no behavior yet."""
| 12
| 34
| 0.768519
| 14
| 108
| 5.928571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175926
| 108
| 8
| 35
| 13.5
| 0.932584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
d4ec93421af0fff4dbcdd9babfbe1c477c399b6f
| 218
|
py
|
Python
|
importpath_field/apps.py
|
Apkawa/django-import-path-field
|
16e45621b519f13147ed9e980ffaefd1c79d0940
|
[
"MIT"
] | null | null | null |
importpath_field/apps.py
|
Apkawa/django-import-path-field
|
16e45621b519f13147ed9e980ffaefd1c79d0940
|
[
"MIT"
] | 1
|
2020-01-08T11:18:34.000Z
|
2020-01-08T11:18:34.000Z
|
importpath_field/apps.py
|
Apkawa/django-import-path-field
|
16e45621b519f13147ed9e980ffaefd1c79d0940
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig as BaseConfig
from django.utils.translation import ugettext_lazy as _
class ImportpathFieldConfig(BaseConfig):
    """Django AppConfig for the ``importpath_field`` application."""
    name = 'importpath_field'
    # Translatable, human-readable label shown in the Django admin.
    verbose_name = _('Importpath Field')
| 27.25
| 55
| 0.793578
| 25
| 218
| 6.72
| 0.68
| 0.119048
| 0.22619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142202
| 218
| 7
| 56
| 31.142857
| 0.898396
| 0
| 0
| 0
| 0
| 0
| 0.146789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be006dc6e7f8dc57713a1b1e86d9e53673671701
| 82
|
py
|
Python
|
scarper/routes.py
|
x14119641/scarper_using_aiohttp
|
eac3153329ffeedfd2411373033eeeeb20792136
|
[
"MIT"
] | null | null | null |
scarper/routes.py
|
x14119641/scarper_using_aiohttp
|
eac3153329ffeedfd2411373033eeeeb20792136
|
[
"MIT"
] | null | null | null |
scarper/routes.py
|
x14119641/scarper_using_aiohttp
|
eac3153329ffeedfd2411373033eeeeb20792136
|
[
"MIT"
] | null | null | null |
from views import index
def setup_routes(app):
    """Register the application's URL routes on *app*'s router.

    Currently wires GET '/' to the ``index`` view.
    """
    router = app.router
    router.add_get('/', index)
| 20.5
| 34
| 0.719512
| 13
| 82
| 4.384615
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 82
| 4
| 34
| 20.5
| 0.814286
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be02e1a720a9b5e3c97dbb398d62fba150035349
| 80
|
py
|
Python
|
elabjournal/elabjournal/SampleTypes.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | 2
|
2021-06-29T11:17:27.000Z
|
2022-01-11T18:41:49.000Z
|
elabjournal/elabjournal/SampleTypes.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | null | null | null |
elabjournal/elabjournal/SampleTypes.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | 1
|
2019-06-06T13:23:11.000Z
|
2019-06-06T13:23:11.000Z
|
from .eLABJournalPager import *
class SampleTypes(eLABJournalPager):
    # Pager over sample types; inherits all behavior from eLABJournalPager unchanged.
    pass
| 13.333333
| 36
| 0.775
| 7
| 80
| 8.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1625
| 80
| 6
| 37
| 13.333333
| 0.925373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
077344bd57cd8990d010206d15e75f3288d953cf
| 22
|
py
|
Python
|
{{cookiecutter.project_slug}}/server/tasks/__init__.py
|
jtbaker/cookiecutter_fullstack
|
00b6ab9a938e68605f446603ad50b88ddb538b13
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/server/tasks/__init__.py
|
jtbaker/cookiecutter_fullstack
|
00b6ab9a938e68605f446603ad50b88ddb538b13
|
[
"MIT"
] | 3
|
2022-02-13T19:39:07.000Z
|
2022-02-27T09:50:18.000Z
|
{{cookiecutter.project_slug}}/server/tasks/__init__.py
|
jtbaker/cookiecutter_fullstack
|
00b6ab9a938e68605f446603ad50b88ddb538b13
|
[
"MIT"
] | null | null | null |
from .tasks import add
| 22
| 22
| 0.818182
| 4
| 22
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
07752892da80e60e5ba4c4bb238507e70106021b
| 219
|
py
|
Python
|
pysteps/utils/__init__.py
|
Fangyh09/pysteps
|
9eb7f4ead0a946d98b7504d1bd66b18dc405ed51
|
[
"BSD-3-Clause"
] | 6
|
2019-01-06T07:42:55.000Z
|
2021-02-03T13:59:50.000Z
|
pysteps/utils/__init__.py
|
Fangyh09/pysteps
|
9eb7f4ead0a946d98b7504d1bd66b18dc405ed51
|
[
"BSD-3-Clause"
] | 5
|
2018-12-23T15:10:27.000Z
|
2021-01-06T15:03:03.000Z
|
pysteps/utils/__init__.py
|
Fangyh09/pysteps
|
9eb7f4ead0a946d98b7504d1bd66b18dc405ed51
|
[
"BSD-3-Clause"
] | 2
|
2019-08-06T14:16:43.000Z
|
2019-08-13T00:36:31.000Z
|
"""Miscellaneous utility functions."""
from .arrays import *
from .conversion import *
from .dimension import *
from .interface import get_method
from .fft import *
from .spectral import *
from .transformation import *
| 24.333333
| 38
| 0.767123
| 26
| 219
| 6.423077
| 0.538462
| 0.299401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141553
| 219
| 9
| 39
| 24.333333
| 0.888298
| 0.146119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0787b853c75fb2c08ceae87bb3dfd01029589429
| 7,826
|
py
|
Python
|
aregalo-backend/aregalo/service.py
|
FreNeS1/aregalo-backend
|
b96e46972e72320668caf90780fa23fc8b2a9ad8
|
[
"MIT"
] | null | null | null |
aregalo-backend/aregalo/service.py
|
FreNeS1/aregalo-backend
|
b96e46972e72320668caf90780fa23fc8b2a9ad8
|
[
"MIT"
] | null | null | null |
aregalo-backend/aregalo/service.py
|
FreNeS1/aregalo-backend
|
b96e46972e72320668caf90780fa23fc8b2a9ad8
|
[
"MIT"
] | null | null | null |
import http
from typing import List
from fastapi import HTTPException
from .schema import Present, UserData, PresentWishData, PresentGiftData, PresentCreateData
from .store import Store
class Service:
    """Interface for the service of the application.

    Every method raises NotImplementedError; concrete implementations
    (see StoreService below) must override all of them.
    """
    def get_all_users(self) -> List[UserData]:
        """
        Retrieves all user data from the service.
        :return: The data of all the retrieved users.
        """
        raise NotImplementedError()
    def get_user_data(self, user: str) -> UserData:
        """
        Retrieve the user data from the service.
        :param user: The name of the user to retrieve.
        :return: The data of the retrieved user.
        """
        raise NotImplementedError()
    def get_user_present_wish_list(self, user: str) -> List[PresentWishData]:
        """
        Retrieve the present list of an user.
        :param user: The name of the user of the present list.
        :return: The present list of the user as expected by wishers.
        """
        raise NotImplementedError()
    def get_user_present_gift_list(self, user: str) -> List[PresentGiftData]:
        """
        Retrieve the present list of an user.
        :param user: The name of the user of the present list.
        :return: The present list of the user as expected by gifters.
        """
        raise NotImplementedError()
    def create_present(self, user: str, present: PresentCreateData) -> List[PresentWishData]:
        """
        Creates a present and saves it.
        :param user: The name of the user of the present list.
        :param present: The present data of the new present.
        :return: The new present list for the user.
        """
        raise NotImplementedError()
    def update_present(self, user: str, present_id: int, present: PresentWishData) -> List[PresentWishData]:
        """
        Updates a present.
        :param user: The name of the user of the present list.
        :param present_id: The id of the present to update.
        :param present: The present data of the new present.
        :return: The new present list for the user.
        """
        raise NotImplementedError()
    def delete_present(self, user: str, present_id: int) -> List[PresentWishData]:
        """
        Deletes a present.
        :param user: The name of the user of the present list.
        :param present_id: The id of the present to delete.
        :return: The new present list for the user.
        """
        raise NotImplementedError()
    def assign_user_to_present(self, user: str, present_id: int, gifter: str) -> List[PresentGiftData]:
        """
        Assigns an user as a gifter for a present of another user.
        :param user: The name of the user of the present list.
        :param present_id: The id of the present to assign the gifter.
        :param gifter: The name of the gifter user.
        :return: The new present list for the gifter user.
        """
        raise NotImplementedError()
    def remove_user_from_present(self, user: str, present_id: int, gifter: str) -> List[PresentGiftData]:
        """
        Removes an user as a gifter for a present of another user.
        :param user: The name of the user of the present list.
        :param present_id: The id of the present to assign the gifter.
        :param gifter: The name of the gifter user.
        :return: The new present list for the gifter user.
        """
        raise NotImplementedError()
class StoreService(Service):
    """Service implementation that handles requests with a store."""
    def __init__(self, store: Store):
        # Store backend used for all persistence operations.
        self._store = store
    def get_all_users(self) -> List[UserData]:
        return [user.to_user_data() for user in self._store.get_users()]
    def get_user_data(self, user: str) -> UserData:
        # 404 when the user is unknown to the store.
        store_user = self._store.get_user(user)
        if store_user is None:
            raise HTTPException(detail=f"No user with name \"{user}\"", status_code=http.HTTPStatus.NOT_FOUND)
        return store_user.to_user_data()
    def get_user_present_wish_list(self, user: str) -> List[PresentWishData]:
        store_presents = self._store.get_presents(user)
        if store_presents is None:
            raise HTTPException(detail=f"No user with name \"{user}\"", status_code=http.HTTPStatus.NOT_FOUND)
        return [present.to_present_wish_response() for present in store_presents]
    def get_user_present_gift_list(self, user: str) -> List[PresentGiftData]:
        store_presents = self._store.get_presents(user)
        if store_presents is None:
            raise HTTPException(detail=f"No user with name \"{user}\"", status_code=http.HTTPStatus.NOT_FOUND)
        return [present.to_present_gift_response() for present in store_presents]
    def create_present(self, user: str, present: PresentCreateData) -> List[PresentWishData]:
        # Allocate the next present id from the user's counter, then persist the bump.
        # NOTE(review): this read-modify-write is not concurrency-safe — confirm
        # the service runs single-threaded or the store serializes these updates.
        wish_user = self._store.get_user(user)
        present_id = wish_user.present_id
        wish_user.present_id = present_id + 1
        self._store.upsert_user(wish_user)
        presents = self._store.get_presents(user)
        presents.append(present.to_present(present_id, assigned_to=[]))
        self._store.upsert_presents(user, presents)
        return [present.to_present_wish_response() for present in presents]
    def update_present(self, user: str, present_id: int, present: PresentWishData) -> List[PresentWishData]:
        # NOTE(review): create_present calls to_present(present_id, assigned_to=...),
        # while here to_present receives only the assigned_to list — verify that
        # PresentWishData.to_present's signature preserves the present id correctly.
        presents, found_present, found_present_index = self.find_present_in_list_by_id(user, present_id)
        presents[found_present_index] = present.to_present(found_present.assigned_to)
        self._store.upsert_presents(user, presents)
        return [present.to_present_wish_response() for present in presents]
    def delete_present(self, user: str, present_id: int) -> List[PresentWishData]:
        presents, found_present, found_present_index = self.find_present_in_list_by_id(user, present_id)
        del presents[found_present_index]
        self._store.upsert_presents(user, presents)
        return [present.to_present_wish_response() for present in presents]
    def assign_user_to_present(self, user: str, present_id: int, gifter: str) -> List[PresentGiftData]:
        # Adding an already-assigned gifter is a no-op (membership check below).
        presents, found_present, found_present_index = self.find_present_in_list_by_id(user, present_id)
        if gifter not in presents[found_present_index].assigned_to:
            presents[found_present_index].assigned_to.append(gifter)
        self._store.upsert_presents(user, presents)
        return [present.to_present_gift_response() for present in presents]
    def remove_user_from_present(self, user: str, present_id: int, gifter: str) -> List[PresentGiftData]:
        # Removing a gifter who is not assigned is a no-op (membership check below).
        presents, found_present, found_present_index = self.find_present_in_list_by_id(user, present_id)
        if gifter in presents[found_present_index].assigned_to:
            presents[found_present_index].assigned_to.remove(gifter)
        self._store.upsert_presents(user, presents)
        return [present.to_present_gift_response() for present in presents]
    def find_present_in_list_by_id(self, user: str, present_id: int) -> (List[Present], Present, int):
        """
        Auxiliary method to retrieve the presents of an user and select one by id.
        :param user: The name of the user of the present list.
        :param present_id: The id of the present to find.
        :return: The list of presents, the found present, and its index.
        """
        presents = self._store.get_presents(user)
        found_present = next(filter(lambda p: p.id == present_id, presents), None)
        if found_present is None:
            raise HTTPException(detail=f"No present with id \"{present_id}\" for user \"{user}\"", status_code=http.HTTPStatus.NOT_FOUND)
        return presents, found_present, presents.index(found_present)
| 46.583333
| 137
| 0.68298
| 1,047
| 7,826
| 4.908309
| 0.097421
| 0.029189
| 0.036388
| 0.025686
| 0.796264
| 0.783032
| 0.748589
| 0.706169
| 0.685153
| 0.675812
| 0
| 0.000166
| 0.231408
| 7,826
| 167
| 138
| 46.862275
| 0.854198
| 0.255175
| 0
| 0.617284
| 0
| 0
| 0.016406
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.246914
| false
| 0
| 0.061728
| 0.012346
| 0.45679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
07c4a98df45e55d8bb3fb9b9eb01daa9086143f6
| 122
|
py
|
Python
|
test/units/api_import_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
test/units/api_import_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
test/units/api_import_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# noinspection PyUnresolvedReferences
def test_package_import():
    """Smoke test: the top-level ``api`` package is importable."""
    import api
    assert api is not None
| 15.25
| 37
| 0.696721
| 13
| 122
| 6.384615
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.196721
| 122
| 7
| 38
| 17.428571
| 0.836735
| 0.467213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
07f3f86999b8328a3f079305f24b202f739b5747
| 4,860
|
py
|
Python
|
tests/interfaces/test_numpyro/test_numpyro_priors.py
|
thomaspinder/GPJax
|
929fcb88d13d15bb10e1175491dbc3e79622325a
|
[
"Apache-2.0"
] | 44
|
2020-12-03T14:07:39.000Z
|
2022-03-14T17:45:34.000Z
|
tests/interfaces/test_numpyro/test_numpyro_priors.py
|
thomaspinder/GPJax
|
929fcb88d13d15bb10e1175491dbc3e79622325a
|
[
"Apache-2.0"
] | 28
|
2020-12-05T08:54:45.000Z
|
2022-03-01T09:56:50.000Z
|
tests/interfaces/test_numpyro/test_numpyro_priors.py
|
thomaspinder/GPJax
|
929fcb88d13d15bb10e1175491dbc3e79622325a
|
[
"Apache-2.0"
] | 7
|
2021-02-05T12:37:57.000Z
|
2022-03-13T13:00:20.000Z
|
import chex
import jax.numpy as jnp
import jax.random as jr
import numpyro
import numpyro.distributions as dist
import pytest
from numpyro.contrib.tfp import distributions as tfd
from numpyro.distributions import constraints
from gpjax.gps import Prior
from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params
from gpjax.kernels import RBF
from gpjax.likelihoods import Gaussian
from gpjax.parameters import initialise
# TODO: test conjugate posterior
def _get_conjugate_posterior_params() -> dict:
    """Build a fresh parameter dict for an RBF-kernel prior times Gaussian likelihood."""
    posterior = Prior(kernel=RBF()) * Gaussian()
    return initialise(posterior)
def test_numpyro_dict_priors_defaults_numpyro():
    """numpyro_dict_params wraps numpyro distributions with default metadata."""
    priors = {
        "lengthscale": dist.LogNormal(loc=0.0, scale=1.0),
        "variance": dist.LogNormal(loc=0.0, scale=1.0),
        "obs_noise": dist.LogNormal(loc=0.0, scale=1.0),
    }
    converted = numpyro_dict_params(priors)
    assert set(converted) == set(priors.keys())
    for name, prior_dist in priors.items():
        # each entry carries exactly the expected metadata keys
        assert set(converted[name].keys()) == set(("prior", "param_type"))
        # the wrapped prior is the very distribution passed in
        chex.assert_equal(converted[name]["prior"], prior_dist)
def test_numpyro_dict_priors_defaults_tfp():
    """numpyro_dict_params handles TFP-backed distributions the same way."""
    priors = {
        "lengthscale": tfd.LogNormal(loc=0.0, scale=1.0),
        "variance": tfd.LogNormal(loc=0.0, scale=1.0),
        "obs_noise": tfd.LogNormal(loc=0.0, scale=1.0),
    }
    converted = numpyro_dict_params(priors)
    assert set(converted) == set(priors.keys())
    for name, prior_dist in priors.items():
        # each entry carries exactly the expected metadata keys
        assert set(converted[name].keys()) == set(("prior", "param_type"))
        # the wrapped prior is the very distribution passed in
        chex.assert_equal(converted[name]["prior"], prior_dist)
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_all(prior):
    """add_priors(params, prior) applies *prior* to every parameter without mutating the input."""
    base = _get_conjugate_posterior_params()
    converted = numpyro_dict_params(base)
    updated = add_priors(converted, prior)
    for entry in updated.values():
        # every entry was switched to the supplied prior
        chex.assert_equal(entry["param_type"], "prior")
        chex.assert_equal(entry["prior"], prior)
    # the original gpjax dict must be left untouched
    chex.assert_equal(base, _get_conjugate_posterior_params())
@pytest.mark.parametrize(
    "variable",
    ["lengthscale", "obs_noise", "variance"],
)
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_str(variable, prior):
    """add_priors(params, name, prior) applies *prior* to a single named parameter."""
    base = _get_conjugate_posterior_params()
    converted = numpyro_dict_params(base)
    updated = add_priors(converted, variable, prior)
    # only the targeted variable should carry the new prior metadata
    chex.assert_equal(updated[variable]["param_type"], "prior")
    chex.assert_equal(updated[variable]["prior"], prior)
    # the original gpjax dict must be left untouched
    chex.assert_equal(base, _get_conjugate_posterior_params())
@pytest.mark.parametrize(
    "variable",
    ["lengthscale", "obs_noise", "variance"],
)
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_dict(variable, prior):
    """add_priors also accepts a {name: prior} mapping."""
    base = _get_conjugate_posterior_params()
    converted = numpyro_dict_params(base)
    mapping = {str(variable): prior}
    updated = add_priors(converted, mapping)
    # the targeted variable carries the new prior metadata
    chex.assert_equal(updated[variable]["param_type"], "prior")
    chex.assert_equal(updated[variable]["prior"], prior)
    # the original gpjax dict must be left untouched
    chex.assert_equal(base, _get_conjugate_posterior_params())
| 31.558442
| 85
| 0.696091
| 665
| 4,860
| 4.879699
| 0.136842
| 0.01849
| 0.038829
| 0.046225
| 0.777196
| 0.777196
| 0.748536
| 0.748536
| 0.724191
| 0.676117
| 0
| 0.022693
| 0.183951
| 4,860
| 153
| 86
| 31.764706
| 0.795512
| 0.1
| 0
| 0.54717
| 0
| 0
| 0.05576
| 0
| 0
| 0
| 0
| 0.006536
| 0.141509
| 1
| 0.056604
| false
| 0
| 0.122642
| 0
| 0.188679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ed16f7806f1dfc804837cda1a0247e280dfbe0f8
| 39
|
py
|
Python
|
src/clearskies/input_outputs/exceptions/cli_not_found.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | 4
|
2021-04-23T18:13:06.000Z
|
2022-03-26T01:51:01.000Z
|
src/clearskies/input_outputs/exceptions/cli_not_found.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
src/clearskies/input_outputs/exceptions/cli_not_found.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
class CLINotFound(Exception):
pass
| 13
| 29
| 0.74359
| 4
| 39
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 39
| 2
| 30
| 19.5
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ed2dbeba4db09bc98cf49fa0cdaf0385159556de
| 143
|
py
|
Python
|
run.py
|
Zo0x/hedhes
|
7cb933f49079f6b75f7a84ba275fff9721ca06d3
|
[
"MIT"
] | 3
|
2017-08-29T13:06:50.000Z
|
2017-09-22T18:40:17.000Z
|
run.py
|
Zo0x/hedhes
|
7cb933f49079f6b75f7a84ba275fff9721ca06d3
|
[
"MIT"
] | 1
|
2017-08-29T13:25:59.000Z
|
2017-08-29T13:46:57.000Z
|
run.py
|
Zo0x/hedhes
|
7cb933f49079f6b75f7a84ba275fff9721ca06d3
|
[
"MIT"
] | null | null | null |
#!MediaManagerEnv/bin/python
import sys
from app import app, models
# Run the Flask dev server; optional first CLI arg overrides the default port 5000.
# NOTE(review): debug=True is unsafe for production deployments — confirm intent.
app.run(debug=True, port=int(sys.argv[1]) if len(sys.argv) > 1 else 5000)
| 23.833333
| 73
| 0.741259
| 26
| 143
| 4.076923
| 0.730769
| 0.132075
| 0.150943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.118881
| 143
| 5
| 74
| 28.6
| 0.793651
| 0.188811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ed3406c036e106c060367f2f0ad7b0ba4b61be4e
| 56
|
py
|
Python
|
example_file3.py
|
kdeckerpyplus/PyPlus_Class1
|
f6d058db7dd612c22f619a24e76c46dd618ea478
|
[
"Apache-2.0"
] | null | null | null |
example_file3.py
|
kdeckerpyplus/PyPlus_Class1
|
f6d058db7dd612c22f619a24e76c46dd618ea478
|
[
"Apache-2.0"
] | null | null | null |
example_file3.py
|
kdeckerpyplus/PyPlus_Class1
|
f6d058db7dd612c22f619a24e76c46dd618ea478
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Minimal example script: prints an identifying message and exits.
print("This is example file 3")
| 11.2
| 31
| 0.678571
| 10
| 56
| 3.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.160714
| 56
| 4
| 32
| 14
| 0.787234
| 0.357143
| 0
| 0
| 0
| 0
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ed440753a05a0833dc69f8a725a6cceae6f31fad
| 305
|
py
|
Python
|
db/db_helpers/orgSelection.py
|
akarapun/elearning
|
fe116d5815925269819061ea183cbfdb773844cf
|
[
"MIT"
] | 1
|
2020-03-14T11:00:14.000Z
|
2020-03-14T11:00:14.000Z
|
db/db_helpers/orgSelection.py
|
akarapun/elearning
|
fe116d5815925269819061ea183cbfdb773844cf
|
[
"MIT"
] | null | null | null |
db/db_helpers/orgSelection.py
|
akarapun/elearning
|
fe116d5815925269819061ea183cbfdb773844cf
|
[
"MIT"
] | null | null | null |
from db import db_session
from db_models import OrganizationDBModel
def selectOrgByOrgCode(orgCode):
    """Return the first OrganizationDBModel with this orgCode, or None."""
    query = db_session.query(OrganizationDBModel)
    return query.filter_by(orgCode=orgCode).first()
def isOrgByOrgCodeExist(orgCode):
    """Return True when an organization with *orgCode* exists in the DB."""
    # `x is not None` already evaluates to a bool; the original
    # `True if ... else False` conditional was redundant.
    return selectOrgByOrgCode(orgCode) is not None
| 30.5
| 83
| 0.806557
| 38
| 305
| 6.368421
| 0.605263
| 0.049587
| 0.256198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 305
| 9
| 84
| 33.888889
| 0.913208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.285714
| 0.142857
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ed71c06691e041cb4247d67c151b6c6b87671098
| 16
|
py
|
Python
|
maptest/a.py
|
FractalOSS2020/FractalOSS2020.github.io
|
a53f2ee7a0cf98cac714108a32ecd365f775a0c4
|
[
"MIT"
] | null | null | null |
maptest/a.py
|
FractalOSS2020/FractalOSS2020.github.io
|
a53f2ee7a0cf98cac714108a32ecd365f775a0c4
|
[
"MIT"
] | null | null | null |
maptest/a.py
|
FractalOSS2020/FractalOSS2020.github.io
|
a53f2ee7a0cf98cac714108a32ecd365f775a0c4
|
[
"MIT"
] | null | null | null |
# Smoke-test script: bind a constant and echo it.
a = 123
print(a)
| 8
| 8
| 0.625
| 4
| 16
| 2.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0.1875
| 16
| 2
| 8
| 8
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ed76d9865526677845375b444f79fa3dda0cc926
| 59
|
py
|
Python
|
numpy/core/pypy_impl/__init__.py
|
dand-oss/numpypy
|
3d058248fa626b089e34804f36ede25fc67e9dd3
|
[
"BSD-3-Clause"
] | null | null | null |
numpy/core/pypy_impl/__init__.py
|
dand-oss/numpypy
|
3d058248fa626b089e34804f36ede25fc67e9dd3
|
[
"BSD-3-Clause"
] | null | null | null |
numpy/core/pypy_impl/__init__.py
|
dand-oss/numpypy
|
3d058248fa626b089e34804f36ede25fc67e9dd3
|
[
"BSD-3-Clause"
] | null | null | null |
from digitize import digitize
from bincount import bincount
| 29.5
| 29
| 0.881356
| 8
| 59
| 6.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 59
| 2
| 30
| 29.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ed7cfd60834f31eea9444efa11b40e67e7d785d6
| 985
|
py
|
Python
|
exercicios_programas/ex7_contadores/exercicio5.10.py
|
robinson-1985/livro_python
|
2b94d32c81121b631a70f8c9fc443d697947ee63
|
[
"MIT"
] | null | null | null |
exercicios_programas/ex7_contadores/exercicio5.10.py
|
robinson-1985/livro_python
|
2b94d32c81121b631a70f8c9fc443d697947ee63
|
[
"MIT"
] | null | null | null |
exercicios_programas/ex7_contadores/exercicio5.10.py
|
robinson-1985/livro_python
|
2b94d32c81121b631a70f8c9fc443d697947ee63
|
[
"MIT"
] | null | null | null |
'''5.10 Modifique o programa da listagem para que aceite respostas com letras maiúsculas e minúsculas em todas as questões.
Listagem 5.10 – Contagem de questões corretas
pontos = 0
questão = 1
while questão <= 3:
    resposta = input(f"Resposta da questão {questão}: ")
    if questão == 1 and resposta == "b":
        pontos = pontos + 1
    if questão == 2 and resposta == "a":
        pontos = pontos + 1
    if questão == 3 and resposta == "d":
        pontos = pontos + 1
    questão = questão + 1
print(f"O aluno fez {pontos} ponto(s)") '''
# Answer key: question number -> correct (lowercase) letter. Using a dict
# removes the three duplicated if-chains; str.lower() accepts either case,
# matching the original `== "b" or == "B"` behaviour.
gabarito = {1: "b", 2: "a", 3: "d"}
pontos = 0
for questão in range(1, 4):
    resposta = input(f"Resposta da questão {questão}: ")
    if resposta.lower() == gabarito[questão]:
        pontos += 1
print(f"O aluno fez {pontos} ponto(s)")
| 33.965517
| 123
| 0.604061
| 141
| 985
| 4.234043
| 0.319149
| 0.080402
| 0.130653
| 0.100503
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0
| 0.035961
| 0.26599
| 985
| 29
| 124
| 33.965517
| 0.786999
| 0.549239
| 0
| 0.25
| 0
| 0
| 0.150685
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ed7e3c909e6d0b573aca579b66b0e9ddf1f63b6a
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/clikit/api/config/config.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/clikit/api/config/config.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/clikit/api/config/config.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/da/27/eb/bc0609ed79d48f6fa198ff1acc3f2d8c6be8a0f9d11da759ca60731c99
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354167
| 0
| 96
| 1
| 96
| 96
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ed7ec90554f168bc00d7144458f64276f893c53f
| 106
|
py
|
Python
|
mundo-3/ex112.py
|
GustavoMendel/curso-python
|
2270734cc159cf44cf513184f1467a9040a7bc46
|
[
"MIT"
] | null | null | null |
mundo-3/ex112.py
|
GustavoMendel/curso-python
|
2270734cc159cf44cf513184f1467a9040a7bc46
|
[
"MIT"
] | null | null | null |
mundo-3/ex112.py
|
GustavoMendel/curso-python
|
2270734cc159cf44cf513184f1467a9040a7bc46
|
[
"MIT"
] | null | null | null |
from utilidades import dado, moeda

# Read a monetary value from the user, then print the formatted summary.
valor = dado.leiaDinheiro()
moeda.resumo(valor)
| 17.666667
| 28
| 0.811321
| 14
| 106
| 6.142857
| 0.571429
| 0.325581
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122642
| 106
| 5
| 29
| 21.2
| 0.924731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
71fe81f8a0f8a6b61eb0bca43f320e7493f01b29
| 108
|
py
|
Python
|
09_functions/048_intro-to-functions.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | 1
|
2020-04-17T20:12:55.000Z
|
2020-04-17T20:12:55.000Z
|
09_functions/048_intro-to-functions.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | null | null | null |
09_functions/048_intro-to-functions.py
|
selimmh/Python_Basics
|
9e16de82306261f3acd44d88b862bcc14d1a71a1
|
[
"CNRI-Python"
] | null | null | null |
# intro to function
def my_function():
    """Print a greeting; demonstrates defining and calling a function."""
    print("Hello, this is function")


# calling function
my_function()
| 15.428571
| 34
| 0.731481
| 15
| 108
| 5.133333
| 0.666667
| 0.25974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157407
| 108
| 7
| 35
| 15.428571
| 0.846154
| 0.314815
| 0
| 0
| 0
| 0
| 0.319444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
71ff80a97d7ae4d8b771384b0b4aed335da328a9
| 96
|
py
|
Python
|
code/delete_cluster.py
|
nateGeorge/udacity_dend_capstone
|
9d3eff1ed4cd38a4698ade0d4aca75b187fb0d54
|
[
"Apache-2.0"
] | null | null | null |
code/delete_cluster.py
|
nateGeorge/udacity_dend_capstone
|
9d3eff1ed4cd38a4698ade0d4aca75b187fb0d54
|
[
"Apache-2.0"
] | null | null | null |
code/delete_cluster.py
|
nateGeorge/udacity_dend_capstone
|
9d3eff1ed4cd38a4698ade0d4aca75b187fb0d54
|
[
"Apache-2.0"
] | null | null | null |
from infrastructure_as_code import redshift_creator

# Instantiate the helper and tear the Redshift cluster down.
creator = redshift_creator()
creator.delete_cluster()
| 24
| 51
| 0.854167
| 13
| 96
| 5.923077
| 0.769231
| 0.38961
| 0.441558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 4
| 52
| 24
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9c236ac3b346fdc378d2571239a69253175698e3
| 64
|
py
|
Python
|
sp_uncase_ja_30000/__init__.py
|
iki-taichi/tokenization
|
ac11210ac824109eb26e0f66f0e5d091a8327f96
|
[
"MIT"
] | null | null | null |
sp_uncase_ja_30000/__init__.py
|
iki-taichi/tokenization
|
ac11210ac824109eb26e0f66f0e5d091a8327f96
|
[
"MIT"
] | null | null | null |
sp_uncase_ja_30000/__init__.py
|
iki-taichi/tokenization
|
ac11210ac824109eb26e0f66f0e5d091a8327f96
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from .sp_uncase_ja_30000 import FullTokenizer
| 12.8
| 45
| 0.796875
| 10
| 64
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 0.140625
| 64
| 4
| 46
| 16
| 0.763636
| 0.203125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9c5bd6940afb470f200c672ea0c90c0eb8094863
| 14,861
|
py
|
Python
|
sheer/test_indexing.py
|
fna/sheer
|
2395da4eb342cae8116942851b73d0e3f00b4d1e
|
[
"CC0-1.0"
] | null | null | null |
sheer/test_indexing.py
|
fna/sheer
|
2395da4eb342cae8116942851b73d0e3f00b4d1e
|
[
"CC0-1.0"
] | null | null | null |
sheer/test_indexing.py
|
fna/sheer
|
2395da4eb342cae8116942851b73d0e3f00b4d1e
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import mock
from StringIO import StringIO
from .indexer import ContentProcessor, index_location
from elasticsearch.exceptions import TransportError
class AttrDict(dict):
    """A dict whose entries are also reachable as attributes (d.key == d['key'])."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Aliasing __dict__ to the mapping itself makes attribute access
        # and item access share a single storage.
        self.__dict__ = self
class TestIndexing(object):
    """
    Test Sheer content indexing.

    Each test wires up mock content processors and a mock Elasticsearch
    client, then drives ``index_location`` and asserts on the calls made.
    """
    def setup(self):
        # Sheer indexing tries to load three JSON files. For testing purposes,
        # `settings.json` and `mappings.json` are not necessary, but we do need
        # a content processor to test with. The contents of `processors.json` is
        # mocked here.
        self.mock_processors = {'posts':
                                {'url': 'http://test/api/get_posts/',
                                 'processor': 'post_processor',
                                 'mappings': '_settings/posts_mappings.json'}}
        self.mock_processor_mappings = '''{}'''
        self.mock_document = {
            '_id': u'a-great-post-slug',
            '_index': 'content',
            '_type': 'posts'
        }
        self.config = {'location': '.',
                       'elasticsearch': None,
                       'index': 'content'}
        # This is our mock ContentProcessor. It will return mappings and
        # documents for a particular document type, 'posts' in our mock
        # scenario. documents() returns a generator, which requires us to mock
        # its iterator.
        self.mock_processor = mock.Mock(spec=ContentProcessor)
        self.mock_processor.name = 'posts'
        self.mock_processor.processor_name = 'posts_processor'
        self.mock_processor.mapping.return_value = {}
        self.mock_processor.documents.return_value = iter([self.mock_document])

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_indexing(self, mock_exists, mock_read_json_file,
                      mock_ContentProcessor, mock_Elasticsearch, mock_bulk):
        """
        `sheer index`
        Test the creation of indexes by Sheer. For a given index, if it
        does not exist, it should be created and the documents yeilded
        by a given set of content processors should be created.
        """
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [self.mock_processors, {}]
        # Wire-up our mock content processor
        mock_ContentProcessor.return_value = self.mock_processor
        # Here we want to test:
        #   * Index doesn't exist -> should be created
        #   * Mappings don't exist for processor -> should be created
        #   * Documents don't exist for processor -> should be created
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.return_value = False
        mock_es.indices.get_mapping.return_value = None
        test_args = AttrDict(processors=[], reindex=False)
        index_location(test_args, self.config)
        mock_es.indices.create.assert_called_with(index=self.config['index'])
        mock_bulk.assert_called_with(mock_es,
                                     self.mock_processor.documents())

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_reindexing(self, mock_exists, mock_read_json_file,
                        mock_ContentProcessor, mock_Elasticsearch, mock_bulk):
        """
        `sheer index --reindex`
        Test the re-creation of existing indexes by Sheer. For a given
        index, if it already exists, it should be removed and recreated.
        """
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [self.mock_processors, {}]
        # Wire-up our mock content processor
        mock_ContentProcessor.return_value = self.mock_processor
        # Here we want to test:
        #   * Index exists -> should be deleted and recreated.
        #   ... therefore ...
        #   * Mappings don't exist for processor -> should be created
        #   * Documents don't exist for processor -> should be created
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.side_effect = [True, False]
        mock_es.indices.get_mapping.return_value = None
        test_args = AttrDict(processors=[], reindex=True)
        index_location(test_args, self.config)
        mock_es.indices.delete.assert_called_with(self.config['index'])
        mock_es.indices.create.assert_called_with(index=self.config['index'])
        mock_bulk.assert_called_with(mock_es,
                                     self.mock_processor.documents())

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_partial_indexing(self, mock_exists, mock_read_json_file,
                              mock_ContentProcessor, mock_Elasticsearch,
                              mock_bulk):
        """
        `sheer index --processors posts`
        Test partial indexing of the document type associated with a
        content processor.
        """
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [self.mock_processors, {}]
        # Wire-up our mock content processor
        mock_ContentProcessor.return_value = self.mock_processor
        # Here we want to test:
        #   * Index exists -> should be left alone
        #   * Mappings exist for processor -> should be deleted and recreated
        #   * Documents don't exist for processor -> should be created
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.return_value = True
        # The tests for the get_mapping return value simply need to evaluate to
        # True in indexer.py.
        mock_es.indices.get_mapping.return_value = True
        mock_create_exception = TransportError(409)
        mock_es.create.side_effect = mock_create_exception
        test_args = AttrDict(processors=['posts'], reindex=False)
        index_location(test_args, self.config)
        mock_bulk.assert_called_with(mock_es,
                                     self.mock_processor.documents())

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_partial_reindexing(self, mock_exists, mock_read_json_file,
                                mock_ContentProcessor, mock_Elasticsearch,
                                mock_bulk):
        """
        `sheer index --processors posts --reindex`
        Test the re-creation of mappings associated with content processor
        and the updating of its documents.
        """
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [self.mock_processors, {}]
        # Wire-up our mock content processor
        mock_ContentProcessor.return_value = self.mock_processor
        # Here we want to test:
        #   * Index exists and we're given processors -> should be left alone.
        #   * Mappings exist for processor -> should be deleted and recreated
        #   * Documents no longer exist for processor -> should be created
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.side_effect = [True, False]
        mock_es.indices.get_mapping.return_value = True
        test_args = AttrDict(processors=['posts'], reindex=True)
        index_location(test_args, self.config)
        mock_es.indices.delete_mapping.assert_called_with(
            index=self.config['index'],
            doc_type='posts')
        mock_bulk.assert_called_with(mock_es,
                                     self.mock_processor.documents())

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_indexing_failure_ioerr(self, mock_exists, mock_read_json_file,
                                    mock_ContentProcessor, mock_Elasticsearch,
                                    mock_bulk):
        """
        `sheer index`
        Test the failure of indexing by Sheer via an IOError, and make sure it
        fails gracefully. This simulates the unavailability and timeout of the
        upstream source of information.
        """
        # We want to capture stderr
        sys.stderr = StringIO()
        # Add mock error processors to the mock_processor json. This will let us
        # have two processors total. Because we're mocking ContentProcessor()
        # below we don't have to worry about the actual contents of this
        # dictionary.
        self.mock_processors['ioerrs'] = {
            'url': 'http://test/api/get_posts/',
            'processor': 'post_processor',
            'mappings': '_settings/posts_mappings.json'}
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [self.mock_processors, {}]
        # A context processor that will raise an IOError to simulate a
        # connection failure in requests.
        mock_ioerr_processor = mock.Mock(spec=ContentProcessor)
        mock_ioerr_processor.name = 'ioerrs'
        mock_ioerr_processor.processor_name = 'posts_processor'
        # BUGFIX: was `mapping.return_io = {}`, which Mock silently accepts as
        # a plain attribute; `return_value` is what actually stubs mapping().
        mock_ioerr_processor.mapping.return_value = {}
        mock_ioerr_processor.documents.side_effect = IOError("Connection aborted.")
        # Make sure ContentProcessor returns the processors
        mock_ContentProcessor.side_effect = [mock_ioerr_processor,
                                             self.mock_processor]
        # Here we assume:
        #   * Index doesn't exist -> should be created
        #   * Mappings don't exist for processor -> should be created
        #   * Documents don't exist for processor -> should be created
        #   * An exception is raised when trying to fetch documents from the
        #     context processor.
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.return_value = False
        mock_es.indices.get_mapping.return_value = None
        test_args = AttrDict(processors=[], reindex=False)
        try:
            index_location(test_args, self.config)
        except SystemExit as s:
            assert s.code == \
                'Indexing the following processor(s) failed: ioerrs'
        # Ensure that we got the right error message.
        assert 'error making connection' in sys.stderr.getvalue()
        mock_bulk.assert_called_with(mock_es,
                                     self.mock_processor.documents())

    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_indexing_failure_valueerr(self, mock_exists, mock_read_json_file,
                                       mock_ContentProcessor, mock_Elasticsearch,
                                       mock_bulk):
        """
        `sheer index`
        Test the failure of indexing by Sheer via a ValueError, and make sure it
        fails gracefully. This simulates the unavailability and timeout of the
        upstream source of information.
        """
        # We want to capture stderr
        sys.stderr = StringIO()
        # Add a mock error processor to the mock_processor json. This will let us
        # have three processors total. Because we're mocking ContentProcessor()
        # below we don't have to worry about the actual contents of these
        # dictionaries.
        valueerr_mock_processor = {'valueerrs': {
            'url': 'http://test/api/get_posts/',
            'processor': 'post_processor',
            'mappings': '_settings/posts_mappings.json'}
        }
        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [valueerr_mock_processor, {}]
        # A context processor that will raise a ValueError to simulate bad json
        # being provided by the upstream source.
        mock_bulk.side_effect = ValueError("No JSON object could be decoded")
        mock_valueerr_processor = mock.Mock(spec=ContentProcessor)
        mock_valueerr_processor.name = 'valueerrs'
        mock_valueerr_processor.processor_name = 'posts_processor'
        mock_valueerr_processor.mapping.return_value = {}
        # Make sure ContentProcessor returns the err processor
        mock_ContentProcessor.side_effect = [mock_valueerr_processor]
        # Here we assume:
        #   * Index doesn't exist -> should be created
        #   * Mappings don't exist for processor -> should be created
        #   * Documents don't exist for processor -> should be created
        #   * An exception is raised when trying to fetch documents from the
        #     context processor.
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.return_value = False
        mock_es.indices.get_mapping.return_value = None
        test_args = AttrDict(processors=[], reindex=False)
        try:
            index_location(test_args, self.config)
        except SystemExit as s:
            assert s.code == \
                'Indexing the following processor(s) failed: valueerrs'
        # Ensure that we got the right error message.
        assert 'error reading documents' in sys.stderr.getvalue()
| 43.580645
| 83
| 0.646457
| 1,762
| 14,861
| 5.272985
| 0.127696
| 0.026693
| 0.036164
| 0.054246
| 0.775589
| 0.744269
| 0.708105
| 0.70423
| 0.699709
| 0.684856
| 0
| 0.000368
| 0.268488
| 14,861
| 340
| 84
| 43.708824
| 0.854291
| 0.258462
| 0
| 0.624277
| 0
| 0
| 0.141717
| 0.061891
| 0
| 0
| 0
| 0
| 0.075145
| 0
| null | null | 0
| 0.028902
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c66c7d0b3775f583041600dc0f50c550b5b37f0
| 318
|
py
|
Python
|
PyMOTW/source/pkgutil/pkgutil_devel.py
|
axetang/AxePython
|
3b517fa3123ce2e939680ad1ae14f7e602d446a6
|
[
"Apache-2.0"
] | 1
|
2019-01-04T05:47:50.000Z
|
2019-01-04T05:47:50.000Z
|
PyMOTW/source/pkgutil/pkgutil_devel.py
|
axetang/AxePython
|
3b517fa3123ce2e939680ad1ae14f7e602d446a6
|
[
"Apache-2.0"
] | 1
|
2020-07-18T03:52:03.000Z
|
2020-07-18T04:18:01.000Z
|
PyMOTW/source/pkgutil/pkgutil_devel.py
|
axetang/AxePython
|
3b517fa3123ce2e939680ad1ae14f7e602d446a6
|
[
"Apache-2.0"
] | 2
|
2021-03-06T04:28:32.000Z
|
2021-03-06T04:59:17.000Z
|
#!/usr/bin/env python3
# encoding: utf-8
#
# Copyright (c) 2008 Doug Hellmann All rights reserved.
#
"""
"""
#end_pymotw_header
import demopkg2
print('demopkg2 :', demopkg2.__file__)
import demopkg2.overloaded
print('demopkg2.overloaded:', demopkg2.overloaded.__file__)
print()
demopkg2.overloaded.func()
| 17.666667
| 59
| 0.726415
| 37
| 318
| 5.972973
| 0.648649
| 0.325792
| 0.208145
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047273
| 0.13522
| 318
| 17
| 60
| 18.705882
| 0.756364
| 0.339623
| 0
| 0
| 0
| 0
| 0.203046
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
9c6ff5ec92af3964d63b36bb7506b6b9de7b4fcf
| 162
|
py
|
Python
|
airflow/dags/python_callables/compliance.py
|
ryapric/boilerplate
|
e3c79c8c09b65ebfbb621020696eb6ae2fc46bca
|
[
"MIT"
] | 9
|
2018-12-16T07:02:59.000Z
|
2021-11-16T13:32:24.000Z
|
airflow/dags/python_callables/compliance.py
|
ryapric/boilerplate
|
e3c79c8c09b65ebfbb621020696eb6ae2fc46bca
|
[
"MIT"
] | null | null | null |
airflow/dags/python_callables/compliance.py
|
ryapric/boilerplate
|
e3c79c8c09b65ebfbb621020696eb6ae2fc46bca
|
[
"MIT"
] | null | null | null |
# You can import and use this in a DAG in the parent folder like usual in
# Python, i.e. `import python_callables.compliance`
def check_port_22_open():
    """Placeholder compliance callable; currently a no-op returning None."""
    return None
| 27
| 73
| 0.746914
| 29
| 162
| 4.034483
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.185185
| 162
| 5
| 74
| 32.4
| 0.871212
| 0.746914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
9c7904ca2fb1815a84d7c446a0fe5ba54a4d2bfe
| 9,719
|
py
|
Python
|
pyhealth/test/test_model_sequence.py
|
rkalahasty/PyHealth
|
1ee0859d8d39a7fc6f8df48ef8d2bf6c17dcf4a5
|
[
"BSD-2-Clause"
] | 485
|
2020-08-03T20:04:21.000Z
|
2022-02-25T13:35:43.000Z
|
pyhealth/test/test_model_sequence.py
|
rkalahasty/PyHealth
|
1ee0859d8d39a7fc6f8df48ef8d2bf6c17dcf4a5
|
[
"BSD-2-Clause"
] | 6
|
2020-08-06T01:07:45.000Z
|
2021-10-15T21:49:42.000Z
|
pyhealth/test/test_model_sequence.py
|
rkalahasty/PyHealth
|
1ee0859d8d39a7fc6f8df48ef8d2bf6c17dcf4a5
|
[
"BSD-2-Clause"
] | 98
|
2020-08-04T01:04:38.000Z
|
2022-02-09T10:36:03.000Z
|
import unittest
import numpy as np
import torch
import os
import shutil
from pyhealth.models.sequence.dipole import Dipole
from pyhealth.models.sequence.lstm import LSTM
from pyhealth.models.sequence.gru import GRU
from pyhealth.models.sequence.embedgru import EmbedGRU
from pyhealth.models.sequence.retain import Retain
from pyhealth.models.sequence.raim import RAIM
from pyhealth.models.sequence.tlstm import tLSTM
from pyhealth.models.sequence.stagenet import StageNet
from pyhealth.models.sequence.xgboost_seq import XGBoostSequence
from pyhealth.models.sequence.rf import RandomForest
from pyhealth.data.expdata_generator import sequencedata as expdata_generator
from pyhealth.evaluation.evaluator import func
import sys
if sys.version_info >= (3, 6):
import zipfile
else:
import zipfile36 as zipfile
class TestSequentialModel(unittest.TestCase):
expdata_id = 'test.sequence.model'
def test_01(self):
if os.path.exists('./experiments_data') is False:
os.mkdir('./experiments_data')
if os.path.exists('./datasets/mimic') is False:
z = zipfile.ZipFile("./datasets/mimic.zip", "r")
seq_x = []
label_y = []
for filename in z.namelist( ):
z.extract(filename,'./datasets')
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.get_exp_data(sel_task='mortality', data_root='./datasets/mimic')
def test_02_lstm_cpu(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.lstm.gpu'
clf = LSTM(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=False,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_lstm_gpu(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.lstm.cpu'
clf = LSTM(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_gru(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.gru'
clf = GRU(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_embedgru(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.embedgru'
clf = EmbedGRU(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_dipole(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.dipole'
clf = Dipole(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_retain(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.retain'
clf = Retain(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_raim(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.raim'
clf = RAIM(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_tlstm(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.tlstm'
clf = tLSTM(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_stagenet(self):
cur_dataset = expdata_generator(self.expdata_id)
cur_dataset.load_exp_data()
expmodel_id = 'test.stagenet'
clf = StageNet(expmodel_id=expmodel_id,
n_batchsize=20,
use_gpu=True,
n_epoch=10)
clf.fit(cur_dataset.train, cur_dataset.valid)
clf.load_model()
clf.inference(cur_dataset.test)
pred_results = clf.get_results()
assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
assert True not in np.isnan(pred_results['hat_y']).tolist()
assert True not in np.isnan(pred_results['hat_y']*0).tolist()
def test_02_xgboost(self):
    """Train, reload and run inference with the XGBoost sequence model, then sanity-check predictions.

    Checks that predictions align with labels in shape and contain no
    NaNs or infinities.
    """
    cur_dataset = expdata_generator(self.expdata_id)
    cur_dataset.load_exp_data()
    expmodel_id = 'test.xgboost'
    clf = XGBoostSequence(expmodel_id=expmodel_id)
    clf.fit(cur_dataset.train, cur_dataset.valid)
    clf.load_model()
    clf.inference(cur_dataset.test)
    pred_results = clf.get_results()
    # Predictions must have the same shape as the ground-truth labels.
    assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
    # BUG FIX: ``True not in np.isnan(arr).tolist()`` is vacuous for
    # multi-dimensional arrays (nested lists never contain True at the top
    # level). Use .any() to actually detect NaNs.
    assert not np.isnan(pred_results['hat_y']).any()
    # Multiplying by zero maps +/-inf to NaN, so this also rejects infinities.
    assert not np.isnan(pred_results['hat_y'] * 0).any()
def test_02_rm(self):
    """Train, reload and run inference with the RandomForest model, then sanity-check predictions.

    Checks that predictions align with labels in shape and contain no
    NaNs or infinities.
    """
    cur_dataset = expdata_generator(self.expdata_id)
    cur_dataset.load_exp_data()
    expmodel_id = 'test.randomforest'
    clf = RandomForest(expmodel_id=expmodel_id)
    clf.fit(cur_dataset.train, cur_dataset.valid)
    clf.load_model()
    clf.inference(cur_dataset.test)
    pred_results = clf.get_results()
    # Predictions must have the same shape as the ground-truth labels.
    assert np.shape(pred_results['hat_y']) == np.shape(pred_results['y'])
    # BUG FIX: ``True not in np.isnan(arr).tolist()`` is vacuous for
    # multi-dimensional arrays (nested lists never contain True at the top
    # level). Use .any() to actually detect NaNs.
    assert not np.isnan(pred_results['hat_y']).any()
    # Multiplying by zero maps +/-inf to NaN, so this also rejects infinities.
    assert not np.isnan(pred_results['hat_y'] * 0).any()
def test_03_delete(self):
    """Remove every artifact directory created by the preceding tests."""
    shutil.rmtree(os.path.join('./experiments_data', self.expdata_id))
    # One experiment-record directory per model exercised above; removed in
    # the same order as before.
    record_ids = [
        'test.lstm.cpu', 'test.lstm.gpu', 'test.gru', 'test.embedgru',
        'test.dipole', 'test.retain', 'test.raim', 'test.tlstm',
        'test.stagenet', 'test.xgboost', 'test.randomforest',
    ]
    for record_id in record_ids:
        shutil.rmtree(os.path.join('./experiments_records', record_id))
| 42.627193
| 84
| 0.641527
| 1,300
| 9,719
| 4.545385
| 0.083846
| 0.096463
| 0.078186
| 0.083771
| 0.776612
| 0.776612
| 0.771027
| 0.771027
| 0.704011
| 0.679472
| 0
| 0.010414
| 0.239222
| 9,719
| 227
| 85
| 42.814978
| 0.788748
| 0
| 0
| 0.618357
| 0
| 0
| 0.083548
| 0.023768
| 0
| 0
| 0
| 0
| 0.15942
| 1
| 0.062802
| false
| 0
| 0.096618
| 0
| 0.169082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
92bc404056bf19d5f0509ac81d1d5a51e65c3951
| 290
|
py
|
Python
|
pintFoam/parareal/__init__.py
|
ParallelWindfarms/cylinder
|
67cbc876e99440e7a8eed1474d0c387918edf479
|
[
"Apache-2.0"
] | null | null | null |
pintFoam/parareal/__init__.py
|
ParallelWindfarms/cylinder
|
67cbc876e99440e7a8eed1474d0c387918edf479
|
[
"Apache-2.0"
] | 13
|
2020-07-16T12:32:04.000Z
|
2021-11-18T09:17:57.000Z
|
pintFoam/parareal/__init__.py
|
ParallelWindfarms/cylinder
|
67cbc876e99440e7a8eed1474d0c387918edf479
|
[
"Apache-2.0"
] | null | null | null |
# ~\~ language=Python filename=pintFoam/parareal/__init__.py
# ~\~ begin <<lit/parareal.md|pintFoam/parareal/__init__.py>>[0]
# Package entry point: re-exports the public API assembled from the
# sibling modules of the literate-programming source (lit/parareal.md).
from .tabulate_solution import tabulate
from .parareal import parareal
from . import abstract
# NOTE(review): "schedule" is listed in __all__ but no name ``schedule`` is
# imported or defined in this module, so ``from pintFoam.parareal import *``
# would raise AttributeError. Confirm whether an import (e.g.
# ``from .schedule import schedule``) is missing, or drop the entry.
__all__ = ["tabulate", "parareal", "schedule", "abstract"]
# ~\~ end
| 32.222222
| 64
| 0.737931
| 34
| 290
| 5.911765
| 0.558824
| 0.159204
| 0.199005
| 0.218905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003846
| 0.103448
| 290
| 8
| 65
| 36.25
| 0.769231
| 0.444828
| 0
| 0
| 0
| 0
| 0.203822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
92c65347ca8f6d610c45126b2e9d6c434709bc39
| 20
|
py
|
Python
|
checkov/version.py
|
0xflotus/checkov
|
7d260c4a68b57b929cb789b505e01105a3cc9f98
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
0xflotus/checkov
|
7d260c4a68b57b929cb789b505e01105a3cc9f98
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
0xflotus/checkov
|
7d260c4a68b57b929cb789b505e01105a3cc9f98
|
[
"Apache-2.0"
] | null | null | null |
# Package version identifier (single source of truth for this module).
version = '2.0.740'
| 10
| 19
| 0.6
| 4
| 20
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
92ec5c3c67deb55a0cf9dea43612214f445ffa3d
| 35,173
|
py
|
Python
|
tests/test_views_login.py
|
jazzband/django-two-factor-auth
|
29f217d62fd62d0ab97d989eb0214deccb3e7276
|
[
"MIT"
] | 65
|
2021-12-30T16:58:11.000Z
|
2022-03-28T21:36:51.000Z
|
tests/test_views_login.py
|
jazzband/django-two-factor-auth
|
29f217d62fd62d0ab97d989eb0214deccb3e7276
|
[
"MIT"
] | 68
|
2021-12-29T19:48:40.000Z
|
2022-03-31T10:51:12.000Z
|
tests/test_views_login.py
|
jazzband/django-two-factor-auth
|
29f217d62fd62d0ab97d989eb0214deccb3e7276
|
[
"MIT"
] | 21
|
2021-12-30T16:58:14.000Z
|
2022-03-30T07:10:23.000Z
|
import json
from importlib import import_module
from time import sleep
from unittest import mock
from django.conf import settings
from django.shortcuts import resolve_url
from django.test import RequestFactory, TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django_otp import DEVICE_ID_SESSION_KEY
from django_otp.oath import totp
from django_otp.util import random_hex
from two_factor.views.core import LoginView
from .utils import UserMixin, totp_str
class LoginTest(UserMixin, TestCase):
    """Tests for the two-factor login wizard (``two_factor:login``).

    Covers the auth step, OTP token/backup-token steps, redirect handling,
    session bookkeeping and timeout behavior of ``LoginView``.
    """

    def _post(self, data=None):
        """POST ``data`` to the two-factor login view."""
        return self.client.post(reverse('two_factor:login'), data=data)

    def test_form(self):
        response = self.client.get(reverse('two_factor:login'))
        self.assertContains(response, 'Password:')

    def test_invalid_login(self):
        response = self._post({'auth-username': 'unknown',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Please enter a correct')
        self.assertContains(response, 'and password.')

    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_valid_login(self, mock_signal):
        self.create_user()
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))

        # No signal should be fired for non-verified user logins.
        self.assertFalse(mock_signal.called)

    def test_valid_login_with_custom_redirect(self):
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('two_factor:login'), 'next=' + redirect_url),
            {'auth-username': 'bouke@example.com',
             'auth-password': 'secret',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url)

    def test_valid_login_with_custom_post_redirect(self):
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth',
                               'next': redirect_url})
        self.assertRedirects(response, redirect_url)

    def test_valid_login_with_redirect_field_name(self):
        redirect_url = reverse('two_factor:setup')
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-field-name-login'), 'next_page=' + redirect_url),
            {'auth-username': 'bouke@example.com',
             'auth-password': 'secret',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url)

    def test_valid_login_with_allowed_external_redirect(self):
        redirect_url = 'https://test.allowed-success-url.com'
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-allowed-success-url-login'), 'next=' + redirect_url),
            {'auth-username': 'bouke@example.com',
             'auth-password': 'secret',
             'login_view-current_step': 'auth'})
        self.assertRedirects(response, redirect_url, fetch_redirect_response=False)

    def test_valid_login_with_disallowed_external_redirect(self):
        redirect_url = 'https://test.disallowed-success-url.com'
        self.create_user()
        response = self.client.post(
            '%s?%s' % (reverse('custom-allowed-success-url-login'), 'next=' + redirect_url),
            {'auth-username': 'bouke@example.com',
             'auth-password': 'secret',
             'login_view-current_step': 'auth'})
        # Disallowed hosts fall back to the default profile redirect.
        self.assertRedirects(response, reverse('two_factor:profile'), fetch_redirect_response=False)

    @mock.patch('two_factor.views.core.time')
    def test_valid_login_primary_key_stored(self, mock_time):
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        user.totpdevice_set.create(name='default',
                                   key=random_hex())
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        # authentication_time is stored truncated to whole seconds.
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)

    @mock.patch('two_factor.views.core.time')
    def test_valid_login_post_auth_session_clear_of_form_data(self, mock_time):
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        user.totpdevice_set.create(name='default',
                                   key=random_hex())
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(self.client.session['wizard_login_view']['step'], 'token')
        # The submitted auth credentials must not linger in the session.
        self.assertEqual(self.client.session['wizard_login_view']['step_data'], {'auth': None})
        self.assertEqual(self.client.session['wizard_login_view']['step_files'], {'auth': {}})
        self.assertEqual(self.client.session['wizard_login_view']['validated_step_data'], {})

    @mock.patch('two_factor.views.core.logger')
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_expired(self, mock_time, mock_logger):
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)

        # Advance mocked time past the login timeout before the token step.
        mock_time.time.return_value = 20345.12
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'Token:')
        self.assertContains(response, 'Password:')
        self.assertContains(response, 'Your session has timed out. Please login again.')

        # Check that a message was logged.
        mock_logger.info.assert_called_with(
            "User's authentication flow has timed out. The user "
            "has been redirected to the initial auth form.")

    @override_settings(TWO_FACTOR_LOGIN_TIMEOUT=0)
    @mock.patch('two_factor.views.core.time')
    def test_valid_login_no_timeout(self, mock_time):
        mock_time.time.return_value = 12345.12
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertEqual(self.client.session['wizard_login_view']['user_pk'], str(user.pk))
        self.assertEqual(
            self.client.session['wizard_login_view']['user_backend'],
            'django.contrib.auth.backends.ModelBackend')
        self.assertEqual(self.client.session['wizard_login_view']['authentication_time'], 12345)

        # TIMEOUT=0 disables expiry, so the delayed token step still succeeds.
        mock_time.time.return_value = 20345.12
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        self.assertEqual(self.client.session['_auth_user_id'], str(user.pk))

    def test_valid_login_with_redirect_authenticated_user(self):
        user = self.create_user()
        response = self.client.get(
            reverse('custom-redirect-authenticated-user-login')
        )
        self.assertEqual(response.status_code, 200)
        self.client.force_login(user)
        response = self.client.get(
            reverse('custom-redirect-authenticated-user-login')
        )
        self.assertRedirects(response, reverse('two_factor:profile'))

    def test_valid_login_with_redirect_authenticated_user_loop(self):
        """A redirect pointing back at the login view must raise rather than loop."""
        redirect_url = reverse('custom-redirect-authenticated-user-login')
        user = self.create_user()
        self.client.force_login(user)
        with self.assertRaises(ValueError):
            self.client.get(
                '%s?%s' % (reverse('custom-redirect-authenticated-user-login'), 'next=' + redirect_url),
            )

    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_with_generator(self, mock_signal):
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertContains(response, 'autofocus="autofocus"')
        self.assertContains(response, 'pattern="[0-9]*"')
        self.assertContains(response, 'autocomplete="one-time-code"')

        # A wrong token is rejected with a form error.
        response = self._post({'token-otp_token': '123456',
                               'login_view-current_step': 'token'})
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})

        # reset throttle because we're not testing that
        device.throttle_reset()
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
        self.assertEqual(device.persistent_id,
                         self.client.session.get(DEVICE_ID_SESSION_KEY))

        # Check that the signal was fired.
        mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)

    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_throttle_with_generator(self, mock_signal):
        user = self.create_user()
        device = user.totpdevice_set.create(name='default',
                                            key=random_hex())
        self._post({'auth-username': 'bouke@example.com',
                    'auth-password': 'secret',
                    'login_view-current_step': 'auth'})

        # throttle device
        device.throttle_increment()
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token'})
        # Even a correct token is rejected while the device is throttled.
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})

    @mock.patch('two_factor.gateways.fake.Fake')
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    @override_settings(
        TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake',
    )
    def test_with_backup_phone(self, mock_signal, fake):
        user = self.create_user()
        # Run the whole flow with both 6- and 8-digit token settings.
        for no_digits in (6, 8):
            with self.settings(TWO_FACTOR_TOTP_DIGITS=no_digits):
                user.totpdevice_set.create(name='default', key=random_hex(),
                                           digits=no_digits)
                device = user.phonedevice_set.create(name='backup', number='+31101234567',
                                                     method='sms',
                                                     key=random_hex())

                # Backup phones should be listed on the login form
                response = self._post({'auth-username': 'bouke@example.com',
                                       'auth-password': 'secret',
                                       'login_view-current_step': 'auth'})
                self.assertContains(response, 'Send text message to +31 ** *** **67')

                # Ask for challenge on invalid device
                response = self._post({'auth-username': 'bouke@example.com',
                                       'auth-password': 'secret',
                                       'challenge_device': 'MALICIOUS/INPUT/666'})
                self.assertContains(response, 'Send text message to +31 ** *** **67')

                # Ask for SMS challenge
                response = self._post({'auth-username': 'bouke@example.com',
                                       'auth-password': 'secret',
                                       'challenge_device': device.persistent_id})
                self.assertContains(response, 'We sent you a text message')
                test_call_kwargs = fake.return_value.send_sms.call_args[1]
                self.assertEqual(test_call_kwargs['device'], device)
                # Token must match TOTP at the current or previous time step.
                self.assertIn(test_call_kwargs['token'],
                              [str(totp(device.bin_key, digits=no_digits, drift=i)).zfill(no_digits)
                               for i in [-1, 0]])

                # Ask for phone challenge
                device.method = 'call'
                device.save()
                response = self._post({'auth-username': 'bouke@example.com',
                                       'auth-password': 'secret',
                                       'challenge_device': device.persistent_id})
                self.assertContains(response, 'We are calling your phone right now')
                test_call_kwargs = fake.return_value.make_call.call_args[1]
                self.assertEqual(test_call_kwargs['device'], device)
                self.assertIn(test_call_kwargs['token'],
                              [str(totp(device.bin_key, digits=no_digits, drift=i)).zfill(no_digits)
                               for i in [-1, 0]])

                # Valid token should be accepted.
                response = self._post({'token-otp_token': totp_str(device.bin_key),
                                       'login_view-current_step': 'token'})
                self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
                self.assertEqual(device.persistent_id,
                                 self.client.session.get(DEVICE_ID_SESSION_KEY))

                # Check that the signal was fired.
                mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)

    @mock.patch('two_factor.views.core.signals.user_verified.send')
    def test_with_backup_token(self, mock_signal):
        user = self.create_user()
        user.totpdevice_set.create(name='default', key=random_hex())
        device = user.staticdevice_set.create(name='backup')
        device.token_set.create(token='abcdef123')

        # Backup phones should be listed on the login form
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Backup Token')

        # Should be able to go to backup tokens step in wizard
        response = self._post({'wizard_goto_step': 'backup'})
        self.assertContains(response, 'backup tokens')

        # Wrong codes should not be accepted
        response = self._post({'backup-otp_token': 'WRONG',
                               'login_view-current_step': 'backup'})
        self.assertEqual(response.context_data['wizard']['form'].errors,
                         {'__all__': ['Invalid token. Please make sure you '
                                      'have entered it correctly.']})
        # static devices are throttled
        device.throttle_reset()

        # Valid token should be accepted.
        response = self._post({'backup-otp_token': 'abcdef123',
                               'login_view-current_step': 'backup'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))

        # Check that the signal was fired.
        mock_signal.assert_called_with(sender=mock.ANY, request=mock.ANY, user=user, device=device)

    @mock.patch('two_factor.views.utils.logger')
    def test_reset_wizard_state(self, mock_logger):
        self.create_user()
        self.enable_otp()

        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

        # A GET request resets the state of the wizard...
        self.client.get(reverse('two_factor:login'))

        # ...so there is no user in this request anymore. As the login flow
        # depends on a user being present, this should be handled gracefully.
        response = self._post({'token-otp_token': '123456',
                               'login_view-current_step': 'token'})
        self.assertContains(response, 'Password:')

        # Check that a message was logged.
        mock_logger.warning.assert_called_with(
            "Requested step '%s' is no longer valid, returning to last valid "
            "step in the wizard.",
            'token')

    @mock.patch('two_factor.views.utils.logger')
    def test_login_different_user_on_existing_session(self, mock_logger):
        """
        This test reproduces the issue where a user is logged in and a different user
        attempts to login.
        """
        self.create_user()
        self.create_user(username='vedran@example.com')

        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))

        response = self._post({'auth-username': 'vedran@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))

    def test_missing_management_data(self):
        # missing management data
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret'})

        # view should return HTTP 400 Bad Request
        self.assertEqual(response.status_code, 400)

    def test_no_password_in_session(self):
        """The raw password must never be persisted in the session."""
        self.create_user()
        self.enable_otp()

        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

        session_contents = json.dumps(list(self.client.session.items()))
        self.assertNotIn('secret', session_contents)

    def test_login_different_user_with_otp_on_existing_session(self):
        self.create_user()
        vedran_user = self.create_user(username='vedran@example.com')
        device = vedran_user.totpdevice_set.create(name='default', key=random_hex())

        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response,
                             resolve_url(settings.LOGIN_REDIRECT_URL))

        response = self._post({'auth-username': 'vedran@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response,
                             resolve_url(settings.LOGIN_REDIRECT_URL))

    def test_login_view_is_step_visible(self):
        """Drive ``LoginView`` directly (no HTTP) to check step visibility."""
        request = RequestFactory().get(reverse('login'))
        engine = import_module(settings.SESSION_ENGINE)
        request.session = engine.SessionStore(None)
        login_view = LoginView(**LoginView.get_initkwargs())
        login_view.setup(request)
        login_view.dispatch(request)

        # Initially, any step is visible
        for step, form_class in login_view.form_list.items():
            self.assertTrue(login_view.is_step_visible(step, form_class))

        login_view.storage.validated_step_data['auth'] = {'username': 'joe', 'password': 'any'}
        login_view.storage.validated_step_data['token'] = {'otp_token': '123456'}

        # Once token was entered, the token step is no longer visible
        for step, form_class in login_view.form_list.items():
            if step == 'token':
                self.assertFalse(login_view.is_step_visible(step, form_class))
            else:
                self.assertTrue(login_view.is_step_visible(step, form_class))
class BackupTokensTest(UserMixin, TestCase):
    """Tests for backup-token generation plus cancel-URL behavior of the login view.

    NOTE(review): the cancel-URL tests below exercise ``two_factor:login``,
    not the backup-token view — confirm whether they belong in a login test
    class instead (class membership is ambiguous in this source's flattened
    indentation).
    """

    def setUp(self):
        super().setUp()
        self.create_user()
        self.enable_otp()
        self.login_user()

    def test_empty(self):
        response = self.client.get(reverse('two_factor:backup_tokens'))
        self.assertContains(response, 'You don\'t have any backup codes yet.')

    def test_generate(self):
        url = reverse('two_factor:backup_tokens')

        response = self.client.post(url)
        self.assertRedirects(response, url)

        response = self.client.get(url)
        first_set = set([token.token for token in
                         response.context_data['device'].token_set.all()])
        self.assertNotContains(response, 'You don\'t have any backup codes '
                                         'yet.')
        self.assertEqual(10, len(first_set))

        # Generating the tokens should give a fresh set
        self.client.post(url)
        response = self.client.get(url)
        second_set = set([token.token for token in
                          response.context_data['device'].token_set.all()])
        self.assertNotEqual(first_set, second_set)

    def test_no_cancel_url(self):
        response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertNotIn('cancel_url', response.context.keys())

    @override_settings(LOGOUT_REDIRECT_URL='custom-field-name-login')
    def test_cancel_redirects_to_logout_redirect_url(self):
        response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['cancel_url'], reverse('custom-field-name-login'))

    @override_settings(LOGOUT_URL='custom-field-name-login')
    def test_logout_url_warning_raised(self):
        # LOGOUT_URL is deprecated in favour of LOGOUT_REDIRECT_URL.
        with self.assertWarns(DeprecationWarning):
            response = self.client.get(reverse('two_factor:login'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['cancel_url'], reverse('custom-field-name-login'))
@override_settings(ROOT_URLCONF='tests.urls_admin')
class RememberLoginTest(UserMixin, TestCase):
    """Tests for the "remember this device" cookie issued after OTP login.

    Each test logs in with password + TOTP token, then checks whether the
    signed ``remember-cookie_*`` cookie lets a later login skip the token
    step (or not, when absent/expired/tampered/throttled).
    """

    def setUp(self):
        super().setUp()
        self.user = self.create_user()
        self.device = self.user.totpdevice_set.create(name='default', key=random_hex())

    def _post(self, data=None):
        """POST ``data`` to the two-factor login view."""
        return self.client.post(reverse('two_factor:login'), data=data)

    def set_invalid_remember_cookie(self):
        # Corrupt the remember cookie's signature, keeping the original value
        # around so restore_remember_cookie() can undo the tampering.
        for cookie in self.client.cookies:
            if cookie.startswith("remember-cookie_"):
                self._restore_remember_cookie_data = dict(name=cookie, value=self.client.cookies[cookie].value)
                self.client.cookies[cookie] = self.client.cookies[cookie].value[:-5] + "0" * 5  # an invalid key

    def restore_remember_cookie(self):
        self.client.cookies[self._restore_remember_cookie_data['name']] = self._restore_remember_cookie_data['value']

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_with_remember(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response, reverse('two_factor:profile'), fetch_redirect_response=False)
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))

        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)

        # Login without token
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 200)

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 3)
    def test_with_remember_label_3_min(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 3 minutes')

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 4)
    def test_with_remember_label_4_hours(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 4 hours')

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60 * 24 * 5)
    def test_with_remember_label_5_days(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'ask again on this device for 5 days')

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_without_remember(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token'})
        self.assertEqual(0, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))

        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)

        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=1)
    def test_expired(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))

        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)

        # Wait to expire
        sleep(1)

        # Login but expired remember cookie
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        self.assertFalse(any(
            key.startswith('remember-cookie_') and cookie.value
            for key, cookie in self.client.cookies.items()
        ))

    @override_settings(TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60)
    def test_wrong_signature(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))

        # Logout
        self.client.get(reverse('logout'))
        response = self.client.get('/secure/raises/')
        self.assertEqual(response.status_code, 403)
        self.set_invalid_remember_cookie()

        # Login but expired remember cookie
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

    @override_settings(
        TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60,
        OTP_HOTP_THROTTLE_FACTOR=60 * 60,
        OTP_TOTP_THROTTLE_FACTOR=60 * 60,
    )
    def test_remember_token_throttling(self):
        # Login
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

        # Enter token
        response = self._post({'token-otp_token': totp_str(self.device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertEqual(1, len([cookie for cookie in response.cookies if cookie.startswith('remember-cookie_')]))

        # Logout
        self.client.get(reverse('logout'))

        # Login having an invalid remember cookie
        self.set_invalid_remember_cookie()
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

        # Login with valid remember cookie but throttled
        self.client = self.client_class()
        self.restore_remember_cookie()
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'Token:')

        # Reset throttling
        self.device.throttle_reset()

        # Login with valid remember cookie
        self.client = self.client_class()
        self.restore_remember_cookie()
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        # NOTE(review): other tests use resolve_url(settings.LOGIN_REDIRECT_URL)
        # here; reverse() only works if LOGIN_REDIRECT_URL is a URL name —
        # confirm this inconsistency is intentional.
        self.assertRedirects(response, reverse(settings.LOGIN_REDIRECT_URL), fetch_redirect_response=False)

    @mock.patch('two_factor.gateways.fake.Fake')
    @mock.patch('two_factor.views.core.signals.user_verified.send')
    @override_settings(
        TWO_FACTOR_SMS_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_CALL_GATEWAY='two_factor.gateways.fake.Fake',
        TWO_FACTOR_REMEMBER_COOKIE_AGE=60 * 60,
    )
    def test_phonedevice_with_remember_cookie(self, mock_signal, fake):
        # Swap the TOTP device for a phone (SMS) device.
        self.user.totpdevice_set.first().delete()
        device = self.user.phonedevice_set.create(name='default', number='+31101234567', method='sms')

        # Ask for SMS challenge
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertContains(response, 'We sent you a text message')
        test_call_kwargs = fake.return_value.send_sms.call_args[1]
        self.assertEqual(test_call_kwargs['device'], device)

        # Valid token should be accepted.
        response = self._post({'token-otp_token': totp_str(device.bin_key),
                               'login_view-current_step': 'token',
                               'token-remember': 'on'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))

        # Logout
        self.client.get(reverse('logout'))

        # Ask for SMS challenge — the remember cookie skips the token step.
        response = self._post({'auth-username': 'bouke@example.com',
                               'auth-password': 'secret',
                               'login_view-current_step': 'auth'})
        self.assertRedirects(response, resolve_url(settings.LOGIN_REDIRECT_URL))
| 46.710491
| 117
| 0.600006
| 3,831
| 35,173
| 5.285565
| 0.090055
| 0.035113
| 0.042669
| 0.053336
| 0.799299
| 0.758556
| 0.730851
| 0.718455
| 0.68591
| 0.659835
| 0
| 0.008808
| 0.280215
| 35,173
| 752
| 118
| 46.772606
| 0.79101
| 0.044409
| 0
| 0.676732
| 0
| 0
| 0.223796
| 0.073167
| 0
| 0
| 0
| 0
| 0.211368
| 1
| 0.078153
| false
| 0.085258
| 0.026643
| 0.003552
| 0.113677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
130bb420a4a416ea6e84f458a1e9fe6bbb73c916
| 167
|
py
|
Python
|
Task_13.py
|
jyoti140220/python_websraping
|
454d8dda66b99f5a209500d2a676855c98e8a92d
|
[
"MIT"
] | null | null | null |
Task_13.py
|
jyoti140220/python_websraping
|
454d8dda66b99f5a209500d2a676855c98e8a92d
|
[
"MIT"
] | null | null | null |
Task_13.py
|
jyoti140220/python_websraping
|
454d8dda66b99f5a209500d2a676855c98e8a92d
|
[
"MIT"
] | null | null | null |
"""Merge the movie details from task_4 with the cast details from task_12
and pretty-print the combined mapping."""
from pprint import pprint

from task_4 import movie_detials
from task_12 import movie_cast_detials

# NOTE: the key "caste" (and the misspelled imported names) are kept exactly
# as-is — they are part of the data contract shared with the other tasks.
movie_detials["caste"] = movie_cast_detials
# The pointless intermediate alias `h` was removed; print the dict directly.
pprint(movie_detials)
| 20.875
| 41
| 0.856287
| 28
| 167
| 4.785714
| 0.392857
| 0.268657
| 0.238806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019868
| 0.095808
| 167
| 8
| 42
| 20.875
| 0.86755
| 0
| 0
| 0
| 0
| 0
| 0.029762
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1315bcf850700ec0f163b154658ecdde5e0a41c8
| 62
|
py
|
Python
|
app/component_b/command/tests/fixtures.py
|
mirevsky/django-grpc-cqrs-kafka-template
|
31af0bf5d15e393837f937cace90f82a7de26355
|
[
"MIT"
] | 2
|
2022-01-10T19:52:36.000Z
|
2022-03-19T07:34:54.000Z
|
app/component_b/command/tests/fixtures.py
|
mirevsky/django-grpc-cqrs-kafka-template
|
31af0bf5d15e393837f937cace90f82a7de26355
|
[
"MIT"
] | null | null | null |
app/component_b/command/tests/fixtures.py
|
mirevsky/django-grpc-cqrs-kafka-template
|
31af0bf5d15e393837f937cace90f82a7de26355
|
[
"MIT"
] | null | null | null |
import pytest
from component_b.common.tests.fixtures import *
| 20.666667
| 47
| 0.83871
| 9
| 62
| 5.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 62
| 3
| 47
| 20.666667
| 0.910714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
132eed4cd21c2797b3e9665c8a9f0dcad70c8eba
| 88,895
|
py
|
Python
|
test/integration/component/test_add_remove_network.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 1,131
|
2015-01-08T18:59:06.000Z
|
2022-03-29T11:31:10.000Z
|
test/integration/component/test_add_remove_network.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 5,908
|
2015-01-13T15:28:37.000Z
|
2022-03-31T20:31:07.000Z
|
test/integration/component/test_add_remove_network.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 1,083
|
2015-01-05T01:16:52.000Z
|
2022-03-31T12:14:10.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
P1 tests for Add Remove Network to VM
Test Plan: https://cwiki.apache.org/confluence/display/CLOUDSTACK/Add+Remove+networks+to+VM+Test+cases
Issue Link: https://issues.apache.org/jira/browse/CLOUDSTACK-645
Feature Specifications: https://cwiki.apache.org/confluence/display/CLOUDSTACK/Add+Remove+Networks+to+VMs
"""
import random
import time
import unittest
from ddt import ddt, data
from marvin.cloudstackAPI import (addNicToVirtualMachine,
removeNicFromVirtualMachine,
updateDefaultNicForVirtualMachine)
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.codes import PASS
from marvin.lib.base import (
Account,
Domain,
ServiceOffering,
VirtualMachine,
NetworkOffering,
Network,
VpcOffering,
VPC,
PublicIPAddress,
FireWallRule,
NATRule
)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_virtual_machines,
list_events,
list_zones,
get_free_vlan,
update_resource_limit,
list_nat_rules
)
from marvin.lib.utils import (validateList,
random_gen,
get_hypervisor_type)
# Import Local Modules
from nose.plugins.attrib import attr
class Services:
    """Test Add Remove Network Services

    Container for the test data used by the add/remove-network test classes
    in this module. Instantiating it builds ``self.services``, a nested dict
    that the tests read and patch at runtime (zone ids, vlans, ip ranges).
    """

    def __init__(self):
        # Single source of test data; keys are looked up as
        # self.services["..."] throughout this module.
        self.services = {
            "sleep": 60,  # seconds to wait for async operations to settle
            "ostype": "CentOS 5.3 (64-bit)",
            # Cent OS 5.3 (64 bit)
            # Isolated guest network offering with basic L3 services.
            "isolated_network_offering": {
                "name": 'Test Isolated Network offering',
                "displaytext": 'Test Isolated Network offering',
                "guestiptype": 'Isolated',
                "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "serviceProviderList": {
                    "Dhcp": 'VirtualRouter',
                    "Dns": 'VirtualRouter',
                    "SourceNat": 'VirtualRouter',
                    "PortForwarding": 'VirtualRouter',
                },
            },
            # Shared guest network offering; vlan/ip ranges supplied per test.
            "shared_network_offering": {
                "name": 'Test Shared Network Offering',
                "displaytext": 'Test Shared Network Offering',
                "guestiptype": 'Shared',
                "supportedservices": 'Dhcp,Dns,UserData',
                "specifyVlan": "True",
                "specifyIpRanges": "True",
                "traffictype": 'GUEST',
                "serviceProviderList": {
                    "Dhcp": 'VirtualRouter',
                    "Dns": 'VirtualRouter',
                    "UserData": 'VirtualRouter'
                },
            },
            # Gateways/ip ranges below are placeholders; setUp()/setUpClass()
            # overwrite them with a randomized 172.16.x.0/24 subnet.
            "shared_network": {
                "name": "Test Shared Network",
                "displaytext": "Test Shared Network",
                "gateway": "172.16.17.1",
                "netmask": "255.255.255.0",
                "startip": "172.16.17.2",
                "endip": "172.16.17.20",
            },
            "shared_network_2": {
                "name": "Test Shared Network",
                "displaytext": "Test Shared Network",
                "gateway": "172.16.18.1",
                "netmask": "255.255.255.0",
                "startip": "172.16.18.2",
                "endip": "172.16.18.20",
            },
            "isolated_network": {
                "name": "Test Isolated Network",
                "displaytext": "Test Isolated Network",
            },
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,
                # in MHz
                "memory": 256,
                # In MBs
            },
            "account": {
                "email": "test@test.com",
                "firstname": "Test_add_remove_network_vm",
                "lastname": "User",
                "username": "test_add_remove_network_vm",
                "password": "password",
            },
            "domain": {
                "name": "Domain_add_nw_to_vm",
            },
            "virtual_machine": {
                "displayname": "testserver",
                "username": "root",  # VM creds for SSH
                "password": "password",
                "ssh_port": 22,
                "hypervisor": 'XenServer',
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
            },
            "vpc_offering": {
                "name": 'VPC off add remove network',
                "displaytext": 'VPC off add remove network',
                "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat,NetworkACL',
            },
            "vpc": {
                "name": "TestVPC add remove network",
                "displaytext": "TestVPC add remove network",
                "cidr": '10.0.0.1/24'
            },
            "natrule": {
                "privateport": 22,
                "publicport": 22,
                "protocol": "TCP"
            },
        }
@ddt
class TestAddNetworkToVirtualMachine(cloudstackTestCase):
    """P1 tests for adding isolated, shared and VPC networks to VMs.

    Fixes over the previous revision: removed a duplicate
    ``cls.services = Services().services`` assignment in ``setUpClass`` and
    corrected failure/debug message typos ("deply" -> "deploy",
    "netwrok" -> "network"). No test behavior is changed.
    """

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestAddNetworkToVirtualMachine, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        hypervisor = get_hypervisor_type(cls.api_client)
        if hypervisor.lower() not in ["xenserver", "kvm"]:
            raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
        # Set Zones and disk offerings
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = template.id
        # Create Accounts & networks
        cls.services["isolated_network"]["zoneid"] = cls.zone.id
        cls.services["shared_network"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
        cls._cleanup.append(cls.service_offering)
        # Shared VM that most tests add/remove nics on; it keeps only its
        # default nic between tests (see tearDown).
        cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"], accountid=cls.account.name,
                                                    domainid=cls.account.domainid, serviceofferingid=cls.service_offering.id,
                                                    mode=cls.zone.networktype)
        cls._cleanup.append(cls.virtual_machine)
        cls.defaultNetworkId = cls.virtual_machine.nic[0].networkid
        cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
        cls._cleanup.append(cls.isolated_network_offering)
        cls.isolated_network_offering.update(cls.api_client, state='Enabled')
        cls.shared_network_offering = NetworkOffering.create(cls.api_client, cls.services["shared_network_offering"])
        cls._cleanup.append(cls.shared_network_offering)
        cls.shared_network_offering.update(cls.api_client, state='Enabled')
        cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
                                              cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
        cls._cleanup.append(cls.isolated_network)
        # Pick a free vlan and a random 172.16.x.0/24 subnet for the shared network.
        cls.services["shared_network"]["vlan"] = get_free_vlan(cls.api_client, cls.zone.id)[1]
        shared_network_subnet_number = random.randrange(1, 254)
        cls.services["shared_network"]["gateway"] = "172.16." + str(shared_network_subnet_number) + ".1"
        cls.services["shared_network"]["startip"] = "172.16." + str(shared_network_subnet_number) + ".2"
        cls.services["shared_network"]["endip"] = "172.16." + str(shared_network_subnet_number) + ".20"
        cls.shared_nw_endip = cls.services["shared_network"]["endip"]
        cls.shared_network = Network.create(cls.api_client, cls.services["shared_network"], cls.account.name,
                                            cls.account.domainid, networkofferingid=cls.shared_network_offering.id)
        cls._cleanup.append(cls.shared_network)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.addednics = []
        # Randomize the shared-network subnets per test to avoid collisions
        # between consecutive test cases.
        shared_network_subnet_number = random.randrange(1, 254)
        self.services["shared_network"]["gateway"] = "172.16." + str(shared_network_subnet_number) + ".1"
        self.services["shared_network"]["startip"] = "172.16." + str(shared_network_subnet_number) + ".2"
        self.services["shared_network"]["endip"] = "172.16." + str(shared_network_subnet_number) + ".20"
        self.services["shared_network_2"]["gateway"] = "172.16." + str(shared_network_subnet_number + 1) + ".1"
        self.services["shared_network_2"]["startip"] = "172.16." + str(shared_network_subnet_number + 1) + ".2"
        self.services["shared_network_2"]["endip"] = "172.16." + str(shared_network_subnet_number + 1) + ".20"
        self.cleanup = []

    def tearDown(self):
        # Best-effort removal of the nics this test added to the shared VM so
        # the next test starts from a VM with only its default nic.
        try:
            for nic in self.addednics:
                self.virtual_machine.remove_nic(self.apiclient, nic.id)
        except Exception as e:
            self.debug("Exception during removal of nics : %s" % e)
        super(TestAddNetworkToVirtualMachine, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        try:
            # Disable Network Offerings
            cls.isolated_network_offering.update(cls.api_client, state='Disabled')
            cls.shared_network_offering.update(cls.api_client, state='Disabled')
        except Exception as e:
            # NOTE(review): cls.debug is an instance method on cloudstackTestCase;
            # calling it through cls only works if the framework exposes it at
            # class level — confirm before relying on this branch.
            cls.debug("Exception during disable of networks : %s" % e)
        super(TestAddNetworkToVirtualMachine, cls).tearDownClass()

    def addNetworkToVm(self, network, vm, ipaddress=None):
        """Add network to VM and check if new nic added in the VM"""
        self.debug("Adding %s Network: %s to virtual machine %s" %
                   (network.type, network.id, vm.id))
        vm.add_nic(self.apiclient, network.id, ipaddress=ipaddress)
        vm_list = list_virtual_machines(self.apiclient, id=vm.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        self.debug("virtual machine nics: %s" % vm_list[0].nic)
        nics = [x for x in vm_list[0].nic if x.networkid == network.id]
        self.debug("Filtered nics list: %s:" % nics)
        # Only the nics added to self.virtual_machine should be added to this list
        # Nics added to their list are removed before execution of next test case because we are using
        # same virtual machine in all test cases, so it is important that the common
        # virtual machine should contain only the default nic whenever new test case
        # execution starts
        if vm.id == self.virtual_machine.id:
            self.addednics.append(nics[-1])
        self.assertTrue(len(nics) == 1, "nics list should contain the nic of added isolated network,\
                        the number of nics for the network should be 1, instead they are %s" %
                        len(nics))
        if ipaddress is not None:
            self.assertEqual(nics[0].ipaddress, ipaddress, "The ip address of nic does not match with \
                            the ip address passed while adding network to vm. ip address of nic is %s \
                            while passed ip address is %s" % (nics[0].ipaddress, ipaddress))
        return

    @attr(tags=["advanced", "dvs"])
    @data("isolated", "shared")
    def test_01_add_nw_running_vm(self, value):
        """Add network to running VM"""
        # 1. Deploy VM in an account
        # 2. Add isolated/shared network to the VM which is in running state
        # Validate the following:
        # 1. New nic is generated for the added network
        # 2. Event NIC.CREATE is generated
        network = None  # The network which we are adding to the vm
        if value == "isolated":
            network = self.isolated_network
        elif value == "shared":
            network = self.shared_network
        if network is None:
            self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
        self.addNetworkToVm(network, self.virtual_machine)
        self.debug("Retrieving the list of events matching 'NIC.CREATE' in account: %s" % self.account.name)
        events = list_events(self.apiclient, account=self.account.name, domainid=self.account.domainid,
                             type='NIC.CREATE')
        event_list_validation_result = validateList(events)
        self.assertEqual(event_list_validation_result[0], PASS, "event list validation failed due to %s" %
                         event_list_validation_result[2])
        self.debug("Events list contains event NIC.CREATE")
        return

    @attr(tags=["advanced", "dvs"])
    @data("isolated", "shared")
    def test_02_add_nw_stopped_vm(self, value):
        """Add network to stopped VM"""
        # 1. Deploy VM in an account
        # 2. Stop the VM
        # 3. Add isolated/shared network to the stopped VM
        # Validate the following:
        # 1. New nic is generated for the added network
        try:
            self.virtual_machine.stop(self.apiclient)
        except Exception as e:
            self.fail("Failed to stop VM: %s" % e)
        network = None  # The network which we are adding to the vm
        if value == "isolated":
            network = self.isolated_network
        elif value == "shared":
            network = self.shared_network
        if network is None:
            self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
        self.addNetworkToVm(network, self.virtual_machine)
        self.debug("Starting Virtual Machine: %s" % self.virtual_machine.id)
        self.virtual_machine.start(self.apiclient)
        return

    @attr(tags=["advanced", "dvs"])
    @data("isolated", "shared")
    def test_03_add_nw_multiple_times(self, value):
        """Add same network multiple times to running VM"""
        # 1. Deploy VM in an account
        # 2. Add isolated/shared network to the VM
        # 3. Try Adding same network again to the VM
        # Validate the following:
        # 1. Adding same network to vm multiple times fails
        network = None  # The network which we are adding to the vm
        if value == "isolated":
            network = self.isolated_network
        elif value == "shared":
            network = self.shared_network
        if network is None:
            self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
        try:
            virtual_machine = VirtualMachine.create(
                self.api_client, self.services["virtual_machine"],
                accountid=self.account.name, domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                mode=self.zone.networktype,
                networkids=[self.defaultNetworkId])
            self.cleanup.append(virtual_machine)
        except Exception as e:
            self.fail("Failed to deploy virtual machine: %s" % e)
        # Adding network to vm for the first time
        self.addNetworkToVm(network, virtual_machine)
        # Trying to add same network to vm for the second time
        with self.assertRaises(Exception) as e:
            self.addNetworkToVm(network, virtual_machine)
        self.debug("Adding same network again failed with exception: %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    @data("isolated")
    def test_04_vpc_nw_running_vm(self, value):
        """Add VPC network to running VM belonging to isolated network"""
        # 1. Deploy VM in an account
        # 2. Add isolated network to the VM
        # 3. Create VPC
        # 4. Try adding VPC to the VM
        # Validate the following:
        # 1. Adding VPC to vm should fail
        try:
            virtual_machine = VirtualMachine.create(
                self.api_client, self.services["virtual_machine"],
                accountid=self.account.name, domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                mode=self.zone.networktype,
                networkids=[self.defaultNetworkId])
            self.cleanup.append(virtual_machine)
        except Exception as e:
            self.fail("Failed to deploy virtual machine: %s" % e)
        network = self.isolated_network
        self.addNetworkToVm(network, virtual_machine)
        self.debug("Creating VPC offering")
        vpc_off = VpcOffering.create(self.api_client, self.services["vpc_offering"])
        self.cleanup.append(vpc_off)
        self.debug("Created VPC offering: %s" % vpc_off.id)
        self.debug("Enabling the VPC offering")
        vpc_off.update(self.apiclient, state='Enabled')
        self.debug("Creating VPC")
        vpc = VPC.create(self.apiclient, self.services["vpc"], vpcofferingid=vpc_off.id, zoneid=self.zone.id,
                         account=self.account.name, domainid=self.account.domainid)
        self.cleanup.append(vpc)
        self.debug("Trying to add VPC to vm belonging to isolated network, this should fail")
        with self.assertRaises(Exception):
            virtual_machine.add_nic(self.apiclient, vpc.id)
        self.debug("Disabling vpc offering: %s" % vpc_off.id)
        vpc_off.update(self.apiclient, state='Disabled')
        return

    @attr(tags=["advanced", "dvs"])
    @data("isolated")
    def test_05_add_vpc_nw_stopped_vm(self, value):
        """Add VPC network to stopped VM belonging to isolated network"""
        # 1. Deploy VM in an account
        # 2. Stop the VM
        # 3. Add isolated network to the VM
        # 4. Create VPC
        # 5. Try adding VPC to the stopped VM
        # Validate the following:
        # 1. Adding VPC to vm should fail
        try:
            self.virtual_machine.stop(self.apiclient)
        except Exception as e:
            self.fail("Failed to stop virtual machine: %s" % e)
        self.addNetworkToVm(self.isolated_network, self.virtual_machine)
        self.debug("Creating VPC offering")
        vpc_off = VpcOffering.create(self.api_client, self.services["vpc_offering"])
        self.cleanup.append(vpc_off)
        self.debug("Created VPC offering: %s" % vpc_off.id)
        self.debug("Enabling the VPC offering")
        vpc_off.update(self.apiclient, state='Enabled')
        self.debug("Creating VPC")
        vpc = VPC.create(self.apiclient, self.services["vpc"], vpcofferingid=vpc_off.id, zoneid=self.zone.id,
                         account=self.account.name, domainid=self.account.domainid)
        self.cleanup.append(vpc)
        self.debug("Trying to add VPC to vm belonging to isolated network, this should fail")
        with self.assertRaises(Exception):
            self.virtual_machine.add_nic(self.apiclient, vpc.id)
        self.debug("Starting virtual machine")
        self.virtual_machine.start(self.apiclient)
        self.debug("Disabling vpc offering: %s" % vpc_off.id)
        vpc_off.update(self.apiclient, state='Disabled')
        return

    @attr(tags=["advanced", "dvs"])
    def test_06_add_nw_ipaddress_running_vm(self):
        """Add network and ip address to running VM"""
        # 1. Deploy VM in an account
        # 2. Add shared network and ip address to this VM
        # Validate the following:
        # 1. New nic gets added for the shared network
        # 2. The newly added nic has the ip address same as
        #    that passed while adding the network
        try:
            virtual_machine = VirtualMachine.create(
                self.api_client, self.services["virtual_machine"],
                accountid=self.account.name, domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                mode=self.zone.networktype,
                networkids=[self.defaultNetworkId])
            self.cleanup.append(virtual_machine)
        except Exception as e:
            self.fail("Failed to deploy virtual machine: %s" % e)
        ipaddress = self.shared_nw_endip
        self.debug("Adding network to vm with ip address %s: " % ipaddress)
        self.addNetworkToVm(self.shared_network, virtual_machine, ipaddress=ipaddress)
        return

    @attr(tags=["advanced", "dvs"])
    def test_10_add_nw_invalid_ipaddress_running_vm(self):
        """Add network with invalid ip address to running VM"""
        # 1. Deploy VM in an account
        # 2. Add shared network with invalid ip address to this VM
        # Validate the following:
        # 1. Adding network to VM should fail because of invalid ip address
        ipaddress = "257.257.257.257"  # Invalid ip address
        self.debug("Adding network to vm with ip address %s: " % ipaddress)
        with self.assertRaises(Exception) as e:
            self.addNetworkToVm(self.shared_network, self.virtual_machine,
                                ipaddress=ipaddress)
        self.debug("API failed with exception: %s" % e.exception)
        return

    # was tags=["advanced", "dvs"],
    # the apiclient that is being used to test this has too many rights?
    @attr(tags=["TODO"])
    @data("isolated", "shared")
    def test_14_add_nw_different_account(self, value):
        """Add network to running VM"""
        # 1. Deploy VM in an account
        # 2. Create new account under same domain and create network in that account
        # 3. Add isolated/shared network belonging to other account to the VM in first account
        # Validate the following:
        # 1. Adding network should fail
        network = None  # The network which we are adding to the vm
        account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        if value == "isolated":
            network = Network.create(self.api_client, self.services["isolated_network"], account.name,
                                     account.domainid, networkofferingid=self.isolated_network_offering.id)
            self.cleanup.append(network)
        elif value == "shared":
            self.services["shared_network_2"]["zoneid"] = self.zone.id
            self.services["shared_network_2"]["vlan"] = get_free_vlan(self.apiclient, self.zone.id)[1]
            network = Network.create(self.api_client, self.services["shared_network_2"], account.name,
                                     account.domainid, networkofferingid=self.shared_network_offering.id)
            self.cleanup.append(network)
        if network is None:
            self.skipTest("Network should not be none. Case not handled for Network of type %s" % value)
        self.debug("Trying to %s network in account %s to a vm in account %s, This should fail" %
                   (network.type, account.name, self.account.name))
        try:
            vm_with_nic = self.virtual_machine.add_nic(self.apiclient, network.id)
            nics = [x for x in vm_with_nic.nic if x.networkid == network.id]
            self.addednics.append(nics[-1])
        except Exception:
            pass
        else:
            self.fail("User was able to add NIC, test failed! This issue has been hit: CLOUDSTACK-10071")
        return

    @attr(tags=["advanced", "dvs"])
    def test_24_add_nw_different_domain(self):
        """Add network to running VM"""
        # 1. Create two domains
        # 2. Create network in one domain and create virtual machine in other domain
        # 3. Add isolated/shared network belonging to one domain to the vm belonging to other domain
        # Validate the following:
        # 1. Adding network should fail
        network = None  # The network which we are adding to the vm
        try:
            self.child_domain_1 = Domain.create(self.apiclient,
                                                services=self.services["domain"],
                                                parentdomainid=self.domain.id)
            self.cleanup.append(self.child_domain_1)
            self.child_do_admin_1 = Account.create(
                self.apiclient,
                self.services["account"],
                admin=True,
                domainid=self.child_domain_1.id
            )
            self.cleanup.append(self.child_do_admin_1)
            self.child_domain_2 = Domain.create(self.apiclient,
                                                services=self.services["domain"],
                                                parentdomainid=self.domain.id)
            self.cleanup.append(self.child_domain_2)
            self.child_do_admin_2 = Account.create(
                self.apiclient,
                self.services["account"],
                admin=True,
                domainid=self.child_domain_2.id)
            self.cleanup.append(self.child_do_admin_2)
        except Exception as e:
            self.fail(e)
        network = Network.create(self.api_client, self.services["isolated_network"], self.child_do_admin_1.name,
                                 self.child_do_admin_1.domainid, networkofferingid=self.isolated_network_offering.id)
        self.cleanup.append(network)
        virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=self.child_do_admin_2.name,
                                                domainid=self.child_do_admin_2.domainid, serviceofferingid=self.service_offering.id,
                                                mode=self.zone.networktype)
        self.cleanup.append(virtual_machine)
        time.sleep(self.services["sleep"])
        self.debug("Trying to %s network in domain %s to a vm in domain %s, This should fail" %
                   (network.type, self.child_domain_1.name, self.child_domain_2.name))
        with self.assertRaises(Exception) as e:
            virtual_machine.add_nic(self.apiclient, network.id)
        self.debug("Operation failed with exception %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_25_add_nw_above_account_limit(self):
        """Add network to VM with maximum network limit reached"""
        # 1. Create an account and create maximum allowed networks in the account
        # 2. Deploy VM in this account
        # 3. Create a network in other account and add to this VM
        # Validate the following:
        # 1. Adding network should fail
        self.debug("Creating account 1")
        account_1 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(account_1)
        self.debug("setting network limit of account: %s as 1" % account_1.name)
        update_resource_limit(
            self.apiclient,
            6,  # Network
            max=1,
            account=account_1.name,
            domainid=account_1.domainid
        )
        self.debug("Creating isolated network in account: %s" % account_1.name)
        network_1 = Network.create(self.api_client, self.services["isolated_network"], account_1.name,
                                   account_1.domainid, networkofferingid=self.isolated_network_offering.id)
        self.cleanup.append(network_1)
        self.debug("created network %s" % network_1.name)
        self.debug("Deploying virtual machine in account: %s" % account_1.name)
        virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=account_1.name,
                                                domainid=account_1.domainid, serviceofferingid=self.service_offering.id,
                                                mode=self.zone.networktype)
        self.cleanup.append(virtual_machine)
        self.debug("Deployed virtual machine : %s" % virtual_machine.id)
        self.debug("Creating another account")
        account_2 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(account_2)
        self.debug("Created account %s" % account_2.name)
        self.debug("Creating network in account %s" % account_2.name)
        network_2 = Network.create(self.api_client, self.services["isolated_network"], account_2.name,
                                   account_2.domainid, networkofferingid=self.isolated_network_offering.id)
        self.cleanup.append(network_2)
        self.debug("Created network %s" % network_2.name)
        self.debug("Trying to add network %s to VM %s, this should fail" %
                   (network_2.name, virtual_machine.id))
        with self.assertRaises(Exception) as e:
            virtual_machine.add_nic(self.apiclient, network_2.id)
        self.debug("Operation failed with exception %s" % e.exception)
        return
class TestRemoveNetworkFromVirtualMachine(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestRemoveNetworkFromVirtualMachine, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
hypervisor = get_hypervisor_type(cls.api_client)
if hypervisor.lower() not in ["xenserver", "kvm"]:
raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
# Set Zones and disk offerings
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
# Create Accounts & networks
cls.services["isolated_network"]["zoneid"] = cls.zone.id
cls.services["shared_network"]["zoneid"] = cls.zone.id
cls._cleanup = []
cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
cls._cleanup.append(cls.account)
cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
cls._cleanup.append(cls.service_offering)
cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"], accountid=cls.account.name,
domainid=cls.account.domainid, serviceofferingid=cls.service_offering.id,
mode=cls.zone.networktype)
cls._cleanup.append(cls.virtual_machine)
# Create Shared Network Offering
cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
cls._cleanup.append(cls.isolated_network_offering)
# Enable Isolated Network offering
cls.isolated_network_offering.update(cls.api_client, state='Enabled')
cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
cls._cleanup.append(cls.isolated_network)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
self.addednics = []
def tearDown(self):
try:
for nic in self.addednics:
self.virtual_machine.remove_nic(self.apiclient, nic.id)
except Exception as e:
self.debug("Exception during removal of nics : %s" % e)
super(TestRemoveNetworkFromVirtualMachine, self).tearDown()
@classmethod
def tearDownClass(cls):
try:
cls.isolated_network_offering.update(cls.api_client, state='Disabled')
except Exception as e:
cls.debug("Exception during disabling network offering : %s" % e)
super(TestRemoveNetworkFromVirtualMachine, cls).tearDownClass()
def addNetworkToVm(self, network, vm):
"""Add network to VM and check if new nic added in the VM"""
self.debug("Adding %s Network: %s to virtual machine %s" %
(network.type, network.id, vm.id))
vm.add_nic(self.apiclient, network.id)
vm_list = list_virtual_machines(self.apiclient, id=vm.id)
vm_list_validation_result = validateList(vm_list)
self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
vm_list_validation_result[2])
self.debug("virtual machine nics: %s" % vm_list[0].nic)
# Add nic of network to list so that it can be deleted later accessing its id from this list
self.nics = [x for x in vm_list[0].nic if x.networkid == network.id]
self.debug("Filtered nics list: %s:" % self.nics)
self.assertTrue(len(self.nics) == 1, "nics list should contain the nic of added isolated network,\
the number of nics for the network should be 1, instead they are %s" %
len(self.nics))
return self.nics
@attr(tags=["advanced", "dvs"])
def test_07_remove_nic_running_vm(self):
    """Remove nic from running VM"""
    # 1. Deploy Vm in account
    # 2. Add network to VM
    # 3. Remove the nic added by the newly added network
    # Validate the following:
    # 1. Newly added nic is removed
    # 2. Event NIC.DELETE is generated
    self.addNetworkToVm(self.isolated_network, self.virtual_machine)
    # Access the nic of the added network from self.nics, which is filled
    # in by addNetworkToVm above
    self.debug("Removing added nic %s from vm %s" %
               (self.nics[0].id, self.virtual_machine.id))
    self.virtual_machine.remove_nic(self.apiclient, self.nics[0].id)
    # Re-list the VM to observe its nic list after the removal
    vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
    vm_list_validation_result = validateList(vm_list)
    self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                     vm_list_validation_result[2])
    self.debug("virtual machine nics: %s" % vm_list[0].nic)
    # Verify the nic is removed from the virtual machine
    self.debug("Verifying the nic is removed from the virtual machine")
    self.assertFalse(any(x.networkid == self.isolated_network.id for x in vm_list[0].nic),
                     "nic still present in the virtual machine nic list")
    self.debug("nic removed successfully")
    # The removal must also have produced a NIC.DELETE event for the account
    self.debug("Retrieving events list matching events 'NIC.DELETE'")
    events = list_events(
        self.apiclient,
        account=self.account.name,
        domainid=self.account.domainid,
        type='NIC.DELETE'
    )
    event_list_validation_result = validateList(events)
    # NOTE(review): the failure message says "vm list" but this assertion
    # validates the events list — copy-paste artifact, message only.
    self.assertEqual(event_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                     event_list_validation_result[2])
    self.debug("Events list contains event NIC.DELETE")
    self.debug("events: %s" % events)
    return
@attr(tags=["advanced", "dvs"])
def test_08_remove_default_nic(self):
    """Test Remove default nic of running VM"""
    # 1. Deploy Vm in account
    # 2. Try to remove the default nic of the VM
    # Validate the following:
    # 1. Default nic of vm is not removed
    vms = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
    validation = validateList(vms)
    self.assertEqual(validation[0], PASS,
                     "vm list validation failed due to %s" % validation[2])
    vm = vms[0]
    self.debug("virtual machine nics: %s" % vm.nic)
    self.assertEqual(len(vm.nic), 1, "There should only be default nic present in the vm")
    self.debug("Trying to remove the default nic of vm : %s, this should fail" %
               self.virtual_machine.id)
    # Removing the default nic is not allowed, so the API call must raise.
    with self.assertRaises(Exception):
        self.virtual_machine.remove_nic(self.apiclient, vm.nic[0].id)
    self.debug("Removing default nic of vm failed")
    return
@attr(tags=["advanced", "dvs"])
def test_09_remove_foreign_nic(self):
    """Remove nic which does not belong to VM"""
    # 1. Add VM in an account
    # 1. Add new account and deploy vm in it
    # 2. Try to remove nic of the new vm from first vm
    # Validate the following:
    # 1. Nic remove operation should fail
    self.debug("Creating new account")
    account = Account.create(
        self.api_client,
        self.services["account"],
        domainid=self.domain.id
    )
    self.cleanup.append(account)
    self.debug("created new account : %s" % account.name)
    self.debug("Deploying virtual machine in this account")
    virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"], accountid=account.name,
                                            domainid=account.domainid, serviceofferingid=self.service_offering.id,
                                            mode=self.zone.networktype)
    # NOTE(review): this VM is not appended to self.cleanup, unlike the
    # account — presumably account deletion expunges it; verify.
    self.debug("Deployed virtual machine: %s" % virtual_machine.id)
    self.debug("Trying to remove nic of new virtual machine from existing virtual machine, This \
operation should fail")
    # Removing a nic that belongs to a different VM must be rejected
    with self.assertRaises(Exception) as e:
        self.virtual_machine.remove_nic(self.apiclient, virtual_machine.nic[0].id)
    self.debug("Operation failed with exception: %s" % e.exception)
    return
@attr(tags=["advanced"], required_hardware="true")
def test_29_remove_nic_CS22503(self):
    """Test to verify remove nic from vm if the nic ip is same as another vm ip in another network"""
    # 1. Deploy vm v1 with networks n1 and n2
    # 2. Check the ip address of nic in n2 say ip1
    # 3. Deployed vm v2 in another network say n3 with same IP address as ip1 using
    #    'deployVirtualMachine' api with 'ipaddress' as one of the parameters.
    # 4. Acquire public IP in n3 network.
    # 5. Configure PF on the acquired IP and assign it to vm v2
    # 6. Try to remove nic n2 from v1. Should be successful
    # There was a bug due to both vms has same ip address, so not allowing to remove nic
    vm1 = self.virtual_machine
    nic2 = self.addNetworkToVm(self.isolated_network, vm1)
    # BUG FIX: append the nic object itself, not the list returned by
    # addNetworkToVm. tearDown iterates self.addednics and reads nic.id,
    # which silently failed when a list was appended (sibling test_30
    # correctly uses extend()).
    self.addednics.append(nic2[0])
    # get the ip address of the nic added in 2nd network
    vm1_ip = nic2[0].ipaddress
    self.assertIsNotNone(vm1_ip, "New nic did not get the ip address")
    # Create network n3
    self.network3 = Network.create(
        self.api_client,
        self.services["isolated_network"],
        self.account.name,
        self.account.domainid,
        networkofferingid=self.isolated_network_offering.id
    )
    self.cleanup.append(self.network3)
    # Deploy v2 in n3, forcing it to take the same guest IP as vm1's new nic
    self.vm2 = VirtualMachine.create(
        self.api_client,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        networkids=[self.network3.id],
        ipaddress=vm1_ip,
        mode=self.zone.networktype
    )
    self.cleanup.append(self.vm2)
    vm2 = VirtualMachine.list(
        self.api_client,
        id=self.vm2.id
    )
    self.assertEqual(validateList(vm2)[0], PASS, "list vms returned invalid response")
    self.assertIsNotNone(vm2[0].nic[0].ipaddress, "vm2 didn't get the ip address")
    self.assertEqual(
        vm1_ip,
        vm2[0].nic[0].ipaddress,
        "vm2 did not get the ip address passed while deploying vm"
    )
    # Acquire a public IP in n3 and expose vm2 through it
    ip_address = PublicIPAddress.create(
        self.apiclient,
        self.account.name,
        self.zone.id,
        self.account.domainid,
        self.services["virtual_machine"],
        self.network3.id
    )
    self.cleanup.append(ip_address)
    # Open up firewall port for SSH
    FireWallRule.create(
        self.apiclient,
        ipaddressid=ip_address.ipaddress.id,
        protocol=self.services["natrule"]["protocol"],
        cidrlist=['0.0.0.0/0'],
        startport=self.services["natrule"]["publicport"],
        endport=self.services["natrule"]["publicport"]
    )
    # Create NAT rule
    nat_rule = NATRule.create(
        self.apiclient,
        self.vm2,
        self.services["natrule"],
        ip_address.ipaddress.id
    )
    list_nat_rule_response = list_nat_rules(
        self.apiclient,
        id=nat_rule.id
    )
    self.assertEqual(
        validateList(list_nat_rule_response)[0],
        PASS,
        "Check list response returns a valid list"
    )
    self.assertEqual(
        list_nat_rule_response[0].id,
        nat_rule.id,
        "Check Correct Port forwarding Rule is returned"
    )
    # Try to remove nic 2 from vm1; even though vm2 holds the same guest IP
    # in another network this must succeed (regression check for CS-22503)
    try:
        vm1.remove_nic(self.apiclient, self.nics[0].id)
        vm1_res = VirtualMachine.list(self.apiclient, id=vm1.id)
        self.assertEqual(validateList(vm1_res)[0], PASS, "invalid listvm response")
        self.assertEqual(
            len(vm1_res[0].nic),
            1,
            "VM has more than one nic even after removing the 2nd nic"
        )
    except Exception as e:
        self.fail("Failed to delete the nic from vm")
    return
@attr(tags=["advanced"], required_hardware="true")
def test_30_remove_nic_reattach(self):
    """
    Test to verify vm start after NIC removal and reattach

    # 1.Create vm which has 3 nics(e.g. #0,#1,#2)
    # 2.Stop the vm
    # 3.Remove second nic(#1)
    # 4.Add/Reattach same network(#1)
    # 5.Start the instance
    """
    # Two extra isolated networks so the VM comes up with 3 nics in total
    self.ntwk2 = Network.create(
        self.apiclient,
        self.services["isolated_network"],
        self.account.name,
        self.account.domainid,
        networkofferingid=self.isolated_network_offering.id
    )
    self.cleanup.append(self.ntwk2)
    self.ntwk3 = Network.create(
        self.apiclient,
        self.services["isolated_network"],
        self.account.name,
        self.account.domainid,
        networkofferingid=self.isolated_network_offering.id
    )
    self.cleanup.append(self.ntwk3)
    self.test_vm = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        mode=self.zone.networktype,
        networkids=[self.isolated_network.id, self.ntwk2.id, self.ntwk3.id]
    )
    self.cleanup.append(self.test_vm)
    self.assertIsNotNone(self.test_vm, "Failed to create vm with 3 nics")
    vm_res = VirtualMachine.list(
        self.apiclient,
        id=self.test_vm.id
    )
    self.assertEqual(validateList(vm_res)[0], PASS, "Invalid list vm response")
    self.nics = vm_res[0].nic
    self.assertEqual(
        validateList(self.nics)[0],
        PASS,
        "vm response does not contain nics info"
    )
    self.assertEqual(len(self.nics), 3, "Not all nics found in vm response")
    # Stop the VM before removing the nic
    self.test_vm.stop(self.apiclient)
    vm_res2 = VirtualMachine.list(
        self.apiclient,
        id=self.test_vm.id
    )
    self.assertEqual(validateList(vm_res2)[0], PASS, "Invalid response")
    self.assertEqual(
        vm_res2[0].state,
        "Stopped",
        "VM did not stop properly"
    )
    # Get the network id of the nic which we remove from the vm, so that we
    # can use that network id for the reattach
    nic_to_attach = [x for x in [self.isolated_network, self.ntwk2, self.ntwk3] \
                     if x.id == self.nics[1].networkid]
    self.assertEqual(validateList(nic_to_attach)[0], PASS, "No matching nics")
    self.assertEqual(len(nic_to_attach), 1, "More than one nic in same network")
    try:
        self.test_vm.remove_nic(self.apiclient, nicId=self.nics[1].id)
        self.test_vm.add_nic(
            self.apiclient,
            nic_to_attach[0].id
        )
        self.test_vm.start(self.apiclient)
    except Exception as e:
        self.fail("Failed to start vm after nic removal and attachment")
    # Verify the VM is running again with all 3 nics present
    vm_res3 = VirtualMachine.list(self.apiclient, id=self.test_vm.id)
    self.assertEqual(
        validateList(vm_res3)[0],
        PASS,
        "Invalid listvm response after nic detach and attach"
    )
    self.assertEqual(
        vm_res3[0].state,
        "Running",
        "VM didn't come to running state after nic detach and attach"
    )
    vm_nics = vm_res3[0].nic
    self.assertEqual(validateList(vm_nics)[0], PASS, "Invalid nics after vm stop/start")
    self.assertEqual(
        len(vm_nics),
        3,
        "Nic is not attached/detected"
    )
    # NOTE(review): this extends addednics with ALL nics including the
    # default one; tearDown's removal of the default nic will fail and be
    # swallowed by its try/except. Confirm intent.
    self.addednics.extend(vm_nics)
    return
class TestUpdateVirtualMachineNIC(cloudstackTestCase):
    """Tests for updating a VM's default nic (updateDefaultNicForVirtualMachine)."""

    @classmethod
    def setUpClass(cls):
        # Shared fixtures: an account, a service offering, a VM with one
        # default nic, and an enabled isolated network offering/network that
        # tests attach as a second nic.
        cls.testClient = super(TestUpdateVirtualMachineNIC, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        hypervisor = get_hypervisor_type(cls.api_client)
        if hypervisor.lower() not in ["xenserver", "kvm"]:
            raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
        # Set Zones and disk offerings
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = template.id
        # Create Accounts & networks
        cls.services["isolated_network"]["zoneid"] = cls.zone.id
        cls.services["shared_network"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
        cls._cleanup.append(cls.service_offering)
        cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
                                                    accountid=cls.account.name, domainid=cls.account.domainid,
                                                    serviceofferingid=cls.service_offering.id,
                                                    mode=cls.zone.networktype)
        cls._cleanup.append(cls.virtual_machine)
        cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
        cls._cleanup.append(cls.isolated_network_offering)
        # Offerings are created Disabled; enable before creating the network
        cls.isolated_network_offering.update(cls.api_client, state='Enabled')
        cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
                                              cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
        cls._cleanup.append(cls.isolated_network)
        return

    def setUp(self):
        # Fresh clients plus empty per-test cleanup/nic-tracking queues.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        self.addednics = []

    def tearDown(self):
        """Detach nics added during the test, then run base-class teardown."""
        try:
            for nic in self.addednics:
                self.virtual_machine.remove_nic(self.apiclient, nic.id)
        except Exception as e:
            # Best-effort cleanup; failure must not mask the test result
            self.debug("Exception during removal of nics : %s" % e)
        super(TestUpdateVirtualMachineNIC, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        try:
            cls.isolated_network_offering.update(cls.api_client, state='Disabled')
        except Exception as e:
            # NOTE(review): debug is invoked on the class; confirm the
            # framework provides a class-level debug callable, otherwise this
            # handler raises instead of logging. TODO confirm.
            cls.debug("Exception during disable of network offering : %s" % e)
        super(TestUpdateVirtualMachineNIC, cls).tearDownClass()

    def addNetworkToVm(self, network, vm):
        """Add network to VM and check if new nic added in the VM

        Also queues the new nic on ``self.addednics`` so tearDown detaches it.
        """
        self.debug("Adding %s Network: %s to virtual machine %s" %
                   (network.type, network.id, vm.id))
        vm.add_nic(self.apiclient, network.id)
        vm_list = list_virtual_machines(self.apiclient, id=vm.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        self.debug("virtual machine nics: %s" % vm_list[0].nic)
        # Add nic of network to list so that it can be deleted later accessing its id from this list
        self.nics = [x for x in vm_list[0].nic if x.networkid == network.id]
        self.debug("Filtered nics list: %s:" % self.nics)
        self.assertTrue(len(self.nics) == 1, "nics list should contain the nic of added isolated network,\
            the number of nics for the network should be 1, instead they are %s" %
                        len(self.nics))
        self.addednics.append(self.nics[0])
        return

    @attr(tags=["advanced", "dvs"])
    def test_11_update_nic_running_vm(self):
        """update default nic of running VM"""
        # 1. Deploy Vm in account
        # 2. Add network to VM
        # 3. Update default nic of VM (Make the newly added NIC as default)
        # Validate the following:
        # 1. Default nic is updated
        # 2. Previous default nic is now non-default
        # 3. Event NIC.UPDATE is generated
        self.addNetworkToVm(self.isolated_network, self.virtual_machine)
        self.debug("Listing virtual machine so that to retrive the list of non-default and default nic")
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        if len(vm_list[0].nic) != 2:
            self.fail("VM should have exactly two NICs")
        # Record which nic is currently default and which is not
        defaultNicIdBeforeUpdate = None
        nonDefaultNicIdBeforeUpdate = None
        for nic in vm_list[0].nic:
            if nic.isdefault:
                defaultNicIdBeforeUpdate = nic.id
            else:
                nonDefaultNicIdBeforeUpdate = nic.id
        self.debug("Default nic of VM is %s and non default nic of VM is %s"
                   % (defaultNicIdBeforeUpdate, nonDefaultNicIdBeforeUpdate))
        self.debug("Making non default nic as default nic")
        self.virtual_machine.update_default_nic(self.apiclient, nicId=nonDefaultNicIdBeforeUpdate)
        self.debug("Again listing the NIC list of VM to verify the update operation was successful")
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        if len(vm_list[0].nic) != 2:
            self.fail("VM should have exactly two NICs")
        for nic in vm_list[0].nic:
            if nic.isdefault:
                defaultNicIdAfterUpdate = nic.id
        # The previously non-default nic must now be the default one
        self.assertEqual(nonDefaultNicIdBeforeUpdate, defaultNicIdAfterUpdate, "old non default NIC not made\
                         default one, update_default_nic API failed")
        # The update must also have produced a NIC.UPDATE event
        self.debug("Retrieving events list matching events 'NIC.UPDATE'")
        events = list_events(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            type='NIC.UPDATE'
        )
        event_list_validation_result = validateList(events)
        self.assertEqual(event_list_validation_result[0], PASS, "event list validation failed due to %s" %
                         event_list_validation_result[2])
        self.debug("Events list contains event NIC.UPDATE")
        self.debug("events: %s" % events)
        return

    @attr(tags=["advanced", "dvs"])
    def test_12_make_default_nic_as_default(self):
        """Try to set default nic of vm again as default"""
        # 1. Deploy Vm in account
        # 2. Set default nic of vm again as default
        # Validate the following:
        # 1. updateDefaultNic API fails
        self.debug("Listing virtual machine to get default nic")
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        defaultNicId = None
        for nic in vm_list[0].nic:
            if nic.isdefault:
                defaultNicId = nic.id
        self.debug("Trying to set default nic again as default nic, This should fail")
        # Re-marking the current default nic as default must be rejected
        with self.assertRaises(Exception) as e:
            self.virtual_machine.update_default_nic(self.apiclient, nicId=defaultNicId)
        self.debug("updateDefaultNic operation failed as expected with exception: %s" %
                   e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_13_set_foreign_nic_as_default(self):
        """set nic which does not belong to VM as its default one"""
        # 1. Add VM in an account
        # 1. Add new account and deploy vm in it
        # 2. Try to set nic of the new vm as default nic of first vm
        # Validate the following:
        # 1. updateDefaultNic operation should fail
        self.debug("Creating new account")
        account = Account.create(self.api_client, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        self.debug("created new account : %s" % account.name)
        self.debug("Deploying virtual machine in this account")
        virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"],
                                                accountid=account.name, domainid=account.domainid,
                                                serviceofferingid=self.service_offering.id, mode=self.zone.networktype)
        self.cleanup.append(virtual_machine)
        # Let the deployment settle before reading its nic list
        time.sleep(self.services["sleep"])
        self.debug("Deployed virtual machine: %s" % virtual_machine.id)
        foreignNicId = virtual_machine.nic[0].id
        self.debug("Trying to set nic of new virtual machine as default nic of existing virtual machine, This \
operation should fail")
        # A nic belonging to a different VM must be rejected as default
        with self.assertRaises(Exception) as e:
            self.virtual_machine.update_default_nic(self.apiclient, nicId=foreignNicId)
        self.debug("updateDefaultNic operation failed as expected with exception: %s" %
                   e.exception)
        return
class TestFailureScenariosAddNetworkToVM(cloudstackTestCase):
    """Negative tests for the addNicToVirtualMachine API (bad ids, wrong zone,
    basic zone, insufficient permission)."""

    @classmethod
    def setUpClass(cls):
        # Shared fixtures: account, service offering, a VM, and an enabled
        # isolated network offering/network used as the attach target.
        cls.testClient = super(TestFailureScenariosAddNetworkToVM, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        hypervisor = get_hypervisor_type(cls.api_client)
        if hypervisor.lower() not in ["xenserver", "kvm"]:
            raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
        # Set Zones and disk offerings
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = template.id
        # Create Accounts & networks
        cls.services["isolated_network"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
        cls._cleanup.append(cls.service_offering)
        cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
                                                    accountid=cls.account.name, domainid=cls.account.domainid,
                                                    serviceofferingid=cls.service_offering.id, mode=cls.zone.networktype)
        cls._cleanup.append(cls.virtual_machine)
        cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"])
        cls._cleanup.append(cls.isolated_network_offering)
        # Offerings are created Disabled; enable before creating the network
        cls.isolated_network_offering.update(cls.api_client, state='Enabled')
        cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
                                              cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
        cls._cleanup.append(cls.isolated_network)
        return

    def setUp(self):
        # Fresh clients and an empty per-test cleanup queue.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []

    def tearDown(self):
        super(TestFailureScenariosAddNetworkToVM, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        try:
            cls.isolated_network_offering.update(cls.api_client, state='Disabled')
        except Exception as e:
            # NOTE(review): debug is invoked on the class; confirm the
            # framework provides a class-level debug callable.
            cls.debug("Exception during disabling network offering : %s" % e)
        super(TestFailureScenariosAddNetworkToVM, cls).tearDownClass()

    @attr(tags=["advanced", "dvs"])
    def test_15_add_nic_wrong_vm_id(self):
        """Add network to vm with wrong vm id"""
        # 1. Call add network to VM API with correct network id but wrong vm id
        # Validate the following:
        # 1. API should throw exception saying unable to find virtual machine
        cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
        # A random string cannot resolve to an existing VM
        cmd.virtualmachineid = random_gen(id="virtual_machine", size=30)
        cmd.networkid = self.isolated_network.id
        with self.assertRaises(Exception) as e:
            self.apiclient.addNicToVirtualMachine(cmd)
        self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_16_add_nic_wrong_network_id(self):
        """Add network to vm with wrong network id"""
        # 1. Call add network to VM API with correct vm id but wrong network id
        # Validate the following:
        # 1. API should throw exception saying unable to find a network
        cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        # A random string cannot resolve to an existing network
        cmd.networkid = random_gen(id="network_id", size=30)
        with self.assertRaises(Exception) as e:
            self.apiclient.addNicToVirtualMachine(cmd)
        self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_17_add_nic_different_zone(self):
        """Add network to vm where both belong to different zones"""
        # 1. Deploy a VM in zone 1
        # 2. Create a network in zone 2
        # 3. Try to add this network to the VM (both belong to different zones)
        # Validate the following:
        # 1. API should throw exception vminstance is in zone<id>, but network is in zone <id>
        foreignZoneId = None
        zones = list_zones(self.apiclient, available=True)
        list_zones_validation_result = validateList(zones)
        self.assertEqual(list_zones_validation_result[0], PASS, "list zones validation failed due to: %s" %
                         list_zones_validation_result[2])
        if len(zones) >= 2:
            # Pick any zone other than the VM's own
            for zone in zones:
                if zone.id != self.zone.id:
                    foreignZoneId = zone.id
                    break
        else:
            self.skipTest("This test requires at least two zones to be present in the setup")
        self.services["isolated_network"]["zoneid"] = foreignZoneId
        self.debug("Creating isolated network in zone %s which is foreign to VM" %
                   foreignZoneId)
        isolated_network = Network.create(self.apiclient, self.services["isolated_network"],
                                          self.account.name, self.account.domainid,
                                          networkofferingid=self.isolated_network_offering.id)
        self.cleanup.append(isolated_network)
        self.debug("Created isolated network %s in zone %s" %
                   (isolated_network.id, foreignZoneId))
        self.debug("Trying to add network to VM, both belonging to different zones")
        cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        cmd.networkid = isolated_network.id
        with self.assertRaises(Exception) as e:
            time.sleep(5)
            self.apiclient.addNicToVirtualMachine(cmd)
        self.debug("addNicToVirtualMachine API failed with exception: %s" % e.exception)
        return

    @attr(tags=["invalid"])
    def test_18_add_nic_basic_zone(self):
        """Add network to vm in basic zone"""
        # 1. Deploy a vm and create network in basic zone
        # 2. Try adding network to vm
        # Validate following
        # 1. API should throw exception saying Can't add a new nic to vm in basic network
        basicZone = None
        zones = list_zones(self.apiclient, available=True)
        list_zones_validation_result = validateList(zones)
        self.assertEqual(list_zones_validation_result[0], PASS, "list zones validation failed due to: %s" %
                         list_zones_validation_result[2])
        for zone in zones:
            # BUG FIX: the lower-cased network type was compared against the
            # uppercase literal 'BASIC', so no zone could ever match and the
            # test always skipped.
            if zone.networktype.lower() == 'basic':
                # BUG FIX: keep the zone object (the original stored zone.id,
                # a string, and then dereferenced basicZone.id /
                # basicZone.networktype below, which would raise).
                basicZone = zone
                break
        if basicZone is None:
            self.skipTest("This test requires at least one basic zone to be present in the setup")
        self.services["isolated_network"]["zoneid"] = basicZone.id
        self.debug("Creating isolated network in basic zone: %s" % basicZone.id)
        isolated_network = Network.create(self.apiclient, self.services["isolated_network"],
                                          networkofferingid=self.isolated_network_offering.id)
        self.cleanup.append(isolated_network)
        self.debug("Created isolated network %s:" % isolated_network.id)
        self.services["virtual_machine"]["zoneid"] = basicZone.id
        self.debug("Deploying virtual machine in basic zone: %s" % basicZone.id)
        virtual_machine = VirtualMachine.create(self.apiclient, self.services["virtual_machine"],
                                                serviceofferingid=self.service_offering.id,
                                                mode=basicZone.networktype)
        self.cleanup.append(virtual_machine)
        time.sleep(self.services["sleep"])
        self.debug("Deployed virtual machine %s: " % virtual_machine.id)
        cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
        cmd.virtualmachineid = virtual_machine.id
        cmd.networkid = isolated_network.id
        # BUG FIX: self.dedbug was a typo for self.debug and raised
        # AttributeError before the actual assertion could run.
        self.debug("Trying to add isolated network to VM (both in basic zone,\
 this operation should fail")
        with self.assertRaises(Exception) as e:
            time.sleep(5)
            self.apiclient.addNicToVirtualMachine(cmd)
        return

    @attr(tags=["advanced", "dvs"])
    def test_26_add_nic_insufficient_permission(self):
        """Try to add network to vm with insufficient permission"""
        # 1. Call add network to VM API with api client of other account
        # Validate the following:
        # 1. API should throw exception saying insufficient permission
        cmd = addNicToVirtualMachine.addNicToVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        cmd.networkid = self.isolated_network.id
        self.debug("Creating new account")
        account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        self.debug("Created account %s" % account.name)
        self.debug("creating user api client for account: %s" % account.name)
        # The foreign account's user client must not be able to touch our VM
        api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
        self.debug("Trying to add network to vm with this api client, this should fail due to \
insufficient permission")
        with self.assertRaises(Exception) as e:
            time.sleep(5)
            api_client.addNicToVirtualMachine(cmd)
        return
class TestFailureScenariosRemoveNicFromVM(cloudstackTestCase):
    """Negative tests for the removeNicFromVirtualMachine API (bad ids,
    insufficient permission)."""

    @classmethod
    def setUpClass(cls):
        # Shared fixtures: account, service offering, a VM, and an isolated
        # network which is attached to the VM so every test has a removable,
        # non-default nic available.
        cls.testClient = super(TestFailureScenariosRemoveNicFromVM, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        hypervisor = get_hypervisor_type(cls.api_client)
        if hypervisor.lower() not in ["xenserver", "kvm"]:
            raise unittest.SkipTest("This feature is supported only on XenServer and KVM")
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])
        # Set Zones and disk offerings
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = template.id
        # Create Accounts & networks
        cls.services["isolated_network"]["zoneid"] = cls.zone.id
        cls.services["shared_network"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
        cls._cleanup.append(cls.service_offering)
        cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
                                                    accountid=cls.account.name, domainid=cls.account.domainid,
                                                    serviceofferingid=cls.service_offering.id,
                                                    mode=cls.zone.networktype)
        cls._cleanup.append(cls.virtual_machine)
        cls.isolated_network_offering = NetworkOffering.create(cls.api_client, cls.services["isolated_network_offering"], )
        cls._cleanup.append(cls.isolated_network_offering)
        # Offerings are created Disabled; enable before creating the network
        cls.isolated_network_offering.update(cls.api_client, state='Enabled')
        cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"], cls.account.name,
                                              cls.account.domainid, networkofferingid=cls.isolated_network_offering.id)
        cls._cleanup.append(cls.isolated_network)
        # Attach the isolated network so the VM carries a second nic
        cls.virtual_machine.add_nic(cls.api_client, cls.isolated_network.id)
        return

    def setUp(self):
        # Fresh clients and an empty per-test cleanup queue.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []

    def tearDown(self):
        super(TestFailureScenariosRemoveNicFromVM, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        try:
            cls.isolated_network_offering.update(cls.api_client, state='Disabled')
        except Exception as e:
            # NOTE(review): debug is invoked on the class; confirm the
            # framework provides a class-level debug callable, otherwise this
            # handler raises instead of logging. TODO confirm.
            cls.debug("Exception during disabling of network offering : %s" % e)
        super(TestFailureScenariosRemoveNicFromVM, cls).tearDownClass()

    @attr(tags=["advanced", "dvs"])
    def test_19_remove_nic_wrong_vm_id(self):
        """Try to remove nic from a vm providing wrong vm id to API"""
        # (First two steps are performed in setUpClass)
        # 1. Deploy Vm in account
        # 2. Add network to VM
        # 3. Remove the nic added by the newly added network providing wrong vm id to the API
        # Validate the following:
        # 1. API throws exception unable to find a virtual machine with id
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        vm = vm_list_validation_result[1]
        nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
        self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
            network %s" % self.isolated_network.id)
        cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
        # Corrupt the vm id so the API cannot resolve the virtual machine
        cmd.virtualmachineid = self.virtual_machine.id + random_gen()
        cmd.nicid = nics[0].id
        with self.assertRaises(Exception) as e:
            self.apiclient.removeNicFromVirtualMachine(cmd)
        self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_20_remove_nic_wrong_nic_id(self):
        """Try to remove nic from a vm providing wrong nic id to API"""
        # (First two steps are performed in setUpClass)
        # 1. Deploy Vm in account
        # 2. Add network to VM
        # 3. Remove the nic added by the newly added network providing wrong nic id to the API
        # Validate the following:
        # 1. API throws exception unable to find nic with id
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        vm = vm_list_validation_result[1]
        nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
        self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
            network %s" % self.isolated_network.id)
        cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        # Corrupt the nic id so the API cannot resolve the nic
        cmd.nicid = nics[0].id + random_gen()
        with self.assertRaises(Exception) as e:
            self.apiclient.removeNicFromVirtualMachine(cmd)
        self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_27_remove_nic_insufficient_permission(self):
        """Try to remove nic from vm with insufficient permission"""
        # 1. Call remove network from VM API with api client of other account
        # Validate the following:
        # 1. API should throw exception saying insufficient permission
        vm_list = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        vm = vm_list_validation_result[1]
        nics = [x for x in vm.nic if x.networkid == self.isolated_network.id]
        self.assertEqual(len(nics), 1, "There should be exactly one nic corresponding to the isolate\
            network %s" % self.isolated_network.id)
        cmd = removeNicFromVirtualMachine.removeNicFromVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        cmd.nicid = nics[0].id
        self.debug("Creating new account")
        account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        self.debug("Created account %s" % account.name)
        self.debug("creating user api client for account: %s" % account.name)
        # The foreign account's user client must not be able to touch our VM
        api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
        self.debug("Trying to add network to vm with this api client, this should fail due to \
insufficient permission")
        with self.assertRaises(Exception) as e:
            api_client.removeNicFromVirtualMachine(cmd)
        self.debug("removeNicFromVirtualMachine API failed with exception: %s" % e.exception)
        # Finally remove the nic with the owner's client so the VM is left in
        # a clean single-nic state for whatever runs next
        self.apiclient.removeNicFromVirtualMachine(cmd)
        return
class TestFailureScenariosUpdateVirtualMachineNIC(cloudstackTestCase):
    """Negative tests for the updateDefaultNicForVirtualMachine API.

    setUpClass deploys a VM in a dedicated account and attaches a second
    NIC on an isolated network, so every test starts from a VM that has
    exactly one default and one non-default NIC.
    """

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestFailureScenariosUpdateVirtualMachineNIC, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services

        # The add/update-NIC feature is only implemented on these hypervisors.
        hypervisor = get_hypervisor_type(cls.api_client)
        if hypervisor.lower() not in ["xenserver", "kvm"]:
            raise unittest.SkipTest("This feature is supported only on XenServer and KVM")

        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        template = get_template(cls.api_client, cls.zone.id, cls.services["ostype"])

        # Point the service definitions at the selected zone/template.
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = template.id
        cls.services["isolated_network"]["zoneid"] = cls.zone.id
        cls.services["shared_network"]["zoneid"] = cls.zone.id

        cls._cleanup = []
        cls.addednics = []

        # Create account, service offering and the VM under test.
        cls.account = Account.create(cls.api_client, cls.services["account"], domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        cls.service_offering = ServiceOffering.create(cls.api_client, cls.services["service_offering"])
        cls._cleanup.append(cls.service_offering)
        cls.virtual_machine = VirtualMachine.create(cls.api_client, cls.services["virtual_machine"],
                                                    accountid=cls.account.name, domainid=cls.account.domainid,
                                                    serviceofferingid=cls.service_offering.id,
                                                    mode=cls.zone.networktype)
        cls._cleanup.append(cls.virtual_machine)
        cls.defaultNetworkId = cls.virtual_machine.nic[0].networkid

        # Create and enable an isolated network offering, create a network
        # from it, and attach it to the VM as a second (non-default) NIC.
        cls.isolated_network_offering = NetworkOffering.create(cls.api_client,
                                                               cls.services["isolated_network_offering"], )
        cls._cleanup.append(cls.isolated_network_offering)
        cls.isolated_network_offering.update(cls.api_client, state='Enabled')
        cls.isolated_network = Network.create(cls.api_client, cls.services["isolated_network"],
                                              cls.account.name, cls.account.domainid,
                                              networkofferingid=cls.isolated_network_offering.id)
        cls._cleanup.append(cls.isolated_network)
        vm_with_nic = cls.virtual_machine.add_nic(cls.api_client, cls.isolated_network.id)
        nics = [x for x in vm_with_nic.nic if x.networkid == cls.isolated_network.id]
        # Remember the added NIC so tearDownClass can detach it before cleanup.
        cls.addednics.append(nics[-1])
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []

    def tearDown(self):
        super(TestFailureScenariosUpdateVirtualMachineNIC, self).tearDown()

    @classmethod
    def tearDownClass(cls):
        try:
            for nic in cls.addednics:
                # BUG FIX: the original used cls.apiclient, which is only
                # assigned on instances (in setUp); the class-level client
                # created in setUpClass is cls.api_client.
                cls.virtual_machine.remove_nic(cls.api_client, nic.id)
        except Exception as e:
            # cls.debug() is an instance method and is not usable here;
            # report best-effort cleanup failures without aborting teardown.
            print("Exception during removal of nics : %s" % e)
        try:
            cls.isolated_network_offering.update(cls.api_client, state='Disabled')
        except Exception as e:
            print("Exception during disabling of network offering : %s" % e)
        super(TestFailureScenariosUpdateVirtualMachineNIC, cls).tearDownClass()

    def _get_nic_ids(self, vm_id):
        """Return (default_nic_id, non_default_nic_id) for the given VM.

        Fails the current test if the VM cannot be listed or does not have
        exactly two NICs.
        """
        vm_list = list_virtual_machines(self.apiclient, id=vm_id, listall=True)
        vm_list_validation_result = validateList(vm_list)
        self.assertEqual(vm_list_validation_result[0], PASS, "vm list validation failed due to %s" %
                         vm_list_validation_result[2])
        if len(vm_list[0].nic) != 2:
            self.fail("VM should have exactly two NICs")
        default_nic_id = None
        non_default_nic_id = None
        for nic in vm_list[0].nic:
            if nic.isdefault:
                default_nic_id = nic.id
            else:
                non_default_nic_id = nic.id
        self.debug("Default nic of VM is %s and non default nic of VM is %s"
                   % (default_nic_id, non_default_nic_id))
        return default_nic_id, non_default_nic_id

    @attr(tags=["advanced", "dvs"])
    def test_21_update_nic_wrong_vm_id(self):
        """update default nic of vm providing wrong vm id to the API"""
        # (First two steps are performed in setUpClass)
        # 1. Deploy Vm in account
        # 2. Add network to VM
        # 3. Try to make the non-default NIC the default one, but pass a
        #    garbled VM id to the API.
        # Validate: API throws exception saying it can't find the virtual machine.
        _, non_default_nic_id = self._get_nic_ids(self.virtual_machine.id)
        self.debug("Making non default nic as default nic")
        cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id + random_gen()
        cmd.nicid = non_default_nic_id
        with self.assertRaises(Exception) as e:
            self.apiclient.updateDefaultNicForVirtualMachine(cmd)
        self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
                   e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_22_update_nic_wrong_nic_id(self):
        """update default nic of vm providing wrong nic id to the API"""
        # (First two steps are performed in setUpClass)
        # 1. Deploy Vm in account
        # 2. Add network to VM
        # 3. Try to make the non-default NIC the default one, but pass a
        #    garbled nic id to the API.
        # Validate: API throws exception saying it can't find the nic with id.
        _, non_default_nic_id = self._get_nic_ids(self.virtual_machine.id)
        self.debug("Making non default nic as default nic")
        cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        cmd.nicid = non_default_nic_id + random_gen()
        with self.assertRaises(Exception) as e:
            self.apiclient.updateDefaultNicForVirtualMachine(cmd)
        self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
                   e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_23_update_nic_incorrect_vm_state(self):
        """update default nic of vm when vm is state is not Running or Stopped"""
        # 1. Deploy a fresh VM in a new account and attach a second NIC.
        # 2. Destroy the VM (without expunging) so its state becomes
        #    Destroyed or Expunging.
        # 3. Try to make the non-default NIC the default one.
        # Validate: API throws exception saying instance is not Running or Stopped.
        self.debug("Creating new account")
        account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        self.debug("Creating virtual machine in the account %s" % account.name)
        virtual_machine = VirtualMachine.create(self.api_client, self.services["virtual_machine"],
                                                accountid=account.name, domainid=account.domainid,
                                                serviceofferingid=self.service_offering.id,
                                                mode=self.zone.networktype)
        # Give the deployment time to settle before operating on the VM.
        time.sleep(self.services["sleep"])
        self.debug("Created virtual machine %s" % virtual_machine.id)
        self.debug("Creating isolated network in account %s" % account.name)
        isolated_network = Network.create(self.apiclient, self.services["isolated_network"], account.name,
                                          account.domainid, networkofferingid=self.isolated_network_offering.id)
        self.debug("Created isolated network %s" % isolated_network.id)
        self.debug("Adding isolated network %s to vm %s" % (isolated_network.id, virtual_machine.id))
        virtual_machine.add_nic(self.apiclient, isolated_network.id)
        _, non_default_nic_id = self._get_nic_ids(virtual_machine.id)
        self.debug("Destroying VM %s" % virtual_machine.id)
        virtual_machine.delete(self.apiclient, expunge=False)
        self.debug("Making non default nic as default nic")
        cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
        cmd.virtualmachineid = virtual_machine.id
        cmd.nicid = non_default_nic_id
        with self.assertRaises(Exception) as e:
            self.apiclient.updateDefaultNicForVirtualMachine(cmd)
        self.debug("updateDefaultNicForVirtualMachine API failed with exception: %s" %
                   e.exception)
        return

    @attr(tags=["advanced", "dvs"])
    def test_28_update_nic_insufficient_permission(self):
        """Try to update default nic of vm with insufficient permission"""
        # 1. Call update nic of VM API with api client of other account
        # Validate the following:
        # 1. API should throw exception saying insufficient permission
        account = Account.create(self.apiclient, self.services["account"], domainid=self.domain.id)
        self.cleanup.append(account)
        self.debug("Created account %s" % account.name)
        self.debug("creating user api client for account: %s" % account.name)
        api_client = self.testClient.getUserApiClient(UserName=account.name, DomainName=self.account.domain)
        _, non_default_nic_id = self._get_nic_ids(self.virtual_machine.id)
        self.debug("Making non default nic as default nic")
        cmd = updateDefaultNicForVirtualMachine.updateDefaultNicForVirtualMachineCmd()
        cmd.virtualmachineid = self.virtual_machine.id
        cmd.nicid = non_default_nic_id
        # The other account has no rights over this VM, so the call must fail.
        with self.assertRaises(Exception) as e:
            api_client.updateDefaultNicForVirtualMachine(cmd)
        return
| 43.683047
| 135
| 0.627876
| 10,386
| 88,895
| 5.253129
| 0.058059
| 0.045932
| 0.017376
| 0.016221
| 0.800106
| 0.763522
| 0.732473
| 0.708591
| 0.678843
| 0.662567
| 0
| 0.01
| 0.278925
| 88,895
| 2,034
| 136
| 43.704523
| 0.841154
| 0.122425
| 0
| 0.64467
| 0
| 0
| 0.156978
| 0.008037
| 0
| 0
| 0
| 0
| 0.052212
| 1
| 0.042059
| false
| 0.026106
| 0.007977
| 0
| 0.083394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
135775734be3e837824dd9c9b10d66e6e13dbaf2
| 224
|
py
|
Python
|
handlers/__init__.py
|
slavik57/historical-dictionary
|
2548ef6dc39dd5d9a4ccc9fb6e16e4b112ff50c2
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
slavik57/historical-dictionary
|
2548ef6dc39dd5d9a4ccc9fb6e16e4b112ff50c2
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
slavik57/historical-dictionary
|
2548ef6dc39dd5d9a4ccc9fb6e16e4b112ff50c2
|
[
"MIT"
] | null | null | null |
from GetHandler import GetHandler
from SetHandler import SetHandler
from UnsetHandler import UnsetHandler
from NumEqualToHandler import NumEqualToHandler
from UndoHandler import UndoHandler
from EndHandler import EndHandler
| 37.333333
| 47
| 0.892857
| 24
| 224
| 8.333333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 224
| 6
| 48
| 37.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
13a003e0cef22751a10e6a9d79eb5c4433c59a8a
| 252
|
py
|
Python
|
utensor_cgen/transformer/__init__.py
|
ufo2011/utensor_cgen
|
210bb530e7a532fbb9898f7df65f692e2f48f046
|
[
"Apache-2.0"
] | 1
|
2017-12-29T17:40:49.000Z
|
2017-12-29T17:40:49.000Z
|
utensor_cgen/transformer/__init__.py
|
ufo2011/utensor_cgen
|
210bb530e7a532fbb9898f7df65f692e2f48f046
|
[
"Apache-2.0"
] | 1
|
2017-12-28T02:25:45.000Z
|
2017-12-28T02:25:45.000Z
|
utensor_cgen/transformer/__init__.py
|
ufo2011/utensor_cgen
|
210bb530e7a532fbb9898f7df65f692e2f48f046
|
[
"Apache-2.0"
] | 3
|
2017-12-27T17:15:38.000Z
|
2017-12-29T06:43:00.000Z
|
# -*- coding:utf8 -*-
from .base import Transformer
from .conv_pool import *
from .graph_viz import *
from .linear_reoder import *
from .ns_transformer import *
from .optimizer import *
from .pipeline import TransformerPipeline
from .quantize import *
| 25.2
| 41
| 0.769841
| 32
| 252
| 5.9375
| 0.53125
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00463
| 0.142857
| 252
| 9
| 42
| 28
| 0.875
| 0.075397
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
13bde4a918e149a45bad1068ef4d50827e2fa98d
| 215
|
py
|
Python
|
apps/portalbase/macros/page/span/1_main.py
|
threefoldtech/jumpscale_portal_classic
|
d14fe4a17c0486df7a87d149e900746654091fda
|
[
"Apache-2.0"
] | null | null | null |
apps/portalbase/macros/page/span/1_main.py
|
threefoldtech/jumpscale_portal_classic
|
d14fe4a17c0486df7a87d149e900746654091fda
|
[
"Apache-2.0"
] | null | null | null |
apps/portalbase/macros/page/span/1_main.py
|
threefoldtech/jumpscale_portal_classic
|
d14fe4a17c0486df7a87d149e900746654091fda
|
[
"Apache-2.0"
] | null | null | null |
from JumpscalePortalClassic.portal.macrolib import div_base
def main(j, args, params, *other_args):
    """Portal macro entry point: delegate rendering to the shared div_base macro.

    Extra positional arguments are accepted (and ignored) for interface
    compatibility with the portal's macro-dispatch convention.
    """
    return div_base.macro(j, args, params)
def match(j, args, params, tags, tasklet):
    """Tasklet matcher: this macro applies unconditionally, whatever the tags."""
    applies = True
    return applies
| 21.5
| 60
| 0.706977
| 30
| 215
| 4.966667
| 0.633333
| 0.100671
| 0.221477
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195349
| 215
| 9
| 61
| 23.888889
| 0.861272
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b95f8e051919cbcf7c64dd48a9060e188825fc1f
| 166
|
py
|
Python
|
ui/prediction.py
|
shangfr/Data-Follower
|
3baf3d815f3f823ec39139482defd6261d6bedf1
|
[
"Apache-2.0"
] | null | null | null |
ui/prediction.py
|
shangfr/Data-Follower
|
3baf3d815f3f823ec39139482defd6261d6bedf1
|
[
"Apache-2.0"
] | null | null | null |
ui/prediction.py
|
shangfr/Data-Follower
|
3baf3d815f3f823ec39139482defd6261d6bedf1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 25 11:35:11 2021
@author: shangfr
"""
def ui_prediction():
    """Placeholder for the prediction UI page; intentionally does nothing yet."""
    return None
# Allow running this module directly as a quick smoke test.
if __name__ == "__main__":
    ui_prediction()
| 11.857143
| 35
| 0.608434
| 23
| 166
| 3.956522
| 0.869565
| 0.263736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.216867
| 166
| 13
| 36
| 12.769231
| 0.6
| 0.457831
| 0
| 0
| 0
| 0
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
b9c55842fbf5048327a713d9e1a29efe96b66390
| 19
|
py
|
Python
|
index.py
|
guanglinjiang/py3_flask
|
5d5867c6e05f29bc136f4409154a158844f6d7d9
|
[
"MIT"
] | null | null | null |
index.py
|
guanglinjiang/py3_flask
|
5d5867c6e05f29bc136f4409154a158844f6d7d9
|
[
"MIT"
] | 1
|
2018-10-18T04:41:08.000Z
|
2018-10-18T04:41:08.000Z
|
index.py
|
guanglinjiang/py3_flask
|
5d5867c6e05f29bc136f4409154a158844f6d7d9
|
[
"MIT"
] | null | null | null |
# Minimal smoke-test script: prints a mixed ASCII/Chinese greeting
# ("您好" means "hello"); the printed string itself must stay unchanged.
print("github,您好")
| 9.5
| 18
| 0.684211
| 3
| 19
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 19
| 1
| 19
| 19
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b9cd0b753d491535080a2df3e0a18d2ddaea4805
| 223
|
py
|
Python
|
dgmr/__init__.py
|
johmathe/skillful_nowcasting
|
e1f74144996f830fcf4997c3a644f7a0c4d13f43
|
[
"MIT"
] | 42
|
2021-09-09T13:05:18.000Z
|
2022-03-30T20:44:19.000Z
|
dgmr/__init__.py
|
johmathe/skillful_nowcasting
|
e1f74144996f830fcf4997c3a644f7a0c4d13f43
|
[
"MIT"
] | 17
|
2021-09-06T13:58:06.000Z
|
2022-03-23T04:45:09.000Z
|
dgmr/__init__.py
|
johmathe/skillful_nowcasting
|
e1f74144996f830fcf4997c3a644f7a0c4d13f43
|
[
"MIT"
] | 15
|
2021-09-30T04:50:39.000Z
|
2022-03-24T07:44:01.000Z
|
from .dgmr import DGMR
from .generators import Sampler, Generator
from .discriminators import SpatialDiscriminator, TemporalDiscriminator, Discriminator
from .common import LatentConditioningStack, ContextConditioningStack
| 44.6
| 86
| 0.874439
| 20
| 223
| 9.75
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089686
| 223
| 4
| 87
| 55.75
| 0.960591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a03a71123c452d2d58aa64cd34f2cc6ff76c80b
| 155
|
py
|
Python
|
mysite/blog/admin.py
|
sakshikhachane/Blogger
|
a1a6f2fc1843b83b47f1ba8b3c88c5c478f5d6ac
|
[
"MIT"
] | 52
|
2020-07-01T10:06:34.000Z
|
2021-09-30T18:23:23.000Z
|
mysite/blog/admin.py
|
sakshikhachane/Blogger
|
a1a6f2fc1843b83b47f1ba8b3c88c5c478f5d6ac
|
[
"MIT"
] | 206
|
2020-07-25T08:48:05.000Z
|
2022-03-12T00:43:35.000Z
|
mysite/blog/admin.py
|
sakshikhachane/Blogger
|
a1a6f2fc1843b83b47f1ba8b3c88c5c478f5d6ac
|
[
"MIT"
] | 124
|
2020-08-07T11:22:44.000Z
|
2021-10-16T05:39:17.000Z
|
from django.contrib import admin
from .models import Post, TagDict
# Register your models here.
# Expose the blog models in the Django admin with the default ModelAdmin.
admin.site.register(Post)
admin.site.register(TagDict)
| 15.5
| 33
| 0.787097
| 22
| 155
| 5.545455
| 0.545455
| 0.147541
| 0.278689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 155
| 9
| 34
| 17.222222
| 0.903704
| 0.167742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6a16a0f60f2512fc6fbfe4b96cd8577702e24209
| 57
|
py
|
Python
|
qcelemental/molutil/__init__.py
|
laurennk/QCElemental
|
fd6e008da0f2885109f99eb518ee165f9dd7d82e
|
[
"BSD-3-Clause"
] | null | null | null |
qcelemental/molutil/__init__.py
|
laurennk/QCElemental
|
fd6e008da0f2885109f99eb518ee165f9dd7d82e
|
[
"BSD-3-Clause"
] | null | null | null |
qcelemental/molutil/__init__.py
|
laurennk/QCElemental
|
fd6e008da0f2885109f99eb518ee165f9dd7d82e
|
[
"BSD-3-Clause"
] | null | null | null |
from .align import B787, kabsch_align, compute_scramble
| 19
| 55
| 0.824561
| 8
| 57
| 5.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0.122807
| 57
| 2
| 56
| 28.5
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a30bb3c3f6c6e1652f589007f183af6bd1e95d6
| 729
|
py
|
Python
|
wrap/gtwrap/template_instantiator/__init__.py
|
h-rover/gtsam
|
a0206e210d8f47b6ee295a1fbf95af84d98c5cf0
|
[
"BSD-3-Clause"
] | 12
|
2020-10-24T10:41:40.000Z
|
2022-03-07T06:04:28.000Z
|
wrap/gtwrap/template_instantiator/__init__.py
|
h-rover/gtsam
|
a0206e210d8f47b6ee295a1fbf95af84d98c5cf0
|
[
"BSD-3-Clause"
] | 89
|
2020-08-27T03:19:54.000Z
|
2022-03-26T21:03:04.000Z
|
wrap/gtwrap/template_instantiator/__init__.py
|
h-rover/gtsam
|
a0206e210d8f47b6ee295a1fbf95af84d98c5cf0
|
[
"BSD-3-Clause"
] | 7
|
2020-08-26T22:18:33.000Z
|
2022-02-08T07:16:34.000Z
|
"""Code to help instantiate templated classes, methods and functions."""
# pylint: disable=too-many-arguments, too-many-instance-attributes, no-self-use, no-else-return, too-many-arguments, unused-format-string-argument, unused-variable. unused-argument, too-many-branches
from typing import Iterable, Sequence, Union
import gtwrap.interface_parser as parser
from gtwrap.template_instantiator.classes import *
from gtwrap.template_instantiator.constructor import *
from gtwrap.template_instantiator.declaration import *
from gtwrap.template_instantiator.function import *
from gtwrap.template_instantiator.helpers import *
from gtwrap.template_instantiator.method import *
from gtwrap.template_instantiator.namespace import *
| 48.6
| 199
| 0.832647
| 92
| 729
| 6.51087
| 0.48913
| 0.116861
| 0.210351
| 0.350584
| 0.360601
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083676
| 729
| 14
| 200
| 52.071429
| 0.896707
| 0.363512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a4befedd903104c8718d7218a09d36bcf076cd7
| 3,465
|
py
|
Python
|
terrascript/data/rancher/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/data/rancher/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/data/rancher/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/data/rancher/rancher2.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:25:37 UTC)
import terrascript
class rancher2_app(terrascript.Data):
pass
class rancher2_catalog(terrascript.Data):
pass
class rancher2_catalog_v2(terrascript.Data):
pass
class rancher2_certificate(terrascript.Data):
pass
class rancher2_cloud_credential(terrascript.Data):
pass
class rancher2_cluster(terrascript.Data):
pass
class rancher2_cluster_alert_group(terrascript.Data):
pass
class rancher2_cluster_alert_rule(terrascript.Data):
pass
class rancher2_cluster_driver(terrascript.Data):
pass
class rancher2_cluster_logging(terrascript.Data):
pass
class rancher2_cluster_role_template_binding(terrascript.Data):
pass
class rancher2_cluster_scan(terrascript.Data):
pass
class rancher2_cluster_template(terrascript.Data):
pass
class rancher2_cluster_v2(terrascript.Data):
pass
class rancher2_etcd_backup(terrascript.Data):
pass
class rancher2_global_dns_provider(terrascript.Data):
pass
class rancher2_global_role(terrascript.Data):
pass
class rancher2_global_role_binding(terrascript.Data):
pass
class rancher2_multi_cluster_app(terrascript.Data):
pass
class rancher2_namespace(terrascript.Data):
pass
class rancher2_node_driver(terrascript.Data):
pass
class rancher2_node_pool(terrascript.Data):
pass
class rancher2_node_template(terrascript.Data):
pass
class rancher2_notifier(terrascript.Data):
pass
class rancher2_pod_security_policy_template(terrascript.Data):
pass
class rancher2_project(terrascript.Data):
pass
class rancher2_project_alert_group(terrascript.Data):
pass
class rancher2_project_alert_rule(terrascript.Data):
pass
class rancher2_project_logging(terrascript.Data):
pass
class rancher2_project_role_template_binding(terrascript.Data):
pass
class rancher2_registry(terrascript.Data):
pass
class rancher2_role_template(terrascript.Data):
pass
class rancher2_secret(terrascript.Data):
pass
class rancher2_secret_v2(terrascript.Data):
pass
class rancher2_setting(terrascript.Data):
pass
class rancher2_storage_class_v2(terrascript.Data):
pass
class rancher2_user(terrascript.Data):
pass
__all__ = [
"rancher2_app",
"rancher2_catalog",
"rancher2_catalog_v2",
"rancher2_certificate",
"rancher2_cloud_credential",
"rancher2_cluster",
"rancher2_cluster_alert_group",
"rancher2_cluster_alert_rule",
"rancher2_cluster_driver",
"rancher2_cluster_logging",
"rancher2_cluster_role_template_binding",
"rancher2_cluster_scan",
"rancher2_cluster_template",
"rancher2_cluster_v2",
"rancher2_etcd_backup",
"rancher2_global_dns_provider",
"rancher2_global_role",
"rancher2_global_role_binding",
"rancher2_multi_cluster_app",
"rancher2_namespace",
"rancher2_node_driver",
"rancher2_node_pool",
"rancher2_node_template",
"rancher2_notifier",
"rancher2_pod_security_policy_template",
"rancher2_project",
"rancher2_project_alert_group",
"rancher2_project_alert_rule",
"rancher2_project_logging",
"rancher2_project_role_template_binding",
"rancher2_registry",
"rancher2_role_template",
"rancher2_secret",
"rancher2_secret_v2",
"rancher2_setting",
"rancher2_storage_class_v2",
"rancher2_user",
]
| 17.953368
| 73
| 0.765657
| 397
| 3,465
| 6.284635
| 0.141058
| 0.228457
| 0.281764
| 0.346293
| 0.636072
| 0.537475
| 0.177956
| 0.040882
| 0
| 0
| 0
| 0.032434
| 0.15469
| 3,465
| 192
| 74
| 18.046875
| 0.819392
| 0.031169
| 0
| 0.324561
| 1
| 0
| 0.246273
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.324561
| 0.008772
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
dbe657070786d7656535122c8dbfbb9bb4383525
| 148
|
py
|
Python
|
recipes/Python/111971_Format_version_numbers/recipe-111971.py
|
tdiprima/code
|
61a74f5f93da087d27c70b2efe779ac6bd2a3b4f
|
[
"MIT"
] | 2,023
|
2017-07-29T09:34:46.000Z
|
2022-03-24T08:00:45.000Z
|
recipes/Python/111971_Format_version_numbers/recipe-111971.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 32
|
2017-09-02T17:20:08.000Z
|
2022-02-11T17:49:37.000Z
|
recipes/Python/111971_Format_version_numbers/recipe-111971.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 780
|
2017-07-28T19:23:28.000Z
|
2022-03-25T20:39:41.000Z
|
def StringVersion(seq):
    """Format a sequence of version components as a dotted string.

    e.g. (1, 2, 3) -> '1.2.3'; an empty sequence yields ''.
    """
    placeholders = ['%s'] * len(seq)
    return '.'.join(placeholders) % tuple(seq)
def TupleVersion(str):
    """Parse a dotted version string into a tuple of ints.

    e.g. '1.2.3' -> (1, 2, 3).

    NOTE: the parameter name shadows the builtin ``str``; it is kept only
    for backward compatibility with the original signature.
    """
    # BUG FIX: on Python 3 map() returns a lazy iterator, so the original
    # return value could not be compared, indexed or printed like a version
    # tuple (on Python 2 it was a list). Materialize it as a tuple, matching
    # the function's name and intent.
    return tuple(map(int, str.split('.')))
| 24.666667
| 56
| 0.554054
| 18
| 148
| 4.555556
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22973
| 148
| 5
| 57
| 29.6
| 0.719298
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e0033c3f1ae5751e9340c0be3edc3d6b8f2b7582
| 4,707
|
py
|
Python
|
cmibs/cisco_cdp_mib.py
|
xUndero/noc
|
9fb34627721149fcf7064860bd63887e38849131
|
[
"BSD-3-Clause"
] | 1
|
2019-09-20T09:36:48.000Z
|
2019-09-20T09:36:48.000Z
|
cmibs/cisco_cdp_mib.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
cmibs/cisco_cdp_mib.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------
# CISCO-CDP-MIB
# Compiled MIB
# Do not modify this file directly
# Run ./noc mib make_cmib instead
# ----------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# MIB Name
NAME = "CISCO-CDP-MIB"
# Metadata
LAST_UPDATED = "2005-03-21"
COMPILED = "2018-06-09"
# MIB Data: name -> oid
MIB = {
"CISCO-CDP-MIB::ciscoCdpMIB": "1.3.6.1.4.1.9.9.23",
"CISCO-CDP-MIB::ciscoCdpMIBObjects": "1.3.6.1.4.1.9.9.23.1",
"CISCO-CDP-MIB::cdpInterface": "1.3.6.1.4.1.9.9.23.1.1",
"CISCO-CDP-MIB::cdpInterfaceTable": "1.3.6.1.4.1.9.9.23.1.1.1",
"CISCO-CDP-MIB::cdpInterfaceEntry": "1.3.6.1.4.1.9.9.23.1.1.1.1",
"CISCO-CDP-MIB::cdpInterfaceIfIndex": "1.3.6.1.4.1.9.9.23.1.1.1.1.1",
"CISCO-CDP-MIB::cdpInterfaceEnable": "1.3.6.1.4.1.9.9.23.1.1.1.1.2",
"CISCO-CDP-MIB::cdpInterfaceMessageInterval": "1.3.6.1.4.1.9.9.23.1.1.1.1.3",
"CISCO-CDP-MIB::cdpInterfaceGroup": "1.3.6.1.4.1.9.9.23.1.1.1.1.4",
"CISCO-CDP-MIB::cdpInterfacePort": "1.3.6.1.4.1.9.9.23.1.1.1.1.5",
"CISCO-CDP-MIB::cdpInterfaceName": "1.3.6.1.4.1.9.9.23.1.1.1.1.6",
"CISCO-CDP-MIB::cdpInterfaceExtTable": "1.3.6.1.4.1.9.9.23.1.1.2",
"CISCO-CDP-MIB::cdpInterfaceExtEntry": "1.3.6.1.4.1.9.9.23.1.1.2.1",
"CISCO-CDP-MIB::cdpInterfaceExtendedTrust": "1.3.6.1.4.1.9.9.23.1.1.2.1.1",
"CISCO-CDP-MIB::cdpInterfaceCosForUntrustedPort": "1.3.6.1.4.1.9.9.23.1.1.2.1.2",
"CISCO-CDP-MIB::cdpCache": "1.3.6.1.4.1.9.9.23.1.2",
"CISCO-CDP-MIB::cdpCacheTable": "1.3.6.1.4.1.9.9.23.1.2.1",
"CISCO-CDP-MIB::cdpCacheEntry": "1.3.6.1.4.1.9.9.23.1.2.1.1",
"CISCO-CDP-MIB::cdpCacheIfIndex": "1.3.6.1.4.1.9.9.23.1.2.1.1.1",
"CISCO-CDP-MIB::cdpCacheDeviceIndex": "1.3.6.1.4.1.9.9.23.1.2.1.1.2",
"CISCO-CDP-MIB::cdpCacheAddressType": "1.3.6.1.4.1.9.9.23.1.2.1.1.3",
"CISCO-CDP-MIB::cdpCacheAddress": "1.3.6.1.4.1.9.9.23.1.2.1.1.4",
"CISCO-CDP-MIB::cdpCacheVersion": "1.3.6.1.4.1.9.9.23.1.2.1.1.5",
"CISCO-CDP-MIB::cdpCacheDeviceId": "1.3.6.1.4.1.9.9.23.1.2.1.1.6",
"CISCO-CDP-MIB::cdpCacheDevicePort": "1.3.6.1.4.1.9.9.23.1.2.1.1.7",
"CISCO-CDP-MIB::cdpCachePlatform": "1.3.6.1.4.1.9.9.23.1.2.1.1.8",
"CISCO-CDP-MIB::cdpCacheCapabilities": "1.3.6.1.4.1.9.9.23.1.2.1.1.9",
"CISCO-CDP-MIB::cdpCacheVTPMgmtDomain": "1.3.6.1.4.1.9.9.23.1.2.1.1.10",
"CISCO-CDP-MIB::cdpCacheNativeVLAN": "1.3.6.1.4.1.9.9.23.1.2.1.1.11",
"CISCO-CDP-MIB::cdpCacheDuplex": "1.3.6.1.4.1.9.9.23.1.2.1.1.12",
"CISCO-CDP-MIB::cdpCacheApplianceID": "1.3.6.1.4.1.9.9.23.1.2.1.1.13",
"CISCO-CDP-MIB::cdpCacheVlanID": "1.3.6.1.4.1.9.9.23.1.2.1.1.14",
"CISCO-CDP-MIB::cdpCachePowerConsumption": "1.3.6.1.4.1.9.9.23.1.2.1.1.15",
"CISCO-CDP-MIB::cdpCacheMTU": "1.3.6.1.4.1.9.9.23.1.2.1.1.16",
"CISCO-CDP-MIB::cdpCacheSysName": "1.3.6.1.4.1.9.9.23.1.2.1.1.17",
"CISCO-CDP-MIB::cdpCacheSysObjectID": "1.3.6.1.4.1.9.9.23.1.2.1.1.18",
"CISCO-CDP-MIB::cdpCachePrimaryMgmtAddrType": "1.3.6.1.4.1.9.9.23.1.2.1.1.19",
"CISCO-CDP-MIB::cdpCachePrimaryMgmtAddr": "1.3.6.1.4.1.9.9.23.1.2.1.1.20",
"CISCO-CDP-MIB::cdpCacheSecondaryMgmtAddrType": "1.3.6.1.4.1.9.9.23.1.2.1.1.21",
"CISCO-CDP-MIB::cdpCacheSecondaryMgmtAddr": "1.3.6.1.4.1.9.9.23.1.2.1.1.22",
"CISCO-CDP-MIB::cdpCachePhysLocation": "1.3.6.1.4.1.9.9.23.1.2.1.1.23",
"CISCO-CDP-MIB::cdpCacheLastChange": "1.3.6.1.4.1.9.9.23.1.2.1.1.24",
"CISCO-CDP-MIB::cdpCtAddressTable": "1.3.6.1.4.1.9.9.23.1.2.2",
"CISCO-CDP-MIB::cdpCtAddressEntry": "1.3.6.1.4.1.9.9.23.1.2.2.1",
"CISCO-CDP-MIB::cdpCtAddressIndex": "1.3.6.1.4.1.9.9.23.1.2.2.1.3",
"CISCO-CDP-MIB::cdpCtAddressType": "1.3.6.1.4.1.9.9.23.1.2.2.1.4",
"CISCO-CDP-MIB::cdpCtAddress": "1.3.6.1.4.1.9.9.23.1.2.2.1.5",
"CISCO-CDP-MIB::cdpGlobal": "1.3.6.1.4.1.9.9.23.1.3",
"CISCO-CDP-MIB::cdpGlobalRun": "1.3.6.1.4.1.9.9.23.1.3.1",
"CISCO-CDP-MIB::cdpGlobalMessageInterval": "1.3.6.1.4.1.9.9.23.1.3.2",
"CISCO-CDP-MIB::cdpGlobalHoldTime": "1.3.6.1.4.1.9.9.23.1.3.3",
"CISCO-CDP-MIB::cdpGlobalDeviceId": "1.3.6.1.4.1.9.9.23.1.3.4",
"CISCO-CDP-MIB::cdpGlobalLastChange": "1.3.6.1.4.1.9.9.23.1.3.5",
"CISCO-CDP-MIB::cdpGlobalDeviceIdFormatCpb": "1.3.6.1.4.1.9.9.23.1.3.6",
"CISCO-CDP-MIB::cdpGlobalDeviceIdFormat": "1.3.6.1.4.1.9.9.23.1.3.7",
"CISCO-CDP-MIB::ciscoCdpMIBConformance": "1.3.6.1.4.1.9.9.23.2",
"CISCO-CDP-MIB::ciscoCdpMIBCompliances": "1.3.6.1.4.1.9.9.23.2.1",
"CISCO-CDP-MIB::ciscoCdpMIBGroups": "1.3.6.1.4.1.9.9.23.2.2",
}
| 60.346154
| 85
| 0.582537
| 1,042
| 4,707
| 2.629559
| 0.114203
| 0.050365
| 0.240876
| 0.084672
| 0.40073
| 0.362774
| 0.29854
| 0.281752
| 0.281752
| 0.278102
| 0
| 0.199953
| 0.090504
| 4,707
| 77
| 86
| 61.12987
| 0.440084
| 0.093903
| 0
| 0
| 0
| 0.873016
| 0.820748
| 0.799341
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e04140a9d6bcaf25d2ca5088ced39ef7ec8147df
| 324
|
py
|
Python
|
tests/test_utils/test_is_negative.py
|
natanfeitosa/pyfunctools
|
b5354e0d737542b03049eb3e347d6ca1ccceb164
|
[
"MIT"
] | 4
|
2021-11-17T15:26:11.000Z
|
2022-03-12T01:30:55.000Z
|
tests/test_utils/test_is_negative.py
|
natanfeitosa/pyfunctools
|
b5354e0d737542b03049eb3e347d6ca1ccceb164
|
[
"MIT"
] | null | null | null |
tests/test_utils/test_is_negative.py
|
natanfeitosa/pyfunctools
|
b5354e0d737542b03049eb3e347d6ca1ccceb164
|
[
"MIT"
] | null | null | null |
from pyfunctools.utils import is_negative
def test_is_negative():
    """is_negative must accept negative ints/floats/strings and reject the rest."""
    # Cases listed in the same order as the original assertions.
    negative_inputs = (-1000, '-100', '-1.0')
    non_negative_inputs = (1.0, +10, 10000, '100', 'a')
    for value in negative_inputs:
        assert is_negative(value)
    for value in non_negative_inputs:
        assert not is_negative(value)
| 24.923077
| 41
| 0.70679
| 49
| 324
| 4.44898
| 0.346939
| 0.458716
| 0.252294
| 0.43578
| 0.238532
| 0.238532
| 0.238532
| 0.238532
| 0
| 0
| 0
| 0.079848
| 0.188272
| 324
| 12
| 42
| 27
| 0.749049
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 0
| 0
| 0
| 0
| 0.8
| 1
| 0.1
| true
| 0
| 0.1
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0ecadebb5c6e3eab8deb584a4e52cbd1e39e4070
| 79
|
py
|
Python
|
apps/gridportal/utils/gendocs_jpackages.py
|
Jumpscale/jumpscale_portal8
|
3a4d56a1ba985b68fe9b525aed2486a54808332f
|
[
"Apache-2.0"
] | null | null | null |
apps/gridportal/utils/gendocs_jpackages.py
|
Jumpscale/jumpscale_portal8
|
3a4d56a1ba985b68fe9b525aed2486a54808332f
|
[
"Apache-2.0"
] | 74
|
2015-12-28T16:17:20.000Z
|
2021-09-08T12:28:59.000Z
|
apps/gridportal/utils/gendocs_jpackages.py
|
Jumpscale/jumpscale_portal8
|
3a4d56a1ba985b68fe9b525aed2486a54808332f
|
[
"Apache-2.0"
] | null | null | null |
from JumpScale import j

# Start a JumpScale application context named "gendocs"; the docs
# generation itself is presumably driven by the framework's app
# lifecycle — TODO confirm against the JumpScale portal tooling.
j.application.start("gendocs")
# Cleanly end the application run.
j.application.stop()
| 11.285714
| 30
| 0.759494
| 11
| 79
| 5.454545
| 0.727273
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 79
| 6
| 31
| 13.166667
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.