hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2dc0027e5c056672abf889973cba05f85d0ebb33
| 173
|
py
|
Python
|
tests/test_whos_there.py
|
twsl/whos-there
|
cd6167c74e5de20fb7fa24776cba3a74af180bb9
|
[
"MIT"
] | 49
|
2021-11-04T00:26:37.000Z
|
2022-03-21T18:53:03.000Z
|
tests/test_whos_there.py
|
twsl/whos-there
|
cd6167c74e5de20fb7fa24776cba3a74af180bb9
|
[
"MIT"
] | 83
|
2021-11-08T01:37:55.000Z
|
2022-03-30T01:33:49.000Z
|
tests/test_whos_there.py
|
twsl/whos-there
|
cd6167c74e5de20fb7fa24776cba3a74af180bb9
|
[
"MIT"
] | 4
|
2021-11-09T21:51:45.000Z
|
2022-01-01T16:52:41.000Z
|
from whos_there import __version__
def test_version():
# real version initialized in package from poetry and resolved from resources
assert __version__ == "0.0.0"
| 24.714286
| 81
| 0.757225
| 24
| 173
| 5.041667
| 0.708333
| 0.033058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.184971
| 173
| 6
| 82
| 28.833333
| 0.836879
| 0.433526
| 0
| 0
| 0
| 0
| 0.052083
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2dc5135d1e05a0684fe9a0f42a684e67c8cc7f5a
| 24
|
py
|
Python
|
satyrus/api/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/api/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/api/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
from .main import SatAPI
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2df6ec7f3669e00a3d8c57a34b9a45f9e9574097
| 38
|
py
|
Python
|
web/web/views.py
|
OPI-py/django_blog
|
43cb0079499a6397246fd01dc50212fd8d432431
|
[
"BSD-2-Clause"
] | null | null | null |
web/web/views.py
|
OPI-py/django_blog
|
43cb0079499a6397246fd01dc50212fd8d432431
|
[
"BSD-2-Clause"
] | null | null | null |
web/web/views.py
|
OPI-py/django_blog
|
43cb0079499a6397246fd01dc50212fd8d432431
|
[
"BSD-2-Clause"
] | null | null | null |
from django.http import HttpResponse
| 12.666667
| 36
| 0.842105
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 2
| 37
| 19
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2dffb9fc674bff3df6449acd14141eb7be13a731
| 5,496
|
py
|
Python
|
tests/test_filters.py
|
iandroogmans/django-reports
|
2663abfa0546df6e12af651ea959b395eb4c764b
|
[
"BSD-3-Clause"
] | 1
|
2020-06-11T12:38:21.000Z
|
2020-06-11T12:38:21.000Z
|
tests/test_filters.py
|
iandroogmans/django-reports
|
2663abfa0546df6e12af651ea959b395eb4c764b
|
[
"BSD-3-Clause"
] | 3
|
2020-06-05T18:31:28.000Z
|
2021-06-10T20:29:29.000Z
|
tests/test_filters.py
|
iandroogmans/django-reports
|
2663abfa0546df6e12af651ea959b395eb4c764b
|
[
"BSD-3-Clause"
] | 1
|
2020-01-22T09:16:41.000Z
|
2020-01-22T09:16:41.000Z
|
from django.test import TestCase
from advanced_reports.defaults import AdvancedReport
class AdvancedReportTest(TestCase):
def setUp(self):
self.report = AdvancedReport()
def test_tabbed_filter_fields(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
'default': 'img/item.png',
'item2': 'img/item2.png'
},
'types': [
'item1', 'item2', 'item3', 'item4'
]
}
}
self.assertEqual(['card'], [k for k, v in self.report.get_tabbed_filter_links()])
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', 'img/item.png'),
('item2', 'img/item2.png'),
('item3', 'img/item.png'),
('item4', 'img/item.png'),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
def test_tabbed_filter_fields_default_only(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
'default': 'img/item2.png'
},
'types': [
'item1', 'item2', 'item3', 'item4'
]
}
}
self.assertEqual(['card'], [k for k, v in self.report.get_tabbed_filter_links()])
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', 'img/item2.png'),
('item2', 'img/item2.png'),
('item3', 'img/item2.png'),
('item4', 'img/item2.png'),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
def test_tabbed_filter_fields_without_default_image(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
'item2': 'img/item2.png'
},
'types': [
'item1', 'item2', 'item3', 'item4'
]
}
}
self.assertEqual(['card'], [k for k, v in self.report.get_tabbed_filter_links()])
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', None),
('item2', 'img/item2.png'),
('item3', None),
('item4', None),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
def test_tabbed_filter_fields_without_image_values(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
},
'types': [
'item1', 'item2', 'item3', 'item4'
]
}
}
self.assertEqual(['card'], [k for k, v in self.report.get_tabbed_filter_links()])
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', None),
('item2', None),
('item3', None),
('item4', None),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
def test_tabbed_filter_fields_without_images(self):
self.report.tabbed_filter_fields = {
'card': {
'types': [
'item1', 'item2', 'item3', 'item4'
]
}
}
self.assertEqual(['card'], [k for k, v in self.report.get_tabbed_filter_links()])
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', None),
('item2', None),
('item3', None),
('item4', None),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
def test_tabbed_filter_fields_without_types(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
},
}
}
with self.assertRaises(Exception):
self.report.get_tabbed_filter_links()
def test_tabbed_filter_fields_multiple(self):
self.report.tabbed_filter_fields = {
'card': {
'images': {
'default': 'img/item.png',
'item2': 'img/item2.png'
},
'types': [
'item1', 'item2', 'item3', 'item4'
]
},
'gender': {
'images': {
'male': 'img/male.png',
'female': 'img/female.png'
},
'types': [
'male', 'female'
]
}
}
self.assertEqual(['card', 'gender'], sorted([k for k, v in self.report.get_tabbed_filter_links()]))
dict_iteritems = [v for k, v in self.report.get_tabbed_filter_links()]
self.assertEqual(
[
('item1', 'img/item.png'),
('item2', 'img/item2.png'),
('item3', 'img/item.png'),
('item4', 'img/item.png'),
], sorted([(k, v) for k, v in dict_iteritems[1]]))
self.assertEqual(
[
('female', 'img/female.png'),
('male', 'img/male.png'),
], sorted([(k, v) for k, v in dict_iteritems[0]]))
| 33.717791
| 107
| 0.44687
| 530
| 5,496
| 4.443396
| 0.1
| 0.13758
| 0.04034
| 0.056476
| 0.836518
| 0.810616
| 0.797877
| 0.771125
| 0.717622
| 0.717622
| 0
| 0.021224
| 0.408479
| 5,496
| 162
| 108
| 33.925926
| 0.703168
| 0
| 0
| 0.575342
| 0
| 0
| 0.134279
| 0
| 0
| 0
| 0
| 0
| 0.09589
| 1
| 0.054795
| false
| 0
| 0.013699
| 0
| 0.075342
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
93104c858964dae6d4a927dffa5bd3e9534a426b
| 22
|
py
|
Python
|
logparser/Drain/__init__.py
|
f1amingo/logparser
|
65f077a78a974a50e0fff792257fb6fea0a86821
|
[
"MIT"
] | null | null | null |
logparser/Drain/__init__.py
|
f1amingo/logparser
|
65f077a78a974a50e0fff792257fb6fea0a86821
|
[
"MIT"
] | null | null | null |
logparser/Drain/__init__.py
|
f1amingo/logparser
|
65f077a78a974a50e0fff792257fb6fea0a86821
|
[
"MIT"
] | null | null | null |
from .Drain import *
| 11
| 21
| 0.681818
| 3
| 22
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 22
| 1
| 22
| 22
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
931500a0459264600cb8b9096359b127f6612a89
| 2,957
|
py
|
Python
|
code/fig666.py
|
felixwzh/SJTU-CS410-Project
|
b6e90cb353af49f8f18b1bc368db00e9c8e6aab8
|
[
"MIT"
] | 1
|
2018-01-14T16:58:11.000Z
|
2018-01-14T16:58:11.000Z
|
code/fig666.py
|
felixwzh/SJTU-CS410-Project
|
b6e90cb353af49f8f18b1bc368db00e9c8e6aab8
|
[
"MIT"
] | null | null | null |
code/fig666.py
|
felixwzh/SJTU-CS410-Project
|
b6e90cb353af49f8f18b1bc368db00e9c8e6aab8
|
[
"MIT"
] | null | null | null |
import pickle
import matplotlib.pyplot as plt
sex=pickle.load(open('sex_nn_result.pkl'))
#sex['loss']
#sex['acc']
bio=pickle.load(open('bio_nn_result.pkl'))
#bio['loss']
#bio['acc']
fig = plt.figure(frameon=False)
fig.set_size_inches(6, 4)
plt.grid()
# fig = pylab.figure()
ax = fig.add_subplot(1, 1, 1)
ax.yaxis.grid(color='gray', linestyle='dashed')
ax.xaxis.grid(color='gray', linestyle='dashed')
plt.plot(sex['loss'][0],'-ro', label="lr=0.0001")
plt.plot(sex['loss'][1],'-go', label="lr=0.0005")
plt.plot(sex['loss'][2],'-bo', label="lr=0.001")
plt.plot(sex['loss'][3],'-yo', label="lr=0.01")
plt.plot(sex['loss'][4],'-co', label="lr=0.03")
plt.title('influence of lr in Sex-2 task')
#plt.plot(history.history['val_acc'])
plt.xlabel('epoch')
plt.ylabel('Loss')
# plt.legend(loc='lower right')
plt.legend()
plt.savefig('lr_loss_sex.pdf')
plt.show()
# summarize history for loss
fig = plt.figure(frameon=False)
fig.set_size_inches(6, 4)
plt.grid()
# fig = pylab.figure()
ax = fig.add_subplot(1, 1, 1)
ax.yaxis.grid(color='gray', linestyle='dashed')
ax.xaxis.grid(color='gray', linestyle='dashed')
plt.plot(sex['acc'][0],'-ro', label="lr=0.0001")
plt.plot(sex['acc'][1],'-go', label="lr=0.0005")
plt.plot(sex['acc'][2],'-bo', label="lr=0.001")
plt.plot(sex['acc'][3],'-yo', label="lr=0.01")
plt.plot(sex['acc'][4],'-co', label="lr=0.03")
plt.title('influence of lr in Sex-2 task')
#plt.plot(history.history['val_acc'])
plt.xlabel('epoch')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
# plt.legend()
plt.savefig('lr_acc_sex.pdf')
plt.show()
fig = plt.figure(frameon=False)
fig.set_size_inches(6, 4)
plt.grid()
# fig = pylab.figure()
ax = fig.add_subplot(1, 1, 1)
ax.yaxis.grid(color='gray', linestyle='dashed')
ax.xaxis.grid(color='gray', linestyle='dashed')
plt.plot(bio['loss'][0],'-ro', label="lr=0.0001")
plt.plot(bio['loss'][1],'-go', label="lr=0.0005")
plt.plot(bio['loss'][2],'-bo', label="lr=0.001")
plt.plot(bio['loss'][3],'-yo', label="lr=0.01")
plt.plot(bio['loss'][4],'-co', label="lr=0.03")
plt.title('influence of lr in BioSourceType-7 task')
#plt.plot(history.history['val_acc'])
plt.xlabel('epoch')
plt.ylabel('Loss')
# plt.legend(loc='lower right')
plt.legend()
plt.savefig('lr_loss_bio.pdf')
plt.show()
# summarize history for loss
fig = plt.figure(frameon=False)
fig.set_size_inches(6, 4)
plt.grid()
# fig = pylab.figure()
ax = fig.add_subplot(1, 1, 1)
ax.yaxis.grid(color='gray', linestyle='dashed')
ax.xaxis.grid(color='gray', linestyle='dashed')
plt.plot(bio['acc'][0],'-ro', label="lr=0.0001")
plt.plot(bio['acc'][1],'-go', label="lr=0.0005")
plt.plot(bio['acc'][2],'-bo', label="lr=0.001")
plt.plot(bio['acc'][3],'-yo', label="lr=0.01")
plt.plot(bio['acc'][4],'-co', label="lr=0.03")
plt.title('influence of lr in BioSourceType-7 task')
#plt.plot(history.history['val_acc'])
plt.xlabel('epoch')
plt.ylabel('Accuracy')
# plt.legend(loc='upper right')
plt.legend()
plt.savefig('lr_acc_bio.pdf')
plt.show()
| 30.802083
| 52
| 0.663172
| 531
| 2,957
| 3.640301
| 0.141243
| 0.086912
| 0.082773
| 0.09105
| 0.915158
| 0.915158
| 0.915158
| 0.898603
| 0.898603
| 0.668908
| 0
| 0.045289
| 0.074062
| 2,957
| 96
| 53
| 30.802083
| 0.660701
| 0.144403
| 0
| 0.597222
| 0
| 0
| 0.259849
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027778
| 0
| 0.027778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
935b1778e42946bb7cdf30b8564192f66cd6dac9
| 83
|
py
|
Python
|
data_vis/views/__init__.py
|
jneuendorf/dkb_pdf2csv
|
836257403054242fe2971fb3e9c0dfd909b2d199
|
[
"MIT"
] | null | null | null |
data_vis/views/__init__.py
|
jneuendorf/dkb_pdf2csv
|
836257403054242fe2971fb3e9c0dfd909b2d199
|
[
"MIT"
] | null | null | null |
data_vis/views/__init__.py
|
jneuendorf/dkb_pdf2csv
|
836257403054242fe2971fb3e9c0dfd909b2d199
|
[
"MIT"
] | null | null | null |
from .index import index # NOQA
from .test import test # NOQA
from . import api
| 16.6
| 32
| 0.710843
| 13
| 83
| 4.538462
| 0.461538
| 0.271186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228916
| 83
| 4
| 33
| 20.75
| 0.921875
| 0.108434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9371297822d97dacff675bb9124a06ec453ed8c5
| 150
|
py
|
Python
|
cantileveroptimizer/problems/__init__.py
|
simoore/cantilever-designer
|
dfc3849b5c772708dc173054cc2da4b2b1f18e94
|
[
"MIT"
] | 1
|
2019-06-03T13:15:47.000Z
|
2019-06-03T13:15:47.000Z
|
cantileveroptimizer/problems/__init__.py
|
ccwanggl/cantilever-optimizer
|
e7447cbb227a3c04bdd2810c8f43bae6278ec7ee
|
[
"MIT"
] | null | null | null |
cantileveroptimizer/problems/__init__.py
|
ccwanggl/cantilever-optimizer
|
e7447cbb227a3c04bdd2810c8f43bae6278ec7ee
|
[
"MIT"
] | 1
|
2019-06-03T13:15:41.000Z
|
2019-06-03T13:15:41.000Z
|
from .problem_frequency import FrequencyProblem
from .problem_bimodal import BimodalProblem
from .problem_fast_cantilever import FastCantileverProblem
| 50
| 58
| 0.906667
| 16
| 150
| 8.25
| 0.625
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073333
| 150
| 3
| 58
| 50
| 0.94964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fae03c9c25204bd0f05a5f76175f1fa016e3ba34
| 7,492
|
py
|
Python
|
barotropy/_core/diffusion.py
|
njweber2/barotropy
|
2cbf9fcba82052e956c52c138f4bfefef77b6381
|
[
"MIT"
] | null | null | null |
barotropy/_core/diffusion.py
|
njweber2/barotropy
|
2cbf9fcba82052e956c52c138f4bfefef77b6381
|
[
"MIT"
] | null | null | null |
barotropy/_core/diffusion.py
|
njweber2/barotropy
|
2cbf9fcba82052e956c52c138f4bfefef77b6381
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Module containing the Prognostic object Diffusion and some helper functions
"""
from sympl import Prognostic, get_numpy_arrays_with_properties, restore_data_arrays_with_properties
import spharm
class LinearizedDiffusion(Prognostic):
"""
Prescribes vorticity tendency based due to del^4 hyperdiffusion.
"""
# INPUT: vortcity (mean & pert), latitude, longitude
input_properties = {
'perturbation_atmosphere_relative_vorticity': {
'dims': ['lat', 'lon'],
'units': 's^-1',
'alias': 'vortp',
},
'base_atmosphere_relative_vorticity': {
'dims': ['lat', 'lon'],
'units': 's^-1',
'alias': 'vortb',
}
}
# DIAGS: none
diagnostic_properties = {}
# TENDENCIES: vorticity (prime only)
tendency_properties = {
'perturbation_atmosphere_relative_vorticity': {
'units': 's^-2',
}
}
def __init__(self, ntrunc=21, k=2.338e16):
self._ntrunc = ntrunc
self._k = k
def __call__(self, state):
"""
Gets tendencies and diagnostics from the passed model state.
Copied from sympl develop branch (to-be v0.3.3), ignoring checks.
Args
----
state : dict
A model state dictionary.
Returns
-------
tendencies : dict
A dictionary whose keys are strings indicating
state quantities and values are the time derivative of those
quantities in units/second at the time of the input state.
diagnostics : dict
A dictionary whose keys are strings indicating
state quantities and values are the value of those quantities
at the time of the input state.
Raises
------
KeyError
If a required quantity is missing from the state.
InvalidStateError
If state is not a valid input for the Prognostic instance.
"""
raw_state = get_numpy_arrays_with_properties(state, self.input_properties)
raw_state['time'] = state['time']
raw_tendencies, raw_diagnostics = self.array_call(raw_state)
tendencies = restore_data_arrays_with_properties(
raw_tendencies, self.tendency_properties,
state, self.input_properties)
diagnostics = restore_data_arrays_with_properties(
raw_diagnostics, self.diagnostic_properties,
state, self.input_properties)
return tendencies, diagnostics
def array_call(self, state):
"""
Calculates the vorticity tendency from the current state using:
diffusion = k * del^4(vorticity)
Args
----
state : dict
A dictionary of numpy arrays containing the model state.
Returns
-------
tendencies : dict
A single-item dictionary containing the vorticity
tendency numpy array.
diagnostics : dict
An empty dictionary.
"""
# Get numpy arrays with specifications from input_properties
vortp = state['vortp']
vortb = state['vortb']
# Approximate del^4(total vorticity)
del4vort = compute_del4vort(vortp+vortb, self._ntrunc)
tendencies = {'vortp': -self._k * del4vort}
diagnostics = {}
return tendencies, diagnostics
class NonlinearDiffusion(Prognostic):
"""
Prescribes vorticity tendency based due to del^4 hyperdiffusion.
"""
# INPUT: vortcity (mean & pert), latitude, longitude
input_properties = {
'atmosphere_relative_vorticity': {
'dims': ['lat', 'lon'],
'units': 's^-1',
'alias': 'vort',
}
}
# DIAGS: none
diagnostic_properties = {}
# TENDENCIES: vorticity (prime only)
tendency_properties = {
'atmosphere_relative_vorticity': {
'units': 's^-2',
}
}
def __init__(self, ntrunc=21, k=2.338e16):
self._ntrunc = ntrunc
self._k = k
def __call__(self, state):
"""
Gets tendencies and diagnostics from the passed model state.
Copied from sympl develop branch (to-be v0.3.3), ignoring checks.
Args
----
state : dict
A model state dictionary.
Returns
-------
tendencies : dict
A dictionary whose keys are strings indicating
state quantities and values are the time derivative of those
quantities in units/second at the time of the input state.
diagnostics : dict
A dictionary whose keys are strings indicating
state quantities and values are the value of those quantities
at the time of the input state.
Raises
------
KeyError
If a required quantity is missing from the state.
InvalidStateError
If state is not a valid input for the Prognostic instance.
"""
raw_state = get_numpy_arrays_with_properties(state, self.input_properties)
raw_state['time'] = state['time']
raw_tendencies, raw_diagnostics = self.array_call(raw_state)
tendencies = restore_data_arrays_with_properties(
raw_tendencies, self.tendency_properties,
state, self.input_properties)
diagnostics = restore_data_arrays_with_properties(
raw_diagnostics, self.diagnostic_properties,
state, self.input_properties)
return tendencies, diagnostics
def array_call(self, state):
"""
Calculates the vorticity tendency from the current state using:
diffusion = k * del^4(vorticity)
Args
----
state : dict
A dictionary of numpy arrays containing the model state.
Returns
-------
tendencies : dict
A single-item dictionary containing the vorticity
tendency numpy array.
diagnostics : dict
An empty dictionary.
"""
# Get numpy arrays with specifications from input_properties
vort = state['vort']
# Approximate del^4(total vorticity)
del4vort = compute_del4vort(vort, self._ntrunc)
tendencies = {'vort': -self._k * del4vort}
diagnostics = {}
return tendencies, diagnostics
def compute_del4vort(vort, ntrunc):
# Compute del^4(vorticity) with spherical harmonics
# Approximating del^4 as: d4_dx4 + d4_dy4 + 2 * (d2_dx2 * d2_dy2)
s = spharm.Spharmt(vort.shape[1], vort.shape[0], rsphere=6378100.,
gridtype='gaussian', legfunc='computed')
vspec = s.grdtospec(vort, ntrunc=ntrunc)
# First order
dvort_dx, dvort_dy = s.getgrad(vspec)
# Second order
d2vort_dx2, _ = s.getgrad(s.grdtospec(dvort_dx, ntrunc=ntrunc))
_, d2vort_dy2 = s.getgrad(s.grdtospec(dvort_dy, ntrunc=ntrunc))
# Fourth order
d4vort_dx4, _ = s.getgrad(s.grdtospec(s.getgrad(s.grdtospec(d2vort_dx2,
ntrunc=ntrunc))[0], ntrunc=ntrunc))
_, d4vort_dy4 = s.getgrad(s.grdtospec(s.getgrad(s.grdtospec(d2vort_dy2,
ntrunc=ntrunc))[1], ntrunc=ntrunc))
# Put it all together to approximate del^4
del4vort = d4vort_dx4 + d4vort_dy4 + (2 * d2vort_dx2 * d2vort_dy2)
return del4vort
| 33.004405
| 99
| 0.601575
| 802
| 7,492
| 5.457606
| 0.205736
| 0.022847
| 0.036555
| 0.032899
| 0.822938
| 0.793466
| 0.793466
| 0.770162
| 0.746402
| 0.72721
| 0
| 0.016916
| 0.313534
| 7,492
| 226
| 100
| 33.150442
| 0.834144
| 0.391751
| 0
| 0.58427
| 0
| 0
| 0.086274
| 0.045326
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078652
| false
| 0
| 0.022472
| 0
| 0.247191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
faf6f5f2eeac6f625c4fac6adf29b2647c7b9965
| 215
|
py
|
Python
|
__tests__/unit/test_is_number.py
|
tetrascience/ts-task-script-utils
|
7e7992c8345f478b74ed42dd0ba990e21933141c
|
[
"Apache-2.0"
] | 2
|
2022-01-24T15:33:03.000Z
|
2022-03-23T10:59:44.000Z
|
__tests__/unit/test_is_number.py
|
tetrascience/ts-task-script-utils
|
7e7992c8345f478b74ed42dd0ba990e21933141c
|
[
"Apache-2.0"
] | 11
|
2021-04-21T13:57:24.000Z
|
2022-03-31T20:19:34.000Z
|
__tests__/unit/test_is_number.py
|
tetrascience/ts-task-script-utils
|
7e7992c8345f478b74ed42dd0ba990e21933141c
|
[
"Apache-2.0"
] | null | null | null |
from task_script_utils.is_number import isnumber
def test_is_number():
assert isnumber(10)
assert isnumber("10")
assert isnumber("NaN")
assert not isnumber(True)
assert not isnumber("cheese")
| 19.545455
| 48
| 0.716279
| 29
| 215
| 5.137931
| 0.551724
| 0.281879
| 0.214765
| 0.295302
| 0.308725
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022989
| 0.190698
| 215
| 10
| 49
| 21.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.051163
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 1
| 0.142857
| true
| 0
| 0.142857
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
87892ea37e51c4d406aa6c3baab17723117047e4
| 106
|
py
|
Python
|
bitmovin_api_sdk/encoding/encodings/live/insertable_content/schedule/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/encodings/live/insertable_content/schedule/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/encodings/live/insertable_content/schedule/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.encodings.live.insertable_content.schedule.schedule_api import ScheduleApi
| 53
| 105
| 0.90566
| 14
| 106
| 6.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 106
| 1
| 106
| 106
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87c520f5bbee65e9cac83cdc0ef9586c3bc9b12c
| 385
|
py
|
Python
|
src/behavior_tree_learning/core/gp/__init__.py
|
dgerod/bt_learning_using_gp
|
ac1fb6ba4dbd6d18b5d002c7ad2647771f8b0fb9
|
[
"Apache-2.0"
] | 6
|
2021-08-09T22:05:00.000Z
|
2022-01-31T11:01:44.000Z
|
src/behavior_tree_learning/core/gp/__init__.py
|
dgerod/bt_learning_using_gp
|
ac1fb6ba4dbd6d18b5d002c7ad2647771f8b0fb9
|
[
"Apache-2.0"
] | 6
|
2021-12-12T15:38:40.000Z
|
2022-01-31T11:02:12.000Z
|
src/behavior_tree_learning/core/gp/__init__.py
|
dgerod/bt_learning_using_gp
|
ac1fb6ba4dbd6d18b5d002c7ad2647771f8b0fb9
|
[
"Apache-2.0"
] | null | null | null |
from behavior_tree_learning.core.gp.environment import GeneticEnvironment
from behavior_tree_learning.core.gp.parameters import GeneticParameters
from behavior_tree_learning.core.gp.selection import SelectionMethods as GeneticSelectionMethods
from behavior_tree_learning.core.gp.operators import GeneticOperators
from behavior_tree_learning.core.gp.algorithm import GeneticProgramming
| 64.166667
| 96
| 0.903896
| 47
| 385
| 7.191489
| 0.404255
| 0.177515
| 0.236686
| 0.35503
| 0.443787
| 0.443787
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 385
| 5
| 97
| 77
| 0.931129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35542d2e31ae4fcaca706b203010ad7258f00c4a
| 6,011
|
py
|
Python
|
z2/part2/interactive/jm/random_normal_1/613566861.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_normal_1/613566861.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_normal_1/613566861.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 613566861
"""
"""
random actions, total chaos
"""
board = gamma_new(7, 5, 4, 7)
assert board is not None
assert gamma_move(board, 1, 4, 2) == 1
assert gamma_golden_possible(board, 1) == 0
assert gamma_move(board, 2, 1, 4) == 1
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 1, 1, 3) == 1
assert gamma_move(board, 1, 5, 1) == 1
assert gamma_move(board, 2, 1, 0) == 1
assert gamma_move(board, 2, 4, 2) == 0
assert gamma_move(board, 3, 2, 3) == 1
assert gamma_move(board, 4, 1, 1) == 1
assert gamma_move(board, 4, 5, 3) == 1
assert gamma_move(board, 1, 2, 1) == 1
assert gamma_move(board, 2, 0, 0) == 1
assert gamma_move(board, 3, 3, 6) == 0
assert gamma_busy_fields(board, 3) == 1
assert gamma_move(board, 4, 3, 4) == 1
assert gamma_move(board, 4, 6, 2) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 1, 6, 1) == 1
assert gamma_golden_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 2, 3, 3) == 1
assert gamma_move(board, 3, 2, 0) == 1
assert gamma_busy_fields(board, 3) == 2
assert gamma_move(board, 4, 0, 4) == 1
assert gamma_move(board, 4, 2, 2) == 1
assert gamma_move(board, 1, 3, 6) == 0
assert gamma_free_fields(board, 1) == 17
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_free_fields(board, 2) == 16
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_move(board, 3, 3, 3) == 0
board564774384 = gamma_board(board)
assert board564774384 is not None
assert board564774384 == ("42.42..\n"
".132.4.\n"
"..4.1.4\n"
".41..11\n"
"2232...\n")
del board564774384
board564774384 = None
assert gamma_move(board, 4, 3, 4) == 0
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 2, 4, 5) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_golden_possible(board, 2) == 1
board619051289 = gamma_board(board)
assert board619051289 is not None
assert board619051289 == ("42.42..\n"
".132.4.\n"
"..4.1.4\n"
".41..11\n"
"2232...\n")
del board619051289
board619051289 = None
assert gamma_move(board, 3, 4, 1) == 1
assert gamma_move(board, 4, 4, 1) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 1, 2, 3) == 0
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 0, 2) == 1
board375775435 = gamma_board(board)
assert board375775435 is not None
assert board375775435 == ("42.42..\n"
".132.4.\n"
"2.4.1.4\n"
".41.311\n"
"2232...\n")
del board375775435
board375775435 = None
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 1, 0, 2) == 0
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_move(board, 3, 6, 3) == 1
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 0, 3) == 1
assert gamma_free_fields(board, 4) == 12
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_golden_move(board, 3, 3, 3) == 1
assert gamma_move(board, 4, 5, 4) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 2, 0, 0) == 0
assert gamma_move(board, 3, 3, 4) == 0
assert gamma_move(board, 4, 5, 2) == 1
assert gamma_move(board, 4, 1, 0) == 0
assert gamma_move(board, 1, 0, 1) == 1
assert gamma_move(board, 2, 6, 4) == 1
assert gamma_move(board, 2, 5, 4) == 0
assert gamma_move(board, 3, 1, 2) == 1
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_move(board, 1, 6, 1) == 0
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 4, 4, 0) == 1
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_busy_fields(board, 2) == 7
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_move(board, 4, 5, 3) == 0
assert gamma_move(board, 1, 2, 3) == 0
assert gamma_move(board, 1, 2, 4) == 1
assert gamma_move(board, 2, 0, 6) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_busy_fields(board, 2) == 7
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_free_fields(board, 3) == 5
assert gamma_move(board, 4, 1, 0) == 0
assert gamma_free_fields(board, 4) == 5
assert gamma_move(board, 1, 2, 2) == 0
assert gamma_free_fields(board, 1) == 5
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_busy_fields(board, 2) == 7
assert gamma_free_fields(board, 2) == 5
board580183913 = gamma_board(board)
assert board580183913 is not None
assert board580183913 == ("4214242\n"
"4133.43\n"
"234.144\n"
"141.311\n"
"22324..\n")
del board580183913
board580183913 = None
assert gamma_move(board, 3, 2, 3) == 0
assert gamma_move(board, 3, 0, 4) == 0
assert gamma_busy_fields(board, 3) == 6
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_move(board, 4, 6, 2) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_move(board, 1, 0, 2) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_move(board, 3, 0, 5) == 0
assert gamma_move(board, 4, 3, 3) == 0
assert gamma_move(board, 1, 2, 3) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_free_fields(board, 1) == 5
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 2, 1, 4) == 0
assert gamma_move(board, 4, 0, 3) == 0
assert gamma_busy_fields(board, 4) == 10
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 1, 5, 0) == 1
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_busy_fields(board, 2) == 7
gamma_delete(board)
| 31.636842
| 46
| 0.658959
| 1,106
| 6,011
| 3.432188
| 0.050633
| 0.347734
| 0.375395
| 0.500527
| 0.806902
| 0.770811
| 0.666228
| 0.45627
| 0.404636
| 0.377239
| 0
| 0.146218
| 0.179671
| 6,011
| 189
| 47
| 31.804233
| 0.623606
| 0
| 0
| 0.284024
| 0
| 0
| 0.03039
| 0
| 0
| 0
| 0
| 0
| 0.763314
| 1
| 0
| false
| 0
| 0.005917
| 0
| 0.005917
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
356cea3d175f2b0c085fc8290c85373b407727fe
| 47
|
py
|
Python
|
Tests/Runnable2/r_classdoc_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 5
|
2019-05-26T20:48:36.000Z
|
2021-07-09T01:38:38.000Z
|
Tests/Runnable2/r_classdoc_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | null | null | null |
Tests/Runnable2/r_classdoc_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 1
|
2022-02-10T07:14:58.000Z
|
2022-02-10T07:14:58.000Z
|
from r_classdoc import Spam

# Print the class docstring of Spam.  The original used the Python-2-only
# `print x` statement; the parenthesized form below is valid in both
# Python 2 (parenthesized expression) and Python 3 (function call).
print(Spam.__doc__)
| 15.666667
| 27
| 0.851064
| 8
| 47
| 4.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 47
| 2
| 28
| 23.5
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
359cd1fbefa19537fce7c6d9656589e18f81d53c
| 2,872
|
py
|
Python
|
tests/transformer/test_import.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 236
|
2015-01-03T17:14:53.000Z
|
2022-03-01T15:52:46.000Z
|
tests/transformer/test_import.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 149
|
2016-10-24T06:56:44.000Z
|
2022-02-24T08:09:10.000Z
|
tests/transformer/test_import.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 30
|
2015-04-03T05:38:13.000Z
|
2021-11-10T05:13:38.000Z
|
from RestrictedPython import compile_restricted_exec
# Error-message template produced by RestrictingNodeTransformer for any
# imported or bound name that starts with an underscore ("%s" is filled
# with the offending name).
import_errmsg = (
    'Line 1: "%s" is an invalid variable name because it starts with "_"')
def test_RestrictingNodeTransformer__visit_Import__1():
    """A plain module import is accepted."""
    compiled = compile_restricted_exec('import a')
    assert compiled.errors == ()
    assert compiled.code is not None
def test_RestrictingNodeTransformer__visit_Import__2():
    """A module whose name starts with `_` cannot be imported."""
    compiled = compile_restricted_exec('import _a')
    expected = (import_errmsg % '_a',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import__3():
    """Aliasing an underscore-prefixed module (`import _a as m`) is denied."""
    compiled = compile_restricted_exec('import _a as m')
    expected = (import_errmsg % '_a',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import__4():
    """Binding an import to an underscore-prefixed alias is denied."""
    compiled = compile_restricted_exec('import a as _m')
    expected = (import_errmsg % '_m',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import__5():
    """A `from ... import` of a plain name is accepted."""
    compiled = compile_restricted_exec('from a import m')
    assert compiled.errors == ()
    assert compiled.code is not None
def test_RestrictingNodeTransformer__visit_Import_6():
    """Importing *from* a module whose name starts with `_` is allowed."""
    # NOTE(review): the name uses a single underscore before "6", unlike
    # the __1..__9 siblings — likely a typo; kept for interface stability.
    compiled = compile_restricted_exec('from _a import m')
    assert compiled.errors == ()
    assert compiled.code is not None
def test_RestrictingNodeTransformer__visit_Import__7():
    """A from-import aliased to an underscore-prefixed name is denied."""
    compiled = compile_restricted_exec('from a import m as _n')
    expected = (import_errmsg % '_n',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import__8():
    """From-importing an underscore-prefixed name is denied even when aliased."""
    compiled = compile_restricted_exec('from a import _m as n')
    expected = (import_errmsg % '_m',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import__9():
    """A relative from-import aliased to an underscore-prefixed name is denied."""
    compiled = compile_restricted_exec('from .x import y as _leading_underscore')
    expected = (import_errmsg % '_leading_underscore',)
    assert compiled.errors == expected
def test_RestrictingNodeTransformer__visit_Import_star__1():
    """A bare `import *` is already a SyntaxError in Python itself."""
    compiled = compile_restricted_exec('import *')
    expected = (
        "Line 1: SyntaxError: invalid syntax at statement: 'import *'",)
    assert compiled.errors == expected
    assert compiled.code is None
def test_RestrictingNodeTransformer__visit_Import_star__2():
    """A star import from a module is rejected by the transformer."""
    compiled = compile_restricted_exec('from a import *')
    assert compiled.errors == ('Line 1: "*" imports are not allowed.',)
    assert compiled.code is None
| 36.820513
| 79
| 0.726671
| 355
| 2,872
| 5.538028
| 0.177465
| 0.09766
| 0.128179
| 0.212614
| 0.817904
| 0.715158
| 0.604781
| 0.577823
| 0.564598
| 0.502543
| 0
| 0.005863
| 0.168524
| 2,872
| 77
| 80
| 37.298701
| 0.81742
| 0.199513
| 0
| 0.285714
| 0
| 0
| 0.165849
| 0
| 0
| 0
| 0
| 0
| 0.380952
| 1
| 0.261905
| false
| 0
| 0.761905
| 0
| 1.02381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35c57e3d0a8f68bd66f2e30afe63eaf1952d163c
| 149
|
py
|
Python
|
core/agent/__init__.py
|
Marxlp/RLFrame
|
1fcfa4fb26c1f0e407c8ea77c86d9d51af8b579a
|
[
"MIT"
] | 8
|
2020-02-09T03:33:50.000Z
|
2022-01-05T06:35:28.000Z
|
core/agent/__init__.py
|
Marxlp/RLFrame
|
1fcfa4fb26c1f0e407c8ea77c86d9d51af8b579a
|
[
"MIT"
] | null | null | null |
core/agent/__init__.py
|
Marxlp/RLFrame
|
1fcfa4fb26c1f0e407c8ea77c86d9d51af8b579a
|
[
"MIT"
] | 2
|
2020-02-10T03:21:31.000Z
|
2020-03-22T15:40:37.000Z
|
from .agent_sync import Agent_sync as Agent_sync
from .agent_async import Agent_async as Agent_async
from .agent_single import Agent_single as Agent
| 37.25
| 51
| 0.85906
| 26
| 149
| 4.615385
| 0.269231
| 0.225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120805
| 149
| 3
| 52
| 49.666667
| 0.916031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35cad388a4aa0669d41622943dcf845fc0333280
| 2,467
|
py
|
Python
|
tests/test_version.py
|
ericbusboom/metapack
|
c08c84e1208755ddc57d2f758d0a99223ded19cc
|
[
"MIT",
"BSD-3-Clause"
] | 3
|
2018-09-17T15:08:12.000Z
|
2020-05-28T21:48:24.000Z
|
tests/test_version.py
|
ericbusboom/metapack
|
c08c84e1208755ddc57d2f758d0a99223ded19cc
|
[
"MIT",
"BSD-3-Clause"
] | 11
|
2017-10-13T12:29:09.000Z
|
2020-03-22T17:20:29.000Z
|
tests/test_version.py
|
ericbusboom/metapack
|
c08c84e1208755ddc57d2f758d0a99223ded19cc
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2017-12-08T20:17:19.000Z
|
2017-12-08T20:17:19.000Z
|
import unittest
from textwrap import dedent
from metatab.rowgenerators import TextRowGenerator
from metapack import Downloader, MetapackDoc
# NOTE(review): module-level Downloader appears unused in this chunk of the
# file — presumably shared by other tests or required by metapack; confirm
# before removing.
downloader = Downloader()
class TestIssues(unittest.TestCase):
    """Test Metapack AppUrls and Row Generators"""

    def setUp(self):
        # Silence library deprecation/user warnings so test output stays clean.
        import warnings
        warnings.simplefilter('ignore')

    def test_integer_version(self):
        """Identity-name generation for plain-integer and semantic versions."""
        # Plain integer version: the version is appended verbatim.
        mt_lines = dedent("""
Declare: metatab-latest
Identifier: foobar
Dataset: foobar
Origin: foo.com
Version: 1
Section: References
""")
        doc = MetapackDoc(TextRowGenerator(mt_lines))
        self.assertEqual('foo.com-foobar-1', doc._generate_identity_name())
        self.assertEqual('foo.com-foobar-1', doc._generate_identity_name(False))
        # Passing None drops the version component entirely.
        self.assertEqual('foo.com-foobar', doc._generate_identity_name(None))
        # Passing an explicit version overrides the document's version.
        self.assertEqual('foo.com-foobar-10', doc._generate_identity_name(10))
        self.assertEqual('foo.com-foobar', doc.nonver_name)
        # Semantic
        # Full semantic version: Major.Minor.Patch components are joined.
        mt_lines = dedent("""
Declare: metatab-latest
Identifier: foobar
Dataset: foobar
Origin: foo.com
Version:
Version.Major: 1
Version.Minor: 2
Version.Patch: 3
Section: References
""")
        doc = MetapackDoc(TextRowGenerator(mt_lines))
        self.assertEqual('foo.com-foobar-1.2.3', doc._generate_identity_name())
        self.assertEqual('foo.com-foobar-1.2.3', doc._generate_identity_name(False))
        self.assertEqual('foo.com-foobar', doc._generate_identity_name(None))
        # An explicit override replaces only the last (patch) component.
        self.assertEqual('foo.com-foobar-1.2.10', doc._generate_identity_name(10))
        self.assertEqual('foo.com-foobar', doc.nonver_name)
        # Only Major given: missing Minor/Patch apparently default to 1 —
        # TODO confirm this default against metatab's semantics.
        mt_lines = dedent("""
Declare: metatab-latest
Identifier: foobar
Dataset: foobar
Origin: foo.com
Version:
Version.Major: 1
Section: References
""")
        doc = MetapackDoc(TextRowGenerator(mt_lines))
        self.assertEqual('foo.com-foobar-1.1.1', doc._generate_identity_name())
        self.assertEqual('foo.com-foobar-1.1.1', doc._generate_identity_name(False))
        self.assertEqual('foo.com-foobar', doc._generate_identity_name(None))
        self.assertEqual('foo.com-foobar-1.1.10', doc._generate_identity_name(10))
        self.assertEqual('foo.com-foobar', doc.nonver_name)
if __name__ == '__main__':
    # Allow running this test module directly: `python test_version.py`.
    unittest.main()
| 26.815217
| 84
| 0.650993
| 285
| 2,467
| 5.442105
| 0.2
| 0.069633
| 0.174081
| 0.203095
| 0.779497
| 0.779497
| 0.779497
| 0.778208
| 0.778208
| 0.778208
| 0
| 0.01847
| 0.231861
| 2,467
| 91
| 85
| 27.10989
| 0.8
| 0.020268
| 0
| 0.586207
| 0
| 0
| 0.358358
| 0.01742
| 0
| 0
| 0
| 0
| 0.258621
| 1
| 0.034483
| false
| 0
| 0.086207
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
35e091b557f67445abd60e802736a0ce05e290ec
| 63
|
py
|
Python
|
bmcs_beam/mxn/matresdev/db/exdb/__init__.py
|
bmcs-group/bmcs_beam
|
b53967d0d0461657ec914a3256ec40f9dcff80d5
|
[
"MIT"
] | 1
|
2021-05-07T11:10:27.000Z
|
2021-05-07T11:10:27.000Z
|
bmcs_beam/mxn/matresdev/db/exdb/__init__.py
|
bmcs-group/bmcs_beam
|
b53967d0d0461657ec914a3256ec40f9dcff80d5
|
[
"MIT"
] | null | null | null |
bmcs_beam/mxn/matresdev/db/exdb/__init__.py
|
bmcs-group/bmcs_beam
|
b53967d0d0461657ec914a3256ec40f9dcff80d5
|
[
"MIT"
] | null | null | null |
from .ex_run import ExRun
from .ex_run_view import ExRunView
| 12.6
| 34
| 0.809524
| 11
| 63
| 4.363636
| 0.636364
| 0.25
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15873
| 63
| 4
| 35
| 15.75
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ea1e9100bc6c3808dba52dd7bc0d2e598af3c604
| 43
|
py
|
Python
|
lambdata_lrizika/__init__.py
|
Lrizika/Lambdata
|
7f6646597ab23e097ec3d93d29c161636618f25c
|
[
"MIT"
] | null | null | null |
lambdata_lrizika/__init__.py
|
Lrizika/Lambdata
|
7f6646597ab23e097ec3d93d29c161636618f25c
|
[
"MIT"
] | 4
|
2020-03-24T17:57:33.000Z
|
2021-06-02T01:01:32.000Z
|
lambdata_lrizika/__init__.py
|
Lrizika/Lambdata
|
7f6646597ab23e097ec3d93d29c161636618f25c
|
[
"MIT"
] | null | null | null |
from lambdata_lrizika import pandas_utils
| 14.333333
| 41
| 0.883721
| 6
| 43
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 2
| 42
| 21.5
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea28f3ac4d9b288fe5294e2cf0ca6b1ecc854f0a
| 269
|
py
|
Python
|
polrev/portals/run/views.py
|
polrev-github/polrev-django
|
99108ace1a5307b14c3eccb424a9f9616e8c02ae
|
[
"MIT"
] | 1
|
2021-12-10T05:54:16.000Z
|
2021-12-10T05:54:16.000Z
|
polrev/portals/run/views.py
|
polrev-github/polrev-django
|
99108ace1a5307b14c3eccb424a9f9616e8c02ae
|
[
"MIT"
] | null | null | null |
polrev/portals/run/views.py
|
polrev-github/polrev-django
|
99108ace1a5307b14c3eccb424a9f9616e8c02ae
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from accounts.models import User
def index(request):
    """Render the run portal's landing page."""
    template = 'run/index.html'
    return render(request, template)
def amascheduler(request):
    """Render the AMA scheduler page."""
    template = 'run/amascheduler.html'
    return render(request, template)
| 24.454545
| 54
| 0.784387
| 37
| 269
| 5.621622
| 0.567568
| 0.096154
| 0.182692
| 0.25
| 0.278846
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012766
| 0.126394
| 269
| 10
| 55
| 26.9
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0.130597
| 0.078358
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.428571
| 0.285714
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ea3055a8822fa8ef1bc981ad2cd0bba0ac54e23e
| 43
|
py
|
Python
|
textpreprocess/__init__.py
|
dreamvrutik/textclean
|
4e823421489b7861fbb89e9b4824720e6d55c798
|
[
"MIT"
] | null | null | null |
textpreprocess/__init__.py
|
dreamvrutik/textclean
|
4e823421489b7861fbb89e9b4824720e6d55c798
|
[
"MIT"
] | null | null | null |
textpreprocess/__init__.py
|
dreamvrutik/textclean
|
4e823421489b7861fbb89e9b4824720e6d55c798
|
[
"MIT"
] | null | null | null |
from textpreprocess.process import process
| 21.5
| 42
| 0.883721
| 5
| 43
| 7.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.974359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea5eeb33d3dc9ba7d158105003b2ee1011247220
| 1,181
|
py
|
Python
|
src/decision_tree_writer/__init__.py
|
AndreBacic/DecisionTreeWriter
|
58eb229df949fda5f0560b985ed0f9e62c626d4c
|
[
"MIT"
] | 1
|
2021-10-19T22:58:20.000Z
|
2021-10-19T22:58:20.000Z
|
src/decision_tree_writer/__init__.py
|
AndreBacic/DecisionTreeWriter
|
58eb229df949fda5f0560b985ed0f9e62c626d4c
|
[
"MIT"
] | null | null | null |
src/decision_tree_writer/__init__.py
|
AndreBacic/DecisionTreeWriter
|
58eb229df949fda5f0560b985ed0f9e62c626d4c
|
[
"MIT"
] | null | null | null |
from decision_tree_writer.TreeWriter import DecisionTreeWriter
# ✓ (v0.4.1): delete the unused root parameter from BaseDecisionTree for cleaner scaffolded code.
# ✓ (v0.4.2): move data validating and cleaning to a separate class from DecisionTreeWriter and add better validating and cleaning methods
# ✓ (v0.5.1): Have max_depth and min_node_size as parameters for DecisionTreeWriter.create_tree()
# TODO: Refactor DataCleaner (see class todos) (v0.5.2)
# TODO: Refactor TreeWriter fields and properties to be more consistent with what actions change them (ex. passing in max_depth changes it but passing in folder doesn't, and you can't pass in label_name) (v0.5.3)
# TODO: Possibly good idea: Why would you need multiple tree writers? Wouldn't a static class work for multithreading? If so, make DecisionTreeWriter (and the data validating and cleaning class) static (v0.?.1)
# TODO: Make code so that a program can use the new model immediately after it's been trained without having to stop and let the devs write the code to use the new model (v0.6.1)
# TODO: Add support for regression trees as well, and perhaps tries and n-ary trees instead of just binary trees (v0.7.1)
| 107.363636
| 212
| 0.782388
| 202
| 1,181
| 4.549505
| 0.589109
| 0.009793
| 0.068553
| 0.054407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023023
| 0.154107
| 1,181
| 10
| 213
| 118.1
| 0.893894
| 0.931414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea6bf7c57caa0e54f2a8962d602238ba9dda2872
| 110
|
py
|
Python
|
opentimesheet/core/api/serializers.py
|
valerymelou/opentimesheet-server
|
0da97ebb3c3e59962132d1bc5e83e1d727f7331b
|
[
"MIT"
] | null | null | null |
opentimesheet/core/api/serializers.py
|
valerymelou/opentimesheet-server
|
0da97ebb3c3e59962132d1bc5e83e1d727f7331b
|
[
"MIT"
] | 95
|
2021-02-20T21:53:29.000Z
|
2022-01-14T17:24:50.000Z
|
opentimesheet/core/api/serializers.py
|
valerymelou/opentimesheet-server
|
0da97ebb3c3e59962132d1bc5e83e1d727f7331b
|
[
"MIT"
] | null | null | null |
from rest_framework_json_api import serializers
class BaseSerializer(serializers.ModelSerializer):
    """Common base for the project's JSON:API model serializers.

    Currently adds no behavior beyond ``serializers.ModelSerializer``;
    it exists as a single extension point for shared serializer logic.
    """
    pass
| 18.333333
| 50
| 0.845455
| 12
| 110
| 7.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118182
| 110
| 5
| 51
| 22
| 0.927835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
eaa2295de747b8e53dfaba8831501909d4cbab01
| 191
|
py
|
Python
|
modoboa/relaydomains/lib.py
|
vietbm-hcm/modoboa
|
ceeec1bf0a63cf1b3ef7b4087c06e2a1eb35598d
|
[
"ISC"
] | null | null | null |
modoboa/relaydomains/lib.py
|
vietbm-hcm/modoboa
|
ceeec1bf0a63cf1b3ef7b4087c06e2a1eb35598d
|
[
"ISC"
] | null | null | null |
modoboa/relaydomains/lib.py
|
vietbm-hcm/modoboa
|
ceeec1bf0a63cf1b3ef7b4087c06e2a1eb35598d
|
[
"ISC"
] | null | null | null |
"""Internal library."""
from .models import RelayDomain
def import_relaydomain(user, row, formopts):
    """Import one relay domain from a CSV row on behalf of *user*.

    *formopts* is accepted for interface compatibility with the other
    import hooks but is not used here.
    """
    relay_domain = RelayDomain()
    relay_domain.from_csv(user, row)
| 21.222222
| 48
| 0.717277
| 23
| 191
| 5.869565
| 0.695652
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151832
| 191
| 8
| 49
| 23.875
| 0.833333
| 0.293194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
577d9855d8890f64a4615ded83eec0e6ed1396ca
| 37
|
py
|
Python
|
slaid/classifiers/__init__.py
|
mdrio/slaid
|
67c85f0d1702bced1c089bfb3c20ba1cfbc9c225
|
[
"MIT"
] | null | null | null |
slaid/classifiers/__init__.py
|
mdrio/slaid
|
67c85f0d1702bced1c089bfb3c20ba1cfbc9c225
|
[
"MIT"
] | null | null | null |
slaid/classifiers/__init__.py
|
mdrio/slaid
|
67c85f0d1702bced1c089bfb3c20ba1cfbc9c225
|
[
"MIT"
] | 1
|
2022-02-11T15:54:47.000Z
|
2022-02-11T15:54:47.000Z
|
from slaid.classifiers.base import *
| 18.5
| 36
| 0.810811
| 5
| 37
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5795738d93c741b449009809771b25fc6e28bef5
| 36
|
py
|
Python
|
toppy/__init__.py
|
ashdnazg/toppy
|
cefda8b2177576331252265d257532ae79d34a94
|
[
"MIT"
] | null | null | null |
toppy/__init__.py
|
ashdnazg/toppy
|
cefda8b2177576331252265d257532ae79d34a94
|
[
"MIT"
] | 1
|
2021-02-07T09:52:48.000Z
|
2021-02-07T09:52:48.000Z
|
toppy/__init__.py
|
ashdnazg/toppy
|
cefda8b2177576331252265d257532ae79d34a94
|
[
"MIT"
] | 1
|
2021-02-07T09:45:13.000Z
|
2021-02-07T09:45:13.000Z
|
from .toppy import Toppy, ToppyArgs
| 18
| 35
| 0.805556
| 5
| 36
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 36
| 1
| 36
| 36
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
57c855dfb29b5085e7165c3976518decf7c20390
| 2,518
|
py
|
Python
|
FormManagement/views.py
|
SajedeNick1999/Payslip-Management-
|
21e40c5c3ea040733d609a014b66b24135624b73
|
[
"MIT"
] | 1
|
2020-08-16T14:27:00.000Z
|
2020-08-16T14:27:00.000Z
|
FormManagement/views.py
|
SajedeNick1999/Payslip-Management-System
|
21e40c5c3ea040733d609a014b66b24135624b73
|
[
"MIT"
] | null | null | null |
FormManagement/views.py
|
SajedeNick1999/Payslip-Management-System
|
21e40c5c3ea040733d609a014b66b24135624b73
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import JsonResponse
from .models import Form
from ManagerManagement.models import Manager
from EmployeeManagement.models import Employee
def addfield_view(request, name, type, token, id):
    """Add a field (``name``/``type``) to the caller's company form.

    Returns a JsonResponse with ACK 1 on success, or ACK 0 plus an
    HTTP-style status: 404 (user or form not found), 403 (bad token).
    NOTE: ``type`` and ``id`` shadow builtins; kept to preserve the
    URLconf-facing signature.
    """
    manager = Employee.objects.filter(ID=id).first()
    # `is None` instead of `== None`: identity test per PEP 8.
    if manager is None:
        return JsonResponse({'ACK':0,'status':404}) # user not found
    elif manager.Token != token:
        return JsonResponse({'ACK':0,'status':403}) # user is not authorized
    else:
        form = Form.objects.filter(CompanyID=manager.CompanyID).first()
        if form is None:
            return JsonResponse({'ACK':0,'status':404}) # form not found
        else:
            form.Form_AddField(name=name, type=type)
            return JsonResponse({'ACK':1,'status':200})
def editfield_view(request, name, index, token, id):
    """Rename the form field at position ``index`` to ``name``.

    Same response protocol as addfield_view: ACK 1 on success; ACK 0
    with 404 (user/form not found) or 403 (bad token) otherwise.
    NOTE: ``id`` shadows a builtin; kept for URLconf compatibility.
    """
    manager = Employee.objects.filter(ID=id).first()
    # `is None` instead of `== None`: identity test per PEP 8.
    if manager is None:
        return JsonResponse({'ACK':0,'status':404}) # user not found
    elif manager.Token != token:
        return JsonResponse({'ACK':0,'status':403}) # user is not authorized
    else:
        form = Form.objects.filter(CompanyID=manager.CompanyID).first()
        if form is None:
            return JsonResponse({'ACK':0,'status':404}) # form not found
        else:
            form.Form_EditField(name=name, id=index)
            return JsonResponse({'ACK':1,'status':200})
def deletefield_view(request, index, token, id):
    """Delete the form field at position ``index``.

    Same response protocol as addfield_view: ACK 1 on success; ACK 0
    with 404 (user/form not found) or 403 (bad token) otherwise.
    NOTE: ``id`` shadows a builtin; kept for URLconf compatibility.
    """
    manager = Employee.objects.filter(ID=id).first()
    # `is None` instead of `== None`: identity test per PEP 8.
    if manager is None:
        return JsonResponse({'ACK':0,'status':404}) # user not found
    elif manager.Token != token:
        return JsonResponse({'ACK':0,'status':403}) # user is not authorized
    else:
        form = Form.objects.filter(CompanyID=manager.CompanyID).first()
        if form is None:
            return JsonResponse({'ACK':0,'status':404}) # form not found
        else:
            form.Form_DeleteField(id=index)
            return JsonResponse({'ACK':1,'status':200})
def showform_view(request, token, id):
    """Return the caller's company form as JSON.

    On success the payload is {'status': 200, 'count': N, 'fields':
    {'0': {'name': ..., 'type': ...}, ...}}.  Failures use the same
    ACK/status protocol as the other views: 404 (user/form not found),
    403 (bad token).
    NOTE: ``id`` shadows a builtin; kept for URLconf compatibility.
    """
    manager = Employee.objects.filter(ID=id).first()
    # `is None` instead of `== None`: identity test per PEP 8.
    if manager is None:
        return JsonResponse({'ACK':0,'status':404}) # user not found
    elif manager.Token != token:
        return JsonResponse({'ACK':0,'status':403}) # user is not authorized
    else:
        form = Form.objects.filter(CompanyID=manager.CompanyID).first()
        if form is None:
            return JsonResponse({'ACK':0,'status':404}) # form not found
        else:
            formlist = form.Form_GetForm()
            output = {'status':200}
            output['count'] = len(formlist)
            output['fields'] = {}
            # enumerate replaces the original manual `i = i + 1` counter.
            for i, field in enumerate(formlist):
                output['fields'][str(i)] = {
                    'name':field[0],
                    'type':field[1]
                }
            return JsonResponse(output)
| 33.573333
| 70
| 0.697776
| 345
| 2,518
| 5.069565
| 0.165217
| 0.164666
| 0.180103
| 0.150943
| 0.73299
| 0.73299
| 0.73299
| 0.713551
| 0.713551
| 0.666667
| 0
| 0.031076
| 0.143765
| 2,518
| 74
| 71
| 34.027027
| 0.780148
| 0.083797
| 0
| 0.641791
| 0
| 0
| 0.072363
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0.074627
| 0
| 0.373134
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
57ef5534785ab04910b6cab258eda9285c47a909
| 123
|
py
|
Python
|
exputils/data/__init__.py
|
craymichael/exputils
|
cd9cf08e6556104e4be10f1765d41fc9717a8f82
|
[
"MIT"
] | 2
|
2020-09-30T20:23:35.000Z
|
2021-04-23T03:44:49.000Z
|
exputils/data/__init__.py
|
craymichael/exputils
|
cd9cf08e6556104e4be10f1765d41fc9717a8f82
|
[
"MIT"
] | 1
|
2021-03-09T20:39:35.000Z
|
2021-03-16T20:13:36.000Z
|
exputils/data/__init__.py
|
craymichael/exputils
|
cd9cf08e6556104e4be10f1765d41fc9717a8f82
|
[
"MIT"
] | 1
|
2021-04-07T19:04:35.000Z
|
2021-04-07T19:04:35.000Z
|
from exputils.data.confusion_matrix import ConfusionMatrix
from exputils.data import labels
# Public star-import API of exputils.data.  `labels` is imported above but
# deliberately(?) not re-exported here — TODO confirm the omission is intended.
__all__ = ['ConfusionMatrix']
| 24.6
| 58
| 0.837398
| 14
| 123
| 7
| 0.642857
| 0.244898
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 123
| 4
| 59
| 30.75
| 0.882883
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
17a7d5cf526eb48ad4fb0451c4b3cd55bc74c244
| 34
|
py
|
Python
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_fr_certification/models/__init__.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_fr_certification/models/__init__.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_fr_certification/models/__init__.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
import res_company
import account
| 11.333333
| 18
| 0.882353
| 5
| 34
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 2
| 19
| 17
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
17dc383428dffcc8bee32abb6457d27ee9d69115
| 89
|
py
|
Python
|
sites/pycharm-guide/demos/tutorials/visual_pytest/tdd_flow/test_guardian01.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 16
|
2019-02-01T14:03:53.000Z
|
2019-08-18T13:57:33.000Z
|
sites/pycharm-guide/demos/tutorials/visual_pytest/tdd_flow/test_guardian01.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 3
|
2019-03-22T07:40:33.000Z
|
2019-04-03T16:04:35.000Z
|
sites/pycharm-guide/demos/tutorials/visual_pytest/tdd_flow/test_guardian01.py
|
stevewhitmore/jetbrains_guide
|
234eb44a3ecc670048e4d02b3b5b39affe4a9e31
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 2
|
2019-03-25T15:00:12.000Z
|
2019-08-18T13:57:36.000Z
|
from laxleague.guardian import Guardian
def test_construction():
assert Guardian()
| 14.833333
| 39
| 0.775281
| 10
| 89
| 6.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157303
| 89
| 5
| 40
| 17.8
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
17e6888353f78b3afddbf3d2fcc1efc0873d98ce
| 173
|
py
|
Python
|
setup.py
|
kngxscn/Sentence-Splitter-for-Russian
|
cbd356c29a3c39a15840d97dc0ed2e6ddc9b83d9
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
kngxscn/Sentence-Splitter-for-Russian
|
cbd356c29a3c39a15840d97dc0ed2e6ddc9b83d9
|
[
"Apache-2.0"
] | 5
|
2020-01-28T22:28:24.000Z
|
2022-02-09T23:58:30.000Z
|
setup.py
|
kngxscn/Sentence-Splitter-for-Russian
|
cbd356c29a3c39a15840d97dc0ed2e6ddc9b83d9
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup
setup(
name='sentence_splitter_for_russian',
version='0.1',
description='俄语系分句器',
packages=['sentence_splitter_for_russian']
)
| 19.222222
| 46
| 0.728324
| 20
| 173
| 6
| 0.75
| 0.266667
| 0.316667
| 0.433333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013699
| 0.156069
| 173
| 8
| 47
| 21.625
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0.387283
| 0.33526
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.142857
| 0
| 0.142857
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3511c1c75e81fc66ee9e09851b5a06241b1c2c70
| 46
|
py
|
Python
|
girder_worker_tasks/arbor_nova_tasks/arbor_tasks/app_support/__init__.py
|
girder/arbor_tasks
|
8a0b6c94b3aea1722878c7425a70197d206a8414
|
[
"Apache-2.0"
] | 4
|
2019-04-30T16:52:31.000Z
|
2020-03-11T17:18:08.000Z
|
girder_worker_tasks/arbor_nova_tasks/arbor_tasks/app_support/__init__.py
|
girder/arbor_tasks
|
8a0b6c94b3aea1722878c7425a70197d206a8414
|
[
"Apache-2.0"
] | 6
|
2019-04-08T20:54:32.000Z
|
2019-04-19T19:29:11.000Z
|
girder_worker_tasks/arbor_nova_tasks/arbor_tasks/app_support/__init__.py
|
girder/arbor_tasks
|
8a0b6c94b3aea1722878c7425a70197d206a8414
|
[
"Apache-2.0"
] | 2
|
2020-04-30T20:23:34.000Z
|
2020-10-23T14:18:59.000Z
|
from .pgls import pgls
from .asr import asr
| 11.5
| 22
| 0.73913
| 8
| 46
| 4.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 46
| 3
| 23
| 15.333333
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
354962539fe54fce61541a57de93ffdc88ec57ad
| 491
|
py
|
Python
|
test/test_registry.py
|
wangjunyan305/homura
|
815750e927e0eeaac3f5c0bf49d8d46dabaf5988
|
[
"Apache-2.0"
] | 102
|
2018-05-25T05:59:09.000Z
|
2022-03-08T22:38:44.000Z
|
test/test_registry.py
|
Xiangyu-Han/homura
|
c366ca70b4b65f6a4809bf76926bbd926320262e
|
[
"Apache-2.0"
] | 31
|
2018-04-22T17:35:40.000Z
|
2021-12-07T19:06:18.000Z
|
test/test_registry.py
|
Xiangyu-Han/homura
|
c366ca70b4b65f6a4809bf76926bbd926320262e
|
[
"Apache-2.0"
] | 24
|
2019-02-05T11:39:37.000Z
|
2022-02-09T01:01:25.000Z
|
import pytest
from homura.register import Registry
def test_registry():
MODEL_REGISTRY = Registry('model')
MODEL_REGISTRY2 = Registry('model')
assert MODEL_REGISTRY is MODEL_REGISTRY2
@MODEL_REGISTRY.register
def something():
return 1
@MODEL_REGISTRY.register
def anything():
return 2
assert MODEL_REGISTRY('something')() == 1
with pytest.raises(KeyError):
@MODEL_REGISTRY.register
def something():
pass
| 19.64
| 45
| 0.661914
| 53
| 491
| 5.962264
| 0.396226
| 0.246835
| 0.199367
| 0.227848
| 0.208861
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013661
| 0.254582
| 491
| 24
| 46
| 20.458333
| 0.849727
| 0
| 0
| 0.294118
| 0
| 0
| 0.038697
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.235294
| false
| 0.058824
| 0.117647
| 0.117647
| 0.470588
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 6
|
1051e511a79a5d78e8ebdbf0ab59184eb08a0097
| 36
|
py
|
Python
|
tictac/__main__.py
|
pxeger/tictac
|
59e1313f30e82c4ca5f58e7f7f24eed535b13207
|
[
"Artistic-2.0"
] | 2
|
2021-12-13T22:40:53.000Z
|
2022-01-01T11:57:04.000Z
|
tictac/__main__.py
|
pxeger/tictac
|
59e1313f30e82c4ca5f58e7f7f24eed535b13207
|
[
"Artistic-2.0"
] | null | null | null |
tictac/__main__.py
|
pxeger/tictac
|
59e1313f30e82c4ca5f58e7f7f24eed535b13207
|
[
"Artistic-2.0"
] | null | null | null |
import tictac.cli
tictac.cli.main()
| 12
| 17
| 0.777778
| 6
| 36
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 2
| 18
| 18
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
10a078b31cb7c5263748a9b08fd920ee5d1a8bd4
| 9,118
|
py
|
Python
|
test/entry/cli/test_cli.py
|
HansBug/jentry
|
69817fc19df1c8b31b32a834cfe1aa93841d6022
|
[
"Apache-2.0"
] | null | null | null |
test/entry/cli/test_cli.py
|
HansBug/jentry
|
69817fc19df1c8b31b32a834cfe1aa93841d6022
|
[
"Apache-2.0"
] | 1
|
2022-03-20T01:42:56.000Z
|
2022-03-20T01:42:56.000Z
|
test/entry/cli/test_cli.py
|
HansBug/jentry
|
69817fc19df1c8b31b32a834cfe1aa93841d6022
|
[
"Apache-2.0"
] | null | null | null |
import json
import pytest
from click.testing import CliRunner
from jentry.entry.cli import cli
from ...testing import DEMO_PROJECT_PATH, demo_mark, DEMO_COMPLEX_PROJECT_PATH, demo_complex_mark, \
DEMO_ALL_PROJECT_PATH, demo_all_mark
class TestEntryCliCli:
@pytest.mark.unittest
def test_version(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-v'])
assert result.exit_code == 0
assert "jentry" in result.stdout.lower()
@demo_mark
def test_common_demo(self):
runner = CliRunner()
result = runner.invoke(cli, args=[DEMO_PROJECT_PATH])
assert result.exit_code == 0
assert result.stdout.rstrip() == 'homework.Main'
@demo_complex_mark
def test_common_demo_complex(self):
runner = CliRunner()
result = runner.invoke(cli, args=[DEMO_COMPLEX_PROJECT_PATH])
assert result.exit_code == 0
assert result.stdout.rstrip() == 'Main'
@demo_all_mark
def test_common_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=[DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert set(map(lambda x: x.rstrip(), result.stdout.splitlines())) == {'Main', 'homework.Main'}
@demo_all_mark
def test_entry_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'entry', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert set(map(lambda x: x.rstrip(), result.stdout.splitlines())) == {'Main', 'homework.Main'}
@demo_all_mark
def test_json_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == [
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
},
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
}
]
@demo_all_mark
def test_table_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'table', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert "Entry" in result.stdout
assert "Package" in result.stdout
assert "Class" in result.stdout
assert "Filename" in result.stdout
assert "Main" in result.stdout
assert "demo/2018_spring_16061104_10/src/Main.java" in result.stdout
assert 'homework.Main' in result.stdout
assert "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java" in result.stdout
@demo_mark
def test_file_demo(self):
runner = CliRunner()
result = runner.invoke(cli, args=['demo/oo_course_2019_17373331_homework_2/src/homework/Main.java'])
assert result.exit_code == 0
assert result.stdout.rstrip() == 'homework.Main'
@demo_all_mark
def test_common_first_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-F', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert set(map(lambda x: x.rstrip(), result.stdout.splitlines())) == {'Main'}
@demo_all_mark
def test_entry_first_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'entry', '-F', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert set(map(lambda x: x.rstrip(), result.stdout.splitlines())) == {'Main'}
@demo_all_mark
def test_json_first_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-F', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == {
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
}
@demo_all_mark
def test_table_first_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'table', '-F', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert "Entry" in result.stdout
assert "Package" in result.stdout
assert "Class" in result.stdout
assert "Filename" in result.stdout
assert "Main" in result.stdout
assert "demo/2018_spring_16061104_10/src/Main.java" in result.stdout
@demo_all_mark
def test_json_sorted_by_file_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-s', 'file', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == [
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
},
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
}
]
@demo_all_mark
def test_json_sorted_by_package_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-s', 'package', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == [
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
},
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
}
]
@demo_all_mark
def test_json_sorted_by_entry_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-s', 'entry', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == [
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
},
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
}
]
@demo_all_mark
def test_json_sorted_by_class_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-s', 'class', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert (json.loads(result.stdout) == [
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
},
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
}
]) or (json.loads(result.stdout) == [
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
},
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
}
])
@demo_all_mark
def test_json_sorted_by_file_reverse_demo_all(self):
runner = CliRunner()
result = runner.invoke(cli, args=['-f', 'json', '-s', 'file', '-r', DEMO_ALL_PROJECT_PATH])
assert result.exit_code == 0
assert json.loads(result.stdout) == [
{
"entrance": "homework.Main",
"package": "homework",
"class": "Main",
"file": "demo/oo_course_2019_17373331_homework_2/src/homework/Main.java"
},
{
"entrance": "Main",
"package": None,
"class": "Main",
"file": "demo/2018_spring_16061104_10/src/Main.java"
}
]
@pytest.mark.unittest
def test_no_entry(self):
runner = CliRunner()
result = runner.invoke(cli, args=['test'])
assert result.exit_code == 0
assert not result.stdout.strip()
runner = CliRunner()
result = runner.invoke(cli, args=['-F', 'test'])
assert result.exit_code == 1
| 33.895911
| 108
| 0.557469
| 1,017
| 9,118
| 4.752212
| 0.072763
| 0.059383
| 0.082557
| 0.106145
| 0.926547
| 0.902545
| 0.882682
| 0.87006
| 0.841299
| 0.823505
| 0
| 0.044019
| 0.312349
| 9,118
| 268
| 109
| 34.022388
| 0.726794
| 0
| 0
| 0.606195
| 0
| 0
| 0.203224
| 0.10726
| 0
| 0
| 0
| 0
| 0.216814
| 1
| 0.079646
| false
| 0
| 0.022124
| 0
| 0.106195
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
529ccd8cdc3485a6edbc0ff7791774ff3f769f1b
| 220
|
py
|
Python
|
ccal/write_gps_map.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
ccal/write_gps_map.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
ccal/write_gps_map.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
from gzip import open as gzip_open
from pickle import dump
def write_gps_map(gps_map, pickle_gz_file_path):
with gzip_open(pickle_gz_file_path, mode="wb") as pickle_gz_file:
dump(gps_map, pickle_gz_file)
| 22
| 69
| 0.772727
| 40
| 220
| 3.85
| 0.425
| 0.207792
| 0.311688
| 0.181818
| 0.233766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 220
| 9
| 70
| 24.444444
| 0.836957
| 0
| 0
| 0
| 0
| 0
| 0.009091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
52cb71fcbd0028ce63e97fc7ae8db67ed6e74094
| 48
|
py
|
Python
|
tools/__init__.py
|
VincentXWD/codebase-segmentation
|
d25a6adc87aa75e03421cfa07a7d45b2dcff3249
|
[
"MIT"
] | 14
|
2019-10-31T12:55:28.000Z
|
2019-11-01T16:10:04.000Z
|
tools/__init__.py
|
VincentXWD/codebase-segmentation
|
d25a6adc87aa75e03421cfa07a7d45b2dcff3249
|
[
"MIT"
] | 3
|
2019-11-15T13:30:00.000Z
|
2020-06-27T17:04:11.000Z
|
tools/__init__.py
|
VincentXWD/codebase-segmentation
|
d25a6adc87aa75e03421cfa07a7d45b2dcff3249
|
[
"MIT"
] | 2
|
2019-11-10T13:34:05.000Z
|
2019-12-12T06:59:40.000Z
|
from . import benchmark
from . import statistics
| 24
| 24
| 0.8125
| 6
| 48
| 6.5
| 0.666667
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 2
| 24
| 24
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5e1ec64a6a0c8221d35eec3463c0f401ec6b96a1
| 14
|
pyde
|
Python
|
Talk 1/a_grey_dullness/a_grey_dullness.pyde
|
purrcat259/introduction-to-artistic-programming
|
6694c9c8c8c4ff321e6ec8b6917cc0d6c3c95e20
|
[
"MIT"
] | 1
|
2018-12-05T20:04:48.000Z
|
2018-12-05T20:04:48.000Z
|
Talk 1/a_grey_dullness/a_grey_dullness.pyde
|
purrcat259/introduction-to-artistic-programming
|
6694c9c8c8c4ff321e6ec8b6917cc0d6c3c95e20
|
[
"MIT"
] | null | null | null |
Talk 1/a_grey_dullness/a_grey_dullness.pyde
|
purrcat259/introduction-to-artistic-programming
|
6694c9c8c8c4ff321e6ec8b6917cc0d6c3c95e20
|
[
"MIT"
] | null | null | null |
size(800, 600)
| 14
| 14
| 0.714286
| 3
| 14
| 3.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 0.071429
| 14
| 1
| 14
| 14
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e21b8ed88bf4096d4f2d5b19e988b1e15c1dca6
| 451
|
py
|
Python
|
asyncupbankapi/models/__init__.py
|
unchartedshark/async-up-bank-api
|
9e789d16aa05fd9ea8d7981c78d35015a67142ae
|
[
"MIT"
] | 1
|
2021-12-07T09:04:49.000Z
|
2021-12-07T09:04:49.000Z
|
asyncupbankapi/models/__init__.py
|
unchartedshark/async-up-bank-api
|
9e789d16aa05fd9ea8d7981c78d35015a67142ae
|
[
"MIT"
] | null | null | null |
asyncupbankapi/models/__init__.py
|
unchartedshark/async-up-bank-api
|
9e789d16aa05fd9ea8d7981c78d35015a67142ae
|
[
"MIT"
] | null | null | null |
"""Typed python client for interacting with Up's banking API."""
from asyncupbankapi.models.accounts import Account, Accounts
from asyncupbankapi.models.categories import Category, Categories
from asyncupbankapi.models.tags import Tags
from asyncupbankapi.models.transactions import Transaction, Transactions
from asyncupbankapi.models.utility import Ping
from asyncupbankapi.models.webhooks import Webhook, WebhookEvent, WebhookLogs, Webhooks
| 56.375
| 88
| 0.840355
| 52
| 451
| 7.288462
| 0.519231
| 0.28496
| 0.379947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101996
| 451
| 7
| 89
| 64.428571
| 0.935802
| 0.128603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eaba397d14e6e76eb9cd3b451361cca81b073b6f
| 14,546
|
py
|
Python
|
tests/api/v1/test_fields.py
|
omertuc/CTFd
|
2d2674aceee07c468742d9e2b48479df95089c45
|
[
"Apache-2.0"
] | 3,592
|
2017-03-12T19:44:07.000Z
|
2022-03-30T16:03:33.000Z
|
tests/api/v1/test_fields.py
|
omertuc/CTFd
|
2d2674aceee07c468742d9e2b48479df95089c45
|
[
"Apache-2.0"
] | 1,648
|
2017-03-12T23:44:34.000Z
|
2022-03-31T15:28:38.000Z
|
tests/api/v1/test_fields.py
|
omertuc/CTFd
|
2d2674aceee07c468742d9e2b48479df95089c45
|
[
"Apache-2.0"
] | 1,736
|
2017-03-13T14:01:28.000Z
|
2022-03-31T08:14:24.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from CTFd.models import Fields, TeamFieldEntries, Teams, UserFieldEntries, Users
from tests.helpers import (
create_ctfd,
destroy_ctfd,
gen_field,
gen_team,
login_as_user,
register_user,
)
def test_api_custom_fields():
app = create_ctfd()
with app.app_context():
register_user(app)
gen_field(app.db, name="CustomField1")
gen_field(app.db, name="CustomField2")
with login_as_user(app) as user:
r = user.get("/api/v1/configs/fields", json="")
assert r.status_code == 403
with login_as_user(app, name="admin") as admin:
r = admin.get("/api/v1/configs/fields", json="")
resp = r.get_json()
assert resp == {
"success": True,
"data": [
{
"public": True,
"required": True,
"type": "user",
"editable": True,
"id": 1,
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField1",
},
{
"public": True,
"required": True,
"type": "user",
"editable": True,
"id": 2,
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField2",
},
],
}
r = admin.post(
"/api/v1/configs/fields",
json={
"public": True,
"required": True,
"editable": True,
"id": 2,
"type": "user",
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField3",
},
)
assert r.status_code == 200
r = admin.get("/api/v1/configs/fields", json="")
resp = r.get_json()
assert resp == {
"success": True,
"data": [
{
"public": True,
"required": True,
"type": "user",
"editable": True,
"id": 1,
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField1",
},
{
"public": True,
"required": True,
"type": "user",
"editable": True,
"id": 2,
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField2",
},
{
"public": True,
"required": True,
"editable": True,
"id": 3,
"type": "user",
"field_type": "text",
"description": "CustomFieldDescription",
"name": "CustomField3",
},
],
}
r = admin.patch(
"/api/v1/configs/fields/3",
json={
"public": False,
"required": False,
"editable": False,
"id": 4,
"type": "user",
"field_type": "text",
"description": "CustomFieldDescription",
"name": "PatchedCustomField3",
},
)
assert r.status_code == 200
assert r.get_json()["data"] == {
"public": False,
"required": False,
"editable": False,
"id": 3,
"type": "user",
"field_type": "text",
"description": "CustomFieldDescription",
"name": "PatchedCustomField3",
}
r = admin.get("/api/v1/configs/fields/3", json="")
assert r.status_code == 200
assert r.get_json()["data"] == {
"public": False,
"required": False,
"editable": False,
"id": 3,
"type": "user",
"field_type": "text",
"description": "CustomFieldDescription",
"name": "PatchedCustomField3",
}
r = admin.delete("/api/v1/configs/fields/3", json="")
assert r.status_code == 200
r = admin.get("/api/v1/configs/fields/3", json="")
assert r.status_code == 404
destroy_ctfd(app)
def test_api_self_fields_permissions():
app = create_ctfd()
with app.app_context():
gen_field(app.db, name="CustomField1", public=False, editable=False)
gen_field(app.db, name="CustomField2", public=True, editable=True)
with app.test_client() as client:
client.get("/register")
with client.session_transaction() as sess:
data = {
"name": "user",
"email": "user@examplectf.com",
"password": "password",
"nonce": sess.get("nonce"),
"fields[1]": "CustomValue1",
"fields[2]": "CustomValue2",
}
r = client.post("/register", data=data)
with client.session_transaction() as sess:
assert sess["id"]
with login_as_user(app) as user, login_as_user(app, name="admin") as admin:
r = user.get("/api/v1/users/me")
resp = r.get_json()
assert resp["data"]["fields"] == [
{
"value": "CustomValue2",
"name": "CustomField2",
"description": "CustomFieldDescription",
"type": "text",
"field_id": 2,
}
]
r = admin.get("/api/v1/users/2")
resp = r.get_json()
assert len(resp["data"]["fields"]) == 2
field = Fields.query.filter_by(id=1).first()
field.public = True
app.db.session.commit()
r = user.get("/api/v1/users/me")
resp = r.get_json()
assert len(resp["data"]["fields"]) == 2
destroy_ctfd(app)
def test_partial_field_update():
app = create_ctfd()
with app.app_context():
register_user(app)
gen_field(app.db, name="CustomField1")
gen_field(app.db, name="CustomField2")
with login_as_user(app) as user:
r = user.patch(
"/api/v1/users/me",
json={
"fields": [
{"field_id": 1, "value": "CustomValue1"},
{"field_id": 2, "value": "CustomValue2"},
]
},
)
assert r.status_code == 200
assert UserFieldEntries.query.count() == 2
r = user.patch(
"/api/v1/users/me",
json={"fields": [{"field_id": 2, "value": "NewCustomValue2"}]},
)
assert r.status_code == 200
assert UserFieldEntries.query.count() == 2
assert (
UserFieldEntries.query.filter_by(field_id=1, user_id=2).first().value
== "CustomValue1"
)
assert (
UserFieldEntries.query.filter_by(field_id=2, user_id=2).first().value
== "NewCustomValue2"
)
with login_as_user(app, name="admin") as admin:
r = admin.patch(
"/api/v1/users/2",
json={"fields": [{"field_id": 2, "value": "AdminNewCustomValue2"}]},
)
assert r.status_code == 200
assert UserFieldEntries.query.count() == 2
assert (
UserFieldEntries.query.filter_by(field_id=1, user_id=2).first().value
== "CustomValue1"
)
assert (
UserFieldEntries.query.filter_by(field_id=2, user_id=2).first().value
== "AdminNewCustomValue2"
)
destroy_ctfd(app)
def test_api_team_self_fields_permissions():
app = create_ctfd(user_mode="teams")
with app.app_context():
register_user(app)
team = gen_team(app.db)
user = Users.query.filter_by(id=2).first()
user.team_id = team.id
app.db.session.commit()
team = Teams.query.filter_by(id=1).first()
team.captain_id = 2
app.db.session.commit()
gen_field(
app.db, name="CustomField1", type="team", public=False, editable=False
)
gen_field(app.db, name="CustomField2", type="team", public=True, editable=True)
app.db.session.add(
TeamFieldEntries(type="team", value="CustomValue1", team_id=1, field_id=1)
)
app.db.session.add(
TeamFieldEntries(type="team", value="CustomValue2", team_id=1, field_id=2)
)
app.db.session.commit()
assert len(team.field_entries) == 2
with login_as_user(app) as user, login_as_user(app, name="admin") as admin:
r = user.get("/api/v1/teams/me")
resp = r.get_json()
assert resp["data"]["fields"] == [
{
"value": "CustomValue2",
"name": "CustomField2",
"description": "CustomFieldDescription",
"type": "text",
"field_id": 2,
}
]
assert len(resp["data"]["fields"]) == 1
# Admin gets data and should see all fields
r = admin.get("/api/v1/teams/1")
resp = r.get_json()
assert len(resp["data"]["fields"]) == 2
r = user.patch(
"/api/v1/teams/me",
json={
"fields": [
{"field_id": 1, "value": "NewCustomValue1"},
{"field_id": 2, "value": "NewCustomValue2"},
]
},
)
assert r.get_json() == {
"success": False,
"errors": {"fields": ["Field 'CustomField1' cannot be editted"]},
}
assert r.status_code == 400
assert (
TeamFieldEntries.query.filter_by(id=1).first().value == "CustomValue1"
)
assert (
TeamFieldEntries.query.filter_by(id=2).first().value == "CustomValue2"
)
# After making the field public the user should see both fields
field = Fields.query.filter_by(id=1).first()
field.public = True
app.db.session.commit()
r = user.get("/api/v1/teams/me")
resp = r.get_json()
assert len(resp["data"]["fields"]) == 2
# Captain should be able to edit their values after it's made editable
field = Fields.query.filter_by(id=1).first()
field.editable = True
app.db.session.commit()
r = user.patch(
"/api/v1/teams/me",
json={
"fields": [
{"field_id": 1, "value": "NewCustomValue1"},
{"field_id": 2, "value": "NewCustomValue2"},
]
},
)
print(r.get_json())
assert r.status_code == 200
assert (
TeamFieldEntries.query.filter_by(id=1).first().value
== "NewCustomValue1"
)
assert (
TeamFieldEntries.query.filter_by(id=2).first().value
== "NewCustomValue2"
)
destroy_ctfd(app)
def test_team_partial_field_update():
app = create_ctfd(user_mode="teams")
with app.app_context():
register_user(app)
team = gen_team(app.db)
user = Users.query.filter_by(id=2).first()
user.team_id = team.id
team = Teams.query.filter_by(id=1).first()
team.captain_id = 2
app.db.session.commit()
gen_field(app.db, name="CustomField1", type="team")
gen_field(app.db, name="CustomField2", type="team")
with login_as_user(app) as user:
r = user.patch(
"/api/v1/teams/me",
json={
"fields": [
{"field_id": 1, "value": "CustomValue1"},
{"field_id": 2, "value": "CustomValue2"},
]
},
)
assert r.status_code == 200
assert TeamFieldEntries.query.count() == 2
r = user.patch(
"/api/v1/teams/me",
json={"fields": [{"field_id": 2, "value": "NewCustomValue2"}]},
)
assert r.status_code == 200
assert TeamFieldEntries.query.count() == 2
assert (
TeamFieldEntries.query.filter_by(field_id=1, team_id=1).first().value
== "CustomValue1"
)
assert (
TeamFieldEntries.query.filter_by(field_id=2, team_id=1).first().value
== "NewCustomValue2"
)
with login_as_user(app, name="admin") as admin:
r = admin.patch(
"/api/v1/teams/1",
json={"fields": [{"field_id": 2, "value": "AdminNewCustomValue2"}]},
)
assert r.status_code == 200
assert TeamFieldEntries.query.count() == 2
assert (
TeamFieldEntries.query.filter_by(field_id=1, team_id=1).first().value
== "CustomValue1"
)
assert (
TeamFieldEntries.query.filter_by(field_id=2, team_id=1).first().value
== "AdminNewCustomValue2"
)
destroy_ctfd(app)
| 34.882494
| 87
| 0.440533
| 1,306
| 14,546
| 4.772588
| 0.096478
| 0.013477
| 0.039628
| 0.038184
| 0.871651
| 0.838922
| 0.783411
| 0.755816
| 0.731269
| 0.676721
| 0
| 0.023517
| 0.429946
| 14,546
| 416
| 88
| 34.966346
| 0.728172
| 0.014781
| 0
| 0.685637
| 0
| 0
| 0.172972
| 0.029736
| 0
| 0
| 0
| 0
| 0.124661
| 1
| 0.01355
| false
| 0.00271
| 0.00542
| 0
| 0.01897
| 0.00271
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d814bb85a11d6a82daadb95642fb12ef8f0f52bd
| 246
|
py
|
Python
|
tests/wfm/common.py
|
Pasarus/ess
|
ec0a7e3080137c51316513c394903d5d98473b44
|
[
"BSD-3-Clause"
] | null | null | null |
tests/wfm/common.py
|
Pasarus/ess
|
ec0a7e3080137c51316513c394903d5d98473b44
|
[
"BSD-3-Clause"
] | null | null | null |
tests/wfm/common.py
|
Pasarus/ess
|
ec0a7e3080137c51316513c394903d5d98473b44
|
[
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2021 Scipp contributors (https://github.com/scipp)
import scipp as sc
# TODO replace with sc.allclose after 0.8 scipp release
def allclose(x, y):
return sc.all(sc.isclose(x, y)).value
| 27.333333
| 66
| 0.731707
| 41
| 246
| 4.390244
| 0.780488
| 0.022222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033175
| 0.142276
| 246
| 8
| 67
| 30.75
| 0.819905
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
d82b3d6d5751c5bcb5a63d857e5df18689b4fee5
| 215
|
py
|
Python
|
conversational_sentence_encoder/definitions.py
|
SamEleos/ConveRT
|
311ec836e029aaa96b0bc93f1d1575ff8f8d359c
|
[
"Apache-2.0"
] | 38
|
2020-11-08T18:30:29.000Z
|
2022-03-01T09:29:54.000Z
|
conversational_sentence_encoder/definitions.py
|
SamEleos/ConveRT
|
311ec836e029aaa96b0bc93f1d1575ff8f8d359c
|
[
"Apache-2.0"
] | 2
|
2021-02-19T15:02:59.000Z
|
2021-07-16T15:38:05.000Z
|
conversational_sentence_encoder/definitions.py
|
SamEleos/ConveRT
|
311ec836e029aaa96b0bc93f1d1575ff8f8d359c
|
[
"Apache-2.0"
] | 10
|
2020-11-20T08:11:41.000Z
|
2022-02-28T18:33:10.000Z
|
nocontext_model = "https://github.com/davidalami/ConveRT/releases/download/1.0/nocontext_tf_model.tar.gz"
multicontext_model = "https://github.com/davidalami/ConveRT/releases/download/1.0/multicontext_tf_model.tar"
| 71.666667
| 108
| 0.827907
| 31
| 215
| 5.548387
| 0.483871
| 0.116279
| 0.186047
| 0.22093
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0
| 0.019139
| 0.027907
| 215
| 3
| 108
| 71.666667
| 0.803828
| 0
| 0
| 0
| 0
| 1
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dc46fed9cd426c0f10b6a777303cc423911ea2ff
| 92
|
py
|
Python
|
lib/layers/diffeq_layers/__init__.py
|
arnabgho/ffjord
|
06343e840b9d01c67a69508550be6447341fcdb6
|
[
"MIT"
] | 518
|
2018-11-14T19:03:52.000Z
|
2022-03-31T10:44:24.000Z
|
lib/layers/diffeq_layers/__init__.py
|
arnabgho/ffjord
|
06343e840b9d01c67a69508550be6447341fcdb6
|
[
"MIT"
] | 18
|
2020-06-03T22:12:01.000Z
|
2022-02-16T16:17:28.000Z
|
lib/layers/diffeq_layers/__init__.py
|
arnabgho/ffjord
|
06343e840b9d01c67a69508550be6447341fcdb6
|
[
"MIT"
] | 134
|
2018-11-15T23:06:03.000Z
|
2022-01-13T22:30:03.000Z
|
from .container import *
from .resnet import *
from .basic import *
from .wrappers import *
| 18.4
| 24
| 0.73913
| 12
| 92
| 5.666667
| 0.5
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 92
| 4
| 25
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc86adaaff40f927c39c4d5b583cfbaddfdbf4b3
| 18,600
|
py
|
Python
|
phasor/optics/polarization.py
|
mccullerlp/OpenLoop
|
fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d
|
[
"Apache-2.0"
] | 5
|
2018-02-28T00:43:37.000Z
|
2020-01-21T11:39:15.000Z
|
phasor/optics/polarization.py
|
mccullerlp/OpenLoop
|
fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d
|
[
"Apache-2.0"
] | 1
|
2019-09-07T23:15:43.000Z
|
2019-09-07T23:15:43.000Z
|
phasor/optics/polarization.py
|
mccullerlp/OpenLoop
|
fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d
|
[
"Apache-2.0"
] | 1
|
2020-08-21T04:42:09.000Z
|
2020-08-21T04:42:09.000Z
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import division, print_function, unicode_literals
import declarative as decl
from ..utilities.future_from_2 import super
from . import bases
from . import ports
from .mirror import Mirror
from .selective_mirrors import PolarizingMirror
from . import standard_attrs
class BaseRotator(
        bases.OpticalCouplerBase,
        bases.SystemElementBase
):
    """Common machinery for two-port polarization rotator elements.

    Exposes front (``po_Fr``) and back (``po_Bk``) optical ports and installs
    the rotation coupling between them.  The rotation angle comes from the
    generated ``rotate`` declarative attribute (degrees, default 0).  Exact
    multiples of 90 degrees are special-cased so the couplings stay exactly
    +/-1 instead of going through floating-point trigonometry.
    """

    @decl.dproperty
    def po_Fr(self):
        # front optical port; pchain links it to the back port
        return ports.OpticalPort(sname = 'po_Fr' , pchain = 'po_Bk')

    @decl.dproperty
    def po_Bk(self):
        # back optical port; mirror image of po_Fr
        return ports.OpticalPort(sname = 'po_Bk' , pchain = 'po_Fr')

    # default for the generated ``rotate`` attribute: attribute name and 0 deg
    _rotate_default = ('rotate_deg', 0)
    rotate = standard_attrs.generate_rotate()

    @decl.mproperty
    def ports_optical(self):
        # the two ports iterated by the setup methods below
        return [
            self.po_Fr,
            self.po_Bk,
        ]

    @decl.mproperty
    def pmap(self):
        # port map: each port forwards straight through to the opposite side
        return {
            self.po_Fr : self.po_Bk,
            self.po_Bk : self.po_Fr,
        }

    def system_setup_ports(self, ports_algorithm):
        """Declare which key couplings the solver must allocate.

        0/+-180 deg: polarization is preserved, keys pass straight through.
        +-90/+-270 deg: S and P swap exactly, so each key is forwarded with
        its polarization replaced.  Any other angle mixes S and P, so both
        output polarizations are requested for every input key.
        """
        if self.rotate_deg.val in (0, 180, -180):
            for port in self.ports_optical:
                for kfrom in ports_algorithm.port_update_get(port.i):
                    ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom)
                for kto in ports_algorithm.port_update_get(port.o):
                    ports_algorithm.port_coupling_needed(self.pmap[port].i, kto)
        elif self.rotate_deg.val in (90, -90, 270, -270):
            for port in self.ports_optical:
                for kfrom in ports_algorithm.port_update_get(port.i):
                    if kfrom & ports.PolS:
                        ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom.replace_keys(ports.PolP))
                    elif kfrom & ports.PolP:
                        ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom.replace_keys(ports.PolS))
                for kto in ports_algorithm.port_update_get(port.o):
                    if kto & ports.PolS:
                        ports_algorithm.port_coupling_needed(self.pmap[port].i, kto.replace_keys(ports.PolP))
                    elif kto & ports.PolP:
                        ports_algorithm.port_coupling_needed(self.pmap[port].i, kto.replace_keys(ports.PolS))
        else:
            # generic angle: every key can couple into both S and P
            for port in self.ports_optical:
                for kfrom in ports_algorithm.port_update_get(port.i):
                    ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom.replace_keys(ports.PolS))
                    ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom.replace_keys(ports.PolP))
                for kto in ports_algorithm.port_update_get(port.o):
                    ports_algorithm.port_coupling_needed(self.pmap[port].i, kto.replace_keys(ports.PolS))
                    ports_algorithm.port_coupling_needed(self.pmap[port].i, kto.replace_keys(ports.PolP))
        return

    def system_setup_coupling(self, matrix_algorithm):
        """Insert the rotation matrix elements into the system matrix.

        Note the sign of the swap/sine coupling flips between the po_Fr and
        po_Bk traversal directions (contrast FaradayRotator below, which uses
        the same sign for both directions).
        """
        if self.rotate_deg.val in (0, 180, -180):
            # identity up to an overall sign
            if self.rotate_deg.val == 0:
                cplg = 1
            elif self.rotate_deg.val in (180, -180):
                cplg = -1
            for port in self.ports_optical:
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    matrix_algorithm.port_coupling_insert(port.i, kfrom, self.pmap[port].o, kfrom, cplg)
        elif self.rotate_deg.val in (90, -90, 270, -270):
            # exact S <-> P swap with a direction-dependent sign
            if self.rotate_deg.val in (90, -270):
                cplg_O = 1
            elif self.rotate_deg.val in (-90, 270):
                cplg_O = -1
            for port in self.ports_optical:
                # the backwards traversal gets the opposite sign
                if port is self.po_Fr:
                    cplg = cplg_O
                else:
                    cplg = -cplg_O
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    if kfrom & ports.PolS:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolP),
                            cplg,
                        )
                    elif kfrom & ports.PolP:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolS),
                            -cplg,
                        )
        else:
            # general angle: 2x2 rotation [cos, sin; -sin, cos], with the
            # sine term's sign flipped for the back-to-front traversal
            cplgC = self.symbols.math.cos(self.rotate_deg.val / 180 * self.symbols.pi)
            cplgS_O = self.symbols.math.sin(self.rotate_deg.val / 180 * self.symbols.pi)
            for port in self.ports_optical:
                if port is self.po_Fr:
                    cplgS = cplgS_O
                else:
                    cplgS = -cplgS_O
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    if kfrom & ports.PolS:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom,
                            cplgC,
                        )
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolP),
                            cplgS,
                        )
                    elif kfrom & ports.PolP:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom,
                            cplgC,
                        )
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolS),
                            -cplgS,
                        )
        return
class PolarizationRotator(
        BaseRotator
):
    """Reciprocal polarization rotator.

    The coupling installed for this element is exactly the one built by
    ``BaseRotator.system_setup_coupling`` (sine/swap sign flips between the
    front and back traversal directions), so the inherited implementation is
    used as-is.

    The previous version of this class re-defined ``system_setup_coupling``
    with a body byte-identical to the base class's; that duplicate has been
    removed so the logic lives in one place.
    """
class FaradayRotator(
        BaseRotator
):
    """Non-reciprocal rotator.

    Unlike BaseRotator, the swap/sine coupling here uses the same sign for
    both the po_Fr and po_Bk traversal directions (there is no per-port sign
    flip), so rotations accumulate on a round trip.
    """
    def system_setup_coupling(self, matrix_algorithm):
        """Insert the rotation couplings; same angle handling as the base
        class but with direction-independent signs."""
        if self.rotate_deg.val in (0, 180, -180):
            # identity up to an overall sign
            if self.rotate_deg.val == 0:
                cplg = 1
            elif self.rotate_deg.val in (180, -180):
                cplg = -1
            for port in self.ports_optical:
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    matrix_algorithm.port_coupling_insert(port.i, kfrom, self.pmap[port].o, kfrom, cplg)
        elif self.rotate_deg.val in (90, -90, 270, -270):
            # exact S <-> P swap; note: no per-direction sign flip here
            if self.rotate_deg.val in (90, -270):
                cplg = 1
            elif self.rotate_deg.val in (-90, 270):
                cplg = -1
            for port in self.ports_optical:
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    if kfrom & ports.PolS:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolP),
                            cplg,
                        )
                    elif kfrom & ports.PolP:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolS),
                            -cplg,
                        )
        else:
            # general angle rotation; identical coefficients both directions
            cplgC = self.symbols.math.cos(self.rotate_deg.val / 180 * self.symbols.pi)
            cplgS = self.symbols.math.sin(self.rotate_deg.val / 180 * self.symbols.pi)
            for port in self.ports_optical:
                for kfrom in matrix_algorithm.port_set_get(port.i):
                    if kfrom & ports.PolS:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom,
                            cplgC,
                        )
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolP),
                            cplgS,
                        )
                    elif kfrom & ports.PolP:
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom,
                            cplgC,
                        )
                        matrix_algorithm.port_coupling_insert(
                            port.i,
                            kfrom,
                            self.pmap[port].o,
                            kfrom.replace_keys(ports.PolS),
                            -cplgS,
                        )
        return
class WavePlate(
        bases.OpticalCouplerBase,
        bases.SystemElementBase
):
    """Wave plate applying a per-polarization (complex) coupling.

    ``cplgS`` / ``cplgP`` multiply the S and P field components; ``cplgSC`` /
    ``cplgPC`` are used for RAISE (conjugate) field keys and default to the
    complex conjugates of ``cplgS`` / ``cplgP``.
    """
    def __init__(
            self,
            cplgP = 1,
            cplgPC = None,
            cplgS = 1,
            cplgSC = None,
            **kwargs
    ):
        #TODO Make these generic properties
        super().__init__(**kwargs)
        bases.PTREE_ASSIGN(self).cplgP = cplgP
        if cplgPC is None:
            # default: conjugate coupling for the conjugated field keys
            cplgPC = self.cplgP.conjugate()
        bases.PTREE_ASSIGN(self).cplgPC = cplgPC
        bases.PTREE_ASSIGN(self).cplgS = cplgS
        if cplgSC is None:
            cplgSC = self.cplgS.conjugate()
        bases.PTREE_ASSIGN(self).cplgSC = cplgSC

    @decl.dproperty
    def po_Fr(self):
        # front optical port (no pchain, unlike the rotators above)
        return ports.OpticalPort(sname = 'po_Fr')

    @decl.dproperty
    def po_Bk(self):
        # back optical port
        return ports.OpticalPort(sname = 'po_Bk')

    @decl.mproperty
    def ports_optical(self):
        return [
            self.po_Fr,
            self.po_Bk,
        ]

    @decl.mproperty
    def pmap(self):
        # each port couples straight through to the opposite one
        return {
            self.po_Fr : self.po_Bk,
            self.po_Bk : self.po_Fr,
        }

    def system_setup_ports(self, ports_algorithm):
        # polarization is preserved, so keys map through unchanged
        for port in self.ports_optical:
            for kfrom in ports_algorithm.port_update_get(port.i):
                ports_algorithm.port_coupling_needed(self.pmap[port].o, kfrom)
            for kto in ports_algorithm.port_update_get(port.o):
                ports_algorithm.port_coupling_needed(self.pmap[port].i, kto)
        return

    def system_setup_coupling(self, matrix_algorithm):
        """Apply the plain coupling to LOWER keys and the conjugate coupling
        to RAISE keys.

        NOTE(review): a key carrying neither LOWER nor RAISE would leave
        cplgS/cplgP unbound here (UnboundLocalError) -- presumably every key
        carries exactly one of the two; confirm against the ports module.
        """
        for port in self.ports_optical:
            for kfrom in matrix_algorithm.port_set_get(port.i):
                if kfrom & ports.PolS:
                    if ports.LOWER & kfrom:
                        cplgS = self.cplgS
                    elif ports.RAISE & kfrom:
                        cplgS = self.cplgSC
                    matrix_algorithm.port_coupling_insert(
                        port.i,
                        kfrom,
                        self.pmap[port].o,
                        kfrom,
                        cplgS,
                    )
                elif kfrom & ports.PolP:
                    if ports.LOWER & kfrom:
                        cplgP = self.cplgP
                    elif ports.RAISE & kfrom:
                        cplgP = self.cplgPC
                    matrix_algorithm.port_coupling_insert(
                        port.i,
                        kfrom,
                        self.pmap[port].o,
                        kfrom,
                        cplgP,
                    )
        return
class UnmountedQuarterWavePlate(WavePlate):
    """Quarter-wave plate with a fixed fast axis: unit coupling on S, a
    90-degree (1j) phase on P."""
    def __init__(self, **kwargs):
        # #TODO, use global/system ps_In
        super().__init__(cplgS = 1, cplgP = 1j, **kwargs)
class UnmountedHalfWavePlate(WavePlate):
    """Half-wave plate with a fixed fast axis: unit coupling on S, a sign
    flip (180-degree phase) on P."""
    def __init__(self, **kwargs):
        super().__init__(cplgS = 1, cplgP = -1, **kwargs)
class WavePlateMount(
        bases.OpticalCouplerBase,
        bases.SystemElementBase,
):
    """Rotatable mount wrapping a wave plate element.

    Sandwiches the supplied ``plate`` between two PolarizationRotators with
    opposite angles, so the plate's axis is effectively rotated by ``rotate``
    while the outer polarization frame is restored.
    """
    _rotate_default = ('rotate_deg', 0)
    rotate = standard_attrs.generate_rotate()

    @decl.dproperty
    def plate(self, sled):
        # declarative slot: the wave plate instance handed in by subclasses
        return sled

    def __init__(
            self,
            **kwargs
    ):
        super().__init__(**kwargs)
        # rotate into the plate frame at the front, back out at the rear
        self.own.coord_Fr = PolarizationRotator(rotate = self.rotate)
        self.own.coord_Bk = PolarizationRotator(rotate = -self.rotate)
        self.system.bond(self.coord_Fr.po_Bk, self.plate.po_Fr)
        self.system.bond(self.plate.po_Bk, self.coord_Bk.po_Fr)
        # expose the outer rotator ports as this element's own ports
        self.own.po_Fr = ports.PortIndirect(inner_port = self.coord_Fr.po_Fr, pchain = lambda : self.po_Bk)
        self.own.po_Bk = ports.PortIndirect(inner_port = self.coord_Bk.po_Bk, pchain = lambda : self.po_Fr)
class HalfWavePlate(WavePlateMount):
    """Half-wave plate in a rotatable mount."""
    def __init__(self, **kwargs):
        super().__init__(plate = UnmountedHalfWavePlate(), **kwargs)
class QuarterWavePlate(WavePlateMount):
    """Quarter-wave plate in a rotatable mount."""
    def __init__(self, **kwargs):
        super().__init__(plate = UnmountedQuarterWavePlate(), **kwargs)
class PolarizingBeamsplitter(PolarizingMirror):
    """Polarizing beamsplitter assembled from two Mirror elements.

    The "pass" polarization sees a mostly-transmissive mirror
    (T_hr = 1 - selection_defect); the other polarization sees a
    mostly-reflective one (T_hr = rejection_defect).

    Parameters: pass_polarization is 'P' or 'S' (case-insensitive);
    selection_defect / rejection_defect are the off-ideal transmissions;
    select_loss / reject_loss are the loss figures; AOI_deg is the angle of
    incidence.  Raises ValueError for an unrecognized pass_polarization.
    """
    #TODO allow loss
    def __init__(
            self,
            pass_polarization = 'P',
            selection_defect = 0,
            rejection_defect = 0,
            select_loss = 0,
            reject_loss = 0,
            AOI_deg = 45,
            **kwargs
    ):
        self.__init_ctree__(**kwargs)
        bases.PTREE_ASSIGN(self).pass_polarization = pass_polarization
        bases.PTREE_ASSIGN(self).selection_defect = selection_defect
        bases.PTREE_ASSIGN(self).rejection_defect = rejection_defect
        bases.PTREE_ASSIGN(self).select_loss = select_loss
        bases.PTREE_ASSIGN(self).reject_loss = reject_loss
        select_mirror = Mirror(
            T_hr = 1 - self.selection_defect,
            # NOTE(review): both mirrors use reject_loss for L_hr and
            # select_loss for L_t -- confirm this pairing is intended rather
            # than a select/reject mixup.
            L_hr = self.reject_loss,
            L_t = self.select_loss,
        )
        reject_mirror = Mirror(
            T_hr = self.rejection_defect,
            L_hr = self.reject_loss,
            L_t = self.select_loss,
        )
        if self.pass_polarization.upper() == 'P':
            super().__init__(
                mirror_P = select_mirror,
                mirror_S = reject_mirror,
                AOI_deg = AOI_deg,
                **kwargs
            )
        elif self.pass_polarization.upper() == 'S':
            super().__init__(
                mirror_S = select_mirror,
                mirror_P = reject_mirror,
                AOI_deg = AOI_deg,
                **kwargs
            )
        else:
            # BUG FIX: previously an unrecognized value fell through silently,
            # so super().__init__ never ran and the element was left
            # half-initialized.  Fail loudly instead.
            raise ValueError(
                "pass_polarization must be 'P' or 'S', got {!r}".format(pass_polarization)
            )
def polarization_opposite(pol_str):
    """Return the label of the orthogonal polarization.

    'S' <-> 'P', case-insensitively; any other input yields None.
    """
    return {'S': 'P', 'P': 'S'}.get(pol_str.upper())
| 36.54224
| 111
| 0.472204
| 1,846
| 18,600
| 4.519502
| 0.077465
| 0.082584
| 0.088098
| 0.045188
| 0.777778
| 0.745295
| 0.733789
| 0.725758
| 0.709697
| 0.709697
| 0
| 0.016145
| 0.447204
| 18,600
| 508
| 112
| 36.614173
| 0.795273
| 0.005376
| 0
| 0.751634
| 0
| 0
| 0.003083
| 0
| 0
| 0
| 0
| 0.001969
| 0
| 1
| 0.050109
| false
| 0.008715
| 0.017429
| 0.019608
| 0.137255
| 0.002179
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dcbacc4928a81a3b3a1b736699210b11d1fae816
| 4,177
|
py
|
Python
|
migrations/versions/f6056503a291_v0_1_0_feature_157.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 2
|
2021-08-19T12:35:25.000Z
|
2022-02-16T04:13:38.000Z
|
migrations/versions/f6056503a291_v0_1_0_feature_157.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 46
|
2021-09-02T03:22:05.000Z
|
2022-03-31T09:20:00.000Z
|
migrations/versions/f6056503a291_v0_1_0_feature_157.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 1
|
2021-11-17T23:18:27.000Z
|
2021-11-17T23:18:27.000Z
|
"""v0.1.0_feature/#157
Revision ID: f6056503a291
Revises: 0d4133a5b0e3
Create Date: 2021-06-23 16:00:23.143197
"""
from alembic import op
import sqlalchemy as sa
from app.database import get_db_schema, engine
# revision identifiers, used by Alembic.
revision = 'f6056503a291'       # this migration's id
down_revision = '0d4133a5b0e3'  # the revision this migration applies on top of
branch_labels = None
depends_on = None
def upgrade():
    """Rename ``idx_transfer`` columns ``transfer_from``/``transfer_to`` to
    ``from_address``/``to_address`` and rename their indexes to match.

    The columns are renamed in place (instead of the Alembic-autogenerated
    drop-column/add-column sequence) so existing row data is preserved; the
    backing indexes are then renamed with dialect-specific DDL.
    """
    op.alter_column('idx_transfer', 'transfer_from', new_column_name='from_address', existing_type=sa.String(length=42), schema=get_db_schema())
    op.alter_column('idx_transfer', 'to_address' if False else 'transfer_to', new_column_name='to_address', existing_type=sa.String(length=42), schema=get_db_schema())
    if engine.name == "postgresql":
        schema = get_db_schema()
        schema = f"{schema}." if schema is not None else ""
        op.execute(f"ALTER INDEX {schema}ix_idx_transfer_transfer_from RENAME TO ix_idx_transfer_from_address")
        # BUG FIX: this previously renamed the index to ix_idx_transfer_from_to,
        # which matched neither the to_address column nor the MySQL branch below.
        op.execute(f"ALTER INDEX {schema}ix_idx_transfer_transfer_to RENAME TO ix_idx_transfer_to_address")
    elif engine.name == "mysql":
        op.execute("ALTER TABLE idx_transfer RENAME INDEX ix_idx_transfer_transfer_from TO ix_idx_transfer_from_address")
        op.execute("ALTER TABLE idx_transfer RENAME INDEX ix_idx_transfer_transfer_to TO ix_idx_transfer_to_address")
def downgrade():
    """Revert :func:`upgrade`: rename ``from_address``/``to_address`` back to
    ``transfer_from``/``transfer_to`` and restore the original index names.
    """
    op.alter_column('idx_transfer', 'from_address', new_column_name='transfer_from', existing_type=sa.String(length=42), schema=get_db_schema())
    op.alter_column('idx_transfer', 'to_address', new_column_name='transfer_to', existing_type=sa.String(length=42), schema=get_db_schema())
    if engine.name == "postgresql":
        schema = get_db_schema()
        schema = f"{schema}." if schema is not None else ""
        op.execute(f"ALTER INDEX {schema}ix_idx_transfer_from_address RENAME TO ix_idx_transfer_transfer_from")
        # BUG FIX: this previously referenced ix_idx_transfer_from_to, a name
        # that only existed because of the matching typo in upgrade(); the
        # index for the to_address column is ix_idx_transfer_to_address.
        op.execute(f"ALTER INDEX {schema}ix_idx_transfer_to_address RENAME TO ix_idx_transfer_transfer_to")
    elif engine.name == "mysql":
        op.execute("ALTER TABLE idx_transfer RENAME INDEX ix_idx_transfer_from_address TO ix_idx_transfer_transfer_from")
        op.execute("ALTER TABLE idx_transfer RENAME INDEX ix_idx_transfer_to_address TO ix_idx_transfer_transfer_to")
| 60.536232
| 144
| 0.75006
| 621
| 4,177
| 4.681159
| 0.133655
| 0.181631
| 0.107327
| 0.128655
| 0.857929
| 0.812178
| 0.762986
| 0.762986
| 0.707258
| 0.685931
| 0
| 0.021219
| 0.119943
| 4,177
| 68
| 145
| 61.426471
| 0.769587
| 0.482643
| 0
| 0.275862
| 0
| 0
| 0.457338
| 0.227206
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.103448
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f4ef2a75f8cd489d939df4440f17d4293a5beb56
| 36
|
py
|
Python
|
src/__init__.py
|
grzegorznowak/tensorflow-rrn-server
|
1011ea465c298263fa177ba34ba0db0897985d8f
|
[
"Unlicense"
] | null | null | null |
src/__init__.py
|
grzegorznowak/tensorflow-rrn-server
|
1011ea465c298263fa177ba34ba0db0897985d8f
|
[
"Unlicense"
] | 4
|
2018-02-12T11:13:54.000Z
|
2018-02-12T20:24:23.000Z
|
src/__init__.py
|
grzegorznowak/tensorflow-rrn-server
|
1011ea465c298263fa177ba34ba0db0897985d8f
|
[
"Unlicense"
] | 1
|
2018-02-11T23:19:47.000Z
|
2018-02-11T23:19:47.000Z
|
from . import rnn_time_series_server
| 36
| 36
| 0.888889
| 6
| 36
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
761380b5806666a88551f4e37c9e6ba3f6f12400
| 34,715
|
py
|
Python
|
tests/test_constrained_numbers.py
|
LiamPattinson/proper_tea
|
617a4916db0b4a0e63fa14f80eec35c6f9acc74e
|
[
"MIT"
] | 1
|
2022-02-07T16:47:45.000Z
|
2022-02-07T16:47:45.000Z
|
tests/test_constrained_numbers.py
|
LiamPattinson/proper_tea
|
617a4916db0b4a0e63fa14f80eec35c6f9acc74e
|
[
"MIT"
] | 1
|
2022-02-09T11:11:32.000Z
|
2022-02-10T16:45:45.000Z
|
tests/test_constrained_numbers.py
|
LiamPattinson/proper_tea
|
617a4916db0b4a0e63fa14f80eec35c6f9acc74e
|
[
"MIT"
] | null | null | null |
import proper_tea as pt
import pytest
from itertools import product
@pytest.fixture
def basic_test():
    """Instance exposing one of each basic proper_tea descriptor."""
    class Holder:
        floating_point = pt.floating_point()
        integer = pt.integer()
        boolean = pt.boolean()

    return Holder()
class TestBasics:
    """Exercise the unconstrained descriptors: float, int and bool coercion."""

    def test_floating_point(self, basic_test):
        """floating_point accepts floats and ints, always storing a float."""
        for value, expected in [(81.3, 81.3), (60, 60)]:
            basic_test.floating_point = value
            assert basic_test.floating_point == expected
            assert isinstance(basic_test.floating_point, float)
        # non-numeric input must be rejected with a "convert" message
        with pytest.raises(ValueError) as err:
            basic_test.floating_point = "hello world"
        assert "convert" in str(err.value)

    def test_boolean(self, basic_test):
        """boolean accepts anything, storing its truthiness as a bool."""
        for value, truth in [
            (True, True),
            (False, False),
            (60.5, True),
            (0.0, False),
            ("hello world", True),
            ([], False),
            (None, False),
        ]:
            basic_test.boolean = value
            assert basic_test.boolean == truth
            assert isinstance(basic_test.boolean, bool)

    def test_integer(self, basic_test):
        """integer accepts ints and floats, rounding the latter to int."""
        for value, expected in [(27, 27), (60.5, 60)]:
            basic_test.integer = value
            assert basic_test.integer == expected
            assert isinstance(basic_test.integer, int)
        # non-numeric input must be rejected with a "convert" message
        with pytest.raises(ValueError) as err:
            basic_test.integer = "hello world"
        assert "convert" in str(err.value)
@pytest.fixture
def pos_neg_test(allow_zero: bool):
    """Instance with every sign-constrained descriptor, parametrized on
    ``allow_zero`` (supplied by the test's parametrize mark)."""
    class SignConstrained:
        pos = pt.positive(allow_zero=allow_zero)
        pos_float = pt.positive_float(allow_zero=allow_zero)
        pos_int = pt.positive_int(allow_zero=allow_zero)
        neg = pt.negative(allow_zero=allow_zero)
        neg_float = pt.negative_float(allow_zero=allow_zero)
        neg_int = pt.negative_int(allow_zero=allow_zero)

        def __init__(self):
            # record the parametrized flag so tests can branch on it
            self.allow_zero = allow_zero

    return SignConstrained()
class TestPosNeg:
    """Exercise the sign-constrained descriptors, with and without zero.

    The six test methods share one scenario, factored into ``_check``:
    accepted values keep value and type, wrong-signed values raise with the
    strict phrase, and zero flips between accepted and rejected depending on
    ``allow_zero`` (which also switches the message to "or equal to").
    """

    def _check(self, obj, attr, ok_cases, bad_values, phrase, zero_probe):
        """Drive one sign-constrained attribute through the shared scenario.

        ok_cases: (input, expected value, expected type) triples that must be
        accepted.  bad_values: inputs that must raise ValueError with
        ``phrase`` in the message.  zero_probe: a bad value re-tried in the
        allow_zero branch to confirm the message switches to the inclusive
        "or equal to" wording.
        """
        for value, expected, expected_type in ok_cases:
            setattr(obj, attr, value)
            assert getattr(obj, attr) == expected
            assert isinstance(getattr(obj, attr), expected_type)
        for value in bad_values:
            with pytest.raises(ValueError) as err:
                setattr(obj, attr, value)
            assert phrase in str(err.value)
        if obj.allow_zero:
            setattr(obj, attr, 0)
            assert getattr(obj, attr) == 0
            with pytest.raises(ValueError) as err:
                setattr(obj, attr, zero_probe)
            assert "or equal to" in str(err.value)
        else:
            with pytest.raises(ValueError) as err:
                setattr(obj, attr, 0)
            assert "or equal to" not in str(err.value)
        # non-numeric input is always rejected
        with pytest.raises(ValueError):
            setattr(obj, attr, "hello world")

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_positive(self, pos_neg_test):
        self._check(
            pos_neg_test, "pos",
            [(4, 4, int), (4.5, 4.5, float)],
            [-4, -4.5], "greater than", -4.5,
        )

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_positive_float(self, pos_neg_test):
        # ints are accepted but stored as float
        self._check(
            pos_neg_test, "pos_float",
            [(4.5, 4.5, float), (4, 4, float)],
            [-4.5, -4], "greater than", -4.5,
        )

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_positive_int(self, pos_neg_test):
        # floats are accepted but rounded toward zero to int
        self._check(
            pos_neg_test, "pos_int",
            [(4, 4, int), (4.4, 4, int)],
            [-4, -4.5], "greater than", -4,
        )

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_negative(self, pos_neg_test):
        self._check(
            pos_neg_test, "neg",
            [(-4, -4, int), (-4.5, -4.5, float)],
            [4, 4.5], "less than", 4.5,
        )

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_negative_float(self, pos_neg_test):
        # ints are accepted but stored as float
        self._check(
            pos_neg_test, "neg_float",
            [(-4.5, -4.5, float), (-4, -4, float)],
            [4.5, 4], "less than", 4.5,
        )

    @pytest.mark.parametrize("allow_zero", [False, True])
    def test_negative_int(self, pos_neg_test):
        # floats round toward zero (-4.4 -> -4)
        self._check(
            pos_neg_test, "neg_int",
            [(-4, -4, int), (-4.4, -4, int)],
            [4, 4.5], "less than", 4,
        )
@pytest.fixture
def gt_lt_test(limit: float, inclusive: bool):
    """Instance with every bound-constrained descriptor sharing one limit,
    parametrized on ``limit`` and ``inclusive`` via the test's marks."""
    class Bounded:
        gt = pt.greater_than(limit, inclusive=inclusive)
        gt_float = pt.float_greater_than(limit, inclusive=inclusive)
        gt_int = pt.int_greater_than(limit, inclusive=inclusive)
        lt = pt.less_than(limit, inclusive=inclusive)
        lt_float = pt.float_less_than(limit, inclusive=inclusive)
        lt_int = pt.int_less_than(limit, inclusive=inclusive)

        def __init__(self):
            # stash the parametrized settings so tests can branch on them
            self.limit = limit
            self.inclusive = inclusive

    return Bounded()
class TestGtLt:
@pytest.mark.parametrize(
"limit, inclusive",
[(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_greater_than(self, gt_lt_test):
limit = gt_lt_test.limit
# Ensure the user can assign any number over limit
gt_lt_test.gt = int(limit + 3)
assert gt_lt_test.gt == int(limit + 3)
assert isinstance(gt_lt_test.gt, int)
gt_lt_test.gt = limit + 3.5
assert gt_lt_test.gt == limit + 3.5
assert isinstance(gt_lt_test.gt, float)
# Ensure anything less than fails
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt = int(limit - 4)
assert "greater than" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt = limit - 6.2
assert "greater than" in str(excinfo.value)
# Ensure limit works if inclusive is true
if gt_lt_test.inclusive:
gt_lt_test.gt = limit
assert gt_lt_test.gt == limit
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt = limit - 1
assert "or equal to" in str(excinfo.value)
else:
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt = limit
assert "or equal to" not in str(excinfo.value)
# Ensure it won't allow non-numbers
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt = "hello world"
@pytest.mark.parametrize(
"limit, inclusive",
[(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_float_greater_than(self, gt_lt_test):
limit = gt_lt_test.limit
# Ensure the user can assign any float over limit
gt_lt_test.gt_float = limit + 2.2
assert gt_lt_test.gt_float == limit + 2.2
assert isinstance(gt_lt_test.gt_float, float)
# test that ints are converted
gt_lt_test.gt_float = int(limit + 1)
assert gt_lt_test.gt_float == int(limit + 1)
assert isinstance(gt_lt_test.gt_float, float)
# Ensure anything less than fails
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_float = limit - 1.1
assert "greater than" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_float = limit - 10
assert "greater than" in str(excinfo.value)
# Ensure limit works if inclusive is true
if gt_lt_test.inclusive:
gt_lt_test.gt_float = limit
assert gt_lt_test.gt_float == limit
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_float = limit - 1
assert "or equal to" in str(excinfo.value)
else:
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_float = limit
assert "or equal to" not in str(excinfo.value)
# Ensure it won't allow non-numbers
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_float = "hello world"
@pytest.mark.parametrize(
"limit, inclusive",
[(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_int_greater_than(self, gt_lt_test):
limit = gt_lt_test.limit
# Ensure the user can assign any int over limit
gt_lt_test.gt_int = limit + 7
assert gt_lt_test.gt_int == int(limit + 7)
assert isinstance(gt_lt_test.gt_int, int)
# test that floats are converted
gt_lt_test.gt_int = limit + 2.3
assert gt_lt_test.gt_int == int(limit + 2.3)
assert isinstance(gt_lt_test.gt_int, int)
# Ensure anything less than fails
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_int = limit - 1
assert "greater than" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_int = limit - 0.1
assert "greater than" in str(excinfo.value)
# Ensure limit works if inclusive is true
if gt_lt_test.inclusive:
gt_lt_test.gt_int = limit
assert gt_lt_test.gt_int == int(limit)
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_int = limit - 1
assert "or equal to" in str(excinfo.value)
else:
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_int = limit
assert "or equal to" not in str(excinfo.value)
# Ensure it won't allow non-numbers
with pytest.raises(ValueError) as excinfo:
gt_lt_test.gt_int = "hello world"
@pytest.mark.parametrize(
    "limit, inclusive",
    [(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_less_than(self, gt_lt_test):
    """Validate the untyped less-than descriptor (``lt``).

    Ints and floats are both accepted unchanged (no coercion);
    values at or above the limit must raise ValueError.
    """
    limit = gt_lt_test.limit
    # Ensure the user can assign any number under limit
    gt_lt_test.lt = int(limit - 3)
    assert gt_lt_test.lt == int(limit - 3)
    assert isinstance(gt_lt_test.lt, int)
    gt_lt_test.lt = limit - 3.5
    assert gt_lt_test.lt == limit - 3.5
    assert isinstance(gt_lt_test.lt, float)
    # Ensure anything greater than fails
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt = int(limit + 4)
    assert "less than" in str(excinfo.value)
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt = limit + 6.2
    assert "less than" in str(excinfo.value)
    # Ensure limit works if inclusive is true
    if gt_lt_test.inclusive:
        gt_lt_test.lt = limit
        assert gt_lt_test.lt == limit
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt = limit + 1
        # inclusive bound -> message mentions "or equal to"
        assert "or equal to" in str(excinfo.value)
    else:
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt = limit
        # exclusive bound -> message must NOT mention "or equal to"
        assert "or equal to" not in str(excinfo.value)
    # Ensure it won't allow non-numbers
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt = "hello world"
@pytest.mark.parametrize(
    "limit, inclusive",
    [(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_float_less_than(self, gt_lt_test):
    """Validate the less-than float descriptor (``lt_float``).

    Ints are coerced to float on assignment; values at or above the
    limit must raise ValueError.
    """
    limit = gt_lt_test.limit
    # Ensure the user can assign any float under limit
    gt_lt_test.lt_float = limit - 2.2
    assert gt_lt_test.lt_float == limit - 2.2
    assert isinstance(gt_lt_test.lt_float, float)
    # test that ints are converted
    gt_lt_test.lt_float = int(limit - 1)
    assert gt_lt_test.lt_float == int(limit - 1)
    assert isinstance(gt_lt_test.lt_float, float)
    # Ensure anything greater than fails
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_float = limit + 1.1
    assert "less than" in str(excinfo.value)
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_float = limit + 10
    assert "less than" in str(excinfo.value)
    # Ensure limit works if inclusive is true
    if gt_lt_test.inclusive:
        gt_lt_test.lt_float = limit
        assert gt_lt_test.lt_float == limit
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt_float = limit + 1
        # inclusive bound -> message mentions "or equal to"
        assert "or equal to" in str(excinfo.value)
    else:
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt_float = limit
        # exclusive bound -> message must NOT mention "or equal to"
        assert "or equal to" not in str(excinfo.value)
    # Ensure it won't allow non-numbers
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_float = "hello world"
@pytest.mark.parametrize(
    "limit, inclusive",
    [(5, False), (5.5, True), (5.5, False), (5, True)],
)
def test_int_less_than(self, gt_lt_test):
    """Validate the less-than int descriptor (``lt_int``).

    Floats are coerced to int on assignment; values at or above the
    limit must raise ValueError.
    """
    limit = gt_lt_test.limit
    # Ensure the user can assign any int under limit
    gt_lt_test.lt_int = limit - 7
    assert gt_lt_test.lt_int == int(limit - 7)
    assert isinstance(gt_lt_test.lt_int, int)
    # test that floats are converted (truncated toward zero by int())
    gt_lt_test.lt_int = limit - 2.3
    assert gt_lt_test.lt_int == int(limit - 2.3)
    assert isinstance(gt_lt_test.lt_int, int)
    # Ensure anything greater than fails
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_int = limit + 1
    assert "less than" in str(excinfo.value)
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_int = limit + 0.1
    assert "less than" in str(excinfo.value)
    # Ensure limit works if inclusive is true
    if gt_lt_test.inclusive:
        gt_lt_test.lt_int = limit
        assert gt_lt_test.lt_int == int(limit)
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt_int = limit + 1
        # inclusive bound -> message mentions "or equal to"
        assert "or equal to" in str(excinfo.value)
    else:
        with pytest.raises(ValueError) as excinfo:
            gt_lt_test.lt_int = limit
        # exclusive bound -> message must NOT mention "or equal to"
        assert "or equal to" not in str(excinfo.value)
    # Ensure it won't allow non-numbers
    with pytest.raises(ValueError) as excinfo:
        gt_lt_test.lt_int = "hello world"
@pytest.fixture
def ranged_test(bounds: (float, float), inclusive: (bool, bool)):
    """Build an instance exposing every ranged descriptor under test.

    The returned object also records ``bounds`` and a normalised
    ``inclusive`` pair so the tests can branch on them.
    """

    class RangedHolder:
        # One descriptor per ranged-property flavour, all sharing the
        # same bounds/inclusive configuration.
        in_range = pt.in_range(bounds=bounds, inclusive=inclusive)
        float_in_range = pt.float_in_range(bounds=bounds, inclusive=inclusive)
        int_in_range = pt.int_in_range(bounds=bounds, inclusive=inclusive)
        not_in_range = pt.not_in_range(bounds=bounds, inclusive=inclusive)
        float_not_in_range = pt.float_not_in_range(bounds=bounds, inclusive=inclusive)
        int_not_in_range = pt.int_not_in_range(bounds=bounds, inclusive=inclusive)

        def __init__(self):
            self.bounds = bounds
            # A single bool applies to both ends; normalise to a pair.
            self.inclusive = (
                (inclusive, inclusive) if isinstance(inclusive, bool) else inclusive
            )

    return RangedHolder()
def get_ranged_params():
    """Return the (bounds, inclusive) parameter grid for the ranged tests.

    Bounds combine every negative lower limit with every positive upper
    limit; the inclusive flags cover all (bool, bool) pairs plus the two
    single-bool shorthands.
    """
    lower_limits = (-2.2, -2)
    upper_limits = (+2.2, +2)
    flag_pairs = list(product((False, True), (False, True)))
    inclusive_options = flag_pairs + [False, True]
    return product(product(lower_limits, upper_limits), inclusive_options)
class TestRanged:
    """Tests for the in-range / not-in-range property descriptors.

    Each test checks the happy path, rejection of out-of-contract values
    (asserting on the ValueError message), the boundary behaviour for
    inclusive vs. exclusive ends — the message uses "["/"]" for inclusive
    ends and "("/")" for exclusive ones — and rejection of non-numbers.
    """

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_in_range(self, ranged_test):
        """Untyped in-range descriptor: preserves int/float type."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number within bounds
        ranged_test.in_range = 0.5 * sum(bounds)
        assert ranged_test.in_range == 0.5 * sum(bounds)
        assert isinstance(ranged_test.in_range, float)
        ranged_test.in_range = int(0.5 * sum(bounds))
        assert ranged_test.in_range == int(0.5 * sum(bounds))
        assert isinstance(ranged_test.in_range, int)
        # Ensure anything outside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.in_range = bounds[0] - 1
        assert "range" in str(excinfo.value)
        with pytest.raises(ValueError) as excinfo:
            ranged_test.in_range = bounds[1] + 1
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.in_range = bounds[0]
            assert ranged_test.in_range == bounds[0]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.in_range = bounds[0] - 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.in_range = bounds[1]
            assert ranged_test.in_range == bounds[1]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.in_range = bounds[1] + 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.in_range = "hello world"

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_float_in_range(self, ranged_test):
        """Float in-range descriptor: ints are coerced to float."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number within bounds
        ranged_test.float_in_range = 0.5 * sum(bounds)
        assert ranged_test.float_in_range == 0.5 * sum(bounds)
        assert isinstance(ranged_test.float_in_range, float)
        ranged_test.float_in_range = int(0.5 * sum(bounds))
        assert ranged_test.float_in_range == int(0.5 * sum(bounds))
        assert isinstance(ranged_test.float_in_range, float)
        # Ensure anything outside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.float_in_range = bounds[0] - 1
        assert "range" in str(excinfo.value)
        with pytest.raises(ValueError) as excinfo:
            ranged_test.float_in_range = bounds[1] + 1
        # FIX: message check was missing here, unlike the sibling tests.
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.float_in_range = bounds[0]
            assert ranged_test.float_in_range == bounds[0]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_in_range = bounds[0] - 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.float_in_range = bounds[1]
            assert ranged_test.float_in_range == bounds[1]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_in_range = bounds[1] + 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.float_in_range = "hello world"

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_int_in_range(self, ranged_test):
        """Int in-range descriptor: floats are coerced to int."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number within bounds
        ranged_test.int_in_range = 0.5 * sum(bounds)
        assert ranged_test.int_in_range == int(0.5 * sum(bounds))
        assert isinstance(ranged_test.int_in_range, int)
        # Ensure anything outside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.int_in_range = bounds[0] - 1
        assert "range" in str(excinfo.value)
        with pytest.raises(ValueError) as excinfo:
            ranged_test.int_in_range = bounds[1] + 1
        # FIX: message check was missing here, unlike the sibling tests.
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.int_in_range = bounds[0]
            assert ranged_test.int_in_range == int(bounds[0])
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_in_range = bounds[0] - 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.int_in_range = bounds[1]
            assert ranged_test.int_in_range == int(bounds[1])
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_in_range = bounds[1] + 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.int_in_range = "hello world"

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_not_in_range(self, ranged_test):
        """Untyped not-in-range descriptor: preserves int/float type."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number outside bounds
        ranged_test.not_in_range = bounds[0] - 1.1
        assert ranged_test.not_in_range == bounds[0] - 1.1
        assert isinstance(ranged_test.not_in_range, float)
        ranged_test.not_in_range = bounds[1] + 1.1
        assert ranged_test.not_in_range == bounds[1] + 1.1
        assert isinstance(ranged_test.not_in_range, float)
        ranged_test.not_in_range = int(bounds[0]) - 1
        assert ranged_test.not_in_range == int(bounds[0]) - 1
        assert isinstance(ranged_test.not_in_range, int)
        ranged_test.not_in_range = int(bounds[1]) + 1
        assert ranged_test.not_in_range == int(bounds[1]) + 1
        assert isinstance(ranged_test.not_in_range, int)
        # Ensure anything inside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.not_in_range = 0.5 * sum(bounds)
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.not_in_range = bounds[0]
            assert ranged_test.not_in_range == bounds[0]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.not_in_range = bounds[0] + 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.not_in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.not_in_range = bounds[1]
            assert ranged_test.not_in_range == bounds[1]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.not_in_range = bounds[1] - 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.not_in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.not_in_range = "hello world"

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_float_not_in_range(self, ranged_test):
        """Float not-in-range descriptor: ints are coerced to float."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number outside bounds
        ranged_test.float_not_in_range = bounds[0] - 1.1
        assert ranged_test.float_not_in_range == bounds[0] - 1.1
        assert isinstance(ranged_test.float_not_in_range, float)
        ranged_test.float_not_in_range = bounds[1] + 1.1
        assert ranged_test.float_not_in_range == bounds[1] + 1.1
        assert isinstance(ranged_test.float_not_in_range, float)
        ranged_test.float_not_in_range = int(bounds[0]) - 1
        assert ranged_test.float_not_in_range == int(bounds[0]) - 1
        assert isinstance(ranged_test.float_not_in_range, float)
        ranged_test.float_not_in_range = int(bounds[1]) + 1
        assert ranged_test.float_not_in_range == int(bounds[1]) + 1
        assert isinstance(ranged_test.float_not_in_range, float)
        # Ensure anything inside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.float_not_in_range = 0.5 * sum(bounds)
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.float_not_in_range = bounds[0]
            assert ranged_test.float_not_in_range == bounds[0]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_not_in_range = bounds[0] + 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_not_in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.float_not_in_range = bounds[1]
            assert ranged_test.float_not_in_range == bounds[1]
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_not_in_range = bounds[1] - 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.float_not_in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.float_not_in_range = "hello world"

    @pytest.mark.parametrize("bounds, inclusive", get_ranged_params())
    def test_int_not_in_range(self, ranged_test):
        """Int not-in-range descriptor: floats are coerced to int."""
        bounds = ranged_test.bounds
        inclusive = ranged_test.inclusive
        # Ensure the user can assign number outside bounds
        ranged_test.int_not_in_range = bounds[0] - 1.1
        assert ranged_test.int_not_in_range == int(bounds[0] - 1.1)
        assert isinstance(ranged_test.int_not_in_range, int)
        ranged_test.int_not_in_range = bounds[1] + 1.1
        assert ranged_test.int_not_in_range == int(bounds[1] + 1.1)
        assert isinstance(ranged_test.int_not_in_range, int)
        # Ensure anything inside fails
        with pytest.raises(ValueError) as excinfo:
            ranged_test.int_not_in_range = 0.5 * sum(bounds)
        assert "range" in str(excinfo.value)
        # Ensure limit works if inclusive is true
        if inclusive[0]:
            ranged_test.int_not_in_range = bounds[0]
            assert ranged_test.int_not_in_range == int(bounds[0])
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_not_in_range = bounds[0] + 1
            assert "[" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_not_in_range = bounds[0]
            assert "(" in str(excinfo.value)
        if inclusive[1]:
            ranged_test.int_not_in_range = bounds[1]
            assert ranged_test.int_not_in_range == int(bounds[1])
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_not_in_range = bounds[1] - 1
            assert "]" in str(excinfo.value)
        else:
            with pytest.raises(ValueError) as excinfo:
                ranged_test.int_not_in_range = bounds[1]
            assert ")" in str(excinfo.value)
        # Ensure it won't allow non-numbers
        with pytest.raises(ValueError) as excinfo:
            ranged_test.int_not_in_range = "hello world"
| 43.557089
| 86
| 0.62751
| 4,784
| 34,715
| 4.331731
| 0.026129
| 0.067075
| 0.077981
| 0.126719
| 0.94909
| 0.934469
| 0.902862
| 0.869565
| 0.820586
| 0.780196
| 0
| 0.015202
| 0.287541
| 34,715
| 796
| 87
| 43.611809
| 0.822666
| 0.093389
| 0
| 0.634992
| 0
| 0
| 0.034349
| 0
| 0
| 0
| 0
| 0
| 0.309201
| 1
| 0.043741
| false
| 0
| 0.004525
| 0.001508
| 0.099548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
520fae20a058a9b45fe50d3346dbfb706cc55341
| 203
|
py
|
Python
|
idle.py
|
oudoubleyang/KumaTea-bot
|
6e6d7b200decf89c1cfbca870a461ac5d04c6a10
|
[
"MIT"
] | null | null | null |
idle.py
|
oudoubleyang/KumaTea-bot
|
6e6d7b200decf89c1cfbca870a461ac5d04c6a10
|
[
"MIT"
] | null | null | null |
idle.py
|
oudoubleyang/KumaTea-bot
|
6e6d7b200decf89c1cfbca870a461ac5d04c6a10
|
[
"MIT"
] | null | null | null |
from session import idle_mark
def set_busy(operation):
    """Decorator that flags the session as busy while *operation* runs.

    ``idle_mark.buf[0]`` is set to 0 (busy) before calling *operation*
    and restored to 1 (idle) afterwards.

    Fixes over the previous version:
    - the wrapped call's return value is now propagated to the caller
      (it was silently discarded);
    - the idle flag is restored in a ``finally`` block, so an exception
      inside *operation* no longer leaves the session stuck as busy.
    """
    def wrapper(*args, **kwargs):
        idle_mark.buf[0] = 0  # mark busy
        try:
            return operation(*args, **kwargs)
        finally:
            idle_mark.buf[0] = 1  # always return to idle, even on error
    return wrapper
| 20.3
| 34
| 0.62069
| 28
| 203
| 4.357143
| 0.571429
| 0.196721
| 0.229508
| 0.295082
| 0.360656
| 0.360656
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.261084
| 203
| 9
| 35
| 22.555556
| 0.786667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
5219acb28e253d76c57a33161817b2dec24cc844
| 1,502
|
py
|
Python
|
robot/migrations/0004_auto_20200104_1405.py
|
Misschl/wechat
|
8ce76dae32b1086bb83ee6e3fe64cf84845012c0
|
[
"Apache-2.0"
] | 1
|
2020-01-07T06:51:19.000Z
|
2020-01-07T06:51:19.000Z
|
robot/migrations/0004_auto_20200104_1405.py
|
Misschl/wechat
|
8ce76dae32b1086bb83ee6e3fe64cf84845012c0
|
[
"Apache-2.0"
] | null | null | null |
robot/migrations/0004_auto_20200104_1405.py
|
Misschl/wechat
|
8ce76dae32b1086bb83ee6e3fe64cf84845012c0
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.1.5 on 2020-01-04 14:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration.

    Switches the ``insert_time``/``update_time`` columns of ``appmodel``,
    ``wxgroup`` and ``wxuser`` to Django-managed timestamps
    (``auto_now_add``/``auto_now``) and makes ``wxuser.is_friend`` nullable.
    """

    # Must be applied after the previous migration of the 'robot' app.
    dependencies = [
        ('robot', '0003_auto_20200104_1236'),
    ]

    operations = [
        migrations.AlterField(
            model_name='appmodel',
            name='insert_time',
            # auto_now_add: stamped once at row creation (verbose_name: "insert time")
            field=models.DateTimeField(auto_now_add=True, verbose_name='插入时间'),
        ),
        migrations.AlterField(
            model_name='appmodel',
            name='update_time',
            # auto_now: refreshed on every save (verbose_name: "update time")
            field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
        ),
        migrations.AlterField(
            model_name='wxgroup',
            name='insert_time',
            field=models.DateTimeField(auto_now_add=True, verbose_name='插入时间'),
        ),
        migrations.AlterField(
            model_name='wxgroup',
            name='update_time',
            field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
        ),
        migrations.AlterField(
            model_name='wxuser',
            name='insert_time',
            field=models.DateTimeField(auto_now_add=True, verbose_name='插入时间'),
        ),
        migrations.AlterField(
            model_name='wxuser',
            name='is_friend',
            # null=True allows the friendship state to be unknown
            field=models.BooleanField(null=True),
        ),
        migrations.AlterField(
            model_name='wxuser',
            name='update_time',
            field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
        ),
    ]
| 30.653061
| 79
| 0.57723
| 147
| 1,502
| 5.680272
| 0.319728
| 0.167665
| 0.209581
| 0.243114
| 0.776048
| 0.776048
| 0.644311
| 0.644311
| 0.644311
| 0.644311
| 0
| 0.029722
| 0.305593
| 1,502
| 48
| 80
| 31.291667
| 0.770853
| 0.02996
| 0
| 0.785714
| 1
| 0
| 0.120275
| 0.015808
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
521f997ffbc78cfe164d99778ed26b87425cbb5f
| 27
|
py
|
Python
|
src/data/alphabets/elements.py
|
PhilHarnish/forge
|
663f19d759b94d84935c14915922070635a4af65
|
[
"MIT"
] | 2
|
2020-08-18T18:43:09.000Z
|
2020-08-18T20:05:59.000Z
|
src/data/alphabets/elements.py
|
PhilHarnish/forge
|
663f19d759b94d84935c14915922070635a4af65
|
[
"MIT"
] | null | null | null |
src/data/alphabets/elements.py
|
PhilHarnish/forge
|
663f19d759b94d84935c14915922070635a4af65
|
[
"MIT"
] | null | null | null |
# TODO: Element JSON data.
| 13.5
| 26
| 0.703704
| 4
| 27
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 27
| 1
| 27
| 27
| 0.863636
| 0.888889
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 1
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
524eeeb2e1c5a576f64146964b53d9ef941d4794
| 11,296
|
py
|
Python
|
graviti/openapi/sheet.py
|
AChenQ/graviti-python-sdk
|
e321234c05d6c2cbaf55c2314b4bcdfba0e152d7
|
[
"MIT"
] | null | null | null |
graviti/openapi/sheet.py
|
AChenQ/graviti-python-sdk
|
e321234c05d6c2cbaf55c2314b4bcdfba0e152d7
|
[
"MIT"
] | null | null | null |
graviti/openapi/sheet.py
|
AChenQ/graviti-python-sdk
|
e321234c05d6c2cbaf55c2314b4bcdfba0e152d7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2022 Graviti. Licensed under MIT License.
#
"""Interfaces about the sheet."""
from typing import Any, Dict, Optional
from urllib.parse import urljoin
from graviti.openapi.requests import open_api_do
def _list_sheet(
    access_key: str,
    url: str,
    with_record_count: Optional[bool],
    offset: int,
    limit: int,
) -> Dict[str, Any]:
    """Issue the shared "list sheets" GET request and return its JSON body."""
    params: Dict[str, Any] = dict(offset=offset, limit=limit)
    # The server applies its own default when the flag is omitted.
    if with_record_count is not None:
        params["with_record_count"] = with_record_count
    response = open_api_do("GET", access_key, url, params=params)
    return response.json()  # type: ignore[no-any-return]
def _get_sheet(
    access_key: str,
    url: str,
    with_record_count: Optional[bool],
    schema_format: Optional[str],
) -> Dict[str, Any]:
    """Issue the shared "get sheet" GET request and return its JSON body."""
    params: Dict[str, Any] = {}
    # Only forward the optional query parameters that were actually given.
    for key, value in (
        ("schema_format", schema_format),
        ("with_record_count", with_record_count),
    ):
        if value is not None:
            params[key] = value
    response = open_api_do("GET", access_key, url, params=params)
    return response.json()  # type: ignore[no-any-return]
def create_sheet(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    draft_number: int,
    name: str,
    schema: str,
    _avro_schema: str,
    _arrow_schema: Optional[str] = None,
    record_key_strategy: Optional[str] = None,
) -> None:
    """Execute the OpenAPI `POST /v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        draft_number: The draft number.
        name: The sheet name.
        schema: The portex schema of the sheet.
        record_key_strategy: The ``__record_key`` generation strategy.
            If None, it is batch auto-increment sorting record key.

    Examples:
        >>> create_sheet(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     draft_number = 1,
        ...     name = "val",
        ...     schema = '{"imports": [{"repo": "https://github.com/Project-OpenBytes/standard@\
main", "types": [{"name": "file.Image"}]}], "type": "record", "fields": [{"name": "filename", \
"type": "string"}, {"name": "image", "type": "file.Image"}]}',
        ...     _avro_schema = '{"type": "record", "name": "root", "namespace": "cn.graviti.portex"\
, "aliases": [], "fields": [{"name": "filename", "type": "string"}, {"name": "image", "type": \
{"type": "record", "name": "image", "namespace": "cn.graviti.portex.root", "aliases": [], \
"fields": [{"name": "checksum", "type": [null, "string"]}]}}]}',
        ... )

    """
    sheets_url = urljoin(url, f"v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets")
    post_data: Dict[str, Any] = {
        "name": name,
        "schema": schema,
        "_avro_schema": _avro_schema,
    }
    # Include the optional fields only when the caller supplied them.
    for key, value in (
        ("_arrow_schema", _arrow_schema),
        ("record_key_strategy", record_key_strategy),
    ):
        if value is not None:
            post_data[key] = value
    open_api_do("POST", access_key, sheets_url, json=post_data)
def list_draft_sheets(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    draft_number: int,
    with_record_count: Optional[bool] = None,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        draft_number: The draft number.
        with_record_count: Whether return the record count of each sheet. The default value of
            this param in OpenAPI is False.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI.

    Examples:
        >>> list_draft_sheets(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     draft_number = 1,
        ... )
        {
            "sheets": [
                {
                    "name": "test",
                    "created_at": "2021-03-03T18:58:10Z",
                    "updated_at": "2021-03-04T18:58:10Z",
                },
                {
                    "name": "trainval",
                    "created_at": "2021-03-05T18:58:10Z",
                    "updated_at": "2021-03-06T18:58:10Z",
                }
            ],
            "offset": 0,
            "record_size": 2,
            "total_count": 2
        }

    """
    sheets_url = urljoin(url, f"v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets")
    return _list_sheet(
        access_key,
        sheets_url,
        with_record_count=with_record_count,
        offset=offset,
        limit=limit,
    )
def list_commit_sheets(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    commit_id: str,
    with_record_count: Optional[bool] = None,
    offset: int = 0,
    limit: int = 128,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v2/datasets/{owner}/{dataset}/commits/{commit_id}/sheets`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        commit_id: The commit id.
        with_record_count: Whether return the record count of each sheet. The default value of
            this param in OpenAPI is False.
        offset: The offset of the page.
        limit: The limit of the page.

    Returns:
        The response of OpenAPI.

    Examples:
        >>> list_commit_sheets(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     commit_id = "fde63f357daf46088639e9f57fd81cad",
        ... )
        {
            "sheets": [
                {
                    "name": "test",
                    "created_at": "2021-03-03T18:58:10Z",
                    "updated_at": "2021-03-04T18:58:10Z",
                },
                {
                    "name": "trainval",
                    "created_at": "2021-03-05T18:58:10Z",
                    "updated_at": "2021-03-06T18:58:10Z",
                }
            ],
            "offset": 0,
            "record_size": 2,
            "total_count": 2
        }

    """
    sheets_url = urljoin(url, f"v2/datasets/{owner}/{dataset}/commits/{commit_id}/sheets")
    return _list_sheet(
        access_key,
        sheets_url,
        with_record_count=with_record_count,
        offset=offset,
        limit=limit,
    )
def get_draft_sheet(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    draft_number: int,
    sheet: str,
    with_record_count: Optional[bool] = None,
    schema_format: Optional[str] = None,
) -> Dict[str, Any]:
    """Execute the OpenAPI \
`GET /v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets/{sheet}`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        draft_number: The draft number.
        sheet: The sheet name.
        with_record_count: Whether return the record count of each sheet. The default value of
            this param in OpenAPI is False.
        schema_format: Fill "JSON"/"YAML" to determine whether the schema_format of the returned
            schema is json or yaml. None means "JSON" format.

    Returns:
        The response of OpenAPI.

    Examples:
        >>> get_draft_sheet(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     draft_number = 1,
        ...     sheet = "sheet-2",
        ...     with_record_count=True,
        ... )
        {
            "name": "trainval",
            "created_at": "2021-03-05T18:58:10Z",
            "updated_at": "2021-03-06T18:58:10Z",
            "record_count": 10000,
            "schema": '{"imports": [{"repo": "https://github.com/Project-OpenBytes/...'
        }

    """
    # NOTE: the documented endpoint previously omitted the trailing
    # "/{sheet}" segment that the request below actually targets.
    url = urljoin(url, f"v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets/{sheet}")
    return _get_sheet(
        access_key, url, with_record_count=with_record_count, schema_format=schema_format
    )
def get_commit_sheet(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    commit_id: str,
    sheet: str,
    with_record_count: Optional[bool] = None,
    schema_format: Optional[str] = None,
) -> Dict[str, Any]:
    """Execute the OpenAPI `GET /v2/datasets/{owner}/{dataset}/commits/{commit_id}/sheets/{sheet}`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        commit_id: The commit id.
        sheet: The sheet name.
        with_record_count: Whether return the record count of each sheet. The default value of
            this param in OpenAPI is False.
        schema_format: Fill "JSON"/"YAML" to determine whether the schema_format of the returned
            schema is json or yaml. None means "JSON" format.

    Returns:
        The response of OpenAPI.

    Examples:
        >>> get_commit_sheet(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     commit_id = "fde63f357daf46088639e9f57fd81cad",
        ...     sheet = "sheet-2",
        ...     with_record_count=True,
        ... )
        {
            "name": "trainval",
            "created_at": "2021-03-05T18:58:10Z",
            "updated_at": "2021-03-06T18:58:10Z",
            "record_count": 10000,
            "schema": '{"imports": [{"repo": "https://github.com/Project-OpenBytes/...'
        }

    """
    url = urljoin(url, f"v2/datasets/{owner}/{dataset}/commits/{commit_id}/sheets/{sheet}")
    return _get_sheet(
        access_key, url, with_record_count=with_record_count, schema_format=schema_format
    )
def delete_sheet(
    access_key: str,
    url: str,
    owner: str,
    dataset: str,
    *,
    draft_number: int,
    sheet: str,
) -> None:
    """Execute the OpenAPI `DELETE /v2/datasets/{owner}/{dataset}/drafts/\
{draft_number}/sheets/{sheet}`.

    Arguments:
        access_key: User's access key.
        url: The URL of the graviti website.
        owner: The owner of the dataset.
        dataset: Name of the dataset, unique for a user.
        draft_number: The draft number.
        sheet: The name of the sheet to be deleted.

    Examples:
        >>> delete_sheet(
        ...     "ACCESSKEY-********",
        ...     "https://api.graviti.com/",
        ...     "czhual",
        ...     "MNIST",
        ...     draft_number=1,
        ...     sheet="sheet-2"
        ... )

    """
    sheet_url = urljoin(
        url, f"v2/datasets/{owner}/{dataset}/drafts/{draft_number}/sheets/{sheet}"
    )
    open_api_do("DELETE", access_key, sheet_url)
| 31.20442
| 100
| 0.560641
| 1,324
| 11,296
| 4.623867
| 0.120091
| 0.057498
| 0.063705
| 0.043123
| 0.818523
| 0.81297
| 0.81297
| 0.804476
| 0.780627
| 0.774257
| 0
| 0.031694
| 0.296123
| 11,296
| 361
| 101
| 31.290859
| 0.738272
| 0.601894
| 0
| 0.692308
| 0
| 0
| 0.13319
| 0.098818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068376
| false
| 0
| 0.025641
| 0
| 0.145299
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
52631bf8590822d6db16e7a6d2cdb9fbec82d642
| 20,505
|
py
|
Python
|
misc/visitplot.py
|
barbagroup/cloud-repro
|
ca1a4fbf2188219d3d326e2d56f6cf79ae552eb8
|
[
"BSD-3-Clause"
] | 9
|
2019-09-19T17:27:22.000Z
|
2021-12-01T03:23:16.000Z
|
misc/visitplot.py
|
barbagroup/cloud-repro
|
ca1a4fbf2188219d3d326e2d56f6cf79ae552eb8
|
[
"BSD-3-Clause"
] | 3
|
2019-07-29T13:25:03.000Z
|
2020-05-05T19:51:41.000Z
|
misc/visitplot.py
|
barbagroup/cloud-repro
|
ca1a4fbf2188219d3d326e2d56f6cf79ae552eb8
|
[
"BSD-3-Clause"
] | 5
|
2019-06-21T11:35:44.000Z
|
2021-12-01T03:23:32.000Z
|
"""Functions to generate plots of the flow field with VisIt."""
import os
import sys
import yaml
def visit_check_version(version):
    """Print the running VisIt version and warn if it was never tested.

    The script is known to work with the version it was created with
    plus a short list of later releases; anything else gets warnings
    but is not rejected.
    """
    created_with = '2.12.1'
    tested_with = [created_with, '2.12.3']
    print('VisIt version: {}\n'.format(version))
    if version in tested_with:
        return
    # Untested version: emit warnings but keep going.
    print('[warning] You are using VisIt-{}.'.format(version))
    print('[warning] This script was created with VisIt-{}.'
          .format(created_with))
    print('[warning] This script was tested with versions: {}.'
          .format(tested_with))
    print('[warning] It may not work as expected')
def visit_plot_pseudocolor_2d(xdmf_path, name,
                              value_range=(-5.0, 5.0),
                              curve2d_paths=None,
                              config_view=None,
                              out_dir=None, out_prefix='wake2d_',
                              figsize=(1024, 1024),
                              visit_dir=None, visit_arch='linux-x86_64',
                              state=None, states=None,
                              states_range=(0, None, 1)):
    """Render and save 2D pseudocolor plots of a scalar field with VisIt.

    Parameters
    ----------
    xdmf_path : str
        Path of the XDMF file with the scalar field.
    name : str
        Name of the scalar field to render.
    value_range : tuple of 2 floats, optional
        Limits of the pseudocolor range; default: (-5.0, 5.0).
    curve2d_paths : list of str, optional
        Paths of Curve2D files with the immersed-boundary coordinates;
        default: None (no boundary drawn).
    config_view : str, optional
        Path of a YAML file with a 'View2DAtts' section; default: None.
    out_dir : str, optional
        Directory in which to save the figures; default: None, meaning
        the current working directory at call time.  (The previous
        default of ``os.getcwd()`` was evaluated once at import time.)
    out_prefix : str, optional
        Prefix of the output file names; default: 'wake2d_'.
    figsize : tuple of 2 ints, optional
        Width and height (in pixels) of the figure; default: (1024, 1024).
    visit_dir : str, optional
        Path of the VisIt installation directory; default: None
        (read the VISIT_DIR environment variable).
    visit_arch : str, optional
        Architecture sub-directory of the VisIt installation;
        default: 'linux-x86_64'.
    state : int, optional
        Single state to render; default: None.
    states : iterable of ints, optional
        Explicit states to render; default: None.
    states_range : sequence of 3 ints, optional
        (start, end, step) used when neither `state` nor `states` is
        given; an end of None means "up to the last state";
        default: (0, None, 1).

    Raises
    ------
    ValueError
        If the VisIt installation directory cannot be determined.
    """
    if out_dir is None:
        out_dir = os.getcwd()
    # Import VisIt package.
    if visit_dir is None:
        visit_dir = os.environ.get('VISIT_DIR')
        if visit_dir is None:
            raise ValueError('Provide VisIt installation path or '
                             'set env variable VISIT_DIR')
    sys.path.append(os.path.join(visit_dir, visit_arch,
                                 'lib', 'site-packages'))
    import visit
    visit.LaunchNowin()
    # Check version of VisIt.
    visit_check_version(visit.Version())
    # Create database correlation with optional Curve2D files.
    num_bodies = 0
    databases = [str(xdmf_path)]
    if curve2d_paths is not None:
        num_bodies = len(curve2d_paths)
        databases = [str(path) for path in curve2d_paths]
        databases.append(str(xdmf_path))
    visit.CreateDatabaseCorrelation('common', databases[num_bodies:], 0)
    # Open the file with the coordinates of the immersed boundary.
    if num_bodies > 0:
        for i in range(num_bodies):
            visit.OpenDatabase(databases[i], 0)
            # Add plot the mesh points.
            visit.AddPlot('Curve', 'curve', 1, 1)
            # Set attributes of the curve.
            CurveAtts = visit.CurveAttributes()
            CurveAtts.lineWidth = 1
            CurveAtts.curveColorSource = CurveAtts.Custom
            CurveAtts.curveColor = (0, 0, 0, 255)
            CurveAtts.showLegend = 0
            CurveAtts.showLabels = 0
            visit.SetPlotOptions(CurveAtts)
    # Open the XMF file for the spanwise-averaged z-component of the vorticity.
    visit.OpenDatabase(databases[-1], 0)
    # Add a pseudocolor plot of the scalar field.
    visit.AddPlot('Pseudocolor', name, 1, 1)
    # Set attributes of the pseudocolor.
    PseudocolorAtts = visit.PseudocolorAttributes()
    PseudocolorAtts.minFlag = 1
    PseudocolorAtts.min = value_range[0]
    PseudocolorAtts.maxFlag = 1
    PseudocolorAtts.max = value_range[1]
    PseudocolorAtts.colorTableName = 'viridis'
    visit.SetPlotOptions(PseudocolorAtts)
    # Parse the 2D view configuration file and apply the view.
    if config_view is not None:
        with open(str(config_view), 'r') as infile:
            config_view = yaml.load(infile, Loader=yaml.FullLoader)
        config_view = config_view['View2DAtts']
        # Set attributes of the view.
        View2DAtts = visit.View2DAttributes()
        for key, value in config_view.items():
            # YAML lists must become tuples for the VisIt setters.
            if type(value) is list:
                value = tuple(value)
            setattr(View2DAtts, key, value)
        visit.SetView2D(View2DAtts)
    # Remove time and user info.
    AnnotationAtts = visit.AnnotationAttributes()
    AnnotationAtts.userInfoFlag = 0
    # NOTE(review): the comment above says time info is removed, but the
    # flag is kept at 1 here while the 3D functions set it to 0 -- confirm.
    AnnotationAtts.timeInfoFlag = 1
    visit.SetAnnotationAttributes(AnnotationAtts)
    visit.SetActiveWindow(1)
    visit.Source(os.path.join(visit_dir, visit_arch, 'bin', 'makemovie.py'))
    visit.ToggleCameraViewMode()
    # Create output directory if necessary.
    if not os.path.isdir(str(out_dir)):
        os.makedirs(str(out_dir))
    # Determine the states to render without mutating the caller's
    # `states_range` (the previous mutable-default list was modified
    # in place, corrupting subsequent calls).
    if state is not None:
        states = [state]
    elif states is None:
        start, end, step = states_range
        if end is None:
            end = visit.TimeSliderGetNStates()
        else:
            end += 1
        states = range(start, end, step)
    # Loop over the states to render and save the plots.
    for i, state in enumerate(states):
        print('[state {}] Rendering and saving figure ...'.format(state))
        visit.SetTimeSliderState(state)
        if i == 0:
            # Draw once and fix the rendering attributes on the first state.
            visit.DrawPlots()
            RenderingAtts = visit.RenderingAttributes()
            visit.SetRenderingAttributes(RenderingAtts)
        SaveWindowAtts = visit.SaveWindowAttributes()
        SaveWindowAtts.outputToCurrentDirectory = 0
        SaveWindowAtts.outputDirectory = str(out_dir)
        SaveWindowAtts.fileName = '{}{:0>4}'.format(out_prefix, state)
        SaveWindowAtts.family = 0
        SaveWindowAtts.format = SaveWindowAtts.PNG
        SaveWindowAtts.width = figsize[0]
        SaveWindowAtts.height = figsize[1]
        SaveWindowAtts.quality = 100
        SaveWindowAtts.resConstraint = SaveWindowAtts.NoConstraint
        visit.SetSaveWindowAttributes(SaveWindowAtts)
        visit.SaveWindow()
    # Best-effort cleanup of the log file VisIt writes in the cwd.
    if os.path.isfile('visitlog.py'):
        os.remove('visitlog.py')
    visit.Close()
    return
def visit_plot_contour_3d(xdmf_path, name,
                          value_range=(-5.0, 5.0),
                          p3d_paths=None,
                          config_view=None,
                          out_dir=None, out_prefix='wake3d_',
                          figsize=(1024, 1024),
                          visit_dir=None, visit_arch='linux-x86_64',
                          state=None, states=None,
                          states_range=(0, None, 1)):
    """Render and save 3D contour plots of a scalar field with VisIt.

    Parameters
    ----------
    xdmf_path : str
        Path of the XDMF file with the scalar field.
    name : str
        Name of the scalar field to render.
    value_range : tuple of 2 floats, optional
        Limits of the contour levels; default: (-5.0, 5.0).
    p3d_paths : list of str, optional
        Paths of Point3D files with the immersed-boundary coordinates;
        default: None (no boundary drawn).
    config_view : str, optional
        Path of a YAML file with a 'View3DAtts' section; default: None.
    out_dir : str, optional
        Directory in which to save the figures; default: None, meaning
        the current working directory at call time.  (The previous
        default of ``os.getcwd()`` was evaluated once at import time.)
    out_prefix : str, optional
        Prefix of the output file names; default: 'wake3d_'.
    figsize : tuple of 2 ints, optional
        Width and height (in pixels) of the figure; default: (1024, 1024).
    visit_dir : str, optional
        Path of the VisIt installation directory; default: None
        (read the VISIT_DIR environment variable).
    visit_arch : str, optional
        Architecture sub-directory of the VisIt installation;
        default: 'linux-x86_64'.
    state : int, optional
        Single state to render; default: None.
    states : iterable of ints, optional
        Explicit states to render; default: None.
    states_range : sequence of 3 ints, optional
        (start, end, step) used when neither `state` nor `states` is
        given; an end of None means "up to the last state";
        default: (0, None, 1).

    Raises
    ------
    ValueError
        If the VisIt installation directory cannot be determined.
    """
    if out_dir is None:
        out_dir = os.getcwd()
    # Import VisIt package.
    if visit_dir is None:
        visit_dir = os.environ.get('VISIT_DIR')
        if visit_dir is None:
            raise ValueError('Provide VisIt installation path or '
                             'set env variable VISIT_DIR')
    sys.path.append(os.path.join(visit_dir, visit_arch,
                                 'lib', 'site-packages'))
    import visit
    visit.LaunchNowin()
    # Check version of VisIt.
    visit_check_version(visit.Version())
    # Create database correlation with optional Point3D files.
    num_bodies = 0
    databases = [str(xdmf_path)]
    if p3d_paths is not None:
        num_bodies = len(p3d_paths)
        databases = [str(path) for path in p3d_paths]
        databases.append(str(xdmf_path))
    visit.CreateDatabaseCorrelation('common', databases[num_bodies:], 0)
    # Open the file with the coordinates of the immersed boundary.
    if num_bodies > 0:
        for i in range(num_bodies):
            visit.OpenDatabase(databases[i], 0, 'Point3D_1.0')
            # Add plot the mesh points.
            visit.AddPlot('Mesh', 'points', 1, 1)
            # Set attributes of the mesh plot.
            MeshAtts = visit.MeshAttributes()
            MeshAtts.legendFlag = 0
            MeshAtts.meshColor = (255, 204, 0, 1.0 * 255)
            MeshAtts.meshColorSource = MeshAtts.MeshCustom
            MeshAtts.pointSize = 0.05
            MeshAtts.pointType = MeshAtts.Point
            MeshAtts.pointSizePixels = 2
            MeshAtts.opacity = 1
            visit.SetPlotOptions(MeshAtts)
    # Open the XMF file for the z-component of the vorticity.
    visit.OpenDatabase(databases[-1], 0)
    # Add the plot of the contour of the z-component of the vorticity.
    visit.AddPlot('Contour', name, 1, 1)
    # Set attributes of the contour: two levels at the range limits.
    ContourAtts = visit.ContourAttributes()
    ContourAtts.contourNLevels = 2
    ContourAtts.SetMultiColor(0, (0, 51, 102, 0.6 * 255))
    ContourAtts.SetMultiColor(1, (255, 0, 0, 0.6 * 255))
    ContourAtts.legendFlag = 1
    ContourAtts.minFlag = 1
    ContourAtts.maxFlag = 1
    ContourAtts.min = value_range[0]
    ContourAtts.max = value_range[1]
    visit.SetPlotOptions(ContourAtts)
    # Parse the 3D view configuration file and apply the view.
    if config_view is not None:
        with open(str(config_view), 'r') as infile:
            config_view = yaml.load(infile, Loader=yaml.FullLoader)
        config_view = config_view['View3DAtts']
        # Set attributes of the view.
        View3DAtts = visit.View3DAttributes()
        for key, value in config_view.items():
            # YAML lists must become tuples for the VisIt setters.
            if type(value) is list:
                value = tuple(value)
            setattr(View3DAtts, key, value)
        visit.SetView3D(View3DAtts)
    # Remove time and user info.
    AnnotationAtts = visit.AnnotationAttributes()
    AnnotationAtts.userInfoFlag = 0
    AnnotationAtts.timeInfoFlag = 0
    AnnotationAtts.axes3D.visible = 0
    AnnotationAtts.axes3D.triadFlag = 1
    AnnotationAtts.axes3D.bboxFlag = 0
    visit.SetAnnotationAttributes(AnnotationAtts)
    visit.SetActiveWindow(1)
    visit.Source(os.path.join(visit_dir, visit_arch, 'bin', 'makemovie.py'))
    visit.ToggleCameraViewMode()
    # Create output directory if necessary.
    if not os.path.isdir(str(out_dir)):
        os.makedirs(str(out_dir))
    # Determine the states to render without mutating the caller's
    # `states_range` (the previous mutable-default list was modified
    # in place, corrupting subsequent calls).
    if state is not None:
        states = [state]
    elif states is None:
        start, end, step = states_range
        if end is None:
            end = visit.TimeSliderGetNStates()
        else:
            end += 1
        states = range(start, end, step)
    # Loop over the states to render and save the plots.
    for i, state in enumerate(states):
        print('[state {}] Rendering and saving figure ...'.format(state))
        visit.SetTimeSliderState(state)
        if i == 0:
            # Draw once and fix the rendering attributes on the first state.
            visit.DrawPlots()
            RenderingAtts = visit.RenderingAttributes()
            visit.SetRenderingAttributes(RenderingAtts)
        SaveWindowAtts = visit.SaveWindowAttributes()
        SaveWindowAtts.outputToCurrentDirectory = 0
        SaveWindowAtts.outputDirectory = str(out_dir)
        SaveWindowAtts.fileName = '{}{:0>4}'.format(out_prefix, state)
        SaveWindowAtts.family = 0
        SaveWindowAtts.format = SaveWindowAtts.PNG
        SaveWindowAtts.width = figsize[0]
        SaveWindowAtts.height = figsize[1]
        SaveWindowAtts.quality = 100
        SaveWindowAtts.resConstraint = SaveWindowAtts.NoConstraint
        visit.SetSaveWindowAttributes(SaveWindowAtts)
        visit.SaveWindow()
    # Best-effort cleanup of the log file VisIt writes in the cwd.
    if os.path.isfile('visitlog.py'):
        os.remove('visitlog.py')
    visit.Close()
    return
def visit_plot_qcrit_wx_3d(xdmf_dir,
                           wx_range=(-5.0, 5.0),
                           q_value=0.1,
                           config_view=None,
                           out_dir=None, out_prefix='wake3d_',
                           figsize=(1024, 1024),
                           visit_dir=None, visit_arch='linux-x86_64',
                           state=None, states=None,
                           states_range=(0, None, 1)):
    """Render the Q-criterion isosurface colored by streamwise vorticity.

    Parameters
    ----------
    xdmf_dir : str
        Directory containing the XDMF files p.xmf, u.xmf, v.xmf, w.xmf,
        and wx.xmf.
    wx_range : tuple of 2 floats, optional
        Color range of the streamwise vorticity; default: (-5.0, 5.0).
    q_value : float, optional
        Isosurface value of the Q-criterion; default: 0.1.
    config_view : str, optional
        Path of a YAML file with a 'View3DAtts' section; default: None.
    out_dir : str, optional
        Directory in which to save the figures; default: None, meaning
        the current working directory at call time.  (The previous
        default of ``os.getcwd()`` was evaluated once at import time.)
    out_prefix : str, optional
        Prefix of the output file names; default: 'wake3d_'.
    figsize : tuple of 2 ints, optional
        Width and height (in pixels) of the figure; default: (1024, 1024).
    visit_dir : str, optional
        Path of the VisIt installation directory; default: None
        (read the VISIT_DIR environment variable).
    visit_arch : str, optional
        Architecture sub-directory of the VisIt installation;
        default: 'linux-x86_64'.
    state : int, optional
        Single state to render; default: None.
    states : iterable of ints, optional
        Explicit states to render; default: None.
    states_range : sequence of 3 ints, optional
        (start, end, step) used when neither `state` nor `states` is
        given; an end of None means "up to the last state";
        default: (0, None, 1).

    Raises
    ------
    ValueError
        If the VisIt installation directory cannot be determined.
    """
    if out_dir is None:
        out_dir = os.getcwd()
    # Import VisIt package.
    if visit_dir is None:
        visit_dir = os.environ.get('VISIT_DIR')
        if visit_dir is None:
            raise ValueError('Provide VisIt installation path or '
                             'set env variable VISIT_DIR')
    sys.path.append(os.path.join(visit_dir, visit_arch,
                                 'lib', 'site-packages'))
    import visit
    visit.LaunchNowin()
    # Check version of VisIt.
    visit_check_version(visit.Version())
    # Define some variables to get the q_crit and wx_cc.
    # (These operator expressions mirror the ones VisIt records in its
    # session log for the pressure database.)
    p_xdmf_path = os.path.join(str(xdmf_dir), 'p.xmf')
    visit.OpenDatabase(p_xdmf_path, 0)
    visit.DefineScalarExpression("operators/ConnectedComponents/p Grid", "cell_constant(<p Grid>, 0.)")
    visit.DefineCurveExpression("operators/DataBinning/1D/p Grid", "cell_constant(<p Grid>, 0)")
    visit.DefineScalarExpression("operators/DataBinning/2D/p Grid", "cell_constant(<p Grid>, 0)")
    visit.DefineScalarExpression("operators/DataBinning/3D/p Grid", "cell_constant(<p Grid>, 0)")
    visit.DefineScalarExpression("operators/Flux/p Grid", "cell_constant(<p Grid>, 0.)")
    visit.DefineCurveExpression("operators/Lineout/p", "cell_constant(<p>, 0.)")
    visit.DefineCurveExpression("operators/Lineout/time_derivative/p Grid_time", "cell_constant(time_derivative/p Grid_time, 0.)")
    visit.DefineCurveExpression("operators/Lineout/time_derivative/p Grid_lasttime", "cell_constant(time_derivative/p Grid_lasttime, 0.)")
    visit.DefineCurveExpression("operators/Lineout/time_derivative/p", "cell_constant(time_derivative/p, 0.)")
    visit.DefineScalarExpression("operators/ModelFit/model", "point_constant(<p Grid>, 0)")
    visit.DefineScalarExpression("operators/ModelFit/distance", "point_constant(<p Grid>, 0)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Sum/p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Mean/p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Variance/p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Std. Dev./p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Slope/p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Residuals/p", "cell_constant(<p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Sum/time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Sum/time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Sum/time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Mean/time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Mean/time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Mean/time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Variance/time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Variance/time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Variance/time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Std. Dev./time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Std. Dev./time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Std. Dev./time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Slope/time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Slope/time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Slope/time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Residuals/time_derivative/p Grid_time", "cell_constant(<time_derivative/p Grid_time>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Residuals/time_derivative/p Grid_lasttime", "cell_constant(<time_derivative/p Grid_lasttime>, 0.)")
    visit.DefineScalarExpression("operators/StatisticalTrends/Residuals/time_derivative/p", "cell_constant(<time_derivative/p>, 0.)")
    visit.DefineVectorExpression("operators/SurfaceNormal/p Grid", "cell_constant(<p Grid>, 0.)")
    # Define cell-centered velocity vector field by mapping each
    # component onto the pressure grid with pos_cmfe.
    ux_xdmf_path = os.path.join(str(xdmf_dir), 'u.xmf')
    uy_xdmf_path = os.path.join(str(xdmf_dir), 'v.xmf')
    uz_xdmf_path = os.path.join(str(xdmf_dir), 'w.xmf')
    vel_exp = ('{' +
               'pos_cmfe(<{}[0]id:u>, <p Grid>, 1.0),'.format(ux_xdmf_path) +
               'pos_cmfe(<{}[0]id:v>, <p Grid>, 0.0),'.format(uy_xdmf_path) +
               'pos_cmfe(<{}[0]id:w>, <p Grid>, 0.0)'.format(uz_xdmf_path) +
               '}')
    visit.DefineVectorExpression('velocity', vel_exp)
    # Define Q-criterion from the velocity-gradient tensor.
    qcrit_exp = ('q_criterion(' +
                 'gradient(velocity[0]),' +
                 'gradient(velocity[1]),' +
                 'gradient(velocity[2])' +
                 ')')
    visit.DefineScalarExpression('q_crit', qcrit_exp)
    # Define cell-centered streamwise vorticity.
    wx_xdmf_path = os.path.join(str(xdmf_dir), 'wx.xmf')
    wx_exp = 'pos_cmfe(<{}[0]id:wx>, <p Grid>, 0.0)'.format(wx_xdmf_path)
    visit.DefineScalarExpression('wx_cc', wx_exp)
    # Add a pseudocolor of the cell-centered streamwise vorticity.
    visit.AddPlot('Pseudocolor', 'wx_cc', 1, 1)
    PseudocolorAtts = visit.PseudocolorAttributes()
    PseudocolorAtts.minFlag = 1
    PseudocolorAtts.min = wx_range[0]
    PseudocolorAtts.maxFlag = 1
    PseudocolorAtts.max = wx_range[1]
    PseudocolorAtts.colorTableName = 'viridis'
    PseudocolorAtts.invertColorTable = 1
    PseudocolorAtts.opacityType = PseudocolorAtts.Constant
    PseudocolorAtts.opacity = 0.8
    PseudocolorAtts.legendFlag = 0
    visit.SetPlotOptions(PseudocolorAtts)
    # Add an isosurface of the Q-criterion.
    visit.AddOperator('Isosurface', 1)
    IsosurfaceAtts = visit.IsosurfaceAttributes()
    IsosurfaceAtts.variable = 'q_crit'
    IsosurfaceAtts.contourMethod = IsosurfaceAtts.Value
    IsosurfaceAtts.contourValue = (q_value)
    IsosurfaceAtts.scaling = IsosurfaceAtts.Linear
    visit.SetOperatorOptions(IsosurfaceAtts, 1)
    # Remove info about user, time, database, and legend.
    AnnotationAtts = visit.AnnotationAttributes()
    AnnotationAtts.userInfoFlag = 0
    AnnotationAtts.databaseInfoFlag = 0
    AnnotationAtts.timeInfoFlag = 0
    AnnotationAtts.legendInfoFlag = 0
    AnnotationAtts.axes3D.visible = 0
    AnnotationAtts.axes3D.triadFlag = 1
    AnnotationAtts.axes3D.bboxFlag = 0
    visit.SetAnnotationAttributes(AnnotationAtts)
    # Parse the 3D view configuration file and apply the view.
    if config_view is not None:
        with open(str(config_view), 'r') as infile:
            config_view = yaml.load(infile, Loader=yaml.FullLoader)
        config_view = config_view['View3DAtts']
        # Set attributes of the view.
        View3DAtts = visit.View3DAttributes()
        for key, value in config_view.items():
            # YAML lists must become tuples for the VisIt setters.
            if type(value) is list:
                value = tuple(value)
            setattr(View3DAtts, key, value)
        visit.SetView3D(View3DAtts)
    visit.SetActiveWindow(1)
    visit.Source(os.path.join(visit_dir, visit_arch, 'bin', 'makemovie.py'))
    visit.ToggleCameraViewMode()
    # Create output directory if necessary.
    if not os.path.isdir(str(out_dir)):
        os.makedirs(str(out_dir))
    # Determine the states to render without mutating the caller's
    # `states_range` (the previous mutable-default list was modified
    # in place, corrupting subsequent calls).
    if state is not None:
        states = [state]
    elif states is None:
        start, end, step = states_range
        if end is None:
            end = visit.TimeSliderGetNStates()
        else:
            end += 1
        states = range(start, end, step)
    # Loop over the states to render and save the plots.
    for i, state in enumerate(states):
        print('[state {}] Rendering and saving figure ...'.format(state))
        visit.SetTimeSliderState(state)
        if i == 0:
            # Draw once and fix the rendering attributes on the first state.
            visit.DrawPlots()
            RenderingAtts = visit.RenderingAttributes()
            visit.SetRenderingAttributes(RenderingAtts)
        SaveWindowAtts = visit.SaveWindowAttributes()
        SaveWindowAtts.outputToCurrentDirectory = 0
        SaveWindowAtts.outputDirectory = str(out_dir)
        SaveWindowAtts.fileName = '{}{:0>4}'.format(out_prefix, state)
        SaveWindowAtts.family = 0
        SaveWindowAtts.format = SaveWindowAtts.PNG
        SaveWindowAtts.width = figsize[0]
        SaveWindowAtts.height = figsize[1]
        SaveWindowAtts.quality = 100
        SaveWindowAtts.resConstraint = SaveWindowAtts.NoConstraint
        visit.SetSaveWindowAttributes(SaveWindowAtts)
        visit.SaveWindow()
    # Best-effort cleanup of the log file VisIt writes in the cwd.
    if os.path.isfile('visitlog.py'):
        os.remove('visitlog.py')
    visit.CloseComputeEngine()
    visit.Close()
    return
| 44.002146
| 161
| 0.666081
| 2,304
| 20,505
| 5.796875
| 0.136285
| 0.017221
| 0.04717
| 0.083109
| 0.800913
| 0.78257
| 0.766771
| 0.735475
| 0.696391
| 0.652291
| 0
| 0.019883
| 0.224921
| 20,505
| 465
| 162
| 44.096774
| 0.820487
| 0.083053
| 0
| 0.61326
| 1
| 0
| 0.222074
| 0.121141
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01105
| false
| 0
| 0.016575
| 0
| 0.038674
| 0.022099
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bfec549b06e1505995d70270e0b8e9dcade976e4
| 55
|
py
|
Python
|
fftbg/brains/__main__.py
|
rainbowbismuth/birb-brains-bot
|
f168ec06c5c5cc8d41589437c6f91f0d97289167
|
[
"MIT"
] | 1
|
2020-12-01T01:31:31.000Z
|
2020-12-01T01:31:31.000Z
|
fftbg/brains/__main__.py
|
rainbowbismuth/birb-brains-bot
|
f168ec06c5c5cc8d41589437c6f91f0d97289167
|
[
"MIT"
] | 2
|
2021-05-30T21:10:16.000Z
|
2021-05-30T21:10:44.000Z
|
fftbg/brains/__main__.py
|
rainbowbismuth/birb-brains-bot
|
f168ec06c5c5cc8d41589437c6f91f0d97289167
|
[
"MIT"
] | null | null | null |
import fftbg.brains.server

if __name__ == '__main__':
    # Start the brains server only when executed as a script
    # (e.g. ``python -m fftbg.brains``) -- not as a side effect of import.
    fftbg.brains.server.main()
| 13.75
| 26
| 0.8
| 8
| 55
| 5.5
| 0.625
| 0.5
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 55
| 3
| 27
| 18.333333
| 0.862745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
bff92731eb56641d56298c3385b69dd89c2024c8
| 232
|
py
|
Python
|
src/test_corpus.py
|
karianjahi/word_processing
|
ee653d63a6b81c8efb8134f396ab55fa72a4d106
|
[
"MIT"
] | null | null | null |
src/test_corpus.py
|
karianjahi/word_processing
|
ee653d63a6b81c8efb8134f396ab55fa72a4d106
|
[
"MIT"
] | null | null | null |
src/test_corpus.py
|
karianjahi/word_processing
|
ee653d63a6b81c8efb8134f396ab55fa72a4d106
|
[
"MIT"
] | null | null | null |
from word_processor import count_words
def test_corpus(get_text_from_file):
    """Exercise the text fixture through count_words.

    :param get_text_from_file: name of the fixture providing the text
    :return: None
    """
    # The fixture text is expected to contain more than ten words.
    total_words = count_words(get_text_from_file)
    assert total_words > 10
| 25.777778
| 51
| 0.728448
| 35
| 232
| 4.457143
| 0.685714
| 0.134615
| 0.211538
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010811
| 0.202586
| 232
| 9
| 52
| 25.777778
| 0.832432
| 0.340517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5e3f6e7548f8ce955ae028387cb670c6eae2b85c
| 1,364
|
py
|
Python
|
course/code/tests/backend.py
|
vadym-khodak/python-basic-course
|
e62f03e4828edd30f44e97a3fab5089f79880dc3
|
[
"MIT"
] | 1
|
2022-01-31T15:12:37.000Z
|
2022-01-31T15:12:37.000Z
|
course/code/tests/backend.py
|
vadym-khodak/python-basic-course
|
e62f03e4828edd30f44e97a3fab5089f79880dc3
|
[
"MIT"
] | 1
|
2022-02-03T19:07:06.000Z
|
2022-02-03T19:07:06.000Z
|
course/code/tests/backend.py
|
vadym-khodak/python-basic-course
|
e62f03e4828edd30f44e97a3fab5089f79880dc3
|
[
"MIT"
] | null | null | null |
def test_index():
    """The root url responds with HTTP 200 and an HTML content type."""
    from .web import app
    # GIVEN a Flask test client
    with app.test_client() as test_client:
        # WHEN a GET request is sent to the "/" url
        result = test_client.get("/")
        # THEN the status equals 200 and the body is served as text/html
        assert result.status_code == 200
        assert result.content_type == "text/html; charset=utf-8"
def test_ping():
    """The "/ping" url responds with HTTP 200, text/html, and body "pong"."""
    from .web import app
    # GIVEN a Flask test client
    with app.test_client() as test_client:
        # WHEN a GET request is sent to the "/ping" url
        result = test_client.get("/ping")
        # THEN status is 200, the decoded body is "pong", type is text/html
        assert result.status_code == 200
        assert result.data.decode() == "pong"
        assert result.content_type == "text/html; charset=utf-8"
def test_api_ping():
    """The "/api/ping" url responds with HTTP 200 JSON whose message is "pong"."""
    from .web import app
    # GIVEN a Flask test client
    with app.test_client() as test_client:
        # WHEN a GET request is sent to the "/api/ping" url
        result = test_client.get("/api/ping")
        # THEN status is 200, json "message" is "pong", type is application/json
        assert result.status_code == 200
        assert result.json["message"] == "pong"
        assert result.content_type == "application/json"
if __name__ == "__main__":
    # Intentionally a no-op: these tests are collected and run by pytest,
    # not by executing this module directly.
    pass
| 29.021277
| 110
| 0.645894
| 185
| 1,364
| 4.632432
| 0.27027
| 0.130688
| 0.084014
| 0.056009
| 0.850642
| 0.756126
| 0.756126
| 0.724621
| 0.724621
| 0.614936
| 0
| 0.019743
| 0.257331
| 1,364
| 46
| 111
| 29.652174
| 0.826259
| 0.331378
| 0
| 0.5
| 0
| 0
| 0.113082
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 1
| 0.136364
| false
| 0.045455
| 0.136364
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e4ff7b7f52ffe03d7a2b49cf869a0f25f2931cc
| 22,989
|
py
|
Python
|
tests/test_negative_cache.py
|
marceljahnke/negative-cache
|
8b1ac5995915631b42a756159b9f1dd796736975
|
[
"Apache-2.0"
] | 1
|
2022-01-26T08:27:45.000Z
|
2022-01-26T08:27:45.000Z
|
tests/test_negative_cache.py
|
marceljahnke/negative-cache
|
8b1ac5995915631b42a756159b9f1dd796736975
|
[
"Apache-2.0"
] | null | null | null |
tests/test_negative_cache.py
|
marceljahnke/negative-cache
|
8b1ac5995915631b42a756159b9f1dd796736975
|
[
"Apache-2.0"
] | 1
|
2022-02-26T07:54:29.000Z
|
2022-02-26T07:54:29.000Z
|
# coding=utf-8
# Copyright 2022 Marcel Jahnke
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for negative_cache.negative_cache."""
import torch
import unittest
import pytest
from negative_cache import negative_cache
from negative_cache.negative_cache import FixedLenFeature
class NegativeCacheTest(unittest.TestCase):
    def test_init_cache(self):
        """init_cache allocates zeroed data tensors per spec and a zero age vector."""
        specs = {
            "1": FixedLenFeature(shape=[2], dtype=torch.int32),
            "2": FixedLenFeature(shape=[3], dtype=torch.float32),
            "3": FixedLenFeature(shape=[3, 2], dtype=torch.float32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=6)
        cache = cache_manager.init_cache()
        # The cache exposes exactly one data entry per spec key.
        self.assertEqual({"1", "2", "3"}, set(cache.data.keys()))
        # Each data tensor is zero-initialized with shape [cache_size] + spec shape.
        self.assertTrue(
            torch.equal(torch.zeros([6, 2], dtype=torch.int32), cache.data["1"])
        )
        self.assertTrue(
            torch.equal(torch.zeros([6, 3], dtype=torch.float32), cache.data["2"])
        )
        self.assertTrue(
            torch.equal(torch.zeros([6, 3, 2], dtype=torch.float32), cache.data["3"])
        )
        # Every slot starts with age zero.
        self.assertTrue(torch.equal(torch.zeros([6], dtype=torch.int32), cache.age))
@pytest.mark.xfail
def test_update_cache(self):
"""
NOTE: The original implementation of negative_cache used tf.math.top_k which returned the lowest
indeces for duplicate values. The torch implementation of topk returns any indices for duplicates.
There is no specific rule for indices of duplicates. Because of this the test cases needed to be
changed to match the torch.topk behavior. I tried to make this as reproducible as possible by
seeding torch. Test was written with: torch==1.10.1 and python 3.9.7
"""
torch.manual_seed(42)
specs = {
"1": FixedLenFeature(shape=[2], dtype=torch.int32),
"2": FixedLenFeature(shape=[3], dtype=torch.float32),
}
cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
cache = cache_manager.init_cache()
updates = {
"1": torch.ones(size=[2, 2], dtype=torch.int32),
"2": torch.ones(size=[2, 3], dtype=torch.float32),
}
cache = cache_manager.update_cache(cache, new_items=updates)
self.assertEqual({"1", "2"}, set(cache.data.keys()))
print(cache.data["1"])
self.assertTrue(
torch.equal(
torch.tensor([[0, 0], [0, 0], [1, 1], [1, 1]], dtype=torch.int32),
cache.data["1"],
)
)
self.assertTrue(
torch.equal(
torch.tensor(
[
[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
[1.0, 1.0, 1.0],
[1.0, 1.0, 1.0],
],
dtype=torch.float32,
),
cache.data["2"],
)
)
self.assertTrue(
torch.equal(torch.tensor([1, 1, 0, 0], dtype=torch.int32), cache.age)
)
updates = {
"1": 2 * torch.ones(size=[2, 2], dtype=torch.int32),
"2": 2.0 * torch.ones(size=[2, 3], dtype=torch.float32),
}
cache = cache_manager.update_cache(cache, new_items=updates)
self.assertEqual({"1", "2"}, set(cache.data.keys()))
self.assertTrue(
torch.equal(
torch.tensor([[2, 2], [2, 2], [1, 1], [1, 1]], dtype=torch.int32),
cache.data["1"],
)
)
self.assertTrue(
torch.equal(
torch.tensor(
[
[2.0, 2.0, 2.0],
[2.0, 2.0, 2.0],
[1.0, 1.0, 1.0],
[1.0, 1.0, 1.0],
],
dtype=torch.float32,
),
cache.data["2"],
)
)
updates = {
"1": 3 * torch.ones(size=[2, 2], dtype=torch.int32),
"2": 3.0 * torch.ones(size=[2, 3], dtype=torch.float32),
}
cache = cache_manager.update_cache(cache, new_items=updates)
self.assertEqual({"1", "2"}, set(cache.data.keys()))
self.assertTrue(
torch.equal(
torch.tensor([[2, 2], [2, 2], [3, 3], [3, 3]], dtype=torch.int32),
cache.data["1"],
)
)
self.assertTrue(
torch.equal(
torch.tensor(
[
[2.0, 2.0, 2.0],
[2.0, 2.0, 2.0],
[3.0, 3.0, 3.0],
[3.0, 3.0, 3.0],
],
dtype=torch.float32,
),
cache.data["2"],
)
)
    def test_update_cache_with_non_multiple_cache_size(self):
        """Updates whose batch size does not divide the cache size wrap around.

        Cache size is 3 while each update carries 2 rows, so the second
        update must wrap past the end of the cache.
        """
        specs = {
            "1": FixedLenFeature(shape=[2], dtype=torch.int32),
            "2": FixedLenFeature(shape=[3], dtype=torch.float32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=3)
        cache = cache_manager.init_cache()
        updates = {
            "1": torch.ones(size=[2, 2], dtype=torch.int32),
            "2": torch.ones(size=[2, 3], dtype=torch.float32),
        }
        cache = cache_manager.update_cache(cache, new_items=updates)
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        # The first two slots receive the new rows; the third stays zero.
        self.assertTrue(
            torch.equal(
                torch.tensor([[1, 1], [1, 1], [0, 0]], dtype=torch.int32),
                cache.data["1"],
            )
        )
        self.assertTrue(
            torch.equal(
                torch.tensor(
                    [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [0.0, 0.0, 0.0]],
                    dtype=torch.float32,
                ),
                cache.data["2"],
            )
        )
        self.assertTrue(
            torch.equal(torch.tensor([0, 0, 1], dtype=torch.int32), cache.age)
        )
        updates = {
            "1": 2 * torch.ones(size=[2, 2], dtype=torch.int32),
            "2": 2.0 * torch.ones(size=[2, 3], dtype=torch.float32),
        }
        cache = cache_manager.update_cache(cache, new_items=updates)
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        # The second update lands in slots 0 and 2, wrapping around the end.
        self.assertTrue(
            torch.equal(
                torch.tensor([[2, 2], [1, 1], [2, 2]], dtype=torch.int32),
                cache.data["1"],
            )
        )
        self.assertTrue(
            torch.equal(
                torch.tensor(
                    [[2.0, 2.0, 2.0], [1.0, 1.0, 1.0], [2.0, 2.0, 2.0]],
                    dtype=torch.float32,
                ),
                cache.data["2"],
            )
        )
        self.assertTrue(
            torch.equal(torch.tensor([0, 1, 0], dtype=torch.int32), cache.age)
        )
    @pytest.mark.xfail
    def test_update_caches_with_function(self):
        """init_cache/update_cache also work when called through bound references.

        NOTE: The original implementation of negative_cache used tf.math.top_k
        which returned the lowest indices for duplicate values. The torch
        implementation of topk returns any indices for duplicates. There is no
        specific rule for indices of duplicates. Because of this the test cases
        needed to be changed to match the torch.topk behavior. I tried to make
        this as reproducible as possible by seeding torch. Test was written
        with: torch==1.10.1 and python 3.9.7
        """
        torch.manual_seed(42)
        specs = {
            "1": FixedLenFeature(shape=[2], dtype=torch.int32),
            "2": FixedLenFeature(shape=[3], dtype=torch.float32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
        # Grab the bound methods and call them as free functions.
        init_cache_fn = cache_manager.init_cache
        cache = init_cache_fn()
        updates = {
            "1": torch.ones(size=[2, 2], dtype=torch.int32),
            "2": torch.ones(size=[2, 3], dtype=torch.float32),
        }
        update_cache_fn = cache_manager.update_cache
        cache = update_cache_fn(cache, new_items=updates)
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        self.assertTrue(
            torch.equal(
                torch.tensor([[0, 0], [0, 0], [1, 1], [1, 1]], dtype=torch.int32),
                cache.data["1"],
            )
        )
        self.assertTrue(
            torch.equal(
                torch.tensor(
                    [
                        [0.0, 0.0, 0.0],
                        [0.0, 0.0, 0.0],
                        [1.0, 1.0, 1.0],
                        [1.0, 1.0, 1.0],
                    ],
                    dtype=torch.float32,
                ),
                cache.data["2"],
            )
        )
def test_raises_value_error_if_different_update_sizes(self):
specs = {
"1": FixedLenFeature(shape=[2], dtype=torch.int32),
"2": FixedLenFeature(shape=[3], dtype=torch.float32),
}
cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
init_cache_fn = cache_manager.init_cache
cache = init_cache_fn()
updates = {
"1": torch.ones(size=[2, 2], dtype=torch.int32),
"2": torch.ones(size=[1, 3], dtype=torch.float32),
}
update_cache_fn = cache_manager.update_cache
with self.assertRaises(IndexError):
cache = update_cache_fn(cache, new_items=updates)
    def test_update_cache_with_existing_items(self):
        """In-place updates at explicit indices refresh data and reset age."""
        specs = {
            "1": FixedLenFeature(shape=[2], dtype=torch.int32),
            "2": FixedLenFeature(shape=[3], dtype=torch.float32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
        cache = cache_manager.init_cache()
        # Rows 1 and 3 are overwritten with ones; rows 0 and 2 keep zeros.
        updated_item_indices = torch.tensor([1, 3], dtype=torch.int32)
        updated_item_data = {
            "1": torch.ones(size=[2, 2], dtype=torch.int32),
            "2": torch.ones(size=[2, 3], dtype=torch.float32),
        }
        cache = cache_manager.update_cache(
            cache,
            updated_item_data=updated_item_data,
            updated_item_indices=updated_item_indices,
        )
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        self.assertTrue(
            torch.equal(
                torch.tensor([[0, 0], [1, 1], [0, 0], [1, 1]], dtype=torch.int32),
                cache.data["1"],
            )
        )
        self.assertTrue(
            torch.equal(
                torch.tensor(
                    [
                        [0.0, 0.0, 0.0],
                        [1.0, 1.0, 1.0],
                        [0.0, 0.0, 0.0],
                        [1.0, 1.0, 1.0],
                    ],
                    dtype=torch.float32,
                ),
                cache.data["2"],
            )
        )
        # Untouched rows age to 1; refreshed rows reset to age 0.
        self.assertTrue(
            torch.equal(torch.tensor([1, 0, 1, 0], dtype=torch.int32), cache.age)
        )
    def test_partial_update_cache_with_existing_items(self):
        """An in-place update may touch only a subset of the spec keys.

        Only key "1" is updated; key "2" keeps its zero-initialized data,
        but the age of the updated rows is still reset.
        """
        specs = {
            "1": FixedLenFeature(shape=[2], dtype=torch.int32),
            "2": FixedLenFeature(shape=[3], dtype=torch.float32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
        cache = cache_manager.init_cache()
        updated_item_indices = torch.tensor([1, 3], dtype=torch.int32)
        updated_item_data = {
            "1": torch.ones(size=[2, 2], dtype=torch.int32),
        }
        cache = cache_manager.update_cache(
            cache,
            updated_item_data=updated_item_data,
            updated_item_indices=updated_item_indices,
        )
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        self.assertTrue(
            torch.equal(
                torch.tensor([[0, 0], [1, 1], [0, 0], [1, 1]], dtype=torch.int32),
                cache.data["1"],
            )
        )
        # Key "2" was not part of the update, so it stays all zeros.
        self.assertTrue(
            torch.equal(torch.zeros([4, 3], dtype=torch.float32), cache.data["2"])
        )
        self.assertTrue(
            torch.equal(torch.tensor([1, 0, 1, 0], dtype=torch.int32), cache.age)
        )
    def test_update_cache_with_new_items_and_existing_items(self):
        """New items and in-place updates can be combined in a single call.

        Starting from a hand-built cache, slot 0 is updated in place (key
        "1" only) while the new item goes to slot 1, per the expected
        tensors below.
        """
        specs = {
            "1": FixedLenFeature(shape=[1], dtype=torch.int32),
            "2": FixedLenFeature(shape=[1], dtype=torch.int32),
        }
        cache_manager = negative_cache.CacheManager(specs=specs, cache_size=2)
        data = {
            "1": torch.tensor([[0], [0], [3]], dtype=torch.int32),
            "2": torch.tensor([[1], [2], [4]], dtype=torch.int32),
        }
        age = torch.tensor([2, 1, 0], dtype=torch.int32)
        cache = negative_cache.NegativeCache(data=data, age=age)
        updated_item_indices = torch.tensor([0], dtype=torch.int32)
        updated_item_data = {
            "1": torch.tensor([[10]], dtype=torch.int32),
        }
        new_items = {
            "1": torch.tensor([[11]], dtype=torch.int32),
            "2": torch.tensor([[12]], dtype=torch.int32),
        }
        cache = cache_manager.update_cache(
            cache,
            new_items=new_items,
            updated_item_data=updated_item_data,
            updated_item_indices=updated_item_indices,
        )
        self.assertEqual({"1", "2"}, set(cache.data.keys()))
        # Slot 0: in-place update of key "1" only; slot 1: the new item.
        self.assertTrue(
            torch.equal(
                torch.tensor([[10], [11], [3]], dtype=torch.int32), cache.data["1"]
            )
        )
        self.assertTrue(
            torch.equal(
                torch.tensor([[1], [12], [4]], dtype=torch.int32), cache.data["2"]
            )
        )
        self.assertTrue(
            torch.equal(torch.tensor([0, 0, 1], dtype=torch.int32), cache.age)
        )
def test_raises_value_error_if_new_item_keys_not_equal_specs(self):
    """new_items keys must match the specs exactly.

    Both a missing key ("2" absent) and an extra key ("3" present) are
    rejected with ValueError.
    """
    specs = {
        "1": FixedLenFeature(shape=[1], dtype=torch.int32),
        "2": FixedLenFeature(shape=[1], dtype=torch.int32),
    }
    manager = negative_cache.CacheManager(specs=specs, cache_size=4)
    cache = manager.init_cache()

    def unit_column():
        return torch.ones(size=[2, 1], dtype=torch.int32)

    # Strict subset of the spec keys: key "2" is missing.
    with self.assertRaises(ValueError):
        cache = manager.update_cache(cache, new_items={"1": unit_column()})
    # Strict superset of the spec keys: key "3" is not in the specs.
    with self.assertRaises(ValueError):
        cache = manager.update_cache(
            cache,
            new_items={
                "1": unit_column(),
                "2": unit_column(),
                "3": unit_column(),
            },
        )
def test_raises_value_error_if_update_item_keys_not_in_specs(self):
    """updated_item_data containing a key absent from the specs is rejected."""
    specs = {
        "1": FixedLenFeature(shape=[1], dtype=torch.int32),
        "2": FixedLenFeature(shape=[1], dtype=torch.int32),
    }
    manager = negative_cache.CacheManager(specs=specs, cache_size=4)
    cache = manager.init_cache()
    bad_update = {
        "1": torch.ones(size=[2, 1], dtype=torch.int32),
        # Key "3" does not exist in the specs above.
        "3": torch.ones(size=[2, 1], dtype=torch.int32),
    }
    with self.assertRaises(ValueError):
        cache = manager.update_cache(
            cache,
            updated_item_data=bad_update,
            updated_item_indices=torch.tensor([0]),
        )
def test_masked_update_cache_with_existing_items(self):
"""Only mask=True entries of updated_item_data are written.

The update targeting row 3 is masked out, so row 3 keeps its
zero-initialized contents and keeps aging; row 1 is written and its
age resets to 0.
"""
specs = {
"1": FixedLenFeature(shape=[2], dtype=torch.int32),
"2": FixedLenFeature(shape=[3], dtype=torch.float32),
}
cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
cache = cache_manager.init_cache()
# Two candidate updates, targeting cache rows 1 and 3.
updated_item_indices = torch.tensor([1, 3], dtype=torch.int32)
updated_item_data = {
"1": torch.tensor([[1, 1], [2, 2]], dtype=torch.int32),
"2": torch.tensor([[3.0, 3.0, 3.0], [4.0, 4.0, 4.0]], dtype=torch.float32),
}
# Only the first candidate (row 1) is applied; the second is masked out.
updated_item_mask = torch.tensor([True, False])
cache = cache_manager.update_cache(
cache,
updated_item_data=updated_item_data,
updated_item_indices=updated_item_indices,
updated_item_mask=updated_item_mask,
)
self.assertEqual({"1", "2"}, set(cache.data.keys()))
# Row 1 received [1, 1]; all other rows stay at their initial zeros.
self.assertTrue(
torch.equal(
torch.tensor([[0, 0], [1, 1], [0, 0], [0, 0]], dtype=torch.int32),
cache.data["1"],
)
)
# Same for feature "2": only row 1 received [3.0, 3.0, 3.0].
self.assertTrue(
torch.equal(
torch.tensor(
[
[0.0, 0.0, 0.0],
[3.0, 3.0, 3.0],
[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
],
dtype=torch.float32,
),
cache.data["2"],
)
)
# The written row's age resets to 0; every other row ages to 1.
self.assertTrue(
torch.equal(torch.tensor([1, 0, 1, 1], dtype=torch.int32), cache.age)
)
def test_masked_update_cache_with_existing_items_when_all_items_masked(self):
    """With every update masked out, cache data is untouched and all
    entries simply age by one step."""
    specs = {"1": FixedLenFeature(shape=[2], dtype=torch.int32)}
    manager = negative_cache.CacheManager(specs=specs, cache_size=4)
    cache = negative_cache.NegativeCache(
        data={
            "1": torch.tensor(
                [[5, 5], [10, 10], [15, 15], [20, 20]], dtype=torch.int32
            )
        },
        age=torch.tensor([2, 2, 2, 2], dtype=torch.int32),
    )
    cache = manager.update_cache(
        cache,
        updated_item_data={
            "1": torch.tensor([[1, 1], [2, 2]], dtype=torch.int32)
        },
        updated_item_indices=torch.tensor([1, 3], dtype=torch.int32),
        updated_item_mask=torch.tensor([False, False]),  # nothing applied
    )
    self.assertEqual({"1"}, set(cache.data.keys()))
    # Data is unchanged; only ages advanced (2 -> 3 everywhere).
    unchanged_rows = torch.tensor(
        [[5, 5], [10, 10], [15, 15], [20, 20]], dtype=torch.int32
    )
    self.assertTrue(torch.equal(unchanged_rows, cache.data["1"]))
    self.assertTrue(
        torch.equal(torch.tensor([3, 3, 3, 3], dtype=torch.int32), cache.age)
    )
def test_update_cache_without_lru(self):
    """With use_lru=False, updates land exactly at the given indices and
    every entry's age (including the updated ones) advances by one."""
    specs = {"1": FixedLenFeature(shape=[2], dtype=torch.int32)}
    manager = negative_cache.CacheManager(
        specs=specs, cache_size=4, use_lru=False
    )
    cache = negative_cache.NegativeCache(
        data={
            "1": torch.tensor(
                [[5, 5], [10, 10], [15, 15], [20, 20]], dtype=torch.int32
            )
        },
        age=torch.tensor([1, 0, 1, 1], dtype=torch.int32),
    )
    cache = manager.update_cache(
        cache,
        updated_item_indices=torch.tensor([1, 3], dtype=torch.int32),
        updated_item_data={
            "1": torch.tensor([[1, 1], [2, 2]], dtype=torch.int32)
        },
    )
    # Rows 1 and 3 were replaced; every age incremented by one.
    expected_rows = torch.tensor(
        [[5, 5], [1, 1], [15, 15], [2, 2]], dtype=torch.int32
    )
    expected_age = torch.tensor([2, 1, 2, 2], dtype=torch.int32)
    self.assertTrue(torch.equal(expected_rows, cache.data["1"]))
    self.assertTrue(torch.equal(expected_age, cache.age))
def test_masked_update_cache_with_existing_items_not_in_index_one(self):
"""Same as the masked-update test above, but the surviving update
targets row 0 instead of row 1, confirming the mask is applied
per-update rather than to a fixed row.
"""
specs = {
"1": FixedLenFeature(shape=[2], dtype=torch.int32),
"2": FixedLenFeature(shape=[3], dtype=torch.float32),
}
cache_manager = negative_cache.CacheManager(specs=specs, cache_size=4)
cache = cache_manager.init_cache()
# Candidate updates target rows 0 and 3; only the first survives the mask.
updated_item_indices = torch.tensor([0, 3], dtype=torch.int32)
updated_item_data = {
"1": torch.tensor([[1, 1], [2, 2]], dtype=torch.int32),
"2": torch.tensor([[3.0, 3.0, 3.0], [4.0, 4.0, 4.0]], dtype=torch.float32),
}
updated_item_mask = torch.tensor([True, False])
cache = cache_manager.update_cache(
cache,
updated_item_data=updated_item_data,
updated_item_indices=updated_item_indices,
updated_item_mask=updated_item_mask,
)
self.assertEqual({"1", "2"}, set(cache.data.keys()))
# Row 0 received [1, 1]; the masked update for row 3 was dropped.
self.assertTrue(
torch.equal(
torch.tensor([[1, 1], [0, 0], [0, 0], [0, 0]], dtype=torch.int32),
cache.data["1"],
)
)
# Feature "2" mirrors that: only row 0 received [3.0, 3.0, 3.0].
self.assertTrue(
torch.equal(
torch.tensor(
[
[3.0, 3.0, 3.0],
[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
],
dtype=torch.float32,
),
cache.data["2"],
)
)
# Row 0's age resets to 0; all other rows age to 1.
self.assertTrue(
torch.equal(torch.tensor([0, 1, 1, 1], dtype=torch.int32), cache.age)
)
def test_new_items_with_mask(self):
    """Masked-out new items are dropped; the remaining ones replace the
    oldest cache rows."""
    specs = {"1": FixedLenFeature(shape=[2], dtype=torch.int32)}
    manager = negative_cache.CacheManager(specs=specs, cache_size=4)
    cache = negative_cache.NegativeCache(
        data={
            "1": torch.tensor(
                [[1, 1], [2, 2], [3, 3], [4, 4]], dtype=torch.int32
            )
        },
        age=torch.tensor([0, 2, 1, 3], dtype=torch.int32),
    )
    candidates = {
        "1": torch.tensor([[5, 5], [6, 6], [7, 7]], dtype=torch.int32)
    }
    keep_mask = torch.tensor([True, False, True])  # drop [6, 6]
    cache = manager.update_cache(
        cache, new_items=candidates, new_items_mask=keep_mask
    )
    # [5, 5] and [7, 7] replaced the two oldest rows (3 and 1 by age);
    # the masked-out [6, 6] was never inserted.
    self.assertTrue(
        torch.equal(
            torch.tensor([[1, 1], [7, 7], [3, 3], [5, 5]], dtype=torch.int32),
            cache.data["1"],
        )
    )
# Allow running this test module directly (python <file>) in addition to
# test-runner discovery.
if __name__ == "__main__":
unittest.main()
| 38.124378
| 106
| 0.516551
| 2,696
| 22,989
| 4.262982
| 0.072329
| 0.100931
| 0.109632
| 0.022274
| 0.902897
| 0.879927
| 0.855042
| 0.840425
| 0.818846
| 0.792047
| 0
| 0.064463
| 0.342077
| 22,989
| 602
| 107
| 38.187708
| 0.695405
| 0.068642
| 0
| 0.626838
| 0
| 0
| 0.006062
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 1
| 0.027574
| false
| 0
| 0.009191
| 0
| 0.038603
| 0.001838
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e776e84ee4e64f5fbd811532c63dae95298c636
| 4,582
|
py
|
Python
|
flatdata-generator/tests/generators/py_expectations/archives/subarchive.py
|
heremaps/flatdata
|
42e62c00a25d9b65930c90044578a583efb6ed6d
|
[
"Apache-2.0"
] | 140
|
2018-01-26T21:59:38.000Z
|
2022-02-17T10:23:29.000Z
|
flatdata-generator/tests/generators/py_expectations/archives/subarchive.py
|
VeaaC/flatdata
|
5df78d89938dbbd1566fa85d417b9674ef402561
|
[
"Apache-2.0"
] | 114
|
2018-01-26T17:49:20.000Z
|
2021-11-26T13:27:08.000Z
|
flatdata-generator/tests/generators/py_expectations/archives/subarchive.py
|
VeaaC/flatdata
|
5df78d89938dbbd1566fa85d417b9674ef402561
|
[
"Apache-2.0"
] | 22
|
2018-01-26T16:51:24.000Z
|
2021-04-27T13:32:44.000Z
|
class n_X(flatdata.archive.Archive):
"""Reader binding for the flatdata archive `n.X`.

Exposes a single required raw_data resource named `payload`.
NOTE(review): this appears to be generated binding code (the flatdata
schema is embedded verbatim below) — presumably changes belong in the
generator, not here; confirm before hand-editing.
"""
_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
"""
_PAYLOAD_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
"""
_PAYLOAD_DOC = """"""
_NAME = "X"
# Resource table consumed by the Archive base class: the archive's own
# signature plus one required raw_data resource.
_RESOURCES = {
"X.archive" : flatdata.archive.ResourceSignature(
container=flatdata.resources.RawData,
initializer=None,
schema=_SCHEMA,
is_optional=False,
doc="Archive signature"),
"payload": flatdata.archive.ResourceSignature(container=flatdata.resources.RawData,
initializer=None,
schema=_PAYLOAD_SCHEMA,
is_optional=False,
doc=_PAYLOAD_DOC),
}
def __init__(self, resource_storage):
# Delegates entirely to the Archive base; resource_storage supplies
# the backing data for the resources declared in _RESOURCES.
flatdata.archive.Archive.__init__(self, resource_storage)
class n_XBuilder(flatdata.archive_builder.ArchiveBuilder):
"""Writer-side counterpart of n_X: builds a `n.X` archive.

Declares the same schema and resource table as the reader binding,
but with ArchiveBuilder resource signatures.
NOTE(review): appears to be generated code — confirm before hand-editing.
"""
_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
"""
_PAYLOAD_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
"""
_PAYLOAD_DOC = """"""
_NAME = "X"
# Resource table consumed by the ArchiveBuilder base class.
_RESOURCES = {
"X.archive" : flatdata.archive_builder.ResourceSignature(
container=flatdata.resources.RawData,
initializer=None,
schema=_SCHEMA,
is_optional=False,
doc="Archive signature"),
"payload": flatdata.archive_builder.ResourceSignature(container=flatdata.resources.RawData,
initializer=None,
schema=_PAYLOAD_SCHEMA,
is_optional=False,
doc=_PAYLOAD_DOC),
}
def __init__(self, resource_storage):
# Delegates entirely to the ArchiveBuilder base.
flatdata.archive_builder.ArchiveBuilder.__init__(self, resource_storage)
class n_A(flatdata.archive.Archive):
"""Reader binding for the flatdata archive `n.A`.

`n.A` contains two sub-archive resources of type `.n.X`: a required
`data` and an `@optional` `optional_data`; both are materialized via
the n_X reader class.
NOTE(review): appears to be generated code — confirm before hand-editing.
"""
_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
data : archive .n.X;
@optional
optional_data : archive .n.X;
}
}
"""
_DATA_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
data : archive .n.X;
}
}
"""
_DATA_DOC = """"""
_OPTIONAL_DATA_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
@optional
optional_data : archive .n.X;
}
}
"""
_OPTIONAL_DATA_DOC = """"""
_NAME = "A"
# Resource table: archive signature plus the two sub-archives, each
# initialized through the n_X reader class.
_RESOURCES = {
"A.archive" : flatdata.archive.ResourceSignature(
container=flatdata.resources.RawData,
initializer=None,
schema=_SCHEMA,
is_optional=False,
doc="Archive signature"),
"data": flatdata.archive.ResourceSignature(container=flatdata.archive.Archive,
initializer=n_X,
schema=_DATA_SCHEMA,
is_optional=False,
doc=_DATA_DOC),
"optional_data": flatdata.archive.ResourceSignature(container=flatdata.archive.Archive,
initializer=n_X,
schema=_OPTIONAL_DATA_SCHEMA,
is_optional=True,
doc=_OPTIONAL_DATA_DOC),
}
def __init__(self, resource_storage):
# Delegates entirely to the Archive base.
flatdata.archive.Archive.__init__(self, resource_storage)
class n_ABuilder(flatdata.archive_builder.ArchiveBuilder):
"""Writer-side counterpart of n_A: builds a `n.A` archive.

Same schema and resource layout as the reader binding (required
`data` and optional `optional_data`, both `.n.X` sub-archives), with
ArchiveBuilder resource signatures.
NOTE(review): appears to be generated code — confirm before hand-editing.
"""
_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
data : archive .n.X;
@optional
optional_data : archive .n.X;
}
}
"""
_DATA_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
data : archive .n.X;
}
}
"""
_DATA_DOC = """"""
_OPTIONAL_DATA_SCHEMA = """namespace n {
archive X
{
payload : raw_data;
}
}
namespace n {
archive A
{
@optional
optional_data : archive .n.X;
}
}
"""
_OPTIONAL_DATA_DOC = """"""
_NAME = "A"
# Resource table consumed by the ArchiveBuilder base class.
_RESOURCES = {
"A.archive" : flatdata.archive_builder.ResourceSignature(
container=flatdata.resources.RawData,
initializer=None,
schema=_SCHEMA,
is_optional=False,
doc="Archive signature"),
"data": flatdata.archive_builder.ResourceSignature(container=flatdata.archive.Archive,
initializer=n_X,
schema=_DATA_SCHEMA,
is_optional=False,
doc=_DATA_DOC),
"optional_data": flatdata.archive_builder.ResourceSignature(container=flatdata.archive.Archive,
initializer=n_X,
schema=_OPTIONAL_DATA_SCHEMA,
is_optional=True,
doc=_OPTIONAL_DATA_DOC),
}
def __init__(self, resource_storage):
# Delegates entirely to the ArchiveBuilder base.
flatdata.archive_builder.ArchiveBuilder.__init__(self, resource_storage)
| 20.922374
| 103
| 0.614579
| 459
| 4,582
| 5.797386
| 0.071895
| 0.124014
| 0.102217
| 0.086434
| 0.990981
| 0.990981
| 0.988726
| 0.988726
| 0.988726
| 0.988726
| 0
| 0
| 0.27237
| 4,582
| 219
| 104
| 20.922374
| 0.79814
| 0
| 0
| 0.663265
| 0
| 0
| 0.258346
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020408
| false
| 0
| 0
| 0
| 0.163265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0d696924aed45c65c31e7672ba5da79fc4150e55
| 256
|
py
|
Python
|
file/WatchDogFileHandler.py
|
compix/MetadataManagerCore
|
59ff0bd908d69c624834d72e64f4e0bff01f2a49
|
[
"MIT"
] | null | null | null |
file/WatchDogFileHandler.py
|
compix/MetadataManagerCore
|
59ff0bd908d69c624834d72e64f4e0bff01f2a49
|
[
"MIT"
] | null | null | null |
file/WatchDogFileHandler.py
|
compix/MetadataManagerCore
|
59ff0bd908d69c624834d72e64f4e0bff01f2a49
|
[
"MIT"
] | null | null | null |
from MetadataManagerCore.file.FileHandler import FileHandler
from MetadataManagerCore.file.WatchDog import WatchDog
class WatchDogFileHandler(FileHandler):
"""FileHandler subclass that carries a WatchDog reference.

The attribute starts as None; presumably it is assigned by the owner
after construction — TODO confirm against callers.
"""
def __init__(self) -> None:
super().__init__()
# None until a WatchDog is attached (annotation widened to reflect that).
self.watchDog : "WatchDog | None" = None
| 32
| 60
| 0.761719
| 25
| 256
| 7.48
| 0.52
| 0.245989
| 0.28877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160156
| 256
| 8
| 61
| 32
| 0.869767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0d8ed5d479a6771d6d90986c1f480093a796d3b4
| 141
|
py
|
Python
|
home/views.py
|
smlaming/Roomate-Finder
|
864d6633f4303b53596d8fe62572bf7808c6c443
|
[
"MIT"
] | null | null | null |
home/views.py
|
smlaming/Roomate-Finder
|
864d6633f4303b53596d8fe62572bf7808c6c443
|
[
"MIT"
] | null | null | null |
home/views.py
|
smlaming/Roomate-Finder
|
864d6633f4303b53596d8fe62572bf7808c6c443
|
[
"MIT"
] | null | null | null |
# Create your views here.
from django.http import HttpResponse
def index(request):
    """Home-page view: responds with a fixed plain-text greeting."""
    greeting = "Our roommate finder page ^_^"
    return HttpResponse(greeting)
| 20.142857
| 55
| 0.751773
| 18
| 141
| 5.833333
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163121
| 141
| 6
| 56
| 23.5
| 0.889831
| 0.163121
| 0
| 0
| 0
| 0
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
0da375ff032090ac25414b4af7dcffe8a5903028
| 21,863
|
py
|
Python
|
matrix-python-project/cover_generator/typesetting/model/four.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 141
|
2021-06-27T03:18:54.000Z
|
2022-03-17T03:24:26.000Z
|
matrix-python-project/cover_generator/typesetting/model/four.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 1
|
2021-08-06T17:35:01.000Z
|
2021-08-06T17:35:01.000Z
|
matrix-python-project/cover_generator/typesetting/model/four.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 24
|
2021-06-29T01:58:59.000Z
|
2022-03-02T01:42:43.000Z
|
import sys, os, time, json, random
from PIL import Image, ImageDraw, ImageFont, ImageFilter
from cover_generator.typesetting.more import More
from cover_generator.typesetting.mark import Mark
from cover_generator.typesetting.build import Build
from utils.snow_id import SnowId
sys.path.append(os.getcwd())
class Four(object):
def __init__(self, folder_key):
self.image_list = None
self.rank_model = None
self.tb = None
with open("cover_generator/typesetting/style.json", 'r') as f0:
style_config = json.load(f0)
self.model = style_config["four"]
self.func_map = {
1: self.quadruple_vertical_build,
2: self.quadruple_horizontal_build,
3: self.chairs_build,
4: self.chairs_spin_build,
5: self.h2v2_build,
6: self.h2v2_spin_build,
7: self.windows_build,
8: self.windows_vertical_build,
9: self.windows_horizontal_build,
}
self._build = Build(folder_key, folder_key + "_temp")
def quadruple_vertical(self, image_list):
return More(image_list, self.model[0]["unit_detail"], "41").main()
def quadruple_horizontal(self, image_list):
return More(image_list, self.model[1]["unit_detail"], "42").main()
def chairs(self, image_list):
return More(image_list, self.model[2]["unit_detail"], "43").main()
def chairs_spin(self, image_list):
return More(image_list, self.model[3]["unit_detail"], "44").main()
def h2v2(self, image_list):
return More(image_list, self.model[4]["unit_detail"], "45").main()
def h2v2_spin(self, image_list):
return More(image_list, self.model[5]["unit_detail"], "46").main()
def windows(self, image_list):
return More(image_list, self.model[6]["unit_detail"], "47").main()
def windows_vertical(self, image_list):
return More(image_list, self.model[7]["unit_detail"], "48").main()
def windows_horizontal(self, image_list):
return More(image_list, self.model[8]["unit_detail"], "49").main()
def build(self, image_list, model):
self.tb = Image.open("cover_generator/background.jpg")
self.image_list = image_list
self.rank_model = model
self.func_map[int(model["model_id"][1])]()
def quadruple_vertical_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[0]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[0]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[0]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[0]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list = [pic_1, pic_2, pic_3]
random.shuffle(pic_list)
# 结构也需要shuffle
kind = random.randint(0, 1)
# 保存
if kind == 0:
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (0, 480))
self.tb.paste(pic_list[2], (0, 960))
self.tb.paste(pic_4, (540, 0))
else:
self.tb.paste(pic_list[0], (540, 0))
self.tb.paste(pic_list[1], (540, 480))
self.tb.paste(pic_list[2], (540, 960))
self.tb.paste(pic_4, (0, 0))
self._build.save(self.tb)
def quadruple_horizontal_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[1]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[1]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[1]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[1]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list = [pic_1, pic_2, pic_3]
random.shuffle(pic_list)
# 结构也需要shuffle
kind = random.randint(0, 1)
# {
# "width": 360,
# "height": 720
# },
# {
# "width": 360,
# "height": 720
# },
# {
# "width": 360,
# "height": 720
# },
# {
# "width": 1080,
# "height": 720
# }
# 保存
if kind == 0:
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (360, 0))
self.tb.paste(pic_list[2], (720, 0))
self.tb.paste(pic_4, (0, 720))
else:
self.tb.paste(pic_list[0], (0, 720))
self.tb.paste(pic_list[1], (360, 720))
self.tb.paste(pic_list[2], (720, 720))
self.tb.paste(pic_4, (0, 0))
self._build.save(self.tb)
def chairs_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[2]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[2]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[2]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[2]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list = [pic_2, pic_3]
random.shuffle(pic_list)
# 结构也需要shuffle
kind = random.randint(0, 3)
# {
# "width": 720,
# "height": 720
# },
# {
# "width": 360,
# "height": 720
# },
# {
# "width": 360,
# "height": 720
# },
# {
# "width": 360,
# "height": 1440
# }
# 保存
if kind == 0:
self.tb.paste(pic_1, (0, 0))
self.tb.paste(pic_list[1], (0, 720))
self.tb.paste(pic_list[0], (360, 720))
self.tb.paste(pic_4, (720, 0))
elif kind == 1:
self.tb.paste(pic_1, (360, 0))
self.tb.paste(pic_list[1], (360, 720))
self.tb.paste(pic_list[0], (720, 720))
self.tb.paste(pic_4, (0, 0))
elif kind == 2:
self.tb.paste(pic_1, (0, 720))
self.tb.paste(pic_list[1], (0, 0))
self.tb.paste(pic_list[0], (360, 0))
self.tb.paste(pic_4, (720, 0))
else:
self.tb.paste(pic_1, (360, 720))
self.tb.paste(pic_list[1], (360, 0))
self.tb.paste(pic_list[0], (720, 0))
self.tb.paste(pic_4, (0, 0))
self._build.save(self.tb)
def chairs_spin_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[3]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[3]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[3]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[3]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list = [pic_3, pic_4]
random.shuffle(pic_list)
# 结构也需要shuffle
kind = random.randint(0, 3)
# 保存
# {
# "width": 1080,
# "height": 480
# },
# {
# "width": 540,
# "height": 960
# },
# {
# "width": 540,
# "height": 480
# },
# {
# "width": 540,
# "height": 480
# }
if kind == 0:
self.tb.paste(pic_1, (0, 0))
self.tb.paste(pic_2, (0, 480))
self.tb.paste(pic_list[1], (540, 480))
self.tb.paste(pic_list[0], (540, 960))
elif kind == 1:
self.tb.paste(pic_1, (0, 0))
self.tb.paste(pic_2, (540, 480))
self.tb.paste(pic_list[1], (0, 480))
self.tb.paste(pic_list[0], (0, 960))
elif kind == 2:
self.tb.paste(pic_1, (0, 960))
self.tb.paste(pic_2, (0, 0))
self.tb.paste(pic_list[1], (540, 0))
self.tb.paste(pic_list[0], (540, 480))
else:
self.tb.paste(pic_1, (0, 960))
self.tb.paste(pic_2, (540, 0))
self.tb.paste(pic_list[1], (0, 480))
self.tb.paste(pic_list[0], (0, 0))
self._build.save(self.tb)
def h2v2_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[4]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[4]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[4]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[4]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list_1 = [pic_1, pic_2]
random.shuffle(pic_list_1)
pic_list_2 = [pic_3, pic_4]
random.shuffle(pic_list_2)
# 结构也需要shuffle,此处三种结构
kind = random.randint(0, 2)
# 保存
if kind == 0:
self.tb.paste(pic_list_1[0], (0, 0))
self.tb.paste(pic_list_1[1], (0, 720))
self.tb.paste(pic_list_2[0], (360, 0))
self.tb.paste(pic_list_2[1], (720, 0))
elif kind == 1:
self.tb.paste(pic_list_1[0], (720, 0))
self.tb.paste(pic_list_1[1], (720, 720))
self.tb.paste(pic_list_2[0], (0, 0))
self.tb.paste(pic_list_2[1], (360, 0))
else:
self.tb.paste(pic_list_1[0], (360, 0))
self.tb.paste(pic_list_1[1], (360, 720))
self.tb.paste(pic_list_2[0], (0, 0))
self.tb.paste(pic_list_2[1], (720, 0))
self._build.save(self.tb)
def h2v2_spin_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[5]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[5]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[5]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[5]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list_1 = [pic_1, pic_2]
random.shuffle(pic_list_1)
pic_list_2 = [pic_3, pic_4]
random.shuffle(pic_list_2)
# 结构也需要shuffle,此处三种结构
kind = random.randint(0, 2)
# 保存
# {
# "width": 1080,
# "height": 480
# },
# {
# "width": 1080,
# "height": 480
# },
# {
# "width": 540,
# "height": 480
# },
# {
# "width": 540,
# "height": 480
# }
if kind == 0:
self.tb.paste(pic_list_1[0], (0, 0))
self.tb.paste(pic_list_1[1], (0, 480))
self.tb.paste(pic_list_2[0], (0, 960))
self.tb.paste(pic_list_2[1], (540, 960))
elif kind == 1:
self.tb.paste(pic_list_1[0], (0, 480))
self.tb.paste(pic_list_1[1], (0, 960))
self.tb.paste(pic_list_2[0], (0, 0))
self.tb.paste(pic_list_2[1], (540, 0))
else:
self.tb.paste(pic_list_1[0], (0, 0))
self.tb.paste(pic_list_1[1], (0, 960))
self.tb.paste(pic_list_2[0], (0, 480))
self.tb.paste(pic_list_2[1], (540, 480))
self._build.save(self.tb)
def windows_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[6]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[6]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[6]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[6]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list = [pic_1, pic_2, pic_3, pic_4]
random.shuffle(pic_list)
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (540, 0))
self.tb.paste(pic_list[2], (0, 720))
self.tb.paste(pic_list[3], (540, 720))
self._build.save(self.tb)
def windows_vertical_build(self):
# 贴第一张图
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[7]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
# 贴第二张图
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[7]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
# 贴第三张图
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[7]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
# 贴第四张图
image = self.image_list[self.rank_model["model_match"][3][1]]
model = self.model[7]["unit_detail"][3]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_4 = self._build.build_up(image["filename"], rate, area)
# 随机对相同宽高的图片进行shuffle
pic_list_1 = [pic_1, pic_2]
random.shuffle(pic_list_1)
pic_list_2 = [pic_3, pic_4]
random.shuffle(pic_list_2)
# 结构也需要shuffle,此处2种结构
kind = random.randint(0, 1)
# 保存
if kind == 0:
self.tb.paste(pic_list_1[0], (0, 0))
self.tb.paste(pic_list_1[1], (360, 720))
self.tb.paste(pic_list_2[0], (720, 0))
self.tb.paste(pic_list_2[1], (0, 720))
else:
self.tb.paste(pic_list_1[0], (360, 0))
self.tb.paste(pic_list_1[1], (0, 720))
self.tb.paste(pic_list_2[0], (0, 0))
self.tb.paste(pic_list_2[1], (720, 720))
self._build.save(self.tb)
def windows_horizontal_build(self):
    """Compose four cropped tiles into the horizontal 'windows' layout and save it."""
    # Build the four tiles: pair each ranked image with the matching
    # layout unit, crop to the unit's aspect, and render the tile.
    tiles = []
    for slot in range(4):
        image = self.image_list[self.rank_model["model_match"][slot][1]]
        model = self.model[8]["unit_detail"][slot]
        rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
        tiles.append(self._build.build_up(image["filename"], rate, area))
    # Shuffle tiles that share the same width/height so the layout varies.
    pair_a = tiles[:2]
    random.shuffle(pair_a)
    pair_b = tiles[2:]
    random.shuffle(pair_b)
    # Two mirror-image arrangements; pick one at random and paste.
    if random.randint(0, 1) == 0:
        self.tb.paste(pair_a[0], (0, 0))
        self.tb.paste(pair_a[1], (540, 1080))
        self.tb.paste(pair_b[0], (540, 0))
        self.tb.paste(pair_b[1], (0, 360))
    else:
        self.tb.paste(pair_a[0], (540, 0))
        self.tb.paste(pair_a[1], (0, 1080))
        self.tb.paste(pair_b[0], (0, 0))
        self.tb.paste(pair_b[1], (540, 360))
    self._build.save(self.tb)
| 36.682886
| 98
| 0.551251
| 2,974
| 21,863
| 3.866174
| 0.038668
| 0.053748
| 0.088015
| 0.112019
| 0.914159
| 0.903896
| 0.895547
| 0.878935
| 0.841799
| 0.811272
| 0
| 0.056967
| 0.27096
| 21,863
| 595
| 99
| 36.744538
| 0.664408
| 0.055253
| 0
| 0.6
| 0
| 0
| 0.101077
| 0.003312
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.016667
| 0.025
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0deb181f3e5c360ee58a4abcc80c6bebbba60d4f
| 4,811
|
py
|
Python
|
src/frr/tests/topotests/bgp_l3vpn_to_bgp_direct/scripts/add_routes.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/bgp_l3vpn_to_bgp_direct/scripts/add_routes.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/bgp_l3vpn_to_bgp_direct/scripts/add_routes.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
from lib.lutil import luCommand
# Topotest script: add static VRF prefixes on the PE routers (r1, r3, r4)
# and verify they propagate as VNC registrations and BGP VPN routes.
# luCommand(router, command, match_regex, op, description[, timeout]):
# op "wait" polls until the regex matches, "none" just logs the output,
# "pass" records an immediate pass/fail.

# Wait for the CE static next-hops (99.0.0.x) to become valid on each PE.
luCommand(
    "r1", 'vtysh -c "show bgp next"', "99.0.0.. valid", "wait", "See CE static NH"
)
luCommand(
    "r3", 'vtysh -c "show bgp next"', "99.0.0.. valid", "wait", "See CE static NH"
)
luCommand(
    "r4", 'vtysh -c "show bgp next"', "99.0.0.. valid", "wait", "See CE static NH"
)
# Wait until each PE has learned the CE routes in its ipv4 unicast table.
luCommand("r1", 'vtysh -c "show bgp ipv4 uni"', "i5.*i5", "wait", "See CE routes")
luCommand("r3", 'vtysh -c "show bgp ipv4 uni"', "i5.*i5", "wait", "See CE routes")
luCommand("r4", 'vtysh -c "show bgp ipv4 uni"', "i5.*i5", "wait", "See CE routes")
# Informational dumps of specific prefixes (op "none": no pass/fail check).
luCommand("ce1", 'vtysh -c "show bgp ipv4 uni 5.1.0.0/24"', "", "none", "See CE routes")
luCommand("r1", 'vtysh -c "show bgp ipv4 uni 5.1.0.0/24"', "", "none", "See CE routes")
luCommand("ce2", 'vtysh -c "show bgp ipv4 uni 5.1.0.0/24"', "", "none", "See CE routes")
luCommand("r3", 'vtysh -c "show bgp ipv4 uni 5.1.0.0/24"', "", "none", "See CE routes")
luCommand("ce3", 'vtysh -c "show bgp ipv4 uni 5.1.2.0/24"', "", "none", "See CE routes")
luCommand("r4", 'vtysh -c "show bgp ipv4 uni 5.1.2.0/24"', "", "none", "See CE routes")
# Register 99.0.0.1/32 in vrf cust1 on r1, then confirm it appears as a
# local VNC registration and that imports complete.
luCommand(
    "r1", 'vtysh -c "add vrf cust1 prefix 99.0.0.1/32"', ".", "none", "IP Address"
)
luCommand(
    "r1",
    'vtysh -c "show vnc registrations local"',
    "99.0.0.1",
    "wait",
    "Local Registration",
)
luCommand(
    "r1",
    'vtysh -c "show vnc registrations imported"',
    "2 out of 2 imported",
    "wait",
    "Imported Registrations",
)
# The other PEs should see r1's static address in their ipv4 vpn tables.
luCommand(
    "r3",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.1/32",
    "wait",
    "See R1s static address",
)
luCommand(
    "r4",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.1/32",
    "wait",
    "See R1s static address",
)
luCommand(
    "r3", 'vtysh -c "show bgp ipv4 vpn rd 10:1"', "i5.*i5", "wait", "See R1s imports"
)
luCommand(
    "r4", 'vtysh -c "show bgp ipv4 vpn rd 10:1"', "i5.*i5", "wait", "See R1s imports"
)
# Same procedure for r3: register 99.0.0.2/32 and verify.
luCommand(
    "r3", 'vtysh -c "add vrf cust1 prefix 99.0.0.2/32"', ".", "none", "IP Address"
)
luCommand(
    "r3",
    'vtysh -c "show vnc registrations local"',
    "99.0.0.2",
    "wait",
    "Local Registration",
)
# Probe (op "none", 2s timeout) whether r3 completed its imports; the
# result gates the stricter checks below without failing the run here.
have2ndImports = luCommand(
    "r3",
    'vtysh -c "show vnc registrations imported"',
    "2 out of 2 imported",
    "none",
    "Imported Registrations",
    2,
)
if have2ndImports:
    luCommand(
        "r3",
        'vtysh -c "show vnc registrations imported"',
        "2 out of 2 imported",
        "pass",
        "Imported Registrations",
    )
luCommand(
    "r1",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.2/32",
    "wait",
    "See R3s static address",
)
luCommand(
    "r4",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.2/32",
    "wait",
    "See R3s static address",
)
if have2ndImports:
    luCommand(
        "r1",
        'vtysh -c "show bgp ipv4 vpn rd 10:3"',
        "i5.*i5",
        "none",
        "See R3s imports",
    )
    luCommand(
        "r4",
        'vtysh -c "show bgp ipv4 vpn rd 10:3"',
        "i5.*i5",
        "none",
        "See R3s imports",
    )
# Same procedure for r4: register 99.0.0.3/32 and verify.
luCommand(
    "r4", 'vtysh -c "add vrf cust1 prefix 99.0.0.3/32"', ".", "none", "IP Address"
)
luCommand(
    "r4",
    'vtysh -c "show vnc registrations local"',
    "99.0.0.3",
    "wait",
    "Local Registration",
)
luCommand(
    "r4",
    'vtysh -c "show vnc registrations imported"',
    "2 out of 2 imported",
    "wait",
    "Imported Registrations",
)
luCommand(
    "r1",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.3/32",
    "wait",
    "See R4s static address",
)
luCommand(
    "r3",
    'vtysh -c "show bgp ipv4 vpn"',
    "i99.0.0.3/32",
    "wait",
    "See R4s static address",
)
luCommand(
    "r1", 'vtysh -c "show bgp ipv4 vpn rd 10:4"', "i5.*i5", "wait", "See R4s imports"
)
luCommand(
    "r3", 'vtysh -c "show bgp ipv4 vpn rd 10:4"', "i5.*i5", "wait", "See R4s imports"
)
# r4's CE prefixes (5.1.2.0/24, 5.1.3.0/24) should show up as remote
# registrations on the other PEs.
luCommand(
    "r1",
    'vtysh -c "show vnc registrations remote"',
    "5.1.2.0/24 .*5.1.3.0/24",
    "wait",
    "R4s registrations",
)
luCommand(
    "r3",
    'vtysh -c "show vnc registrations remote"',
    "5.1.2.0/24 .*5.1.3.0/24",
    "wait",
    "R4s registrations",
)
# Only meaningful if the earlier import probe succeeded.
if have2ndImports:
    luCommand(
        "r1",
        'vtysh -c "show vnc registrations remote"',
        "5.1.0.0/24 .*5.1.1.0/24",
        "wait",
        "Remote registrations",
    )
    luCommand(
        "r3",
        'vtysh -c "show vnc registrations remote"',
        "5.1.0.0/24 .*5.1.1.0/24",
        "wait",
        "Remote registrations",
    )
    luCommand(
        "r4",
        'vtysh -c "show vnc registrations remote"',
        "5.1.0.0/24 .*5.1.1.0/24",
        "wait",
        "Remote registrations",
    )
# Final informational dumps of all registrations on each PE.
luCommand("r1", 'vtysh -c "show vnc registrations"', ".", "none")
luCommand("r3", 'vtysh -c "show vnc registrations"', ".", "none")
luCommand("r4", 'vtysh -c "show vnc registrations"', ".", "none")
| 24.798969
| 88
| 0.549574
| 701
| 4,811
| 3.771755
| 0.087019
| 0.09531
| 0.147504
| 0.118003
| 0.948185
| 0.938351
| 0.923222
| 0.847958
| 0.847201
| 0.781392
| 0
| 0.087017
| 0.247558
| 4,811
| 193
| 89
| 24.927461
| 0.64337
| 0
| 0
| 0.684492
| 0
| 0
| 0.578674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.005348
| 0.122995
| 0
| 0.122995
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
217553147fafba3de640bf80f61309a4f93cb5db
| 66
|
py
|
Python
|
uq360/algorithms/classification_calibration/__init__.py
|
Sclare87/UQ360
|
2378bfa4a8d61f813afbf6854341888434c9eb11
|
[
"Apache-2.0"
] | 148
|
2021-05-27T20:52:51.000Z
|
2022-03-16T22:49:48.000Z
|
uq360/algorithms/classification_calibration/__init__.py
|
Sclare87/UQ360
|
2378bfa4a8d61f813afbf6854341888434c9eb11
|
[
"Apache-2.0"
] | 9
|
2021-06-21T18:45:07.000Z
|
2021-11-08T14:42:30.000Z
|
uq360/algorithms/classification_calibration/__init__.py
|
Sclare87/UQ360
|
2378bfa4a8d61f813afbf6854341888434c9eb11
|
[
"Apache-2.0"
] | 27
|
2021-06-01T18:29:02.000Z
|
2022-03-02T06:56:03.000Z
|
from .classification_calibration import ClassificationCalibration
| 33
| 65
| 0.924242
| 5
| 66
| 12
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 66
| 1
| 66
| 66
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2178a012104cc40af268f1e87a0a1f6424e3beea
| 9,762
|
py
|
Python
|
storage/per_buffer.py
|
nsortur/equi_rl
|
83bd2ee9dfaab715e51b71ffff90ab990aaed5f8
|
[
"MIT"
] | 9
|
2022-02-20T18:18:51.000Z
|
2022-03-24T03:04:44.000Z
|
storage/per_buffer.py
|
nsortur/equi_rl
|
83bd2ee9dfaab715e51b71ffff90ab990aaed5f8
|
[
"MIT"
] | null | null | null |
storage/per_buffer.py
|
nsortur/equi_rl
|
83bd2ee9dfaab715e51b71ffff90ab990aaed5f8
|
[
"MIT"
] | 2
|
2022-02-19T05:17:06.000Z
|
2022-02-21T20:53:26.000Z
|
import sys
import numpy as np
import random
from .buffer import QLearningBuffer, QLearningBufferExpert
from .segment_tree import SumSegmentTree, MinSegmentTree
NORMAL = 0
EXPERT = 1
class PrioritizedQLearningBuffer:
    """Q-learning replay buffer with proportional prioritized sampling (PER).

    Wraps a QLearningBuffer (or QLearningBufferExpert) and maintains sum/min
    segment trees so that transitions are sampled with probability
    proportional to priority**alpha, and importance-sampling weights can be
    computed for bias correction.
    """

    def __init__(self, size, alpha, base_buffer=NORMAL):
        """Create the buffer.

        Args:
            size: maximum number of transitions held.
            alpha: prioritization exponent (must be > 0).
            base_buffer: NORMAL or EXPERT; selects the underlying buffer class.
        """
        if base_buffer == EXPERT:
            self.buffer = QLearningBufferExpert(size)
        elif base_buffer == NORMAL:
            self.buffer = QLearningBuffer(size)
        else:
            raise NotImplementedError
        assert alpha > 0
        self._alpha = alpha
        # Segment trees require a power-of-two capacity >= size.
        capacity = 1
        while capacity < size:
            capacity <<= 1
        self._it_sum = SumSegmentTree(capacity)
        self._it_min = MinSegmentTree(capacity)
        self._max_priority = 1.0

    def __len__(self):
        return len(self.buffer)

    def __getitem__(self, key):
        return self.buffer[key]

    def __setitem__(self, key, value):
        self.buffer[key] = value

    def add(self, *args, **kwargs):
        '''
        See ReplayBuffer.store_effect
        '''
        # New transitions enter with the maximum priority seen so far, so
        # they are likely to be sampled at least once.
        slot = self.buffer._next_idx
        self.buffer.add(*args, **kwargs)
        initial = self._max_priority ** self._alpha
        self._it_sum[slot] = initial
        self._it_min[slot] = initial

    def _sample_proportional(self, batch_size):
        # Draw indices with probability proportional to stored priority mass.
        return [
            self._it_sum.find_prefixsum_idx(
                random.random() * self._it_sum.sum(0, len(self.buffer) - 1)
            )
            for _ in range(batch_size)
        ]

    def sample(self, batch_size, beta):
        '''
        Sample a batch of experiences with importance weights.

        Args:
            - batch_size: How many transitions to sample.
            - beta: To what degree to use importance weights
                    (0 - no corrections, 1 - full correction)

        Returns (batch, weights, idxes):
            - batch: list of sampled transitions from the underlying buffer
            - weights: np.array of importance weights, normalized to <= 1
            - idxes: list of buffer indices that were sampled
        '''
        assert beta > 0
        idxes = self._sample_proportional(batch_size)
        total = self._it_sum.sum()
        count = len(self.buffer)
        # Largest possible weight, used to normalize all weights to <= 1.
        max_weight = (self._it_min.min() / total * count) ** (-beta)
        weights = np.array(
            [
                ((self._it_sum[idx] / total * count) ** (-beta)) / max_weight
                for idx in idxes
            ]
        )
        batch = [self.buffer._storage[idx] for idx in idxes]
        return batch, weights, idxes

    def update_priorities(self, idxes, priorities):
        '''
        Update priorities of sampled transitions: sets the priority of the
        transition at index idxes[i] in the buffer to priorities[i].

        Args:
            - idxes: List of idxes of sampled transitions
            - priorities: List of updated priorities corresponding to
                          transitions at the sampled idxes denoted by
                          variable `idxes`.
        '''
        assert len(idxes) == len(priorities)
        for idx, priority in zip(idxes, priorities):
            if priority <= 0:
                print("Invalid priority:", priority)
                print("All priorities:", priorities)
            assert priority > 0
            assert 0 <= idx < len(self.buffer)
            scaled = priority ** self._alpha
            self._it_sum[idx] = scaled
            self._it_min[idx] = scaled
            self._max_priority = max(self._max_priority, priority)

    def getSaveState(self):
        """Return the underlying buffer's state plus the PER bookkeeping."""
        state = self.buffer.getSaveState()
        state.update(
            alpha=self._alpha,
            it_sum=self._it_sum,
            it_min=self._it_min,
            max_priority=self._max_priority,
        )
        return state

    def loadFromState(self, save_state):
        """Restore state previously produced by getSaveState()."""
        self.buffer.loadFromState(save_state)
        self._alpha = save_state['alpha']
        self._it_sum = save_state['it_sum']
        self._it_min = save_state['it_min']
        self._max_priority = save_state['max_priority']
# class PrioritizedQLearningBuffer(QLearningBufferExpert):
# def __init__(self, size, alpha):
# '''
# Create Prioritized Replay buffer.
#
# Args:
# - size: Max number of transitions to store in the buffer.
# - alpha: How much prioritization is used
# (0 - no prioritization, 1 - full prioritization)
#
# See Also
# --------
# ReplayBuffer.__init__
# '''
# super(PrioritizedQLearningBuffer, self).__init__(size)
# assert alpha > 0
# self._alpha = alpha
#
# it_capacity = 1
# while it_capacity < size:
# it_capacity *= 2
#
# self._it_sum = SumSegmentTree(it_capacity)
# self._it_min = MinSegmentTree(it_capacity)
# self._max_priority = 1.0
#
# def add(self, *args, **kwargs):
# '''
# See ReplayBuffer.store_effect
# '''
# idx = self._next_idx
# super(PrioritizedQLearningBuffer, self).add(*args, **kwargs)
# self._it_sum[idx] = self._max_priority ** self._alpha
# self._it_min[idx] = self._max_priority ** self._alpha
#
# def _sample_proportional(self, batch_size):
# res = []
# for _ in range(batch_size):
# mass = random.random() * self._it_sum.sum(0, len(self._storage) - 1)
# idx = self._it_sum.find_prefixsum_idx(mass)
# res.append(idx)
# return res
#
# def sample(self, batch_size, beta):
# '''
# Sample a batch of experiences.
#
# compared to ReplayBuffer.sample
# it also returns importance weights and idxes
# of sampled experiences.
#
# Args:
# - batch_size: How many transitions to sample.
# - beta: To what degree to use importance weights
# (0 - no corrections, 1 - full correction)
#
# Returns (obs_batch, act_batch, rew_batch, next_obs_batch, done_mask, weights, idxes)
# - obs_batch: batch of observations
# - act_batch: batch of actions executed given obs_batch
# - rew_batch: rewards received as results of executing act_batch
# - next_obs_batch: next set of observations seen after executing act_batch
# - done_mask: done_mask[i] = 1 if executing act_batch[i] resulted in
# the end of an episode and 0 otherwise.
# - weights: Array of shape (batch_size,) and dtype np.float32
# denoting importance weight of each sampled transition
# - idxes: Array of shape (batch_size,) and dtype np.int32
# idexes in buffer of sampled experiences
# '''
# assert beta > 0
#
# idxes = self._sample_proportional(batch_size)
#
# weights = []
# p_min = self._it_min.min() / self._it_sum.sum()
# max_weight = (p_min * len(self._storage)) ** (-beta)
#
# for idx in idxes:
# p_sample = self._it_sum[idx] / self._it_sum.sum()
# weight = (p_sample * len(self._storage)) ** (-beta)
# weights.append(weight / max_weight)
# weights = np.array(weights)
# batch = [self._storage[idx] for idx in idxes]
# return batch, weights, idxes
#
# def update_priorities(self, idxes, priorities):
# '''
# Update priorities of sampled transitions.
#
# sets priority of transition at index idxes[i] in buffer
# to priorities[i].
#
# Args:
# - idxes: List of idxes of sampled transitions
# - priorities: List of updated priorities corresponding to
# transitions at the sampled idxes denoted by
# variable `idxes`.
# '''
# assert len(idxes) == len(priorities)
# for idx, priority in zip(idxes, priorities):
#
# if priority <= 0:
# print("Invalid priority:", priority)
# print("All priorities:", priorities)
#
# assert priority > 0
# assert 0 <= idx < len(self._storage)
# self._it_sum[idx] = priority ** self._alpha
# self._it_min[idx] = priority ** self._alpha
#
# self._max_priority = max(self._max_priority, priority)
#
# def getSaveState(self):
# state = super().getSaveState()
# state.update(
# {
# 'alpha': self._alpha,
# 'it_sum': self._it_sum,
# 'it_min': self._it_min,
# 'max_priority': self._max_priority
# }
# )
# return state
#
# def loadFromState(self, save_state):
# super().loadFromState(save_state)
# self._alpha = save_state['alpha']
# self._it_sum = save_state['it_sum']
# self._it_min = save_state['it_min']
# self._max_priority = save_state['max_priority']
| 35.627737
| 94
| 0.579287
| 1,109
| 9,762
| 4.864743
| 0.13706
| 0.035589
| 0.033364
| 0.013346
| 0.843188
| 0.835774
| 0.835774
| 0.835774
| 0.835774
| 0.835774
| 0
| 0.006356
| 0.32309
| 9,762
| 273
| 95
| 35.758242
| 0.810079
| 0.612375
| 0
| 0
| 0
| 0
| 0.02658
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 1
| 0.121951
| false
| 0
| 0.060976
| 0.02439
| 0.256098
| 0.02439
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
217bf266e87ce20c95654f207de827f0ad1c2ca4
| 25
|
py
|
Python
|
dml/__init__.py
|
Vinegret43/Dialog-Markup-Language
|
f4659d2bad1e8ef0a1c1c8ff39ed6703f062ca33
|
[
"MIT"
] | null | null | null |
dml/__init__.py
|
Vinegret43/Dialog-Markup-Language
|
f4659d2bad1e8ef0a1c1c8ff39ed6703f062ca33
|
[
"MIT"
] | null | null | null |
dml/__init__.py
|
Vinegret43/Dialog-Markup-Language
|
f4659d2bad1e8ef0a1c1c8ff39ed6703f062ca33
|
[
"MIT"
] | null | null | null |
from .dialog import Dml
| 8.333333
| 23
| 0.76
| 4
| 25
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 25
| 2
| 24
| 12.5
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
218826ac50bc3ce94386274635382521790f8fcd
| 41
|
py
|
Python
|
Model/G2E/__init__.py
|
Yottaxx/T-LSTM
|
92618d8c3ee2418b194a2e1592512548da955b77
|
[
"MIT"
] | 9
|
2020-05-23T05:40:27.000Z
|
2021-11-19T01:29:36.000Z
|
Model/G2E/__init__.py
|
ayyyq/T-LSTM
|
36dbc88ac710d3925851cd87c2368ecfc7061b70
|
[
"MIT"
] | 1
|
2020-11-29T04:35:52.000Z
|
2021-01-29T07:39:37.000Z
|
Model/G2E/__init__.py
|
Yottaxx/T-LSTM
|
92618d8c3ee2418b194a2e1592512548da955b77
|
[
"MIT"
] | 2
|
2020-10-26T13:42:49.000Z
|
2020-11-01T02:01:33.000Z
|
from .GraphStateEEP import GraphState2eep
| 41
| 41
| 0.902439
| 4
| 41
| 9.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.073171
| 41
| 1
| 41
| 41
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2199f1233443485901838dcc18b073c8b2d5ea50
| 27,094
|
py
|
Python
|
sandbox/grist/test_types.py
|
combinatorist/grist-core
|
9a6369a4ff2ebe9f0ad477cd43b90fb2b8341b36
|
[
"Apache-2.0"
] | null | null | null |
sandbox/grist/test_types.py
|
combinatorist/grist-core
|
9a6369a4ff2ebe9f0ad477cd43b90fb2b8341b36
|
[
"Apache-2.0"
] | null | null | null |
sandbox/grist/test_types.py
|
combinatorist/grist-core
|
9a6369a4ff2ebe9f0ad477cd43b90fb2b8341b36
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# pylint: disable=line-too-long
import logger
import testutil
import test_engine
log = logger.Logger(__name__, logger.INFO)
class TestTypes(test_engine.EngineTestCase):
  # Sample document: a "Types" table with one column of each basic column
  # type (Text, Numeric, Int, Bool, Date), all seeded with the same mix of
  # values, plus a "Formulas" table whose formula divides the numeric value
  # of Types row 18 by 2 (so type conversions show up in formula output).
  sample = testutil.parse_test_sample({
    "SCHEMA": [
      [1, "Types", [
        [21, "text", "Text", False, "", "", ""],
        [22, "numeric", "Numeric", False, "", "", ""],
        [23, "int", "Int", False, "", "", ""],
        [24, "bool", "Bool", False, "", "", ""],
        [25, "date", "Date", False, "", "", ""]
      ]],
      [2, "Formulas", [
        [30, "division", "Any", True, "Types.lookupOne(id=18).numeric / 2", "", ""]
      ]]
    ],
    "DATA": {
      "Types": [
        ["id", "text", "numeric", "int", "bool", "date"],
        [11, "New York", "New York", "New York", "New York", "New York"],
        [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
        [13, False, False, False, False, False],
        [14, True, True, True, True, True],
        [15, 1509556595, 1509556595, 1509556595, 1509556595, 1509556595],
        [16, 8.153, 8.153, 8.153, 8.153, 8.153],
        [17, 0, 0, 0, 0, 0],
        [18, 1, 1, 1, 1, 1],
        [19, "", "", "", "", ""],
        [20, None, None, None, None, None]],
      "Formulas": [
        ["id"],
        [1]]
    },
  })
  # Row ids of every row in the sample "Types" table, in insertion order.
  all_row_ids = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
def test_update_typed_cells(self):
    """
    Tests that updated typed values are set as expected in the sandbox. Types should follow
    the rules:
     - After updating a cell with a value of a type compatible to the column type,
       the cell value should have the column's standard type
     - Otherwise, the cell value should have the type AltText
    """
    self.load_sample(self.sample)
    # Write the same mix of typed values into every column; each column
    # coerces compatible values to its own type on store.
    out_actions = self.apply_user_action(["BulkUpdateRecord", "Types", self.all_row_ids, {
        "text": [None, "", 1, 0, 8.153, 1509556595, True, False, u"Chîcágö", "New York"],
        "numeric": [None, "", 1, 0, 8.153, 1509556595, True, False, u"Chîcágö", "New York"],
        "int": [None, "", 1, 0, 8.153, 1509556595, True, False, u"Chîcágö", "New York"],
        "bool": [None, "", 1, 0, 8.153, 1509556595, True, False, u"Chîcágö", "New York"],
        "date": [None, "", 1, 0, 8.153, 1509556595, True, False, u"2019-01-22 00:47:39", "New York"]
    }])
    # "stored" holds the values after per-column coercion; "undo" holds the
    # original cell values needed to reverse the action.
    self.assertPartialOutActions(out_actions, {
        "stored": [["BulkUpdateRecord", "Types", self.all_row_ids, {
            "text": [None,"","1","0","8.153","1509556595","True","False","Chîcágö","New York"],
            "numeric": [None, None, 1.0, 0.0, 8.153, 1509556595.0, 1.0, 0.0, "Chîcágö", "New York"],
            "int": [None, None, 1, 0, 8, 1509556595, 1, 0, "Chîcágö", "New York"],
            "bool": [False, False, True, False, True, True, True, False, "Chîcágö", "New York"],
            "date": [None, None, 1.0, 0.0, 8.153, 1509556595.0, 1.0, 0.0, 1548115200.0, "New York"]
        }],
        ["UpdateRecord", "Formulas", 1, {"division": 0.0}],
        ],
        "undo": [["BulkUpdateRecord", "Types", self.all_row_ids, {
            "text": ["New York", "Chîcágö", False, True, 1509556595, 8.153, 0, 1, "", None],
            "numeric": ["New York", "Chîcágö", False, True, 1509556595, 8.153, 0, 1, "", None],
            "int": ["New York", "Chîcágö", False, True, 1509556595, 8.153, 0, 1, "", None],
            "bool": ["New York", "Chîcágö", False, True, 1509556595, 8.153, False, True, "", None],
            "date": ["New York", "Chîcágö", False, True, 1509556595, 8.153, 0, 1, "", None]
        }],
        ["UpdateRecord", "Formulas", 1, {"division": 0.5}],
        ]
    })
    # Final table: compatible values coerced per column type; incompatible
    # text values kept as-is (AltText).
    self.assertTableData("Types", data=[
        ["id", "text", "numeric", "int", "bool", "date"],
        [11, None, None, None, False, None],
        [12, "", None, None, False, None],
        [13, "1", 1.0, 1, True, 1.0],
        [14, "0", 0.0, 0, False, 0.0],
        [15, "8.153", 8.153, 8, True, 8.153],
        [16, "1509556595", 1509556595, 1509556595, True, 1509556595.0],
        [17, "True", 1.0, 1, True, 1.0],
        [18, "False", 0.0, 0, False, 0.0],
        [19, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", 1548115200.0],
        [20, "New York", "New York", "New York", "New York", "New York"]
    ])
def test_text_conversions(self):
    """
    Tests that column type changes occur as expected in the sandbox:
     - Resulting cell values should all be Text
     - Only non-compatible values should appear in the resulting BulkUpdateRecord
    """
    self.load_sample(self.sample)

    # Test Text -> Text conversion: a no-op, so no actions at all.
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "text", { "type" : "Text" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [],
        "undo": []
    })

    # Test Numeric -> Text conversion. The division formula reads this
    # column, so it also recomputes (to a TypeError once values are Text).
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "numeric", { "type" : "Text" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "numeric", {"type": "Text"}],
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"numeric": ["False", "True", "1509556595.0", "8.153", "0.0", "1.0"]}],
            ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Text"}],
            ["UpdateRecord", "Formulas", 1, {"division": ["E", "TypeError"]}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"numeric": [False, True, 1509556595, 8.153, 0, 1]}],
            ["ModifyColumn", "Types", "numeric", {"type": "Numeric"}],
            ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Numeric"}],
            ["UpdateRecord", "Formulas", 1, {"division": 0.5}],
        ]
    })

    # Test Int -> Text conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "int", { "type" : "Text" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "int", {"type": "Text"}],
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"int": ["False", "True", "1509556595", "8.153", "0", "1"]}],
            ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Text"}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"int": [False, True, 1509556595, 8.153, 0, 1]}],
            ["ModifyColumn", "Types", "int", {"type": "Int"}],
            ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Int"}],
        ]
    })

    # Test Bool -> Text
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "bool", { "type" : "Text" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "bool", {"type": "Text"}],
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"bool": ["False", "True", "1509556595", "8.153", "False", "True"]}],
            ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Text"}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"bool": [False, True, 1509556595, 8.153, False, True]}],
            ["ModifyColumn", "Types", "bool", {"type": "Bool"}],
            ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Bool"}],
        ]
    })

    # Test Date -> Text
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "date", { "type" : "Text" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "date", {"type": "Text"}],
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"date": ["False", "True", "1509556595.0", "8.153", "0.0", "1.0"]}],
            ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Text"}]
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 15, 16, 17, 18],
                {"date": [False, True, 1509556595.0, 8.153, 0.0, 1.0]}],
            ["ModifyColumn", "Types", "date", {"type": "Date"}],
            ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Date"}]
        ]
    })

    # Assert that the final table is as expected
    self.assertTableData("Types", data=[
        ["id", "text", "numeric", "int", "bool", "date"],
        [11, "New York", "New York", "New York", "New York", "New York"],
        [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
        [13, False, "False", "False", "False", "False"],
        [14, True, "True", "True", "True", "True"],
        [15, 1509556595, "1509556595.0","1509556595","1509556595","1509556595.0"],
        [16, 8.153, "8.153", "8.153", "8.153", "8.153"],
        [17, 0, "0.0", "0", "False", "0.0"],
        [18, 1, "1.0", "1", "True", "1.0"],
        [19, "", "", "", "", ""],
        [20, None, None, None, None, None]
    ])
def test_numeric_conversions(self):
    """
    Tests that column type changes occur as expected in the sandbox:
     - Resulting cell values should all be of type Numeric or AltText
     - Only non-compatible values should appear in the resulting BulkUpdateRecord
    """
    self.load_sample(self.sample)

    # Test Text -> Numeric conversion. Only rows whose stored value changes
    # (booleans and the empty string) appear in the BulkUpdateRecord.
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "text", { "type" : "Numeric" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "text", {"type": "Numeric"}],
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"text": [0.0, 1.0, None]}],
            ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Numeric"}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"text": [False, True, ""]}],
            ["ModifyColumn", "Types", "text", {"type": "Text"}],
            ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Text"}],
        ]
    })

    # Test Numeric -> Numeric conversion: a no-op, so no actions at all.
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "numeric", {"type": "Numeric"}])
    self.assertPartialOutActions(out_actions, {
        "stored": [],
        "undo": []
    })

    # Test Int -> Numeric conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "int", { "type" : "Numeric" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "int", {"type": "Numeric"}],
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"int": [0.0, 1.0, None]}],
            ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Numeric"}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"int": [False, True, ""]}],
            ["ModifyColumn", "Types", "int", {"type": "Int"}],
            ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Int"}],
        ]
    })

    # Test Bool -> Numeric conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "bool", { "type" : "Numeric" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "bool", {"type": "Numeric"}],
            ["BulkUpdateRecord", "Types", [13, 14, 17, 18, 19],
                {"bool": [0.0, 1.0, 0.0, 1.0, None]}],
            ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Numeric"}],
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 17, 18, 19],
                {"bool": [False, True, False, True, ""]}],
            ["ModifyColumn", "Types", "bool", {"type": "Bool"}],
            ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Bool"}],
        ]
    })

    # Test Date -> Numeric conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "date", { "type" : "Numeric" }])
    self.assertPartialOutActions(out_actions, {
        "stored": [
            ["ModifyColumn", "Types", "date", {"type": "Numeric"}],
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"date": [0.0, 1.0, None]}],
            ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Numeric"}]
        ],
        "undo": [
            ["BulkUpdateRecord", "Types", [13, 14, 19],
                {"date": [False, True, ""]}],
            ["ModifyColumn", "Types", "date", {"type": "Date"}],
            ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Date"}]
        ]
    })

    # Assert that the final table is as expected
    self.assertTableData("Types", data=[
        ["id", "text", "numeric", "int", "bool", "date"],
        [11, "New York", "New York", "New York", "New York", "New York"],
        [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
        [13, 0.0, False, 0.0, 0.0, 0.0],
        [14, 1.0, True, 1.0, 1.0, 1.0],
        [15, 1509556595, 1509556595, 1509556595, 1509556595, 1509556595],
        [16, 8.153, 8.153, 8.153, 8.153, 8.153],
        [17, 0.0, 0.0, 0.0, 0.0, 0.0],
        [18, 1.0, 1.0, 1.0, 1.0, 1.0],
        [19, None, "", None, None, None],
        [20, None, None, None, None, None],
    ])
  def test_int_conversions(self):
    """
    Tests that column type changes occur as expected in the sandbox:
    - Resulting cell values should all be of type Int or AltText
    - Only non-compatible values should appear in the resulting BulkUpdateRecord

    Each conversion below checks both the "stored" actions (the forward change)
    and the "undo" actions (which must restore the exact prior cell values and
    column type). Column row ids 21-25 are the metadata records for the
    text/numeric/int/bool/date columns respectively.
    """
    self.load_sample(self.sample)
    # Test Text -> Int conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "text", { "type" : "Int" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "text", {"type": "Int"}],
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19], {"text": [0, 1, 8, None]}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Int"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19],
         {"text": [False, True, 8.153, ""]}],
        ["ModifyColumn", "Types", "text", {"type": "Text"}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Text"}],
      ]
    })
    # Test Numeric -> Int conversion
    # Note: the dependent formula column "division" recomputes (0.5 -> 0)
    # because integer division truncates.
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "numeric", { "type" : "Int" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "numeric", {"type": "Int"}],
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19],
         {"numeric": [0, 1, 8, None]}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Int"}],
        ["UpdateRecord", "Formulas", 1, {"division": 0}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19],
         {"numeric": [False, True, 8.153, ""]}],
        ["ModifyColumn", "Types", "numeric", {"type": "Numeric"}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Numeric"}],
        ["UpdateRecord", "Formulas", 1, {"division": 0.5}],
      ]
    })
    # Test Int -> Int conversion (a no-op: nothing stored, nothing to undo)
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "int", { "type" : "Int" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [],
      "undo": []
    })
    # Test Bool -> Int conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "bool", { "type" : "Int" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "bool", {"type": "Int"}],
        ["BulkUpdateRecord", "Types", [13, 14, 16, 17, 18, 19],
         {"bool": [0, 1, 8, 0, 1, None]}],
        ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Int"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 16, 17, 18, 19],
         {"bool": [False, True, 8.153, False, True, ""]}],
        ["ModifyColumn", "Types", "bool", {"type": "Bool"}],
        ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Bool"}],
      ]
    })
    # Test Date -> Int conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "date", { "type" : "Int" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "date", {"type": "Int"}],
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19],
         {"date": [0, 1, 8, None]}],
        ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Int"}]
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 16, 19],
         {"date": [False, True, 8.153, ""]}],
        ["ModifyColumn", "Types", "date", {"type": "Date"}],
        ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Date"}]
      ]
    })
    # Assert that the final table is as expected
    # (the "int" column kept its original values because Int -> Int was a no-op)
    self.assertTableData("Types", data=[
      ["id", "text", "numeric", "int", "bool", "date"],
      [11, "New York", "New York", "New York", "New York", "New York"],
      [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
      [13, 0, 0, False, 0, 0],
      [14, 1, 1, True, 1, 1],
      [15, 1509556595, 1509556595, 1509556595, 1509556595, 1509556595],
      [16, 8, 8, 8.153, 8, 8],
      [17, 0, 0, 0, 0, 0],
      [18, 1, 1, 1, 1, 1],
      [19, None, None, "", None, None],
      [20, None, None, None, None, None]
    ])
  def test_bool_conversions(self):
    """
    Tests that column type changes occur as expected in the sandbox:
    - Resulting cell values should all be of type Bool or AltText
    - Only non-compatible values should appear in the resulting BulkUpdateRecord

    Each conversion checks both the "stored" (forward) and "undo" (restore)
    actions. Column row ids 21-25 are the metadata records for the
    text/numeric/int/bool/date columns respectively.
    """
    self.load_sample(self.sample)
    # Test Text -> Bool conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "text", { "type" : "Bool" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "text", {"type": "Bool"}],
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"text": [True, True, False, True, False, False]}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Bool"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"text": [1509556595, 8.153, 0, 1, "", None]}],
        ["ModifyColumn", "Types", "text", {"type": "Text"}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Text"}],
      ]
    })
    # Test Numeric -> Bool conversion
    # Note: the dependent formula column "division" recomputes (0.5 -> 0).
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "numeric", { "type" : "Bool" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "numeric", {"type": "Bool"}],
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"numeric": [True, True, False, True, False, False]}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Bool"}],
        ["UpdateRecord", "Formulas", 1, {"division": 0}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"numeric": [1509556595.0, 8.153, 0.0, 1.0, "", None]}],
        ["ModifyColumn", "Types", "numeric", {"type": "Numeric"}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Numeric"}],
        ["UpdateRecord", "Formulas", 1, {"division": 0.5}],
      ]
    })
    # Test Int -> Bool conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "int", { "type" : "Bool" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "int", {"type": "Bool"}],
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"int": [True, True, False, True, False, False]}],
        ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Bool"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"int": [1509556595, 8.153, 0, 1, "", None]}],
        ["ModifyColumn", "Types", "int", {"type": "Int"}],
        ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Int"}],
      ]
    })
    # Test Bool -> Bool conversion (a no-op: nothing stored, nothing to undo)
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "bool", { "type" : "Bool" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [],
      "undo": []
    })
    # Test Date -> Bool conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "date", { "type" : "Bool" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "date", {"type": "Bool"}],
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"date": [True, True, False, True, False, False]}],
        ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Bool"}]
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [15, 16, 17, 18, 19, 20],
         {"date": [1509556595, 8.153, 0, 1, "", None]}],
        ["ModifyColumn", "Types", "date", {"type": "Date"}],
        ["UpdateRecord", "_grist_Tables_column", 25, {"type": "Date"}]
      ]
    })
    # Assert that the final table is as expected
    # (the "bool" column kept its original values because Bool -> Bool was a no-op)
    self.assertTableData("Types", data=[
      ["id", "text", "numeric", "int", "bool", "date"],
      [11, "New York", "New York", "New York", "New York", "New York"],
      [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
      [13, False, False, False, False, False],
      [14, True, True, True, True, True],
      [15, True, True, True, 1509556595, True],
      [16, True, True, True, 8.153, True],
      [17, False, False, False, 0, False],
      [18, True, True, True, 1, True],
      [19, False, False, False, "", False],
      [20, False, False, False, None, False]
    ])
  def test_date_conversions(self):
    """
    Tests that column type changes occur as expected in the sandbox:
    - Resulting cell values should all be of type Date or AltText
    - Only non-compatible values should appear in the resulting BulkUpdateRecord

    Each conversion checks both the "stored" (forward) and "undo" (restore)
    actions. Column row ids 21-25 are the metadata records for the
    text/numeric/int/bool/date columns respectively.
    """
    self.load_sample(self.sample)
    # Test Text -> Date conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "text", { "type" : "Date" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "text", {"type": "Date"}],
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"text": [0.0, 1.0, None]}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Date"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"text": [False, True, ""]}],
        ["ModifyColumn", "Types", "text", {"type": "Text"}],
        ["UpdateRecord", "_grist_Tables_column", 21, {"type": "Text"}],
      ]
    })
    # Test Numeric -> Date conversion
    # Note: the dependent formula column "division" now raises, since a Date
    # operand is not valid there; the cell becomes an error value.
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "numeric", { "type" : "Date" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "numeric", {"type": "Date"}],
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"numeric": [0.0, 1.0, None]}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Date"}],
        ["UpdateRecord", "Formulas", 1, {"division": ["E", "TypeError"]}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"numeric": [False, True, ""]}],
        ["ModifyColumn", "Types", "numeric", {"type": "Numeric"}],
        ["UpdateRecord", "_grist_Tables_column", 22, {"type": "Numeric"}],
        ["UpdateRecord", "Formulas", 1, {"division": 0.5}],
      ]
    })
    # Test Int -> Date conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "int", { "type" : "Date" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "int", {"type": "Date"}],
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"int": [0.0, 1.0, None]}],
        ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Date"}],
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 19],
         {"int": [False, True, ""]}],
        ["ModifyColumn", "Types", "int", {"type": "Int"}],
        ["UpdateRecord", "_grist_Tables_column", 23, {"type": "Int"}],
      ]
    })
    # Test Bool -> Date conversion
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "bool", { "type" : "Date" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ["ModifyColumn", "Types", "bool", {"type": "Date"}],
        ["BulkUpdateRecord", "Types", [13, 14, 17, 18, 19],
         {"bool": [0.0, 1.0, 0.0, 1.0, None]}],
        ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Date"}]
      ],
      "undo": [
        ["BulkUpdateRecord", "Types", [13, 14, 17, 18, 19],
         {"bool": [False, True, False, True, ""]}],
        ["ModifyColumn", "Types", "bool", {"type": "Bool"}],
        ["UpdateRecord", "_grist_Tables_column", 24, {"type": "Bool"}]
      ]
    })
    # Test Date -> Date conversion (a no-op: nothing stored, nothing to undo)
    out_actions = self.apply_user_action(["ModifyColumn", "Types", "date", { "type" : "Date" }])
    self.assertPartialOutActions(out_actions, {
      "stored": [],
      "undo": []
    })
    # Assert that the final table is as expected
    # (the "date" column kept its original values because Date -> Date was a no-op)
    self.assertTableData("Types", data=[
      ["id", "text", "numeric", "int", "bool", "date"],
      [11, "New York", "New York", "New York", "New York", "New York"],
      [12, "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö", "Chîcágö"],
      [13, 0.0, 0.0, 0.0, 0.0, False],
      [14, 1.0, 1.0, 1.0, 1.0, True],
      [15, 1509556595, 1509556595, 1509556595, 1509556595, 1509556595],
      [16, 8.153, 8.153, 8.153, 8.153, 8.153],
      [17, 0.0, 0.0, 0.0, 0.0, 0],
      [18, 1.0, 1.0, 1.0, 1.0, 1],
      [19, None, None, None, None, ""],
      [20, None, None, None, None, None]
    ])
def test_numerics_are_floats(self):
"""
Tests that in formulas, numeric values are floats, not integers.
Important to avoid truncation.
"""
self.load_sample(self.sample)
self.assertTableData('Formulas', data=[
['id', 'division'],
[ 1, 0.5],
])
| 44.489327
| 101
| 0.502288
| 2,829
| 27,094
| 4.717568
| 0.051255
| 0.011389
| 0.009441
| 0.086917
| 0.904391
| 0.885883
| 0.861606
| 0.818897
| 0.775513
| 0.642964
| 0
| 0.090645
| 0.289474
| 27,094
| 608
| 102
| 44.5625
| 0.602618
| 0.088285
| 0
| 0.579268
| 0
| 0
| 0.271251
| 0.001224
| 0
| 0
| 0
| 0
| 0.067073
| 1
| 0.014228
| false
| 0
| 0.006098
| 0
| 0.026423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
21fe1042e06610490194c733ac8677ef24f9fc1d
| 101
|
py
|
Python
|
emtcyclegan/utils.py
|
dhritimaandas/EMT-CycleGAN
|
6a36b5adc329b4a9decc00e7351d5fa804bf8abe
|
[
"MIT"
] | null | null | null |
emtcyclegan/utils.py
|
dhritimaandas/EMT-CycleGAN
|
6a36b5adc329b4a9decc00e7351d5fa804bf8abe
|
[
"MIT"
] | null | null | null |
emtcyclegan/utils.py
|
dhritimaandas/EMT-CycleGAN
|
6a36b5adc329b4a9decc00e7351d5fa804bf8abe
|
[
"MIT"
] | null | null | null |
def initialize_weights_normal(network):
    """Initialize *network*'s weights from a normal distribution.

    Currently an intentional no-op placeholder: the parameter is accepted
    but left untouched, and the function returns None.
    """
    # No-op stub; implementation pending.
def initialize_weights_xavier(network):
    """Initialize *network*'s weights using Xavier (Glorot) initialization.

    Currently an intentional no-op placeholder: the parameter is accepted
    but left untouched, and the function returns None.
    """
    # No-op stub; implementation pending.
| 12.625
| 39
| 0.772277
| 12
| 101
| 6.166667
| 0.583333
| 0.351351
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168317
| 101
| 7
| 40
| 14.428571
| 0.880952
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
df7dc207599808cd300532991432868fafd07e5f
| 33
|
py
|
Python
|
exetera/io/__init__.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 14
|
2021-03-01T16:57:46.000Z
|
2021-12-01T10:49:19.000Z
|
exetera/io/__init__.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 208
|
2021-02-16T13:47:04.000Z
|
2022-03-31T11:27:03.000Z
|
exetera/io/__init__.py
|
deng113jie/ExeTera
|
613532a419b93a9838bf5ae5594fc7bb9738cd03
|
[
"Apache-2.0"
] | 5
|
2021-03-08T08:50:26.000Z
|
2021-12-03T09:26:43.000Z
|
from . import importer, parsers
| 11
| 31
| 0.757576
| 4
| 33
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 33
| 2
| 32
| 16.5
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
802310363a2d9c29ef0e4609ccd183507201673e
| 7,493
|
py
|
Python
|
tests/test_routes/test_routes_events.py
|
hed-standard/hed-web
|
8603526266dff78cf07e49e6c0f0c715a9225289
|
[
"MIT"
] | null | null | null |
tests/test_routes/test_routes_events.py
|
hed-standard/hed-web
|
8603526266dff78cf07e49e6c0f0c715a9225289
|
[
"MIT"
] | null | null | null |
tests/test_routes/test_routes_events.py
|
hed-standard/hed-web
|
8603526266dff78cf07e49e6c0f0c715a9225289
|
[
"MIT"
] | 2
|
2022-02-04T19:55:40.000Z
|
2022-02-04T21:36:04.000Z
|
import io
import os
import unittest
from flask import Response
from tests.test_web_base import TestWebBase
from hedweb.constants import base_constants
class Test(TestWebBase):
    """Route tests for the /events_submit endpoint of the HED web app.

    Each test posts multipart form data built from the shared bids_events
    fixtures and checks the response status, the "Category" response header
    ("success" / "warning" / "error"), and the presence or absence of body
    data.
    """

    def _load_event_buffers(self):
        """Return (json_buffer, events_buffer) for the bids_events fixtures.

        Re-reads ../data/bids_events.json and ../data/bids_events.tsv
        (relative to this test file) and wraps each in a fresh io.BytesIO
        so it can be posted as a file upload. Callers own the buffers and
        should close them.
        """
        json_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.json')
        events_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../data/bids_events.tsv')
        with open(json_path, 'r') as sc:
            json_buffer = io.BytesIO(bytes(sc.read(), 'utf-8'))
        with open(events_path, 'r') as sc:
            events_buffer = io.BytesIO(bytes(sc.read(), 'utf-8'))
        return json_buffer, events_buffer

    def test_events_results_empty_data(self):
        """A POST with no form data should yield an error category and empty body."""
        response = self.app.test.post('/events_submit')
        self.assertEqual(200, response.status_code, 'HED events request succeeds even when no data')
        self.assertTrue(isinstance(response, Response),
                        'events_results validate should return a response object when empty events')
        header_dict = dict(response.headers)
        self.assertEqual("error", header_dict["Category"], "The header msg_category when no events is error ")
        self.assertFalse(response.data, "The response data for empty events request is empty")

    def test_events_results_assemble_valid(self):
        """Assembling a valid events file against schema 8.0.0 should succeed."""
        json_buffer, events_buffer = self._load_event_buffers()
        with self.app.app_context():
            input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
                          base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE,
                          'json_file': (json_buffer, 'bids_events.json'),
                          'events_file': (events_buffer, 'bids_events.tsv'),
                          'expand_defs': 'on',
                          base_constants.CHECK_FOR_WARNINGS: 'on'}
            response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
            self.assertEqual(200, response.status_code, 'Assembly of a valid events file has a response')
            headers_dict = dict(response.headers)
            self.assertEqual("success", headers_dict["Category"],
                             "The valid events file should assemble successfully")
            self.assertTrue(response.data, "The assembled events file should not be empty")
            json_buffer.close()
            events_buffer.close()

    def test_events_results_assemble_invalid(self):
        """Assembling against an incompatible schema (7.2.0) should warn."""
        json_buffer, events_buffer = self._load_event_buffers()
        with self.app.app_context():
            input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
                          base_constants.COMMAND_OPTION: base_constants.COMMAND_ASSEMBLE,
                          base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
                          base_constants.EVENTS_FILE: (events_buffer, 'bids_events.tsv'),
                          base_constants.CHECK_FOR_WARNINGS: 'on'}
            response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
            self.assertEqual(200, response.status_code, 'Assembly of invalid events files has a response')
            headers_dict = dict(response.headers)
            self.assertEqual("warning", headers_dict["Category"],
                             "Assembly with invalid events files generates a warning")
            self.assertTrue(response.data,
                            "The response data for invalid event assembly should have error messages")
            json_buffer.close()
            # Fix: the original version leaked events_buffer here (only
            # json_buffer was closed).
            events_buffer.close()

    def test_events_results_validate_valid(self):
        """Validating a valid events file should succeed with no body data."""
        json_buffer, events_buffer = self._load_event_buffers()
        with self.app.app_context():
            input_data = {base_constants.SCHEMA_VERSION: '8.0.0',
                          base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
                          base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
                          base_constants.EVENTS_FILE: (events_buffer, 'bids_events.tsv'),
                          base_constants.CHECK_FOR_WARNINGS: 'on'}
            response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
            self.assertTrue(isinstance(response, Response),
                            'events_submit validate should return a Response when events valid')
            self.assertEqual(200, response.status_code, 'Validation of a valid events file has a valid status code')
            headers_dict = dict(response.headers)
            self.assertEqual("success", headers_dict["Category"],
                             "The valid events file should validate successfully")
            self.assertFalse(response.data, "The validated events file should not return data")
            json_buffer.close()
            events_buffer.close()

    def test_events_results_validate_invalid(self):
        """Validating against an incompatible schema (7.2.0) should warn with messages."""
        json_buffer, events_buffer = self._load_event_buffers()
        with self.app.app_context():
            input_data = {base_constants.SCHEMA_VERSION: '7.2.0',
                          base_constants.COMMAND_OPTION: base_constants.COMMAND_VALIDATE,
                          base_constants.JSON_FILE: (json_buffer, 'bids_events.json'),
                          # NOTE(review): original upload filename is 'events_file'
                          # (not 'bids_events.tsv') — preserved deliberately.
                          base_constants.EVENTS_FILE: (events_buffer, 'events_file'),
                          base_constants.CHECK_FOR_WARNINGS: 'on'}
            response = self.app.test.post('/events_submit', content_type='multipart/form-data', data=input_data)
            self.assertTrue(isinstance(response, Response),
                            'events_submit validate should return a Response when events invalid')
            self.assertEqual(200, response.status_code, 'Validation of invalid events files has a response')
            headers_dict = dict(response.headers)
            self.assertEqual("warning", headers_dict["Category"],
                             "Validation of invalid events files generates a warning")
            self.assertTrue(response.data,
                            "The response data for invalid event validation should have error messages")
            json_buffer.close()
            events_buffer.close()
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| 55.095588
| 117
| 0.611371
| 889
| 7,493
| 4.917885
| 0.127109
| 0.032937
| 0.018298
| 0.025618
| 0.874199
| 0.839661
| 0.811985
| 0.743367
| 0.721866
| 0.718207
| 0
| 0.00649
| 0.280262
| 7,493
| 135
| 118
| 55.503704
| 0.804191
| 0
| 0
| 0.669565
| 0
| 0
| 0.222479
| 0.02555
| 0
| 0
| 0
| 0
| 0.156522
| 1
| 0.043478
| false
| 0
| 0.052174
| 0
| 0.104348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
80247316f26f027c301af3e0895c40e1b96228f8
| 194
|
py
|
Python
|
website/about/urls.py
|
zckoh/ecommerce-fullstack
|
c4cecea3ebaec900da484954d01dcbc2cba325c9
|
[
"Apache-2.0"
] | 1
|
2021-12-14T22:24:20.000Z
|
2021-12-14T22:24:20.000Z
|
website/about/urls.py
|
zckoh/ecommerce-fullstack
|
c4cecea3ebaec900da484954d01dcbc2cba325c9
|
[
"Apache-2.0"
] | 11
|
2021-03-30T13:59:29.000Z
|
2022-03-12T00:48:40.000Z
|
website/about/urls.py
|
zckoh/ecommerce-fullstack
|
c4cecea3ebaec900da484954d01dcbc2cba325c9
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from . import views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the "about" app: the app root serves the about page.
urlpatterns = [
    path("", views.about_view, name="about_view"),
]
| 24.25
| 50
| 0.757732
| 28
| 194
| 5.178571
| 0.464286
| 0.206897
| 0.193103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139175
| 194
| 8
| 51
| 24.25
| 0.868263
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8043c266280c2533b7394e903a5a2b3568b6c722
| 33
|
py
|
Python
|
opt/todo/recurrent.py
|
or-fusion/OptML
|
c0aae08e8504cbf53a94df2b2da42d33c2bc0ff6
|
[
"BSD-3-Clause"
] | 1
|
2022-03-17T19:44:44.000Z
|
2022-03-17T19:44:44.000Z
|
opt/todo/recurrent.py
|
or-fusion/OptML
|
c0aae08e8504cbf53a94df2b2da42d33c2bc0ff6
|
[
"BSD-3-Clause"
] | null | null | null |
opt/todo/recurrent.py
|
or-fusion/OptML
|
c0aae08e8504cbf53a94df2b2da42d33c2bc0ff6
|
[
"BSD-3-Clause"
] | null | null | null |
# TODO: implement a recurrent neural network.
| 11
| 25
| 0.787879
| 4
| 33
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 2
| 26
| 16.5
| 0.896552
| 0.878788
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.5
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
337b4aea634bb66d5ca7db70fa888e7a897a50d2
| 17,263
|
py
|
Python
|
baoming/webapp/controller/download.py
|
hanxiaoshun/RegistrationSystem
|
2f7310508fc1725e96fe941b1062ce7f26f265a4
|
[
"Apache-2.0"
] | null | null | null |
baoming/webapp/controller/download.py
|
hanxiaoshun/RegistrationSystem
|
2f7310508fc1725e96fe941b1062ce7f26f265a4
|
[
"Apache-2.0"
] | 14
|
2020-06-06T01:24:24.000Z
|
2022-03-12T00:17:22.000Z
|
baoming/webapp/controller/download.py
|
hanxiaoshun/RegistrationSystem
|
2f7310508fc1725e96fe941b1062ce7f26f265a4
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import json
from django.http import FileResponse
from django.shortcuts import HttpResponseRedirect
from webapp.controller.search.search_param_deal_down import search_parameter
from webapp.controller.renderUtil import render_result
from webapp.controller.search.search_common import *
from webapp.utils.all_student_base_info_list import *
from webapp.utils.electronic_communication import *
from webapp.utils.electronic_communication_format import *
from webapp.utils.spin_format import *
from webapp.utils.apply_chemical import *
from webapp.utils.apply_electronic_communiction import *
from webapp.utils.apply_not import *
from webapp.utils.reporter_chemical_list import *
from webapp.utils.reporter_chemical_not_list_format import *
from webapp.utils.worker_year_6 import *
from webapp.utils.save_operation_log import save_operation_log
from baoming.settings import MEDIA_URL, MEDIA_ROOT, BASE_DIR
# Display name of the system ("registration system"); kept in Chinese because
# it is user-facing text.
sys_msg = '报名系统'
# NOTE(review): module-level mutable dict written by every download view in
# this module before logging. Concurrent requests share it and may interleave
# writes — confirm whether per-request dicts are needed.
result = {'status': True, 'message': ''}
def download_apply(request):
    """
    Download the application form appropriate for the requesting student.

    Looks up the student from the 'studentId' query parameter, then picks a
    form generator based on the student's skill main-class name: chemical
    ('化工'), electronics ('电子'), or everything else. On success redirects
    to the report download page with the generated file's uuid; on any lookup
    failure renders the download page with an error message.

    :param request: Django HttpRequest; reads GET['studentId'].
    :return: HttpResponseRedirect or a rendered download page.
    """
    title_msg = '下载山东省职业技能考核表格'
    student_id = request.GET.get('studentId', None)
    try:
        if student_id:
            student = StudentInfo.objects.get(id=student_id)
            if student:
                # Debug output left in by the original author.
                print(student.declaration_of_occupation)
                print(student.condition_selected.skill_main_class.skill_main_class_name)
                skill_main_class_name = student.condition_selected.skill_main_class.skill_main_class_name
                if '化工' in skill_main_class_name:
                    # Chemical-industry specific application form.
                    file_uuid = apply_chemical(student)
                    print(str(file_uuid))
                    if file_uuid:
                        file_message_one = '&file_message_one=《化工行业特有工种职业技能鉴定申请表》'
                        return HttpResponseRedirect(
                            '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
                    else:
                        return HttpResponseRedirect('/report/report_download_page/')
                elif '电子' in skill_main_class_name:
                    # Electronics-industry specific application form.
                    file_uuid = apply_electronic_communiction(student)
                    print(str(file_uuid))
                    if file_uuid:
                        file_message_one = '&file_message_one=《电子行业职业技能鉴定申报表》'
                        return HttpResponseRedirect(
                            '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
                    else:
                        return HttpResponseRedirect('/report/report_download_page/')
                else:
                    # Anything other than chemical / electronics: two files are
                    # generated and rendered directly instead of redirecting.
                    file_uuid, file_uuid2 = apply_not(student)
                    print(str(file_uuid))
                    if file_uuid:
                        message = "文件获取成功,请点击下载查看"
                        file_message_one = '《山东省职业技能鉴定考评表》'
                        file_message_two = '《工作年限承诺书》'
                        return render_result(request, "page_main_controller/student/report_download_page.html",
                                             {'title_msg': title_msg, 'not_exist': False, 'message': message,
                                              'file_uuid': file_uuid, 'file_uuid2': file_uuid2, 'chemical': False,
                                              'file_message_one': file_message_one,
                                              'file_message_two': file_message_two})
                    else:
                        message = '获取申请表出现系统异常,请稍后重新下载或者联系管理员'
                        return render_result(request, "page_main_controller/student/report_download_page.html",
                                             {'title_msg': title_msg, 'not_exist': True, 'message': message})
            else:
                message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
                return render_result(request, "page_main_controller/student/report_download_page.html",
                                     {'title_msg': title_msg, 'not_exist': True, 'message': message})
        else:
            message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
            return render_result(request, "page_main_controller/student/report_download_page.html",
                                 {'title_msg': title_msg, 'not_exist': True, 'message': message})
    except Exception as e:
        # NOTE(review): re-raising unchanged; StudentInfo.DoesNotExist will
        # surface as a 500 rather than an error page — confirm intended.
        raise e
def all_student_base_info(request):
    """
    Generate the workbook with every confirmed student's base info and
    redirect to the report download page.

    :param request: Django HttpRequest (not used beyond routing).
    :return: HttpResponseRedirect to the download page, carrying the file
        uuid and display name when generation succeeded, or to the bare
        download page otherwise.
    """
    # None means "no pre-filtered queryset" -- presumably the helper then
    # exports all confirmed students; TODO confirm against
    # all_student_base_info_list.
    student_infos = None
    file_uuid = all_student_base_info_list(student_infos)
    file_message_one = '&file_message_one=学员报名表'
    if file_uuid:
        return HttpResponseRedirect('/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
    else:
        return HttpResponseRedirect('/report/report_download_page/')
def reporter_chemical_not(request):
    """
    Generate the roster workbook for all confirmed non-chemical students and
    redirect to the report download page.

    :param request: Django HttpRequest (not used beyond routing).
    :return: HttpResponseRedirect to the download page, carrying the file
        uuid and display name when generation succeeded, or to the bare
        download page otherwise.
    """
    file_uuid = reporter_chemical_not_list()
    file_message_one = '&file_message_one=《德州市申请职业技能鉴定(技术等级鉴定)颁发职业资格证书花名册》'
    if file_uuid:
        # BUG FIX: file_message_one already starts with the
        # '&file_message_one=' query prefix; the original code concatenated
        # "'&file_message_one=' + file_message_one", duplicating the parameter
        # (an empty one followed by the real one) in the redirect URL. Every
        # sibling view appends file_message_one directly, as done here.
        return HttpResponseRedirect(
            '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
    else:
        return HttpResponseRedirect('/report/report_download_page/')
def reporter_chemical(request):
    """
    Generate the summary workbook for all confirmed chemical-industry
    students and redirect to the report download page.

    :param request: Django HttpRequest (not used beyond routing).
    :return: HttpResponseRedirect to the download page, carrying the file
        uuid and display name when generation succeeded, or to the bare
        download page otherwise.
    """
    file_uuid = reporter_chemical_list()
    if file_uuid:
        file_message_one = '&file_message_one=《报名资料汇总表(化工类)》'
        return HttpResponseRedirect(
            '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
    else:
        return HttpResponseRedirect('/report/report_download_page/')
def administrator_search_chemical_download(request):
    """
    Export chemical-industry students matched by the administrator's search
    conditions, then redirect to the report download page.

    :param request: Django HttpRequest carrying the search conditions.
    :return: HttpResponseRedirect with the generated file uuid on success,
        or to the bare download page when there are no matches or the
        export failed.
    """
    matched_students = get_student_by_conditions(request, 1)
    if not matched_students:
        return HttpResponseRedirect('/report/report_download_page/')
    file_uuid = reporter_chemical_list(student_infos=matched_students)
    if not file_uuid:
        return HttpResponseRedirect('/report/report_download_page/')
    file_message_one = '&file_message_one=《报名资料汇总表(化工类)》'
    return HttpResponseRedirect(
        '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
def administrator_search_chemical_not_download(request):
    """
    Export non-chemical students matched by the administrator's search
    conditions, then redirect to the report download page.

    :param request: Django HttpRequest carrying the search conditions.
    :return: HttpResponseRedirect with the generated file uuid on success,
        or to the bare download page when there are no matches or the
        export failed.
    """
    matched_students = get_student_by_conditions(request, 2)
    if not matched_students:
        return HttpResponseRedirect('/report/report_download_page/')
    file_uuid = reporter_chemical_not_list(student_infos=matched_students)
    if not file_uuid:
        return HttpResponseRedirect('/report/report_download_page/')
    file_message_one = '&file_message_one=《德州市申请职业技能鉴定(技术等级鉴定)颁发职业资格证书花名册》'
    return HttpResponseRedirect(
        '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
def all_student_base_info_download(request):
    """
    Export the base info of students matched by the administrator's search
    conditions, then redirect to the report download page.

    :param request: Django HttpRequest carrying the search conditions.
    :return: HttpResponseRedirect with the generated file uuid on success,
        or to the bare download page when there are no matches or the
        export failed.
    """
    matched_students = get_student_by_conditions(request, 0)
    if not matched_students:
        return HttpResponseRedirect('/report/report_download_page/')
    file_uuid = all_student_base_info_list(matched_students)
    if not file_uuid:
        return HttpResponseRedirect('/report/report_download_page/')
    file_message_one = '&file_message_one=学员报名表'
    return HttpResponseRedirect(
        '/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
def report_download_page(request):
    """
    Render the download page for a previously generated report file.

    Reads 'file_uuid' and 'file_message_one' from the query string; the
    redirecting views above supply both on success.

    :param request: Django HttpRequest.
    :return: rendered download page (success or error variant).
    """
    file_uuid = request.GET.get('file_uuid', None)
    file_message_one = request.GET.get('file_message_one', None)
    title_msg = "学生信息下载"
    if file_uuid:
        if file_message_one:
            message = "文件获取成功,请点击下载查看"
            return render_result(request, "page_main_controller/report_download_page.html",
                                 {'title_msg': title_msg, 'not_exist': False, 'message': message,
                                  'file_uuid': file_uuid, 'chemical': True, 'file_message_one': file_message_one})
        else:
            # NOTE(review): when file_uuid is absent entirely, no branch
            # returns and the view implicitly returns None, which Django
            # rejects — confirm whether a fallback error page is intended.
            message = '无数据,或者获取文件出现异常,请稍后重新下载或者联系管理员'
            return render_result(request, "page_main_controller/report_download_page.html",
                                 {'title_msg': title_msg, 'not_exist': True, 'message': message})
def start_download(request):
    """
    Stream a previously generated report file to the client as an attachment.

    Looks up the FileManage record by 'file_uuid' (taking the most recent
    match), streams it with FileResponse, and records an operation-log entry.
    On any failure renders the download page with an error message.

    :param request: Django HttpRequest; reads GET['file_uuid'].
    :return: FileResponse on success, otherwise a rendered error page.
    """
    try:
        file_uuid = request.GET.get('file_uuid', None)
        if file_uuid:
            files = FileManage.objects.filter(file_uuid=file_uuid)
            print(files.__len__())
            if len(files) > 0:
                # Use the most recently created record for this uuid.
                file = files[len(files) - 1]
                operation_object = file.id
                file_names = str(file.file_path).split("/")
                file_name = file_names[len(file_names) - 1]
                print("file_name::" + file_name)
                # FileResponse takes ownership of the open handle and closes
                # it when the response has been streamed.
                ready_file = open(file.file_path, 'rb')
                response = FileResponse(ready_file)
                response['Content-Type'] = 'application/octet-stream'
                response['Content-Disposition'] = 'attachment;filename=' + file_name
                # NOTE(review): `result` is a module-level dict shared across
                # requests; see the note at its definition.
                result['status'] = True
                result['message'] = '下载成功!'
                result['data'] = json.dumps({}, ensure_ascii=False)
                result["level"] = log_level_download
                # inspect.stack()[0][3] is this function's name, logged as the
                # operation type.
                save_operation_log(request, inspect.stack()[0][3], str(operation_object), result)
                return response
            else:
                title_msg = '下载文件异常'
                message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
                return render_result(request, "page_main_controller/report_download_page.html",
                                     {'title_msg': title_msg, 'not_exist': True, 'message': message})
        else:
            title_msg = '下载文件异常'
            message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
            return render_result(request, "page_main_controller/student/report_download_page.html",
                                 {'title_msg': title_msg, 'not_exist': True, 'message': message})
    except Exception as e:
        # Log the failure, then render the error page with the exception text.
        result['status'] = False
        result['message'] = '下载异常!错误提示:' + str(e)
        result['data'] = json.dumps({}, ensure_ascii=False)
        result["level"] = log_level_download
        save_operation_log(request, inspect.stack()[0][3], "", result)
        title_msg = '下载文件异常'
        message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员。错误提示:' + str(e)
        return render_result(request, "page_main_controller/student/report_download_page.html",
                             {'title_msg': title_msg, 'not_exist': True, 'message': message})
def start_download_picture(request):
    """
    Stream an uploaded ID-card picture back to the client as an attachment.

    Looks up the IDCardPicture by the ``file_uuid`` GET parameter and serves
    the file from MEDIA_ROOT; records an operation log either way.

    :param request: HttpRequest; expects GET parameter ``file_uuid``.
    :return: FileResponse on success, otherwise a rendered error page.
    """
    # Per-request log payload; avoids mutating a shared module-level dict.
    result = {}
    try:
        file_uuid = request.GET.get('file_uuid', None)
        if file_uuid:
            file_obj = IDCardPicture.objects.get(picture_uuid=file_uuid)
            # Normalise Windows-style separators before serving the path.
            file_root = (MEDIA_ROOT + '/' + str(file_obj.picture_path)).replace("\\", "/")
            operation_object = file_obj.id
            # Bare file name = last segment of the stored relative path.
            file_name = str(file_obj.picture_path).split("/")[-1]
            ready_file = open(file_root, 'rb')  # closed by FileResponse
            response = FileResponse(ready_file)
            response['Content-Type'] = 'application/octet-stream'
            response['Content-Disposition'] = 'attachment;filename=' + file_name
            result['status'] = True
            result['message'] = '下载成功!'
            result['data'] = json.dumps({}, ensure_ascii=False)
            result["level"] = log_level_download
            save_operation_log(request, inspect.stack()[0][3], str(operation_object), result)
            return response
        else:
            title_msg = '下载文件异常'
            message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
            return render_result(request, "page_main_controller/student/report_download_page.html",
                                 {'title_msg': title_msg, 'not_exist': True, 'message': message})
    except Exception as e:
        result['status'] = False
        result['message'] = '下载异常!错误提示:' + str(e)
        result['data'] = json.dumps({}, ensure_ascii=False)
        result["level"] = log_level_download
        save_operation_log(request, inspect.stack()[0][3], "", result)
        title_msg = '下载文件异常'
        message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员。错误提示:' + str(e)
        return render_result(request, "page_main_controller/student/report_download_page.html",
                             {'title_msg': title_msg, 'not_exist': True, 'message': message})
def electronic_communication_download(request):
    """
    Build the electronic-communication import template for all students and
    redirect to the report download page.

    :param request: HttpRequest used to resolve the student query.
    :return: HttpResponseRedirect to the download page (with or without uuid).
    """
    # The original assigned an unused local ``title_msg`` — removed.
    student_infos = get_student_by_conditions_status_skill_main_class(request, 0)
    file_uuid = electronic_communication_format(student_infos)
    file_message_one = '&file_message_one=电子通信导入模板'
    if file_uuid:
        return HttpResponseRedirect('/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
    return HttpResponseRedirect('/report/report_download_page/')
def spin_download(request):
    """
    Build the textile (spinning) import template for all students and
    redirect to the report download page.

    The original docstring was copy-pasted from the electronic-communication
    view; it is corrected here.  The unused local ``title_msg`` is removed.

    :param request: HttpRequest used to resolve the student query.
    :return: HttpResponseRedirect to the download page (with or without uuid).
    """
    student_infos = get_student_by_conditions_status_skill_main_class(request, 0)
    file_uuid = spin_format(student_infos)
    file_message_one = '&file_message_one=纺织类导入模板'
    if file_uuid:
        return HttpResponseRedirect('/report/report_download_page?file_uuid=' + file_uuid + file_message_one)
    return HttpResponseRedirect('/report/report_download_page/')
def worker_years_6_download(request):
    """
    Build the roster (and certificate file) of workers with 6+ years of
    service and render the download page.

    :param request: HttpRequest carrying the search filters.
    :return: rendered download page (success or error variant).
    """
    title_msg = '工作满6年(含)以上人员名单'
    # The original wrapped everything in ``except Exception as e: raise e``,
    # a no-op that only added a traceback frame — exceptions now propagate
    # unchanged.
    student_infos, kwargs = search_parameter(request, '')
    file_uuid, file_uuid2 = worker_year_6(student_infos, kwargs)
    if file_uuid and file_uuid2:
        message = "文件获取成功,请点击下载查看"
        file_message_one = '工作满6年(含)以上人员名单'
        file_message_two = '证 明(工作满6年)'
        return render_result(request, "page_main_controller/student/report_download_page.html",
                             {'title_msg': title_msg, 'not_exist': False, 'message': message,
                              'file_uuid': file_uuid, 'file_uuid2': file_uuid2, 'chemical': False,
                              'file_message_one': file_message_one,
                              'file_message_two': file_message_two})
    message = '未正确获取到您的填报信息,或者系统异常,请稍后重新下载或者联系管理员'
    return render_result(request, "page_main_controller/student/report_download_page.html",
                         {'title_msg': title_msg, 'not_exist': True, 'message': message})
| 44.377892
| 114
| 0.609917
| 1,786
| 17,263
| 5.556551
| 0.103024
| 0.059653
| 0.077993
| 0.088069
| 0.838976
| 0.789903
| 0.761185
| 0.749295
| 0.721483
| 0.696493
| 0
| 0.002532
| 0.290911
| 17,263
| 389
| 115
| 44.377892
| 0.808186
| 0.114986
| 0
| 0.657993
| 0
| 0
| 0.212896
| 0.145129
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048327
| false
| 0
| 0.070632
| 0
| 0.260223
| 0.02974
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
33999474a4d0a9cccda72b57d453c9bcb15ac005
| 75
|
py
|
Python
|
world_manager/blueprints/user/forms.py
|
hoogamaphone/world-manager
|
8d4515b93d303cf91626f69257e7cf00e200807a
|
[
"MIT"
] | null | null | null |
world_manager/blueprints/user/forms.py
|
hoogamaphone/world-manager
|
8d4515b93d303cf91626f69257e7cf00e200807a
|
[
"MIT"
] | null | null | null |
world_manager/blueprints/user/forms.py
|
hoogamaphone/world-manager
|
8d4515b93d303cf91626f69257e7cf00e200807a
|
[
"MIT"
] | null | null | null |
from flask_wtf.form import FlaskForm
class LoginForm(FlaskForm):
    """Login form placeholder — no fields are defined yet."""
    pass
| 15
| 36
| 0.786667
| 10
| 75
| 5.8
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 75
| 5
| 37
| 15
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
33ba9ffac4b6c3cf778307a0bd1e35c05eb3992b
| 53
|
py
|
Python
|
python/testData/refactoring/pullup/abstractMethodPy3AddMeta/SuperClass.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/pullup/abstractMethodPy3AddMeta/SuperClass.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/pullup/abstractMethodPy3AddMeta/SuperClass.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from abc import object
class Parent(object):
    """Empty superclass.

    NOTE(review): per the accompanying metadata this file is IDE
    refactoring test data (pull-up fixture), so the odd surrounding
    import is presumably intentional — do not "fix" it.
    """
    pass
| 17.666667
| 22
| 0.754717
| 8
| 53
| 5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 53
| 3
| 23
| 17.666667
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
33ed6ce09662af116547f691c3a8a2ac217c6f6a
| 155
|
py
|
Python
|
ml2/tools/bosy/__init__.py
|
reactive-systems/ml2
|
c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c
|
[
"MIT"
] | 2
|
2021-06-30T14:00:11.000Z
|
2022-02-02T14:12:26.000Z
|
ml2/tools/bosy/__init__.py
|
reactive-systems/ml2
|
c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c
|
[
"MIT"
] | null | null | null |
ml2/tools/bosy/__init__.py
|
reactive-systems/ml2
|
c5ecaf07816b48a76ce4f7a3255fcf3baa78fd5c
|
[
"MIT"
] | 1
|
2021-07-01T13:38:33.000Z
|
2021-07-01T13:38:33.000Z
|
from . import bosy_wrapper, bosy_worker, bosy_utils
from .bosy import BoSy
from .bosy_worker import bosy_worker_fn
from .bosy_wrapper import add_bosy_args
| 31
| 51
| 0.845161
| 26
| 155
| 4.692308
| 0.346154
| 0.245902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116129
| 155
| 4
| 52
| 38.75
| 0.890511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d3b11a2bcd06e493c4c42f7e15691aa8cea9791
| 29,308
|
py
|
Python
|
academic/views.py
|
masoodazhar/-school-management-system
|
6525b3d29d12f03e05d362d81b7c5855806f57d9
|
[
"Apache-2.0"
] | 1
|
2022-01-20T10:20:05.000Z
|
2022-01-20T10:20:05.000Z
|
academic/views.py
|
masoodazhar/-school-management-system
|
6525b3d29d12f03e05d362d81b7c5855806f57d9
|
[
"Apache-2.0"
] | null | null | null |
academic/views.py
|
masoodazhar/-school-management-system
|
6525b3d29d12f03e05d362d81b7c5855806f57d9
|
[
"Apache-2.0"
] | 1
|
2022-01-20T10:20:31.000Z
|
2022-01-20T10:20:31.000Z
|
from django.shortcuts import render, redirect
from django.views.generic import CreateView, UpdateView, DeleteView, ListView, TemplateView
from django.views.generic.edit import BaseCreateView
from .models import Classes, Subject, Routine, Section, Room
from payroll.models import Teacher
from django.urls import reverse_lazy
from django.http import HttpResponse, JsonResponse
from django import forms
from django.contrib.auth.models import User, Permission, Group
# from payroll.forms import TeacherForm
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.mixins import PermissionRequiredMixin
from home.decorators import allowed_users
from django.contrib.auth.decorators import login_required
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from .forms import SubjectForm, SectionForm, ClassesForm
from home.models import Setting, SchoolProfile
from datetime import datetime, time, date, timedelta
# Create your views here.
# Section Academics
def time_diff(time_str1, time_str2):
    """Return the absolute difference between two 'HH:MM' strings as 'HH:MM'."""
    fmt = '%H:%M'
    start = datetime.strptime(time_str1, fmt)
    end = datetime.strptime(time_str2, fmt)
    elapsed = abs(end - start).seconds
    hours, remainder = divmod(elapsed, 3600)
    return time(hours, remainder // 60).strftime(fmt)
def get_section_by_class(request):
    """
    AJAX endpoint: return the section linked to the selected class as JSON.

    :param request: HttpRequest; expects GET parameter ``class_name`` (a pk).
    :return: JsonResponse of shape {'data': [{'id': ..., 'name': ...}]}.
    """
    class_id = request.GET.get('class_name')
    selected_class = Classes.objects.get(pk=class_id)
    # Debug print() calls removed.
    sections = Section.objects.get(pk=selected_class.section.pk)
    section = [{
        'id': sections.pk,
        'name': sections.section_name
    }]
    return JsonResponse({'data': section})
# @allowed_users('view_academic')
@login_required
def AcademicView(request):
    """
    Render the academic dashboard: counts plus listings of teachers,
    subjects, classes and routines scoped to the current module holder.

    :param request: HttpRequest of an authenticated user.
    :return: rendered 'academic/academic_main.html'.
    """
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        module_holder = Teacher.objects.get(user_ptr_id=request.user.id).module_holder
    # Build each queryset once and derive the count from it, instead of the
    # original's duplicate ``.filter(...).count()`` query per model.
    teachers = Teacher.objects.filter(module_holder=module_holder)
    subjects = Subject.objects.filter(module_holder=module_holder)
    classes = Classes.objects.filter(module_holder=module_holder)
    routines = Routine.objects.filter(module_holder=module_holder)
    all_data = {
        'total_teachers': teachers.count(),
        'total_subject': subjects.count(),
        'total_class': classes.count(),
        'total_routine': routines.count(),
        'teachers': teachers,
        'subjects': subjects,
        'classes': classes,
        'routines': routines,
    }
    return render(request, 'academic/academic_main.html', all_data)
# STRAT CLASSES HERE
class RoomView(LoginRequiredMixin, ListView):
    """List all rooms belonging to the current module holder."""
    model = Room
    login_url = 'home:login'
    template_name = 'academic/room.html'
    success_message = 'Room has been created!'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def get_context_data(self, **kwargs):
        context = super(RoomView, self).get_context_data(**kwargs)
        context['room'] = Room.objects.filter(module_holder=self._module_holder())
        return context
class RoomCreate(LoginRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a new Room, stamping it with the current module holder."""
    model = Room
    fields = [
        'room_name',
        'room_number',
    ]
    template_name = 'academic/room_add.html'
    success_message = 'Room has been created!'
    login_url = 'home:lgoin'  # NOTE(review): possible typo for 'home:login' — confirm URLconf

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def form_valid(self, form):
        form.instance.module_holder = self._module_holder()
        return super().form_valid(form)
class RoomUpdate(LoginRequiredMixin, SuccessMessageMixin, UpdateView):
    """Edit an existing Room, re-stamping the module holder on save."""
    model = Room
    fields = [
        'room_name',
        'room_number',
    ]
    template_name = 'academic/room_add.html'
    success_message = 'Room has been updated!'
    login_url = 'home:lgoin'  # NOTE(review): possible typo for 'home:login' — confirm URLconf

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def form_valid(self, form):
        form.instance.module_holder = self._module_holder()
        return super().form_valid(form)
class RoomDelete(LoginRequiredMixin ,DeleteView):
    """Delete a Room and return to the room list."""
    login_url = 'home:lgoin'  # NOTE(review): looks like a typo for 'home:login' — confirm URLconf
    # permission_required = 'academic.delete_room'
    model = Room
    success_url = reverse_lazy('academic:room_view')
# STRAT CLASSES HERE
class ClassesView(LoginRequiredMixin, PermissionRequiredMixin, ListView):
    """List the classes belonging to the current module holder."""
    permission_required = 'academic.view_classes'
    model = Classes
    login_url = 'home:login'
    template_name = 'academic/classes.html'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def get_context_data(self, **kwargs):
        context = super(ClassesView, self).get_context_data(**kwargs)
        context['classes'] = Classes.objects.filter(module_holder=self._module_holder())
        return context
class ClassesCreate(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a Classes record; supports both normal and AJAX submissions."""
    permission_required = 'academic.add_classes'
    success_message = 'Class has been created!'
    model = Classes
    fields = [
        'class_name',
        'class_number',
        'fee',
        'section',
        'discount',
        'note'
    ]
    sectionform = SectionForm()
    login_url = 'home:lgoin'
    template_name = 'academic/classes_add.html'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def get_form(self, **kwargs):
        """Scope the section choices to the current module holder."""
        form = super(ClassesCreate, self).get_form(**kwargs)
        form.fields['section'].widget.attrs = {'class': 'basic-multiple', 'autocomplete': 'off'}
        form.fields['section'].queryset = Section.objects.filter(module_holder=self._module_holder())
        return form

    def get_context_data(self, **kwargs):
        context = super(ClassesCreate, self).get_context_data(**kwargs)
        context['sectionform'] = self.sectionform
        return context

    def form_valid(self, form):
        """Save normally; for AJAX callers answer with the new record as JSON."""
        holder = self._module_holder()
        form.instance.module_holder = holder
        resolver = super().form_valid(form)
        if not self.request.is_ajax():
            return resolver
        created = Classes.objects.filter(module_holder=holder).order_by('-id').first()
        return JsonResponse({
            'pk': created.pk,
            'name': created.class_name,
            'monthly_fee': created.fee,
            'discount': created.discount,
            'status': 'success'
        })
class ClassesUpdate(LoginRequiredMixin, SuccessMessageMixin, PermissionRequiredMixin, UpdateView):
    """Edit an existing Classes record."""
    permission_required = 'academic.change_classes'
    model = Classes
    fields = [
        'class_name',
        'class_number',
        'fee',
        'section',
        'discount',
        'note'
    ]
    login_url = 'home:lgoin'
    template_name = 'academic/classes_add.html'
    success_message = 'Class has been updated!'

    def get_form(self, **kwargs):
        """Scope the section choices to the current module holder."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        form = super(ClassesUpdate, self).get_form(**kwargs)
        form.fields['section'].widget.attrs = {'class': 'basic-multiple', 'autocomplete': 'off'}
        form.fields['section'].queryset = Section.objects.filter(module_holder=module_holder)
        # Stray debug print(module_holder) removed.
        return form

    def form_valid(self, form):
        """Re-stamp the module holder before saving."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        form.instance.module_holder = module_holder
        return super().form_valid(form)
class ClassesDelete(LoginRequiredMixin ,PermissionRequiredMixin ,DeleteView):
    """Delete a Classes record and return to the class list."""
    login_url = 'home:lgoin'  # NOTE(review): looks like a typo for 'home:login' — confirm URLconf
    permission_required = 'academic.delete_classes'
    model = Classes
    success_url = reverse_lazy('academic:classes_view')
# SECTION START HERE
class SectionCreate(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, CreateView):
    """Create a Section; AJAX submissions receive a JSON payload instead."""
    permission_required = 'academic.add_section'
    template_name = 'academic/section_view_or_create.html'
    model = Section
    login_url = 'home:lgoin'
    fields = ['section_name']
    success_message = 'Section has been created successfully!'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def form_valid(self, form):
        """Save normally; for AJAX callers answer with the new record as JSON."""
        holder = self._module_holder()
        form.instance.module_holder = holder
        response = super().form_valid(form)
        if not self.request.is_ajax():
            return response
        newest = Section.objects.filter(module_holder=holder).order_by('-id').first()
        return JsonResponse({
            'pk': newest.pk,
            'name': newest.section_name,
            'status': 'success'
        })

    def form_invalid(self, form):
        """AJAX callers get a {'status': 'already'} marker on validation failure."""
        response = super().form_invalid(form)
        if self.request.is_ajax():
            return JsonResponse({'status': 'already'})
        return response

    def get_context_data(self, **kwargs):
        context = super(SectionCreate, self).get_context_data(**kwargs)
        context['section'] = Section.objects.filter(module_holder=self._module_holder())
        return context
class SectionUpdate(LoginRequiredMixin, SuccessMessageMixin, PermissionRequiredMixin, UpdateView):
    """Edit a Section, re-stamping the module holder on save."""
    permission_required = 'academic.change_section'
    template_name = 'academic/section_view_or_create.html'
    model = Section
    login_url = 'home:lgoin'
    fields = ['section_name']
    success_message = 'Section has been updated!'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def form_valid(self, form):
        form.instance.module_holder = self._module_holder()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        context = super(SectionUpdate, self).get_context_data(**kwargs)
        context['section'] = Section.objects.filter(module_holder=self._module_holder())
        return context
class SectionDelete(PermissionRequiredMixin, LoginRequiredMixin ,DeleteView):
    """Delete a Section."""
    permission_required = 'academic.delete_section'
    login_url = 'home:lgoin'  # NOTE(review): looks like a typo for 'home:login' — confirm URLconf
    model = Section
    # NOTE(review): redirects into the student app rather than academic — confirm intended.
    success_url = reverse_lazy('student:create_section')
# STRAT SUBJECT HERE
class SubjectView(PermissionRequiredMixin, LoginRequiredMixin, ListView):
    """List subjects, expanding each stored class-pk list into class names."""
    permission_required = 'academic.view_subject'
    model = Subject
    login_url = 'home:lgoin'
    template_name = 'academic/subject.html'

    def get_context_data(self, **kwargs):
        """Build display rows; each subject's ``class_name`` field holds a
        stringified list of Classes pks, e.g. "[1, 2]"."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        context = super(SubjectView, self).get_context_data(**kwargs)
        all_subject_row = []
        for sd in Subject.objects.filter(module_holder=module_holder):
            class_name_ids = sd.class_name.replace('[', '').replace(']', '').split(',')
            class_names = []
            for class_id in class_name_ids:
                clss = Classes.objects.filter(pk=class_id).first()
                # Original crashed with AttributeError when a referenced class
                # had been deleted (``first()`` returns None); skip it instead.
                if clss is not None:
                    class_names.append({'class_name': clss.class_name})
            all_subject_row.append({
                'subject_name': sd.subject_name,
                'pk': sd.pk,
                'subject_code': sd.subject_code,
                'note': sd.note,
                'class_name': class_names
            })
        context['subjects'] = all_subject_row
        return context
class SubjectCreate(PermissionRequiredMixin, SuccessMessageMixin, LoginRequiredMixin, CreateView):
    """Create a Subject; the selected classes are stored as a list of pks."""
    permission_required = 'academic.add_subject'
    model = Subject
    login_url = 'home:lgoin'
    fields = [
        'class_name',
        # 'teacher_name',
        'subject_type',
        'pass_mark',
        'final_mark',
        'subject_name',
        'subject_code',
        'note'
    ]
    template_name = 'academic/subject_add.html'
    success_message = 'Subject has been Created!'

    def form_valid(self, form):
        """Stamp the module holder and serialize the selected class pks."""
        if self.request.user.is_staff:
            form.instance.module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            form.instance.module_holder = this_holder.module_holder
        # The model field stores the list's repr; keep that behavior.
        form.instance.class_name = [int(i) for i in self.request.POST.getlist('class_name')]
        return super().form_valid(form)

    def get_form(self, **kwargs):
        """Scope the class choices to the current module holder."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        form = super(SubjectCreate, self).get_form(**kwargs)
        form.fields['subject_type'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['class_name'] = forms.ModelChoiceField(queryset=Classes.objects.filter(module_holder=module_holder))
        form.fields['class_name'].widget.attrs = {'class': 'basic-multiple', 'multiple': 'multiple', 'autocomplete': 'off'}
        form.fields['class_name'].queryset = Classes.objects.filter(module_holder=module_holder)
        # Stray debug print(module_holder) removed.
        return form

    def get_context_data(self, **kwargs):
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        context = super(SubjectCreate, self).get_context_data(**kwargs)
        context['classesform'] = ClassesForm(module_holder)
        return context
class SubjectUpdate(PermissionRequiredMixin, SuccessMessageMixin, LoginRequiredMixin, UpdateView):
    """Edit a Subject; the selected classes are re-serialized as pks on save."""
    permission_required = 'academic.change_subject'
    model = Subject
    login_url = 'home:lgoin'
    fields = [
        'class_name',
        # 'teacher_name',
        'subject_type',
        'pass_mark',
        'final_mark',
        'subject_name',
        'subject_code',
        'note'
    ]
    template_name = 'academic/subject_add.html'
    success_message = 'Subject has been Updated!'

    def _module_holder(self):
        """Staff users act as their own module holder; teachers inherit theirs."""
        user = self.request.user
        if user.is_staff:
            return user.username
        return Teacher.objects.get(user_ptr_id=user.id).module_holder

    def form_valid(self, form):
        form.instance.module_holder = self._module_holder()
        form.instance.class_name = [int(pk) for pk in self.request.POST.getlist('class_name')]
        return super().form_valid(form)

    def get_form(self, **kwargs):
        """Scope the class choices to the current module holder."""
        holder = self._module_holder()
        form = super(SubjectUpdate, self).get_form(**kwargs)
        form.fields['subject_type'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['class_name'] = forms.ModelChoiceField(queryset=Classes.objects.filter(module_holder=holder))
        form.fields['class_name'].widget.attrs = {'class': 'basic-multiple', 'multiple': 'multiple', 'autocomplete': 'off'}
        form.fields['class_name'].queryset = Classes.objects.filter(module_holder=holder)
        return form
class SubjectDelete(PermissionRequiredMixin, LoginRequiredMixin ,DeleteView):
    """Delete a Subject and return to the subject list."""
    permission_required = 'academic.delete_subject'
    model = Subject
    login_url = 'home:lgoin'  # NOTE(review): looks like a typo for 'home:login' — confirm URLconf
    success_url = reverse_lazy('academic:subject_view')
# STRAT ROUTINE HERE
class RoutineView(PermissionRequiredMixin, LoginRequiredMixin, ListView):
    """List the routines belonging to the current module holder."""
    permission_required = 'academic.view_routine'
    model = Routine
    login_url = 'home:lgoin'
    template_name = 'academic/routine.html'

    def get_context_data(self, **kwargs):
        context = super(RoutineView, self).get_context_data(**kwargs)
        user = self.request.user
        if user.is_staff:
            holder = user.username
        else:
            holder = Teacher.objects.get(user_ptr_id=user.id).module_holder
        context['routines'] = Routine.objects.filter(module_holder=holder)
        return context
class RoutineCreate(PermissionRequiredMixin, SuccessMessageMixin, LoginRequiredMixin ,CreateView):
    """Create a Routine; form widgets are pre-configured from the school profile."""
    permission_required = 'academic.add_routine'
    model = Routine
    login_url = 'home:lgoin'  # NOTE(review): looks like a typo for 'home:login' — confirm URLconf
    fields = [
        'date_from',
        'date_to',
        'class_name',
        'section_name',
        'subject_name',
        'school_day',
        'teacher_name',
        'start_time',
        'end_time',
        'room'
    ]
    template_name = 'academic/routine_add.html'
    success_message = 'Routine has been Created!'
    def form_valid(self, form):
        """Stamp the routine with the current module holder before saving."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        form.instance.module_holder = module_holder
        return super().form_valid(form)
    def get_context_data(self, **kwargs):
        """Expose the school-day length in minutes plus helper sub-forms."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        context = super(RoutineCreate, self).get_context_data(**kwargs)
        context['setting_time'] = SchoolProfile.objects.filter(module_holder=module_holder).first()
        # NOTE(review): same query issued twice; ``setting`` duplicates setting_time.
        setting = SchoolProfile.objects.filter(module_holder=module_holder).first()
        # Round-trip through strptime/strftime normalises the stored 'HH:MM' strings.
        t1 = datetime.strptime(setting.school_timing_from, '%H:%M')
        t2 = datetime.strptime(setting.school_timing_to, '%H:%M')
        t1 = datetime.strftime(t1, '%H:%M')
        t2 = datetime.strftime(t2, '%H:%M')
        hours = time_diff(t1, t2)
        # Convert the 'HH:MM' span into total minutes for the template.
        hourssplited = hours.split(':')
        hours_minuts = int(hourssplited[0])*60
        hours_minuts = hours_minuts+int(hourssplited[1])
        context['times'] = hours_minuts
        context['classesform'] = ClassesForm(module_holder)
        context['sectionform'] = SectionForm()
        return context
    def get_form(self, **kwargs):
        """Configure widgets and scope every choice field to the module holder."""
        form = super(RoutineCreate, self).get_form(**kwargs)
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        setting = SchoolProfile.objects.filter(module_holder=module_holder).first()
        # form['start_time'] = setting.school_timing_from
        # form['end_time'] = setting.school_timing_to
        # Pre-fill the time pickers with the school's configured day span.
        form.fields['start_time'].widget = forms.TextInput( attrs = {'type': 'time', 'value': setting.school_timing_from})
        form.fields['end_time'].widget = forms.TextInput( attrs = {'type': 'time', 'value': setting.school_timing_to})
        form.fields['date_from'].widget=forms.TextInput(attrs = {'type': 'date'})
        form.fields['date_to'].widget= forms.TextInput(attrs = {'type': 'date'})
        # form.fields['school_day'].widget.attrs = {'class': 'basic-multiple'}
        # Weekday checkbox choices (variable name is a copy-paste leftover).
        FAVORITE_COLORS_CHOICES = [ ('Monday', 'Monday'), ('Tuesday', 'Tuesday'), ('Wednesday', 'Wednesday'), ('Thursday', 'Thursday'), ('Friday', 'Friday'),('Saturday', 'Saturday'),('Sunday', 'Sunday'), ]
        form.fields['school_day'] = forms.TypedMultipleChoiceField(choices=FAVORITE_COLORS_CHOICES,widget=forms.CheckboxSelectMultiple)
        form.fields['class_name'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['class_name'].queryset = Classes.objects.filter(module_holder=module_holder)
        form.fields['section_name'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['section_name'].queryset = Section.objects.filter(module_holder=module_holder)
        form.fields['room'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['room'].queryset = Room.objects.filter(module_holder=module_holder)
        form.fields['subject_name'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['subject_name'].queryset = Subject.objects.filter(module_holder=module_holder)
        form.fields['teacher_name'].widget.attrs = {'class': 'basic-multiple'}
        form.fields['teacher_name'].queryset = Teacher.objects.filter(module_holder=module_holder)
        return form
class RoutineUpdate(PermissionRequiredMixin, SuccessMessageMixin, LoginRequiredMixin, UpdateView):
    """
    Edit a Routine.

    The original class defined ``get_form`` twice; the first definition (which
    only set bare time widgets) was dead code silently shadowed by the second,
    so it has been removed.
    """
    permission_required = 'academic.change_routine'
    model = Routine
    login_url = 'home:lgoin'
    fields = [
        'date_from',
        'date_to',
        'class_name',
        'section_name',
        'subject_name',
        'school_day',
        'teacher_name',
        'start_time',
        'end_time',
        'room'
    ]
    template_name = 'academic/routine_add.html'
    success_message = 'Routine has been updated'

    def form_valid(self, form):
        """Stamp the routine with the current module holder before saving."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        form.instance.module_holder = module_holder
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        """Expose the school-day length in minutes plus helper sub-forms."""
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        context = super(RoutineUpdate, self).get_context_data(**kwargs)
        # Fetch the profile once (the original issued the same query twice).
        setting = SchoolProfile.objects.filter(module_holder=module_holder).first()
        context['setting_time'] = setting
        # Round-trip through strptime/strftime normalises the stored 'HH:MM' strings.
        t1 = datetime.strptime(setting.school_timing_from, '%H:%M')
        t2 = datetime.strptime(setting.school_timing_to, '%H:%M')
        t1 = datetime.strftime(t1, '%H:%M')
        t2 = datetime.strftime(t2, '%H:%M')
        hours = time_diff(t1, t2)
        hourssplited = hours.split(':')
        # Convert the 'HH:MM' span into total minutes for the template.
        context['times'] = int(hourssplited[0]) * 60 + int(hourssplited[1])
        context['classesform'] = ClassesForm(module_holder)
        context['sectionform'] = SectionForm()
        return context

    def get_form(self, **kwargs):
        """Configure widgets and scope every choice field to the module holder."""
        form = super(RoutineUpdate, self).get_form(**kwargs)
        if self.request.user.is_staff:
            module_holder = self.request.user.username
        else:
            this_holder = Teacher.objects.get(user_ptr_id=self.request.user.id)
            module_holder = this_holder.module_holder
        setting = SchoolProfile.objects.filter(module_holder=module_holder).first()
        # Pre-fill the time pickers with the school's configured day span.
        form.fields['start_time'].widget = forms.TextInput(attrs={'type': 'time', 'value': setting.school_timing_from})
        form.fields['end_time'].widget = forms.TextInput(attrs={'type': 'time', 'value': setting.school_timing_to})
        form.fields['date_from'].widget = forms.TextInput(attrs={'type': 'date'})
        form.fields['date_to'].widget = forms.TextInput(attrs={'type': 'date'})
        # Weekday checkbox choices.
        WEEKDAY_CHOICES = [('Monday', 'Monday'), ('Tuesday', 'Tuesday'), ('Wednesday', 'Wednesday'), ('Thursday', 'Thursday'), ('Friday', 'Friday'), ('Saturday', 'Saturday'), ('Sunday', 'Sunday')]
        form.fields['school_day'] = forms.TypedMultipleChoiceField(choices=WEEKDAY_CHOICES, widget=forms.CheckboxSelectMultiple)
        # Restrict every relation field to the current module holder.
        for field_name, model_cls in (
            ('class_name', Classes),
            ('section_name', Section),
            ('room', Room),
            ('subject_name', Subject),
            ('teacher_name', Teacher),
        ):
            form.fields[field_name].widget.attrs = {'class': 'basic-multiple'}
            form.fields[field_name].queryset = model_cls.objects.filter(module_holder=module_holder)
        return form
class RoutineDelete(PermissionRequiredMixin, LoginRequiredMixin, DeleteView):
    """Confirm-and-delete view for Routine rows, guarded by the delete permission."""

    model = Routine
    permission_required = 'academic.delete_routine'
    # NOTE(review): 'home:lgoin' looks like a typo for 'home:login' — kept as-is
    # because it must match the URL name actually registered in the home app.
    login_url = 'home:lgoin'
    success_url = reverse_lazy('academic:routine_view')
| 41.808845
| 206
| 0.648048
| 3,229
| 29,308
| 5.661815
| 0.068752
| 0.117493
| 0.061536
| 0.063013
| 0.837545
| 0.810962
| 0.785253
| 0.734876
| 0.672957
| 0.648726
| 0
| 0.0018
| 0.241811
| 29,308
| 700
| 207
| 41.868571
| 0.820935
| 0.028354
| 0
| 0.702609
| 0
| 0
| 0.119239
| 0.024792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053913
| false
| 0.003478
| 0.031304
| 0
| 0.34087
| 0.005217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1d7460e7101ea78a7d8582687fd6c012f246031e
| 6,060
|
py
|
Python
|
test/functions/test_inv_matmul.py
|
yushangdi/gpytorch
|
3234046ecb672965af8765d47eb016f85b729bb0
|
[
"MIT"
] | null | null | null |
test/functions/test_inv_matmul.py
|
yushangdi/gpytorch
|
3234046ecb672965af8765d47eb016f85b729bb0
|
[
"MIT"
] | null | null | null |
test/functions/test_inv_matmul.py
|
yushangdi/gpytorch
|
3234046ecb672965af8765d47eb016f85b729bb0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import torch
import unittest
import os
import random
from gpytorch import settings
from gpytorch.lazy import NonLazyTensor
class TestInvMatmulNonBatch(unittest.TestCase):
    """inv_matmul on a non-batch NonLazyTensor must match a dense inverse-matmul."""

    def tearDown(self):
        # Restore the global RNG state captured in setUp, if we seeded it.
        if hasattr(self, "rng_state"):
            torch.set_rng_state(self.rng_state)

    def setUp(self):
        unlock_seed = os.getenv("unlock_seed")
        if unlock_seed is None or unlock_seed.lower() == "false":
            # Fix every RNG so the test is deterministic by default.
            self.rng_state = torch.get_rng_state()
            torch.manual_seed(1)
            if torch.cuda.is_available():
                torch.cuda.manual_seed_all(1)
            random.seed(1)
        mat = torch.randn(8, 8)
        mat = mat @ mat.transpose(-1, -2)  # symmetric PSD test matrix
        vec = torch.randn(8)
        vecs = torch.randn(8, 4)
        # Independent leaf copies so gradients can be compared pairwise.
        for name, source in (("mat", mat), ("vec", vec), ("vecs", vecs)):
            setattr(self, name, source.detach().clone().requires_grad_(True))
            setattr(self, name + "_copy", source.detach().clone().requires_grad_(True))

    def test_inv_matmul_vec(self):
        with settings.terminate_cg_by_size(False):
            # Forward: compare against an explicit dense inverse.
            res = NonLazyTensor(self.mat).inv_matmul(self.vec)
            actual = self.mat_copy.inverse().matmul(self.vec_copy)
            self.assertLess(((res - actual).abs() / actual.abs()).max().item(), 1e-3)
            # Backward: gradients through both paths must agree too.
            grad_output = torch.randn(8)
            res.backward(gradient=grad_output)
            actual.backward(gradient=grad_output)
            self.assertLess((self.mat_copy.grad - self.mat.grad).abs().max().item(), 1e-3)
            self.assertLess((self.vec_copy.grad - self.vec.grad).abs().max().item(), 1e-3)

    def test_inv_matmul_multiple_vecs(self):
        with settings.terminate_cg_by_size(False):
            # Forward against the dense reference.
            res = NonLazyTensor(self.mat).inv_matmul(self.vecs)
            actual = self.mat_copy.inverse().matmul(self.vecs_copy)
            self.assertLess(((res - actual).abs() / actual.abs()).max().item(), 1e-3)
            # Backward.
            grad_output = torch.randn(8, 4)
            res.backward(gradient=grad_output)
            actual.backward(gradient=grad_output)
            self.assertLess((self.mat_copy.grad - self.mat.grad).abs().max().item(), 1e-3)
            self.assertLess((self.vecs_copy.grad - self.vecs.grad).abs().max().item(), 1e-3)
class TestInvMatmulBatch(unittest.TestCase):
    """inv_matmul on a batch of two matrices must match per-matrix dense inverses."""

    def tearDown(self):
        # Undo the seeding done in setUp, if any.
        if hasattr(self, "rng_state"):
            torch.set_rng_state(self.rng_state)

    def setUp(self):
        unlock_seed = os.getenv("unlock_seed")
        if unlock_seed is None or unlock_seed.lower() == "false":
            self.rng_state = torch.get_rng_state()
            torch.manual_seed(0)
            if torch.cuda.is_available():
                torch.cuda.manual_seed_all(0)
            random.seed(0)
        mats = torch.randn(2, 8, 8)
        mats = mats @ mats.transpose(-1, -2)  # batch of symmetric PSD matrices
        vecs = torch.randn(2, 8, 4)
        # Leaf copies for independent gradient comparison.
        for name, source in (("mats", mats), ("vecs", vecs)):
            setattr(self, name, source.detach().clone().requires_grad_(True))
            setattr(self, name + "_copy", source.detach().clone().requires_grad_(True))

    def test_inv_matmul_multiple_vecs(self):
        with settings.terminate_cg_by_size(False):
            # Forward: invert each batch element densely and compare.
            res = NonLazyTensor(self.mats).inv_matmul(self.vecs)
            actual = torch.stack([mat.inverse() for mat in self.mats_copy]).matmul(self.vecs_copy)
            self.assertLess((res - actual).abs().max().item(), 1e-3)
            # Backward.
            grad_output = torch.randn(2, 8, 4)
            res.backward(gradient=grad_output)
            actual.backward(gradient=grad_output)
            self.assertLess((self.mats_copy.grad - self.mats.grad).abs().max().item(), 1e-3)
            self.assertLess((self.vecs_copy.grad - self.vecs.grad).abs().max().item(), 1e-3)
class TestInvMatmulMultiBatch(unittest.TestCase):
    """inv_matmul over a 2-level batch (3 x 4 matrices) against dense inverses."""

    def tearDown(self):
        # Undo the seeding done in setUp, if any.
        if hasattr(self, "rng_state"):
            torch.set_rng_state(self.rng_state)

    def setUp(self):
        unlock_seed = os.getenv("unlock_seed")
        if unlock_seed is None or unlock_seed.lower() == "false":
            self.rng_state = torch.get_rng_state()
            torch.manual_seed(0)
            if torch.cuda.is_available():
                torch.cuda.manual_seed_all(0)
            random.seed(0)
        mats = torch.randn(3, 4, 8, 8)
        mats = mats @ mats.transpose(-1, -2)
        # Small diagonal jitter keeps every matrix well-conditioned.
        mats.add_(torch.eye(8).mul_(1e-1).view(1, 1, 8, 8))
        vecs = torch.randn(3, 4, 8, 2)
        for name, source in (("mats", mats), ("vecs", vecs)):
            setattr(self, name, source.detach().clone().requires_grad_(True))
            setattr(self, name + "_copy", source.detach().clone().requires_grad_(True))

    def test_inv_matmul_multiple_vecs(self):
        with settings.terminate_cg_by_size(False):
            # Forward: flatten the batch dims, invert each matrix densely,
            # then restore the original batch shape for the reference result.
            res = NonLazyTensor(self.mats).inv_matmul(self.vecs)
            flat_copies = self.mats_copy.view(-1, *self.mats.shape[-2:])
            dense_inverses = torch.stack([mat.inverse() for mat in flat_copies]).view_as(self.mats)
            actual = dense_inverses.matmul(self.vecs_copy)
            self.assertLess((res - actual).abs().max().item(), 1e-3)
            # Backward.
            grad_output = torch.randn(3, 4, 8, 2)
            res.backward(gradient=grad_output)
            actual.backward(gradient=grad_output)
            self.assertLess((self.mats_copy.grad - self.mats.grad).abs().max().item(), 1e-3)
            self.assertLess((self.vecs_copy.grad - self.vecs.grad).abs().max().item(), 1e-3)
if __name__ == "__main__":
    # Allow running this test module directly, outside a test runner.
    unittest.main()
| 42.083333
| 106
| 0.616667
| 806
| 6,060
| 4.444169
| 0.116625
| 0.040201
| 0.07426
| 0.089894
| 0.828308
| 0.817141
| 0.812116
| 0.76689
| 0.752931
| 0.752931
| 0
| 0.018012
| 0.239604
| 6,060
| 143
| 107
| 42.377622
| 0.759332
| 0.014686
| 0
| 0.603604
| 0
| 0
| 0.019457
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 1
| 0.09009
| false
| 0
| 0.054054
| 0
| 0.171171
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d5169749b811857414a1950e9741c7fa8c1f5b18
| 21
|
py
|
Python
|
pysac/io/legacy/__init__.py
|
SolarDrew/pysac
|
9fd86dd03966b7e7f90653a47a2ccca7964c83bc
|
[
"BSD-2-Clause"
] | null | null | null |
pysac/io/legacy/__init__.py
|
SolarDrew/pysac
|
9fd86dd03966b7e7f90653a47a2ccca7964c83bc
|
[
"BSD-2-Clause"
] | null | null | null |
pysac/io/legacy/__init__.py
|
SolarDrew/pysac
|
9fd86dd03966b7e7f90653a47a2ccca7964c83bc
|
[
"BSD-2-Clause"
] | null | null | null |
from legacy import *
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d529574dd973b56c664da83417e7e13307f40d1e
| 1,325
|
py
|
Python
|
auto.apt.script.py
|
eythaniym/debian_linux_helper_scripts
|
aa540e0da028cc8a30b0e5ea4cf7368657b7ecec
|
[
"MIT"
] | 1
|
2019-06-11T05:26:25.000Z
|
2019-06-11T05:26:25.000Z
|
auto.apt.script.py
|
eythaniym/debian_linux_helper_scripts
|
aa540e0da028cc8a30b0e5ea4cf7368657b7ecec
|
[
"MIT"
] | null | null | null |
auto.apt.script.py
|
eythaniym/debian_linux_helper_scripts
|
aa540e0da028cc8a30b0e5ea4cf7368657b7ecec
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
"""Run the routine apt maintenance commands (update/upgrade/autoremove) in sequence."""
__license__ = "MIT"
import os

# Welcome: ASCII-art banner, then a reminder that apt needs root privileges.
print("\n\n")
print(" _______ __ __ _______ _______ _______ _______ _______ _______ _______ ______ ___ _______ _______ ")
print("| _ || | | || || | | _ || || | | || || _ | | || || |")
print("| |_| || | | ||_ _|| _ | | |_| || _ ||_ _| | _____|| || | || | || _ ||_ _|")
print("| || |_| | | | | | | | | || |_| | | | | |_____ | || |_||_ | || |_| | | | ")
print("| || | | | | |_| | ___ | || ___| | | ___ |_____ || _|| __ || || ___| | | ")
print("| _ || | | | | || || _ || | | || | _____| || |_ | | | || || | | | ")
print("|__| |__||_______| |___| |_______||___||__| |__||___| |___||___||_______||_______||___| |_||___||___| |___|")
print("\n\t\t\t\tNOTE: Run script as sudo")
print("\n\n")

# Script: each apt step runs via the shell with -y (non-interactive).
# NOTE(review): os.system return codes are ignored, so failed steps are
# silently skipped over — the banner-only "run as sudo" hint is not enforced.
print("\tAPT UPDATE")
os.system("apt update -y")
print("\n\tAPT UPGRADE")
os.system("apt upgrade -y")
print("\n\tAPT FULL-UPGRADE (DIST-UPGRADE)")
os.system("apt full-upgrade -y")
print("\n\tAPT AUTOREMOVE")
os.system("apt autoremove -y")
print("\n\tDone!")
| 45.689655
| 131
| 0.383396
| 74
| 1,325
| 4.040541
| 0.351351
| 0.140468
| 0.301003
| 0.334448
| 0.254181
| 0.133779
| 0.133779
| 0
| 0
| 0
| 0
| 0.001218
| 0.380377
| 1,325
| 28
| 132
| 47.321429
| 0.362972
| 0.024906
| 0
| 0.095238
| 0
| 0.285714
| 0.813809
| 0.025601
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.047619
| 0
| 0.047619
| 0.714286
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
d540763df6f2587513127698b08022434589b1be
| 125
|
py
|
Python
|
professor/views/painelProfessor.py
|
roimpacta/exemplos
|
cbfe7c81fc14932697c02eb63bec7d7e4a2c5d5a
|
[
"Apache-2.0"
] | null | null | null |
professor/views/painelProfessor.py
|
roimpacta/exemplos
|
cbfe7c81fc14932697c02eb63bec7d7e4a2c5d5a
|
[
"Apache-2.0"
] | null | null | null |
professor/views/painelProfessor.py
|
roimpacta/exemplos
|
cbfe7c81fc14932697c02eb63bec7d7e4a2c5d5a
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
def painelProfessor(request):
    """Render the professor panel page."""
    template_name = "painel/painelProfessor.html"
    return render(request, template_name)
| 25
| 56
| 0.8
| 14
| 125
| 7.142857
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112
| 125
| 5
| 56
| 25
| 0.900901
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
d54f5408c989b8e74dde5d50f2dd0a1026836d5d
| 8,385
|
py
|
Python
|
gitlabform/gitlabform/test/test_group_members.py
|
kowpatryk/gitlabform
|
d77c40fd212028786fa51e201e13bb0bd552960f
|
[
"MIT"
] | null | null | null |
gitlabform/gitlabform/test/test_group_members.py
|
kowpatryk/gitlabform
|
d77c40fd212028786fa51e201e13bb0bd552960f
|
[
"MIT"
] | 15
|
2020-03-04T19:39:16.000Z
|
2022-03-21T23:01:19.000Z
|
gitlabform/gitlabform/test/test_group_members.py
|
grzesuav/gitlabform
|
c62bc4b3459194e28ed78aef807d5737e88d24e7
|
[
"MIT"
] | null | null | null |
import pytest
from gitlabform.gitlabform import GitLabForm
from gitlabform.gitlabform.test import (
create_group,
create_users,
remove_users_from_group,
get_gitlab,
add_users_to_group,
OWNER_ACCESS,
GROUP_NAME,
)
USER_BASE_NAME = "group_member_user" # user1, user2, ...
@pytest.fixture(scope="function")
def gitlab(request):
    """Provide a GitLab client against a group reset to a known clean state.

    Before the test: the group exists, four throwaway users exist, root is a
    member, and none of the throwaway users are members.  A finalizer restores
    the same state afterwards (root re-added as Owner).
    """
    throwaway_users = [
        "group_member_user1",
        "group_member_user2",
        "group_member_user3",
        "group_member_user4",
    ]
    gl = get_gitlab()
    create_group(GROUP_NAME)
    create_users(USER_BASE_NAME, 4)
    add_users_to_group(GROUP_NAME, ["root"])
    remove_users_from_group(GROUP_NAME, throwaway_users)

    def fin():
        # Same reset at the end of the test.
        add_users_to_group(GROUP_NAME, ["root"], OWNER_ACCESS)
        remove_users_from_group(GROUP_NAME, throwaway_users)

    request.addfinalizer(fin)
    return gl  # provide fixture value
some_users = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
group_members:
root: # creator of the group
access_level: 50
group_member_user2:
access_level: 30
group_member_user3:
access_level: 40
"""
add_users = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
group_members:
root: # creator of the group
access_level: 50
group_member_user1:
access_level: 50
group_member_user2:
access_level: 30
group_member_user3:
access_level: 40
group_member_user4: # new user
access_level: 40
"""
remove_users = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
enforce_group_members: true
group_members:
group_member_user1:
access_level: 50
group_member_user3:
access_level: 40
"""
not_remove_users_with_enforce_false = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
enforce_group_members: false
group_members:
root: # creator of the group
access_level: 50
group_member_user2:
access_level: 30
# a user removed
"""
not_remove_users_without_enforce = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
group_members:
root: # creator of the group
access_level: 50
group_member_user2:
access_level: 30
# a user removed
"""
change_some_users_access = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
group_members:
root: # creator of the group
access_level: 50
group_member_user1:
access_level: 50
group_member_user2:
access_level: 40 # changed level
group_member_user3:
access_level: 30 # changed level
"""
one_owner = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
group_members:
root: # creator of the group
access_level: 50
"""
change_owner = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
enforce_group_members: true
group_members:
group_member_user3: # new Owner
access_level: 50
"""
zero_owners = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
enforce_group_members: true
group_members:
group_member_user4:
access_level: 40
"""
zero_users = """
gitlab:
api_version: 4
group_settings:
gitlabform_tests_group:
enforce_group_members: true
group_members: {}
"""
class Helpers:
    """Shared setup steps for the group-member tests."""

    @staticmethod
    def setup_some_users(gitlab):
        """Apply the ``some_users`` config and verify the expected 3-member state."""
        GitLabForm(config_string=some_users, project_or_group=GROUP_NAME).main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 3
        usernames = [member["username"] for member in members]
        for expected in ("root", "group_member_user2", "group_member_user3"):
            assert usernames.count(expected) == 1
class TestGroupMembers:
    """End-to-end checks of gitlabform's group_members handling."""

    def test__setup_users(self, gitlab):
        """The baseline config alone must produce the expected membership."""
        Helpers.setup_some_users(gitlab)

    def test__add_users(self, gitlab):
        """Listing extra users must add them alongside the existing members."""
        Helpers.setup_some_users(gitlab)
        gf = GitLabForm(config_string=add_users, project_or_group=GROUP_NAME)
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 5
        members_usernames = [member["username"] for member in members]
        assert members_usernames.count("root") == 1
        assert members_usernames.count("group_member_user1") == 1
        assert members_usernames.count("group_member_user2") == 1
        assert members_usernames.count("group_member_user3") == 1
        assert members_usernames.count("group_member_user4") == 1

    def test__remove_users(self, gitlab):
        """With enforce on, members absent from the config must be removed."""
        Helpers.setup_some_users(gitlab)
        gf = GitLabForm(config_string=remove_users, project_or_group=GROUP_NAME)
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 2
        members_usernames = [member["username"] for member in members]
        assert members_usernames.count("group_member_user1") == 1
        assert members_usernames.count("group_member_user3") == 1

    def test__not_remove_users_with_enforce_false(self, gitlab):
        """With enforce explicitly false, unlisted members must be kept."""
        Helpers.setup_some_users(gitlab)
        gf = GitLabForm(
            config_string=not_remove_users_with_enforce_false,
            project_or_group=GROUP_NAME,
        )
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 3
        members_usernames = [member["username"] for member in members]
        assert members_usernames.count("root") == 1
        assert members_usernames.count("group_member_user2") == 1
        assert members_usernames.count("group_member_user3") == 1

    def test__not_remove_users_without_enforce(self, gitlab):
        """With enforce omitted, unlisted members must also be kept."""
        Helpers.setup_some_users(gitlab)
        gf = GitLabForm(
            config_string=not_remove_users_without_enforce, project_or_group=GROUP_NAME
        )
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 3
        members_usernames = [member["username"] for member in members]
        assert members_usernames.count("root") == 1
        assert members_usernames.count("group_member_user2") == 1
        assert members_usernames.count("group_member_user3") == 1

    def test__change_some_users_access(self, gitlab):
        """Changed access_level values must be applied to existing members."""
        Helpers.setup_some_users(gitlab)
        gf = GitLabForm(
            config_string=change_some_users_access, project_or_group=GROUP_NAME
        )
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 4
        for member in members:
            if member["username"] == "root":
                assert member["access_level"] == 50
            if member["username"] == "group_member_user1":
                assert member["access_level"] == 50
            if member["username"] == "group_member_user2":
                assert member["access_level"] == 40
            if member["username"] == "group_member_user3":
                assert member["access_level"] == 30

    def test__change_owner(self, gitlab):
        """Replacing the sole Owner must leave exactly the new Owner in place."""
        gf = GitLabForm(config_string=one_owner, project_or_group=GROUP_NAME)
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 1
        assert members[0]["access_level"] == 50
        assert members[0]["username"] == "root"
        gf = GitLabForm(config_string=change_owner, project_or_group=GROUP_NAME)
        gf.main()
        members = gitlab.get_group_members(GROUP_NAME)
        assert len(members) == 1
        assert members[0]["access_level"] == 50
        assert members[0]["username"] == "group_member_user3"

    def test__zero_owners(self, gitlab):
        """A config that would leave the group ownerless must abort."""
        gf = GitLabForm(config_string=zero_owners, project_or_group=GROUP_NAME)
        with pytest.raises(SystemExit):
            gf.main()

    def test__zero_users(self, gitlab):
        """An enforced empty membership must abort."""
        gf = GitLabForm(config_string=zero_users, project_or_group=GROUP_NAME)
        with pytest.raises(SystemExit):
            gf.main()
| 26.619048
| 87
| 0.657484
| 1,003
| 8,385
| 5.121635
| 0.092722
| 0.085653
| 0.068522
| 0.084096
| 0.829083
| 0.800662
| 0.777497
| 0.744403
| 0.722017
| 0.722017
| 0
| 0.021409
| 0.253548
| 8,385
| 314
| 88
| 26.703822
| 0.799329
| 0.006559
| 0
| 0.680934
| 0
| 0
| 0.350108
| 0.040836
| 0
| 0
| 0
| 0
| 0.124514
| 1
| 0.046693
| false
| 0
| 0.011673
| 0
| 0.070039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d5a29410cf276cbc1eaa9766d7bdaa5ad561a968
| 103
|
py
|
Python
|
app/inpatient/__init__.py
|
JONGWE1/BankManagement
|
363ecdc950ee9c38538b83cddaf1c1d8bd6322d0
|
[
"MIT"
] | 1
|
2019-09-10T15:01:28.000Z
|
2019-09-10T15:01:28.000Z
|
app/inpatient/__init__.py
|
JONGWE1/BankManagement
|
363ecdc950ee9c38538b83cddaf1c1d8bd6322d0
|
[
"MIT"
] | null | null | null |
app/inpatient/__init__.py
|
JONGWE1/BankManagement
|
363ecdc950ee9c38538b83cddaf1c1d8bd6322d0
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Blueprint for the inpatient module; the first argument sets the blueprint's name.
inpatient = Blueprint('inpatient', __name__)

# Imported at the bottom so the views module can import ``inpatient`` from this
# package without a circular import at load time.
from .import views
| 25.75
| 55
| 0.786408
| 12
| 103
| 6.416667
| 0.666667
| 0.467532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135922
| 103
| 4
| 56
| 25.75
| 0.865169
| 0.067961
| 0
| 0
| 0
| 0
| 0.094737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
d5afe1094b5740b4c4012ad2d061a26f577f9ed0
| 31
|
py
|
Python
|
opt/ros/kinetic/lib/python2.7/dist-packages/stereo_msgs/msg/__init__.py
|
Roboy/roboy_controlled_node_fpga
|
dbba4eff19ed04469a6196ba368cea231cad539d
|
[
"BSD-3-Clause"
] | 2
|
2018-01-29T03:10:39.000Z
|
2020-12-08T09:08:41.000Z
|
devel/lib/python3/dist-packages/stereo_msgs/msg/__init__.py
|
hyu-nani/ydlidar_ws
|
56316db999c057c4315a20ba8277826d6a043120
|
[
"MIT"
] | 1
|
2018-12-28T21:11:50.000Z
|
2018-12-28T21:11:50.000Z
|
devel/lib/python3/dist-packages/stereo_msgs/msg/__init__.py
|
hyu-nani/ydlidar_ws
|
56316db999c057c4315a20ba8277826d6a043120
|
[
"MIT"
] | 3
|
2018-01-29T12:22:56.000Z
|
2020-12-08T09:08:46.000Z
|
from ._DisparityImage import *
| 15.5
| 30
| 0.806452
| 3
| 31
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6336328442ee7fce34bad5e7e235281a755a6813
| 1,009
|
py
|
Python
|
tests/test_run_request.py
|
sodrooome/bmaclient
|
8d3609a63e3e2576642ecde77245ee0b02225e31
|
[
"MIT"
] | null | null | null |
tests/test_run_request.py
|
sodrooome/bmaclient
|
8d3609a63e3e2576642ecde77245ee0b02225e31
|
[
"MIT"
] | null | null | null |
tests/test_run_request.py
|
sodrooome/bmaclient
|
8d3609a63e3e2576642ecde77245ee0b02225e31
|
[
"MIT"
] | null | null | null |
import unittest
from bmaclient.client import MonitoringAPI
from bmaclient.exceptions import APIClientError
class FetchLavaDomesTest(unittest.TestCase):
    """fetch_lava_domes must reject calls whose required parameters are missing."""

    def setUp(self):
        self.api = MonitoringAPI(api_key='TEST_API_KEY')

    def test_request_without_supplying_params(self):
        # No parameters at all is an error.
        self.assertRaises(APIClientError, self.api.fetch_lava_domes)

    def test_request_with_some_params(self):
        # Supplying only a location is still insufficient.
        self.assertRaises(APIClientError, self.api.fetch_lava_domes, location='BARAT DAYA')
class FetchRfapDistanceTest(unittest.TestCase):
    """fetch_rfap_distance must reject calls whose required parameters are missing."""

    def setUp(self):
        self.api = MonitoringAPI(api_key='TEST_API_KEY')

    def test_request_without_supplying_params(self):
        # No parameters at all is an error.
        self.assertRaises(APIClientError, self.api.fetch_rfap_distance)

    def test_request_with_some_params(self):
        # Supplying only a start date is still insufficient.
        self.assertRaises(APIClientError, self.api.fetch_rfap_distance, start='2021-01-01')
if __name__ == '__main__':
    # Allow running this test module directly, outside a test runner.
    unittest.main()
| 27.27027
| 60
| 0.725471
| 118
| 1,009
| 5.881356
| 0.330508
| 0.060519
| 0.080692
| 0.103746
| 0.717579
| 0.717579
| 0.717579
| 0.717579
| 0.717579
| 0.717579
| 0
| 0.009732
| 0.185332
| 1,009
| 36
| 61
| 28.027778
| 0.83455
| 0
| 0
| 0.521739
| 0
| 0
| 0.051536
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 1
| 0.26087
| false
| 0
| 0.130435
| 0
| 0.478261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
634db5cbe9c6b5d388c67b275bc0bb1c0cc879e0
| 295
|
py
|
Python
|
polished/backends/mixins/base.py
|
ckcollab/polished
|
5a00b2fbe569bc957d1647c0849fd344db29b644
|
[
"MIT"
] | 9
|
2015-04-08T07:37:55.000Z
|
2019-06-05T10:21:27.000Z
|
polished/backends/mixins/base.py
|
ckcollab/polished
|
5a00b2fbe569bc957d1647c0849fd344db29b644
|
[
"MIT"
] | 2
|
2016-07-03T11:26:02.000Z
|
2016-07-03T11:44:41.000Z
|
polished/backends/mixins/base.py
|
ckcollab/polished
|
5a00b2fbe569bc957d1647c0849fd344db29b644
|
[
"MIT"
] | 1
|
2019-06-07T16:21:37.000Z
|
2019-06-07T16:21:37.000Z
|
class Base(object):
    """No-op base defining the polish backend lifecycle hooks.

    Subclasses override any of these hooks; every default implementation
    accepts arbitrary arguments and does nothing.
    """

    def __init__(self, *args, **kwargs):
        """Accept and ignore any constructor arguments."""

    def dispose(self, *args, **kwargs):
        """Hook: release held resources. Default is a no-op."""

    def prepare(self, *args, **kwargs):
        """Hook: one-time setup before processing. Default is a no-op."""

    def prepare_page(self, *args, **kwargs):
        """Hook: per-page setup. Default is a no-op."""

    def cleanup(self, *args, **kwargs):
        """Hook: teardown after processing. Default is a no-op."""
| 18.4375
| 44
| 0.545763
| 34
| 295
| 4.588235
| 0.382353
| 0.25641
| 0.448718
| 0.576923
| 0.628205
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0.308475
| 295
| 15
| 45
| 19.666667
| 0.764706
| 0
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0.454545
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
635db614b876a278d8dc5ab579837a8abe1360b2
| 35
|
py
|
Python
|
ispi/__init__.py
|
NateV/ispi
|
d8fd9c7cb8f77da23622d943033012fd1b196652
|
[
"MIT"
] | null | null | null |
ispi/__init__.py
|
NateV/ispi
|
d8fd9c7cb8f77da23622d943033012fd1b196652
|
[
"MIT"
] | null | null | null |
ispi/__init__.py
|
NateV/ispi
|
d8fd9c7cb8f77da23622d943033012fd1b196652
|
[
"MIT"
] | null | null | null |
from .speed_test import SpeedTest
| 11.666667
| 33
| 0.828571
| 5
| 35
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 35
| 2
| 34
| 17.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63aa4b99dfe7a81a03b9b13192816478e0837f8e
| 56
|
py
|
Python
|
app/api/__init__.py
|
BANOnotIT/helpdesk-bot
|
3aedbaf7d3adbb54f7c5172912a8cf188d3cb9d3
|
[
"MIT"
] | 1
|
2018-11-25T13:13:49.000Z
|
2018-11-25T13:13:49.000Z
|
app/api/__init__.py
|
BANOnotIT/helpdesk-bot
|
3aedbaf7d3adbb54f7c5172912a8cf188d3cb9d3
|
[
"MIT"
] | 4
|
2018-11-17T20:09:06.000Z
|
2018-12-01T11:32:55.000Z
|
app/api/__init__.py
|
BANOnotIT/helpdesk-bot
|
3aedbaf7d3adbb54f7c5172912a8cf188d3cb9d3
|
[
"MIT"
] | 1
|
2018-11-27T00:04:54.000Z
|
2018-11-27T00:04:54.000Z
|
from .base import *
from .tg import *
from .vk import *
| 14
| 19
| 0.678571
| 9
| 56
| 4.222222
| 0.555556
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 56
| 3
| 20
| 18.666667
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63bb407a58d08b397dc0c117ae5ea275259aaa7b
| 246
|
py
|
Python
|
Pytest/test/fixture/test_counter.py
|
koichi210/Python
|
9bc0be009bec15499540c1bf9ae802ffe1acfe10
|
[
"MIT"
] | null | null | null |
Pytest/test/fixture/test_counter.py
|
koichi210/Python
|
9bc0be009bec15499540c1bf9ae802ffe1acfe10
|
[
"MIT"
] | null | null | null |
Pytest/test/fixture/test_counter.py
|
koichi210/Python
|
9bc0be009bec15499540c1bf9ae802ffe1acfe10
|
[
"MIT"
] | null | null | null |
from main.counter import Counter
def test_add_01():
    # A fresh Counter's first increment yields 1.
    result = Counter().Increment()
    assert result == 1
def test_add_02():
    # Increment then decrement, each through a fresh Counter() handle.
    incremented = Counter().Increment()
    assert incremented == 1
    decremented = Counter().Decrement()
    assert decremented == 0
def test_add_03():
    # Decrementing first goes below zero.
    decremented = Counter().Decrement()
    assert decremented == -1
| 20.5
| 38
| 0.658537
| 33
| 246
| 4.727273
| 0.454545
| 0.333333
| 0.192308
| 0.294872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049751
| 0.182927
| 246
| 11
| 39
| 22.363636
| 0.726368
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.375
| true
| 0
| 0.125
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63d72e141b4604592bc4bacb4b51d3d8bd3d1635
| 929
|
py
|
Python
|
test/unit_test/test_record_class_test/test_record_class_test.py
|
JE-Chen/SeleniumWrapper_JE
|
192c457ad5b3b7e1e8aa0316a461f4dd532f5ffd
|
[
"MIT"
] | null | null | null |
test/unit_test/test_record_class_test/test_record_class_test.py
|
JE-Chen/SeleniumWrapper_JE
|
192c457ad5b3b7e1e8aa0316a461f4dd532f5ffd
|
[
"MIT"
] | null | null | null |
test/unit_test/test_record_class_test/test_record_class_test.py
|
JE-Chen/SeleniumWrapper_JE
|
192c457ad5b3b7e1e8aa0316a461f4dd532f5ffd
|
[
"MIT"
] | null | null | null |
from je_web_runner import TestObject
from je_web_runner.utils.test_object.test_object_record.test_object_record_class import test_object_record
# Record two named test objects, then read them back out of the record dict.
test_object_record.save_test_object("q", "name")
test_object_record.save_test_object("test_name", "name")
record_dict = test_object_record.test_object_record_dict
q_test_object: TestObject = record_dict.get("q")
test_name_test_object: TestObject = record_dict.get("test_name")
# Same stdout order as before: both objects, then name/type per object.
for recorded in (q_test_object, test_name_test_object):
    print(recorded)
for recorded in (q_test_object, test_name_test_object):
    print(recorded.test_object_name)
    print(recorded.test_object_type)
# Each recorded object must exist and carry the name/type it was saved with.
for recorded, expected_name in ((q_test_object, "q"), (test_name_test_object, "test_name")):
    assert recorded is not None
    assert recorded.test_object_name == expected_name
    assert recorded.test_object_type == "name"
| 46.45
| 106
| 0.865447
| 158
| 929
| 4.556962
| 0.139241
| 0.472222
| 0.194444
| 0.25
| 0.840278
| 0.769444
| 0.383333
| 0.269444
| 0.163889
| 0.163889
| 0
| 0
| 0.057051
| 929
| 19
| 107
| 48.894737
| 0.821918
| 0
| 0
| 0
| 0
| 0
| 0.049516
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
892ea44d1903731a8de8ef15092efe7c6ec77015
| 1,041
|
py
|
Python
|
tests/test_search.py
|
jjaroszsfdc/SalesforcePy
|
cb979fde5983adc6bafc92f294457afb052f6047
|
[
"BSD-3-Clause"
] | 88
|
2018-10-11T09:34:42.000Z
|
2022-03-09T21:21:06.000Z
|
tests/test_search.py
|
jjaroszsfdc/SalesforcePy
|
cb979fde5983adc6bafc92f294457afb052f6047
|
[
"BSD-3-Clause"
] | 52
|
2018-10-10T14:35:45.000Z
|
2022-03-11T16:52:19.000Z
|
tests/test_search.py
|
jjaroszsfdc/SalesforcePy
|
cb979fde5983adc6bafc92f294457afb052f6047
|
[
"BSD-3-Clause"
] | 41
|
2018-10-10T14:19:05.000Z
|
2022-03-11T16:48:28.000Z
|
import testutil
import responses
@responses.activate
def test_search():
    """search() must return the mocked body and a 200 status."""
    for mock_name in ("login_response_200", "search_response_200", "api_version_response_200"):
        testutil.add_response(mock_name)
    client = testutil.get_client()
    search_result = client.search(
        "FIND {sfdc_py} RETURNING Account(Id, Name) LIMIT 5")
    assert search_result[0] == testutil.mock_responses["search_response_200"]["body"]
    assert search_result[1].status == 200
@responses.activate
def test_search_with_proxy():
    """search() through a proxied client must also carry the proxy settings."""
    for mock_name in ("login_response_200", "search_response_200", "api_version_response_200"):
        testutil.add_response(mock_name)
    client = testutil.get_client_with_proxy()
    search_result = client.search(
        "FIND {sfdc_py} RETURNING Account(Id, Name) LIMIT 5")
    assert search_result[0] == testutil.mock_responses["search_response_200"]["body"]
    assert search_result[1].status == 200
    assert search_result[1].proxies.get("https") is testutil.proxies.get("https")
| 37.178571
| 85
| 0.748319
| 136
| 1,041
| 5.411765
| 0.264706
| 0.119565
| 0.154891
| 0.119565
| 0.855978
| 0.774457
| 0.774457
| 0.774457
| 0.774457
| 0.774457
| 0
| 0.041157
| 0.136407
| 1,041
| 27
| 86
| 38.555556
| 0.777531
| 0
| 0
| 0.695652
| 0
| 0
| 0.267051
| 0.04611
| 0
| 0
| 0
| 0
| 0.217391
| 1
| 0.086957
| false
| 0
| 0.086957
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8938e80260d4928b4d99e135e5f85687f0c38f47
| 40
|
py
|
Python
|
serialfuzzer/__init__.py
|
vhertz/SerialFuzzer
|
64d2d3d8c32bd5614a0ff8db58106ffdf270d86e
|
[
"MIT"
] | null | null | null |
serialfuzzer/__init__.py
|
vhertz/SerialFuzzer
|
64d2d3d8c32bd5614a0ff8db58106ffdf270d86e
|
[
"MIT"
] | null | null | null |
serialfuzzer/__init__.py
|
vhertz/SerialFuzzer
|
64d2d3d8c32bd5614a0ff8db58106ffdf270d86e
|
[
"MIT"
] | null | null | null |
from .serial_fuzzer import SerialFuzzer
| 20
| 39
| 0.875
| 5
| 40
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
89acde472bae79126845743a79e48dcba74dcdfb
| 31
|
py
|
Python
|
vedaseg/assemble/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | 2
|
2020-07-15T02:36:46.000Z
|
2021-03-08T03:18:26.000Z
|
vedaseg/assemble/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | null | null | null |
vedaseg/assemble/__init__.py
|
E18301194/vedaseg
|
c62c8ea46dbba12f03262452dd7bed22969cfe4e
|
[
"Apache-2.0"
] | null | null | null |
from .assemble import assemble
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9821466daca4b1ac516e443374402c029768d226
| 156
|
py
|
Python
|
enumb/__init__.py
|
tombulled/enumb
|
750c756a4f49afc6421568231ddc03d039d3db07
|
[
"MIT"
] | null | null | null |
enumb/__init__.py
|
tombulled/enumb
|
750c756a4f49afc6421568231ddc03d039d3db07
|
[
"MIT"
] | 1
|
2022-02-10T23:29:06.000Z
|
2022-03-11T22:29:06.000Z
|
enumb/__init__.py
|
tombulled/enumb
|
750c756a4f49afc6421568231ddc03d039d3db07
|
[
"MIT"
] | null | null | null |
from .bases import *
from .enums import *
from .generators import *
from .meta import *
from .models import *
from .types import *
| 22.285714
| 25
| 0.608974
| 18
| 156
| 5.277778
| 0.444444
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.314103
| 156
| 6
| 26
| 26
| 0.88785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
984bc015f0b050b2e28895e48657be27b1a63178
| 31
|
py
|
Python
|
odooku/backends/s3/__init__.py
|
davejrv/import
|
0dbca8f432d1a051a2bdb30c952cc26f1ffd74ae
|
[
"Apache-2.0"
] | 55
|
2017-09-11T06:48:39.000Z
|
2022-03-31T18:14:46.000Z
|
odooku/backends/s3/__init__.py
|
davejrv/import
|
0dbca8f432d1a051a2bdb30c952cc26f1ffd74ae
|
[
"Apache-2.0"
] | 4
|
2018-01-13T09:13:48.000Z
|
2019-09-28T10:24:43.000Z
|
odooku/backends/s3/__init__.py
|
davejrv/import
|
0dbca8f432d1a051a2bdb30c952cc26f1ffd74ae
|
[
"Apache-2.0"
] | 46
|
2017-12-30T22:31:45.000Z
|
2022-02-17T05:35:55.000Z
|
from .backend import S3Backend
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.129032
| 31
| 1
| 31
| 31
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
985192e1f05e547ae324a0d4d462d877c9127047
| 266
|
py
|
Python
|
data_processing/comment.py
|
FMsunyh/keras-retinanet
|
cb86a987237d3f6bd504004e2b186cf65606c890
|
[
"Apache-2.0"
] | null | null | null |
data_processing/comment.py
|
FMsunyh/keras-retinanet
|
cb86a987237d3f6bd504004e2b186cf65606c890
|
[
"Apache-2.0"
] | null | null | null |
data_processing/comment.py
|
FMsunyh/keras-retinanet
|
cb86a987237d3f6bd504004e2b186cf65606c890
|
[
"Apache-2.0"
] | null | null | null |
def intersection(right=(), left=()):
    """Return the distinct elements common to *right* and *left* as a list.

    Note: result order is unspecified (set-based); duplicates are dropped.
    """
    return list(set(right).intersection(set(left)))


def union(right=(), left=()):
    """Return the distinct elements appearing in *right* or *left* as a list."""
    return list(set(right).union(set(left)))


def difference(right=(), left=()):
    """Return the distinct elements of *right* that are not in *left*.

    BUGFIX: this was previously (mis)named ``union`` a second time, which
    silently shadowed the real ``union`` above and made it return a set
    difference instead.
    """
    return list(set(right).difference(set(left)))  # elements not in left
| 26.6
| 68
| 0.650376
| 37
| 266
| 4.675676
| 0.324324
| 0.156069
| 0.260116
| 0.32948
| 0.641619
| 0.641619
| 0.485549
| 0.485549
| 0.485549
| 0.485549
| 0
| 0
| 0.131579
| 266
| 9
| 69
| 29.555556
| 0.748918
| 0.06015
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
98694844740f7fb106ad47b992b0c01775284d66
| 537
|
py
|
Python
|
oauth/serializers.py
|
annisadevin/tugas-law
|
ae7163a6772e1c657b60ef149870593019ba994c
|
[
"Unlicense"
] | null | null | null |
oauth/serializers.py
|
annisadevin/tugas-law
|
ae7163a6772e1c657b60ef149870593019ba994c
|
[
"Unlicense"
] | null | null | null |
oauth/serializers.py
|
annisadevin/tugas-law
|
ae7163a6772e1c657b60ef149870593019ba994c
|
[
"Unlicense"
] | null | null | null |
from dataclasses import field
from rest_framework import serializers
from .models import *
# from rest_framework.parsers import BaseParser
# class UserAccountSerializer(serializers.ModelSerializer):
# class Meta:
# model = UserAccount
# fields = '__all__'
# class ProfileSerializer(serializers.ModelSerializer):
# class Meta:
# model = Profile
# fields = '__all__'
# class SessionSerializer(serializers.ModelSerializer):
# class Meta:
# model = Session
# fields = '__all__'
| 26.85
| 59
| 0.692737
| 48
| 537
| 7.458333
| 0.458333
| 0.217877
| 0.259777
| 0.293296
| 0.335196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227188
| 537
| 19
| 60
| 28.263158
| 0.862651
| 0.774674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
98b3d83559f96b1106dd64da40bfd8e16978754f
| 123
|
py
|
Python
|
setup.py
|
emanuele-albini/emutils
|
d5e3939da8a14b629879f06d87d4bd371e7117ab
|
[
"MIT"
] | null | null | null |
setup.py
|
emanuele-albini/emutils
|
d5e3939da8a14b629879f06d87d4bd371e7117ab
|
[
"MIT"
] | null | null | null |
setup.py
|
emanuele-albini/emutils
|
d5e3939da8a14b629879f06d87d4bd371e7117ab
|
[
"MIT"
] | null | null | null |
import setuptools
import pkg_resources
import os
# setup.cfg
# All project metadata lives in setup.cfg; this script only triggers the build.
# Require a setuptools version new enough to read declarative setup.cfg metadata.
pkg_resources.require('setuptools>=39.2')
# No arguments: setup() picks everything up from setup.cfg.
setuptools.setup()
| 15.375
| 41
| 0.804878
| 17
| 123
| 5.705882
| 0.588235
| 0.247423
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026786
| 0.089431
| 123
| 8
| 42
| 15.375
| 0.839286
| 0.073171
| 0
| 0
| 0
| 0
| 0.141593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7f55786b4181bcf71fef9b18e4e9a915e687679e
| 2,928
|
py
|
Python
|
tests/admin/test_phones.py
|
joshua-cerniglia/duo_client_python
|
9d91cdd505d9ed999f8d79305181587d626186bd
|
[
"Apache-2.0"
] | 96
|
2015-01-02T08:03:29.000Z
|
2022-03-28T13:31:39.000Z
|
tests/admin/test_phones.py
|
joshua-cerniglia/duo_client_python
|
9d91cdd505d9ed999f8d79305181587d626186bd
|
[
"Apache-2.0"
] | 87
|
2015-05-12T02:44:33.000Z
|
2022-01-20T05:53:27.000Z
|
tests/admin/test_phones.py
|
joshua-cerniglia/duo_client_python
|
9d91cdd505d9ed999f8d79305181587d626186bd
|
[
"Apache-2.0"
] | 110
|
2015-03-03T20:23:42.000Z
|
2021-12-16T23:01:29.000Z
|
from .. import util
import duo_client.admin
from .base import TestAdmin
class TestPhones(TestAdmin):
    """Tests for the Duo Admin API phone-listing endpoints.

    Every test issues a list request and checks the resulting HTTP call:
    method, URI, and pagination query parameters. The shared assertion
    boilerplate is factored into ``_assert_phones_request``.
    """

    def _assert_phones_request(self, response, limit, offset):
        """Assert *response* is a GET to /admin/v1/phones with the given
        pagination params (*limit*/*offset* as the string values expected
        in the query)."""
        self.assertEqual(response['method'], 'GET')
        (uri, args) = response['uri'].split('?')
        self.assertEqual(uri, '/admin/v1/phones')
        self.assertEqual(
            util.params_to_dict(args),
            {
                'account_id': [self.client.account_id],
                'limit': [limit],
                'offset': [offset],
            })

    def test_get_phones_generator(self):
        """ Test to get phones generator.
        """
        generator = self.client_list.get_phones_generator()
        response = next(generator)
        self._assert_phones_request(response, '100', '0')

    def test_get_phones(self):
        """ Test to get phones without pagination params.
        """
        response = self.client_list.get_phones()[0]
        self._assert_phones_request(response, '100', '0')

    def test_get_phones_with_limit(self):
        """ Test to get phones with pagination params.
        """
        response = self.client_list.get_phones(limit=20)[0]
        self._assert_phones_request(response, '20', '0')

    def test_get_phones_with_limit_offset(self):
        """ Test to get phones with pagination params.
        """
        response = self.client_list.get_phones(limit=20, offset=2)[0]
        self._assert_phones_request(response, '20', '2')

    def test_get_phones_with_offset(self):
        """ Test to get phones with pagination params.
        """
        # offset without limit: the client falls back to the defaults
        # (limit=100, offset=0) — presumably intentional; confirm against
        # the client implementation.
        response = self.client_list.get_phones(offset=9001)[0]
        self._assert_phones_request(response, '100', '0')
| 34.046512
| 66
| 0.527322
| 299
| 2,928
| 4.996656
| 0.133779
| 0.090361
| 0.052209
| 0.053548
| 0.880187
| 0.825971
| 0.825971
| 0.825971
| 0.771084
| 0.771084
| 0
| 0.018219
| 0.325137
| 2,928
| 85
| 67
| 34.447059
| 0.737854
| 0.081967
| 0
| 0.695652
| 0
| 0
| 0.101208
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 1
| 0.072464
| false
| 0
| 0.043478
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f6aba614b51f504d8ecd72fb8d6e85b0b4acc26
| 6,006
|
py
|
Python
|
azure_function_sample/test/test_event_processor.py
|
isabella232/azure-machine-learning-pipeline-observability-sample
|
ad8455fc5e6d0f6dc1a835cf29bb46a798d9ffe0
|
[
"MIT"
] | 1
|
2021-02-12T23:00:53.000Z
|
2021-02-12T23:00:53.000Z
|
azure_function_sample/test/test_event_processor.py
|
Azure-Samples/azure-machine-learning-pipeline-observability-sample
|
ad8455fc5e6d0f6dc1a835cf29bb46a798d9ffe0
|
[
"MIT"
] | 1
|
2021-02-24T03:35:41.000Z
|
2021-02-24T03:35:41.000Z
|
azure_function_sample/test/test_event_processor.py
|
isabella232/azure-machine-learning-pipeline-observability-sample
|
ad8455fc5e6d0f6dc1a835cf29bb46a798d9ffe0
|
[
"MIT"
] | 4
|
2020-09-08T17:24:44.000Z
|
2022-01-18T15:35:26.000Z
|
import os
import pytest
from aml_pipeline_observability_func_sample.event_processor import process_event, to_run_metrics
from mock import patch, call, Mock
def test_process_event_raises_value_error_without_run_id():
    """An event payload that carries no run id must be rejected."""
    empty_event = {}
    with pytest.raises(ValueError):
        process_event(empty_event)
@patch.dict(os.environ, {'SUBSCRIPTION_ID': 'test_subscription_id', 'AML_RESOURCE_GROUP': 'test_aml_resource_group',
                         'AML_WORKSPACE_NAME': 'test_aml_workspace_name'})
def test_to_run_metrics():
    """to_run_metrics flattens the event, the run details and the Azure
    environment variables (patched above) into one metrics dict."""
    # arrange: event supplies only the experiment identity
    event = {
        'experimentName': 'test_experiment',
        'experimentId': 'test_experiment_id'
    }
    # run details for a completed step run (shape mirrors what the other
    # tests feed via mock_run_obj.get_details.return_value)
    run_details = {
        'runId': 'test_run_id',
        'startTimeUtc': '2020-08-19T08:45:00.585171Z',
        'endTimeUtc': '2020-08-19T08:47:00.585171Z',
        'status': 'Completed',
        'target': 'test-compute',
        'properties': {
            'azureml.runsource': 'Step-Run',
            'StepType': 'PythonScript'
        }
    }
    # expected merge of env vars + event + run details, with renamed keys
    expected_run_metrics = {
        'resourceGroup': 'test_aml_resource_group',
        'amlWorkSpace': 'test_aml_workspace_name',
        'subscriptions': 'test_subscription_id',
        'run_id': 'test_run_id',
        'start_time_utc': '2020-08-19T08:45:00.585171Z',
        'end_time_utc': '2020-08-19T08:47:00.585171Z',
        'experimentName': 'test_experiment',
        'experimentId': 'test_experiment_id',
        'status': 'Completed',
        'compute_target': 'test-compute',
        'run_type': 'Step-Run',
        'step_type': 'PythonScript'
    }
    # act
    actual_run_metrics = to_run_metrics(run_details, event)
    # assert
    assert actual_run_metrics == expected_run_metrics
@patch('aml_pipeline_observability_func_sample.event_processor.get_workspace')
@patch('aml_pipeline_observability_func_sample.event_processor.save_to_app_insight')
@patch('aml_pipeline_observability_func_sample.event_processor.Experiment')
@patch('aml_pipeline_observability_func_sample.event_processor.Run')
@patch.dict(os.environ, {'APPINSIGHTS_CONNECTION_STRING': 'test_connection'})
def test_save_to_app_insight_not_called_if_end_time_utc_is_missing_in_run_details(mock_run, mock_experiment,
                                                                                  mock_save_to_app_insight,
                                                                                  mock_get_workspace):
    """When the run details lack 'endTimeUtc', process_event must not push
    anything to Application Insights.

    NOTE: mock parameters arrive in reverse decorator order (innermost
    @patch first), so mock_run is the bottom @patch('...Run').
    """
    # arrange
    event = {
        'runId': 'test_run_id',
        'experimentName': 'test',
        'experimentId': 'test_exp_id'
    }
    mock_get_workspace.return_value = 'test_workspace'
    mock_experiment.return_value = 'test_experiment'
    mock_run_obj = Mock()
    mock_run.return_value = mock_run_obj
    # empty details: no 'endTimeUtc' key present
    mock_run_obj.get_details.return_value = {}
    expected_experiment_call = call(workspace='test_workspace', name='test')
    expected_run_call = call('test_experiment', 'test_run_id')
    # act
    process_event(event)
    # assert: nothing saved, but the experiment/run were still looked up
    assert mock_save_to_app_insight.call_count == 0
    assert expected_experiment_call in mock_experiment.mock_calls
    assert expected_run_call in mock_run.mock_calls
@patch('aml_pipeline_observability_func_sample.event_processor.to_run_metrics')
@patch('aml_pipeline_observability_func_sample.event_processor.get_workspace')
@patch('aml_pipeline_observability_func_sample.event_processor.save_to_app_insight')
@patch('aml_pipeline_observability_func_sample.event_processor.Experiment')
@patch('aml_pipeline_observability_func_sample.event_processor.Run')
@patch.dict(os.environ, {'APPINSIGHTS_CONNECTION_STRING': 'test_connection'})
def test_save_to_app_insight_called_if_end_time_utc_is_present_in_run_details(mock_run, mock_experiment,
                                                                              mock_save_to_app_insight,
                                                                              mock_get_workspace, mock_to_run_metrics):
    """When the run details include 'endTimeUtc' (run finished),
    process_event must compute metrics and save them to App Insights
    exactly once, using the patched connection string.

    NOTE: mock parameters arrive in reverse decorator order (innermost
    @patch first), so mock_run is the bottom @patch('...Run').
    """
    # arrange
    event = {
        'runId': 'test_run_id',
        'experimentName': 'test_experiment',
        'experimentId': 'test_experiment_id'
    }
    # completed run: 'endTimeUtc' present, so the save path is taken
    dummy_run_details = {
        'runId': 'test_run_id',
        'startTimeUtc': '2020-08-19T08:45:00.585171Z',
        'endTimeUtc': '2020-08-19T08:47:00.585171Z',
        'status': 'Completed',
        'target': 'test-compute',
        'properties': {
            'azureml.runsource': 'Step-Run',
            'StepType': 'PythonScript'
        }
    }
    # canned to_run_metrics() output (to_run_metrics itself is patched)
    dummy_run_metrics = {
        'resourceGroup': 'test_aml_resource_group',
        'amlWorkSpace': 'test_aml_workspace_name',
        'subscriptions': 'test_subscription_id',
        'run_id': 'test_run_id',
        'start_time_utc': '2020-08-19T08:45:00.585171Z',
        'end_time_utc': '2020-08-19T08:47:00.585171Z',
        'experimentName': 'test_experiment',
        'experimentId': 'test_experiment_id',
        'status': 'Completed',
        'compute_target': 'test-compute',
        'run_type': 'Step-Run',
        'step_type': 'PythonScript'
    }
    mock_get_workspace.return_value = 'test_workspace'
    mock_experiment.return_value = 'test_experiment'
    mock_to_run_metrics.return_value = dummy_run_metrics
    mock_run_obj = Mock()
    mock_run.return_value = mock_run_obj
    mock_run_obj.get_details.return_value = dummy_run_details
    expected_experiment_call = call(workspace='test_workspace', name='test_experiment')
    expected_run_call = call('test_experiment', 'test_run_id')
    expected_to_run_metrics_call = call(dummy_run_details, event)
    expected_save_to_app_insight_call = call(dummy_run_metrics, 'test_connection')
    # act
    process_event(event)
    # assert: saved exactly once, with the expected collaborator calls
    assert mock_save_to_app_insight.call_count == 1
    assert expected_save_to_app_insight_call in mock_save_to_app_insight.mock_calls
    assert expected_experiment_call in mock_experiment.mock_calls
    assert expected_run_call in mock_run.mock_calls
    assert expected_to_run_metrics_call in mock_to_run_metrics.mock_calls
| 38.5
| 119
| 0.678155
| 705
| 6,006
| 5.302128
| 0.131915
| 0.042804
| 0.026485
| 0.047084
| 0.830658
| 0.808186
| 0.776083
| 0.738095
| 0.700375
| 0.64794
| 0
| 0.034578
| 0.219947
| 6,006
| 155
| 120
| 38.748387
| 0.763287
| 0.012488
| 0
| 0.644628
| 0
| 0
| 0.372191
| 0.170806
| 0
| 0
| 0
| 0
| 0.07438
| 1
| 0.033058
| false
| 0
| 0.033058
| 0
| 0.066116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f9b409f9afbf9febc9e55fc835daeea02c46485
| 117
|
py
|
Python
|
pre_setup.py
|
ChrisQiqiang/allocation
|
762d11052fbd2a71560a909ca4760f65da6866c3
|
[
"Apache-2.0"
] | 10
|
2021-12-31T05:42:50.000Z
|
2022-01-11T08:20:21.000Z
|
pre_setup.py
|
ChrisQiqiang/allocation
|
762d11052fbd2a71560a909ca4760f65da6866c3
|
[
"Apache-2.0"
] | null | null | null |
pre_setup.py
|
ChrisQiqiang/allocation
|
762d11052fbd2a71560a909ca4760f65da6866c3
|
[
"Apache-2.0"
] | null | null | null |
# For internal use. Please do not modify this file.
def setup():
    """Placeholder pre-setup hook; deliberately a no-op."""
    return None
def extra_make_option():
    """Return extra options for make; none by default."""
    options = ""
    return options
| 14.625
| 51
| 0.675214
| 17
| 117
| 4.529412
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 117
| 7
| 52
| 16.714286
| 0.855556
| 0.418803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.