hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
50678d52ee9dd55cfdf0952bdcb40a21409e0aec
| 57
|
py
|
Python
|
farabio/core/__init__.py
|
tuttelikz/farabi
|
5b65cdf39ceecbd69ae759d030b132ee74661b48
|
[
"Apache-2.0"
] | 53
|
2021-04-06T17:57:12.000Z
|
2022-03-07T17:45:45.000Z
|
farabio/core/__init__.py
|
tuttelikz/farabi
|
5b65cdf39ceecbd69ae759d030b132ee74661b48
|
[
"Apache-2.0"
] | 1
|
2022-03-07T19:48:44.000Z
|
2022-03-07T19:49:47.000Z
|
farabio/core/__init__.py
|
tuttelikz/farabi
|
5b65cdf39ceecbd69ae759d030b132ee74661b48
|
[
"Apache-2.0"
] | 2
|
2021-12-06T14:42:44.000Z
|
2021-12-07T11:33:14.000Z
|
from .basetrainer import *
from .convnettrainer import *
| 19
| 29
| 0.789474
| 6
| 57
| 7.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 57
| 2
| 30
| 28.5
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5076516b6d1bd4ebe53f85e4233e00f0c32c88d2
| 52
|
py
|
Python
|
cachy/__init__.py
|
the-dan/cachy
|
7e6620a1af4d82c4d3caae6acab998f725d0d9e5
|
[
"MIT"
] | null | null | null |
cachy/__init__.py
|
the-dan/cachy
|
7e6620a1af4d82c4d3caae6acab998f725d0d9e5
|
[
"MIT"
] | null | null | null |
cachy/__init__.py
|
the-dan/cachy
|
7e6620a1af4d82c4d3caae6acab998f725d0d9e5
|
[
"MIT"
] | null | null | null |
from .data import *
from .http import flappy_network
| 26
| 32
| 0.807692
| 8
| 52
| 5.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134615
| 52
| 2
| 32
| 26
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
508d41267cfc3c61be1e84437e4b2f2b96665225
| 400
|
py
|
Python
|
tests/rectangle_into_squares_test.py
|
eliflores/coding-katas-python
|
db978c221d849483a4047e80dc843848fe38f05c
|
[
"MIT"
] | null | null | null |
tests/rectangle_into_squares_test.py
|
eliflores/coding-katas-python
|
db978c221d849483a4047e80dc843848fe38f05c
|
[
"MIT"
] | 3
|
2022-03-24T20:30:30.000Z
|
2022-03-24T22:00:33.000Z
|
tests/rectangle_into_squares_test.py
|
eliflores/coding-katas-python
|
db978c221d849483a4047e80dc843848fe38f05c
|
[
"MIT"
] | null | null | null |
from katas.rectangle_into_squares import squares_in_rectangle
def test_square_in_rectangles_when_length_and_width_are_equal():
assert squares_in_rectangle(5, 5) is None
def test_square_in_rectangles():
assert squares_in_rectangle(5, 3) == [3, 2, 1, 1]
assert squares_in_rectangle(20, 14) == [14, 6, 6, 2, 2, 2]
assert squares_in_rectangle(37, 14) == [14, 14, 9, 5, 4, 1, 1, 1, 1]
| 33.333333
| 72
| 0.7225
| 70
| 400
| 3.785714
| 0.428571
| 0.169811
| 0.339623
| 0.362264
| 0.377358
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10119
| 0.16
| 400
| 11
| 73
| 36.363636
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.285714
| true
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
509e3c9f822d9807eaa510e363d89b315c4b4e2b
| 6,402
|
py
|
Python
|
unit_tests/view_modify_land_charge/test_edit_charge_date.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2019-10-03T13:58:29.000Z
|
2019-10-03T13:58:29.000Z
|
unit_tests/view_modify_land_charge/test_edit_charge_date.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | null | null | null |
unit_tests/view_modify_land_charge/test_edit_charge_date.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2021-04-11T05:24:57.000Z
|
2021-04-11T05:24:57.000Z
|
from maintain_frontend import main
from flask import url_for
from flask_testing import TestCase
from unit_tests.utilities import Utilities
from unittest.mock import patch
from maintain_frontend.dependencies.session_api.session import Session
from maintain_frontend.models import LocalLandChargeItem
from maintain_frontend.constants.permissions import Permissions
from datetime import date
HTML = 'charge_date.html'
NO_VALIDATION_ERRORS = []
VALIDATION_ERRORS = {'date': ['some error message']}
CHARGE_DAY = 'some charge day'
CHARGE_MONTH = 'some charge month'
CHARGE_YEAR = 'some charge year'
class TestEditChargeCreationDate(TestCase):
def create_app(self):
Utilities.mock_session_cookie_flask_test(self)
return main.app
def test_get(self):
"""should respond with a 200 and render the expected template"""
self.mock_session.return_value.add_charge_state = LocalLandChargeItem()
self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
response = self.client.get(url_for('modify_land_charge.get_charge_date'))
self.status = self.assert_status(response, 200)
self.assert_template_used(HTML)
def test_get_with_date(self):
"""should respond with a 200 and render the expected template"""
charge = LocalLandChargeItem()
charge.charge_creation_date = date(2011, 1, 1)
self.mock_session.return_value.add_charge_state = charge
self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
response = self.client.get(url_for('modify_land_charge.get_charge_date'))
self.status = self.assert_status(response, 200)
self.assert_template_used(HTML)
def test_get_redirects_to_error_when_state_is_none(self):
self.client.set_cookie('localhost', Session.session_cookie_name,
'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
self.mock_session.return_value.add_charge_state = None
response = self.client.get(url_for('modify_land_charge.get_charge_date'))
self.assert_status(response, 302)
self.assertRedirects(response, '/error')
@patch('maintain_frontend.view_modify_land_charge.edit_charge_date.ChargeDateValidator')
def test_post_with_no_validation_errors(self, mock_validator):
"""should respond with a 302 and redirect to the add_land_charge_date page"""
self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
charge = LocalLandChargeItem()
charge.local_land_charge = 1
self.mock_session.return_value.add_charge_state = charge
self.mock_session.return_value.edited_fields = []
mock_validator.validate.return_value.errors = NO_VALIDATION_ERRORS
response = self.client.post(url_for('modify_land_charge.post_charge_date'), data={
"date-day": "01",
"date-month": "01",
"date-year": "2001"
})
self.assertTrue('charge_creation_date' in self.mock_session.return_value.edited_fields)
self.assertStatus(response, 302)
self.assertRedirects(response, url_for('modify_land_charge.modify_land_charge', local_land_charge="LLC-1"))
@patch('maintain_frontend.view_modify_land_charge.edit_charge_date.ChargeDateValidator')
def test_post_with_no_validation_errors_empty_date(self, mock_validator):
"""should respond with a 302 and redirect to the add_land_charge_date page"""
self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
charge = LocalLandChargeItem()
charge.local_land_charge = 1
charge_date = {
'day': "01",
'month': "12",
'year': "2016"
}
charge.charge_creation_date = charge_date
self.mock_session.return_value.add_charge_state = charge
self.mock_session.return_value.edited_fields = []
mock_validator.validate.return_value.errors = NO_VALIDATION_ERRORS
response = self.client.post(url_for('modify_land_charge.post_charge_date'), data={
"date-day": "",
"date-month": "",
"date-year": ""
})
self.assertTrue('charge_creation_date' in self.mock_session.return_value.edited_fields)
self.assertStatus(response, 302)
self.assertRedirects(response, url_for('modify_land_charge.modify_land_charge', local_land_charge="LLC-1"))
@patch('maintain_frontend.view_modify_land_charge.edit_charge_date.ChargeDateValidator')
def test_post_with_validation_errors(self, mock_validator):
"""Should respond with a 200, render the expected template, and return the expected error object."""
self.mock_session.return_value.add_charge_state = LocalLandChargeItem()
self.client.set_cookie('localhost', Session.session_cookie_name, 'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
mock_validator.validate.return_value.errors = VALIDATION_ERRORS
response = self.client.post(url_for('modify_land_charge.post_charge_date'), data={
'date-day': CHARGE_DAY,
'date-month': CHARGE_MONTH,
'date-year': CHARGE_YEAR
})
mock_validator.validate.assert_called_with(CHARGE_DAY, CHARGE_MONTH, CHARGE_YEAR)
self.assert_status(response, 200)
self.assert_template_used(HTML)
self.assert_context('validation_errors', VALIDATION_ERRORS)
def test_post_redirects_to_error_when_state_is_none(self):
self.client.set_cookie('localhost', Session.session_cookie_name,
'cookie_value')
self.mock_session.return_value.user.permissions = [Permissions.vary_llc]
self.mock_session.return_value.add_charge_state = None
response = self.client.post(url_for('modify_land_charge.post_charge_date'))
self.assert_status(response, 302)
self.assertRedirects(response, '/error')
| 44.151724
| 115
| 0.7204
| 795
| 6,402
| 5.462893
| 0.132075
| 0.038683
| 0.062169
| 0.087037
| 0.767672
| 0.766981
| 0.758232
| 0.758232
| 0.758232
| 0.746489
| 0
| 0.011891
| 0.185567
| 6,402
| 144
| 116
| 44.458333
| 0.821059
| 0.055608
| 0
| 0.556604
| 0
| 0
| 0.161296
| 0.091362
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0.075472
| false
| 0
| 0.084906
| 0
| 0.179245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50b270e6a6533dd0c4f5022189c765a0280b8353
| 4,361
|
py
|
Python
|
toggle.py
|
lzl257/gesis_wikiwho
|
3c1367b108aebbedad8d55e6018b0c92dddf7a7e
|
[
"MIT"
] | null | null | null |
toggle.py
|
lzl257/gesis_wikiwho
|
3c1367b108aebbedad8d55e6018b0c92dddf7a7e
|
[
"MIT"
] | null | null | null |
toggle.py
|
lzl257/gesis_wikiwho
|
3c1367b108aebbedad8d55e6018b0c92dddf7a7e
|
[
"MIT"
] | null | null | null |
# functions for hiding
from IPython.display import HTML
import random
def hide_toggle(for_next=False, hiding_text='Toggle show/hide'):
this_cell = """$('div.cell.code_cell.rendered.selected')"""
next_cell = this_cell + '.next()'
toggle_text = hiding_text # text shown on toggle link
target_cell = this_cell # target cell to control with toggle
js_hide_current = '' # bit of JS to permanently hide code in current cell (only when toggling next cell)
if for_next:
target_cell = next_cell
toggle_text += ' next cell'
js_hide_current = this_cell + '.find("div.input").hide();'
js_f_name = 'code_toggle_{}'.format(str(random.randint(1,2**64)))
html = """
<script>
function {f_name}() {{
{cell_selector}.find('div.input').toggle();
}}
{js_hide_current}
</script>
<a href="javascript:{f_name}()">{toggle_text}</a>
""".format(
f_name=js_f_name,
cell_selector=target_cell,
js_hide_current=js_hide_current,
toggle_text=toggle_text
)
return HTML(html)
def hide_toggle2(for_next=False, for_next_next=False, for_next_next_next=False, hiding_text='Toggle show/hide'):
this_cell = """$('div.cell.code_cell.rendered.selected')"""
next_cell = this_cell + '.next()'
next_next = this_cell + '.next().next()'
next_next_next = this_cell + '.next().next().next()'
toggle_text = hiding_text # text shown on toggle link
target_cell = this_cell # target cell to control with toggle
js_hide_current = '' # bit of JS to permanently hide code in current cell (only when toggling next cell)
if for_next:
target_cell = next_cell
toggle_text += ' next cell'
js_hide_current = this_cell + '.find("div.input").hide();'
if for_next_next:
target_cell = next_next
toggle_text += ' next next cell'
js_hide_current = this_cell + '.find("div.input").hide();'
if for_next_next_next:
target_cell = next_next_next
toggle_text += ' next next next cell'
js_hide_current = this_cell + '.find("div.input").hide();'
js_f_name = 'code_toggle_{}'.format(str(random.randint(1,2**64)))
js_f_name2 = 'code_toggle_{}'.format(str(random.randint(1,3**70)))
html = """
<script>
function {f_name}() {{
{cell_selector}.find('div.input').toggle();
{cell_selector}.find('div.output').toggle()
}}
{js_hide_current};
{f_name}()
</script>
""".format(
f_name=js_f_name,
cell_selector=target_cell,
js_hide_current=js_hide_current,
toggle_text=toggle_text
)
return HTML(html)
def hide_cell(hide_code=True):
if hide_code:
html = """
<script>
var code_show=true;
function code_toggle() {
$('div.prompt').hide(); // always hide prompt
if (code_show){
$('div.input').hide();
} else {
$('div.input').show();
}
code_show = !code_show
}
$( document ).ready(code_toggle);
</script>
"""
else:
html = """
<script>
var code_show=false;
function code_toggle() {
$('div.prompt').hide(); // always hide prompt
if (code_show){
$('div.input').hide();
} else {
$('div.input').show();
}
code_show = !code_show
}
$( document ).ready(code_toggle);
</script>
"""
return display(HTML(html))
def show_all():
html = """
<script>
function code_toggle() {
$('div.prompt').show(); // always hide prompt
$('div.input').show();
$('div.output').show();
}
</script>
<a href="javascript:code_toggle();">Show all the code</a>
"""
return display(HTML(html))
| 31.832117
| 112
| 0.517313
| 487
| 4,361
| 4.361396
| 0.143737
| 0.07533
| 0.073446
| 0.048023
| 0.815443
| 0.766008
| 0.733992
| 0.718456
| 0.693974
| 0.693974
| 0
| 0.004952
| 0.351754
| 4,361
| 137
| 113
| 31.832117
| 0.746374
| 0.070167
| 0
| 0.696429
| 0
| 0
| 0.551877
| 0.163785
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.017857
| 0
| 0.089286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50b3c4a69bca4b0021af805bb7a5ba37f04a48aa
| 14,138
|
py
|
Python
|
sourcecode/utils/font_set.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | 7
|
2021-04-12T10:48:14.000Z
|
2021-12-26T10:17:18.000Z
|
sourcecode/utils/font_set.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | null | null | null |
sourcecode/utils/font_set.py
|
JulianJuaner/VecFontTransfer
|
266b969878cdecc821d1d6e240e87d4caa151d84
|
[
"MIT"
] | null | null | null |
WESTERN_CHAR = '1234567890-=!@#¥%……&*()~:"{{}}[]|\\?/<>,.;\'+abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
CHINESE_CHAR = [
'啰','阿','唉','哀','矮','肮','胞','并','朝','炒','匙','崇','初','雌','簇','脆','旦','氮','盗','等','叠','懂','洞','遏','伐','泛','菲','诽','沸','愤','俘','俯','腐','妇','赣','刚','稿','构','龟','浩','衡','侯','护','还','会','汇','荤','绩','季','茧','见','匠','皆','介','届','绢','可','寇','括','喇','栏','磊','历','谅','疗','辽','劣','六','窿','屡','滤','论','芒','梅','美','蜜','莫','牡','墓','哪','捏','欧','沛','碰','凄','乞','寝','渠','取','确','汝','色','啥','杉','善','生','实','适','漱','嘶','苏','速','遂','苔','瘫','探','腾','偷','退','脱','惋','旺','畏','位','纹','我','舞','享','羞','婿','宣','绚','循','岩','阎','宴','椰','印','莹','颖','勇','予','豫','援','榨','照','者','征','址','致','肿','铸','拽','缀','子','租','足','组','醉','座','匾','螃','瑞','啊','埃','挨','哎','癌','蔼','艾','碍','爱','隘','鞍','氨','安','俺','按','暗','岸','案','昂','凹','熬','袄','傲','奥','懊','澳','芭','捌','扒','叭','吧','八','疤','巴','拔','跋','靶','把','耙','坝','霸','罢','爸','白','柏','百','摆','败','拜','斑','班','搬','扳','般','颁','板','版','扮','拌','伴','瓣','半','办','绊','邦','帮','梆','榜','膀','绑','棒','磅','蚌','镑','傍','谤','苞','包','褒','剥','薄','雹','保','堡','饱','宝','抱','报','暴','豹','鲍','爆','杯','碑','悲','卑','北','辈','背','贝','倍','狈','备','惫','被','奔','本','笨','崩','绷','泵','蹦','逼','鼻','比','鄙','笔','彼','碧','蔽','毕','毙','币','庇','痹','闭','弊','必','辟','壁','臂','避','鞭','边','编','贬','扁','便','变','辨','辩','辫','遍','标','彪','表','鳖','憋','别','瘪','彬','斌','濒','滨','宾','兵','冰','柄','丙','秉','饼','瞭','病','玻','菠','播','拨','波','博','勃','搏','伯','舶','脖','膊','渤','泊','驳','捕','卜','哺','补','埠','不','布','步','簿','部','怖','擦','猜','裁','材','才','财','睬','踩','采','彩','菜','蔡','餐','参','蚕','残','惭','惨','灿','苍','舱','仓','沧','藏','操','糙','槽','曹','草','厕','策','侧','册','测','层','蹭','插','叉','茬','茶','查','察','岔','差','拆','柴','豺','搀','掺','蝉','馋','缠','铲','产','阐','颤','昌','猖','场','尝','常','长','偿','肠','厂','敞','畅','唱','倡','超','抄','钞','嘲','潮','巢','吵','车','扯','撤','彻','澈','臣','辰','尘','晨','忱','沉','陈','趁','衬','撑','称','城','橙','成','呈','乘','程','惩','澄','诚','承','逞','秤','吃','痴','持','池','迟','弛','驰','耻','齿','侈','尺','赤','翅','斥','充','冲','虫','宠','抽','酬','畴','稠','愁','筹','仇','绸','瞅','丑','臭','出','橱','厨','锄','雏','除','楚','础','储','矗','触','处','揣','川','穿','传','船','喘','串','疮','窗','幢','床','闯','创',
'吹','炊','捶','锤','垂','春','椿','醇','唇','淳','纯','蠢','戳','绰','磁','辞','慈','瓷','词','此','刺','赐','次','聪','葱','囱','匆','从','丛','凑','粗','醋','促','篡','窜','摧','崔','催','粹','翠','村','存','寸','撮','搓','措','挫','错','搭','达','答','瘩','打','大','呆','歹','戴','带','代','贷','袋','待','逮','怠','耽','担','丹','单','胆','但','淡','诞','弹','蛋','当','挡','党','荡','档','刀','捣','蹈','倒','岛','祷','导','到','稻','悼','道','德','得','的','蹬','灯','登','瞪','凳','邓','堤','低','滴','迪','敌','笛','涤','抵','底','地','蒂','第','帝','弟','递','缔','颠','掂','滇','碘','点','典','垫','电','佃','甸','店','惦','奠','淀','殿','叼','雕','刁','掉','吊','钓','调','跌','爹','碟','蝶','迭','谍','丁','盯','叮','钉','顶','鼎','定','订','丢','东','冬','董','动','栋','冻','兜','抖','斗','陡','豆','逗','痘','都','督','毒','独','读','堵','睹','赌','杜','镀','肚','度','渡','妒','端','短','锻','段','断','缎','堆','兑','队','对','墩','吨','蹲','敦','顿','囤','钝','盾','哆','多','夺','垛','躲','朵','跺','舵','惰','堕','蛾','峨','鹅','俄','额','讹','娥','恶','扼','鄂','饿','恩','而','儿','耳','尔','饵','二','贰','发','罚','筏','乏','阀','法','帆','番','翻','矾','繁','凡','烦','反','返','范','贩','犯','饭','坊','芳','方','肪','房','防','妨','仿','访','纺','放','非','啡','飞','肥','匪','吠','肺','废','费','芬','吩','氛','分','纷','坟','焚','粉','奋','份','忿','粪','丰','封','枫','蜂','峰','锋','风','疯','逢','冯','缝','讽','奉','凤','佛','否','夫','敷','肤','孵','扶','拂','辐','幅','符','伏','服','浮','福','袱','弗','甫','抚','辅','斧','脯','府','赴','副','覆','赋','复','傅','付','父','腹','负','富','附','缚','咐','该','改','概','钙','盖','溉','干','甘','杆','柑','竿','肝','赶','感','秆','敢','冈','钢','缸','肛','纲','岗','港','杠','高','膏','羔','糕','搞','镐','告','哥','歌','搁','戈','鸽','胳','疙','割','革','葛','格','蛤','阁','隔','个','各','给','根','跟','耕','更','庚','羹','埂','耿','梗','工','攻','功','恭','供','躬','公','宫','弓','巩','汞','拱','贡','共','钩','勾','沟','苟','狗','垢','购','够','辜','菇','咕','估','沽','孤','姑','鼓','古','骨','谷','股','故','顾','固','雇','刮','瓜','寡','挂','褂','乖','拐','怪','棺','关','官','冠','观','管','馆','罐','惯','灌','贯','光','广','逛','瑰','规','硅','归','闺','轨','鬼','诡','癸','桂','柜','跪','贵','滚','棍','锅','郭','国','果','裹','过','哈','孩','海','亥','害','骇','酣','憨','韩','含','涵','寒','函','喊','罕','翰','撼','捍','旱','憾','悍','焊','汗','汉','夯','杭','航','嚎','豪','毫','好','耗','号','呵',
'喝','荷','核','禾','和','何','合','盒','河','赫','褐','鹤','贺','嘿','黑','痕','很','狠','恨','哼','横','恒','轰','哄','烘','虹','鸿','洪','宏','弘','红','喉','猴','吼','厚','候','后','呼','乎','忽','壶','葫','胡','蝴','狐','糊','湖','弧','虎','唬','互','沪','户','花','哗','华','猾','滑','画','划','化','话','槐','徊','怀','淮','坏','欢','环','缓','换','患','唤','痪','焕','涣','宦','幻','荒','慌','黄','蝗','簧','皇','凰','惶','煌','晃','幌','恍','谎','灰','挥','辉','徽','恢','回','毁','悔','慧','卉','惠','晦','贿','秽','讳','诲','绘','昏','婚','魂','浑','混','豁','活','伙','火','获','或','惑','霍','货','祸','击','圾','基','机','畸','稽','积','箕','肌','饥','迹','激','讥','鸡','缉','吉','极','棘','辑','籍','集','及','急','疾','即','嫉','级','挤','几','脊','己','技','冀','祭','剂','济','寄','寂','计','记','既','忌','际','妓','继','纪','嘉','夹','佳','家','加','颊','贾','甲','钾','假','稼','价','架','驾','嫁','歼','监','坚','尖','间','煎','兼','肩','艰','奸','检','柬','碱','拣','捡','简','俭','剪','减','荐','槛','鉴','践','贱','键','箭','件','健','舰','剑','渐','溅','涧','建','僵','姜','将','浆','江','疆','蒋','桨','奖','讲','酱','降','蕉','椒','礁','焦','胶','交','郊','浇','骄','娇','嚼','搅','矫','侥','脚','狡','角','饺','缴','绞','剿','教','酵','轿','较','叫','窖','揭','接','秸','街','阶','截','劫','节','桔','杰','捷','睫','竭','洁','结','解','姐','戒','藉','芥','界','借','诫','巾','筋','斤','金','今','津','襟','紧','锦','仅','谨','进','晋','禁','近','浸','尽','劲','荆','兢','茎','睛','晶','鲸','京','惊','精','经','井','警','景','颈','静','境','敬','镜','径','靖','竟','竞','净','窘','揪','究','纠','玖','韭','久','灸','九','酒','救','旧','臼','舅','就','疚','鞠','拘','居','驹','菊','局','矩','举','沮','聚','拒','据','巨','具','距','锯','俱','句','惧','炬','剧','捐','鹃','娟','倦','眷','卷','掘','倔','爵','觉','决','诀','绝','均','菌','钧','军','君','峻','俊','竣','骏','咖','卡','开','揩','楷','凯','慨','刊','堪','勘','坎','砍','看','康','慷','糠','扛','抗','亢','炕','考','拷','烤','靠','坷','苛','棵','磕','颗','科','壳','咳','渴','克','刻','客','课','肯','啃','垦','恳','坑','吭','空','恐','孔','控','抠','口','扣','枯','哭','窟','苦','酷','库','裤','夸','垮','挎','跨','块','筷','快','宽','款','筐','狂','框','矿','眶','旷','况','亏','盔','窥','葵','魁','馈','愧','溃','坤','昆','捆','困','扩','廓','阔','垃','拉','蜡','腊','辣','啦','莱','来','赖','蓝','婪','拦','篮','兰','澜','揽','览','懒','缆','烂','滥','琅','榔','狼','廊','郎','朗','浪','捞','劳','牢','老',
'姥','酪','烙','涝','勒','乐','雷','蕾','累','垒','擂','肋','类','泪','棱','冷','厘','梨','犁','黎','篱','狸','离','漓','理','李','里','鲤','礼','莉','荔','吏','栗','丽','厉','励','砾','利','例','俐','痢','立','粒','沥','隶','力','璃','哩','俩','联','莲','连','镰','廉','怜','帘','敛','脸','链','恋','炼','练','粮','凉','梁','粱','良','两','辆','量','晾','亮','撩','聊','僚','寥','了','料','列','裂','烈','猎','琳','林','磷','临','邻','鳞','淋','凛','赁','吝','拎','玲','菱','零','龄','铃','伶','羚','凌','灵','陵','岭','领','另','令','溜','琉','榴','硫','馏','留','刘','瘤','流','柳','龙','聋','咙','笼','隆','垄','拢','楼','娄','搂','篓','漏','陋','芦','卢','颅','庐','炉','卤','虏','鲁','碌','露','路','赂','鹿','禄','录','陆','驴','吕','铝','侣','旅','履','缕','虑','氯','律','率','绿','峦','卵','乱','掠','略','抡','轮','伦','仑','沦','萝','螺','罗','逻','锣','箩','骡','裸','落','洛','骆','络','妈','麻','玛','码','蚂','马','骂','嘛','吗','埋','买','麦','卖','迈','脉','瞒','馒','蛮','满','蔓','曼','慢','漫','茫','盲','氓','忙','莽','猫','茅','锚','毛','矛','卯','茂','冒','帽','貌','贸','么','玫','枚','霉','煤','没','眉','媒','每','昧','妹','媚','门','闷','们','萌','蒙','檬','盟','锰','猛','梦','孟','眯','靡','迷','谜','弥','米','秘','觅','泌','密','棉','眠','绵','免','勉','缅','面','苗','描','瞄','藐','秒','渺','庙','妙','蔑','灭','民','皿','敏','悯','闽','明','鸣','铭','名','命','谬','摸','摹','蘑','模','膜','磨','摩','魔','抹','末','墨','默','沫','漠','寞','陌','谋','某','拇','亩','姆','母','暮','幕','募','慕','木','目','睦','牧','穆','拿','呐','钠','那','娜','纳','乃','奶','耐','奈','南','男','难','囊','挠','脑','恼','闹','呢','馁','内','嫩','能','妮','泥','尼','拟','你','匿','腻','逆','溺','年','碾','撵','捻','念','娘','酿','鸟','尿','聂','孽','您','柠','狞','凝','宁','拧','泞','牛','扭','钮','纽','脓','浓','农','弄','奴','努','怒','女','暖','虐','疟','挪','懦','糯','诺','哦','鸥','殴','藕','呕','偶','啪','趴','爬','帕','怕','拍','排','牌','徘','湃','派','攀','潘','盘','盼','畔','判','叛','乓','庞','旁','胖','抛','刨','炮','袍','跑','泡','胚','培','赔','陪','配','佩','喷','盆','砰','烹','澎','彭','蓬','棚','篷','膨','朋','鹏','捧','坯','霹','批','披','劈','啤','脾','疲','皮','匹','僻','屁','譬','篇','偏','片','骗','飘','漂','瓢','票','撇','拼','频','贫','品','聘','乒','坪','苹','萍','平','凭','瓶','评','屏','坡','泼','颇','婆','破','魄','迫','剖','扑','铺','仆','葡','菩','蒲','朴','圃','普','浦','谱','曝','瀑','期','欺','栖','戚','妻','七','漆','柒','其','棋',
'奇','歧','崎','脐','齐','旗','祈','骑','起','岂','企','启','契','砌','器','气','迄','弃','汽','泣','掐','恰','洽','牵','铅','千','迁','签','谦','乾','黔','钱','钳','前','潜','遣','浅','谴','嵌','欠','歉','枪','呛','腔','墙','强','抢','锹','敲','悄','桥','瞧','乔','侨','巧','撬','翘','峭','俏','窍','切','茄','且','怯','窃','钦','侵','亲','秦','琴','勤','芹','擒','禽','沁','青','轻','氢','倾','卿','清','擎','晴','情','顷','请','庆','琼','穷','秋','丘','球','求','囚','趋','区','曲','躯','屈','驱','娶','趣','去','圈','权','泉','全','痊','拳','犬','券','劝','缺','瘸','却','鹊','雀','裙','群','然','燃','冉','染','瓤','壤','嚷','让','饶','扰','绕','惹','热','壬','仁','人','忍','韧','任','认','刃','纫','扔','仍','日','戎','茸','蓉','荣','融','熔','溶','容','绒','冗','揉','柔','肉','蠕','儒','如','辱','乳','入','褥','软','蕊','锐','闰','润','若','弱','撒','洒','萨','腮','塞','赛','三','叁','伞','散','桑','嗓','丧','搔','骚','扫','嫂','瑟','涩','森','僧','砂','杀','刹','沙','纱','傻','煞','筛','晒','珊','山','删','煽','衫','闪','陕','擅','赡','扇','伤','商','赏','晌','上','尚','裳','梢','捎','稍','烧','勺','少','哨','绍','奢','蛇','舌','舍','赦','摄','射','涉','社','设','申','呻','伸','身','深','绅','神','沈','审','婶','甚','肾','慎','渗','声','甥','牲','升','绳','省','盛','剩','胜','圣','师','失','狮','施','湿','诗','尸','十','石','拾','时','什','食','蚀','识','史','矢','使','屎','驶','始','式','示','士','世','柿','事','拭','誓','逝','势','是','嗜','侍','释','饰','氏','市','恃','室','视','试','收','手','首','守','寿','授','售','受','瘦','兽','蔬','枢','梳','殊','抒','输','叔','舒','淑','疏','书','赎','熟','薯','暑','曙','署','蜀','鼠','属','术','述','树','束','竖','墅','庶','数','恕','刷','耍','摔','衰','甩','帅','栓','拴','霜','双','爽','谁','水','睡','税','吮','瞬','顺','说','硕','烁','斯','撕','思','私','司','丝','死','肆','寺','四','伺','似','饲','巳','松','耸','颂','送','宋','讼','诵','搜','艘','嗽','酥','俗','素','粟','塑','溯','宿','诉','肃','酸','蒜','算','虽','隋','随','髓','碎','岁','穗','隧','祟','孙','损','笋','梭','唆','缩','琐','索','锁','所','塌','他','它','她','塔','蹋','踏','胎','抬','台','泰','太','态','汰','摊','贪','滩','坛','檀','痰','潭','谭','谈','坦','毯','碳','叹','炭','汤','塘','堂','棠','膛','唐','糖','倘','躺','淌','趟','烫','掏','涛','滔','萄','桃','逃','淘','陶','讨','套','特','藤','疼','誊','梯','剔','踢','提','题','蹄','啼','体','替','惕','涕','剃','屉','天','添','填','田','甜','恬','舔','挑','条','迢','跳','贴','铁','帖','厅','听',
'廷','停','亭','庭','挺','艇','通','桐','瞳','同','铜','彤','童','桶','捅','筒','统','痛','投','头','透','凸','秃','突','图','徒','途','涂','屠','土','吐','兔','团','推','颓','腿','蜕','褪','吞','屯','臀','拖','托','鸵','驮','驼','椭','妥','拓','唾','挖','哇','蛙','洼','娃','瓦','袜','歪','外','豌','弯','湾','玩','顽','丸','完','碗','挽','晚','皖','宛','婉','万','腕','汪','王','亡','枉','网','往','望','忘','妄','威','巍','微','危','违','围','唯','惟','为','维','苇','萎','委','伟','伪','尾','纬','未','蔚','味','胃','喂','魏','谓','尉','慰','卫','瘟','温','蚊','文','闻','吻','稳','紊','问','嗡','翁','蜗','涡','窝','卧','握','沃','巫','呜','乌','污','诬','屋','无','芜','梧','吾','吴','武','五','捂','午','伍','侮','戊','雾','物','勿','务','悟','误','昔','熙','析','西','晰','嘻','吸','锡','牺','稀','息','希','悉','膝','夕','惜','熄','溪','犀','袭','席','习','媳','喜','洗','系','隙','戏','细','瞎','虾','匣','霞','辖','暇','峡','侠','狭','下','厦','夏','吓','掀','先','仙','鲜','纤','咸','贤','衔','闲','弦','嫌','显','险','现','献','县','腺','馅','羡','宪','陷','限','线','相','厢','镶','香','箱','湘','乡','翔','祥','详','想','响','项','巷','橡','像','向','象','萧','硝','霄','削','哮','嚣','销','消','宵','淆','晓','小','孝','校','肖','啸','笑','效','些','歇','蝎','鞋','协','挟','携','邪','斜','胁','谐','写','械','卸','蟹','懈','泄','泻','谢','屑','薪','芯','锌','欣','辛','新','心','信','衅','星','腥','猩','兴','刑','型','形','行','醒','幸','杏','性','姓','兄','凶','胸','匈','汹','雄','熊','休','修','朽','嗅','锈','秀','袖','绣','墟','戌','需','虚','须','徐','许','蓄','酗','叙','旭','序','畜','恤','絮','绪','续','轩','喧','悬','旋','玄','选','癣','靴','薛','学','穴','雪','血','勋','熏','旬','询','寻','驯','巡','殉','汛','训','讯','逊','迅','压','押','鸦','鸭','呀','丫','芽','牙','崖','衙','涯','雅','哑','亚','讶','焉','咽','烟','淹','盐','严','研','蜒','延','言','颜','炎','沿','掩','眼','衍','演','艳','堰','燕','厌','砚','雁','唁','焰','谚','验','殃','央','鸯','秧','杨','扬','羊','洋','阳','氧','仰','痒','养','样','漾','邀','腰','妖','摇','尧','遥','窑','谣','姚','咬','舀','药','要','耀','爷','野','冶','也','页','业','叶','腋','夜','液','一','壹','医','依','伊','衣','夷','遗','移','仪','胰','疑','宜','姨','椅','蚁','倚','已','乙','矣','以','艺','抑','易','邑','屹','亿','役','逸','疫','亦','意','毅','忆','义','益','溢','议','谊','译','异','翼','绎','茵','荫','因','殷','音','阴','姻','吟','银','淫','寅','饮','引','隐','英','樱','婴','鹰','应','萤','营','荧','蝇','迎',
'赢','盈','影','硬','映','哟','拥','佣','庸','踊','咏','泳','涌','永','用','幽','优','悠','忧','尤','由','邮','犹','油','游','酉','有','友','右','佑','诱','又','幼','迂','淤','于','榆','愚','舆','余','逾','鱼','愉','渝','渔','隅','娱','雨','与','屿','禹','宇','语','羽','玉','域','芋','郁','吁','遇','喻','御','愈','欲','狱','育','誉','浴','寓','裕','预','鸳','渊','冤','元','袁','原','园','员','圆','猿','源','缘','远','苑','愿','怨','院','曰','约','越','跃','钥','岳','粤','月','悦','阅','耘','云','匀','陨','允','运','蕴','酝','晕','韵','孕','砸','杂','栽','哉','灾','宰','载','再','在','咱','暂','赞','赃','脏','葬','遭','糟','凿','藻','枣','早','澡','蚤','躁','噪','造','皂','灶','燥','责','择','则','泽','贼','怎','增','憎','曾','赠','扎','喳','渣','轧','闸','眨','栅','咋','乍','炸','诈','摘','斋','宅','窄','债','寨','瞻','毡','粘','沾','盏','斩','辗','崭','展','栈','占','战','站','绽','樟','章','彰','张','掌','涨','杖','丈','帐','账','仗','胀','障','招','昭','找','沼','赵','罩','兆','肇','召','遮','折','哲','辙','蔗','这','浙','珍','斟','真','贞','针','侦','枕','疹','诊','震','振','镇','阵','蒸','挣','睁','狰','争','怔','整','拯','正','政','症','郑','证','芝','枝','支','吱','蜘','知','肢','脂','汁','之','织','职','直','植','殖','执','值','侄','指','止','趾','只','旨','纸','志','挚','掷','至','置','帜','制','智','秩','稚','质','滞','治','窒','中','忠','钟','衷','终','种','重','仲','众','舟','周','州','洲','粥','轴','肘','帚','咒','皱','宙','昼','骤','珠','株','蛛','朱','猪','诸','逐','竹','烛','煮','拄','瞩','嘱','主','著','柱','助','蛀','贮','筑','住','注','祝','驻','抓','爪','专','砖','转','撰','赚','桩','庄','装','妆','撞','壮','状','椎','锥','追','赘','坠','谆','准','捉','拙','卓','桌','琢','茁','酌','啄','着','灼','浊','兹','咨','资','姿','滋','紫','仔','籽','自','字','棕','踪','宗','综','总','纵','走','奏','揍','卒','族','祖','阻','钻','嘴','最','罪','尊','遵','昨','左','佐','做','作','坐','丐','噩','匕','夭','卦','偎','禀','冥','馨','芙','茉','莺','薇','尬','尴','拗','捺','叽','叩','叨','吆','咄','咧','咪','唠','唧','嗦','嘀','嘹','帷','岖','崛','巅','徙','猬','馍','庵','怡','悖','悴','愕','愣','憔','沐','浏','涮','渲','潇','妃','姊','缤','缭','缰','玷','瑙','璧','桦','榄','榕','橄','橘','檐','贻','掰','肴','胧','豚','朦','臊','炫','祀','祠','禅','盹','睐','铐','铛','皓','鹉','鹦','瘾','聆','蚣','蚪','蚓','蚯','蜓','蜈','蜻','蝠','蝌','蝙','蟆','蟋','蟀','笙','筝','箫','簸','翩','跛','跷','跤','踱','蹂','躏','雳','霎','鲨','鲫','鳄','鳍','魅','鬓','黯'
]
| 2,827.6
| 14,003
| 0.253643
| 3,508
| 14,138
| 1.02366
| 0.999715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000708
| 0.000849
| 14,138
| 5
| 14,004
| 2,827.6
| 0.253009
| 0
| 0
| 0
| 0
| 0
| 0.250583
| 0.003041
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50b64d19912b681ecdc59611d8715a73f826e9da
| 108
|
py
|
Python
|
src/bio2bel_excape/__init__.py
|
bio2bel/excape
|
dd3cfa446dd47135e754da41d52caf700fddbe0e
|
[
"MIT"
] | null | null | null |
src/bio2bel_excape/__init__.py
|
bio2bel/excape
|
dd3cfa446dd47135e754da41d52caf700fddbe0e
|
[
"MIT"
] | 12
|
2018-09-02T16:38:56.000Z
|
2019-03-06T12:46:00.000Z
|
src/bio2bel_excape/__init__.py
|
bio2bel/excape
|
dd3cfa446dd47135e754da41d52caf700fddbe0e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Bio2BEL ExCAPE-DB."""
from bio2bel_excape.manager import Manager # noqa: F401
| 18
| 56
| 0.657407
| 14
| 108
| 5
| 0.785714
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065934
| 0.157407
| 108
| 5
| 57
| 21.6
| 0.703297
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
50c94a8d27383d5f2f605aaaeeb3e7740d6fa757
| 833
|
py
|
Python
|
mar_prior/convolutional_rnn/__init__.py
|
Catherine0505/mar-scf-flow
|
aa7c3564cb9f2967c5e580a633516dba1b597f98
|
[
"Apache-2.0"
] | 101
|
2020-03-05T06:47:05.000Z
|
2022-03-31T03:42:51.000Z
|
Net/LSTM/__init__.py
|
jarrycyx/dual-channel-low-light-video-public
|
9d0bd612c9563cc62742b66c14b5d8404b0fd9b3
|
[
"Apache-2.0"
] | 12
|
2020-03-12T11:10:57.000Z
|
2022-01-14T03:58:03.000Z
|
Net/LSTM/__init__.py
|
jarrycyx/dual-channel-low-light-video-public
|
9d0bd612c9563cc62742b66c14b5d8404b0fd9b3
|
[
"Apache-2.0"
] | 31
|
2020-06-17T22:00:13.000Z
|
2022-01-20T06:18:20.000Z
|
from .module import Conv1dRNN
from .module import Conv1dLSTM
from .module import Conv1dPeepholeLSTM
from .module import Conv1dGRU
from .module import Conv2dRNN
from .module import Conv2dLSTM
from .module import Conv2dPeepholeLSTM
from .module import Conv2dGRU
from .module import Conv3dRNN
from .module import Conv3dLSTM
from .module import Conv3dPeepholeLSTM
from .module import Conv3dGRU
from .module import Conv1dRNNCell
from .module import Conv1dLSTMCell
from .module import Conv1dPeepholeLSTMCell
from .module import Conv1dGRUCell
from .module import Conv2dRNNCell
from .module import Conv2dLSTMCell
from .module import Conv2dPeepholeLSTMCell
from .module import Conv2dGRUCell
from .module import Conv3dRNNCell
from .module import Conv3dLSTMCell
from .module import Conv3dPeepholeLSTMCell
from .module import Conv3dGRUCell
| 27.766667
| 42
| 0.84994
| 96
| 833
| 7.375
| 0.28125
| 0.338983
| 0.542373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032787
| 0.121249
| 833
| 29
| 43
| 28.724138
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
50d5c5b7294ff5a0615af137d2f5b24eb656903c
| 120
|
py
|
Python
|
torchkge/evaluation/__init__.py
|
MacOS/torchkge
|
89ed724368f3a5279c0f79c6ba1f948ed2a5696f
|
[
"BSD-3-Clause"
] | 248
|
2019-04-03T10:04:43.000Z
|
2022-03-30T13:01:51.000Z
|
torchkge/evaluation/__init__.py
|
MacOS/torchkge
|
89ed724368f3a5279c0f79c6ba1f948ed2a5696f
|
[
"BSD-3-Clause"
] | 52
|
2019-04-04T05:54:35.000Z
|
2022-03-02T17:18:15.000Z
|
torchkge/evaluation/__init__.py
|
MacOS/torchkge
|
89ed724368f3a5279c0f79c6ba1f948ed2a5696f
|
[
"BSD-3-Clause"
] | 41
|
2019-06-05T08:08:00.000Z
|
2022-03-26T09:18:05.000Z
|
from .link_prediction import LinkPredictionEvaluator
from .triplet_classification import TripletClassificationEvaluator
| 40
| 66
| 0.916667
| 10
| 120
| 10.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 120
| 2
| 67
| 60
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
50de16f898ab6ebb1ba1b1184b1df16768a80d79
| 14,937
|
py
|
Python
|
azure-mgmt-web/azure/mgmt/web/operations/global_domain_registration_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-mgmt-web/azure/mgmt/web/operations/global_domain_registration_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-mgmt-web/azure/mgmt/web/operations/global_domain_registration_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class GlobalDomainRegistrationOperations(object):
    """GlobalDomainRegistrationOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated client. Every operation below follows the same
    # template: build the URL from the subscription id, build query/header
    # parameters, send the request, raise CloudError on any non-200 status,
    # deserialize the 200 body, and optionally return the raw response.

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def get_all_domains(
            self, custom_headers=None, raw=False, **operation_config):
        """
        Lists all domains in a subscription
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DomainCollection
         <azure.mgmt.web.models.DomainCollection>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/domains'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # unique per-request client id so the call can be traced server-side
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DomainCollection', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def get_domain_control_center_sso_request(
            self, custom_headers=None, raw=False, **operation_config):
        """
        Generates a single sign on request for domain management portal
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DomainControlCenterSsoRequest
         <azure.mgmt.web.models.DomainControlCenterSsoRequest>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/generateSsoRequest'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DomainControlCenterSsoRequest', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def validate_domain_purchase_information(
            self, domain_registration_input, custom_headers=None, raw=False, **operation_config):
        """
        Validates domain registration information
        :param domain_registration_input: Domain registration information
        :type domain_registration_input: :class:`DomainRegistrationInput
         <azure.mgmt.web.models.DomainRegistrationInput>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: object
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/validateDomainRegistrationInformation'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(domain_registration_input, 'DomainRegistrationInput')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('object', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def check_domain_availability(
            self, name=None, custom_headers=None, raw=False, **operation_config):
        """
        Checks if a domain is available for registration
        :param name: Name of the object
        :type name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DomainAvailablilityCheckResult
         <azure.mgmt.web.models.DomainAvailablilityCheckResult>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # NOTE: "Availablility" is misspelled in the model name generated from
        # the service spec; it must stay as-is to match the models package.
        identifier = models.NameIdentifier(name=name)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/checkDomainAvailability'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(identifier, 'NameIdentifier')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DomainAvailablilityCheckResult', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def list_domain_recommendations(
            self, keywords=None, max_domain_recommendations=None, custom_headers=None, raw=False, **operation_config):
        """
        Lists domain recommendations based on keywords
        :param keywords: Keywords to be used for generating domain
         recommendations
        :type keywords: str
        :param max_domain_recommendations: Maximum number of recommendations
        :type max_domain_recommendations: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`NameIdentifierCollection
         <azure.mgmt.web.models.NameIdentifierCollection>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        parameters = models.DomainRecommendationSearchParameters(keywords=keywords, max_domain_recommendations=max_domain_recommendations)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/listDomainRecommendations'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(parameters, 'DomainRecommendationSearchParameters')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('NameIdentifierCollection', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
| 42.677143
| 140
| 0.675772
| 1,555
| 14,937
| 6.315113
| 0.138907
| 0.041752
| 0.02444
| 0.03666
| 0.736253
| 0.736253
| 0.736253
| 0.732179
| 0.697352
| 0.683707
| 0
| 0.003899
| 0.227288
| 14,937
| 349
| 141
| 42.799427
| 0.846907
| 0.287206
| 0
| 0.767857
| 0
| 0
| 0.166783
| 0.109859
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.02381
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0fca8190a8280034fcbde2422ec57bcb5e0de6b3
| 220
|
py
|
Python
|
TestImage.py
|
uesp/skyrimmaps-scripts
|
19acbec216487978d85afa67df98073bd43dcb84
|
[
"MIT"
] | null | null | null |
TestImage.py
|
uesp/skyrimmaps-scripts
|
19acbec216487978d85afa67df98073bd43dcb84
|
[
"MIT"
] | null | null | null |
TestImage.py
|
uesp/skyrimmaps-scripts
|
19acbec216487978d85afa67df98073bd43dcb84
|
[
"MIT"
] | null | null | null |
import os
import sys
import Image
import shutil

# Open a Skyrim map tile (DDS texture) and save it back out as a JPEG.
# NOTE(review): `Image` is the pre-package-era PIL import; presumably PIL/Pillow
# with DDS plugin support — confirm the installed imaging library.
ImageNW = Image.open("d:\\steam\\steamapps\\common\\skyrim\\data\\textures\\maps\\skyrim\\tamriel.61.-7.dds")
# Bug fix: the PIL Image method is lowercase `save`; `Save` does not exist and
# raised AttributeError, so the JPEG was never written.
ImageNW.save("d:\\test.jpg")
| 22
| 110
| 0.622727
| 29
| 220
| 4.724138
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017143
| 0.204545
| 220
| 9
| 111
| 24.444444
| 0.765714
| 0
| 0
| 0
| 0
| 0.166667
| 0.459716
| 0.402844
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0fe555fd31bb6b2837e6e532f9f20b3cfef053fb
| 162
|
py
|
Python
|
test/stream-monitor/test/support/stream-base/two_pass/test_two_pass.py
|
arunrordell/RackHD
|
079c21f45cb38f538c502363aa1ff86dbcac3169
|
[
"Apache-2.0"
] | 451
|
2015-11-09T13:19:25.000Z
|
2022-03-16T08:00:16.000Z
|
test/stream-monitor/test/support/stream-base/two_pass/test_two_pass.py
|
arunrordell/RackHD
|
079c21f45cb38f538c502363aa1ff86dbcac3169
|
[
"Apache-2.0"
] | 824
|
2015-11-10T15:25:50.000Z
|
2018-04-09T09:59:49.000Z
|
test/stream-monitor/test/support/stream-base/two_pass/test_two_pass.py
|
arunrordell/RackHD
|
079c21f45cb38f538c502363aa1ff86dbcac3169
|
[
"Apache-2.0"
] | 221
|
2015-11-10T23:00:46.000Z
|
2022-03-16T08:00:22.000Z
|
"""
Copyright (c) 2016-2017 Dell Inc. or its subsidiaries. All Rights Reserved.
"""
def test_one_of_two_pass():
    # First of two placeholder tests; intentionally a no-op so the stream
    # monitor's two-pass support scaffolding always sees a passing test.
    return None
def test_two_of_two_pass():
    # Second of two placeholder tests; intentionally a no-op.
    return None
| 13.5
| 75
| 0.697531
| 26
| 162
| 4.038462
| 0.692308
| 0.133333
| 0.171429
| 0.247619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061069
| 0.191358
| 162
| 11
| 76
| 14.727273
| 0.740458
| 0.462963
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 1
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ba0f54a32f7ec2ee8ddd70184152f9b526668ab1
| 9,493
|
py
|
Python
|
tests/unittests/cli.py
|
wilzbach/storyscript-sls
|
d71d74a53852ebae54bdaab341678b04f2775411
|
[
"Apache-2.0"
] | null | null | null |
tests/unittests/cli.py
|
wilzbach/storyscript-sls
|
d71d74a53852ebae54bdaab341678b04f2775411
|
[
"Apache-2.0"
] | null | null | null |
tests/unittests/cli.py
|
wilzbach/storyscript-sls
|
d71d74a53852ebae54bdaab341678b04f2775411
|
[
"Apache-2.0"
] | null | null | null |
import json
from unittest.mock import call
import click
from click.testing import CliRunner
from pytest import fixture, mark
from sls import App, Cli
from sls.version import version
@fixture
def runner():
    """Provide a fresh Click test runner for invoking the CLI."""
    return CliRunner()

@fixture
def echo(patch):
    """Patch `click.echo` and hand the mock back to the test."""
    patch.object(click, "echo")
    return click.echo

@fixture
def app(patch):
    """Patch App's constructor and its server/completion entry points."""
    patch.init(App)
    patch.many(
        App,
        [
            "click",
            "complete",
            "start_tcp_server",
            "start_stdio_server",
            "start_websocket_server",
        ],
    )
    return App
@mark.parametrize("option", ["version", "v"])
def test_cli_version_flag(runner, echo, option):
    """
    Ensures --version outputs the version
    """
    e = runner.invoke(Cli.main, option)
    click.echo.assert_called_with(version)
    assert e.exit_code == 0

def test_cli_help_flag(runner, echo):
    """Ensures --help prints the usage text without going through click.echo."""
    e = runner.invoke(Cli.main, ["--help"])
    assert e.output.startswith("Usage: main")
    assert click.echo.call_count == 0
    assert e.exit_code == 0

@mark.parametrize("option", ["help", "h"])
def test_cli_help_page(runner, echo, option):
    """Ensures the help subcommand echoes the usage text exactly once."""
    e = runner.invoke(Cli.main, [option])
    assert click.echo.call_count == 1
    assert click.echo.call_args[0][0].startswith("Usage: main")
    assert e.exit_code == 0

def test_cli_empty(runner, echo):
    """Ensures invoking with no arguments echoes the usage text."""
    e = runner.invoke(Cli.main, [])
    assert click.echo.call_count == 1
    assert click.echo.call_args[0][0].startswith("Usage: main")
    assert e.exit_code == 0
def test_cli_hub_flag(runner, echo, app):
    """
    Ensures --hub loads a custom hub file into the App.
    """
    with runner.isolated_filesystem():
        with open("my.hub", "w") as f:
            f.write("Hello World!")
        e = runner.invoke(Cli.main, ["--hub=my.hub"])
        assert e.exit_code == 0
        app.__init__.assert_called_with(hub_path="my.hub")

def test_cli_hub_manual(patch, runner, echo, app, magic):
    """
    Allows to manually overwrite --hub from outside calls.
    """
    # Calling Cli.main directly (not via the runner) with obj= simulates an
    # embedding caller injecting the hub path.
    Cli.main(args=["stdio"], standalone_mode=False, obj="my.hub")
    app.__init__.assert_called_with(hub_path="my.hub")
    app.start_stdio_server.assert_called()
def test_cli_tcp(runner, echo, app):
    """
    Ensures tcp starts a server with the default host and port.
    """
    e = runner.invoke(Cli.main, ["tcp"])
    app.start_tcp_server.assert_called_with(addr="127.0.0.1", port=2042)
    assert e.exit_code == 0

def test_cli_tcp_port(runner, echo, app):
    """
    Ensures tcp starts a server on a custom port.
    """
    e = runner.invoke(Cli.main, ["tcp", "--port=123"])
    app.start_tcp_server.assert_called_with(addr="127.0.0.1", port=123)
    assert e.exit_code == 0

def test_cli_tcp_host(runner, echo, app):
    """
    Ensures tcp starts a server on a custom host.
    """
    e = runner.invoke(Cli.main, ["tcp", "--host=foo"])
    app.start_tcp_server.assert_called_with(addr="foo", port=2042)
    assert e.exit_code == 0

def test_cli_tcp_hub(runner, echo, app):
    """
    Ensures tcp starts a server with a custom hub file.
    """
    with runner.isolated_filesystem():
        with open("my.hub", "w") as f:
            f.write("Hello World!")
        e = runner.invoke(Cli.main, ["--hub=my.hub", "tcp"])
        assert e.exit_code == 0
        app.__init__.assert_called_with(hub_path="my.hub")
        app.start_tcp_server.assert_called_with(addr="127.0.0.1", port=2042)
def test_cli_stdio(runner, echo, app):
    """
    Ensures stdio spawns a server.
    """
    e = runner.invoke(Cli.main, ["stdio"])
    app.start_stdio_server.assert_called()
    assert e.exit_code == 0

def test_cli_stdio_hub(runner, echo, app):
    """
    Ensures stdio spawns a server with a custom hub file.
    """
    with runner.isolated_filesystem():
        with open("my.hub", "w") as f:
            f.write("Hello World!")
        e = runner.invoke(Cli.main, ["--hub=my.hub", "stdio"])
        assert e.exit_code == 0
        app.__init__.assert_called_with(hub_path="my.hub")
        app.start_stdio_server.assert_called()
def test_cli_websocket(runner, echo, app):
    """
    Ensures websocket starts a server with the default host and port.
    """
    e = runner.invoke(Cli.main, ["websocket"])
    app.start_websocket_server.assert_called_with(addr="0.0.0.0", port=2042)
    assert e.exit_code == 0

def test_cli_websocket_port(runner, echo, app):
    """
    Ensures websocket starts a server on a custom port.
    """
    e = runner.invoke(Cli.main, ["websocket", "--port=123"])
    app.start_websocket_server.assert_called_with(addr="0.0.0.0", port=123)
    assert e.exit_code == 0

def test_cli_websocket_host(runner, echo, app):
    """
    Ensures websocket starts a server on a custom host.
    """
    e = runner.invoke(Cli.main, ["websocket", "--host=foo"])
    app.start_websocket_server.assert_called_with(addr="foo", port=2042)
    assert e.exit_code == 0

def test_cli_websocket_hub(runner, echo, app):
    """
    Ensures websocket starts a server with a custom hub file.
    """
    with runner.isolated_filesystem():
        with open("my.hub", "w") as f:
            f.write("Hello World!")
        e = runner.invoke(Cli.main, ["--hub=my.hub", "websocket"])
        assert e.exit_code == 0
        app.__init__.assert_called_with(hub_path="my.hub")
        app.start_websocket_server.assert_called_with(
            addr="0.0.0.0", port=2042
        )
def test_cli_complete_missing(patch, runner, echo, app):
    """
    Ensures that the completion file exists.
    """
    # No story file argument -> Click usage error (exit code 2), no output.
    e = runner.invoke(Cli.main, ["complete"])
    assert click.echo.call_count == 0
    assert e.exit_code == 2

def test_cli_complete_hub(patch, runner, echo, app):
    """
    Ensures CLI completion with a custom hub works.
    """
    with runner.isolated_filesystem():
        patch.object(json, "dumps")
        text = "foobar"
        with open("my.story", "w") as f:
            f.write(text)
        with open("my.hub", "w") as f:
            f.write("Hello World!")
        e = runner.invoke(Cli.main, ["--hub=my.hub", "complete", "my.story"])
        app.__init__.assert_called_with(hub_path="my.hub")
        app.complete.assert_called_with(
            "|completion|", text, line=None, column=None
        )
        json.dumps.assert_called_with(App.complete(), indent=2, sort_keys=True)
        click.echo.assert_called_with(json.dumps())
        assert e.exit_code == 0
@mark.parametrize(
    "options,expected",
    [
        ([], {"line": None, "column": None}),
        (["--line", "2"], {"line": 2, "column": None}),
        (["-l", "2"], {"line": 2, "column": None}),
        (["--column", "3"], {"line": None, "column": 3}),
        (["-c", "3"], {"line": None, "column": 3}),
        (["-l", "2", "-c", "3"], {"line": 2, "column": 3}),
    ],
)
def test_cli_complete_line_column(patch, runner, echo, app, options, expected):
    """
    Ensures CLI completion with custom line and column works.
    """
    with runner.isolated_filesystem():
        patch.object(json, "dumps")
        text = "foobar"
        with open("my.story", "w") as f:
            f.write(text)
        e = runner.invoke(Cli.main, ["complete", "my.story", *options])
        app.complete.assert_called_with("|completion|", text, **expected)
        json.dumps.assert_called_with(App.complete(), indent=2, sort_keys=True)
        click.echo.assert_called_with(json.dumps())
        assert e.exit_code == 0
def test_cli_complete_short(patch, runner, echo, app):
    """
    Ensures CLI completion with shortened output (one label per line).
    """
    app.complete.return_value = [{"label": "foo"}, {"label": "bar"}]
    with runner.isolated_filesystem():
        text = "foobar"
        with open("my.story", "w") as f:
            f.write(text)
        e = runner.invoke(Cli.main, ["complete", "--short", "my.story"])
        app.complete.assert_called_with(
            "|completion|", text, line=None, column=None
        )
        assert echo.call_args_list == [
            call(),
            call("foo"),
            call("bar"),
        ]
        assert e.exit_code == 0
@mark.parametrize(
    "options,expected",
    [
        ([], {"line": None, "column": None}),
        (["--line", "2"], {"line": 2, "column": None}),
        (["-l", "2"], {"line": 2, "column": None}),
        (["--column", "3"], {"line": None, "column": 3}),
        (["-c", "3"], {"line": None, "column": 3}),
        (["-l", "2", "-c", "3"], {"line": 2, "column": 3}),
    ],
)
def test_cli_click_line_column(patch, runner, echo, app, options, expected):
    """
    Ensures CLI click with custom line and column works.
    """
    with runner.isolated_filesystem():
        patch.object(json, "dumps")
        text = "foobar"
        with open("my.story", "w") as f:
            f.write(text)
        e = runner.invoke(Cli.main, ["click", "my.story", *options])
        app.click.assert_called_with("|click|", text, **expected)
        json.dumps.assert_called_with(App.click(), indent=2, sort_keys=True)
        click.echo.assert_called_with(json.dumps())
        assert e.exit_code == 0
def test_cli_click_short(patch, runner, echo, app):
    """
    Ensures CLI click with shortened output (one label per line).
    """
    app.click.return_value = [{"label": "foo"}, {"label": "bar"}]
    with runner.isolated_filesystem():
        text = "foobar"
        with open("my.story", "w") as f:
            f.write(text)
        e = runner.invoke(Cli.main, ["click", "--short", "my.story"])
        app.click.assert_called_with("|click|", text, line=None, column=None)
        assert echo.call_args_list == [
            call(),
            call("foo"),
            call("bar"),
        ]
        assert e.exit_code == 0
| 29.758621
| 79
| 0.596123
| 1,269
| 9,493
| 4.283688
| 0.096139
| 0.064018
| 0.076527
| 0.06181
| 0.814202
| 0.786424
| 0.765269
| 0.721854
| 0.676784
| 0.674577
| 0
| 0.016839
| 0.236806
| 9,493
| 318
| 80
| 29.852201
| 0.733471
| 0.077215
| 0
| 0.543269
| 0
| 0
| 0.11553
| 0.002596
| 0
| 0
| 0
| 0
| 0.283654
| 1
| 0.120192
| false
| 0
| 0.033654
| 0.004808
| 0.168269
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e8416a61f2b86bc72810d71126773a647eccb84d
| 105
|
py
|
Python
|
services/weblogic/__init__.py
|
claudijd/honeycomb_plugins
|
f25132812b5119259d8562117c93d1e57a74b920
|
[
"MIT"
] | 24
|
2018-04-27T08:49:57.000Z
|
2022-03-03T15:55:19.000Z
|
services/weblogic/__init__.py
|
claudijd/honeycomb_plugins
|
f25132812b5119259d8562117c93d1e57a74b920
|
[
"MIT"
] | 353
|
2018-04-27T12:06:37.000Z
|
2019-12-11T07:15:34.000Z
|
services/weblogic/__init__.py
|
claudijd/honeycomb_plugins
|
f25132812b5119259d8562117c93d1e57a74b920
|
[
"MIT"
] | 12
|
2018-05-25T10:29:29.000Z
|
2022-01-24T13:35:49.000Z
|
# -*- coding: utf-8 -*-
"""Honeycomb Oracle WebLogic Service."""
from __future__ import unicode_literals
| 26.25
| 40
| 0.72381
| 12
| 105
| 5.916667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.12381
| 105
| 3
| 41
| 35
| 0.76087
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e85b9c321a23c68ef0fb1b780ac525750b393583
| 88
|
py
|
Python
|
sim-services/python/__init__.py
|
hschwane/offline_production
|
e14a6493782f613b8bbe64217559765d5213dc1e
|
[
"MIT"
] | 1
|
2020-12-24T22:00:01.000Z
|
2020-12-24T22:00:01.000Z
|
sim-services/python/__init__.py
|
hschwane/offline_production
|
e14a6493782f613b8bbe64217559765d5213dc1e
|
[
"MIT"
] | null | null | null |
sim-services/python/__init__.py
|
hschwane/offline_production
|
e14a6493782f613b8bbe64217559765d5213dc1e
|
[
"MIT"
] | 3
|
2020-07-17T09:20:29.000Z
|
2021-03-30T16:44:18.000Z
|
from icecube.load_pybindings import load_pybindings
# Module-level side effect: presumably registers this package's compiled
# pybindings under its own name/path — confirm against load_pybindings' docs.
load_pybindings(__name__, __path__)
| 29.333333
| 51
| 0.886364
| 11
| 88
| 6.090909
| 0.636364
| 0.626866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 88
| 2
| 52
| 44
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e87f1d2c7a3017b77c3ca9af54fdf6079f945022
| 32
|
py
|
Python
|
python/testData/resolve/multiFile/importOsPath/yos/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/resolve/multiFile/importOsPath/yos/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/resolve/multiFile/importOsPath/yos/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def makedir(foo):
    """Print *foo* and return None.

    NOTE(review): the original used the Python 2 print statement; for a
    single argument ``print(foo)`` behaves identically under Python 2 and
    is also valid Python 3. (This lives in IDE resolve test data —
    presumably the function body is irrelevant to the test; confirm.)
    """
    print(foo)
| 10.666667
| 17
| 0.65625
| 5
| 32
| 4.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 32
| 2
| 18
| 16
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e8b04fb8b4e29e9c9239239d686c5686a6380dab
| 143
|
py
|
Python
|
NSE/__init__.py
|
GamesBond008/NSE-India-Scrapper
|
9963d1b99ee5557e61d6be329bf9f50444d2820a
|
[
"MIT"
] | 1
|
2021-06-01T19:36:42.000Z
|
2021-06-01T19:36:42.000Z
|
NSE/__init__.py
|
GamesBond008/NSE-India-Scrapper
|
9963d1b99ee5557e61d6be329bf9f50444d2820a
|
[
"MIT"
] | null | null | null |
NSE/__init__.py
|
GamesBond008/NSE-India-Scrapper
|
9963d1b99ee5557e61d6be329bf9f50444d2820a
|
[
"MIT"
] | null | null | null |
import importlib.resources,json
with importlib.resources.path('NSE','ValidSymbols.json') as data_path:
ValidSymbols=json.load(open(data_path))
| 47.666667
| 70
| 0.818182
| 20
| 143
| 5.75
| 0.6
| 0.313043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048951
| 143
| 3
| 71
| 47.666667
| 0.845588
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fa120f1d587d44b6452582ab91218f1c1599d8dc
| 99
|
py
|
Python
|
enthought/block_canvas/canvas/canvas_grid.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/block_canvas/canvas/canvas_grid.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/block_canvas/canvas/canvas_grid.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from blockcanvas.canvas.canvas_grid import *
| 24.75
| 44
| 0.848485
| 13
| 99
| 6
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 99
| 3
| 45
| 33
| 0.886364
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fa64459d20607c8b78aff014dfb29c365986acea
| 85
|
py
|
Python
|
headbot/settings.py
|
headbot/headbot-python
|
adc4fb389821f771b8c916490fa85efa21d035f4
|
[
"Apache-2.0"
] | null | null | null |
headbot/settings.py
|
headbot/headbot-python
|
adc4fb389821f771b8c916490fa85efa21d035f4
|
[
"Apache-2.0"
] | null | null | null |
headbot/settings.py
|
headbot/headbot-python
|
adc4fb389821f771b8c916490fa85efa21d035f4
|
[
"Apache-2.0"
] | null | null | null |
import os
API_ROOT_URL = os.environ.get("API_ROOT_URL", "https://headbot.io/api/")
| 17
| 72
| 0.717647
| 15
| 85
| 3.8
| 0.666667
| 0.245614
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 85
| 4
| 73
| 21.25
| 0.74026
| 0
| 0
| 0
| 0
| 0
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d73013e758a6b0d272f2d2cfa80aa76f749bee78
| 2,360
|
py
|
Python
|
google/cloud/servicemanagement_v1/types/__init__.py
|
LaudateCorpus1/python-service-management
|
ad49299424aefbaaef686c79af533058d5fa5b66
|
[
"Apache-2.0"
] | 2
|
2021-10-07T02:24:31.000Z
|
2021-11-04T07:19:04.000Z
|
google/cloud/servicemanagement_v1/types/__init__.py
|
LaudateCorpus1/python-service-management
|
ad49299424aefbaaef686c79af533058d5fa5b66
|
[
"Apache-2.0"
] | 36
|
2021-03-25T16:02:45.000Z
|
2022-03-07T16:49:58.000Z
|
google/cloud/servicemanagement_v1/types/__init__.py
|
LaudateCorpus1/python-service-management
|
ad49299424aefbaaef686c79af533058d5fa5b66
|
[
"Apache-2.0"
] | 5
|
2021-03-26T13:18:08.000Z
|
2022-01-29T08:13:29.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .resources import (
ChangeReport,
ConfigFile,
ConfigRef,
ConfigSource,
Diagnostic,
ManagedService,
OperationMetadata,
Rollout,
)
from .servicemanager import (
CreateServiceConfigRequest,
CreateServiceRequest,
CreateServiceRolloutRequest,
DeleteServiceRequest,
DisableServiceRequest,
DisableServiceResponse,
EnableServiceRequest,
EnableServiceResponse,
GenerateConfigReportRequest,
GenerateConfigReportResponse,
GetServiceConfigRequest,
GetServiceRequest,
GetServiceRolloutRequest,
ListServiceConfigsRequest,
ListServiceConfigsResponse,
ListServiceRolloutsRequest,
ListServiceRolloutsResponse,
ListServicesRequest,
ListServicesResponse,
SubmitConfigSourceRequest,
SubmitConfigSourceResponse,
UndeleteServiceRequest,
UndeleteServiceResponse,
)
__all__ = (
"ChangeReport",
"ConfigFile",
"ConfigRef",
"ConfigSource",
"Diagnostic",
"ManagedService",
"OperationMetadata",
"Rollout",
"CreateServiceConfigRequest",
"CreateServiceRequest",
"CreateServiceRolloutRequest",
"DeleteServiceRequest",
"DisableServiceRequest",
"DisableServiceResponse",
"EnableServiceRequest",
"EnableServiceResponse",
"GenerateConfigReportRequest",
"GenerateConfigReportResponse",
"GetServiceConfigRequest",
"GetServiceRequest",
"GetServiceRolloutRequest",
"ListServiceConfigsRequest",
"ListServiceConfigsResponse",
"ListServiceRolloutsRequest",
"ListServiceRolloutsResponse",
"ListServicesRequest",
"ListServicesResponse",
"SubmitConfigSourceRequest",
"SubmitConfigSourceResponse",
"UndeleteServiceRequest",
"UndeleteServiceResponse",
)
| 27.764706
| 74
| 0.741525
| 162
| 2,360
| 10.777778
| 0.62963
| 0.034364
| 0.014891
| 0.018328
| 0.717068
| 0.717068
| 0.717068
| 0.717068
| 0.612829
| 0.612829
| 0
| 0.004663
| 0.182203
| 2,360
| 84
| 75
| 28.095238
| 0.9
| 0.241102
| 0
| 0
| 0
| 0
| 0.352676
| 0.236056
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.029412
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d737c2717558809b91510eda2245d3e43d075769
| 221
|
py
|
Python
|
NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Astropy/astropy-1.1.2/astropy/visualization/__init__.py
|
sahirsharma/Martian
|
062e9b47849512863c16713811f347ad7e121b56
|
[
"MIT"
] | null | null | null |
NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Astropy/astropy-1.1.2/astropy/visualization/__init__.py
|
sahirsharma/Martian
|
062e9b47849512863c16713811f347ad7e121b56
|
[
"MIT"
] | null | null | null |
NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Astropy/astropy-1.1.2/astropy/visualization/__init__.py
|
sahirsharma/Martian
|
062e9b47849512863c16713811f347ad7e121b56
|
[
"MIT"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .stretch import *
from .interval import *
from .transform import *
from .ui import *
from .mpl_style import *
from .hist import *
from .units import *
| 22.1
| 63
| 0.737557
| 33
| 221
| 4.909091
| 0.575758
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005525
| 0.180995
| 221
| 9
| 64
| 24.555556
| 0.889503
| 0.276018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d74b43515a07aa4a4874227520a2a1f3d8be0b3a
| 205
|
py
|
Python
|
venv/Lib/site-packages/neuralnetwork/Sigmoid.py
|
GacinhoV33/VoiceAssistant
|
3409cb630968dd9cd79abc5955a6e3f8df756622
|
[
"MIT-CMU"
] | 1
|
2021-01-11T11:56:12.000Z
|
2021-01-11T11:56:12.000Z
|
venv/Lib/site-packages/neuralnetwork/Sigmoid.py
|
GacinhoV33/VoiceAssistant
|
3409cb630968dd9cd79abc5955a6e3f8df756622
|
[
"MIT-CMU"
] | null | null | null |
venv/Lib/site-packages/neuralnetwork/Sigmoid.py
|
GacinhoV33/VoiceAssistant
|
3409cb630968dd9cd79abc5955a6e3f8df756622
|
[
"MIT-CMU"
] | 1
|
2020-03-30T16:34:34.000Z
|
2020-03-30T16:34:34.000Z
|
import math
class Sigmoid:
def getActivation(self,net):
return 1/(1+math.exp(-1 * net))
def getDerivative(self,net):
return self.getActivation(net) * (1 - self.getActivation(net))
| 25.625
| 70
| 0.653659
| 27
| 205
| 4.962963
| 0.444444
| 0.104478
| 0.19403
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.209756
| 205
| 8
| 70
| 25.625
| 0.802469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
d74b912b79bc14049cd898fb009af55bb35ece44
| 30
|
py
|
Python
|
src/django_core/settings/__init__.py
|
leonardon473/simplest-docker-django-example
|
b290ca0783b66a4c9ad1471f38b8ad128b6e1f91
|
[
"MIT"
] | null | null | null |
src/django_core/settings/__init__.py
|
leonardon473/simplest-docker-django-example
|
b290ca0783b66a4c9ad1471f38b8ad128b6e1f91
|
[
"MIT"
] | null | null | null |
src/django_core/settings/__init__.py
|
leonardon473/simplest-docker-django-example
|
b290ca0783b66a4c9ad1471f38b8ad128b6e1f91
|
[
"MIT"
] | null | null | null |
from .django import * # noqa
| 15
| 29
| 0.666667
| 4
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 30
| 1
| 30
| 30
| 0.869565
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d75006bf81456c3e2b70a33490c2d271c627920a
| 413
|
py
|
Python
|
ms_deisotope/data_source/_vendor/masslynx/__init__.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 18
|
2017-09-01T12:26:12.000Z
|
2022-02-23T02:31:29.000Z
|
ms_deisotope/data_source/_vendor/masslynx/__init__.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 19
|
2017-03-12T20:40:36.000Z
|
2022-03-31T22:50:47.000Z
|
ms_deisotope/data_source/_vendor/masslynx/__init__.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 14
|
2016-05-06T02:25:30.000Z
|
2022-03-31T14:40:06.000Z
|
# from .libload import proxy, register_dll, _register_dll
# from .loader import (MassLynxRawLoader, is_waters_raw_dir,
# determine_if_available, infer_reader,
# IndexEntry, Cycle)
# __all__ = [
# "proxy", "register_dll", "_register_dll",
# "MassLynxRawLoader", "is_waters_raw_dir",
# "determine_if_available", "infer_reader",
# "IndexEntry", "Cycle"
# ]
| 34.416667
| 60
| 0.644068
| 41
| 413
| 5.95122
| 0.487805
| 0.180328
| 0.131148
| 0.196721
| 0.852459
| 0.631148
| 0.631148
| 0.631148
| 0.631148
| 0.631148
| 0
| 0
| 0.234867
| 413
| 12
| 61
| 34.416667
| 0.772152
| 0.946731
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d75a22b208b4f0acf7a33f0ee8733b1104c6ab51
| 124
|
py
|
Python
|
messaging/admin.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
messaging/admin.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
messaging/admin.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Message,Alert
admin.site.register(Message)
admin.site.register(Alert)
| 20.666667
| 33
| 0.822581
| 18
| 124
| 5.666667
| 0.555556
| 0.176471
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08871
| 124
| 5
| 34
| 24.8
| 0.902655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d761f4c5f3d9510c38b21f178c089c9b84c2758b
| 1,010
|
py
|
Python
|
src/Caitrin/writeSASCodeLOL.py
|
networkdynamics/PuckIt
|
08542c324440919960198eae4ca8855f2ac43134
|
[
"Apache-2.0"
] | 1
|
2018-05-16T20:58:32.000Z
|
2018-05-16T20:58:32.000Z
|
src/Caitrin/writeSASCodeLOL.py
|
networkdynamics/PuckIt
|
08542c324440919960198eae4ca8855f2ac43134
|
[
"Apache-2.0"
] | null | null | null |
src/Caitrin/writeSASCodeLOL.py
|
networkdynamics/PuckIt
|
08542c324440919960198eae4ca8855f2ac43134
|
[
"Apache-2.0"
] | null | null | null |
Xs = ["Year_Founded","Year_Joined_NHL","Franchise_Moved__Kind_of_invalid","Stadium_Capacity","VAR6","Number_of_Stanley_Cup_Wins","Last_Time_Won_Stanley_Cup","Number_of_Stanley_Cup_Finals_App","Most_Recent_Stanley_Cup_Finals_A","Last_Time_Made_Playoffs__Does_no","Number_of_Playoff_Appearances_ov","sum_playoff_lengths_5_years","Total_Number_of_Playoff_Appearan","Average_Secondary_Market_Ticket","Team_Net_Worth__Again__unsure_of","Team_Debt_Value","Team_Revenue__Again__unsure_of_c","Team_Operating_Income__Again__un","Same_State_Teams","Population","Median_Household_Income","Latitude"]
Ys = ["Subscribers","Moderators","Number_of_Comments","Median_number_of_comments_per_us","Number_of_users","Number_of_posts","clustering","assortativity","pearson_corr_coef","clique_size","number_of_cliques","transitivity","number_weak_comp"]
for x in Xs:
for y in Ys:
print 'proc sgplot data=WORK.IMPORT; scatter x= %s y= %s / datalabel=Team datalabelattrs=(size=10);xaxis grid;yaxis grid;run;' % (x, y)
| 101
| 588
| 0.820792
| 152
| 1,010
| 4.848684
| 0.651316
| 0.097693
| 0.040706
| 0.048847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004154
| 0.046535
| 1,010
| 9
| 589
| 112.222222
| 0.761163
| 0
| 0
| 0
| 0
| 0.2
| 0.822772
| 0.477228
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d77740789e6229262866544bbdb94df57f189fd6
| 22
|
py
|
Python
|
discum/science/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
discum/science/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
discum/science/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
from .science import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d788dc51593f48859a158847d4ccd533fc6c177c
| 21
|
py
|
Python
|
neslter/parsing/ctd/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | 3
|
2019-01-24T16:32:50.000Z
|
2021-11-05T02:18:12.000Z
|
neslter/parsing/ctd/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | 45
|
2019-05-23T15:15:32.000Z
|
2022-03-15T14:09:20.000Z
|
neslter/parsing/ctd/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | null | null | null |
from .api import Ctd
| 10.5
| 20
| 0.761905
| 4
| 21
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ad541f30abcc65903bfb4338ea0e98817f2809ab
| 242
|
py
|
Python
|
openexplorer_old/exploration_campaign/__init__.py
|
uibcdf/PELE-OpenMM
|
15100f6167ea8f2b5795cc7b90d8c5e852de1f57
|
[
"MIT"
] | null | null | null |
openexplorer_old/exploration_campaign/__init__.py
|
uibcdf/PELE-OpenMM
|
15100f6167ea8f2b5795cc7b90d8c5e852de1f57
|
[
"MIT"
] | null | null | null |
openexplorer_old/exploration_campaign/__init__.py
|
uibcdf/PELE-OpenMM
|
15100f6167ea8f2b5795cc7b90d8c5e852de1f57
|
[
"MIT"
] | 1
|
2022-02-15T22:47:28.000Z
|
2022-02-15T22:47:28.000Z
|
from .montecarlo import MonteCarlo
from .montecarlo_minimization import MonteCarloMinimization
#from .basinhopping import BasinHopping
from .quench_and_restore import QuenchAndRestore
from .successive_confinement import SuccessiveConfinement
| 40.333333
| 59
| 0.892562
| 24
| 242
| 8.833333
| 0.541667
| 0.132075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082645
| 242
| 5
| 60
| 48.4
| 0.954955
| 0.157025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ad77ffd04f0d55291232ee0e4bf9aeeb707b095e
| 6,056
|
py
|
Python
|
tests/test_contagion.py
|
lucasmccabe/contagion
|
b692f03f142e6b94e0d5a271bb689589ae891e7d
|
[
"MIT"
] | 1
|
2021-05-05T20:52:12.000Z
|
2021-05-05T20:52:12.000Z
|
tests/test_contagion.py
|
lucasmccabe/contagion
|
b692f03f142e6b94e0d5a271bb689589ae891e7d
|
[
"MIT"
] | null | null | null |
tests/test_contagion.py
|
lucasmccabe/contagion
|
b692f03f142e6b94e0d5a271bb689589ae891e7d
|
[
"MIT"
] | null | null | null |
import sys
import copy
import unittest
import numpy as np
import networkx as nx
sys.path.append("..")
from contagion import contagion
class TestContagion(unittest.TestCase):
def test_init_In(self):
"""
Tests initialization of Infected compartment.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
self.assertEqual(np.sum(network.In), 50)
def test_init_Su(self):
"""
Tests initialization of Susceptible compartment.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
self.assertEqual(np.sum(network.Su), 15)
def test_init_Re(self):
"""
Tests initialization of Recovered compartment.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
self.assertEqual(np.sum(network.Re), 35)
def test_reset_Su_In_Re(self):
"""
Tests reset of compartmental histories.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
network.In -= network.In
network.reset_Su_In_Re()
self.assertEqual(np.sum(network.In), 50)
def test_generate_random_walk_length(self):
"""
Tests the length of the generated random walk.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
walk = network.generate_random_walk(10)
self.assertEqual(len(walk), 10)
def test_generate_random_walk_degrees_length(self):
"""
Tests the length of the generated random degree sequence.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
walk = network.generate_random_walk_degree_sequence(10)
self.assertEqual(len(walk), 10)
def test_immunize_network_vaccinate(self):
"""
Tests that network immunization is working correctly.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.5,
fraction_recovered = 0.35)
Im = copy.deepcopy(network.In)
np.random.shuffle(Im)
network.immunize_network(Im, efficacy = 0.7)
self.assertEqual(np.sum(network.Im), np.sum(Im))
def test_init_histories(self):
"""
Tests initiation of simulation history.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(network, save_history = True)
self.assertEqual(len(sim.Su_hist), 1)
def test_run_simulation(self):
"""
Tests that the simulation runs for an appropriate number of steps.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(network, save_history = True)
sim.run_simulation()
self.assertGreater(len(sim.In_hist), 4)
def test_time_varying_beta(self):
"""
Tests that the simulation correctly initializes time-varying
transmission rates as a list.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(
network,
beta = [1., 0., 0.5, 1., 0., 0.5])
self.assertIsInstance(sim.beta_queue, list)
def test_single_omega(self):
"""
Tests running of a simulation with a Re-to-Su probability.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(
network,
beta = 0.5,
omega = 0.05)
sim.run_simulation()
self.assertGreater(len(sim.In_hist), 4)
def test_multiple_omega(self):
"""
Tests running of a simulation with Re-to-Su and Im-to_Su probabilities.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
Im = copy.deepcopy(network.In)
np.random.shuffle(Im)
network.immunize_network(Im, efficacy = 0.7)
sim = contagion.Contagion(
network,
beta = 0.5,
omega = (0.1, 0.05))
sim.run_simulation()
self.assertGreater(len(sim.In_hist), 4)
def test_max_infected(self):
"""
Tests believability of maximum infected during simulation.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(network, save_history = True)
self.assertGreater(sim.run_simulation_get_max_infected(), 24)
def test_max_infected_index(self):
"""
Tests index of maximum infected during simulation.
"""
G = nx.barabasi_albert_graph(100, 5)
network = contagion.ContactNetwork(
G,
fraction_infected = 0.25)
sim = contagion.Contagion(network, save_history = True)
self.assertGreater(sim.run_simulation_get_max_infected_index(), -1)
if __name__ == '__main__':
unittest.main()
| 31.541667
| 79
| 0.588838
| 687
| 6,056
| 5.004367
| 0.173217
| 0.028505
| 0.044793
| 0.069226
| 0.741419
| 0.700407
| 0.700407
| 0.700407
| 0.659104
| 0.605585
| 0
| 0.038359
| 0.315555
| 6,056
| 191
| 80
| 31.706806
| 0.791074
| 0.128633
| 0
| 0.705426
| 0
| 0
| 0.002022
| 0
| 0
| 0
| 0
| 0
| 0.108527
| 1
| 0.108527
| false
| 0
| 0.046512
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ad7d63f28cfc61172c28b5903dd114afd04f8eda
| 200
|
py
|
Python
|
filemanager/signals.py
|
code-assassin/django-filemanager
|
69950c786348bec619f060e3131f49eb7f2e58c5
|
[
"BSD-3-Clause"
] | 12
|
2016-04-02T06:02:20.000Z
|
2021-11-18T18:49:08.000Z
|
filemanager/signals.py
|
code-assassin/django-filemanager
|
69950c786348bec619f060e3131f49eb7f2e58c5
|
[
"BSD-3-Clause"
] | 3
|
2015-06-02T08:28:29.000Z
|
2016-01-08T16:09:24.000Z
|
filemanager/signals.py
|
code-assassin/django-filemanager
|
69950c786348bec619f060e3131f49eb7f2e58c5
|
[
"BSD-3-Clause"
] | 9
|
2015-04-21T05:07:20.000Z
|
2021-05-14T14:11:16.000Z
|
from django.dispatch import Signal
filemanager_pre_upload = Signal(providing_args=["filename", "path", "filepath"])
filemanager_post_upload = Signal(providing_args=["filename", "path", "filepath"])
| 33.333333
| 81
| 0.775
| 23
| 200
| 6.478261
| 0.608696
| 0.161074
| 0.281879
| 0.33557
| 0.604027
| 0.604027
| 0.604027
| 0
| 0
| 0
| 0
| 0
| 0.08
| 200
| 5
| 82
| 40
| 0.809783
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ad8c4d033aaf99350905356196be6ed9b4b0aebc
| 244
|
py
|
Python
|
mini_project/Classroom/views.py
|
op3ntrap/SMUMinipoject
|
7b5727faecc0454661baf4dba3149db05de2f3ef
|
[
"MIT"
] | null | null | null |
mini_project/Classroom/views.py
|
op3ntrap/SMUMinipoject
|
7b5727faecc0454661baf4dba3149db05de2f3ef
|
[
"MIT"
] | 6
|
2020-02-11T23:29:09.000Z
|
2021-06-10T18:51:47.000Z
|
mini_project/Classroom/views.py
|
op3ntrap/SMUMinipoject
|
7b5727faecc0454661baf4dba3149db05de2f3ef
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect, render_to_response
from django.http import HttpResponse
from django.contrib.auth import logout as auth_logout
from django.contrib.auth.decorators import login_required
# Create your views here.
| 27.111111
| 65
| 0.840164
| 35
| 244
| 5.742857
| 0.6
| 0.199005
| 0.169154
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114754
| 244
| 8
| 66
| 30.5
| 0.930556
| 0.094262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ad8fe935e29b7d655cc14b46dc579cdb4b2e37ff
| 113
|
py
|
Python
|
multi_task/config/__init__.py
|
mydkzgj/esm
|
2170d436af021188f233fa88a233959c61bd1f23
|
[
"MIT"
] | null | null | null |
multi_task/config/__init__.py
|
mydkzgj/esm
|
2170d436af021188f233fa88a233959c61bd1f23
|
[
"MIT"
] | null | null | null |
multi_task/config/__init__.py
|
mydkzgj/esm
|
2170d436af021188f233fa88a233959c61bd1f23
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
"""
@author: Jiayang Chen
@contact: yjcmydkzgj@gmail.com
"""
from .defaults import _C as cfg
| 14.125
| 31
| 0.699115
| 16
| 113
| 4.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.159292
| 113
| 7
| 32
| 16.142857
| 0.810526
| 0.619469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a8dd849fd9e6c30288642083c73d8528211ad3c4
| 199
|
py
|
Python
|
presentation/code/duck.py
|
jrahm/DuckTest
|
6a7ff76765d96d57764f5d88b94f676c7fbe1544
|
[
"BSD-2-Clause"
] | null | null | null |
presentation/code/duck.py
|
jrahm/DuckTest
|
6a7ff76765d96d57764f5d88b94f676c7fbe1544
|
[
"BSD-2-Clause"
] | null | null | null |
presentation/code/duck.py
|
jrahm/DuckTest
|
6a7ff76765d96d57764f5d88b94f676c7fbe1544
|
[
"BSD-2-Clause"
] | null | null | null |
class Duck:
def __init__(self):
print("Duck!")
def walk(self):
print("Waddle")
def quack(self):
print("Quack!")
def feathers(self):
print("Ruffle")
| 15.307692
| 23
| 0.517588
| 22
| 199
| 4.5
| 0.5
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.326633
| 199
| 12
| 24
| 16.583333
| 0.738806
| 0
| 0
| 0
| 0
| 0
| 0.115578
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0
| 0.555556
| 0.444444
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 5
|
d109549f1ad38c6e2afb6a58b737dbafd0424ec1
| 206
|
py
|
Python
|
monoport/lib/modeling/backbones/__init__.py
|
ArshdeepSahni/MonoPort
|
a67fdc02b4fb45b3cc187aa4ae34053574d0383c
|
[
"Unlicense"
] | 1
|
2021-01-15T09:42:29.000Z
|
2021-01-15T09:42:29.000Z
|
monoport/lib/modeling/backbones/__init__.py
|
ArshdeepSahni/MonoPort
|
a67fdc02b4fb45b3cc187aa4ae34053574d0383c
|
[
"Unlicense"
] | null | null | null |
monoport/lib/modeling/backbones/__init__.py
|
ArshdeepSahni/MonoPort
|
a67fdc02b4fb45b3cc187aa4ae34053574d0383c
|
[
"Unlicense"
] | null | null | null |
from .HGFilters import HGFilter, PIFuHGFilters
from .ResBlkFilters import ResnetFilter, PIFuResBlkFilters
from .Yolov4Filters import Yolov4Filters
from .HRNetFilters import HRNetV2_W18_small_v2_balance_last
| 51.5
| 59
| 0.88835
| 23
| 206
| 7.73913
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.082524
| 206
| 4
| 59
| 51.5
| 0.910053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d1386fb21a9d2cc73a2270c7779851ad0bff96d1
| 8,425
|
py
|
Python
|
LiquorStore/models.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
LiquorStore/models.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
LiquorStore/models.py
|
CPU-sangoma/PlentyPot
|
27e326f61e57746f5ca6701358d86c01b4a9ee31
|
[
"MIT"
] | null | null | null |
from django.db import models
from profiles.models import BusinessProfile
class LiquorHomePageModel(models.Model):
bannerImage = models.ImageField(verbose_name="upload a big banner Image for your Home Page",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine1 = models.ImageField(verbose_name="first pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine1des = models.TextField(verbose_name="price description for first wines/champagne pic", blank=False,null=True)
wine2 = models.ImageField(verbose_name="second pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine2des = models.TextField(verbose_name="price description for sec wines/champagne pic", blank=False,null=True)
wine3 = models.ImageField(verbose_name="third pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine3des = models.TextField(verbose_name="price description for third wines/champagne pic", blank=False,null=True)
wine4 = models.ImageField(verbose_name="fourth pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine4des = models.TextField(verbose_name="price description for fifth wines/champagne pic", blank=False,null=True)
wine5 = models.ImageField(verbose_name="fifth pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine5des = models.TextField(verbose_name="price description for fifth wines/champagne pic", blank=False,null=True)
wine6 = models.ImageField(verbose_name="sixth pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine6des = models.TextField(verbose_name="price description for sixth wines/champagne pic", blank=False,null=True)
wine7 = models.ImageField(verbose_name="seventh pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine7des = models.TextField(verbose_name="price description for seventh wines/champagne pic", blank=False,null=True)
wine8 = models.ImageField(verbose_name="8th pic under wines/champagne",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
wine8des = models.TextField(verbose_name="price description for eighth wines/champagne pic", blank=False,null=True)
company = models.OneToOneField(BusinessProfile,on_delete=models.CASCADE,related_name="liquorhome")
beer1 = models.ImageField(verbose_name="first pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer1des = models.TextField(verbose_name="price description for first beers/ciders pic", blank=False,null=True)
beer2 = models.ImageField(verbose_name="second pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer2des = models.TextField(verbose_name="price description for second beers/ciders pic ", blank=False,null=True)
beer3 = models.ImageField(verbose_name="third pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer3des = models.TextField(verbose_name="price description for third beers/ciders pic ", blank=False,null=True)
beer4 = models.ImageField(verbose_name="fourth pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer4des = models.TextField(verbose_name="price description for forth beers/ciders pic ", blank=False,null=True)
beer5 = models.ImageField(verbose_name="fifth pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer5des = models.TextField(verbose_name="price description for fifth beers/ciders pic", blank=False,null=True)
beer7 = models.ImageField(verbose_name="7th pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer7des = models.TextField(verbose_name="price description for seventh beers/ciders pic", blank=False,null=True)
beer8 = models.ImageField(verbose_name="8th pic under beers and cider",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
beer8des = models.TextField(verbose_name="price description for eighth beers/ciders pic", blank=False,null=True)
bottle1 = models.ImageField(verbose_name="first pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle1des = models.TextField(verbose_name="price description for first Liquor/Spirits pic", blank=False,null=True)
bottle2 = models.ImageField(verbose_name="second pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle2des = models.TextField(verbose_name="price description for second Liquor/Spirits pic", blank=False,null=True)
bottle3 = models.ImageField(verbose_name="third pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle3des = models.TextField(verbose_name="price description for third Liquor/Spirits pic", blank=False,null=True)
bottle4 = models.ImageField(verbose_name="fourth pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle4des = models.TextField(verbose_name="price description for forth Liquor/Spirits pic", blank=False,null=True)
bottle5 = models.ImageField(verbose_name="fifth pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle5des = models.TextField(verbose_name="price description for fifth Liquor/Spirits pic", blank=False,null=True)
bottle6 = models.ImageField(verbose_name="sixth pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle6des = models.TextField(verbose_name="price description for sixth Liquor/Spirits pic", blank=False,null=True)
bottle7 = models.ImageField(verbose_name="seventh pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle7des = models.TextField(verbose_name="price description for seventh Liquor/Spirits pic", blank=False,null=True)
bottle8 = models.ImageField(verbose_name="8th pic under Liquor/Spirits",upload_to="LiquorStore/LiquorHome/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
bottle8des = models.TextField(verbose_name="price description for eighth Liquor/Spirits pic", blank=False,null=True)
status = models.BooleanField(default=True)
PageComplete = models.BooleanField(default=False)
def __str__(self):
return f'{self.company}'
class LiquorSalesModel(models.Model):
company = models.OneToOneField(BusinessProfile, on_delete=models.CASCADE,related_name="liquorsales")
status = models.BooleanField(default=True)
PageComplete = models.BooleanField(default=False)
class ActualLiqSales(models.Model):
company = models.ForeignKey(BusinessProfile, on_delete=models.CASCADE,related_name="ActualLiquor")
salepic = models.ImageField(verbose_name="1st sale item",upload_to="LiquorStore/LiquorSale/", height_field=None, width_field=None, max_length=None,null=True,blank=False)
saledes = models.TextField(verbose_name="sale description for item1", blank=False,null=True)
def __str__(self):
return f'{self.company}'
| 102.743902
| 208
| 0.794659
| 1,154
| 8,425
| 5.660312
| 0.116118
| 0.068892
| 0.088028
| 0.103337
| 0.87722
| 0.87722
| 0.87722
| 0.638089
| 0.483313
| 0.483313
| 0
| 0.006792
| 0.091276
| 8,425
| 81
| 209
| 104.012346
| 0.846395
| 0
| 0
| 0.123077
| 0
| 0
| 0.295549
| 0.068249
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030769
| false
| 0
| 0.030769
| 0.030769
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0f02080bc075cbc4491b688fae5a41484c653235
| 220
|
py
|
Python
|
gallery/templatetags/spaces.py
|
mattmc318/coolwater-creations
|
a9ef97c39febaf28e3291d83a0a5022238204f8f
|
[
"MIT"
] | null | null | null |
gallery/templatetags/spaces.py
|
mattmc318/coolwater-creations
|
a9ef97c39febaf28e3291d83a0a5022238204f8f
|
[
"MIT"
] | 3
|
2020-06-09T01:29:36.000Z
|
2020-07-21T01:53:16.000Z
|
gallery/templatetags/spaces.py
|
mattmc318/coolwater-creations
|
a9ef97c39febaf28e3291d83a0a5022238204f8f
|
[
"MIT"
] | null | null | null |
import re
from django.template import Library
from django.template.defaultfilters import stringfilter
register = Library()
@stringfilter
def spaces(value):
return re.sub('\s+', ' ', value)
register.filter(spaces)
| 18.333333
| 55
| 0.759091
| 27
| 220
| 6.185185
| 0.592593
| 0.11976
| 0.215569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131818
| 220
| 11
| 56
| 20
| 0.874346
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.375
| 0.125
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
0f5cc8c44b3863951472058ba105f53581bcbad9
| 27
|
py
|
Python
|
py/desitest/_version.py
|
sbailey/desitest
|
9f39999bc71b15ac8fc9bc864c12def8ee210218
|
[
"BSD-3-Clause"
] | null | null | null |
py/desitest/_version.py
|
sbailey/desitest
|
9f39999bc71b15ac8fc9bc864c12def8ee210218
|
[
"BSD-3-Clause"
] | 33
|
2018-01-10T22:24:06.000Z
|
2022-02-02T00:59:09.000Z
|
py/desitest/_version.py
|
sbailey/desitest
|
9f39999bc71b15ac8fc9bc864c12def8ee210218
|
[
"BSD-3-Clause"
] | 1
|
2019-05-17T15:37:08.000Z
|
2019-05-17T15:37:08.000Z
|
__version__ = '20.4.dev58'
| 13.5
| 26
| 0.703704
| 4
| 27
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 0.111111
| 27
| 1
| 27
| 27
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7e71df8f88193c74251ea533da63e0ae86edcc3f
| 156
|
py
|
Python
|
django_project/bucket_list/filters.py
|
ugauniyal/Bucket-List-App
|
03ccff11d2ac8be1b875fb9f1d7da54bf9006f6f
|
[
"MIT"
] | null | null | null |
django_project/bucket_list/filters.py
|
ugauniyal/Bucket-List-App
|
03ccff11d2ac8be1b875fb9f1d7da54bf9006f6f
|
[
"MIT"
] | null | null | null |
django_project/bucket_list/filters.py
|
ugauniyal/Bucket-List-App
|
03ccff11d2ac8be1b875fb9f1d7da54bf9006f6f
|
[
"MIT"
] | null | null | null |
import django_filters
from .models import *
class TaskFilter(django_filters.FilterSet):
class Meta:
model = Task
fields = ('category',)
| 22.285714
| 43
| 0.679487
| 17
| 156
| 6.117647
| 0.764706
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 156
| 7
| 44
| 22.285714
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.050955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7eab9aede48823942fce3d6fb1226a64b3a66e27
| 2,157
|
py
|
Python
|
test/test_cannon.py
|
PetrSpacek/angrylikegame-python-pyqt5
|
2ddc73fb5b4398117c3a83593632408b4b44e246
|
[
"MIT"
] | 2
|
2020-01-17T10:43:13.000Z
|
2021-11-21T14:56:53.000Z
|
test/test_cannon.py
|
PetrSpacek/angrylikegame-python-pyqt5
|
2ddc73fb5b4398117c3a83593632408b4b44e246
|
[
"MIT"
] | null | null | null |
test/test_cannon.py
|
PetrSpacek/angrylikegame-python-pyqt5
|
2ddc73fb5b4398117c3a83593632408b4b44e246
|
[
"MIT"
] | null | null | null |
from model.game_info import GameInfoA
from utils.game_object_factory import GameObjectFactoryA
from utils.geometry import Position
def test_shooting_mode_toggle(qtbot):
factory = GameObjectFactoryA()
cannon = factory.create_cannon(Position(0,0))
orig_shooting_mode = cannon.active_shooting_mode
# Switch to next shooting mode
cannon.next_shooting_mode()
assert cannon.active_shooting_mode != orig_shooting_mode
# Switch back to original shooting mode
cannon.next_shooting_mode()
assert cannon.active_shooting_mode == orig_shooting_mode
def test_simple_shooting_mode(qtbot):
factory = GameObjectFactoryA()
cannon = factory.create_cannon(Position(0, 0))
cannon.use_simple_shooting_mode()
damage = cannon.active_shooting_mode.get_damage()
angle = 0
gravity = 10
missile_speed = 5
game_info = GameInfoA(cannon.active_shooting_mode, angle, missile_speed, gravity)
angle = cannon.angle
missiles = cannon.shoot(game_info)
assert len(missiles) == 1
assert missiles[0].get_position() == Position(0, 0)
assert missiles[0].angle == angle
assert missiles[0].step_size == missile_speed
assert missiles[0].damage == damage
assert missiles[0].gravity == gravity
def test_double_shooting_mode(qtbot):
factory = GameObjectFactoryA()
cannon = factory.create_cannon(Position(0,0))
cannon.use_double_shooting_mode()
damage = cannon.active_shooting_mode.get_damage()
angle = 0
gravity = 10
missile_speed = 5
game_info = GameInfoA(cannon.active_shooting_mode, angle, missile_speed, gravity)
angle = cannon.angle
missiles = cannon.shoot(game_info)
assert len(missiles) == 2
assert missiles[0].get_position() == Position(0, 0)
assert missiles[0].angle == angle -5
assert missiles[0].step_size == missile_speed
assert missiles[0].damage == damage
assert missiles[0].gravity == gravity
assert missiles[1].get_position() == Position(0, 0)
assert missiles[1].angle == angle +5
assert missiles[1].step_size == missile_speed
assert missiles[1].damage == damage
assert missiles[1].gravity == gravity
| 37.842105
| 85
| 0.731108
| 281
| 2,157
| 5.384342
| 0.16726
| 0.150694
| 0.099141
| 0.111038
| 0.79577
| 0.778586
| 0.756114
| 0.732981
| 0.732981
| 0.732981
| 0
| 0.021873
| 0.173389
| 2,157
| 57
| 86
| 37.842105
| 0.826697
| 0.030598
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.38
| 1
| 0.06
| false
| 0
| 0.06
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7eaff1377f5a9d71cc3b2f583c44ce038fb89a35
| 69
|
py
|
Python
|
example/test/core/geometry/simple/usphere/__init__.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 2
|
2020-09-04T12:27:15.000Z
|
2022-01-17T14:49:40.000Z
|
example/test/core/geometry/simple/usphere/__init__.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | null | null | null |
example/test/core/geometry/simple/usphere/__init__.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 1
|
2020-09-04T12:27:52.000Z
|
2020-09-04T12:27:52.000Z
|
#__name__ = "box"
#__package__ = "box"
#__all__ = ['unit']
| 13.8
| 23
| 0.521739
| 6
| 69
| 4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.275362
| 69
| 4
| 24
| 17.25
| 0.48
| 0.84058
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0e324adfb68a15219fdda5f4571b74612c114fa6
| 62
|
py
|
Python
|
flaskr/config.py
|
Mishuni/Flask_Practice
|
ac3c94bd7eb185ee655892b50a4d4881def9a373
|
[
"MIT"
] | null | null | null |
flaskr/config.py
|
Mishuni/Flask_Practice
|
ac3c94bd7eb185ee655892b50a4d4881def9a373
|
[
"MIT"
] | null | null | null |
flaskr/config.py
|
Mishuni/Flask_Practice
|
ac3c94bd7eb185ee655892b50a4d4881def9a373
|
[
"MIT"
] | null | null | null |
SECRET_KEY = b"'@,\xf6\x9d\xd3\xfa\x80<r\xb9\xa3\x91\x19\xd2h"
| 62
| 62
| 0.693548
| 14
| 62
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.032258
| 62
| 1
| 62
| 62
| 0.5
| 0
| 0
| 0
| 0
| 1
| 0.730159
| 0.730159
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0e5119adc054321d769fbd53fbc56983fef684ba
| 126
|
py
|
Python
|
backend/users/models.py
|
iron-claw-972/ScoutingApp2022
|
4bc12bccb45225717b1ed94e7dcf593c7a8e0c83
|
[
"MIT"
] | null | null | null |
backend/users/models.py
|
iron-claw-972/ScoutingApp2022
|
4bc12bccb45225717b1ed94e7dcf593c7a8e0c83
|
[
"MIT"
] | 8
|
2022-01-10T22:01:11.000Z
|
2022-01-25T02:35:15.000Z
|
backend/users/models.py
|
iron-claw-972/ScoutingApp2022
|
4bc12bccb45225717b1ed94e7dcf593c7a8e0c83
|
[
"MIT"
] | 2
|
2022-02-05T00:06:39.000Z
|
2022-02-19T20:18:56.000Z
|
from django.db import models
from django.contrib.auth.models import AbstractUser
class CustomUser(AbstractUser):
pass
| 25.2
| 52
| 0.793651
| 16
| 126
| 6.25
| 0.6875
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150794
| 126
| 5
| 53
| 25.2
| 0.934579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
0e598afac02e98ebd703ac728c779f86dd114b80
| 48
|
py
|
Python
|
Tools/MagicPanels/panelBackOut.py
|
dprojects/Woodworking
|
24420b248e3343a387ae1328fc6dcbf97e433242
|
[
"MIT"
] | 6
|
2022-02-25T19:11:40.000Z
|
2022-03-24T22:03:47.000Z
|
Tools/MagicPanels/panelBackOut.py
|
dprojects/Woodworking
|
24420b248e3343a387ae1328fc6dcbf97e433242
|
[
"MIT"
] | 1
|
2022-03-13T09:35:22.000Z
|
2022-03-13T13:30:36.000Z
|
Tools/MagicPanels/panelBackOut.py
|
dprojects/Woodworking
|
24420b248e3343a387ae1328fc6dcbf97e433242
|
[
"MIT"
] | 3
|
2022-02-26T15:01:08.000Z
|
2022-03-20T21:30:04.000Z
|
import MagicPanels
MagicPanels.panelBackOut()
| 9.6
| 26
| 0.833333
| 4
| 48
| 10
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 4
| 27
| 12
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0e95632a162bf60bdcb0cb5221fbb686ad20a62a
| 91
|
py
|
Python
|
precis/templating/__init__.py
|
rukmal/precis
|
0329a0b4a67d996a2b573e3870a509ae92f2c051
|
[
"MIT"
] | null | null | null |
precis/templating/__init__.py
|
rukmal/precis
|
0329a0b4a67d996a2b573e3870a509ae92f2c051
|
[
"MIT"
] | 12
|
2019-04-09T03:42:52.000Z
|
2021-11-16T00:33:36.000Z
|
precis/templating/__init__.py
|
rukmal/precis
|
0329a0b4a67d996a2b573e3870a509ae92f2c051
|
[
"MIT"
] | null | null | null |
from . import util
from .driver import TemplateDriver
from .template import PrecisTemplate
| 22.75
| 36
| 0.835165
| 11
| 91
| 6.909091
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131868
| 91
| 3
| 37
| 30.333333
| 0.962025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0e9cd3f34f4a868090a05ca7564d5c3a9f860156
| 103
|
py
|
Python
|
markov.py
|
jshap70/jarbot
|
9004cfa262d492281840f50632ee7bd40420739b
|
[
"MIT"
] | null | null | null |
markov.py
|
jshap70/jarbot
|
9004cfa262d492281840f50632ee7bd40420739b
|
[
"MIT"
] | null | null | null |
markov.py
|
jshap70/jarbot
|
9004cfa262d492281840f50632ee7bd40420739b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Interface module to markov framework
"""
# global imports
import markovify
| 12.875
| 36
| 0.737864
| 13
| 103
| 5.846154
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.145631
| 103
| 7
| 37
| 14.714286
| 0.852273
| 0.708738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7eba18ac0cd019d89fefcb1bea3af02bf84ebeb3
| 812
|
py
|
Python
|
util/common.py
|
agibli/sansapp
|
e87dffd81ec709172c25a7729fdf713feaac3a78
|
[
"MIT"
] | 11
|
2016-05-10T06:22:14.000Z
|
2022-02-01T15:53:54.000Z
|
util/common.py
|
agibli/sansapp
|
e87dffd81ec709172c25a7729fdf713feaac3a78
|
[
"MIT"
] | 2
|
2015-06-01T05:05:52.000Z
|
2016-04-27T05:17:16.000Z
|
util/common.py
|
agibli/sansapp
|
e87dffd81ec709172c25a7729fdf713feaac3a78
|
[
"MIT"
] | 5
|
2016-04-26T08:20:47.000Z
|
2021-08-09T07:07:54.000Z
|
import struct
def be_word4(buf):
return struct.unpack(">L", buf)[0]
def le_word4(buf):
return struct.unpack("<L", buf)[0]
def be_word8(buf):
return struct.unpack(">Q", buf)[0]
def le_word8(buf):
return struct.unpack("<Q", buf)[0]
def be_read4(stream):
return struct.unpack(">L", stream.read(4))[0]
def le_read4(stream):
return struct.unpack("<L", stream.read(4))[0]
def be_read8(stream):
return struct.unpack(">Q", stream.read(8))[0]
def le_read8(stream):
return struct.unpack("<Q", stream.read(8))[0]
def align(size, stride):
return stride * int(1 + ((size - 1) / stride))
def read_null_terminated(stream):
result = ""
next = stream.read(1)
while stream and next != '\0':
result += next
next = stream.read(1)
return result
| 17.276596
| 50
| 0.619458
| 124
| 812
| 3.975806
| 0.241935
| 0.194726
| 0.292089
| 0.170385
| 0.640974
| 0.640974
| 0.640974
| 0.640974
| 0.640974
| 0.365112
| 0
| 0.03864
| 0.203202
| 812
| 46
| 51
| 17.652174
| 0.723338
| 0
| 0
| 0.076923
| 0
| 0
| 0.022167
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.384615
| false
| 0
| 0.038462
| 0.346154
| 0.807692
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7d49a13b89e511282e7692c8272b01862de1fb4c
| 36
|
py
|
Python
|
lib/ancientsolutions/__init__.py
|
caoimhechaos/py-ancientsolutions-crypttools
|
6ea317b2e291113524eb8b4aae74c0077550e36c
|
[
"BSD-2-Clause"
] | null | null | null |
lib/ancientsolutions/__init__.py
|
caoimhechaos/py-ancientsolutions-crypttools
|
6ea317b2e291113524eb8b4aae74c0077550e36c
|
[
"BSD-2-Clause"
] | null | null | null |
lib/ancientsolutions/__init__.py
|
caoimhechaos/py-ancientsolutions-crypttools
|
6ea317b2e291113524eb8b4aae74c0077550e36c
|
[
"BSD-2-Clause"
] | null | null | null |
# __init__.py for ancientsolutions.
| 18
| 35
| 0.805556
| 4
| 36
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.78125
| 0.916667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc02f7fb94c8be0a06bb883519d4dc703782d507
| 10,436
|
py
|
Python
|
gitinsights/tests/unit/test_ado_services.py
|
nmiodice/git-insights
|
5667615a152dc5b1c6b2481fe9d6ca6e3de082fa
|
[
"MIT"
] | null | null | null |
gitinsights/tests/unit/test_ado_services.py
|
nmiodice/git-insights
|
5667615a152dc5b1c6b2481fe9d6ca6e3de082fa
|
[
"MIT"
] | null | null | null |
gitinsights/tests/unit/test_ado_services.py
|
nmiodice/git-insights
|
5667615a152dc5b1c6b2481fe9d6ca6e3de082fa
|
[
"MIT"
] | null | null | null |
import json
import os
from unittest import TestCase
from unittest.mock import Mock
from unittest.mock import patch
import pandas as pd
from gitinsights.mods.ado_client import AzureDevopsInsights
def loadMockFile(filePath: str):
with open(os.path.join(os.path.dirname(__file__), filePath)) as f:
return json.load(f)
def aggregateDataframe(df: pd.DataFrame) -> pd.DataFrame:
return df.groupby(['week']) \
.agg(
{
'prs_merged': 'sum',
'prs_submitted': 'sum',
'pr_completion_days': 'mean',
'pr_comments': 'sum',
'prs_reviewed': 'sum',
'pr_commits_pushed': 'sum',
'commit_change_count_edits': 'sum',
'commit_change_count_deletes': 'sum',
'commit_change_count_additions': 'sum',
'user_stories_assigned': 'sum',
'user_stories_completed': 'sum',
'user_story_points_assigned': 'sum',
'user_story_completion_days': 'mean',
'user_stories_created': 'sum'
})
class Test_ADOPrStatService(TestCase):
def setUp(self):
self.mockedRepoPrResponse = loadMockFile("./data/prStatsByProject.json")
self.mockedPrThreadsResponse = loadMockFile("./data/prThreads.json")
self.mockedPrCommitsResponse = loadMockFile("./data/prCommits.json")
self.mockedRepoCommitsResponse = loadMockFile("./data/repoCommits.json")
self.mockedWorkitemListResponse = loadMockFile("./data/workitemList.json")
self.mockedWorkitemDetailsResponse = loadMockFile("./data/workitemDetails.json")
@patch('gitinsights.mods.repo_insights_base.requests.get')
def test_repo_pr_stats(self, mock_get):
# Configuring the mock to return a response with an OK status code. Also, the mock should have
# a `json()` method that returns a list of pr stats.
mock_get.return_value = Mock(ok=True)
mock_get.return_value.json.return_value = self.mockedRepoPrResponse
client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
response = client.invokePRsByProjectAPICall("", "repo1")
self.assertEqual(len(response), 4)
@patch('gitinsights.mods.repo_insights_base.requests.get')
def test_repo_pr_threads_stats(self, mock_get):
# Configuring the mock to return a response with an OK status code. Also, the mock should have
# a `json()` method that returns a list of pr threads.
mock_get.return_value = Mock(ok=True)
mock_get.return_value.json.return_value = self.mockedPrThreadsResponse
client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
response = client.invokePRCommentThreadsAPICall("", "repo1", "1")
self.assertEqual(len(response), 8)
@patch.object(AzureDevopsInsights, 'invokeCommitsByRepoAPICall')
def test_repo_commits_stats(self, commitsByRepoMock):
emptyResponse = {"value": []}
commitsByRepoMock.side_effect = [self.mockedRepoCommitsResponse['value'], emptyResponse['value']]
client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
response = client.repoCommits("", "repo1")
self.assertEqual(len(response), 8)
self.assertEqual(response['3104bd0b0accbc74278fe6880e53215f6b93a5cd']['Add'], 1)
self.assertEqual(response['9991b4f66def4c0a9ad8f9f27043ece7eddcf1c7']['Delete'], 1)
@patch('gitinsights.mods.repo_insights_base.RepoInsightsClient.invokeAPICall')
def test_invoke_workitem_api(self, invokeAPICallMock):
invokeAPICallMock.side_effect = [self.mockedWorkitemListResponse['workItems'], self.mockedWorkitemDetailsResponse['value']]
client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
response = client.invokeWorkitemsAPICall("", "team-buffalo")
self.assertEqual(len(response), 3)
@patch.object(AzureDevopsInsights, 'invokePRCommentThreadsAPICall')
@patch.object(AzureDevopsInsights, 'invokePRsByProjectAPICall')
@patch.object(AzureDevopsInsights, 'invokePRCommitsAPICall')
@patch.object(AzureDevopsInsights, 'invokeCommitsByRepoAPICall')
@patch.object(AzureDevopsInsights, 'invokeWorkitemsAPICall')
def test_single_pr_stats_dataframe(self, workitemsMock, commitsByRepoMock, prCommits, prsByProject, prThreads):
emptyResponse = {"value": []}
commitsByRepoMock.side_effect = [self.mockedRepoCommitsResponse['value'], emptyResponse['value']]
prThreads.return_value = self.mockedPrThreadsResponse['value']
prsByProject.return_value = self.mockedRepoPrResponse['value']
prCommits.return_value = self.mockedPrCommitsResponse['value']
workitemsMock.return_value = self.mockedWorkitemDetailsResponse['value']
expectedDataframeSize = 54
client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
dataframe = client.collectPullRequestActivity("11c")
self.assertEqual(len(dataframe), expectedDataframeSize)
agg = aggregateDataframe(dataframe)
self.assertEqual(len(agg), 1)
self.assertEqual(agg.loc['44', 'prs_submitted'], 4)
self.assertEqual(agg.loc['44', 'prs_merged'], 1)
self.assertEqual(agg.loc['44', 'pr_completion_days'], 2)
self.assertEqual(agg.loc['44', 'pr_comments'], 12)
self.assertEqual(agg.loc['44', 'prs_reviewed'], 1)
self.assertEqual(agg.loc['44', 'pr_commits_pushed'], 32)
self.assertEqual(agg.loc['44', 'commit_change_count_additions'], 20)
self.assertEqual(agg.loc['44', 'commit_change_count_edits'], 0)
self.assertEqual(agg.loc['44', 'commit_change_count_deletes'], 12)
self.assertEqual(agg.loc['44', 'user_stories_assigned'], 2)
self.assertEqual(agg.loc['44', 'user_stories_completed'], 1)
self.assertEqual(agg.loc['44', 'user_story_points_assigned'], 12)
self.assertEqual(agg.loc['44', 'user_story_completion_days'], 1)
self.assertEqual(agg.loc['44', 'user_stories_created'], 3)
@patch.object(AzureDevopsInsights, 'invokePRCommentThreadsAPICall')
@patch.object(AzureDevopsInsights, 'invokePRsByProjectAPICall')
@patch.object(AzureDevopsInsights, 'invokePRCommitsAPICall')
@patch.object(AzureDevopsInsights, 'invokeCommitsByRepoAPICall')
@patch.object(AzureDevopsInsights, 'invokeWorkitemsAPICall')
def test_multiple_repo_pr_stats_dataframe(self, workitemsMock, commitsByRepoMock, prCommits, prsByProject, prThreads):
emptyResponse = {"value": []}
commitsByRepoMock.side_effect = [self.mockedRepoCommitsResponse['value'], emptyResponse['value'], self.mockedRepoCommitsResponse['value'], emptyResponse['value']]
prThreads.return_value = self.mockedPrThreadsResponse['value']
prsByProject.return_value = self.mockedRepoPrResponse['value']
prCommits.return_value = self.mockedPrCommitsResponse['value']
workitemsMock.return_value = self.mockedWorkitemDetailsResponse['value']
repos = ["repo1", "repo2"]
client = AzureDevopsInsights("myorg", "my-super-project", repos, "team-buffalo")
dataframe = client.collectPullRequestActivity("11c")
self.assertEqual(len(dataframe), 103)
agg = aggregateDataframe(dataframe)
self.assertEqual(len(agg), 1)
self.assertEqual(agg.loc['44', 'prs_submitted'], 8)
self.assertEqual(agg.loc['44', 'prs_merged'], 2)
self.assertEqual(agg.loc['44', 'pr_completion_days'], 2)
self.assertEqual(agg.loc['44', 'pr_comments'], 24)
self.assertEqual(agg.loc['44', 'prs_reviewed'], 2)
self.assertEqual(agg.loc['44', 'pr_commits_pushed'], 64)
self.assertEqual(agg.loc['44', 'commit_change_count_additions'], 40)
self.assertEqual(agg.loc['44', 'commit_change_count_edits'], 0)
self.assertEqual(agg.loc['44', 'commit_change_count_deletes'], 24)
self.assertEqual(agg.loc['44', 'user_stories_assigned'], 2)
self.assertEqual(agg.loc['44', 'user_stories_completed'], 1)
self.assertEqual(agg.loc['44', 'user_story_points_assigned'], 12)
self.assertEqual(agg.loc['44', 'user_story_completion_days'], 1)
self.assertEqual(agg.loc['44', 'user_stories_created'], 3)
@patch.object(AzureDevopsInsights, 'invokePRCommentThreadsAPICall')
@patch.object(AzureDevopsInsights, 'invokePRsByProjectAPICall')
@patch.object(AzureDevopsInsights, 'invokePRCommitsAPICall')
@patch.object(AzureDevopsInsights, 'invokeCommitsByRepoAPICall')
@patch.object(AzureDevopsInsights, 'invokeWorkitemsAPICall')
def test_pr_activity_aggregation(self, workitemsMock, commitsByRepoMock, prCommits, prsByProject, prThreads):
    # Mock arguments arrive bottom-decorator-first: workitemsMock is the
    # innermost @patch.object above, prThreads the outermost.
    emptyResponse = {"value": []}
    # side_effect supplies one page of commits followed by an empty page --
    # presumably the empty page terminates the client's pagination; confirm.
    commitsByRepoMock.side_effect = [self.mockedRepoCommitsResponse['value'], emptyResponse['value']]
    prThreads.return_value = self.mockedPrThreadsResponse['value']
    prsByProject.return_value = self.mockedRepoPrResponse['value']
    prCommits.return_value = self.mockedPrCommitsResponse['value']
    workitemsMock.return_value = self.mockedWorkitemDetailsResponse['value']
    # Single repo, weekly aggregation starting from sprint/label "11a".
    client = AzureDevopsInsights("myorg", "my-super-project", ["repo1"], "team-buffalo")
    agg = client.aggregatePullRequestActivity(['week'], "11a")
    # One aggregated row, keyed by author id '44'; the expected values pin
    # how the canned responses above roll up per metric.
    self.assertEqual(len(agg), 1)
    self.assertEqual(agg.loc['44', 'prs_submitted'], 4)
    self.assertEqual(agg.loc['44', 'prs_merged'], 1)
    self.assertEqual(agg.loc['44', 'pr_completion_days'], 2)
    self.assertEqual(agg.loc['44', 'pr_comments'], 12)
    self.assertEqual(agg.loc['44', 'prs_reviewed'], 1)
    self.assertEqual(agg.loc['44', 'pr_commits_pushed'], 32)
    self.assertEqual(agg.loc['44', 'commit_change_count_additions'], 20)
    self.assertEqual(agg.loc['44', 'commit_change_count_edits'], 0)
    self.assertEqual(agg.loc['44', 'commit_change_count_deletes'], 12)
    self.assertEqual(agg.loc['44', 'user_stories_assigned'], 2)
    self.assertEqual(agg.loc['44', 'user_stories_completed'], 1)
    self.assertEqual(agg.loc['44', 'user_story_points_assigned'], 12)
    self.assertEqual(agg.loc['44', 'user_story_completion_days'], 1)
    self.assertEqual(agg.loc['44', 'user_stories_created'], 3)
| 54.638743
| 170
| 0.694423
| 1,071
| 10,436
| 6.596639
| 0.152194
| 0.112527
| 0.107006
| 0.124841
| 0.752866
| 0.740127
| 0.728238
| 0.718613
| 0.713234
| 0.699363
| 0
| 0.025104
| 0.171713
| 10,436
| 190
| 171
| 54.926316
| 0.792226
| 0.027693
| 0
| 0.559748
| 0
| 0
| 0.245612
| 0.146914
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.062893
| false
| 0
| 0.044025
| 0.006289
| 0.125786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc1326f1d890abe2d582bc9bfbb791f589100409
| 553
|
py
|
Python
|
services/match.py
|
nayfaan/Wordle_Analyzer
|
1efba591a0c6243c06f97101bdf2827e39b910f3
|
[
"MIT"
] | null | null | null |
services/match.py
|
nayfaan/Wordle_Analyzer
|
1efba591a0c6243c06f97101bdf2827e39b910f3
|
[
"MIT"
] | null | null | null |
services/match.py
|
nayfaan/Wordle_Analyzer
|
1efba591a0c6243c06f97101bdf2827e39b910f3
|
[
"MIT"
] | null | null | null |
class match:
    """A combination of words scored by how many distinct letters it covers.

    Attributes:
        comb: the iterable of words making up the combination.
        cover: number of distinct characters appearing across all words.
    """

    def __init__(self, comb):
        self.comb = comb
        self.cover = self.__cover__()

    # Name kept for backward compatibility even though it is not a real dunder.
    def __cover__(self):
        """Return the number of distinct characters used by the combination."""
        # A single set over the concatenation counts each letter once.
        return len(set("".join(self.comb)))

    def __str__(self):
        return str(self.cover) + ': ' + ', '.join(self.comb)
# Module is import-only; nothing to run when executed as a script.
if __name__ == "__main__":
    pass
| 24.043478
| 60
| 0.549729
| 65
| 553
| 4.123077
| 0.353846
| 0.238806
| 0.134328
| 0.179104
| 0.253731
| 0.253731
| 0.253731
| 0.253731
| 0
| 0
| 0
| 0.005208
| 0.305606
| 553
| 23
| 61
| 24.043478
| 0.692708
| 0
| 0
| 0
| 0
| 0
| 0.034682
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.090909
| 0
| 0.090909
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
70da3268298b169368477d1e02d18ba27f175180
| 127
|
py
|
Python
|
katas/kyu_6/simple_sentences.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_6/simple_sentences.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_6/simple_sentences.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
def make_sentences(parts):
    """Assemble sentence fragments into one sentence ending with a period.

    Alphanumeric parts are joined with a leading space; punctuation parts
    attach directly to the previous fragment. Leading/trailing spaces and
    periods are stripped before the final '.' is appended.
    """
    pieces = []
    for part in parts:
        if part.isalnum():
            pieces.append(' ' + part)
        else:
            pieces.append(part)
    sentence = ''.join(pieces).strip(' .')
    return '{}.'.format(sentence)
| 31.75
| 74
| 0.551181
| 18
| 127
| 3.833333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228346
| 127
| 3
| 75
| 42.333333
| 0.704082
| 0
| 0
| 0
| 0
| 0
| 0.047244
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
cb24e33b93aef758d0d7d5d66724e37d67f23b56
| 106
|
py
|
Python
|
projects/ISTR/istr/__init__.py
|
braindevices/ISTR
|
520b0d410ba8be5dbf53971d962b0bfcf072a7c0
|
[
"Apache-2.0"
] | 171
|
2021-05-04T02:44:01.000Z
|
2022-03-28T09:58:29.000Z
|
projects/ISTR/istr/__init__.py
|
braindevices/ISTR
|
520b0d410ba8be5dbf53971d962b0bfcf072a7c0
|
[
"Apache-2.0"
] | 10
|
2021-05-09T16:04:43.000Z
|
2021-12-03T01:21:44.000Z
|
projects/ISTR/istr/__init__.py
|
braindevices/ISTR
|
520b0d410ba8be5dbf53971d962b0bfcf072a7c0
|
[
"Apache-2.0"
] | 21
|
2021-05-04T02:47:57.000Z
|
2022-01-06T07:34:24.000Z
|
from .config import add_ISTR_config
from .inseg import ISTR
from .dataset_mapper import ISTRDatasetMapper
| 26.5
| 45
| 0.858491
| 15
| 106
| 5.866667
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113208
| 106
| 3
| 46
| 35.333333
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cb27794b61c2afee8f678401b1e88cc23b563de6
| 42
|
py
|
Python
|
grortir/test/externals/__init__.py
|
wojtekPi/grortir
|
0ef8b495527a4f3861e5df5db756d0ee3ed4aa6f
|
[
"MIT"
] | null | null | null |
grortir/test/externals/__init__.py
|
wojtekPi/grortir
|
0ef8b495527a4f3861e5df5db756d0ee3ed4aa6f
|
[
"MIT"
] | null | null | null |
grortir/test/externals/__init__.py
|
wojtekPi/grortir
|
0ef8b495527a4f3861e5df5db756d0ee3ed4aa6f
|
[
"MIT"
] | null | null | null |
"""Package for test externals package."""
| 21
| 41
| 0.714286
| 5
| 42
| 6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 1
| 42
| 42
| 0.810811
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cb6c3a0c44e201c452bf3d59c3d8d0e67163cd7d
| 59
|
py
|
Python
|
tests/test.py
|
rokujyouhitoma/cysqlite3
|
56d7a1974724a1dbfca0b75660db7ed46060ed89
|
[
"MIT"
] | null | null | null |
tests/test.py
|
rokujyouhitoma/cysqlite3
|
56d7a1974724a1dbfca0b75660db7ed46060ed89
|
[
"MIT"
] | null | null | null |
tests/test.py
|
rokujyouhitoma/cysqlite3
|
56d7a1974724a1dbfca0b75660db7ed46060ed89
|
[
"MIT"
] | null | null | null |
# Enable on-the-fly compilation of .pyx modules, then import the Cython
# test module (imported for its side effects / test registration).
# PEP 8: one import per statement -- the original fused the import and the
# install() call on a single line.
import pyximport
pyximport.install()
import test_sqlite3  # noqa: F401
| 14.75
| 37
| 0.830508
| 7
| 59
| 6.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.101695
| 59
| 3
| 38
| 19.666667
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cbcf058bc586d539f01f6b2b3270fd90c02e4801
| 3,514
|
py
|
Python
|
python/fbs/SymmetricStateMatrix.py
|
oliverlee/biketest
|
074b0b03455021c52a13efe583b1816bc5daad4e
|
[
"BSD-2-Clause"
] | 3
|
2016-12-14T01:22:27.000Z
|
2020-04-07T05:15:04.000Z
|
python/fbs/SymmetricStateMatrix.py
|
oliverlee/biketest
|
074b0b03455021c52a13efe583b1816bc5daad4e
|
[
"BSD-2-Clause"
] | 7
|
2017-01-12T15:20:57.000Z
|
2017-07-02T16:09:37.000Z
|
python/fbs/SymmetricStateMatrix.py
|
oliverlee/biketest
|
074b0b03455021c52a13efe583b1816bc5daad4e
|
[
"BSD-2-Clause"
] | 1
|
2020-04-07T05:15:05.000Z
|
2020-04-07T05:15:05.000Z
|
# automatically generated, do not modify
# namespace: fbs
import flatbuffers
class SymmetricStateMatrix(object):
    """FlatBuffers struct accessor for 15 inline float64 fields (Q00..Q44),
    the upper triangle of a symmetric 5x5 matrix, at fixed 8-byte offsets
    (0..112, 120 bytes total).

    Generated code (see file header) -- do not edit by hand.
    """
    __slots__ = ['_tab']
    # SymmetricStateMatrix
    def Init(self, buf, pos):
        # Attach this accessor to buffer `buf` at struct position `pos`.
        self._tab = flatbuffers.table.Table(buf, pos)
    # SymmetricStateMatrix
    def Q00(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0))
    # SymmetricStateMatrix
    def Q01(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(8))
    # SymmetricStateMatrix
    def Q02(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(16))
    # SymmetricStateMatrix
    def Q03(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(24))
    # SymmetricStateMatrix
    def Q04(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(32))
    # SymmetricStateMatrix
    def Q11(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(40))
    # SymmetricStateMatrix
    def Q12(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(48))
    # SymmetricStateMatrix
    def Q13(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(56))
    # SymmetricStateMatrix
    def Q14(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(64))
    # SymmetricStateMatrix
    def Q22(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(72))
    # SymmetricStateMatrix
    def Q23(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(80))
    # SymmetricStateMatrix
    def Q24(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(88))
    # SymmetricStateMatrix
    def Q33(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(96))
    # SymmetricStateMatrix
    def Q34(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(104))
    # SymmetricStateMatrix
    def Q44(self): return self._tab.Get(flatbuffers.number_types.Float64Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(112))
def CreateSymmetricStateMatrix(builder, q00, q01, q02, q03, q04, q11, q12, q13, q14, q22, q23, q24, q33, q34, q44):
    """Write the 15 float64 fields into `builder` and return the struct offset.

    Prep reserves 120 bytes with 8-byte alignment; fields are prepended in
    reverse order so they land at ascending offsets (q00 first).
    Generated code (see file header) -- do not edit by hand.
    """
    builder.Prep(8, 120)
    builder.PrependFloat64(q44)
    builder.PrependFloat64(q34)
    builder.PrependFloat64(q33)
    builder.PrependFloat64(q24)
    builder.PrependFloat64(q23)
    builder.PrependFloat64(q22)
    builder.PrependFloat64(q14)
    builder.PrependFloat64(q13)
    builder.PrependFloat64(q12)
    builder.PrependFloat64(q11)
    builder.PrependFloat64(q04)
    builder.PrependFloat64(q03)
    builder.PrependFloat64(q02)
    builder.PrependFloat64(q01)
    builder.PrependFloat64(q00)
    return builder.Offset()
| 55.777778
| 147
| 0.7749
| 427
| 3,514
| 6.187354
| 0.149883
| 0.082135
| 0.249811
| 0.096518
| 0.596139
| 0.596139
| 0.596139
| 0.596139
| 0.596139
| 0.596139
| 0
| 0.059297
| 0.116961
| 3,514
| 62
| 148
| 56.677419
| 0.792137
| 0.1107
| 0
| 0
| 1
| 0
| 0.001288
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.447368
| false
| 0
| 0.026316
| 0.394737
| 0.552632
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1dbd902b3e1d94479e72f35dea53f2b01b463e5d
| 59
|
py
|
Python
|
tests/tests_setups/colreqs_test/test_collect_reqs_latest_SHOULD_exclude_repoassist_reqs_properly/mod.py
|
DamianPala/pyrepogen
|
1626d13875083b4d005b4ae5d68ac35a9879a464
|
[
"MIT"
] | null | null | null |
tests/tests_setups/colreqs_test/test_collect_reqs_latest_SHOULD_exclude_repoassist_reqs_properly/mod.py
|
DamianPala/pyrepogen
|
1626d13875083b4d005b4ae5d68ac35a9879a464
|
[
"MIT"
] | null | null | null |
tests/tests_setups/colreqs_test/test_collect_reqs_latest_SHOULD_exclude_repoassist_reqs_properly/mod.py
|
DamianPala/pyrepogen
|
1626d13875083b4d005b4ae5d68ac35a9879a464
|
[
"MIT"
] | null | null | null |
import pytest
# Intentionally empty, always-skipped placeholder test.
@pytest.mark.skip
def test_dump():
    pass
| 9.833333
| 17
| 0.711864
| 9
| 59
| 4.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186441
| 59
| 6
| 18
| 9.833333
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
1dcbca3f876551a0f6b8746ec5aacc31020ebe97
| 183
|
py
|
Python
|
pyapp_ext/messaging/sio/checks.py
|
pyapp-org/pyapp-messaging
|
159dede0d9a9823997dd37641cc2f46b139d24e3
|
[
"BSD-3-Clause"
] | 1
|
2021-02-21T18:27:23.000Z
|
2021-02-21T18:27:23.000Z
|
pyapp_ext/messaging/sio/checks.py
|
pyapp-org/pyapp-messaging
|
159dede0d9a9823997dd37641cc2f46b139d24e3
|
[
"BSD-3-Clause"
] | 22
|
2020-07-31T05:06:51.000Z
|
2022-02-14T00:03:03.000Z
|
pyapp_ext/messaging/sio/checks.py
|
pyapp-org/pyapp-messaging
|
159dede0d9a9823997dd37641cc2f46b139d24e3
|
[
"BSD-3-Clause"
] | null | null | null |
from pyapp.checks.registry import register
from .factory import message_sender_factory, message_receiver_factory
# Import-time side effect: register both messaging factories with the
# pyApp checks registry.
register(message_sender_factory)
register(message_receiver_factory)
| 26.142857
| 69
| 0.885246
| 23
| 183
| 6.695652
| 0.434783
| 0.168831
| 0.25974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071038
| 183
| 6
| 70
| 30.5
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
382444817ccbd09ece3a6ee4a1dee1140c214bfe
| 117
|
py
|
Python
|
meniscus/data/handlers/elasticsearch/__init__.py
|
ProjectMeniscus/meniscus
|
1df9efe33ead702d0f53dfc227b5da385ba9cf23
|
[
"Apache-2.0"
] | 12
|
2015-01-14T03:40:05.000Z
|
2018-08-20T13:19:07.000Z
|
meniscus/data/handlers/elasticsearch/__init__.py
|
ProjectMeniscus/meniscus
|
1df9efe33ead702d0f53dfc227b5da385ba9cf23
|
[
"Apache-2.0"
] | 1
|
2015-07-02T17:03:47.000Z
|
2015-07-02T17:03:47.000Z
|
meniscus/data/handlers/elasticsearch/__init__.py
|
ProjectMeniscus/meniscus
|
1df9efe33ead702d0f53dfc227b5da385ba9cf23
|
[
"Apache-2.0"
] | 4
|
2015-05-12T12:04:44.000Z
|
2020-11-17T19:08:43.000Z
|
#hoist get_handler method into package namespace
from meniscus.data.handlers.elasticsearch.driver import get_handler
| 39
| 67
| 0.871795
| 16
| 117
| 6.25
| 0.875
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08547
| 117
| 2
| 68
| 58.5
| 0.934579
| 0.401709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
69dd4a866acdb6c0fdc9faf4d74dd58e63acda10
| 2,390
|
py
|
Python
|
py-rest-client/test/test_products_l0_dist_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | null | null | null |
py-rest-client/test/test_products_l0_dist_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | 4
|
2022-02-13T10:48:44.000Z
|
2022-03-02T21:22:04.000Z
|
py-rest-client/test/test_products_l0_dist_api.py
|
ausseabed/product-catalogue
|
3deca0301298efe22d7b61028683254ac3bd3c66
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
AusSeabed product catalogue
The API description for the Ausseabed product catalogue inventory # noqa: E501
The version of the OpenAPI document: 0.2.2
Contact: AusSeabed@ga.gov.au
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import product_catalogue_py_rest_client
from product_catalogue_py_rest_client.api.products_l0_dist_api import ProductsL0DistApi # noqa: E501
from product_catalogue_py_rest_client.rest import ApiException
# Auto-generated stubs (openapi-generator, see module docstring); bodies are
# placeholders to be filled in with real assertions.
class TestProductsL0DistApi(unittest.TestCase):
    """ProductsL0DistApi unit test stubs"""
    def setUp(self):
        # Fresh API client instance for every test case.
        self.api = product_catalogue_py_rest_client.api.products_l0_dist_api.ProductsL0DistApi()  # noqa: E501
    def tearDown(self):
        pass
    def test_products_l0_dist_controller_create(self):
        """Test case for products_l0_dist_controller_create
        """
        pass
    def test_products_l0_dist_controller_create_instrument(self):
        """Test case for products_l0_dist_controller_create_instrument
        """
        pass
    def test_products_l0_dist_controller_delete(self):
        """Test case for products_l0_dist_controller_delete
        """
        pass
    def test_products_l0_dist_controller_delete_instrument(self):
        """Test case for products_l0_dist_controller_delete_instrument
        """
        pass
    def test_products_l0_dist_controller_find_all(self):
        """Test case for products_l0_dist_controller_find_all
        """
        pass
    def test_products_l0_dist_controller_find_instruments(self):
        """Test case for products_l0_dist_controller_find_instruments
        """
        pass
    def test_products_l0_dist_controller_find_one(self):
        """Test case for products_l0_dist_controller_find_one
        """
        pass
    def test_products_l0_dist_controller_find_one_instrument(self):
        """Test case for products_l0_dist_controller_find_one_instrument
        """
        pass
    def test_products_l0_dist_controller_update(self):
        """Test case for products_l0_dist_controller_update
        """
        pass
    def test_products_l0_dist_controller_update_instrument(self):
        """Test case for products_l0_dist_controller_update_instrument
        """
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 25.157895
| 110
| 0.71841
| 295
| 2,390
| 5.342373
| 0.220339
| 0.139594
| 0.195431
| 0.304569
| 0.713198
| 0.695431
| 0.648477
| 0.648477
| 0.423858
| 0.119289
| 0
| 0.020867
| 0.217992
| 2,390
| 94
| 111
| 25.425532
| 0.822365
| 0.396234
| 0
| 0.34375
| 0
| 0
| 0.005891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.34375
| 0.15625
| 0
| 0.5625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
0e12818f898714380cb5fd3cab7d431aeacfc7f7
| 345
|
py
|
Python
|
stubs/resizeimage/resizeimage.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | null | null | null |
stubs/resizeimage/resizeimage.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | 13
|
2018-08-06T15:25:50.000Z
|
2019-04-14T14:09:22.000Z
|
stubs/resizeimage/resizeimage.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | null | null | null |
from typing import Union, Tuple, List
from PIL.Image import Image
# Type stubs for the python-resizeimage API: each function takes a PIL Image
# and a size (a single int, or a (width, height) tuple/list) and returns an
# Image. Bodies are `...` by design -- this is a stub file.
def resize_width(image: Image, size: Union[int, Tuple[int, int], List[int]]) -> Image: ...
def resize_height(image: Image, size: Union[int, Tuple[int, int], List[int]]) -> Image: ...
def resize_thumbnail(image: Image, size: Union[int, Tuple[int, int], List[int]]) -> Image: ...
| 49.285714
| 94
| 0.689855
| 53
| 345
| 4.433962
| 0.283019
| 0.102128
| 0.178723
| 0.242553
| 0.651064
| 0.651064
| 0.651064
| 0.651064
| 0.651064
| 0.651064
| 0
| 0
| 0.127536
| 345
| 6
| 95
| 57.5
| 0.780731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.6
| false
| 0
| 0.4
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
38b0c0abf965823ba4d857c58f57de8c41d5ad29
| 2,374
|
py
|
Python
|
tests/cli/97_test_site_update.py
|
zorrobyte/easyengine
|
a37396d0c941ef363c6a297876582ddcc37ed55b
|
[
"MIT"
] | 2
|
2015-09-06T13:18:36.000Z
|
2019-02-13T09:00:56.000Z
|
tests/cli/97_test_site_update.py
|
zorrobyte/easyengine
|
a37396d0c941ef363c6a297876582ddcc37ed55b
|
[
"MIT"
] | 1
|
2020-10-27T19:47:50.000Z
|
2020-10-27T19:47:50.000Z
|
tests/cli/97_test_site_update.py
|
zorrobyte/easyengine
|
a37396d0c941ef363c6a297876582ddcc37ed55b
|
[
"MIT"
] | null | null | null |
from ee.utils import test
from ee.cli.main import get_test_app
class CliTestCaseSite(test.EETestCase):
    """CLI tests for `ee site update`: each case drives the test app with a
    different site-type flag against a fixture domain.

    Refactored: the nine identical setup/run/close sequences are factored
    into _run_update; test method names and behavior are unchanged.
    """

    def _run_update(self, domain, flag):
        # Run `ee site update <domain> <flag>` through the test app,
        # then close it.
        self.app = get_test_app(argv=['site', 'update', domain, flag])
        self.app.setup()
        self.app.run()
        self.app.close()

    def test_ee_cli(self):
        # Smoke test: the default app sets up, runs and closes cleanly.
        self.app.setup()
        self.app.run()
        self.app.close()

    def test_ee_cli_site_update_html(self):
        self._run_update('example2.com', '--html')

    def test_ee_cli_site_update_php(self):
        self._run_update('example1.com', '--php')

    def test_ee_cli_site_update_mysql(self):
        # NOTE(review): passes '--html' although the test name says mysql.
        # Preserved as-is; confirm whether '--mysql' was intended.
        self._run_update('example1.com', '--html')

    def test_ee_cli_site_update_wp(self):
        self._run_update('example5.com', '--wp')

    def test_ee_cli_site_update_wpsubdir(self):
        self._run_update('example4.com', '--wpsubdir')

    def test_ee_cli_site_update_wpsubdomain(self):
        self._run_update('example7.com', '--wpsubdomain')

    def test_ee_cli_site_update_w3tc(self):
        self._run_update('example8.com', '--w3tc')

    def test_ee_cli_site_update_wpfc(self):
        self._run_update('example9.com', '--wpfc')

    def test_ee_cli_site_update_wpsc(self):
        self._run_update('example6.com', '--wpsc')
| 32.081081
| 71
| 0.508846
| 283
| 2,374
| 4.031802
| 0.134276
| 0.239264
| 0.087642
| 0.105171
| 0.776512
| 0.776512
| 0.776512
| 0.776512
| 0.776512
| 0.531989
| 0
| 0.007079
| 0.345409
| 2,374
| 73
| 72
| 32.520548
| 0.727156
| 0
| 0
| 0.557377
| 0
| 0
| 0.10952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163934
| false
| 0
| 0.032787
| 0
| 0.213115
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
38cdc884fd43405de51719a7d80ee6169abf9749
| 137
|
py
|
Python
|
comments/admin.py
|
LordK1/Socialist
|
775820db682b97b00caa47e3df6d5d63dd3f8b37
|
[
"MIT"
] | null | null | null |
comments/admin.py
|
LordK1/Socialist
|
775820db682b97b00caa47e3df6d5d63dd3f8b37
|
[
"MIT"
] | null | null | null |
comments/admin.py
|
LordK1/Socialist
|
775820db682b97b00caa47e3df6d5d63dd3f8b37
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from comments.models import PostComment
# Expose PostComment in the Django admin with the default ModelAdmin.
admin.site.register(PostComment)
| 19.571429
| 39
| 0.824818
| 18
| 137
| 6.277778
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116788
| 137
| 6
| 40
| 22.833333
| 0.933884
| 0.189781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
38d0167e537bf04b5ac3b4db66068bf2cb34859d
| 14,691
|
py
|
Python
|
hsstock/service/sinanews_service.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | 2
|
2018-10-04T08:04:24.000Z
|
2021-01-21T06:58:30.000Z
|
hsstock/service/sinanews_service.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | null | null | null |
hsstock/service/sinanews_service.py
|
hsstock/hsstock
|
f8841331022e8844537a5c5b08d047e2cc328856
|
[
"Apache-2.0"
] | 1
|
2018-10-20T09:39:50.000Z
|
2018-10-20T09:39:50.000Z
|
# -*- coding: UTF-8 -*-
import requests
import bs4
import time
import random
import hsstock.utils.logger as logger
from hsstock.utils.date_util import DateUtil
from hsstock.utils.decorator import retry
class SinanewsService(object):
    def __init__(self, mongodbutil):
        # itemArray buffers the news dicts collected by the most recent
        # get_page / get_*_page call; each call resets it.
        self.itemArray = []
        # mongodbutil is used for URL de-duplication (urlIsExist) before
        # fetching article content.
        self.mongodbutil = mongodbutil
def generate_url(self,market, code):
"""
US: http://stock.finance.sina.com.cn/usstock/quotes/ntes.html
HK: http://stock.finance.sina.com.cn/hkstock/quotes/00771.html
SH: http://finance.sina.com.cn/realstock/company/sh603722/nc.shtml
SZ: http://finance.sina.com.cn/realstock/company/sz000782/nc.shtml
:param market:
:param code:
:return:
"""
if market == 'US':
return 'http://stock.finance.sina.com.cn/usstock/quotes/' + code + '.html'
if market == 'HK':
return 'http://stock.finance.sina.com.cn/hkstock/quotes/' + code + '.html'
if market == 'SH':
return 'http://finance.sina.com.cn/realstock/company/' + str.lower(market) + code + '/nc.shtml'
if market == 'SZ':
return 'http://finance.sina.com.cn/realstock/company/' + str.lower(market) + code + '/nc.shtml'
else:
return "url not found"
@retry(wait=30)
def get_page(self,market, code,url):
ret_code = -1
ret_data = ''
self.itemArray = []
try:
res = requests.get(url, timeout=60, headers={
'Content-type': 'text/html;charset=gb2312'
})
if res.encoding == 'ISO-8859-1':
res.encoding = 'gbk'
html = res.text # .encode(res.encoding)
res.raise_for_status()
if res.status_code == 200 :
contentSoup = bs4.BeautifulSoup(html,'lxml')
elems = contentSoup.select('#js_ggzx > li,.li_point > ul > li,.col02_22 > ul > li')
for elem in elems:
json = {}
json['code'] = code
temp = elem.__str__()[4:5]
if (temp == '\n') and market == 'US':
continue
ele = elem.select('span')
json['date'] = DateUtil.format_date(ele[0].getText()[1:-1])
s = json['date']
json['date'] = s.replace(' </a>', '')
date = DateUtil.string_toDatetime(json['date'])
json['date'] = date
ele = elem.select('a')
json['title'] = ele[len(ele)-1].getText()
logger.info("date:{},title:{}".format(s, json['title']))
json['href'] = ele[len(ele)-1].attrs['href']
json['year'] = 'guess'
ret,content = self.get_content(json['href'],'utf-8')
# if ret != -1 :
# time.sleep(4 * random.random())
if ret == 0 :
json['content'] = content
self.itemArray.append(json)
ret_code = 0
ret_data = ''
except Exception as err:
#time.sleep(4 * random.random())
logger.warning(err)
ret_code = -1
ret_data = err
except requests.exceptions.ConnectTimeout as err:
logger.warning(err)
ret_code = -1
ret_data = err
except requests.exceptions.ReadTimeout as err:
logger.warning(err)
ret_code = -1
ret_data = err
except requests.exceptions.Timeout as err:
logger.warning(err)
ret_code = -1
ret_data = err
except:
logger.warning('Unfortunitely -- An Unknow Error Happened, Please wait 3 seconds')
time.sleep(random.random())
ret_code = -1
ret_data = ''
finally:
res.close()
return ret_code,ret_data
def generate_page_url(self, market, code, page):
"""
HK: http://stock.finance.sina.com.cn/hkstock/go/CompanyNews/page/1/code/00771.html
US: http://biz.finance.sina.com.cn/usstock/usstock_news.php?pageIndex=1&symbol=ntes&type=1
SH: http://vip.stock.finance.sina.com.cn/corp/view/vCB_AllNewsStock.php?symbol=sh603722&Page=1
SZ: http://vip.stock.finance.sina.com.cn/corp/view/vCB_AllNewsStock.php?symbol=sz000063&Page=1
"""
if market == 'HK':
return 'http://stock.finance.sina.com.cn/hkstock/go/CompanyNews/page/' + str(
page) + '/code/' + code + '.html'
if market == 'US':
return 'http://biz.finance.sina.com.cn/usstock/usstock_news.php?pageIndex=' + str(
page) + '&symbol=' + code + '&type='
if market == 'SH':
return 'http://vip.stock.finance.sina.com.cn/corp/view/vCB_AllNewsStock.php?symbol=' + str.lower(
market) + code + '&Page=' + str(
page)
if market == 'SZ':
return 'http://vip.stock.finance.sina.com.cn/corp/view/vCB_AllNewsStock.php?symbol=' + str.lower(
market) + code + '&Page=' + str(
page)
else:
return "url not found"
    @retry()
    def get_hk_page(self, market, code, page):
        """Scrape one page of HK company news into self.itemArray.

        Items carry code/date/title/href/year ('real') and, when fetched,
        content.
        :param market: market code (used to build the URL)
        :param code: stock symbol
        :param page: 1-based page index
        :return: (page + 1, '') to advance pagination, or (-1, '') when the
                 page has fewer than 2 list entries (treated as the end).
        """
        self.itemArray = []
        url = self.generate_page_url(market, code, page)
        logger.info('fetch url: {}'.format(url))
        try:
            res = requests.get(url, timeout=60, headers={'Content-type': 'text/html;charset=gb2312'})
            if res.encoding == 'ISO-8859-1':
                res.encoding = 'gbk'
            html = res.text  # .encode(res.encoding)
            res.raise_for_status()
            if res.status_code == 200:
                contentSoup = bs4.BeautifulSoup(html, 'lxml')
                elems = contentSoup.select('#js_ggzx > li,.li_point > ul > li,.col02_22 > ul > li')
                if len(elems) < 2:
                    return -1,''
                for elem in elems:
                    json = {}
                    json['code'] = code
                    ele = elem.select('span')
                    if len(ele) == 0:
                        continue
                    json['date'] = ele[0].getText()
                    s = json['date']
                    ele = elem.select('a')
                    json['title'] = ele[len(ele) - 1].getText()
                    logger.info("date:{},title:{}".format(s, json['title']))
                    json['href'] = ele[len(ele) - 1].attrs['href']
                    json['year'] = 'real'
                    ret, content = self.get_content(json['href'], "utf-8")
                    # if ret != -1:
                    #     time.sleep(4 * random.random())
                    if ret == 0:
                        json['content'] = content
                        self.itemArray.append(json)
        # NOTE(review): this generic handler precedes the specific requests
        # timeout handlers below, making them unreachable; ret_code/ret_data
        # set there are also never returned from this method.
        except Exception as err:
            #
            logger.warning(err)
        except requests.exceptions.ConnectTimeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except requests.exceptions.ReadTimeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except requests.exceptions.Timeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except:
            logger.warning('Unfortunitely -- An Unknow Error Happened, Please wait 3 seconds')
            time.sleep(random.random())
            ret_code = -1
            ret_data = ''
        finally:
            # NOTE(review): `res` is unbound if requests.get itself raised,
            # so this close() can raise NameError -- confirm and guard.
            res.close()
        return page + 1,''
    @retry()
    def get_us_page(self, market, code, page, type):
        """Scrape one page of US company news into self.itemArray.

        :param market: market code (used to build the URL)
        :param code: stock symbol
        :param page: 1-based page index (capped at 100 per news type)
        :param type: news type appended to the URL's '&type=' suffix
        :return: (next_page, type); at page >= 100 switches from type "1" to
                 '2' via (1, '2'), and ends with (-1, '2') once type 2 is
                 exhausted.
        """
        self.itemArray = []
        url = self.generate_page_url(market, code, page)
        url = url + type
        logger.info('fetch url: {}'.format(url))
        try:
            res = requests.get(url, timeout=60, headers={'Content-type': 'text/html;charset=gb2312'})
            if res.encoding == 'ISO-8859-1':
                res.encoding = 'gbk'
            html = res.text  # .encode(res.encoding)
            res.raise_for_status()
            if res.status_code == 200:
                contentSoup = bs4.BeautifulSoup(html, 'lxml')
                elems = contentSoup.select('.xb_news > ul > li')
                if page >= 100:
                    if type.__eq__("1"):
                        return 1, '2'
                    else:
                        return -1, '2'
                for elem in elems:
                    json = {}
                    json['code'] = code
                    ele = elem.select('span')
                    if len(ele) == 0:
                        continue
                    json['date'] = DateUtil.format_date_us_history(ele[0].getText())
                    s = json['date']
                    ele = elem.select('a')
                    json['title'] = ele[len(ele) - 1].getText()
                    logger.info("date:{},title:{}".format(s, json['title']))
                    json['href'] = ele[len(ele) - 1].attrs['href']
                    json['year'] = 'real'
                    ret, content = self.get_content(json['href'], "utf-8")
                    # if ret != -1:
                    #     time.sleep(4 * random.random())
                    if ret == 0:
                        json['content'] = content
                        self.itemArray.append(json)
        # NOTE(review): this generic handler precedes the specific requests
        # timeout handlers below, making them unreachable; ret_code/ret_data
        # set there are also never returned from this method.
        except Exception as err:
            #time.sleep(4 * random.random())
            logger.warning(err)
        except requests.exceptions.ConnectTimeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except requests.exceptions.ReadTimeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except requests.exceptions.Timeout as err:
            logger.warning(err)
            ret_code = -1
            ret_data = err
        except:
            logger.warning('Unfortunitely -- An Unknow Error Happened, Please wait 3 seconds')
            time.sleep(random.random())
            ret_code = -1
            ret_data = ''
        finally:
            # NOTE(review): `res` is unbound if requests.get itself raised,
            # so this close() can raise NameError -- confirm and guard.
            res.close()
        return page + 1, type
    @retry()
    def get_chn_page(self, market, code, page):
        """Scrape one page of mainland (SH/SZ) company news into self.itemArray.

        Unlike the HK/US scrapers this parses the '.datelist > ul' markup by
        string-splitting on '<br/>' and anchor fragments rather than via
        element selectors.
        :return: (page + 1, '') to advance pagination, or (-1, '') when the
                 list has fewer than 2 entries (treated as the end).
        """
        self.itemArray = []
        url = self.generate_page_url(market, code, page)
        logger.info('fetch url: {}'.format(url))
        try:
            res = requests.get(url, timeout=60, headers={'Content-type':'text/html;charset=gb2312'})
            if res.encoding == 'ISO-8859-1':
                res.encoding = 'gbk'
            html = res.text #.encode(res.encoding)
            res.raise_for_status()
            if res.status_code == 200:
                contentSoup = bs4.BeautifulSoup(html, 'lxml')
                # Drop the surrounding '[<ul ...' / '</ul>]' wrapper of the
                # stringified selection before splitting entries.
                strList = str(contentSoup.select('.datelist > ul'))[10:-12]
                elems = strList.split("<br/>")
                if len(elems) < 2:
                    return -1,''
                for elem in elems:
                    if elem == '':
                        continue
                    json = {}
                    elem = elem.lstrip()
                    parts = elem.split('<a href="')
                    json['code'] = code
                    json['date'] = parts[0].rstrip() + ":00"
                    s = json['date']
                    parts1 = parts[1].split('" target="_blank">')
                    json['href'] = parts1[0]
                    json['year'] = 'real'
                    parts2 = parts1[1].split('</a>')
                    json['title'] = parts2[0]
                    logger.info("date:{},title:{}".format(s, json['title']))
                    ret, content = self.get_content(json['href'], "utf-8")
                    # if ret != -1:
                    #     time.sleep(4 * random.random())
                    if ret == 0:
                        json['content'] = content
                        self.itemArray.append(json)
        except Exception as err:
            #time.sleep(4 * random.random())
            logger.warning(err)
        except:
            # Non-Exception BaseExceptions only; ret_code/ret_data set here
            # are never returned from this method.
            logger.warning('Unfortunitely -- An Unknow Error Happened, Please wait 3 seconds')
            time.sleep(random.random())
            ret_code = -1
            ret_data = ''
        finally:
            # NOTE(review): `res` is unbound if requests.get itself raised,
            # so this close() can raise NameError -- confirm and guard.
            res.close()
        return page + 1,''
@retry()
def get_content(self, url, enco):
    """Download an article page and extract its body text.

    :param url: article URL
    :param enco: character encoding to decode the response with
    :return: (ret, content) — ret is 0 on success, -1 on failure,
             -2 when the URL is already stored in MongoDB
    """
    content = ''
    ret = -1
    if self.mongodbutil.urlIsExist(url):
        logger.info('This url:{} has existed'.format(url))
        return -2, content
    header = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
    res = None
    try:
        res = requests.get(url, headers=header, timeout=60)
        res.encoding = enco
        res.raise_for_status()
        if res.status_code == 200:
            soup = bs4.BeautifulSoup(res.text, 'lxml')
            # '#artibody' covers sina article pages; '.entry-content' covers blogs.
            elems = soup.select('#artibody,.entry-content')
            if len(elems) > 0:
                content = elems[0].getText()
                ret = 0
    except (requests.exceptions.ConnectTimeout,
            requests.exceptions.ReadTimeout,
            requests.exceptions.Timeout) as err:
        # BUG FIX: these specific handlers were unreachable in the original
        # because a generic `except Exception` preceded them.
        logger.warning(err)
    except Exception as err:
        # BUG FIX: was followed by an unreachable bare `except:`; narrowed so
        # BaseException (e.g. KeyboardInterrupt) propagates.
        logger.warning(err)
    finally:
        # BUG FIX: `res` could be unbound here if requests.get itself raised.
        if res is not None:
            res.close()
    return ret, content
def get_item_array(self):
    """Return the list of news items collected by the most recent page fetch."""
    return self.itemArray
def clear_item_array(self):
    """Empty the collected-items list in place (list.clear() returns None)."""
    return self.itemArray.clear()
| 39.280749
| 150
| 0.481519
| 1,559
| 14,691
| 4.461835
| 0.135985
| 0.041116
| 0.021852
| 0.030046
| 0.782921
| 0.750288
| 0.725273
| 0.71406
| 0.686889
| 0.681857
| 0
| 0.027009
| 0.390103
| 14,691
| 373
| 151
| 39.386059
| 0.74933
| 0.086311
| 0
| 0.753247
| 0
| 0.00974
| 0.137808
| 0.009101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032468
| false
| 0
| 0.022727
| 0.006494
| 0.12987
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
38d5b5d7e0f316b1f7f1295ca986e454c9184d27
| 222
|
py
|
Python
|
deepstocks/math/moving_average.py
|
b3h47pte/deepstocks
|
cbe8e4bba47eadb44f41434190acee2e1d6f8417
|
[
"Apache-2.0"
] | null | null | null |
deepstocks/math/moving_average.py
|
b3h47pte/deepstocks
|
cbe8e4bba47eadb44f41434190acee2e1d6f8417
|
[
"Apache-2.0"
] | 4
|
2021-01-28T20:07:40.000Z
|
2022-03-25T18:51:39.000Z
|
deepstocks/math/moving_average.py
|
b3h47pte/deepstocks
|
cbe8e4bba47eadb44f41434190acee2e1d6f8417
|
[
"Apache-2.0"
] | null | null | null |
#
# Utiltiies for computing the moving average.
#
# newVal = [(N- 1) * previousAvg + newVal] / N
def smoothedMovingAverage(previousAvg, newVal, N):
    """Return the next value of an N-period smoothed moving average.

    Applies the update rule: newAvg = ((N - 1) * previousAvg + newVal) / N.
    """
    return (previousAvg * (N - 1) + newVal) / float(N)
| 24.666667
| 54
| 0.666667
| 26
| 222
| 5.692308
| 0.538462
| 0.189189
| 0.108108
| 0.256757
| 0.337838
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.198198
| 222
| 8
| 55
| 27.75
| 0.820225
| 0.396396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
2a11456d4f9e5329f2b4c44982edaff5a6bf2ebe
| 102
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/stats/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 18
|
2018-02-23T11:28:54.000Z
|
2021-09-23T08:19:54.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/stats/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 2
|
2018-01-22T23:21:36.000Z
|
2018-01-22T23:31:27.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/stats/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 12
|
2017-05-23T06:01:12.000Z
|
2021-08-16T05:09:46.000Z
|
"""
Common namespace of statistical functions
"""
# flake8: noqa
from pandas.stats.moments import *
| 12.75
| 41
| 0.735294
| 12
| 102
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.156863
| 102
| 7
| 42
| 14.571429
| 0.860465
| 0.539216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aa7a8ea05dd8e1b66ceb006b968f214a152a1564
| 6,160
|
py
|
Python
|
lte/gateway/python/magma/pipelined/tests/test_uplink_bridge.py
|
kyaaqba/magma
|
fdb7be22a2076f9a9b158c9670a9af6cad68b85f
|
[
"BSD-3-Clause"
] | null | null | null |
lte/gateway/python/magma/pipelined/tests/test_uplink_bridge.py
|
kyaaqba/magma
|
fdb7be22a2076f9a9b158c9670a9af6cad68b85f
|
[
"BSD-3-Clause"
] | 10
|
2021-03-31T20:19:00.000Z
|
2022-02-19T07:09:57.000Z
|
lte/gateway/python/magma/pipelined/tests/test_uplink_bridge.py
|
119Vik/magma-1
|
107a7b374466a837fc0a49b283ba9d6ff1d702e3
|
[
"BSD-3-Clause"
] | 3
|
2020-08-20T18:45:34.000Z
|
2020-08-20T20:18:42.000Z
|
"""
Copyright (c) 2020-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
import unittest
import warnings
from concurrent.futures import Future
from magma.pipelined.tests.app.start_pipelined import (
TestSetup,
PipelinedController,
)
from magma.pipelined.bridge_util import BridgeTools
from magma.pipelined.tests.pipelined_test_util import (
start_ryu_app_thread,
stop_ryu_app_thread,
create_service_manager,
assert_bridge_snapshot_match,
)
class UplinkBridgeTest(unittest.TestCase):
    """Checks uplink-bridge flows (NAT enabled) against a stored bridge snapshot."""

    BRIDGE = 'testing_br'
    MAC_DEST = "5e:cc:cc:b1:49:4b"
    BRIDGE_IP = '192.168.128.1'
    UPLINK_BRIDGE = 'up_br0'

    @classmethod
    def setUpClass(cls):
        """
        Starts the thread which launches ryu apps

        Create a testing bridge, add a port, setup the port interfaces. Then
        launch the ryu apps for testing pipelined. Gets the references
        to apps launched by using futures.
        """
        super(UplinkBridgeTest, cls).setUpClass()
        warnings.simplefilter('ignore')
        cls.service_manager = create_service_manager([])
        uplink_future = Future()
        testing_future = Future()
        setup = TestSetup(
            apps=[
                PipelinedController.UplinkBridge,
                PipelinedController.Testing,
                PipelinedController.StartupFlows,
            ],
            references={
                PipelinedController.UplinkBridge: uplink_future,
                PipelinedController.Testing: testing_future,
                PipelinedController.StartupFlows: Future(),
            },
            config={
                'bridge_name': cls.BRIDGE,
                'bridge_ip_address': cls.BRIDGE_IP,
                'ovs_gtp_port_number': 32768,
                'clean_restart': True,
                'enable_nat': True,
            },
            mconfig=None,
            loop=None,
            service_manager=cls.service_manager,
            integ_test=False,
        )
        BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
        BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
        cls.thread = start_ryu_app_thread(setup)
        cls.uplink_br_controller = uplink_future.result()
        cls.testing_controller = testing_future.result()

    @classmethod
    def tearDownClass(cls):
        # Stop the ryu apps, then remove both OVS bridges created in setUpClass.
        stop_ryu_app_thread(cls.thread)
        BridgeTools.destroy_bridge(cls.BRIDGE)
        BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)

    def testFlowSnapshotMatch(self):
        assert_bridge_snapshot_match(self, self.UPLINK_BRIDGE, self.service_manager)
class UplinkBridgeWithNonNATTest(unittest.TestCase):
    """Checks uplink-bridge flows (NAT disabled) against a stored bridge snapshot."""

    BRIDGE = 'testing_br'
    MAC_DEST = "5e:cc:cc:b1:49:4b"
    BRIDGE_IP = '192.168.128.1'
    UPLINK_BRIDGE = 'up_br0'
    UPLINK_DHCP = 'test_dhcp0'
    UPLINK_PATCH = 'test_patch_p2'
    UPLINK_ETH_PORT = 'test_eth3'

    @classmethod
    def setUpClass(cls):
        """
        Starts the thread which launches ryu apps

        Create a testing bridge, add a port, setup the port interfaces. Then
        launch the ryu apps for testing pipelined. Gets the references
        to apps launched by using futures.
        """
        super(UplinkBridgeWithNonNATTest, cls).setUpClass()
        warnings.simplefilter('ignore')
        cls.service_manager = create_service_manager([])
        uplink_future = Future()
        testing_future = Future()
        setup = TestSetup(
            apps=[
                PipelinedController.UplinkBridge,
                PipelinedController.Testing,
                PipelinedController.StartupFlows,
            ],
            references={
                PipelinedController.UplinkBridge: uplink_future,
                PipelinedController.Testing: testing_future,
                PipelinedController.StartupFlows: Future(),
            },
            config={
                'bridge_name': cls.BRIDGE,
                'bridge_ip_address': cls.BRIDGE_IP,
                'ovs_gtp_port_number': 32768,
                'clean_restart': True,
                'enable_nat': False,
                'uplink_bridge': cls.UPLINK_BRIDGE,
                'uplink_eth_port_name': cls.UPLINK_ETH_PORT,
                'virtual_mac': '02:bb:5e:36:06:4b',
                'uplink_patch': cls.UPLINK_PATCH,
                'uplink_dhcp_port': cls.UPLINK_DHCP,
            },
            mconfig=None,
            loop=None,
            service_manager=cls.service_manager,
            integ_test=False,
        )
        BridgeTools.create_bridge(cls.BRIDGE, cls.BRIDGE)
        # Dummy uplink bridge plus the internal interfaces pipelined expects on it.
        BridgeTools.create_bridge(cls.UPLINK_BRIDGE, cls.UPLINK_BRIDGE)
        for iface in (cls.UPLINK_DHCP, cls.UPLINK_PATCH, cls.UPLINK_ETH_PORT):
            BridgeTools.create_internal_iface(cls.UPLINK_BRIDGE, iface, None)
        cls.thread = start_ryu_app_thread(setup)
        cls.uplink_br_controller = uplink_future.result()
        cls.testing_controller = testing_future.result()

    @classmethod
    def tearDownClass(cls):
        # Stop the ryu apps, then remove both OVS bridges created in setUpClass.
        stop_ryu_app_thread(cls.thread)
        BridgeTools.destroy_bridge(cls.BRIDGE)
        BridgeTools.destroy_bridge(cls.UPLINK_BRIDGE)

    def testFlowSnapshotMatch(self):
        # Flow stats vary run to run, so only the flows themselves are compared.
        assert_bridge_snapshot_match(self, self.UPLINK_BRIDGE, self.service_manager,
                                     include_stats=False)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| 35.2
| 84
| 0.633766
| 632
| 6,160
| 5.901899
| 0.245253
| 0.06756
| 0.040214
| 0.03941
| 0.76059
| 0.753351
| 0.753351
| 0.753351
| 0.753351
| 0.710456
| 0
| 0.013749
| 0.291558
| 6,160
| 174
| 85
| 35.402299
| 0.840972
| 0.120617
| 0
| 0.669291
| 0
| 0
| 0.070192
| 0
| 0
| 0
| 0
| 0
| 0.023622
| 1
| 0.047244
| false
| 0
| 0.047244
| 0
| 0.19685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
aa8884e4d257627c00be8a44b2576b63fde32ff2
| 89
|
py
|
Python
|
LibsRobotics/__init__.py
|
michalnand/libs_robotics
|
815748c46e88accfa8c97b4b9aba5fa19daca50b
|
[
"MIT"
] | null | null | null |
LibsRobotics/__init__.py
|
michalnand/libs_robotics
|
815748c46e88accfa8c97b4b9aba5fa19daca50b
|
[
"MIT"
] | null | null | null |
LibsRobotics/__init__.py
|
michalnand/libs_robotics
|
815748c46e88accfa8c97b4b9aba5fa19daca50b
|
[
"MIT"
] | null | null | null |
from .common.MapGrid import *
from .localisation.ParticleFilter import *
| 44.5
| 44
| 0.662921
| 8
| 89
| 7.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.269663
| 89
| 2
| 45
| 44.5
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aaa66afc2f7931cfff392787190426f2579d3cba
| 160
|
py
|
Python
|
finper/admin.py
|
vamosviendo/djangofinper
|
8bd4ceb2c66d25cbb7b248fd6d713189cc6ac9fb
|
[
"BSD-3-Clause"
] | null | null | null |
finper/admin.py
|
vamosviendo/djangofinper
|
8bd4ceb2c66d25cbb7b248fd6d713189cc6ac9fb
|
[
"BSD-3-Clause"
] | null | null | null |
finper/admin.py
|
vamosviendo/djangofinper
|
8bd4ceb2c66d25cbb7b248fd6d713189cc6ac9fb
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Movement, Account
# Expose the finper models in the Django admin site.
for _model in (Movement, Account):
    admin.site.register(_model)
| 22.857143
| 37
| 0.8125
| 22
| 160
| 5.909091
| 0.545455
| 0.138462
| 0.261538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10625
| 160
| 7
| 38
| 22.857143
| 0.909091
| 0.1625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
aab38362d5c1742e391965d30c2412ace4eb1a7a
| 1,362
|
py
|
Python
|
tests/routes/test_helpers/recipes_helper.py
|
f-i-l-i-p/recipes-backend
|
bcfc1a188361582024447d2e8103dfcdfda612a4
|
[
"MIT"
] | null | null | null |
tests/routes/test_helpers/recipes_helper.py
|
f-i-l-i-p/recipes-backend
|
bcfc1a188361582024447d2e8103dfcdfda612a4
|
[
"MIT"
] | null | null | null |
tests/routes/test_helpers/recipes_helper.py
|
f-i-l-i-p/recipes-backend
|
bcfc1a188361582024447d2e8103dfcdfda612a4
|
[
"MIT"
] | null | null | null |
import json
from tests.routes.test_helpers.route_test_case import RouteTestCase
from server.database.models import Recipe, User
def create_recipe_ingredients():
    """Return a JSON string of 100 identical test ingredients."""
    ingredient = {
        "name": "test",
        "unit": "kg",
        "quantity": 4
    }
    return json.dumps([ingredient] * 100)
def create_recipe_instructions():
    """Return a JSON string of 100 identical test instructions."""
    return json.dumps(["instruction"] * 100)
def create_recipe_image():
    """Return a base64 data-URI string encoding a tiny (2x2 px) JPEG image."""
    # Data URI to a 2x2 pixels jpg image
    return "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAP//////////////////////////////////////////////////////////////////////////////////////2wBDAf//////////////////////////////////////////////////////////////////////////////////////wAARCAACAAIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwAoooplH//Z"
| 54.48
| 876
| 0.743759
| 77
| 1,362
| 13.038961
| 0.662338
| 0.026892
| 0.044821
| 0.035857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059856
| 0.080029
| 1,362
| 24
| 877
| 56.75
| 0.741421
| 0.024963
| 0
| 0
| 0
| 0.066667
| 0.675716
| 0.65083
| 0
| 1
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.066667
| 0.6
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
2abf533d66ea53f13b65d01899e7f811ec14a785
| 128
|
py
|
Python
|
src/demo/__init__.py
|
shehancaldera/baxter_pick_and_place
|
e6f6b7decf5bee0b3b757b4dd7326c97115b6e38
|
[
"BSD-2-Clause"
] | 6
|
2018-12-15T15:08:45.000Z
|
2020-10-24T05:57:59.000Z
|
src/demo/__init__.py
|
BRML/baxter_pick_and_place
|
b9faf285bcbeb0cbadac59c34d016838b26d1fe4
|
[
"BSD-2-Clause"
] | null | null | null |
src/demo/__init__.py
|
BRML/baxter_pick_and_place
|
b9faf285bcbeb0cbadac59c34d016838b26d1fe4
|
[
"BSD-2-Clause"
] | 4
|
2016-12-14T17:44:43.000Z
|
2020-01-07T06:58:16.000Z
|
# -*- coding: utf-8 -*-
"""Module for implementing demonstration procedure."""
from joint_pick_and_place import PickAndPlace
| 18.285714
| 54
| 0.742188
| 15
| 128
| 6.133333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 0.132813
| 128
| 6
| 55
| 21.333333
| 0.81982
| 0.554688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2aca77b163255e1877734b2d8767a895d8d49b92
| 46
|
py
|
Python
|
example/marketprice/messages/shark/__init__.py
|
jcoates/avro2py
|
410bd4da92bd2673c616dfb02ba6db6f8af4ab95
|
[
"MIT"
] | 2
|
2020-12-08T00:23:48.000Z
|
2021-09-29T07:43:49.000Z
|
example/marketprice/messages/shark/__init__.py
|
jcoates/avro2py
|
410bd4da92bd2673c616dfb02ba6db6f8af4ab95
|
[
"MIT"
] | 8
|
2020-12-02T22:36:38.000Z
|
2021-12-02T17:48:03.000Z
|
example/marketprice/messages/shark/__init__.py
|
jcoates/avro2py
|
410bd4da92bd2673c616dfb02ba6db6f8af4ab95
|
[
"MIT"
] | 4
|
2021-03-10T19:52:29.000Z
|
2021-12-10T12:02:26.000Z
|
"""`marketprice.messages.shark` namespace."""
| 23
| 45
| 0.717391
| 4
| 46
| 8.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 46
| 1
| 46
| 46
| 0.75
| 0.847826
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2ad001c1355f7f025958d89644fdafafddafb94c
| 128
|
py
|
Python
|
Aw/Rpyc/test_aw.py
|
StarryHome/MultiToolsPlatform
|
2bd2b7e0700dbf542f0272ece3590a4afde328a4
|
[
"MIT"
] | null | null | null |
Aw/Rpyc/test_aw.py
|
StarryHome/MultiToolsPlatform
|
2bd2b7e0700dbf542f0272ece3590a4afde328a4
|
[
"MIT"
] | null | null | null |
Aw/Rpyc/test_aw.py
|
StarryHome/MultiToolsPlatform
|
2bd2b7e0700dbf542f0272ece3590a4afde328a4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class TestAw(object):
    """Minimal demo class — presumably exposed over RPyC for remote-call tests
    (given the Aw/Rpyc path); verify against the server setup."""

    def __init__(self):
        # No state to initialize.
        pass

    def show(self):
        """Print a fixed marker string."""
        print('show')
| 12.8
| 23
| 0.515625
| 15
| 128
| 4.133333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.3125
| 128
| 9
| 24
| 14.222222
| 0.693182
| 0.164063
| 0
| 0
| 0
| 0
| 0.038095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.2
| 0
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
2ae63f3b738899070bc7130fe31e210842498b70
| 17,303
|
py
|
Python
|
tests/BundleInstallTest.py
|
mwatts15/owmeta-core
|
b072178f8e7b83cc8665a29f4d038554d18adc35
|
[
"MIT"
] | 2
|
2021-03-06T16:25:35.000Z
|
2022-03-24T15:00:03.000Z
|
tests/BundleInstallTest.py
|
mwatts15/owmeta-core
|
b072178f8e7b83cc8665a29f4d038554d18adc35
|
[
"MIT"
] | 39
|
2020-02-08T21:58:33.000Z
|
2022-01-03T15:28:18.000Z
|
tests/BundleInstallTest.py
|
openworm/owmeta-core
|
b072178f8e7b83cc8665a29f4d038554d18adc35
|
[
"MIT"
] | null | null | null |
from collections import namedtuple
import json
from os import listdir, makedirs
from os.path import join as p, isdir, isfile
import transaction
from unittest.mock import patch
from tempfile import TemporaryDirectory
import pytest
import rdflib
from rdflib.term import URIRef
from owmeta_core.bundle import (Installer, Descriptor, make_include_func, FilesDescriptor,
UncoveredImports, DependencyDescriptor, TargetIsNotEmpty,
Remote, Bundle, BUNDLE_MANIFEST_FILE_NAME)
from owmeta_core.context import IMPORTS_CONTEXT_KEY, CLASS_REGISTRY_CONTEXT_KEY
from owmeta_core.context_common import CONTEXT_IMPORTS
Dirs = namedtuple('Dirs', ('source_directory', 'bundles_directory'))


@pytest.fixture
def dirs():
    # Fresh source/bundles directories per test, removed automatically afterwards.
    with TemporaryDirectory() as source_directory:
        with TemporaryDirectory() as bundles_directory:
            yield Dirs(source_directory, bundles_directory)
def test_bundle_install_directory(dirs):
    # Installing creates the <bundles>/<bundle-id>/<version> directory.
    desc = Descriptor('test')
    installer = Installer(*dirs, graph=rdflib.ConjunctiveGraph())
    installer.install(desc)
    assert isdir(p(dirs.bundles_directory, 'test', '1'))


def test_context_hash_file_exists(dirs):
    # A non-empty included context produces a graphs/hashes file.
    desc = Descriptor('test')
    ctxid = 'http://example.org/ctx1'
    desc.includes.add(make_include_func(ctxid))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid)
    ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    assert isfile(p(dirs.bundles_directory, 'test', '1', 'graphs', 'hashes'))


def test_context_index_file_exists(dirs):
    # A non-empty included context produces a graphs/index file.
    desc = Descriptor('test')
    ctxid = 'http://example.org/ctx1'
    desc.includes.add(make_include_func(ctxid))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid)
    ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    assert isfile(p(dirs.bundles_directory, 'test', '1', 'graphs', 'index'))
def test_context_hash_file_contains_ctxid(dirs):
    # The hashes file starts with the identifier of the stored context.
    desc = Descriptor('test')
    ctxid = 'http://example.org/ctx1'
    desc.includes.add(make_include_func(ctxid))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    with open(p(dirs.bundles_directory, 'test', '1', 'graphs', 'hashes'), 'rb') as f:
        assert f.read().startswith(ctxid.encode('UTF-8'))


def test_context_index_file_contains_ctxid(dirs):
    # The index file starts with the identifier of the stored context.
    desc = Descriptor('test')
    ctxid = 'http://example.org/ctx1'
    desc.includes.add(make_include_func(ctxid))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    with open(p(dirs.bundles_directory, 'test', '1', 'graphs', 'index'), 'rb') as f:
        assert f.read().startswith(ctxid.encode('UTF-8'))
def test_multiple_context_hash(dirs):
    # Both included contexts are recorded in the hashes file.
    desc = Descriptor('test')
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(ctxid_2))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid_1)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    ctx_graph = graph.get_context(ctxid_2)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    with open(p(dirs.bundles_directory, 'test', '1', 'graphs', 'hashes'), 'rb') as f:
        contents = f.read()
        assert ctxid_1.encode('UTF-8') in contents
        assert ctxid_2.encode('UTF-8') in contents


def test_no_dupe(dirs):
    '''
    Test that if we have two contexts with the same contents that we don't create more
    than one file for it.

    The index will point to the same file for the two contexts
    '''
    desc = Descriptor('test')
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(ctxid_2))
    graph = rdflib.ConjunctiveGraph()
    ctx_graph = graph.get_context(ctxid_1)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    ctx_graph = graph.get_context(ctxid_2)
    with transaction.manager:
        ctx_graph.add((aURI('a'), aURI('b'), aURI('c')))
    installer = Installer(*dirs, graph=graph)
    installer.install(desc)
    graph_files = [x for x in listdir(p(dirs.bundles_directory, 'test', '1', 'graphs'))
                   if x.endswith('.nt')]
    assert len(graph_files) == 1
def test_file_copy(dirs):
    # A file listed in the descriptor is copied into the bundle's files dir.
    desc = Descriptor('test')
    open(p(dirs[0], 'somefile'), 'w').close()
    desc.files = FilesDescriptor()
    desc.files.includes.add('somefile')
    installer = Installer(*dirs, graph=rdflib.ConjunctiveGraph())
    installer.install(desc)
    bundle_files = p(dirs.bundles_directory, 'test', '1', 'files')
    assert set(listdir(bundle_files)) == set(['hashes', 'somefile'])


def test_file_pattern_copy(dirs):
    # A glob pattern in the descriptor also copies matching files.
    desc = Descriptor('test')
    open(p(dirs[0], 'somefile'), 'w').close()
    desc.files = FilesDescriptor()
    desc.files.patterns.add('some*')
    installer = Installer(*dirs, graph=rdflib.ConjunctiveGraph())
    installer.install(desc)
    bundle_files = p(dirs.bundles_directory, 'test', '1', 'files')
    assert set(listdir(bundle_files)) == set(['hashes', 'somefile'])


def test_file_hash(dirs):
    # Installing files also writes a files/hashes manifest.
    desc = Descriptor('test')
    open(p(dirs[0], 'somefile'), 'w').close()
    desc.files = FilesDescriptor()
    desc.files.includes.add('somefile')
    installer = Installer(*dirs, graph=rdflib.ConjunctiveGraph())
    installer.install(desc)
    assert isfile(p(dirs.bundles_directory, 'test', '1', 'files', 'hashes'))


def test_file_hash_content(dirs):
    # The files/hashes manifest names each installed file.
    desc = Descriptor('test')
    open(p(dirs[0], 'somefile'), 'w').close()
    desc.files = FilesDescriptor()
    desc.files.includes.add('somefile')
    installer = Installer(*dirs, graph=rdflib.ConjunctiveGraph())
    installer.install(desc)
    with open(p(dirs.bundles_directory, 'test', '1', 'files', 'hashes'), 'rb') as f:
        contents = f.read()
        assert b'somefile' in contents
def test_uncovered_imports(dirs):
    '''
    If we have imports and no dependencies, then thrown an exception if we have not
    included them in the bundle
    '''
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'

    # Descriptor covers ctx1 (and the imports), but leaves ctx2 out.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_imp = graph.get_context(imports_ctxid)
    with transaction.manager:
        ctx_1.add((aURI('a'), aURI('b'), aURI('c')))
        ctx_2.add((aURI('d'), aURI('e'), aURI('f')))
        ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph)
    with pytest.raises(UncoveredImports):
        installer.install(desc)


def test_imports_are_included(dirs):
    '''
    If we have imports and no dependencies, then thrown an exception if we have not
    included them in the bundle
    '''
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'

    # Descriptor covers both contexts this time.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(ctxid_2))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_imp = graph.get_context(imports_ctxid)
    with transaction.manager:
        ctx_1.add((aURI('a'), aURI('b'), aURI('c')))
        ctx_2.add((aURI('d'), aURI('e'), aURI('f')))
        ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph)
    installer.install(desc)
    with Bundle(desc.id, dirs.bundles_directory) as bnd:
        g = bnd.rdf.get_context(bnd.conf[IMPORTS_CONTEXT_KEY])
        assert (URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)) in g
def test_unrelated_imports_excluded(dirs):
    # Import statements about contexts outside the bundle must not be installed.
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'
    ctxid_3 = 'http://example.org/ctx3'
    ctxid_4 = 'http://example.org/ctx4'

    # Descriptor covers ctx1 and ctx2 only; ctx3/ctx4 stay outside the bundle.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(ctxid_2))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_3 = graph.get_context(ctxid_3)
    ctx_4 = graph.get_context(ctxid_4)
    ctx_imp = graph.get_context(imports_ctxid)
    with transaction.manager:
        ctx_1.add((aURI('a'), aURI('b'), aURI('c')))
        ctx_2.add((aURI('d'), aURI('e'), aURI('f')))
        ctx_3.add((aURI('g'), aURI('h'), aURI('i')))
        ctx_4.add((aURI('j'), aURI('k'), aURI('l')))
        ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))
        ctx_imp.add((URIRef(ctxid_3), CONTEXT_IMPORTS, URIRef(ctxid_4)))

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph)
    installer.install(desc)
    with Bundle(desc.id, dirs.bundles_directory) as bnd:
        g = bnd.rdf.get_context(bnd.conf[IMPORTS_CONTEXT_KEY])
        assert (URIRef(ctxid_3), CONTEXT_IMPORTS, URIRef(ctxid_4)) not in g
def test_imports_in_dependencies(dirs):
    '''
    If we have imports and a dependency includes the context, then we shouldn't have an
    error.

    Versioned bundles are assumed to be immutable, so we won't re-fetch a bundle already
    in the local index
    '''
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'

    # Main bundle covers ctx1 and the imports; ctx2 comes from the 'dep' bundle.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(imports_ctxid))
    desc.dependencies.add(DependencyDescriptor('dep'))

    dep_desc = Descriptor('dep')
    dep_desc.includes.add(make_include_func(ctxid_2))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_imp = graph.get_context(imports_ctxid)
    with transaction.manager:
        ctx_1.add((aURI('a'), aURI('b'), aURI('c')))
        ctx_2.add((aURI('d'), aURI('e'), aURI('f')))
        ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph)
    installer.install(dep_desc)
    installer.install(desc)
def test_imports_in_unfetched_dependencies(dirs):
    '''
    If we have imports and a dependency includes the context, then we shouldn't have an
    error.

    Versioned bundles are assumed to be immutable, so we won't re-fetch a bundle already
    in the local index
    '''
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'

    # Main bundle covers ctx1 and the imports; ctx2 comes from the 'dep' bundle.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(imports_ctxid))
    desc.dependencies.add(DependencyDescriptor('dep'))

    dep_desc = Descriptor('dep')
    dep_desc.includes.add(make_include_func(ctxid_2))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_imp = graph.get_context(imports_ctxid)
    ctx_1.add((URIRef('http://example.com/a'), URIRef('http://example.com/b'),
               URIRef('http://example.com/c')))
    ctx_2.add((URIRef('http://example.com/d'), URIRef('http://example.com/e'),
               URIRef('http://example.com/f')))
    ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))

    class loader_class(object):
        # Fake loader: "fetching" the dependency installs it locally instead.
        def __init__(self, *args):
            self.bi = None

        def can_load(self, *args):
            return True

        def can_load_from(self, *args):
            return True

        def bundle_versions(self, *args):
            return [1]

        def __call__(self, *args):
            self.bi.install(dep_desc)

    loader = loader_class()

    class remote_class(Remote):
        def generate_loaders(self, *args):
            yield loader

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph,
                          remotes=[remote_class('remote')])
    loader.bi = installer
    with patch('owmeta_core.bundle.LOADER_CLASSES', (loader_class,)):
        installer.install(desc)
def test_imports_in_transitive_dependency_not_included(dirs):
    '''
    If we have imports and a transitive dependency includes the context, then we should
    still have an error.

    Versioned bundles are assumed to be immutable, so we won't re-fetch a bundle already
    in the local index
    '''
    imports_ctxid = 'http://example.org/imports'
    ctxid_1 = 'http://example.org/ctx1'
    ctxid_2 = 'http://example.org/ctx2'

    # Main bundle covers ctx1 and the imports; ctx2 lives only in dep's dependency.
    desc = Descriptor('test')
    desc.includes.add(make_include_func(ctxid_1))
    desc.includes.add(make_include_func(imports_ctxid))
    desc.dependencies.add(DependencyDescriptor('dep'))

    dep_desc = Descriptor('dep')
    dep_desc.dependencies.add(DependencyDescriptor('dep_dep'))

    dep_dep_desc = Descriptor('dep_dep')
    dep_dep_desc.includes.add(make_include_func(ctxid_2))

    # Contexts must be non-empty to be saved, so give each a triple.
    graph = rdflib.ConjunctiveGraph()
    ctx_1 = graph.get_context(ctxid_1)
    ctx_2 = graph.get_context(ctxid_2)
    ctx_imp = graph.get_context(imports_ctxid)
    ctx_1.add((aURI('a'), aURI('b'), aURI('c')))
    ctx_2.add((aURI('d'), aURI('e'), aURI('f')))
    ctx_imp.add((URIRef(ctxid_1), CONTEXT_IMPORTS, URIRef(ctxid_2)))

    installer = Installer(*dirs, imports_ctx=imports_ctxid, graph=graph)
    installer.install(dep_dep_desc)
    installer.install(dep_desc)
    with pytest.raises(UncoveredImports):
        installer.install(desc)
def test_class_registry_in_manifest(dirs):
'''
If a class registry context is specified, then include it
'''
cr_ctxid = 'http://example.org/class_registry'
# Make a descriptor that includes ctx1 and the imports, but not ctx2
d = Descriptor('test')
# Add some triples so the contexts aren't empty -- we can't save an empty context
g = rdflib.ConjunctiveGraph()
bi = Installer(*dirs, class_registry_ctx=cr_ctxid, graph=g)
bdir = bi.install(d)
with open(p(bdir, BUNDLE_MANIFEST_FILE_NAME)) as mf:
manifest_data = json.load(mf)
assert manifest_data[CLASS_REGISTRY_CONTEXT_KEY]
def test_class_registry_contents(dirs):
'''
If a class registry context is specified, then include it
'''
cr_ctxid = 'http://example.org/class_registry'
# Make a descriptor that includes ctx1 and the imports, but not ctx2
d = Descriptor('test')
# Add some triples so the contexts aren't empty -- we can't save an empty context
g = rdflib.ConjunctiveGraph()
cg_cr = g.get_context(cr_ctxid)
with transaction.manager:
cg_cr.add((aURI('blah'), aURI('bruh'), aURI('uhhhh')))
bi = Installer(*dirs, class_registry_ctx=cr_ctxid, graph=g)
bi.install(d)
with Bundle(d.id, dirs.bundles_directory) as bnd:
g = bnd.rdf.get_context(bnd.conf[CLASS_REGISTRY_CONTEXT_KEY])
assert (aURI('blah'), aURI('bruh'), aURI('uhhhh')) in g
def test_fail_on_non_empty_target(dirs):
d = Descriptor('test')
g = rdflib.ConjunctiveGraph()
bi = Installer(*dirs, graph=g)
bundles_directory = dirs[1]
sma = p(bundles_directory, 'test', '1', 'blah')
makedirs(sma)
with pytest.raises(TargetIsNotEmpty):
bi.install(d)
def test_dependency_version_in_manifest_without_spec(dirs):
'''
It is permitted to not specify the version of a bundle dependency in the descriptor,
but we must pin a specific version of the bundle in the manifest.
'''
ctxid_1 = 'http://example.org/ctx1'
ctxid_2 = 'http://example.org/ctx2'
# Make a descriptor that includes ctx1 and the imports, but not ctx2
d = Descriptor('test')
d.includes.add(make_include_func(ctxid_1))
d.dependencies.add(DependencyDescriptor('dep'))
dep_d = Descriptor('dep')
dep_d.includes.add(make_include_func(ctxid_2))
# Add some triples so the contexts aren't empty -- we can't save an empty context
g = rdflib.ConjunctiveGraph()
cg_1 = g.get_context(ctxid_1)
cg_2 = g.get_context(ctxid_2)
cg_1.add((aURI('a'), aURI('b'), aURI('c')))
cg_2.add((aURI('d'), aURI('e'), aURI('f')))
bi = Installer(*dirs, graph=g)
bi.install(dep_d)
bi.install(d)
test_bnd = Bundle('test', bundles_directory=dirs.bundles_directory)
assert test_bnd.manifest_data['dependencies'][0]['version'] == 1
def aURI(c):
return URIRef(f'http://example.org/uri#{c}')
| 34.263366
| 110
| 0.664451
| 2,540
| 17,303
| 4.358268
| 0.08937
| 0.038753
| 0.041734
| 0.034688
| 0.788618
| 0.758988
| 0.740921
| 0.729359
| 0.709756
| 0.693406
| 0
| 0.012617
| 0.193839
| 17,303
| 504
| 111
| 34.331349
| 0.780988
| 0.148703
| 0
| 0.69883
| 0
| 0
| 0.10502
| 0.002273
| 0
| 0
| 0
| 0
| 0.049708
| 1
| 0.084795
| false
| 0
| 0.157895
| 0.011696
| 0.260234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6307e6ce23b8b77a3a71b98cafa7d3ffcec958e1
| 70
|
py
|
Python
|
pysptools/distance/__init__.py
|
ctherien/pysptools
|
fbcd3ecaa7ab27f0158b28b4327537c3e75db160
|
[
"Apache-2.0"
] | 35
|
2016-03-20T15:25:07.000Z
|
2022-03-29T04:05:56.000Z
|
pysptools/distance/__init__.py
|
ctherien/pysptools
|
fbcd3ecaa7ab27f0158b28b4327537c3e75db160
|
[
"Apache-2.0"
] | 12
|
2016-03-24T13:38:52.000Z
|
2021-04-06T07:11:19.000Z
|
pysptools/distance/__init__.py
|
ctherien/pysptools
|
fbcd3ecaa7ab27f0158b28b4327537c3e75db160
|
[
"Apache-2.0"
] | 14
|
2016-03-21T17:26:46.000Z
|
2022-01-18T08:39:27.000Z
|
"""
"""
from .dist import SAM, SID, chebyshev, NormXCorr, classify
| 17.5
| 58
| 0.657143
| 8
| 70
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185714
| 70
| 4
| 58
| 17.5
| 0.807018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2da5088d15271fb0f8776598a171254bce8547a7
| 98
|
py
|
Python
|
mayan/apps/common/tests/__init__.py
|
eshbeata/open-paperless
|
6b9ed1f21908116ad2795b3785b2dbd66713d66e
|
[
"Apache-2.0"
] | 2,743
|
2017-12-18T07:12:30.000Z
|
2022-03-27T17:21:25.000Z
|
mayan/apps/common/tests/__init__.py
|
kyper999/mayan-edms
|
ca7b8301a1f68548e8e718d42a728a500d67286e
|
[
"Apache-2.0"
] | 15
|
2020-06-06T00:00:48.000Z
|
2022-03-12T00:03:54.000Z
|
mayan/apps/common/tests/__init__.py
|
kyper999/mayan-edms
|
ca7b8301a1f68548e8e718d42a728a500d67286e
|
[
"Apache-2.0"
] | 257
|
2017-12-18T03:12:58.000Z
|
2022-03-25T08:59:10.000Z
|
from .base import BaseTestCase # NOQA
from .decorators import skip_file_descriptor_check # NOQA
| 32.666667
| 58
| 0.816327
| 13
| 98
| 5.923077
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 98
| 2
| 59
| 49
| 0.916667
| 0.091837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2dd5e448b5e7bf0186b12ee8d56f1e00ed6a75bc
| 129
|
py
|
Python
|
starboard/__init__.py
|
notodinair/RedV3-Cogs
|
47747ccc33617dcaa3851ff12c6f95aee675d1e6
|
[
"MIT"
] | 1
|
2020-06-08T13:39:30.000Z
|
2020-06-08T13:39:30.000Z
|
starboard/__init__.py
|
Tominous/Swift-Cogs
|
47747ccc33617dcaa3851ff12c6f95aee675d1e6
|
[
"MIT"
] | null | null | null |
starboard/__init__.py
|
Tominous/Swift-Cogs
|
47747ccc33617dcaa3851ff12c6f95aee675d1e6
|
[
"MIT"
] | 1
|
2020-06-08T13:39:32.000Z
|
2020-06-08T13:39:32.000Z
|
from redbot.core.bot import Red
from starboard.starboard import Starboard
def setup(bot: Red):
bot.add_cog(Starboard(bot))
| 18.428571
| 41
| 0.767442
| 20
| 129
| 4.9
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 129
| 6
| 42
| 21.5
| 0.882883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2dd964aaee755da0dba6a18709cdd415afe5c49c
| 84
|
py
|
Python
|
route_dynamics/__init__.py
|
EricaEgg/Route_Dynamics
|
ebb34bb29042629ef95d46e155233d2d603f6a7a
|
[
"MIT"
] | null | null | null |
route_dynamics/__init__.py
|
EricaEgg/Route_Dynamics
|
ebb34bb29042629ef95d46e155233d2d603f6a7a
|
[
"MIT"
] | 6
|
2019-03-07T23:37:36.000Z
|
2019-03-22T03:43:54.000Z
|
route_dynamics/__init__.py
|
EricaEgg/Route_Dynamics
|
ebb34bb29042629ef95d46e155233d2d603f6a7a
|
[
"MIT"
] | 5
|
2019-02-28T23:37:19.000Z
|
2021-02-06T23:27:26.000Z
|
#from .base import *
#from .single_route import *
#from .multiple_route import *
| 21
| 31
| 0.714286
| 11
| 84
| 5.272727
| 0.545455
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 84
| 3
| 32
| 28
| 0.84058
| 0.892857
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
935eba47efdcb4c262e4d3f4ee1531a4ecd3f010
| 39,419
|
py
|
Python
|
033_Hand_Detection_and_Tracking/01_float32/12_new_hand_landmark_tflite2h5_weight_int_fullint_float16_quant.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 1,529
|
2019-12-11T13:36:23.000Z
|
2022-03-31T18:38:27.000Z
|
033_Hand_Detection_and_Tracking/01_float32/12_new_hand_landmark_tflite2h5_weight_int_fullint_float16_quant.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 200
|
2020-01-06T09:24:42.000Z
|
2022-03-31T17:29:08.000Z
|
033_Hand_Detection_and_Tracking/01_float32/12_new_hand_landmark_tflite2h5_weight_int_fullint_float16_quant.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 288
|
2020-02-21T14:56:02.000Z
|
2022-03-30T03:00:35.000Z
|
### tensorflow==2.3.0
### https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html
### https://google.github.io/mediapipe/solutions/pose
### https://www.tensorflow.org/api_docs/python/tf/keras/Model
### https://www.tensorflow.org/lite/guide/ops_compatibility
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/Conv2D
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/DepthwiseConv2D
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/Add
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/ReLU
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/MaxPool2D
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/Reshape
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/Concatenate
### https://www.tensorflow.org/api_docs/python/tf/keras/layers/Layer
### How to initialize a convolution layer with an arbitrary kernel in Keras? https://stackoverrun.com/ja/q/12269118
### saved_model_cli show --dir saved_model_hand_landmark_new/ --tag_set serve --signature_def serving_default
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow.keras import Model, Input
from tensorflow.keras.layers import Conv2D, DepthwiseConv2D, Add, ReLU, MaxPool2D, Reshape, Concatenate, Layer
from tensorflow.keras.initializers import Constant
from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2
import numpy as np
import sys
# tmp = np.load('weights/depthwise_conv2d_Kernel')
# print(tmp.shape)
# print(tmp)
# def init_f(shape, dtype=None):
# ker = np.load('weights/depthwise_conv2d_Kernel')
# print(shape)
# return ker
# sys.exit(0)
inputs = Input(shape=(256, 256, 3), name='input')
# Block_01
conv1_1 = Conv2D(filters=24, kernel_size=[3, 3], strides=[2, 2], padding="same", dilation_rate=[1, 1], activation='relu',
kernel_initializer=Constant(np.load('weights_new/conv2d_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_Bias')))(inputs)
depthconv1_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_Bias')))(conv1_1)
conv1_2 = Conv2D(filters=24, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_1_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_1_Bias')))(depthconv1_1)
add1_1 = Add()([conv1_1, conv1_2])
relu1_1 = ReLU()(add1_1)
# Block_02
depthconv2_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_1_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_1_Bias')))(relu1_1)
conv2_1 = Conv2D(filters=24, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_2_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_2_Bias')))(depthconv2_1)
add2_1 = Add()([relu1_1, conv2_1])
relu2_1 = ReLU()(add2_1)
# Block_03
depthconv3_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_2_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_2_Bias')))(relu2_1)
conv3_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_3_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_3_Bias')))(depthconv3_1)
maxpool3_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu2_1)
pad3_1 = tf.pad(maxpool3_1, paddings=tf.constant(np.load('weights_new/channel_padding_Paddings')))
add3_1 = Add()([conv3_1, pad3_1])
relu3_1 = ReLU()(add3_1)
# Block_04
depthconv4_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_3_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_3_Bias')))(relu3_1)
conv4_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_4_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_4_Bias')))(depthconv4_1)
add4_1 = Add()([conv4_1, relu3_1])
relu4_1 = ReLU()(add4_1)
# Block_05
depthconv5_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_4_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_4_Bias')))(relu4_1)
conv5_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_5_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_5_Bias')))(depthconv5_1)
add5_1 = Add()([relu4_1, conv5_1])
relu5_1 = ReLU()(add5_1)
# Block_06
depthconv6_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_5_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_5_Bias')))(relu5_1)
conv6_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_6_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_6_Bias')))(depthconv6_1)
maxpool6_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu5_1)
pad6_1 = tf.pad(maxpool6_1, paddings=tf.constant(np.load('weights_new/channel_padding_1_Paddings')))
add6_1 = Add()([conv6_1, pad6_1])
relu6_1 = ReLU()(add6_1)
# Block_07
depthconv7_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_6_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_6_Bias')))(relu6_1)
conv7_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_7_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_7_Bias')))(depthconv7_1)
add7_1 = Add()([conv7_1, relu6_1])
relu7_1 = ReLU()(add7_1)
# Block_08
depthconv8_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_7_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_7_Bias')))(relu7_1)
conv8_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_8_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_8_Bias')))(depthconv8_1)
add8_1 = Add()([conv8_1, relu7_1])
relu8_1 = ReLU()(add8_1)
# Block_09
depthconv9_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_8_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_8_Bias')))(relu8_1)
conv9_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_9_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_9_Bias')))(depthconv9_1)
maxpool9_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu8_1)
add9_1 = Add()([conv9_1, maxpool9_1])
relu9_1 = ReLU()(add9_1)
# Block_10
depthconv10_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_9_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_9_Bias')))(relu9_1)
conv10_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_10_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_10_Bias')))(depthconv10_1)
add10_1 = Add()([conv10_1, relu9_1])
relu10_1 = ReLU()(add10_1)
# Block_11
depthconv11_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_10_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_10_Bias')))(relu10_1)
conv11_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_11_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_11_Bias')))(depthconv11_1)
add11_1 = Add()([conv11_1, relu10_1])
relu11_1 = ReLU()(add11_1)
# Block_12
depthconv12_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_11_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_11_Bias')))(relu11_1)
conv12_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_12_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_12_Bias')))(depthconv12_1)
maxpool12_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu11_1)
add12_1 = Add()([conv12_1, maxpool12_1])
relu12_1 = ReLU()(add12_1)
resize12_1 = tf.image.resize(relu12_1, np.load('weights_new/up_sampling2d_Size'))
add12_2 = Add()([relu9_1, resize12_1])
# Block_13
depthconv13_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_12_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_12_Bias')))(add12_2)
conv13_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_13_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_13_Bias')))(depthconv13_1)
add13_1 = Add()([add12_2, conv13_1])
relu13_1 = ReLU()(add13_1)
resize13_1 = tf.image.resize(relu13_1, np.load('weights_new/up_sampling2d_1_Size'))
add13_2 = Add()([relu6_1, resize13_1])
# Block_14
depthconv14_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_13_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_13_Bias')))(add13_2)
conv14_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_14_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_14_Bias')))(depthconv14_1)
add14_1 = Add()([add13_2, conv14_1])
relu14_1 = ReLU()(add14_1)
conv14_2 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_15_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_15_Bias')))(relu14_1)
resize14_1 = tf.image.resize(conv14_2, np.load('weights_new/up_sampling2d_2_Size'))
relu14_2 = ReLU()(relu3_1)
add14_1 = Add()([relu14_2, resize14_1])
# Block_15
depthconv15_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_14_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_14_Bias')))(add14_1)
conv15_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_16_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_16_Bias')))(depthconv15_1)
add15_1 = Add()([add14_1, conv15_1])
relu15_1 = ReLU()(add15_1)
# Block_16
depthconv16_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_15_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_15_Bias')))(relu15_1)
conv16_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_17_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_17_Bias')))(depthconv16_1)
add16_1 = Add()([conv16_1, relu15_1])
relu16_1 = ReLU()(add16_1)
# Block_17
depthconv17_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_16_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_16_Bias')))(relu16_1)
conv17_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_18_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_18_Bias')))(depthconv17_1)
add17_1 = Add()([conv17_1, relu16_1])
relu17_1 = ReLU()(add17_1)
# Block_18
depthconv18_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_17_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_17_Bias')))(relu17_1)
conv18_1 = Conv2D(filters=48, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_19_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_19_Bias')))(depthconv18_1)
add18_1 = Add()([conv18_1, relu17_1])
relu18_1 = ReLU()(add18_1)
# Block_19
depthconv19_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_18_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_18_Bias')))(relu18_1)
conv19_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_20_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_20_Bias')))(depthconv19_1)
maxpool19_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu18_1)
pad19_1 = tf.pad(maxpool19_1, paddings=tf.constant(np.load('weights_new/channel_padding_2_Paddings')))
add19_1 = Add()([conv19_1, pad19_1])
relu19_1 = ReLU()(add19_1)
# Block_20
depthconv20_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_19_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_19_Bias')))(relu19_1)
conv20_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_21_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_21_Bias')))(depthconv20_1)
add20_1 = Add()([relu19_1, conv20_1])
relu20_1 = ReLU()(add20_1)
# Block_21
depthconv21_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_20_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_20_Bias')))(relu20_1)
conv21_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_22_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_22_Bias')))(depthconv21_1)
add21_1 = Add()([relu20_1, conv21_1])
relu21_1 = ReLU()(add21_1)
# Block_22
depthconv22_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_21_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_21_Bias')))(relu21_1)
conv22_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_23_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_23_Bias')))(depthconv22_1)
add22_1 = Add()([conv22_1, relu21_1])
relu22_1 = ReLU()(add22_1)
# Block_23
depthconv23_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_22_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_22_Bias')))(relu22_1)
conv23_1 = Conv2D(filters=96, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_24_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_24_Bias')))(depthconv23_1)
add23_1 = Add()([conv23_1, relu22_1])
relu23_1 = ReLU()(add23_1)
# Block_24
depthconv24_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_23_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_23_Bias')))(relu23_1)
conv24_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_25_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_25_Bias')))(depthconv24_1)
maxpool24_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu23_1)
pad24_1 = tf.pad(maxpool24_1, paddings=tf.constant(np.load('weights_new/channel_padding_3_Paddings')))
add24_1 = Add()([conv24_1, pad24_1])
relu24_1 = ReLU()(add24_1)
# Block_25
depthconv25_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_24_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_24_Bias')))(relu24_1)
conv25_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_26_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_26_Bias')))(depthconv25_1)
add25_1 = Add()([relu24_1, conv25_1])
relu25_1 = ReLU()(add25_1)
# Block_26
depthconv26_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_25_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_25_Bias')))(relu25_1)
conv26_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_27_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_27_Bias')))(depthconv26_1)
add26_1 = Add()([relu25_1, conv26_1])
relu26_1 = ReLU()(add26_1)
# Block_27
depthconv27_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_26_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_26_Bias')))(relu26_1)
conv27_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_28_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_28_Bias')))(depthconv27_1)
add27_1 = Add()([conv27_1, relu26_1])
relu27_1 = ReLU()(add27_1)
# Block_28
depthconv28_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_27_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_27_Bias')))(relu27_1)
conv28_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_29_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_29_Bias')))(depthconv28_1)
add28_1 = Add()([conv28_1, relu27_1])
relu28_1 = ReLU()(add28_1)
# Block_29
depthconv29_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_28_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_28_Bias')))(relu28_1)
# Backbone Blocks 29-34.  Two unit shapes recur throughout this network:
#  * downsampling unit (29, 34): stride-2 5x5 depthwise conv -> 1x1 pointwise
#    conv (288 ch), summed with a 2x2/stride-2 max-pooled skip of the input;
#  * identity residual unit (30-33): stride-1 5x5 depthwise -> 1x1 pointwise,
#    summed with the unchanged input.  Every sum is followed by ReLU.
# Weights come from pre-exported .npy tensors; transpose(1,2,3,0) presumably
# converts the exporter's (O,H,W,I) kernel layout to Keras' (H,W,I,O) --
# TODO confirm against the weight-export script.
conv29_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_30_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_30_Bias')))(depthconv29_1)
# Skip branch of the downsampling unit: max-pool matches the stride-2 spatial size.
maxpool29_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu28_1)
add29_1 = Add()([conv29_1, maxpool29_1])
relu29_1 = ReLU()(add29_1)
# Block_30 (identity residual unit)
depthconv30_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_29_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_29_Bias')))(relu29_1)
conv30_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_31_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_31_Bias')))(depthconv30_1)
add30_1 = Add()([conv30_1, relu29_1])
relu30_1 = ReLU()(add30_1)
# Block_31 (identity residual unit)
depthconv31_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_30_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_30_Bias')))(relu30_1)
conv31_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_32_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_32_Bias')))(depthconv31_1)
add31_1 = Add()([conv31_1, relu30_1])
relu31_1 = ReLU()(add31_1)
# Block_32 (identity residual unit)
depthconv32_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_31_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_31_Bias')))(relu31_1)
conv32_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_33_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_33_Bias')))(depthconv32_1)
add32_1 = Add()([conv32_1, relu31_1])
relu32_1 = ReLU()(add32_1)
# Block_33 (identity residual unit)
depthconv33_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_32_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_32_Bias')))(relu32_1)
conv33_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_34_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_34_Bias')))(depthconv33_1)
add33_1 = Add()([conv33_1, relu32_1])
relu33_1 = ReLU()(add33_1)
# Block_34 (downsampling unit: stride-2 depthwise + max-pool skip)
depthconv34_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_33_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_33_Bias')))(relu33_1)
conv34_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_35_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_35_Bias')))(depthconv34_1)
maxpool34_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu33_1)
add34_1 = Add()([conv34_1, maxpool34_1])
relu34_1 = ReLU()(add34_1)
# Backbone Blocks 35-39: four identity residual units (stride-1 5x5 depthwise
# -> 1x1 pointwise, 288 ch, identity skip) followed by another stride-2
# downsampling unit (Block_39) with a max-pooled skip.
# Block_35
depthconv35_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_34_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_34_Bias')))(relu34_1)
conv35_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_36_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_36_Bias')))(depthconv35_1)
add35_1 = Add()([conv35_1, relu34_1])
relu35_1 = ReLU()(add35_1)
# Block_36
depthconv36_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_35_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_35_Bias')))(relu35_1)
conv36_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_37_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_37_Bias')))(depthconv36_1)
add36_1 = Add()([conv36_1, relu35_1])
relu36_1 = ReLU()(add36_1)
# Block_37
depthconv37_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_36_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_36_Bias')))(relu36_1)
conv37_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_38_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_38_Bias')))(depthconv37_1)
add37_1 = Add()([conv37_1, relu36_1])
relu37_1 = ReLU()(add37_1)
# Block_38
depthconv38_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_37_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_37_Bias')))(relu37_1)
conv38_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_39_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_39_Bias')))(depthconv38_1)
add38_1 = Add()([conv38_1, relu37_1])
relu38_1 = ReLU()(add38_1)
# Block_39 (downsampling unit: stride-2 depthwise + max-pool skip)
depthconv39_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[2, 2], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_38_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_38_Bias')))(relu38_1)
conv39_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_40_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_40_Bias')))(depthconv39_1)
maxpool39_1 = MaxPool2D(pool_size=[2, 2], strides=[2, 2], padding='valid')(relu38_1)
add39_1 = Add()([conv39_1, maxpool39_1])
relu39_1 = ReLU()(add39_1)
# Backbone Blocks 40-43: final run of identity residual units (stride-1 5x5
# depthwise -> 1x1 pointwise, 288 ch, identity skip, ReLU).  relu43_1 is the
# shared feature map consumed by the output heads below.
# Block_40
depthconv40_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_39_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_39_Bias')))(relu39_1)
conv40_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_41_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_41_Bias')))(depthconv40_1)
add40_1 = Add()([conv40_1, relu39_1])
relu40_1 = ReLU()(add40_1)
# Block_41
depthconv41_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_40_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_40_Bias')))(relu40_1)
conv41_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_42_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_42_Bias')))(depthconv41_1)
add41_1 = Add()([conv41_1, relu40_1])
relu41_1 = ReLU()(add41_1)
# Block_42
depthconv42_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_41_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_41_Bias')))(relu41_1)
conv42_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_43_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_43_Bias')))(depthconv42_1)
add42_1 = Add()([conv42_1, relu41_1])
relu42_1 = ReLU()(add42_1)
# Block_43
depthconv43_1 = DepthwiseConv2D(kernel_size=[5, 5], strides=[1, 1], padding="same", depth_multiplier=1, dilation_rate=[1, 1],
depthwise_initializer=Constant(np.load('weights_new/depthwise_conv2d_42_Kernel')),
bias_initializer=Constant(np.load('weights_new/depthwise_conv2d_42_Bias')))(relu42_1)
conv43_1 = Conv2D(filters=288, kernel_size=[1, 1], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv2d_44_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv2d_44_Bias')))(depthconv43_1)
add43_1 = Add()([conv43_1, relu42_1])
relu43_1 = ReLU()(add43_1)
# Final Block_99
# Three output heads branch off the shared relu43_1 feature map:
#  * sigm99_1 ('output_handflag'):   1-channel 2x2 conv + sigmoid
#  * sigm99_2 ('output_handedness'): 1-channel 2x2 conv + sigmoid
#  * reshape99_3 ('ld_21_3d'):       63-channel 2x2 conv, reshaped to (1, 63)
#    -- presumably 21 landmarks x 3 coords; note the reshape hard-codes a
#    batch size of 1 (TODO confirm that is intended).
conv99_1 = Conv2D(filters=1, kernel_size=[2, 2], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv_handflag_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv_handflag_Bias')))(relu43_1)
sigm99_1 = tf.math.sigmoid(conv99_1, name='output_handflag')
# reshape99_1 = tf.reshape(sigm99_1, (1, -1), name='output_handflag')
conv99_2 = Conv2D(filters=1, kernel_size=[2, 2], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/conv_handedness_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/conv_handedness_Bias')))(relu43_1)
sigm99_2 = tf.math.sigmoid(conv99_2, name='output_handedness')
# reshape99_2 = tf.reshape(sigm99_2, (1, -1), name='output_handedness')
conv99_3 = Conv2D(filters=63, kernel_size=[2, 2], strides=[1, 1], padding="valid", dilation_rate=[1, 1],
kernel_initializer=Constant(np.load('weights_new/convld_21_3d_Kernel').transpose(1,2,3,0)),
bias_initializer=Constant(np.load('weights_new/convld_21_3d_Bias')))(relu43_1)
reshape99_3 = tf.reshape(conv99_3, (1, 63), name='ld_21_3d')
# Assemble the full model and export it as SavedModel and HDF5.
model = Model(inputs=inputs, outputs=[sigm99_1, sigm99_2, reshape99_3])
# model = Model(inputs=inputs, outputs=[reshape99_1, reshape99_2, reshape99_3])
model.summary()
tf.saved_model.save(model, 'saved_model_hand_landmark_new')
model.save('hand_landmark_new.h5')
# Freeze the model: wrap it in a concrete tf.function, fold the variables
# into constants, and write the resulting graph as a binary .pb file.
full_model = tf.function(lambda inputs: model(inputs))
full_model = full_model.get_concrete_function(inputs = (tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype)))
frozen_func = convert_variables_to_constants_v2(full_model, lower_control_flow=False)
# NOTE(review): the GraphDef returned here is discarded and write_graph below
# is given frozen_func.graph directly -- this call looks like a no-op.
frozen_func.graph.as_graph_def()
tf.io.write_graph(graph_or_graph_def=frozen_func.graph,
logdir=".",
name="hand_landmark_new_256x256_float32.pb",
as_text=False)
# Plain float32 TFLite export -- no quantization, float32 input/output.
tflite_model = tf.lite.TFLiteConverter.from_keras_model(model).convert()
with open('hand_landmark_new_256x256_float32.tflite', 'wb') as float32_file:
    float32_file.write(tflite_model)
print("tflite convert complete! - hand_landmark_new_256x256_float32.tflite")
# Weight (dynamic-range) quantization -- float32 input/output.
converter = tf.lite.TFLiteConverter.from_keras_model(model)
# OPTIMIZE_FOR_SIZE is a deprecated alias of DEFAULT (same behavior); use
# DEFAULT directly, matching the integer/float16 passes below.
converter.optimizations = [tf.lite.Optimize.DEFAULT]
tflite_model = converter.convert()
with open('hand_landmark_new_256x256_weight_quant.tflite', 'wb') as w:
    w.write(tflite_model)
print("Weight Quantization complete! - hand_landmark_new_256x256_weight_quant.tflite")
def representative_dataset_gen():
    """Yield 100 calibration samples for post-training quantization.

    Each sample is resized to 256x256, given a batch axis, and scaled to
    roughly [-1, 1] (0.007843 ~= 1/127.5), matching the model's expected
    input preprocessing.  Relies on the module-level ``raw_test_data``
    being loaded before the converter invokes this generator.
    """
    for sample in raw_test_data.take(100):
        img = tf.image.resize(sample['image'].numpy(), (256, 256))
        img = img[np.newaxis, :, :, :]
        yield [(img - 127.5) * 0.007843]
# Calibration data for post-training integer quantization.
# NOTE(review): "the300w_lp" is a face-alignment dataset; using it to
# calibrate a hand-landmark model looks questionable -- confirm intent.
raw_test_data, info = tfds.load(name="the300w_lp", with_info=True, split="train", data_dir="~/TFDS", download=True)
# Integer quantization -- float32 input/output, int8 internals.
converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.representative_dataset = representative_dataset_gen
tflite_quant_model = converter.convert()
with open('hand_landmark_new_256x256_integer_quant.tflite', 'wb') as int_quant_file:
    int_quant_file.write(tflite_quant_model)
print("Integer Quantization complete! - hand_landmark_new_256x256_integer_quant.tflite")
# # Full Integer Quantization - Input/Output=int8
# converter = tf.lite.TFLiteConverter.from_keras_model(model)
# converter.optimizations = [tf.lite.Optimize.DEFAULT]
# converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
# converter.inference_input_type = tf.uint8
# converter.inference_output_type = tf.uint8
# converter.representative_dataset = representative_dataset_gen
# tflite_quant_model = converter.convert()
# with open('hand_landmark_new_256x256_full_integer_quant.tflite', 'wb') as w:
# w.write(tflite_quant_model)
# print("Full Integer Quantization complete! - hand_landmark_new_256x256_full_integer_quant.tflite")
# Float16 quantization -- float32 input/output, fp16 weights.
converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.target_spec.supported_types = [tf.float16]
tflite_quant_model = converter.convert()
with open('hand_landmark_new_256x256_float16_quant.tflite', 'wb') as fp16_file:
    fp16_file.write(tflite_quant_model)
print("Float16 Quantization complete! - hand_landmark_new_256x256_float16_quant.tflite")
# # EdgeTPU
# import subprocess
# result = subprocess.check_output(["edgetpu_compiler", "-s", "hand_landmark_new_256x256_full_integer_quant.tflite"])
# print(result)
| 65.263245
| 125
| 0.714072
| 5,668
| 39,419
| 4.657022
| 0.080805
| 0.016593
| 0.094067
| 0.114563
| 0.760153
| 0.751705
| 0.746515
| 0.734316
| 0.721511
| 0.711017
| 0
| 0.089498
| 0.133768
| 39,419
| 603
| 126
| 65.371476
| 0.683535
| 0.07126
| 0
| 0.032037
| 0
| 0
| 0.194585
| 0.175811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002288
| false
| 0
| 0.018307
| 0
| 0.020595
| 0.009153
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
937e7125924f798c0a2800ecb6adb68f83c5ec15
| 2,019
|
py
|
Python
|
tests/devices/test_base.py
|
jtappolet/pydigitalstrom
|
b988a2c5f72920264ce5e008709aa287c3e6d631
|
[
"MIT"
] | 3
|
2018-12-09T00:33:26.000Z
|
2020-09-05T04:58:13.000Z
|
tests/devices/test_base.py
|
jtappolet/pydigitalstrom
|
b988a2c5f72920264ce5e008709aa287c3e6d631
|
[
"MIT"
] | null | null | null |
tests/devices/test_base.py
|
jtappolet/pydigitalstrom
|
b988a2c5f72920264ce5e008709aa287c3e6d631
|
[
"MIT"
] | 4
|
2020-08-20T18:25:34.000Z
|
2021-08-30T11:15:28.000Z
|
# -*- coding: UTF-8 -*-
import aiounittest
from unittest.mock import Mock, patch
from pydigitalstrom.devices.base import DSDevice
from tests.common import get_testclient
class TestDevice(aiounittest.AsyncTestCase):
    """Unit tests for the DSDevice base class."""

    def test_attributes(self):
        """The constructor stores id and name on private attributes."""
        device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
        self.assertEqual(device._id, 5)
        self.assertEqual(device._name, "test")

    def test_name(self):
        """The name property exposes the constructor's device_name."""
        device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
        self.assertEqual(device.name, "test")

    def test_unique_id(self):
        """The unique_id property exposes the constructor's device_id."""
        device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
        self.assertEqual(device.unique_id, 5)

    async def test_request_enqueued(self):
        """request() forwards the url to the command stack.

        NOTE(review): identical to test_request_plain below -- likely a
        copy/paste duplicate; consider dropping one of the two.
        """
        with patch(
            "pydigitalstrom.commandstack.DSCommandStack.append",
            Mock(return_value=aiounittest.futurized(dict())),
        ) as mock_stack_append:
            device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
            await device.request(url="abc.de")
            mock_stack_append.assert_called_with(url="abc.de")

    async def test_request_plain(self):
        """request() with a plain url forwards it unchanged."""
        with patch(
            "pydigitalstrom.commandstack.DSCommandStack.append",
            Mock(return_value=aiounittest.futurized(dict())),
        ) as mock_stack_append:
            device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
            await device.request(url="abc.de")
            mock_stack_append.assert_called_with(url="abc.de")

    async def test_request_with_data(self):
        """request() interpolates extra kwargs into the url template."""
        with patch(
            "pydigitalstrom.commandstack.DSCommandStack.append",
            Mock(return_value=aiounittest.futurized(dict())),
        ) as mock_stack_append:
            device = DSDevice(client=get_testclient(), device_id=5, device_name="test")
            await device.request(url="abc.de?{x}", x="hello")
            mock_stack_append.assert_called_with(url="abc.de?hello")
| 41.204082
| 87
| 0.674096
| 243
| 2,019
| 5.378601
| 0.222222
| 0.018363
| 0.085692
| 0.105585
| 0.795715
| 0.795715
| 0.795715
| 0.756695
| 0.756695
| 0.726855
| 0
| 0.005636
| 0.209014
| 2,019
| 48
| 88
| 42.0625
| 0.812774
| 0.010401
| 0
| 0.564103
| 0
| 0
| 0.11523
| 0.073647
| 0
| 0
| 0
| 0
| 0.179487
| 1
| 0.076923
| false
| 0
| 0.102564
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fa8f4e6eab1f95f44a8d23afba095e1f564c48c8
| 73
|
py
|
Python
|
modules/biometry/__init__.py
|
camargo2019/conecta
|
3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99
|
[
"MIT"
] | 1
|
2021-08-01T05:40:31.000Z
|
2021-08-01T05:40:31.000Z
|
modules/biometry/__init__.py
|
camargo2019/conecta
|
3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99
|
[
"MIT"
] | null | null | null |
modules/biometry/__init__.py
|
camargo2019/conecta
|
3bf67a7aa5280d1d263e5ee40cbc48f0045fcb99
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
def teste_biometry():
    """Placeholder biometry entry point: announce the module's status.

    Prints the (Portuguese) message "Em desenvolvimento" ("in development").
    """
    print('Em desenvolvimento')
| 18.25
| 28
| 0.712329
| 9
| 73
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0.123288
| 73
| 4
| 28
| 18.25
| 0.78125
| 0.232877
| 0
| 0
| 0
| 0
| 0.339623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fa9a45b7517609bfe6bf39f363db0b80c38458c2
| 391
|
py
|
Python
|
shared/lesson02/server1_topology.py
|
flavio-fernandes/ovs-lab
|
4cac9368ecba4d99783fda5fcee051868df1e761
|
[
"Apache-2.0"
] | 38
|
2015-02-23T11:15:15.000Z
|
2021-11-16T11:37:09.000Z
|
shared/lesson02/server1_topology.py
|
flavio-fernandes/ovs-lab
|
4cac9368ecba4d99783fda5fcee051868df1e761
|
[
"Apache-2.0"
] | 2
|
2015-11-06T11:39:41.000Z
|
2019-07-20T01:23:18.000Z
|
shared/lesson02/server1_topology.py
|
flavio-fernandes/ovs-lab
|
4cac9368ecba4d99783fda5fcee051868df1e761
|
[
"Apache-2.0"
] | 26
|
2015-01-19T21:40:11.000Z
|
2021-05-14T02:42:42.000Z
|
#!/usr/bin/python
from common import createTopology

# Host attachment spec shared by both hosts.
# NOTE(review): 'red1' and 'blue1' are given the same IP and MAC -- confirm
# this is intentional (e.g. isolated segments) rather than a copy/paste slip.
_HOST_SPEC = {
    'ip': '10.0.0.1/8',
    'mac': '00:00:00:00:aa:01',
}

# Build switch 's1' with the two hosts attached (each gets its own spec copy).
createTopology(
    's1',
    [
        ['red1', dict(_HOST_SPEC)],
        ['blue1', dict(_HOST_SPEC)],
    ]
)
| 15.64
| 42
| 0.289003
| 37
| 391
| 3.054054
| 0.513514
| 0.212389
| 0.212389
| 0.106195
| 0.40708
| 0.40708
| 0.40708
| 0.40708
| 0.40708
| 0.40708
| 0
| 0.194444
| 0.539642
| 391
| 24
| 43
| 16.291667
| 0.433333
| 0.040921
| 0
| 0.2
| 0
| 0
| 0.200535
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8782a8d1c7de0cb34a466e2e9734f099540e875e
| 104
|
py
|
Python
|
bitmovin_api_sdk/encoding/configurations/audio/eac3/customdata/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/configurations/audio/eac3/customdata/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/configurations/audio/eac3/customdata/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.configurations.audio.eac3.customdata.customdata_api import CustomdataApi
| 52
| 103
| 0.903846
| 13
| 104
| 7
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0.038462
| 104
| 1
| 104
| 104
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8785f4e008026946e4ff11b315b9b3cb0252509f
| 161
|
py
|
Python
|
heymac/utl/mock_serial.py
|
dwhall/HeyMac
|
18c86b5855d65d3d6ffe9ee9d5913396cee40d81
|
[
"MIT"
] | 5
|
2018-03-25T03:31:52.000Z
|
2021-11-23T03:17:01.000Z
|
heymac/utl/mock_serial.py
|
dwhall/HeyMac
|
18c86b5855d65d3d6ffe9ee9d5913396cee40d81
|
[
"MIT"
] | 10
|
2018-05-18T20:53:45.000Z
|
2021-08-22T03:01:31.000Z
|
heymac/utl/mock_serial.py
|
dwhall/HeyMac
|
18c86b5855d65d3d6ffe9ee9d5913396cee40d81
|
[
"MIT"
] | 1
|
2020-06-14T16:31:34.000Z
|
2020-06-14T16:31:34.000Z
|
class Serial():
    """Minimal stand-in for a serial-port object used in tests.

    Accepts the same constructor arguments as the real class, keeps only
    ``baudrate``, reads back zeros, and has a no-op ``close``.
    """

    def __init__(self, port, baudrate, timeout):
        # port and timeout are accepted only for signature compatibility.
        self.baudrate = baudrate

    def read(self, n):
        # Pretend the line is idle: return n zeros (as a list of ints).
        return [0] * n

    def close(self):
        # Nothing to release in the mock.
        pass
| 26.833333
| 48
| 0.627329
| 22
| 161
| 4.409091
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.236025
| 161
| 5
| 49
| 32.2
| 0.780488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.6
| false
| 0.2
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 5
|
879481eafca0906cd384909827c67ecb999bd247
| 129
|
py
|
Python
|
trebelge/tasks.py
|
askmetoo/trebelge
|
bff1b13edf41d1a9afd9ddb039f5b4089cb83dd6
|
[
"MIT"
] | 6
|
2019-12-21T21:15:50.000Z
|
2021-12-30T21:59:53.000Z
|
trebelge/tasks.py
|
Framras/trebelge
|
362179925dc688ad8ea008f532de72e67e49941b
|
[
"MIT"
] | null | null | null |
trebelge/tasks.py
|
Framras/trebelge
|
362179925dc688ad8ea008f532de72e67e49941b
|
[
"MIT"
] | 3
|
2020-01-05T19:32:40.000Z
|
2021-11-03T14:11:21.000Z
|
from __future__ import unicode_literals
from trebelge import api
def every_day_at_02_38():
    """Scheduler hook (daily at 02:38 per its name): run the e-belge check
    for all parties via the module-level ``api``."""
    api.check_all_ebelge_parties()
| 16.125
| 39
| 0.813953
| 20
| 129
| 4.65
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036036
| 0.139535
| 129
| 7
| 40
| 18.428571
| 0.801802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
87a5e24276f16abf412f936f07d0bb42dd76c5e9
| 4,776
|
py
|
Python
|
egs/biendata/s5/local/data_cleaning.py
|
zhangshuaiCasia/kaldi
|
e298736da4e6fb3bf5d70a7b520eb742c4744500
|
[
"Apache-2.0"
] | 1
|
2021-06-30T07:29:48.000Z
|
2021-06-30T07:29:48.000Z
|
egs/biendata/s5/local/data_cleaning.py
|
zhangshuaiCasia/kaldi
|
e298736da4e6fb3bf5d70a7b520eb742c4744500
|
[
"Apache-2.0"
] | null | null | null |
egs/biendata/s5/local/data_cleaning.py
|
zhangshuaiCasia/kaldi
|
e298736da4e6fb3bf5d70a7b520eb742c4744500
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import re
# resource_dir = "data_text_clean/test/text"
# target_dir = "data_text_clean/test/text_new"
# # remove illegal encode whiteblanks
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# new = re.sub('\u3000',' ',line)
# with open(target_dir, 'a') as w:
# w.write(new)
## remove (ppl,ppo,ppc..),
# resource_dir = "data/text"
# target_dir = "data_text_clean/text"
# pp = ['(ppl)', '(ppo)', '(ppc)', '(ppb)']
# utt_id = []
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# l = line.split()
# if len(l) == 2 and l[1] in pp:
# utt_id.append(l[0])
# continue
# with open(target_dir, 'a') as w:
# w.write(line)
# print(utt_id)
# for file in ['utt2spk', 'spk2utt', 'wav.scp']:
# with open('data/' + file, 'r') as f:
# for line in f.readlines():
# l = line.split()
# if l[0] in utt_id:
# continue
# with open('data_text_clean/' + file, 'a') as w:
# w.write(line)
## remove [],()
# resource_dir = "data_all_phaseI/text"
# target_dir = "data_all_phaseI/text1"
# # symble = ['(', ')', '[', ']', '*']
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# new = line.replace('(ppl)','').replace('(ppo)','').replace('(ppc)','').replace('(ppb)','') \
# .replace('(','').replace(')','').replace(']','').replace('[','').replace('*','') \
# .replace('【','').replace('】','').replace('.','').replace('-','').replace('~','') \
# .replace('(','').replace(')','').replace('[','').replace(']','')
# with open(target_dir, 'a') as w:
# w.write(new)
## remove blank lines,
# resource_dir = "data_text_clean/text1"
# target_dir = "data_text_clean/text2"
# utt_id = []
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# l = line.split()
# if len(l) == 1:
# utt_id.append(l[0])
# continue
# with open(target_dir, 'a') as w:
# w.write(line)
# print(utt_id)
# for file in ['utt2spk', 'spk2utt', 'wav.scp']:
# with open('data_text_clean/' + file, 'r') as f:
# for line in f.readlines():
# l = line.split()
# if l[0] in utt_id:
# continue
# with open('data_text_clean/' + file + '_2', 'a') as w:
# w.write(line)
## parse words to word
# resource_dir = "data_all/text"
# target_dir = "data_all/text1"
# utt_id = []
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# l = line.split()
# for li in l:
# if li >= u'\u4e00' and li <= u'\u9fa5' and len(li) > 1:
# line = line.replace(li, " ".join([i for i in li]))
# with open(target_dir, 'a') as w:
# w.write(line)
# Mixed Chinese/English tokenization ("中英文分词"): rewrite each transcript
# line so every CJK character is its own token while runs of consecutive
# non-CJK characters (e.g. English words) stay together as single tokens.
resource_dir = "/home/zhangshuai/kaldi-master/egs/biendata/s5/data/train_dev_sp/text_raw"
target_dir = "/home/zhangshuai/kaldi-master/egs/biendata/s5/data/train_dev_sp/text_char"


def _is_cjk(ch):
    """True for characters in the CJK Unified Ideographs range used here."""
    return u'\u4e00' <= ch <= u'\u9fa5'


def _tokenize(token):
    """Split one whitespace-delimited token into units.

    Each CJK character becomes its own unit; consecutive non-CJK characters
    are grouped into a single unit (same result as the original per-index
    scan, without the vacuous ``i+1 <= len(li)`` guard).
    """
    units = []
    run = ''
    for ch in token:
        if _is_cjk(ch):
            if run:
                units.append(run)
                run = ''
            units.append(ch)
        else:
            run += ch
    if run:
        units.append(run)
    return units


# Open the output once instead of re-opening it in append mode per line
# (the original paid one open/close per input line).
with open(resource_dir, 'r') as f, open(target_dir, 'a') as w:
    for line in f.readlines():
        line_list = line.split()
        if not line_list:
            # Blank line: the original crashed here (line_list[0]); skip instead.
            continue
        units = []
        for token in line_list[1:]:
            units.extend(_tokenize(token))
        # Format: "<utt_id> <unit> <unit> ...\n"
        w.write(line_list[0] + ' ' + " ".join(units) + '\n')
## 删除中文,余下英文用于生成wordpiece
# resource_dir = "data_all_phaseI/text_raw"
# target_dir = "data_all_phaseI/text1"
# en = ''
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# utt_id = []
# line_list = line.split()
# l = line_list[1:]
# for li in l:
# for i in range(len(li)):
# if i+1 <= len(li):
# if (li[i] < u'\u4e00' or li[i] > u'\u9fa5'):
# en = en + li[i]
# if i+1 == len(li):
# utt_id.append(en)
# en = ''
# elif (li[i+1] >= u'\u4e00' and li[i+1] <= u'\u9fa5'):
# utt_id.append(en)
# en = ''
# else:
# utt_id.append(li[i])
# with open(target_dir, 'a') as w:
# w.write(line_list[0] + ' ' + " ".join(utt_id) + '\n')
## 将文本中非语音发音替换为<SPOKEN_NOISE>
# resource_dir = "data_all/text_raw"
# target_dir = "data_all/text1"
# # symble = ['(', ')', '[', ']', '*']
# with open(resource_dir, 'r') as f:
# for line in f.readlines():
# uttid = line.split()[0]
# new = line.replace('(ppl)','<SPOKEN_NOISE>').replace('(ppo)','<SPOKEN_NOISE>').replace('(ppc)','<SPOKEN_NOISE>').replace('(ppb)','<SPOKEN_NOISE>') \
# .replace('(','').replace(')','').replace(']','').replace('[','').replace('*','') \
# .replace('【','').replace('】','').replace('.','').replace('~','') \
# .replace('(','').replace(')','').replace('[','').replace(']','')
# with open(target_dir, 'a') as w:
# w.write(uttid + ' ' + new.lower())
| 28.260355
| 152
| 0.545017
| 704
| 4,776
| 3.555398
| 0.140625
| 0.139832
| 0.192968
| 0.234918
| 0.815421
| 0.769477
| 0.670396
| 0.670396
| 0.670396
| 0.670396
| 0
| 0.016887
| 0.206449
| 4,776
| 168
| 153
| 28.428571
| 0.643536
| 0.737856
| 0
| 0.208333
| 0
| 0
| 0.155971
| 0.129234
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
87d9871afcdc947f17cf6893a64407ec89430393
| 177
|
py
|
Python
|
AI/data/__init__.py
|
yast-ia/YastAI
|
f5a05841126da4acd9b7250c5bf6f627ac1703d5
|
[
"MIT"
] | 1
|
2020-08-23T22:00:17.000Z
|
2020-08-23T22:00:17.000Z
|
AI/data/__init__.py
|
yast-ia/YastAI
|
f5a05841126da4acd9b7250c5bf6f627ac1703d5
|
[
"MIT"
] | null | null | null |
AI/data/__init__.py
|
yast-ia/YastAI
|
f5a05841126da4acd9b7250c5bf6f627ac1703d5
|
[
"MIT"
] | 1
|
2020-08-23T18:34:12.000Z
|
2020-08-23T18:34:12.000Z
|
"""
Data
====
Load, preprocess and filter data.
"""
from .constants import *
from .dataset import *
from .generator import *
from .preprocessing import *
from .utils import *
| 13.615385
| 33
| 0.700565
| 21
| 177
| 5.904762
| 0.571429
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175141
| 177
| 13
| 34
| 13.615385
| 0.849315
| 0.248588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
87f1f3b129a966e650b7cf594ad5b0d94868c09d
| 515
|
py
|
Python
|
src/shared_library/shared_library.py
|
DantasB/smart-speaker
|
d74151f447848f5054d310679cafb0b3298d34bd
|
[
"MIT"
] | 1
|
2021-09-25T16:07:30.000Z
|
2021-09-25T16:07:30.000Z
|
src/shared_library/shared_library.py
|
DantasB/smart-speaker
|
d74151f447848f5054d310679cafb0b3298d34bd
|
[
"MIT"
] | 1
|
2022-01-10T03:50:13.000Z
|
2022-01-10T03:50:13.000Z
|
src/shared_library/shared_library.py
|
DantasB/smart-speaker
|
d74151f447848f5054d310679cafb0b3298d34bd
|
[
"MIT"
] | null | null | null |
import tempfile
def generate_workfolder() -> tempfile.TemporaryDirectory:
    """Create and return a fresh temporary working directory.

    Returns:
        tempfile.TemporaryDirectory: handle whose ``.name`` is the new
        directory's path; pass it to ``delete_workfolder`` when done.
    """
    return tempfile.TemporaryDirectory()
def delete_workfolder(directory: tempfile.TemporaryDirectory) -> None:
    """Remove a temporary working directory and its contents.

    Args:
        directory (tempfile.TemporaryDirectory): the handle to clean up.
    """
    directory.cleanup()
| 25.75
| 71
| 0.699029
| 45
| 515
| 7.911111
| 0.488889
| 0.365169
| 0.162921
| 0.168539
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223301
| 515
| 19
| 72
| 27.105263
| 0.89
| 0.41165
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
e20a6e9062b584a8742ba88963b500f7424a3f89
| 193
|
py
|
Python
|
python_justclick/python_justclick_django/__init__.py
|
moshkov/python_justclick
|
b7039c53b6140e66e98f02ddb324269e044a4054
|
[
"MIT"
] | null | null | null |
python_justclick/python_justclick_django/__init__.py
|
moshkov/python_justclick
|
b7039c53b6140e66e98f02ddb324269e044a4054
|
[
"MIT"
] | null | null | null |
python_justclick/python_justclick_django/__init__.py
|
moshkov/python_justclick
|
b7039c53b6140e66e98f02ddb324269e044a4054
|
[
"MIT"
] | null | null | null |
from python_justclick.justclick import JustClickConnection
from django.conf import settings
# Module-level connection singleton built from Django settings.
# NOTE(review): created at import time, so Django settings must be configured
# before this module is imported.
justClickConnection = JustClickConnection(settings.JUSTCLICK_USERNAME, settings.JUSTCLICK_API_KEY)
| 32.166667
| 98
| 0.88601
| 20
| 193
| 8.35
| 0.55
| 0.203593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072539
| 193
| 5
| 99
| 38.6
| 0.932961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
356ec68d102707648a63ab1bdeda22b402009a35
| 133
|
py
|
Python
|
affo_sms_service/api/exception.py
|
fossabot/affo-sms-service
|
a843eb37d61560392d7fcbb6b14ca2d31cd50b12
|
[
"BSD-3-Clause"
] | null | null | null |
affo_sms_service/api/exception.py
|
fossabot/affo-sms-service
|
a843eb37d61560392d7fcbb6b14ca2d31cd50b12
|
[
"BSD-3-Clause"
] | 1
|
2019-11-25T14:25:13.000Z
|
2019-11-25T14:25:13.000Z
|
affo_sms_service/api/exception.py
|
fossabot/affo-sms-service
|
a843eb37d61560392d7fcbb6b14ca2d31cd50b12
|
[
"BSD-3-Clause"
] | 1
|
2019-11-25T14:21:58.000Z
|
2019-11-25T14:21:58.000Z
|
import http
import connexion_buzz
class InvalidPhone(connexion_buzz.ConnexionBuzz):
    """Buzz-style API error for a rejected phone number.

    ``status_code`` is set to 400 (Bad Request); presumably connexion_buzz
    uses it as the HTTP status of the rendered error response -- confirm
    against the connexion_buzz docs.
    """

    status_code = http.HTTPStatus.BAD_REQUEST
| 16.625
| 49
| 0.827068
| 16
| 133
| 6.625
| 0.75
| 0.245283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 133
| 7
| 50
| 19
| 0.905983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3574bc2423066b4a2512a253ab5a3aa09958e2a2
| 217
|
py
|
Python
|
pythontohtml/__init__.py
|
hakiKhuva/pythontohtml
|
d86df95c5260b1b567fabb356c60057bc4055224
|
[
"MIT"
] | null | null | null |
pythontohtml/__init__.py
|
hakiKhuva/pythontohtml
|
d86df95c5260b1b567fabb356c60057bc4055224
|
[
"MIT"
] | null | null | null |
pythontohtml/__init__.py
|
hakiKhuva/pythontohtml
|
d86df95c5260b1b567fabb356c60057bc4055224
|
[
"MIT"
] | null | null | null |
"""
PythontoHTML : use python functions to create html document
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from .__about__ import __name__, __version__, __author__, __author_email__, __license__
| 36.166667
| 87
| 0.576037
| 17
| 217
| 5.882353
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115207
| 217
| 6
| 87
| 36.166667
| 0.520833
| 0.529954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
357951f7bdce5e3652fb55ba49f202e8ea78a23c
| 240
|
py
|
Python
|
confidant/authnz/errors.py
|
chadwhitacre/confidant
|
dd788147b355f760767cf3b9487671c67948ade3
|
[
"Apache-2.0"
] | 1,820
|
2015-11-04T17:57:16.000Z
|
2022-03-31T16:47:24.000Z
|
confidant/authnz/errors.py
|
chadwhitacre/confidant
|
dd788147b355f760767cf3b9487671c67948ade3
|
[
"Apache-2.0"
] | 1,601
|
2018-09-13T14:56:27.000Z
|
2021-03-31T20:06:16.000Z
|
confidant/authnz/errors.py
|
isabella232/confidant
|
3dac318c3e1f29bae5771084ad29a4bc121f1771
|
[
"Apache-2.0"
] | 136
|
2015-11-04T19:23:14.000Z
|
2022-02-25T01:51:29.000Z
|
# authentication / authorization related error classes
class UserUnknownError(Exception):
pass
class TokenVersionError(Exception):
pass
class AuthenticationError(Exception):
pass
class NotAuthorized(Exception):
pass
| 13.333333
| 54
| 0.7625
| 21
| 240
| 8.714286
| 0.571429
| 0.284153
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179167
| 240
| 17
| 55
| 14.117647
| 0.928934
| 0.216667
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
35850910507f5b61ca978b4a79a718c629acfa4f
| 323
|
py
|
Python
|
octicons16px/chevron_down.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | 1
|
2021-01-28T06:47:39.000Z
|
2021-01-28T06:47:39.000Z
|
octicons16px/chevron_down.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
octicons16px/chevron_down.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
OCTICON_CHEVRON_DOWN = """
<svg class="octicon octicon-chevron-down" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M12.78 6.22a.75.75 0 010 1.06l-4.25 4.25a.75.75 0 01-1.06 0L3.22 7.28a.75.75 0 011.06-1.06L8 9.94l3.72-3.72a.75.75 0 011.06 0z"></path></svg>
"""
| 64.6
| 290
| 0.671827
| 74
| 323
| 2.905405
| 0.621622
| 0.074419
| 0.093023
| 0.074419
| 0.093023
| 0
| 0
| 0
| 0
| 0
| 0
| 0.309278
| 0.099071
| 323
| 4
| 291
| 80.75
| 0.429553
| 0
| 0
| 0
| 0
| 0.333333
| 0.906832
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
35dc2e3e2beff573f5d8768af63a591c784733f8
| 74
|
py
|
Python
|
asyncworker/metrics/__init__.py
|
async-worker/async-worker
|
9025d8f14d3fe6e1a2b1373c84abf41de575b359
|
[
"MIT"
] | 7
|
2021-05-02T19:26:14.000Z
|
2022-02-08T15:12:10.000Z
|
asyncworker/metrics/__init__.py
|
async-worker/async-worker
|
9025d8f14d3fe6e1a2b1373c84abf41de575b359
|
[
"MIT"
] | 10
|
2021-05-02T15:37:55.000Z
|
2021-09-11T10:58:32.000Z
|
asyncworker/metrics/__init__.py
|
async-worker/async-worker
|
9025d8f14d3fe6e1a2b1373c84abf41de575b359
|
[
"MIT"
] | null | null | null |
from .collectors import *
from .definitions import *
from .types import *
| 18.5
| 26
| 0.756757
| 9
| 74
| 6.222222
| 0.555556
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 3
| 27
| 24.666667
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
35e3d18c0457a8e2c4e28c1568febe10d2e25bad
| 31,624
|
py
|
Python
|
orttraining/orttraining/test/python/orttraining_test_orttrainer_checkpoint_functions.py
|
vpisarev/onnxruntime
|
bab9b80f1f2330d3a115e0abbb4d8278c2be3f44
|
[
"MIT"
] | null | null | null |
orttraining/orttraining/test/python/orttraining_test_orttrainer_checkpoint_functions.py
|
vpisarev/onnxruntime
|
bab9b80f1f2330d3a115e0abbb4d8278c2be3f44
|
[
"MIT"
] | null | null | null |
orttraining/orttraining/test/python/orttraining_test_orttrainer_checkpoint_functions.py
|
vpisarev/onnxruntime
|
bab9b80f1f2330d3a115e0abbb4d8278c2be3f44
|
[
"MIT"
] | null | null | null |
import pytest
from unittest.mock import patch, Mock
from _test_commons import _load_pytorch_transformer_model
from onnxruntime.training import amp, checkpoint, optim, orttrainer, _checkpoint_storage
import numpy as np
import onnx
import torch
# Helper functions
def _create_trainer(zero_enabled=False):
"""Cerates a simple ORTTrainer for ORTTrainer functional tests"""
device = 'cuda'
optim_config = optim.LambConfig(lr=0.1)
opts = {
'device' : {'id' : device},
'debug' : {'deterministic_compute': True}
}
if zero_enabled:
opts['distributed'] = {
'world_rank' : 0,
'world_size' : 1,
'horizontal_parallel_size' : 1,
'data_parallel_size' : 1,
'allreduce_post_accumulation' : True,
'deepspeed_zero_optimization':
{
'stage': 1
}
}
model, model_desc, loss_fn, batcher_fn, train_data, _, _ = _load_pytorch_transformer_model(device)
trainer = orttrainer.ORTTrainer(model, model_desc, optim_config, loss_fn=loss_fn, options=orttrainer.ORTTrainerOptions(opts))
return trainer
class _training_session_mock(object):
"""Mock object for the ORTTrainer _training_session member"""
def __init__(self, model_states, optimizer_states, partition_info):
self.model_states = model_states
self.optimizer_states = optimizer_states
self.partition_info = partition_info
def get_model_state(self, include_mixed_precision_weights=False):
return self.model_states
def get_optimizer_state(self):
return self.optimizer_states
def get_partition_info_map(self):
return self.partition_info
def _get_load_state_dict_strict_error_arguments():
"""Return a list of tuples that can be used as parameters for test_load_state_dict_errors_when_model_key_missing
Construct a list of tuples (training_session_state_dict, input_state_dict, error_arguments)
The load_state_dict function will compare the two state dicts (training_session_state_dict, input_state_dict) and
throw a runtime error with the missing/unexpected keys. The error arguments capture these missing/unexpected keys.
"""
training_session_state_dict = {
'model': {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
},
'optimizer': {
'a': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(5)
}
}
}
# input state dictionaries
precision_key_missing = {'model': {}, 'optimizer': {}}
precision_key_unexpected = {'model': {'full_precision': {}, 'mixed_precision': {}}, 'optimizer': {}}
model_state_key_missing = {'model': {'full_precision': {}}, 'optimizer': {}}
model_state_key_unexpected = {'model': {'full_precision': {'a': 2, 'b': 3, 'c': 4}}, 'optimizer': {}}
optimizer_model_state_key_missing = {'model': {'full_precision': {'a': 2, 'b': 3}}, 'optimizer': {}}
optimizer_model_state_key_unexpected = {'model': {'full_precision': {'a': 2, 'b': 3}}, 'optimizer': \
{'a': {}, 'shared_optimizer_state': {}, 'b': {}}}
optimizer_state_key_missing = {'model': {'full_precision': {'a': 2, 'b': 3}}, 'optimizer': \
{'a': {}, 'shared_optimizer_state': {'step': np.arange(5)}}}
optimizer_state_key_unexpected = {'model': {'full_precision': {'a': 2, 'b': 3}}, 'optimizer': \
{'a': {'Moment_1': np.arange(5), 'Moment_2': np.arange(7)}, 'shared_optimizer_state': {'step': np.arange(5), 'another_step': np.arange(1)}}}
input_arguments = [
(training_session_state_dict, precision_key_missing, ['full_precision']),
(training_session_state_dict, precision_key_unexpected, ['mixed_precision']),
(training_session_state_dict, model_state_key_missing, ['a', 'b']),
(training_session_state_dict, model_state_key_unexpected, ['c']),
(training_session_state_dict, optimizer_model_state_key_missing, ['a', 'shared_optimizer_state']),
(training_session_state_dict, optimizer_model_state_key_unexpected, ['b']),
(training_session_state_dict, optimizer_state_key_missing, ['Moment_1', 'Moment_2']),
(training_session_state_dict, optimizer_state_key_unexpected, ['another_step'])
]
return input_arguments
# Tests
def test_empty_state_dict_when_training_session_uninitialized():
trainer = _create_trainer()
with pytest.warns(UserWarning) as user_warning:
state_dict = trainer.state_dict()
assert len(state_dict.keys()) == 0
assert user_warning[0].message.args[0] == "ONNX Runtime training session is not initialized yet. " \
"Please run train_step or eval_step at least once before calling ORTTrainer.state_dict()."
@patch('onnx.ModelProto')
def test_training_session_provides_empty_model_states(onnx_model_mock):
trainer = _create_trainer()
training_session_mock = _training_session_mock({}, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert len(state_dict['model'].keys()) == 0
@patch('onnx.ModelProto')
def test_training_session_provides_model_states(onnx_model_mock):
trainer = _create_trainer()
model_states = {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
}
training_session_mock = _training_session_mock(model_states, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert (state_dict['model']['full_precision']['a'] == np.arange(5)).all()
assert (state_dict['model']['full_precision']['b'] == np.arange(7)).all()
@patch('onnx.ModelProto')
def test_training_session_provides_model_states_pytorch_format(onnx_model_mock):
trainer = _create_trainer()
model_states = {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
}
training_session_mock = _training_session_mock(model_states, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict(pytorch_format=True)
assert torch.all(torch.eq(state_dict['a'], torch.tensor(np.arange(5))))
assert torch.all(torch.eq(state_dict['b'], torch.tensor(np.arange(7))))
@patch('onnx.ModelProto')
def test_onnx_graph_provides_frozen_model_states(onnx_model_mock):
trainer = _create_trainer()
model_states = {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
}
training_session_mock = _training_session_mock(model_states, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
trainer.options.utils.frozen_weights = ['a_frozen_weight', 'a_float16_weight']
trainer._onnx_model.graph.initializer = [
onnx.numpy_helper.from_array(np.array([1, 2, 3], dtype=np.float32), 'a_frozen_weight'),
onnx.numpy_helper.from_array(np.array([4, 5, 6], dtype=np.float32), 'a_non_fronzen_weight'),
onnx.numpy_helper.from_array(np.array([7, 8, 9], dtype=np.float16), 'a_float16_weight')
]
state_dict = trainer.state_dict()
assert (state_dict['model']['full_precision']['a'] == np.arange(5)).all()
assert (state_dict['model']['full_precision']['b'] == np.arange(7)).all()
assert (state_dict['model']['full_precision']['a_frozen_weight'] == np.array([1, 2, 3], dtype=np.float32)).all()
assert 'a_non_fronzen_weight' not in state_dict['model']['full_precision']
assert (state_dict['model']['full_precision']['a_float16_weight'] == np.array([7, 8, 9], dtype=np.float32)).all()
@patch('onnx.ModelProto')
def test_training_session_provides_empty_optimizer_states(onnx_model_mock):
trainer = _create_trainer()
training_session_mock = _training_session_mock({}, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert len(state_dict['optimizer'].keys()) == 0
@patch('onnx.ModelProto')
def test_training_session_provides_optimizer_states(onnx_model_mock):
trainer = _create_trainer()
optimizer_states = {
'model_weight': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(1)
}
}
training_session_mock = _training_session_mock({}, optimizer_states, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert (state_dict['optimizer']['model_weight']['Moment_1'] == np.arange(5)).all()
assert (state_dict['optimizer']['model_weight']['Moment_2'] == np.arange(7)).all()
assert (state_dict['optimizer']['shared_optimizer_state']['step'] == np.arange(1)).all()
@patch('onnx.ModelProto')
def test_training_session_provides_optimizer_states_pytorch_format(onnx_model_mock):
trainer = _create_trainer()
model_states = {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
}
optimizer_states = {
'model_weight': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(1)
}
}
training_session_mock = _training_session_mock(model_states, optimizer_states, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict(pytorch_format=True)
assert 'optimizer' not in state_dict
@patch('onnx.ModelProto')
def test_training_session_provides_empty_partition_info_map(onnx_model_mock):
trainer = _create_trainer(zero_enabled=True)
training_session_mock = _training_session_mock({}, {}, {})
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert len(state_dict['partition_info'].keys()) == 0
@patch('onnx.ModelProto')
def test_training_session_provides_partition_info_map(onnx_model_mock):
trainer = _create_trainer(zero_enabled=True)
partition_info = {
'a': {
'original_dim': [1, 2, 3]
}
}
training_session_mock = _training_session_mock({}, {}, partition_info)
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert state_dict['partition_info']['a']['original_dim'] == [1, 2, 3]
@patch('onnx.ModelProto')
def test_training_session_provides_all_states(onnx_model_mock):
trainer = _create_trainer(zero_enabled=True)
model_states = {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
}
optimizer_states = {
'model_weight': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(1)
}
}
partition_info = {
'a': {
'original_dim': [1, 2, 3]
}
}
training_session_mock = _training_session_mock(model_states, optimizer_states, partition_info)
trainer._training_session = training_session_mock
trainer._onnx_model = onnx_model_mock()
state_dict = trainer.state_dict()
assert (state_dict['model']['full_precision']['a'] == np.arange(5)).all()
assert (state_dict['model']['full_precision']['b'] == np.arange(7)).all()
assert (state_dict['optimizer']['model_weight']['Moment_1'] == np.arange(5)).all()
assert (state_dict['optimizer']['model_weight']['Moment_2'] == np.arange(7)).all()
assert (state_dict['optimizer']['shared_optimizer_state']['step'] == np.arange(1)).all()
assert state_dict['partition_info']['a']['original_dim'] == [1, 2, 3]
def test_load_state_dict_holds_when_training_session_not_initialized():
trainer = _create_trainer()
state_dict = {
'model': {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
},
'optimizer': {
'a': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(5)
}
}
}
assert not trainer._load_state_dict
state_dict = trainer.load_state_dict(state_dict)
assert trainer._load_state_dict
@pytest.mark.parametrize("state_dict, input_state_dict, error_key", [
({
'model':{},
'optimizer':{}
},
{
'model':{},
'optimizer':{},
'trainer_options': {
'optimizer_name': 'LambOptimizer'
}
},
'train_step_info'),
({
'optimizer':{},
'train_step_info': {
'optimization_step': 0,
'step': 0
}
},
{
'optimizer':{},
'trainer_options': {
'optimizer_name': 'LambOptimizer'
},
'train_step_info': {
'optimization_step': 0,
'step': 0
}
},
'model'),
({
'model':{},
'train_step_info': {
'optimization_step': 0,
'step': 0
}
},
{
'model':{},
'trainer_options': {
'optimizer_name': 'LambOptimizer'
},
'train_step_info': {
'optimization_step': 0,
'step': 0
}
},
'optimizer')])
def test_load_state_dict_warns_when_model_optimizer_key_missing(state_dict, input_state_dict, error_key):
trainer = _create_trainer()
trainer._training_session = _training_session_mock({}, {}, {})
trainer.state_dict = Mock(return_value=state_dict)
trainer._update_onnx_model_initializers = Mock()
trainer._init_session = Mock()
with patch('onnx.ModelProto') as onnx_model_mock:
trainer._onnx_model = onnx_model_mock()
trainer._onnx_model.graph.initializer = []
with pytest.warns(UserWarning) as user_warning:
trainer.load_state_dict(input_state_dict)
assert user_warning[0].message.args[0] == "Missing key: {} in state_dict".format(error_key)
@pytest.mark.parametrize("state_dict, input_state_dict, error_keys", _get_load_state_dict_strict_error_arguments())
def test_load_state_dict_errors_when_state_dict_mismatch(state_dict, input_state_dict, error_keys):
trainer = _create_trainer()
trainer._training_session = _training_session_mock({}, {}, {})
trainer.state_dict = Mock(return_value=state_dict)
with pytest.raises(RuntimeError) as runtime_error:
trainer.load_state_dict(input_state_dict)
assert any(key in str(runtime_error.value) for key in error_keys)
@patch('onnx.ModelProto')
def test_load_state_dict_loads_the_states_and_inits_training_session(onnx_model_mock):
trainer = _create_trainer()
training_session_state_dict = {
'model': {
'full_precision': {
'a': np.arange(5),
'b': np.arange(7)
}
},
'optimizer': {
'a': {
'Moment_1': np.arange(5),
'Moment_2': np.arange(7)
},
'shared_optimizer_state': {
'step': np.arange(1)
}
}
}
input_state_dict = {
'model': {
'full_precision': {
'a': np.array([1, 2]),
'b': np.array([3, 4])
}
},
'optimizer': {
'a': {
'Moment_1': np.array([5, 6]),
'Moment_2': np.array([7, 8])
},
'shared_optimizer_state': {
'step': np.array([9])
}
},
'trainer_options': {
'optimizer_name': 'LambOptimizer'
}
}
trainer._training_session = _training_session_mock({}, {}, {})
trainer.state_dict = Mock(return_value=training_session_state_dict)
trainer._onnx_model = onnx_model_mock()
trainer._onnx_model.graph.initializer = [
onnx.numpy_helper.from_array(np.arange(20, dtype=np.float32), 'a'),
onnx.numpy_helper.from_array(np.arange(25, dtype=np.float32), 'b')
]
trainer._update_onnx_model_initializers = Mock()
trainer._init_session = Mock()
trainer.load_state_dict(input_state_dict)
loaded_initializers, _ = trainer._update_onnx_model_initializers.call_args
state_dict_to_load, _ = trainer._init_session.call_args
assert 'a' in loaded_initializers[0]
assert (loaded_initializers[0]['a'] == np.array([1, 2])).all()
assert 'b' in loaded_initializers[0]
assert (loaded_initializers[0]['b'] == np.array([3, 4])).all()
assert (state_dict_to_load[0]['a']['Moment_1'] == np.array([5, 6])).all()
assert (state_dict_to_load[0]['a']['Moment_2'] == np.array([7, 8])).all()
assert (state_dict_to_load[0]['shared_optimizer_state']['step'] == np.array([9])).all()
@patch('onnxruntime.training._checkpoint_storage.save')
def test_save_checkpoint_calls_checkpoint_storage_save(save_mock):
trainer = _create_trainer()
state_dict = {
'model': {},
'optimizer': {}
}
trainer.state_dict = Mock(return_value=state_dict)
trainer.save_checkpoint('abc')
save_args, _ = save_mock.call_args
assert 'model' in save_args[0]
assert not bool(save_args[0]['model'])
assert 'optimizer' in save_args[0]
assert not bool(save_args[0]['optimizer'])
assert save_args[1] == 'abc'
@patch('onnxruntime.training._checkpoint_storage.save')
def test_save_checkpoint_exclude_optimizer_states(save_mock):
trainer = _create_trainer()
state_dict = {
'model': {},
'optimizer': {}
}
trainer.state_dict = Mock(return_value=state_dict)
trainer.save_checkpoint('abc', include_optimizer_states=False)
save_args, _ = save_mock.call_args
assert 'model' in save_args[0]
assert not bool(save_args[0]['model'])
assert 'optimizer' not in save_args[0]
assert save_args[1] == 'abc'
@patch('onnxruntime.training._checkpoint_storage.save')
def test_save_checkpoint_user_dict(save_mock):
trainer = _create_trainer()
state_dict = {
'model': {},
'optimizer': {}
}
trainer.state_dict = Mock(return_value=state_dict)
trainer.save_checkpoint('abc', user_dict={'abc': np.arange(4)})
save_args, _ = save_mock.call_args
assert 'user_dict' in save_args[0]
assert save_args[0]['user_dict'] == _checkpoint_storage.to_serialized_hex({'abc': np.arange(4)})
@patch('onnxruntime.training._checkpoint_storage.load')
@patch('onnxruntime.training.checkpoint.aggregate_checkpoints')
def test_load_checkpoint(aggregate_checkpoints_mock, load_mock):
trainer = _create_trainer()
trainer_options = {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(0)
}
state_dict = {
'model': {},
'optimizer': {},
'trainer_options': {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(0)
}
}
trainer.load_state_dict = Mock()
load_mock.side_effect = [trainer_options, state_dict]
trainer.load_checkpoint('abc')
args_list = load_mock.call_args_list
load_args, load_kwargs = args_list[0]
assert load_args[0] == 'abc'
assert load_kwargs['key'] == 'trainer_options'
load_args, load_kwargs = args_list[1]
assert load_args[0] == 'abc'
assert 'key' not in load_kwargs
assert not aggregate_checkpoints_mock.called
@patch('onnxruntime.training._checkpoint_storage.load')
@patch('onnxruntime.training.checkpoint.aggregate_checkpoints')
@pytest.mark.parametrize("trainer_options", [
{
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(4),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(4),
'zero_stage': np.int64(1)
},
{
'mixed_precision': np.bool_(True),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(1)
},
{
'mixed_precision': np.bool_(True),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(1)
}
])
def test_load_checkpoint_aggregation_required_zero_enabled(aggregate_checkpoints_mock, load_mock, trainer_options):
trainer = _create_trainer()
trainer.load_state_dict = Mock()
load_mock.side_effect = [trainer_options]
trainer.load_checkpoint('abc')
args_list = load_mock.call_args_list
load_args, load_kwargs = args_list[0]
assert load_args[0] == 'abc'
assert load_kwargs['key'] == 'trainer_options'
assert aggregate_checkpoints_mock.called
call_args, _ = aggregate_checkpoints_mock.call_args
assert call_args[0] == tuple(['abc'])
@patch('onnxruntime.training._checkpoint_storage.load')
@patch('onnxruntime.training.checkpoint.aggregate_checkpoints')
def test_load_checkpoint_user_dict(aggregate_checkpoints_mock, load_mock):
trainer = _create_trainer()
trainer_options = {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(1),
'zero_stage': np.int64(0)
}
state_dict = {
'model': {},
'optimizer': {},
'trainer_options': {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(1),
'zero_stage': np.int64(0)
},
'user_dict': _checkpoint_storage.to_serialized_hex({'array': torch.tensor(np.arange(5))})
}
trainer.load_state_dict = Mock()
load_mock.side_effect = [trainer_options, state_dict]
user_dict = trainer.load_checkpoint('abc')
assert torch.all(torch.eq(user_dict['array'], torch.tensor(np.arange(5))))
@patch('onnxruntime.training._checkpoint_storage.load')
def test_checkpoint_aggregation(load_mock):
trainer_options1 = {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(2),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(2),
'zero_stage': np.int64(1),
'optimizer_name': b'Adam'
}
trainer_options2 = {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(1),
'world_size': np.int64(2),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(2),
'zero_stage': np.int64(1),
'optimizer_name': b'Adam'
}
state_dict1 = {
'model': {
'full_precision': {
'optimizer_sharded': np.array([1, 2, 3]),
'non_sharded': np.array([11, 22, 33])
}
},
'optimizer': {
'optimizer_sharded': {
'Moment_1': np.array([9, 8, 7]),
'Moment_2': np.array([99, 88, 77]),
'Step': np.array([5])
},
'non_sharded': {
'Moment_1': np.array([666, 555, 444]),
'Moment_2': np.array([6666, 5555, 4444]),
'Step': np.array([55])
}
},
'trainer_options': {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(0),
'optimizer_name': b'Adam'
},
'partition_info': {
'optimizer_sharded': {'original_dim': np.array([2, 3])}
}
}
state_dict2 = {
'model': {
'full_precision': {
'optimizer_sharded': np.array([1, 2, 3]),
'non_sharded': np.array([11, 22, 33])
}
},
'optimizer': {
'optimizer_sharded': {
'Moment_1': np.array([6, 5, 4]),
'Moment_2': np.array([66, 55, 44]),
'Step': np.array([5])
},
'non_sharded': {
'Moment_1': np.array([666, 555, 444]),
'Moment_2': np.array([6666, 5555, 4444]),
'Step': np.array([55])
}
},
'trainer_options': {
'mixed_precision': np.bool_(False),
'world_rank': np.int64(1),
'world_size': np.int64(1),
'horizontal_parallel_size' : np.int64(1),
'data_parallel_size' : np.int64(1),
'zero_stage': np.int64(0),
'optimizer_name': b'Adam'
},
'partition_info': {
'optimizer_sharded': {'original_dim': np.array([2, 3])}
}
}
load_mock.side_effect = [trainer_options1, trainer_options2, trainer_options1, state_dict1, state_dict2]
state_dict = checkpoint.aggregate_checkpoints(['abc', 'def'], pytorch_format=False)
assert (state_dict['model']['full_precision']['optimizer_sharded'] == np.array([1, 2, 3])).all()
assert (state_dict['model']['full_precision']['non_sharded'] == np.array([11, 22, 33])).all()
assert (state_dict['optimizer']['optimizer_sharded']['Moment_1'] == np.array([[9, 8, 7], [6, 5, 4]])).all()
assert (state_dict['optimizer']['optimizer_sharded']['Moment_2'] == np.array([[99, 88, 77], [66, 55, 44]])).all()
assert (state_dict['optimizer']['optimizer_sharded']['Step'] == np.array([5])).all()
assert (state_dict['optimizer']['non_sharded']['Moment_1'] == np.array([666, 555, 444])).all()
assert (state_dict['optimizer']['non_sharded']['Moment_2'] == np.array([6666, 5555, 4444])).all()
assert (state_dict['optimizer']['non_sharded']['Step'] == np.array([55])).all()
assert state_dict['trainer_options']['mixed_precision'] == False
assert state_dict['trainer_options']['world_rank'] == 0
assert state_dict['trainer_options']['world_size'] == 1
assert state_dict['trainer_options']['horizontal_parallel_size'] == 1
assert state_dict['trainer_options']['data_parallel_size'] == 1
assert state_dict['trainer_options']['zero_stage'] == 0
assert state_dict['trainer_options']['optimizer_name'] == b'Adam'
@patch('onnxruntime.training._checkpoint_storage.load')
def test_checkpoint_aggregation_mixed_precision(load_mock):
trainer_options1 = {
'mixed_precision': np.bool_(True),
'world_rank': np.int64(0),
'world_size': np.int64(2),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(2),
'zero_stage': np.int64(1),
'optimizer_name': b'Adam'
}
trainer_options2 = {
'mixed_precision': np.bool_(True),
'world_rank': np.int64(1),
'world_size': np.int64(2),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(2),
'zero_stage': np.int64(1),
'optimizer_name': b'Adam'
}
state_dict1 = {
'model': {
'full_precision': {
'sharded': np.array([1, 2, 3]),
'non_sharded': np.array([11, 22, 33])
}
},
'optimizer': {
'sharded': {
'Moment_1': np.array([9, 8, 7]),
'Moment_2': np.array([99, 88, 77]),
'Step': np.array([5])
},
'non_sharded': {
'Moment_1': np.array([666, 555, 444]),
'Moment_2': np.array([6666, 5555, 4444]),
'Step': np.array([55])
}
},
'trainer_options': {
'mixed_precision': np.bool_(True),
'world_rank': np.int64(0),
'world_size': np.int64(1),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(1),
'zero_stage': np.int64(0),
'optimizer_name': b'Adam'
},
'partition_info': {
'sharded': {'original_dim': np.array([2, 3])}
}
}
state_dict2 = {
'model': {
'full_precision': {
'sharded': np.array([4, 5, 6]),
'non_sharded': np.array([11, 22, 33])
}
},
'optimizer': {
'sharded': {
'Moment_1': np.array([6, 5, 4]),
'Moment_2': np.array([66, 55, 44]),
'Step': np.array([5])
},
'non_sharded': {
'Moment_1': np.array([666, 555, 444]),
'Moment_2': np.array([6666, 5555, 4444]),
'Step': np.array([55])
}
},
'trainer_options': {
'mixed_precision': np.bool_(True),
'world_rank': np.int64(1),
'world_size': np.int64(1),
'horizontal_parallel_size': np.int64(1),
'data_parallel_size': np.int64(1),
'zero_stage': np.int64(0),
'optimizer_name': b'Adam'
},
'partition_info': {
'sharded': {'original_dim': np.array([2, 3])}
}
}
load_mock.side_effect = [trainer_options1, trainer_options2, trainer_options1, state_dict1, state_dict2]
state_dict = checkpoint.aggregate_checkpoints(['abc', 'def'], pytorch_format=False)
assert (state_dict['model']['full_precision']['sharded'] == np.array([[1, 2, 3], [4, 5, 6]])).all()
assert (state_dict['model']['full_precision']['non_sharded'] == np.array([11, 22, 33])).all()
assert (state_dict['optimizer']['sharded']['Moment_1'] == np.array([[9, 8, 7], [6, 5, 4]])).all()
assert (state_dict['optimizer']['sharded']['Moment_2'] == np.array([[99, 88, 77], [66, 55, 44]])).all()
assert (state_dict['optimizer']['sharded']['Step'] == np.array([5])).all()
assert (state_dict['optimizer']['non_sharded']['Moment_1'] == np.array([666, 555, 444])).all()
assert (state_dict['optimizer']['non_sharded']['Moment_2'] == np.array([6666, 5555, 4444])).all()
assert (state_dict['optimizer']['non_sharded']['Step'] == np.array([55])).all()
assert state_dict['trainer_options']['mixed_precision'] == True
assert state_dict['trainer_options']['world_rank'] == 0
assert state_dict['trainer_options']['world_size'] == 1
assert state_dict['trainer_options']['horizontal_parallel_size'] == 1
assert state_dict['trainer_options']['data_parallel_size'] == 1
assert state_dict['trainer_options']['zero_stage'] == 0
assert state_dict['trainer_options']['optimizer_name'] == b'Adam'
| 37.647619
| 148
| 0.611561
| 3,742
| 31,624
| 4.815874
| 0.064137
| 0.075412
| 0.040786
| 0.023306
| 0.838633
| 0.813773
| 0.789301
| 0.736974
| 0.697464
| 0.67044
| 0
| 0.031847
| 0.238427
| 31,624
| 839
| 149
| 37.692491
| 0.716409
| 0.018846
| 0
| 0.616
| 0
| 0
| 0.207079
| 0.043816
| 0
| 0
| 0
| 0
| 0.116
| 1
| 0.038667
| false
| 0
| 0.009333
| 0.004
| 0.056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ea1e8f3e59a2502d167add15ee1ef7df59cd09ce
| 379
|
py
|
Python
|
erica/domain/BackgroundJobs/BackgroundJobInterface.py
|
punknoir101/erica-1
|
675a6280d38ca5b56946af6f3ed7e295ba896db0
|
[
"MIT"
] | null | null | null |
erica/domain/BackgroundJobs/BackgroundJobInterface.py
|
punknoir101/erica-1
|
675a6280d38ca5b56946af6f3ed7e295ba896db0
|
[
"MIT"
] | null | null | null |
erica/domain/BackgroundJobs/BackgroundJobInterface.py
|
punknoir101/erica-1
|
675a6280d38ca5b56946af6f3ed7e295ba896db0
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class BackgroundJobInterface(metaclass=ABCMeta):
    """Abstract interface for background-job queue implementations.

    Fix: the original used the Python-2 ``__metaclass__ = ABCMeta`` class
    attribute, which Python 3 silently ignores — ``@abstractmethod`` was not
    enforced and the "interface" could be instantiated or partially
    implemented without error. Declaring ``metaclass=ABCMeta`` restores
    enforcement: instantiating this class, or any subclass that does not
    override every abstract method, raises ``TypeError``.
    """

    @abstractmethod
    def enqueue(self, f, *args, **kwargs):
        """Submit callable ``f`` with the given arguments as a background job."""

    @abstractmethod
    def scheduled_enqueue(self):
        """Submit a job to run on a schedule."""

    @abstractmethod
    def get_enqueued_job_by_id(self):
        """Look up a previously enqueued job."""

    @abstractmethod
    def list_all_jobs(self):
        """Return all known jobs."""
| 17.227273
| 42
| 0.662269
| 39
| 379
| 6.153846
| 0.615385
| 0.283333
| 0.2625
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.271768
| 379
| 21
| 43
| 18.047619
| 0.869565
| 0
| 0
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0.266667
| 0.066667
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ea6c325ef1569aaff98b9c5edf3c0ebf6cb13acd
| 45
|
py
|
Python
|
pyalt/gui/__main__.py
|
demian-wolf/Alternativa-PC-Client
|
24e1136ef006268b9afe46512c69c7354b2c1b60
|
[
"MIT"
] | null | null | null |
pyalt/gui/__main__.py
|
demian-wolf/Alternativa-PC-Client
|
24e1136ef006268b9afe46512c69c7354b2c1b60
|
[
"MIT"
] | null | null | null |
pyalt/gui/__main__.py
|
demian-wolf/Alternativa-PC-Client
|
24e1136ef006268b9afe46512c69c7354b2c1b60
|
[
"MIT"
] | null | null | null |
# Package entry point (``python -m pyalt.gui``): launch the journal window.
from .journal import Main

if __name__ == "__main__":
    # Guard the GUI start so importing this module (e.g. by tooling or tests)
    # does not open a window; behavior under ``python -m`` is unchanged.
    Main().mainloop()
| 11.25
| 25
| 0.733333
| 6
| 45
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 45
| 3
| 26
| 15
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
57a50a9c9bcc8b47f1e0e0f3e6a18b367cd81839
| 51
|
py
|
Python
|
sepc/self_mmdet/models/__init__.py
|
implus/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | 2
|
2020-04-27T06:30:32.000Z
|
2020-04-27T06:30:34.000Z
|
sepc/self_mmdet/models/__init__.py
|
yhl41001/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | null | null | null |
sepc/self_mmdet/models/__init__.py
|
yhl41001/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | 1
|
2021-03-23T01:39:30.000Z
|
2021-03-23T01:39:30.000Z
|
from .anchor_heads import *
from .necks import sepc
| 25.5
| 27
| 0.803922
| 8
| 51
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 51
| 2
| 28
| 25.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
57bafef27aedcc92a9f9359583bd93eb4fa1df87
| 41,119
|
py
|
Python
|
tests/config_test.py
|
tomcallahan/rally
|
4d05fa88ea0920ec1f3178c3705201a53f6420db
|
[
"Apache-2.0"
] | null | null | null |
tests/config_test.py
|
tomcallahan/rally
|
4d05fa88ea0920ec1f3178c3705201a53f6420db
|
[
"Apache-2.0"
] | null | null | null |
tests/config_test.py
|
tomcallahan/rally
|
4d05fa88ea0920ec1f3178c3705201a53f6420db
|
[
"Apache-2.0"
] | null | null | null |
import os
import configparser
from unittest import TestCase
import unittest.mock as mock
from esrally import config
from esrally.utils import io
class MockInput:
    """Callable stand-in for interactive input that replays canned answers."""

    def __init__(self, inputs):
        # Pre-build the iterator so each call consumes exactly one answer.
        self.inputs = iter(inputs)

    def __call__(self, *args, **kwargs):
        # Forward the prompt to the (silent) output sink, hand out the next
        # canned answer, and echo it the same way before returning it.
        null_output(*args, **kwargs)
        answer = next(self.inputs)
        null_output(answer)
        return answer
def null_output(*args, **kwargs):
    """Accept and discard any arguments; silent replacement for print in tests."""
class InMemoryConfigStore:
    """Config-file stand-in that keeps everything in memory for tests."""

    def __init__(self, config_name, config=None, backup_created=False, present=False):
        self.config_name = config_name
        # A truthy dict is converted to a real ConfigParser up front; anything
        # falsy (None, empty dict) is kept exactly as passed in.
        if not config:
            self.config = config
        else:
            parser = configparser.ConfigParser()
            parser.read_dict(config)
            self.config = parser
        self.backup_created = backup_created
        self.present = present
        self.location = "in-memory"
        self.config_dir = "in-memory"

    def backup(self):
        # Nothing to copy in memory; just record that a backup happened.
        self.backup_created = True

    def store(self, c):
        # Storing marks the config as present and replaces the parser wholesale.
        self.present = True
        parser = configparser.ConfigParser()
        parser.read_dict(c)
        self.config = parser

    def load(self, interpolation=None):
        # interpolation is accepted for interface parity with the real store
        # but is not supported here; the parser is returned unchanged.
        return self.config
class ConfigTests(TestCase):
    """Load/store/override behavior of config.Config against the in-memory store."""
    def test_load_non_existing_config(self):
        """A fresh Config reports no file present but still serves default properties."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        self.assertFalse(cfg.config_present())
        # standard properties are still available
        self.assertEqual("rally-node", cfg.opts("provisioning", "node.name.prefix"))
    def test_load_existing_config(self):
        """Stored values become readable after load_config() and can be overridden."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        self.assertFalse(cfg.config_present())
        sample_config = {
            "tests": {
                "sample.key": "value"
            },
            "meta": {
                "config.version": config.Config.CURRENT_CONFIG_VERSION
            }
        }
        cfg.config_file.store(sample_config)
        self.assertTrue(cfg.config_present())
        cfg.load_config()
        # standard properties are still available
        self.assertEqual("rally-node", cfg.opts("provisioning", "node.name.prefix"))
        self.assertEqual("value", cfg.opts("tests", "sample.key"))
        # we can also override values
        cfg.add(config.Scope.applicationOverride, "tests", "sample.key", "override")
        self.assertEqual("override", cfg.opts("tests", "sample.key"))
    def test_load_all_opts_in_section(self):
        """all_opts() returns every key of a section with scope overrides applied."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        self.assertFalse(cfg.config_present())
        sample_config = {
            "distributions": {
                "release.url": "https://acme.com/releases",
                "release.cache": "true",
                "snapshot.url": "https://acme.com/snapshots",
                "snapshot.cache": "false"
            },
            "system": {
                "env.name": "local"
            },
            "meta": {
                "config.version": config.Config.CURRENT_CONFIG_VERSION
            }
        }
        cfg.config_file.store(sample_config)
        self.assertTrue(cfg.config_present())
        cfg.load_config()
        # override a value so we can see that the scoping logic still works. Default is scope "application"
        cfg.add(config.Scope.applicationOverride, "distributions", "snapshot.cache", "true")
        self.assertEqual({
            "release.url": "https://acme.com/releases",
            "release.cache": "true",
            "snapshot.url": "https://acme.com/snapshots",
            # overridden!
            "snapshot.cache": "true"
        }, cfg.all_opts("distributions"))
    def test_add_all_in_section(self):
        """add_all() copies only the requested section; unknown sections are no-ops."""
        source_cfg = config.Config(config_file_class=InMemoryConfigStore)
        sample_config = {
            "tests": {
                "sample.key": "value",
                "sample.key2": "value"
            },
            "no_copy": {
                "other.key": "value"
            },
            "meta": {
                "config.version": config.Config.CURRENT_CONFIG_VERSION
            }
        }
        source_cfg.config_file.store(sample_config)
        source_cfg.load_config()
        target_cfg = config.Config(config_file_class=InMemoryConfigStore)
        self.assertIsNone(target_cfg.opts("tests", "sample.key", mandatory=False))
        target_cfg.add_all(source=source_cfg, section="tests")
        self.assertEqual("value", target_cfg.opts("tests", "sample.key"))
        self.assertIsNone(target_cfg.opts("no_copy", "other.key", mandatory=False))
        # nonexisting key will not throw an error
        target_cfg.add_all(source=source_cfg, section="this section does not exist")
class AutoLoadConfigTests(TestCase):
    """auto_load_local_config(): creation, amendment, and migration of a local config from a base config."""
    def test_can_create_non_existing_config(self):
        """When no local config exists, one is created by copying selected base sections (not all)."""
        base_cfg = config.Config(config_name="unittest", config_file_class=InMemoryConfigStore)
        base_cfg.add(config.Scope.application, "meta", "config.version", config.Config.CURRENT_CONFIG_VERSION)
        base_cfg.add(config.Scope.application, "benchmarks", "local.dataset.cache", "/base-config/data-set-cache")
        base_cfg.add(config.Scope.application, "reporting", "datastore.type", "elasticsearch")
        base_cfg.add(config.Scope.application, "tracks", "metrics.url", "http://github.com/org/metrics")
        base_cfg.add(config.Scope.application, "teams", "private.url", "http://github.com/org/teams")
        base_cfg.add(config.Scope.application, "distributions", "release.cache", False)
        base_cfg.add(config.Scope.application, "defaults", "preserve_benchmark_candidate", True)
        cfg = config.auto_load_local_config(base_cfg, config_file_class=InMemoryConfigStore)
        self.assertTrue(cfg.config_file.present)
        # did not just copy base config
        self.assertNotEqual(base_cfg.opts("benchmarks", "local.dataset.cache"), cfg.opts("benchmarks", "local.dataset.cache"))
        # copied sections from base config
        self.assert_equals_base_config(base_cfg, cfg, "reporting", "datastore.type")
        self.assert_equals_base_config(base_cfg, cfg, "tracks", "metrics.url")
        self.assert_equals_base_config(base_cfg, cfg, "teams", "private.url")
        self.assert_equals_base_config(base_cfg, cfg, "distributions", "release.cache")
        self.assert_equals_base_config(base_cfg, cfg, "defaults", "preserve_benchmark_candidate")
    def test_can_load_and_amend_existing_config(self):
        """An existing up-to-date local config keeps its own values but gains the requested additional sections."""
        base_cfg = config.Config(config_name="unittest", config_file_class=InMemoryConfigStore)
        base_cfg.add(config.Scope.application, "meta", "config.version", config.Config.CURRENT_CONFIG_VERSION)
        base_cfg.add(config.Scope.application, "benchmarks", "local.dataset.cache", "/base-config/data-set-cache")
        base_cfg.add(config.Scope.application, "unit-test", "sample.property", "let me copy you")
        cfg = config.auto_load_local_config(base_cfg, additional_sections=["unit-test"],
                                            config_file_class=InMemoryConfigStore, present=True, config={
            "distributions": {
                "release.url": "https://acme.com/releases",
                "release.cache": "true",
            },
            "system": {
                "env.name": "existing-unit-test-config"
            },
            "meta": {
                "config.version": config.Config.CURRENT_CONFIG_VERSION
            },
            "benchmarks": {
                "local.dataset.cache": "/tmp/rally/data"
            }
        })
        self.assertTrue(cfg.config_file.present)
        # did not just copy base config
        self.assertNotEqual(base_cfg.opts("benchmarks", "local.dataset.cache"), cfg.opts("benchmarks", "local.dataset.cache"))
        # keeps config properties
        self.assertEqual("existing-unit-test-config", cfg.opts("system", "env.name"))
        # copies additional properties
        self.assert_equals_base_config(base_cfg, cfg, "unit-test", "sample.property")
    def test_can_migrate_outdated_config(self):
        """A local config one version behind is migrated up to CURRENT_CONFIG_VERSION on load."""
        base_cfg = config.Config(config_name="unittest", config_file_class=InMemoryConfigStore)
        base_cfg.add(config.Scope.application, "meta", "config.version", config.Config.CURRENT_CONFIG_VERSION)
        base_cfg.add(config.Scope.application, "benchmarks", "local.dataset.cache", "/base-config/data-set-cache")
        base_cfg.add(config.Scope.application, "unit-test", "sample.property", "let me copy you")
        cfg = config.auto_load_local_config(base_cfg, additional_sections=["unit-test"],
                                            config_file_class=InMemoryConfigStore, present=True, config={
            "distributions": {
                "release.url": "https://acme.com/releases",
                "release.cache": "true",
            },
            "system": {
                "env.name": "existing-unit-test-config"
            },
            # outdated
            "meta": {
                "config.version": config.Config.CURRENT_CONFIG_VERSION - 1
            },
            "benchmarks": {
                "local.dataset.cache": "/tmp/rally/data"
            },
            "runtime": {
                "java8.home": "/opt/jdk8"
            }
        })
        self.assertTrue(cfg.config_file.present)
        # did not just copy base config
        self.assertNotEqual(base_cfg.opts("benchmarks", "local.dataset.cache"), cfg.opts("benchmarks", "local.dataset.cache"))
        # migrated existing config
        self.assertEqual(config.Config.CURRENT_CONFIG_VERSION, int(cfg.opts("meta", "config.version")))
    def assert_equals_base_config(self, base_config, local_config, section, key):
        """Assert that base and local config agree on the value of (section, key)."""
        self.assertEqual(base_config.opts(section, key), local_config.opts(section, key))
class ConfigFactoryTests(TestCase):
    """ConfigFactory.create_config(): simple and advanced interactive setup, with external probes mocked out."""
    @mock.patch("esrally.utils.git.is_working_copy")
    @mock.patch("esrally.utils.jvm.is_early_access_release")
    @mock.patch("esrally.utils.io.guess_java_home")
    @mock.patch("esrally.utils.io.guess_install_location")
    def test_create_simple_config(self, guess_install_location, guess_java_home, is_ea_release, working_copy):
        """Simple (non-advanced) setup with all user prompts answered by the default (empty input)."""
        guess_install_location.side_effect = ["/tests/usr/bin/git", "/tests/usr/bin/gradle"]
        guess_java_home.return_value = "/tests/java9/home"
        is_ea_release.return_value = False
        # Rally checks in the parent and sibling directories whether there is an ES working copy. We don't want this detection logic
        # to succeed spuriously (e.g. on developer machines).
        working_copy.return_value = False
        mock_input = MockInput([""])
        f = config.ConfigFactory(i=mock_input, sec_i=mock_input, o=null_output)
        config_store = InMemoryConfigStore("test")
        f.create_config(config_store)
        self.assertIsNotNone(config_store.config)
        # dump the generated config for easier debugging of failures
        for section, _ in config_store.config.items():
            for k, v in config_store.config[section].items():
                print("%s::%s: %s" % (section, k, v))
        self.assertTrue("meta" in config_store.config)
        self.assertEqual("13", config_store.config["meta"]["config.version"])
        self.assertTrue("system" in config_store.config)
        self.assertEqual("local", config_store.config["system"]["env.name"])
        self.assertTrue("node" in config_store.config)
        self.assertEqual(io.normalize_path(os.path.abspath("./in-memory/benchmarks")), config_store.config["node"]["root.dir"])
        self.assertEqual(io.normalize_path(os.path.abspath("./in-memory/benchmarks/src")), config_store.config["node"]["src.root.dir"])
        self.assertTrue("source" in config_store.config)
        self.assertEqual("https://github.com/elastic/elasticsearch.git", config_store.config["source"]["remote.repo.url"])
        self.assertEqual("elasticsearch", config_store.config["source"]["elasticsearch.src.subdir"])
        self.assertTrue("build" in config_store.config)
        self.assertEqual("/tests/usr/bin/gradle", config_store.config["build"]["gradle.bin"])
        self.assertTrue("runtime" in config_store.config)
        self.assertEqual("/tests/java9/home", config_store.config["runtime"]["java.home"])
        self.assertEqual("/tests/java9/home", config_store.config["runtime"]["java9.home"])
        self.assertTrue("benchmarks" in config_store.config)
        self.assertEqual("${node:root.dir}/data", config_store.config["benchmarks"]["local.dataset.cache"])
        self.assertTrue("reporting" in config_store.config)
        self.assertEqual("in-memory", config_store.config["reporting"]["datastore.type"])
        self.assertEqual("", config_store.config["reporting"]["datastore.host"])
        self.assertEqual("", config_store.config["reporting"]["datastore.port"])
        self.assertEqual("", config_store.config["reporting"]["datastore.secure"])
        self.assertEqual("", config_store.config["reporting"]["datastore.user"])
        self.assertEqual("", config_store.config["reporting"]["datastore.password"])
        self.assertTrue("tracks" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-tracks", config_store.config["tracks"]["default.url"])
        self.assertTrue("teams" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-teams", config_store.config["teams"]["default.url"])
        self.assertTrue("defaults" in config_store.config)
        self.assertEqual("False", config_store.config["defaults"]["preserve_benchmark_candidate"])
        self.assertTrue("distributions" in config_store.config)
        self.assertEqual("https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.1.url"])
        self.assertEqual("https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/"
                         "{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.2.url"])
        self.assertEqual("https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.url"])
        self.assertEqual("true", config_store.config["distributions"]["release.cache"])
    @mock.patch("esrally.utils.jvm.is_early_access_release")
    @mock.patch("esrally.utils.jvm.major_version")
    @mock.patch("esrally.utils.io.guess_java_home")
    @mock.patch("esrally.utils.io.guess_install_location")
    @mock.patch("esrally.utils.io.normalize_path")
    @mock.patch("os.path.exists")
    def test_create_simple_config_no_java_detected(self, path_exists, normalize_path, guess_install_location, guess_java_home,
                                                   major_jvm_version, jvm_is_early_access_release):
        """When auto-detection fails, the user is prompted to enter the JDK home paths manually."""
        guess_install_location.side_effect = ["/tests/usr/bin/git", "/tests/usr/bin/gradle"]
        guess_java_home.return_value = None
        normalize_path.side_effect = ["/home/user/.rally/benchmarks", "/tests/java9/home", "/tests/java8/home",
                                      "/home/user/.rally/benchmarks/src"]
        major_jvm_version.return_value = 9
        jvm_is_early_access_release.return_value = False
        path_exists.return_value = True
        f = config.ConfigFactory(i=MockInput(["/tests/java9/home", "/Projects/elasticsearch/src", "/tests/java8/home"]), o=null_output)
        config_store = InMemoryConfigStore("test")
        f.create_config(config_store)
        self.assertIsNotNone(config_store.config)
        self.assertTrue("runtime" in config_store.config)
        self.assertEqual("/tests/java8/home", config_store.config["runtime"]["java.home"])
    @mock.patch("esrally.utils.io.guess_java_home")
    @mock.patch("esrally.utils.io.guess_install_location")
    def test_create_simple_config_no_java_installed(self, guess_install_location, guess_java_home):
        """When no JDK is detected and the user declines to provide one, the runtime keys are omitted."""
        guess_install_location.side_effect = ["/tests/usr/bin/git", "/tests/usr/bin/gradle"]
        guess_java_home.return_value = None
        # the input is the question for the JDK home and the JDK 9 home directory - the user does not define one
        f = config.ConfigFactory(i=MockInput(["", ""]), o=null_output)
        config_store = InMemoryConfigStore("test")
        f.create_config(config_store)
        self.assertIsNotNone(config_store.config)
        self.assertFalse("java.home" in config_store.config["runtime"])
        self.assertFalse("java9.home" in config_store.config["runtime"])
    @mock.patch("esrally.utils.jvm.is_early_access_release")
    @mock.patch("esrally.utils.io.guess_java_home")
    @mock.patch("esrally.utils.io.guess_install_location")
    def test_create_advanced_config(self, guess_install_location, guess_java_home, is_ea_release):
        """Advanced setup: every prompt answered explicitly, including a secure Elasticsearch metrics store."""
        guess_install_location.side_effect = ["/tests/usr/bin/git", "/tests/usr/bin/gradle"]
        guess_java_home.side_effect = ["/tests/java8/home", "/tests/java9/home"]
        is_ea_release.return_value = False
        # one canned answer per interactive prompt, in prompt order
        f = config.ConfigFactory(i=MockInput([
            # benchmark root directory
            "/var/data/rally",
            # src dir
            "/Projects/elasticsearch/src",
            # metrics store type (Elasticsearch)
            "2",
            # data_store_host
            "localhost",
            # data_store_port
            "9200",
            # data_store_secure
            "Yes",
            # data_store_user
            "user",
            # env
            "unittest-env",
            # preserve benchmark candidate
            "y"
        ]), sec_i=MockInput(["pw"]), o=null_output)
        config_store = InMemoryConfigStore("test")
        f.create_config(config_store, advanced_config=True)
        self.assertIsNotNone(config_store.config)
        self.assertTrue("meta" in config_store.config)
        self.assertEqual("13", config_store.config["meta"]["config.version"])
        self.assertTrue("system" in config_store.config)
        self.assertEqual("unittest-env", config_store.config["system"]["env.name"])
        self.assertTrue("node" in config_store.config)
        self.assertEqual("/var/data/rally", config_store.config["node"]["root.dir"])
        self.assertTrue("source" in config_store.config)
        self.assertTrue("build" in config_store.config)
        self.assertEqual("/tests/usr/bin/gradle", config_store.config["build"]["gradle.bin"])
        self.assertTrue("runtime" in config_store.config)
        self.assertEqual("/tests/java8/home", config_store.config["runtime"]["java.home"])
        self.assertEqual("/tests/java9/home", config_store.config["runtime"]["java9.home"])
        self.assertTrue("benchmarks" in config_store.config)
        self.assertTrue("reporting" in config_store.config)
        self.assertEqual("elasticsearch", config_store.config["reporting"]["datastore.type"])
        self.assertEqual("localhost", config_store.config["reporting"]["datastore.host"])
        self.assertEqual("9200", config_store.config["reporting"]["datastore.port"])
        self.assertEqual("True", config_store.config["reporting"]["datastore.secure"])
        self.assertEqual("user", config_store.config["reporting"]["datastore.user"])
        self.assertEqual("pw", config_store.config["reporting"]["datastore.password"])
        self.assertTrue("tracks" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-tracks", config_store.config["tracks"]["default.url"])
        self.assertTrue("teams" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-teams", config_store.config["teams"]["default.url"])
        self.assertTrue("defaults" in config_store.config)
        self.assertEqual("True", config_store.config["defaults"]["preserve_benchmark_candidate"])
        self.assertTrue("distributions" in config_store.config)
        self.assertEqual("https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.1.url"])
        self.assertEqual("https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/"
                         "{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.2.url"])
        self.assertEqual("https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
                         config_store.config["distributions"]["release.url"])
        self.assertEqual("true", config_store.config["distributions"]["release.cache"])
class ConfigMigrationTests(TestCase):
# catch all test, migrations are checked in more detail in the other tests
@mock.patch("esrally.utils.io.get_size")
@mock.patch("esrally.time.sleep")
def test_migrate_from_0_to_latest(self, sleep, get_size):
get_size.return_value = 0
config_file = InMemoryConfigStore("test")
sample_config = {
"system": {
"root.dir": "in-memory"
},
"provisioning": {
},
"build": {
"maven.bin": "/usr/local/mvn"
},
"benchmarks": {
"metrics.stats.disk.device": "/dev/hdd1"
},
"reporting": {
"report.base.dir": "/tests/rally/reporting",
"output.html.report.filename": "index.html"
},
"runtime": {
"java8.home": "/opt/jdk/8",
}
}
config_file.store(sample_config)
config.migrate(config_file, 0, config.Config.CURRENT_CONFIG_VERSION, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual(str(config.Config.CURRENT_CONFIG_VERSION), config_file.config["meta"]["config.version"])
def test_migrate_from_2_to_3(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 2
},
"system": {
"root.dir": "in-memory"
},
"reporting": {
"report.base.dir": "/tests/rally/reporting",
"output.html.report.filename": "index.html"
},
}
config_file.store(sample_config)
config.migrate(config_file, 2, 3, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("3", config_file.config["meta"]["config.version"])
# Did not delete the section...
self.assertTrue("reporting" in config_file.config)
# ... but the key
self.assertFalse("report.base.dir" in config_file.config["reporting"])
self.assertFalse("output.html.report.filename" in config_file.config["reporting"])
@mock.patch("esrally.utils.io.get_size")
@mock.patch("esrally.time.sleep")
def test_migrate_from_3_to_4(self, sleep, get_size):
get_size.return_value = 0
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 3
},
"system": {
"root.dir": "in-memory"
},
"reporting": {
"datastore.host": ""
},
"build": {
"maven.bin": "/usr/local/mvn"
},
"benchmarks": {
"metrics.stats.disk.device": "/dev/hdd1"
}
}
config_file.store(sample_config)
config.migrate(config_file, 3, 4, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("4", config_file.config["meta"]["config.version"])
# Did not delete the section...
self.assertTrue("build" in config_file.config)
# ... but the key
self.assertFalse("maven.bin" in config_file.config["build"])
self.assertTrue("benchmarks" in config_file.config)
self.assertFalse("metrics.stats.disk.device" in config_file.config["benchmarks"])
self.assertEqual("in-memory", config_file.config["reporting"]["datastore.type"])
def test_migrate_from_4_to_5(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 4
}
}
config_file.store(sample_config)
config.migrate(config_file, 4, 5, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("5", config_file.config["meta"]["config.version"])
self.assertTrue("tracks" in config_file.config)
self.assertEqual("https://github.com/elastic/rally-tracks", config_file.config["tracks"]["default.url"])
def test_migrate_from_5_to_6(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 5
}
}
config_file.store(sample_config)
config.migrate(config_file, 5, 6, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("6", config_file.config["meta"]["config.version"])
self.assertTrue("defaults" in config_file.config)
self.assertEqual("False", config_file.config["defaults"]["preserve_benchmark_candidate"])
def test_migrate_from_6_to_7(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 6
},
"system": {
"log.root.dir": "logs"
},
"provisioning": {
"local.install.dir": "install"
},
}
config_file.store(sample_config)
config.migrate(config_file, 6, 7, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("7", config_file.config["meta"]["config.version"])
self.assertTrue("provisioning" not in config_file.config)
self.assertTrue("log.root.dir" not in config_file.config["system"])
def test_migrate_from_7_to_8(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 7
},
"system": {
"root.dir": "~/.rally/benchmarks",
"environment.name": "local"
},
"benchmarks": {
"local.dataset.cache": "${system:root.dir}/data",
"some.other.cache": "/data"
}
}
config_file.store(sample_config)
config.migrate(config_file, 7, 8, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("8", config_file.config["meta"]["config.version"])
self.assertTrue("root.dir" not in config_file.config["system"])
self.assertEqual("~/.rally/benchmarks", config_file.config["node"]["root.dir"])
self.assertEqual("local", config_file.config["system"]["environment.name"])
self.assertEqual("${node:root.dir}/data", config_file.config["benchmarks"]["local.dataset.cache"])
self.assertEqual("/data", config_file.config["benchmarks"]["some.other.cache"])
def test_migrate_from_8_to_9(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 8
},
"system": {
"root.dir": "~/.rally/benchmarks",
"environment.name": "local"
},
"benchmarks": {
"local.dataset.cache": "${system:root.dir}/data",
"some.other.cache": "/data"
}
}
config_file.store(sample_config)
config.migrate(config_file, 8, 9, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("9", config_file.config["meta"]["config.version"])
self.assertTrue("teams" in config_file.config)
self.assertEqual("https://github.com/elastic/rally-teams", config_file.config["teams"]["default.url"])
def test_migrate_from_9_to_10(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 9
},
"system": {
"root.dir": "~/.rally/benchmarks",
"environment.name": "local"
},
"benchmarks": {
"local.dataset.cache": "${system:root.dir}/data",
"some.other.cache": "/data"
}
}
config_file.store(sample_config)
config.migrate(config_file, 9, 10, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("10", config_file.config["meta"]["config.version"])
self.assertTrue("distributions" in config_file.config)
self.assertEqual("https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
config_file.config["distributions"]["release.1.url"])
self.assertEqual("https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/"
"{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz",
config_file.config["distributions"]["release.2.url"])
self.assertEqual("https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz",
config_file.config["distributions"]["release.url"])
self.assertEqual("true",
config_file.config["distributions"]["release.cache"])
def test_migrate_from_10_to_11(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 10
},
"runtime": {
"java8.home": "/opt/jdk/8",
}
}
config_file.store(sample_config)
config.migrate(config_file, 10, 11, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("11", config_file.config["meta"]["config.version"])
self.assertTrue("runtime" in config_file.config)
self.assertFalse("java8.home" in config_file.config["runtime"])
self.assertEqual("/opt/jdk/8", config_file.config["runtime"]["java.home"])
@mock.patch("esrally.utils.io.exists")
@mock.patch("os.rename")
def test_migrate_from_11_to_12_with_default_src_config_repo_checked_out(self, path_rename, path_exists):
path_exists.return_value = True
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 11
},
"node": {
"root.dir": io.normalize_path("~/.rally/benchmarks")
},
"source": {
"local.src.dir": io.normalize_path("~/.rally/benchmarks/src")
}
}
config_file.store(sample_config)
config.migrate(config_file, 11, 12, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("12", config_file.config["meta"]["config.version"])
self.assertEqual(io.normalize_path("~/.rally/benchmarks/src"), config_file.config["node"]["src.root.dir"])
self.assertEqual("elasticsearch", config_file.config["source"]["elasticsearch.src.subdir"])
path_rename.assert_has_calls(
[
mock.call(io.normalize_path("~/.rally/benchmarks/src"), io.normalize_path("~/.rally/benchmarks/tmp_src_mig")),
mock.call(io.normalize_path("~/.rally/benchmarks/tmp_src_mig"),
io.normalize_path("~/.rally/benchmarks/src/elasticsearch")),
]
)
@mock.patch("esrally.utils.io.exists")
@mock.patch("os.rename")
def test_migrate_from_11_to_12_with_default_src_config_repo_not_checked_out(self, path_rename, path_exists):
path_exists.return_value = False
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 11
},
"node": {
"root.dir": io.normalize_path("~/.rally/benchmarks")
},
"source": {
"local.src.dir": io.normalize_path("~/.rally/benchmarks/src")
}
}
config_file.store(sample_config)
config.migrate(config_file, 11, 12, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("12", config_file.config["meta"]["config.version"])
self.assertEqual(io.normalize_path("~/.rally/benchmarks/src"), config_file.config["node"]["src.root.dir"])
self.assertEqual("elasticsearch", config_file.config["source"]["elasticsearch.src.subdir"])
# did all the migrations but nothing moved
path_rename.assert_not_called()
def test_migrate_from_11_to_12_without_src_config(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 11
},
"node": {
"root.dir": "~/.rally/benchmarks"
}
}
config_file.store(sample_config)
config.migrate(config_file, 11, 12, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("12", config_file.config["meta"]["config.version"])
self.assertFalse("src.root.dir" in config_file.config["node"])
def test_migrate_from_11_to_12_with_partial_src_config(self):
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 11
},
"node": {
"root.dir": "~/.rally/benchmarks"
},
"source": {
# a source config section without any keys should be treated like a missing source config section
}
}
config_file.store(sample_config)
config.migrate(config_file, 11, 12, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("12", config_file.config["meta"]["config.version"])
self.assertFalse("src.root.dir" in config_file.config["node"])
self.assertFalse("elasticsearch.src.subdir" in config_file.config["source"])
@mock.patch("esrally.utils.io.exists")
@mock.patch("os.rename")
def test_migrate_from_11_to_12_with_custom_src_config(self, path_rename, path_exists):
path_exists.return_value = False
config_file = InMemoryConfigStore("test")
sample_config = {
"meta": {
"config.version": 11
},
"node": {
"root.dir": io.normalize_path("~/.rally/benchmarks")
},
"source": {
"local.src.dir": io.normalize_path("~/Projects/elasticsearch/master/es")
}
}
config_file.store(sample_config)
config.migrate(config_file, 11, 12, out=null_output)
self.assertTrue(config_file.backup_created)
self.assertEqual("12", config_file.config["meta"]["config.version"])
self.assertEqual(io.normalize_path("~/Projects/elasticsearch/master"), config_file.config["node"]["src.root.dir"])
self.assertEqual("es", config_file.config["source"]["elasticsearch.src.subdir"])
# did all the migrations but nothing moved
path_rename.assert_not_called()
def test_migrate_from_12_to_13_without_gradle(self):
    """Migrating 12 -> 13 with no build section only bumps the config version."""
    cfg_store = InMemoryConfigStore("test")
    cfg_store.store({
        "meta": {
            "config.version": 12
        }
    })

    config.migrate(cfg_store, 12, 13, out=null_output)

    self.assertTrue(cfg_store.backup_created)
    self.assertEqual("13", cfg_store.config["meta"]["config.version"])
@mock.patch("esrally.utils.io.guess_java_home")
@mock.patch("esrally.utils.jvm.is_early_access_release")
def test_migrate_from_12_to_13_with_gradle_and_jdk8_autodetect_jdk9(self, is_early_access_release, guess_java_home):
    """Migrating 12 -> 13 with gradle + JDK 8 picks up an auto-detected JDK 9 home."""
    guess_java_home.return_value = "/usr/lib/java9"
    is_early_access_release.return_value = False

    cfg_store = InMemoryConfigStore("test")
    cfg_store.store({
        "meta": {
            "config.version": 12
        },
        "build": {
            "gradle.bin": "/usr/local/bin/gradle"
        },
        "runtime": {
            "java.home": "/usr/lib/java8"
        }
    })

    config.migrate(cfg_store, 12, 13, out=null_output)

    runtime = cfg_store.config["runtime"]
    self.assertTrue(cfg_store.backup_created)
    self.assertEqual("13", cfg_store.config["meta"]["config.version"])
    self.assertEqual("/usr/lib/java8", runtime["java.home"])
    self.assertEqual("/usr/lib/java9", runtime["java9.home"])
@mock.patch("esrally.utils.io.guess_java_home")
@mock.patch("esrally.utils.jvm.is_early_access_release")
@mock.patch("esrally.utils.jvm.major_version")
def test_migrate_from_12_to_13_with_gradle_and_jdk9(self, major_version, is_early_access_release, guess_java_home):
    """Migrating 12 -> 13 reuses the existing java.home as java9.home when it is already JDK 9."""
    guess_java_home.return_value = None
    is_early_access_release.return_value = False
    major_version.return_value = 9

    cfg_store = InMemoryConfigStore("test")
    cfg_store.store({
        "meta": {
            "config.version": 12
        },
        "build": {
            "gradle.bin": "/usr/local/bin/gradle"
        },
        "runtime": {
            "java.home": "/usr/lib/java9"
        }
    })

    config.migrate(cfg_store, 12, 13, out=null_output)

    runtime = cfg_store.config["runtime"]
    self.assertTrue(cfg_store.backup_created)
    self.assertEqual("13", cfg_store.config["meta"]["config.version"])
    self.assertEqual("/usr/lib/java9", runtime["java.home"])
    self.assertEqual("/usr/lib/java9", runtime["java9.home"])
@mock.patch("esrally.utils.io.guess_java_home")
@mock.patch("esrally.utils.jvm.is_early_access_release")
@mock.patch("esrally.utils.jvm.major_version")
def test_migrate_from_12_to_13_with_gradle_and_jdk8_ask_user_and_skip(self, major_version, is_early_access_release, guess_java_home):
    """Migrating 12 -> 13 asks for a JDK 9 home when only JDK 8 is present; an empty answer skips it."""
    guess_java_home.return_value = None
    is_early_access_release.return_value = False
    major_version.return_value = 8
    config_file = InMemoryConfigStore("test")
    sample_config = {
        "meta": {
            "config.version": 12
        },
        "build": {
            "gradle.bin": "/usr/local/bin/gradle"
        },
        "runtime": {
            "java.home": "/usr/lib/java8"
        }
    }
    config_file.store(sample_config)
    # the empty input simulates the user just pressing enter (skip)
    config.migrate(config_file, 12, 13, out=null_output, i=MockInput(inputs=[""]))
    self.assertTrue(config_file.backup_created)
    self.assertEqual("13", config_file.config["meta"]["config.version"])
    self.assertEqual("/usr/lib/java8", config_file.config["runtime"]["java.home"])
    # assertNotIn reports the container contents on failure, unlike assertTrue(... not in ...)
    self.assertNotIn("java9.home", config_file.config["runtime"])
@mock.patch("esrally.utils.io.exists")
@mock.patch("esrally.utils.io.guess_java_home")
@mock.patch("esrally.utils.jvm.is_early_access_release")
@mock.patch("esrally.utils.jvm.major_version")
def test_migrate_from_12_to_13_with_gradle_and_jdk8_ask_user_enter_valid(self, major_version, is_early_access_release, guess_java_home,
                                                                         path_exists):
    """Migrating 12 -> 13 accepts a valid JDK 9 path typed in by the user."""
    guess_java_home.return_value = None
    is_early_access_release.return_value = False
    # first probe sees the configured JDK 8, second probe sees the user-entered JDK 9
    major_version.side_effect = [8, 9]
    path_exists.return_value = True

    cfg_store = InMemoryConfigStore("test")
    cfg_store.store({
        "meta": {
            "config.version": 12
        },
        "build": {
            "gradle.bin": "/usr/local/bin/gradle"
        },
        "runtime": {
            "java.home": "/usr/lib/java8"
        }
    })

    config.migrate(cfg_store, 12, 13, out=null_output, i=MockInput(inputs=["/usr/lib/java9"]))

    runtime = cfg_store.config["runtime"]
    self.assertTrue(cfg_store.backup_created)
    self.assertEqual("13", cfg_store.config["meta"]["config.version"])
    self.assertEqual("/usr/lib/java8", runtime["java.home"])
    self.assertEqual("/usr/lib/java9", runtime["java9.home"])
| 44.597614
| 139
| 0.622243
| 4,549
| 41,119
| 5.415476
| 0.074522
| 0.066166
| 0.052446
| 0.039213
| 0.831622
| 0.793992
| 0.754942
| 0.735458
| 0.670022
| 0.641567
| 0
| 0.008377
| 0.242248
| 41,119
| 921
| 140
| 44.646037
| 0.782271
| 0.03361
| 0
| 0.547837
| 0
| 0.003932
| 0.246455
| 0.063956
| 0
| 0
| 0
| 0
| 0.251638
| 1
| 0.051114
| false
| 0.003932
| 0.007864
| 0.001311
| 0.069463
| 0.001311
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
57beb5c0816cdeecf55b778c4dd5b378b8aa17d8
| 184
|
py
|
Python
|
OpenAttack/attack_evals/__init__.py
|
wenh06/OpenAttack
|
412d1b2777dea5009fe97ac264044bfda65dfa5d
|
[
"MIT"
] | 10
|
2021-12-01T15:35:05.000Z
|
2022-03-16T16:10:24.000Z
|
OpenAttack/attack_evals/__init__.py
|
leileigan/clean_label_textual_backdoor_attack
|
56e3a96f6a4eeaf30b90a275685f37cc7e7b3c7c
|
[
"Apache-2.0"
] | null | null | null |
OpenAttack/attack_evals/__init__.py
|
leileigan/clean_label_textual_backdoor_attack
|
56e3a96f6a4eeaf30b90a275685f37cc7e7b3c7c
|
[
"Apache-2.0"
] | null | null | null |
from .default import DefaultAttackEval
from .invoke_limit_eval import InvokeLimitedAttackEval
from .chinese_eval import ChineseAttackEval
from .detailed_eval import DetailedAttackEval
| 36.8
| 54
| 0.891304
| 20
| 184
| 8
| 0.6
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 184
| 4
| 55
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
57cd5b18d1e192e16cc82c80fdaddeba3f7ce106
| 203
|
py
|
Python
|
django_rest_passwordreset/__init__.py
|
eadwinCode/django-ninja-passwordreset
|
1b23b781a75957ed31f2712e1b69a01b10a9a8ef
|
[
"BSD-3-Clause"
] | null | null | null |
django_rest_passwordreset/__init__.py
|
eadwinCode/django-ninja-passwordreset
|
1b23b781a75957ed31f2712e1b69a01b10a9a8ef
|
[
"BSD-3-Clause"
] | null | null | null |
django_rest_passwordreset/__init__.py
|
eadwinCode/django-ninja-passwordreset
|
1b23b781a75957ed31f2712e1b69a01b10a9a8ef
|
[
"BSD-3-Clause"
] | null | null | null |
""" An extension of django ninja framework, providing a configurable password reset strategy"""
__version__ = "1.0.0"
default_app_config = "django_rest_passwordreset.apps.DjangoRestPasswordResetConfig"
| 40.6
| 95
| 0.817734
| 24
| 203
| 6.583333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.098522
| 203
| 4
| 96
| 50.75
| 0.846995
| 0.433498
| 0
| 0
| 0
| 0
| 0.601852
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.5
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.