Dataset schema (113 columns, column → dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |

In the records below, `content` is rendered as a fenced code block, and the 41 paired quality metrics are listed as `name signal/raw`, where `name` expands to `qsc_code_<name>_quality_signal` (the float or bool signal) and `qsc_code_<name>` (the int or null raw flag); names prefixed `py:` expand to `qsc_codepython_<name>…` instead.
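This schema matches The Stack-style per-file records augmented with RedPajama-style code quality signals. A minimal sketch of how such rows could be loaded and inspected with the Hugging Face `datasets` library; the dataset path here is a placeholder, not the actual source of this dump:

```python
from datasets import load_dataset

# Hypothetical dataset path; substitute the real repository for this dump.
ds = load_dataset("org/python-files-with-quality-signals",
                  split="train", streaming=True)

for row in ds.take(3):
    print(row["hexsha"], row["size"], row["max_stars_repo_name"])
    # Quality-signal columns all share the `_quality_signal` suffix.
    signals = {k: v for k, v in row.items() if k.endswith("_quality_signal")}
    print(sorted(signals)[:5])
```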
**Record 1: hexsha `f58764fa0b40797d95abd3d6a3f0937cd6964e6c`**

- size: 1197 · ext: py · lang: Python
- max_stars: path `piccel/ui/__init__.py` · repo `lesca-research/piccel` · head `dc363df65e29f3d6e71a2460b7cd7518d7f8ae0f` · licenses `["MIT"]` · count 2 · event datetimes 2021-05-25T13:57:47.000Z to 2021-12-19T14:50:40.000Z
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- content:

```python
from .generated import access_ui
from .generated import data_sheet_ui
from .generated import form_item_ui
from .generated import form_ui
from .generated import item_boolean_checkboxes_ui
from .generated import item_choice_radio_ui
from .generated import item_datetime_ui
from .generated import item_single_line_ui
from .generated import item_text_multi_line_ui
from .generated import login_ui
from .generated import progress_bar_ui
from .generated import resources
from .generated import section_ui
from .generated import selector_ui
from .generated import text_editor_ui
from .generated import workbook_ui
from .generated import workbook_creation_ui
from .generated import sheet_creation_ui
# from .generated import dynamic_vlist_ui
# from .generated import dynamic_vlist_item_ui
from .generated import form_editor_widget_ui
from .generated import form_editor_file_ui
from .generated import form_editor_sheet_ui
from .generated import form_edit_ui
from .generated import section_edit_ui
from .generated import item_edit_ui
from .generated import choice_edit_ui
from .generated import variable_edit_ui
from .generated import section_transition_edit_ui
from . import widgets
from . import main_qss
```

- avg_line_length: 37.40625 · max_line_length: 49 · alphanum_fraction: 0.867168
- quality metrics (signal/raw): num_words 183/0 · num_chars 1197/0 · mean_word_length 5.338798/0 · frac_words_unique 0.202186/null · frac_chars_top_2grams 0.385875/1 · frac_chars_top_3grams 0.563971/1 · frac_chars_top_4grams 0.580348/1 · frac_chars_dupe_5grams 0.659161/0 · frac_chars_dupe_6grams 0.323439/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.105263/0 · size_file_byte 1197/0 · num_lines 31/0 · num_chars_line_max 50/0 · num_chars_line_mean 38.612903/0 · frac_chars_alphabet 0.912232/0 · frac_chars_comments 0.070175/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 1/1 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 1/1 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 1/0 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
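The cheap surface stats are recomputable directly from `content`, and Record 1's stored values pin down the conventions fairly well: 1197 / 32 = 37.40625 matches `avg_line_length` (32 pieces when splitting on `"\n"` with a trailing newline), while 1197 / 31 = 38.612903 matches `qsc_code_num_chars_line_mean` (31 = `qsc_code_num_lines`). A minimal sketch; the exact character classes the pipeline counts as "alphanumeric" and "whitespace" are assumptions here:

```python
def cheap_stats(content: str) -> dict:
    """Recompute Record-1-style surface stats; conventions inferred, not documented."""
    if not content:
        return {}
    n = len(content)
    pieces = content.split("\n")    # 32 pieces for Record 1 (trailing newline kept)
    lines = content.splitlines()    # 31 lines -> qsc_code_num_lines
    return {
        "avg_line_length": n / len(pieces),              # 1197/32 = 37.40625
        "num_chars_line_mean": n / len(lines),           # 1197/31 = 38.612903
        "max_line_length": max(len(p) for p in pieces),  # 49 (the qsc twin is 50,
                                                         # perhaps counting "\n")
        "alphanum_fraction": sum(c.isalnum() for c in content) / n,     # 0.867168
        "frac_chars_whitespace": sum(c.isspace() for c in content) / n, # 0.105263
    }
```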
**Record 2: hexsha `f58e4bc40a350c6301e52e3f967841724fa4d675`**

- size: 67 · ext: py · lang: Python
- max_stars: path `src/lesson_modules_and_packages/extension/demopkg2/overloaded.py` · repo `jasonwee/asus-rt-n14uhp-mrtg` · head `4fa96c3406e32ea6631ce447db6d19d70b2cd061` · licenses `["Apache-2.0"]` · count 3 · event datetimes 2018-08-14T09:33:52.000Z to 2022-03-21T12:31:58.000Z
- max_issues: path/repo/head as above · licenses `["Apache-2.0"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["Apache-2.0"]` · count null · event datetimes null
- content:

```python
def func():
print('This is the installed version of func().')
```

- avg_line_length: 16.75 · max_line_length: 53 · alphanum_fraction: 0.641791
- quality metrics (signal/raw): num_words 10/1 · num_chars 67/0 · mean_word_length 4.3/0 · frac_words_unique 0.9/null · frac_chars_top_2grams 0/0 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.208955/0 · size_file_byte 67/0 · num_lines 3/1 · num_chars_line_max 54/0 · num_chars_line_mean 22.333333/0 · frac_chars_alphabet 0.811321/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.606061/1 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.5/1 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 0/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.5/0 · py:frac_lines_print 0.5/1
- effective: 0 · hits: 6
**Record 3: hexsha `f5a5e0a3f9b651f42de17ce7e9056c967d03811c`**

- size: 30 · ext: py · lang: Python
- max_stars: path `sqlite_/__init__.py` · repo `StaticStartup/pyty` · head `19d6ca69ea6fcf44e7c9f5b07cc703220597187d` · licenses `["MIT"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- content:

```python
from ._connector import sqlite
```

- avg_line_length: 30 · max_line_length: 30 · alphanum_fraction: 0.866667
- quality metrics (signal/raw): num_words 4/1 · num_chars 30/1 · mean_word_length 6.25/0 · frac_words_unique 1/null · frac_chars_top_2grams 0/0 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.1/0 · size_file_byte 30/0 · num_lines 1/1 · num_chars_line_max 30/0 · num_chars_line_mean 30/0 · frac_chars_alphabet 0.925926/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 1/1 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 1/1 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 4: hexsha `19122406f7cf3b1042bfef28d7961fa4d4dc2653`**

- size: 23 · ext: py · lang: Python
- max_stars: path `database/__init__.py` · repo `drkitty/web-test` · head `d49740100320fb542043280c31f420364aeba76f` · licenses `["MIT"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- content:

```python
from .data import Base
```

- avg_line_length: 11.5 · max_line_length: 22 · alphanum_fraction: 0.782609
- quality metrics (signal/raw): num_words 4/1 · num_chars 23/1 · mean_word_length 4.5/0 · frac_words_unique 1/null · frac_chars_top_2grams 0/0 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.173913/0 · size_file_byte 23/0 · num_lines 1/1 · num_chars_line_max 23/0 · num_chars_line_mean 23/0 · frac_chars_alphabet 0.947368/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 1/1 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 1/1 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 5: hexsha `5fd5a499aa553aaf264d1b6598e4fd94e80757b0`**

- size: 177 · ext: py · lang: Python
- max_stars: path `manual/unicos/src-groups/script/lib/generator/predicate_function_declaration.py` · repo `Tikubonn/unico` · head `c76de5309f8a3a6fda3110e463b7e9718ea530e3` · licenses `["MIT"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- content:

```python
def write (name, stream):
stream.write("#include <unico.h>\n")
stream.write("#include <stddef.h>\n")
stream.write("extern int %s (size_t, size_t, unicos*);\n" % name)
```

- avg_line_length: 25.285714 · max_line_length: 67 · alphanum_fraction: 0.638418
- quality metrics (signal/raw): num_words 28/1 · num_chars 177/0 · mean_word_length 3.964286/0 · frac_words_unique 0.535714/null · frac_chars_top_2grams 0.297297/1 · frac_chars_top_3grams 0.324324/1 · frac_chars_top_4grams 0.234234/1 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.146893/0 · size_file_byte 177/0 · num_lines 6/1 · num_chars_line_max 68/0 · num_chars_line_mean 29.5/0 · frac_chars_alphabet 0.735099/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.471591/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.25/1 · py:cate_var_zero false/0 · py:frac_lines_pass 0/0 · py:frac_lines_import 0/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.25/0 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 6: hexsha `5ff0b1031851a7f6a66295518d61aaa725bf1b84`**

- size: 6131 · ext: py · lang: Python
- max_stars: path `loldib/getratings/models/NA/na_zed/na_zed_jng.py` · repo `koliupy/loldib` · head `c9ab94deb07213cdc42b5a7c26467cdafaf81b7f` · licenses `["Apache-2.0"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["Apache-2.0"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["Apache-2.0"]` · count null · event datetimes null
- content:

```python
from getratings.models.ratings import Ratings
class NA_Zed_Jng_Aatrox(Ratings):
pass
class NA_Zed_Jng_Ahri(Ratings):
pass
class NA_Zed_Jng_Akali(Ratings):
pass
class NA_Zed_Jng_Alistar(Ratings):
pass
class NA_Zed_Jng_Amumu(Ratings):
pass
class NA_Zed_Jng_Anivia(Ratings):
pass
class NA_Zed_Jng_Annie(Ratings):
pass
class NA_Zed_Jng_Ashe(Ratings):
pass
class NA_Zed_Jng_AurelionSol(Ratings):
pass
class NA_Zed_Jng_Azir(Ratings):
pass
class NA_Zed_Jng_Bard(Ratings):
pass
class NA_Zed_Jng_Blitzcrank(Ratings):
pass
class NA_Zed_Jng_Brand(Ratings):
pass
class NA_Zed_Jng_Braum(Ratings):
pass
class NA_Zed_Jng_Caitlyn(Ratings):
pass
class NA_Zed_Jng_Camille(Ratings):
pass
class NA_Zed_Jng_Cassiopeia(Ratings):
pass
class NA_Zed_Jng_Chogath(Ratings):
pass
class NA_Zed_Jng_Corki(Ratings):
pass
class NA_Zed_Jng_Darius(Ratings):
pass
class NA_Zed_Jng_Diana(Ratings):
pass
class NA_Zed_Jng_Draven(Ratings):
pass
class NA_Zed_Jng_DrMundo(Ratings):
pass
class NA_Zed_Jng_Ekko(Ratings):
pass
class NA_Zed_Jng_Elise(Ratings):
pass
class NA_Zed_Jng_Evelynn(Ratings):
pass
class NA_Zed_Jng_Ezreal(Ratings):
pass
class NA_Zed_Jng_Fiddlesticks(Ratings):
pass
class NA_Zed_Jng_Fiora(Ratings):
pass
class NA_Zed_Jng_Fizz(Ratings):
pass
class NA_Zed_Jng_Galio(Ratings):
pass
class NA_Zed_Jng_Gangplank(Ratings):
pass
class NA_Zed_Jng_Garen(Ratings):
pass
class NA_Zed_Jng_Gnar(Ratings):
pass
class NA_Zed_Jng_Gragas(Ratings):
pass
class NA_Zed_Jng_Graves(Ratings):
pass
class NA_Zed_Jng_Hecarim(Ratings):
pass
class NA_Zed_Jng_Heimerdinger(Ratings):
pass
class NA_Zed_Jng_Illaoi(Ratings):
pass
class NA_Zed_Jng_Irelia(Ratings):
pass
class NA_Zed_Jng_Ivern(Ratings):
pass
class NA_Zed_Jng_Janna(Ratings):
pass
class NA_Zed_Jng_JarvanIV(Ratings):
pass
class NA_Zed_Jng_Jax(Ratings):
pass
class NA_Zed_Jng_Jayce(Ratings):
pass
class NA_Zed_Jng_Jhin(Ratings):
pass
class NA_Zed_Jng_Jinx(Ratings):
pass
class NA_Zed_Jng_Kalista(Ratings):
pass
class NA_Zed_Jng_Karma(Ratings):
pass
class NA_Zed_Jng_Karthus(Ratings):
pass
class NA_Zed_Jng_Kassadin(Ratings):
pass
class NA_Zed_Jng_Katarina(Ratings):
pass
class NA_Zed_Jng_Kayle(Ratings):
pass
class NA_Zed_Jng_Kayn(Ratings):
pass
class NA_Zed_Jng_Kennen(Ratings):
pass
class NA_Zed_Jng_Khazix(Ratings):
pass
class NA_Zed_Jng_Kindred(Ratings):
pass
class NA_Zed_Jng_Kled(Ratings):
pass
class NA_Zed_Jng_KogMaw(Ratings):
pass
class NA_Zed_Jng_Leblanc(Ratings):
pass
class NA_Zed_Jng_LeeSin(Ratings):
pass
class NA_Zed_Jng_Leona(Ratings):
pass
class NA_Zed_Jng_Lissandra(Ratings):
pass
class NA_Zed_Jng_Lucian(Ratings):
pass
class NA_Zed_Jng_Lulu(Ratings):
pass
class NA_Zed_Jng_Lux(Ratings):
pass
class NA_Zed_Jng_Malphite(Ratings):
pass
class NA_Zed_Jng_Malzahar(Ratings):
pass
class NA_Zed_Jng_Maokai(Ratings):
pass
class NA_Zed_Jng_MasterYi(Ratings):
pass
class NA_Zed_Jng_MissFortune(Ratings):
pass
class NA_Zed_Jng_MonkeyKing(Ratings):
pass
class NA_Zed_Jng_Mordekaiser(Ratings):
pass
class NA_Zed_Jng_Morgana(Ratings):
pass
class NA_Zed_Jng_Nami(Ratings):
pass
class NA_Zed_Jng_Nasus(Ratings):
pass
class NA_Zed_Jng_Nautilus(Ratings):
pass
class NA_Zed_Jng_Nidalee(Ratings):
pass
class NA_Zed_Jng_Nocturne(Ratings):
pass
class NA_Zed_Jng_Nunu(Ratings):
pass
class NA_Zed_Jng_Olaf(Ratings):
pass
class NA_Zed_Jng_Orianna(Ratings):
pass
class NA_Zed_Jng_Ornn(Ratings):
pass
class NA_Zed_Jng_Pantheon(Ratings):
pass
class NA_Zed_Jng_Poppy(Ratings):
pass
class NA_Zed_Jng_Quinn(Ratings):
pass
class NA_Zed_Jng_Rakan(Ratings):
pass
class NA_Zed_Jng_Rammus(Ratings):
pass
class NA_Zed_Jng_RekSai(Ratings):
pass
class NA_Zed_Jng_Renekton(Ratings):
pass
class NA_Zed_Jng_Rengar(Ratings):
pass
class NA_Zed_Jng_Riven(Ratings):
pass
class NA_Zed_Jng_Rumble(Ratings):
pass
class NA_Zed_Jng_Ryze(Ratings):
pass
class NA_Zed_Jng_Sejuani(Ratings):
pass
class NA_Zed_Jng_Shaco(Ratings):
pass
class NA_Zed_Jng_Shen(Ratings):
pass
class NA_Zed_Jng_Shyvana(Ratings):
pass
class NA_Zed_Jng_Singed(Ratings):
pass
class NA_Zed_Jng_Sion(Ratings):
pass
class NA_Zed_Jng_Sivir(Ratings):
pass
class NA_Zed_Jng_Skarner(Ratings):
pass
class NA_Zed_Jng_Sona(Ratings):
pass
class NA_Zed_Jng_Soraka(Ratings):
pass
class NA_Zed_Jng_Swain(Ratings):
pass
class NA_Zed_Jng_Syndra(Ratings):
pass
class NA_Zed_Jng_TahmKench(Ratings):
pass
class NA_Zed_Jng_Taliyah(Ratings):
pass
class NA_Zed_Jng_Talon(Ratings):
pass
class NA_Zed_Jng_Taric(Ratings):
pass
class NA_Zed_Jng_Teemo(Ratings):
pass
class NA_Zed_Jng_Thresh(Ratings):
pass
class NA_Zed_Jng_Tristana(Ratings):
pass
class NA_Zed_Jng_Trundle(Ratings):
pass
class NA_Zed_Jng_Tryndamere(Ratings):
pass
class NA_Zed_Jng_TwistedFate(Ratings):
pass
class NA_Zed_Jng_Twitch(Ratings):
pass
class NA_Zed_Jng_Udyr(Ratings):
pass
class NA_Zed_Jng_Urgot(Ratings):
pass
class NA_Zed_Jng_Varus(Ratings):
pass
class NA_Zed_Jng_Vayne(Ratings):
pass
class NA_Zed_Jng_Veigar(Ratings):
pass
class NA_Zed_Jng_Velkoz(Ratings):
pass
class NA_Zed_Jng_Vi(Ratings):
pass
class NA_Zed_Jng_Viktor(Ratings):
pass
class NA_Zed_Jng_Vladimir(Ratings):
pass
class NA_Zed_Jng_Volibear(Ratings):
pass
class NA_Zed_Jng_Warwick(Ratings):
pass
class NA_Zed_Jng_Xayah(Ratings):
pass
class NA_Zed_Jng_Xerath(Ratings):
pass
class NA_Zed_Jng_XinZhao(Ratings):
pass
class NA_Zed_Jng_Yasuo(Ratings):
pass
class NA_Zed_Jng_Yorick(Ratings):
pass
class NA_Zed_Jng_Zac(Ratings):
pass
class NA_Zed_Jng_Zed(Ratings):
pass
class NA_Zed_Jng_Ziggs(Ratings):
pass
class NA_Zed_Jng_Zilean(Ratings):
pass
class NA_Zed_Jng_Zyra(Ratings):
pass
```

- avg_line_length: 14.702638 · max_line_length: 46 · alphanum_fraction: 0.750938
- quality metrics (signal/raw): num_words 972/0 · num_chars 6131/0 · mean_word_length 4.3107/0 · frac_words_unique 0.151235/null · frac_chars_top_2grams 0.230549/1 · frac_chars_top_3grams 0.329356/1 · frac_chars_top_4grams 0.428162/1 · frac_chars_dupe_5grams 0.784726/0 · frac_chars_dupe_6grams 0.784726/1 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.18121/0 · size_file_byte 6131/0 · num_lines 416/0 · num_chars_line_max 47/0 · num_chars_line_mean 14.737981/0 · frac_chars_alphabet 0.834661/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0.498195/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0.498195/1 · py:frac_lines_import 0.00361/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.501805/0 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
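Record 6 is almost entirely `class ...: pass` boilerplate, and its signals reflect that: 138 of its 277 non-blank lines are a bare `pass`, and 138/277 = 0.498195 matches both `py:frac_lines_pass` and `frac_lines_dupe_lines`, while the single import gives 1/277 = 0.00361 for `py:frac_lines_import`. A sketch of how such a flag might be computed; the exact blank-line filtering used by the pipeline is an assumption:

```python
def frac_lines_pass(content: str) -> float:
    """Fraction of non-blank lines that are a bare `pass` statement."""
    lines = [ln.strip() for ln in content.splitlines() if ln.strip()]
    return sum(ln == "pass" for ln in lines) / len(lines) if lines else 0.0

# For Record 6: 138 bare-`pass` lines out of 277 non-blank lines -> 0.498195.
```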
**Record 7: hexsha `5ff54900b3df8d8babec7d72f4a475e49d6c8ea3`**

- size: 14953 · ext: py · lang: Python
- max_stars: path `sim/env.py` · repo `nihalgoalla/pensieve` · head `282d2db3ea42e152880ff12f1497fa8b224aba90` · licenses `["MIT"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- content:

```python
import numpy as np
MILLISECONDS_IN_SECOND = 1000.0
B_IN_MB = 1000000.0
BITS_IN_BYTE = 8.0
RANDOM_SEED = 42
VIDEO_CHUNCK_LEN = 4000.0 # millisec, every time add this amount to buffer
BITRATE_LEVELS = 6
TOTAL_VIDEO_CHUNCK = 48
BUFFER_THRESH = 60.0 * MILLISECONDS_IN_SECOND # millisec, max buffer limit
DRAIN_BUFFER_SLEEP_TIME = 500.0 # millisec
PACKET_PAYLOAD_PORTION = 0.95
LINK_RTT = 80 # millisec
PACKET_SIZE = 1500 # bytes
NOISE_LOW = 0.9
NOISE_HIGH = 1.1
VIDEO_SIZE_FILE = './video_size_'
BUFFER_SIZE = 10
VIDEO_BIT_RATE = [300,750,1200,1850,2850,4300] # Kbps
class Environment:
def __init__(self, all_cooked_time, all_cooked_bw, random_seed=RANDOM_SEED):
assert len(all_cooked_time) == len(all_cooked_bw)
np.random.seed(random_seed)
self.all_cooked_time = all_cooked_time
self.all_cooked_bw = all_cooked_bw
self.video_chunk_counter = 0
self.buffer_size = 0
self.buffer = [0]*BUFFER_SIZE
# pick a random trace file
self.trace_idx = np.random.randint(len(self.all_cooked_time))
self.cooked_time = self.all_cooked_time[self.trace_idx]
self.cooked_bw = self.all_cooked_bw[self.trace_idx]
# randomize the start point of the trace
# note: trace file starts with time 0
self.mahimahi_ptr = np.random.randint(1, len(self.cooked_bw))
self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr - 1]
self.video_size = {} # in bytes
for bitrate in xrange(BITRATE_LEVELS):
self.video_size[bitrate] = []
with open(VIDEO_SIZE_FILE + str(bitrate)) as f:
for line in f:
self.video_size[bitrate].append(int(line.split()[0]))
# def get_video_chunk(self, quality):
# #self.mahimahi_ptr will denote the start of the buffer, once it is played it will be moved 1, with the array looping back?
# assert quality >= 0
# assert quality < BITRATE_LEVELS
# if quality < buffer_size:
# #download EL
# # self.video_size[quality]
# if self.buffer[quality] < len(VIDEO_BIT_RATE):
# video_chunk_size = self.video_size[self.buffer[quality]+1][self.video_chunk_counter + quality]
# self.buffer[quality] = self.buffer[quality] + 1
# self.z_t = self.z_t + self.buffer[quality]
#
# # else:
# # #we dont do anything idiot, it has to be in the buffer ffs
# # #send BL
# # throughput = self.cooked_bw[self.mahimahi_ptr] \
# # * B_IN_MB / BITS_IN_BYTE
# # duration = self.cooked_time[self.mahimahi_ptr] \
# # - self.last_mahimahi_time
# #
# # packet_payload = throughput * duration * PACKET_PAYLOAD_PORTION
# # #you have to update buffer along with time? how to do that?
# # if self.mahimahi_ptr+1 >= len(self.cooked_bw):#condition because the video is ended??
# # # loop back in the beginning
# # # note: trace file starts with time 0
# #
# # if video_chunk_counter_sent + packet_payload > video_chunk_size:
# # #this is the final BL, even more than that maybe, so we have to end the loop until or should we
# # #check for the time of chunk being run in the player
# # # pass:
# # # pass
# # # self.buffer = self.buffer[1:]
#
# video_chunk_size = self.video_size[quality][self.video_chunk_counter]
#
# # use the delivery opportunity in mahimahi
# delay = 0.0 # in ms
# video_chunk_counter_sent = 0 # in bytes
#
# while True: # download video chunk over mahimahi
# #defining throughput, how much data is being sent
# throughput = self.cooked_bw[self.mahimahi_ptr+quality] \
# * B_IN_MB / BITS_IN_BYTE
# #defining duration #how much time is the data representing
# duration = self.cooked_time[self.mahimahi_ptr+quality] \
# - self.last_mahimahi_time
#
# #total packet payload
# packet_payload = throughput * duration * PACKET_PAYLOAD_PORTION
#
# #if condition for checking if the shit is ending
# if video_chunk_counter_sent + packet_payload > video_chunk_size:
#
# fractional_time = (video_chunk_size - video_chunk_counter_sent) / \
# throughput / PACKET_PAYLOAD_PORTION
# delay += fractional_time
# self.last_mahimahi_time += fractional_time
# # assert(self.last_mahimahi_time <= self.cooked_time[self.mahimahi_ptr])
# break
#
# #adding packet payload so that we know how much data is still there to be precessed
# video_chunk_counter_sent += packet_payload
# #why delay?
# delay += duration
# #last data bit time
# self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr]
# self.mahimahi_ptr += 1
#
# if self.mahimahi_ptr >= len(self.cooked_bw):#condition because the video is ended??
# # loop back in the beginning
# # note: trace file starts with time 0
# self.mahimahi_ptr = 1
# self.last_mahimahi_time = 0
#
# delay *= MILLISECONDS_IN_SECOND
# delay += LINK_RTT
#
# # add a multiplicative noise to the delay
# # delay *= np.random.uniform(NOISE_LOW, NOISE_HIGH) #check if this is right
# delay *= np.random.uniform(NOISE_LOW, NOISE_HIGH)
# # rebuffer time
# rebuf = np.maximum(delay - self.buffer_size, 0.0)
#
# # update the buffer
# self.buffer_size = np.maximum(self.buffer_size - delay, 0.0)
#
# # add in the new chunk
# self.buffer_size += VIDEO_CHUNCK_LEN
#
# # sleep if buffer gets too large
# sleep_time = 0
# if self.buffer_size > BUFFER_THRESH:
# # exceed the buffer limit
# # we need to skip some network bandwidth here
# # but do not add up the delay
# drain_buffer_time = self.buffer_size - BUFFER_THRESH
# sleep_time = np.ceil(drain_buffer_time / DRAIN_BUFFER_SLEEP_TIME) * \
# DRAIN_BUFFER_SLEEP_TIME
# self.buffer_size -= sleep_time
#
# while True:
# duration = self.cooked_time[self.mahimahi_ptr] \
# - self.last_mahimahi_time
# if duration > sleep_time / MILLISECONDS_IN_SECOND:
# self.last_mahimahi_time += sleep_time / MILLISECONDS_IN_SECOND
# break
# sleep_time -= duration * MILLISECONDS_IN_SECOND
# self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr]
# self.mahimahi_ptr += 1
#
# if self.mahimahi_ptr >= len(self.cooked_bw):
# # loop back in the beginning
# # note: trace file starts with time 0
# self.mahimahi_ptr = 1
# self.last_mahimahi_time = 0
#
# # the "last buffer size" return to the controller
# # Note: in old version of dash the lowest buffer is 0.
# # In the new version the buffer always have at least
# # one chunk of video
# return_buffer_size = self.buffer_size
#
# self.video_chunk_counter += 1
# video_chunk_remain = TOTAL_VIDEO_CHUNCK - self.video_chunk_counter
#
# end_of_video = False
# if self.video_chunk_counter >= TOTAL_VIDEO_CHUNCK:
# end_of_video = True
# self.buffer_size = 0
# self.video_chunk_counter = 0
#
# # pick a random trace file
# self.trace_idx = np.random.randint(len(self.all_cooked_time))
# self.cooked_time = self.all_cooked_time[self.trace_idx]
# self.cooked_bw = self.all_cooked_bw[self.trace_idx]
#
# # randomize the start point of the video
# # note: trace file starts with time 0
# self.mahimahi_ptr = np.random.randint(1, len(self.cooked_bw))
# self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr - 1]
#
# next_video_chunk_sizes = []
# for i in xrange(BITRATE_LEVELS):
# next_video_chunk_sizes.append(self.video_size[i][self.video_chunk_counter])
#
# return delay, \
# sleep_time, \
# return_buffer_size / MILLISECONDS_IN_SECOND, \
# rebuf / MILLISECONDS_IN_SECOND, \
# video_chunk_size, \
# next_video_chunk_sizes, \
# end_of_video, \
# video_chunk_remain
def get_video_chunk(self, bit_rate_index):
#self.mahimahi_ptr will denote the start of the buffer, once it is played it will be moved 1, with the array looping back?
assert bit_rate_index >= 0
# assert bit_rate_index < BITRATE_LEVELS
if bit_rate_index < buffer_size:
#download EL
# self.video_size[bit_rate_index]
if self.buffer[bit_rate_index] < len(VIDEO_BIT_RATE):
video_chunk_size = self.video_size[self.buffer[bit_rate_index]+1][self.video_chunk_counter + bit_rate_index]
self.buffer[bit_rate_index] = self.buffer[bit_rate_index] + 1
self.z_t = self.z_t + self.buffer[bit_rate_index]
video_chunk_size = self.video_size[self.buffer[bit_rate_index]][self.video_chunk_counter] #change this
# use the delivery opportunity in mahimahi
delay = 0.0 # in ms
video_chunk_counter_sent = 0 # in bytes
while True: # download video chunk over mahimahi
#defining throughput, how much data is being sent
throughput = self.cooked_bw[self.mahimahi_ptr + bit_rate_index] \
* B_IN_MB / BITS_IN_BYTE
#defining duration #how much time is the data representing
duration = self.cooked_time[self.mahimahi_ptr + bit_rate_index] \
- self.last_mahimahi_time
#total packet payload
packet_payload = throughput * duration * PACKET_PAYLOAD_PORTION
#if condition for checking if the bandwidth is finished, but in grad you need no bandwidth check, you just need EL check?
if video_chunk_counter_sent + packet_payload > video_chunk_size:
fractional_time = (video_chunk_size - video_chunk_counter_sent) / \
throughput / PACKET_PAYLOAD_PORTION
delay += fractional_time
self.last_mahimahi_time += fractional_time
# assert(self.last_mahimahi_time <= self.cooked_time[self.mahimahi_ptr])
break
#adding packet payload so that we know how much data is still there to be precessed
video_chunk_counter_sent += packet_payload
#why delay?
delay += duration
#last data bit time
self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr]
self.mahimahi_ptr += 1
if self.mahimahi_ptr >= len(self.cooked_bw):#condition because the video is ended??
# loop back in the beginning
# note: trace file starts with time 0
self.mahimahi_ptr = 1
self.last_mahimahi_time = 0
delay *= MILLISECONDS_IN_SECOND
delay += LINK_RTT
# add a multiplicative noise to the delay
# delay *= np.random.uniform(NOISE_LOW, NOISE_HIGH) #check if this is right
delay *= np.random.uniform(NOISE_LOW, NOISE_HIGH)
# rebuffer time
rebuf = np.maximum(delay - self.buffer_size, 0.0)
# update the buffer
self.buffer_size = np.maximum(self.buffer_size - delay, 0.0)
# add in the new chunk
self.buffer_size += VIDEO_CHUNCK_LEN
# sleep if buffer gets too large
sleep_time = 0
if self.buffer_size > BUFFER_THRESH:
# exceed the buffer limit
# we need to skip some network bandwidth here
# but do not add up the delay
drain_buffer_time = self.buffer_size - BUFFER_THRESH
sleep_time = np.ceil(drain_buffer_time / DRAIN_BUFFER_SLEEP_TIME) * \
DRAIN_BUFFER_SLEEP_TIME
self.buffer_size -= sleep_time
while True:
duration = self.cooked_time[self.mahimahi_ptr] \
- self.last_mahimahi_time
if duration > sleep_time / MILLISECONDS_IN_SECOND:
self.last_mahimahi_time += sleep_time / MILLISECONDS_IN_SECOND
break
sleep_time -= duration * MILLISECONDS_IN_SECOND
self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr]
self.mahimahi_ptr += 1
if self.mahimahi_ptr >= len(self.cooked_bw):
# loop back in the beginning
# note: trace file starts with time 0
self.mahimahi_ptr = 1
self.last_mahimahi_time = 0
# the "last buffer size" return to the controller
# Note: in old version of dash the lowest buffer is 0.
# In the new version the buffer always have at least
# one chunk of video
return_buffer_size = self.buffer_size
self.video_chunk_counter += 1
video_chunk_remain = TOTAL_VIDEO_CHUNCK - self.video_chunk_counter
end_of_video = False
if self.video_chunk_counter >= TOTAL_VIDEO_CHUNCK:
end_of_video = True
self.buffer_size = 0
self.video_chunk_counter = 0
# pick a random trace file
self.trace_idx = np.random.randint(len(self.all_cooked_time))
self.cooked_time = self.all_cooked_time[self.trace_idx]
self.cooked_bw = self.all_cooked_bw[self.trace_idx]
# randomize the start point of the video
# note: trace file starts with time 0
self.mahimahi_ptr = np.random.randint(1, len(self.cooked_bw))
self.last_mahimahi_time = self.cooked_time[self.mahimahi_ptr - 1]
next_video_chunk_sizes = []
# for i in xrange(BITRATE_LEVELS):
# next_video_chunk_sizes.append(self.video_size[i][self.video_chunk_counter])
return delay, \
sleep_time, \
return_buffer_size / MILLISECONDS_IN_SECOND, \
rebuf / MILLISECONDS_IN_SECOND, \
video_chunk_size, \
next_video_chunk_sizes, \
end_of_video, \
video_chunk_remain
```

- avg_line_length: 44.502976 · max_line_length: 133 · alphanum_fraction: 0.600749
- quality metrics (signal/raw): num_words 1885/0 · num_chars 14953/0 · mean_word_length 4.491777/0 · frac_words_unique 0.119894/null · frac_chars_top_2grams 0.057872/0 · frac_chars_top_3grams 0.062005/0 · frac_chars_top_4grams 0.051966/0 · frac_chars_dupe_5grams 0.847171/1 · frac_chars_dupe_6grams 0.82355/1 · frac_chars_dupe_7grams 0.809141/1 · frac_chars_dupe_8grams 0.790363/1 · frac_chars_dupe_9grams 0.783749/1 · frac_chars_dupe_10grams 0.778316/1 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0.013174/0 · frac_chars_whitespace 0.324818/0 · size_file_byte 14953/0 · num_lines 335/0 · num_chars_line_max 134/0 · num_chars_line_mean 44.635821/0 · frac_chars_alphabet 0.825475/0 · frac_chars_comments 0.537685/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0.265487/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.001958/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0.017699/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.017699/0 · py:cate_var_zero false/0 · py:frac_lines_pass 0/0 · py:frac_lines_import 0.00885/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.044248/0 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
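Record 7's `frac_chars_dupe_*grams` signals sit near 0.8 because the commented-out draft of `get_video_chunk` duplicates the live implementation almost verbatim, so most of the file's word n-grams occur twice. A sketch of a duplicated-n-gram character fraction in that spirit; the exact tokenization and overlap accounting behind these columns is an assumption, which is why the result is capped at 1.0:

```python
from collections import Counter

def frac_chars_dupe_ngrams(content: str, n: int) -> float:
    """Approximate fraction of characters covered by repeated word n-grams."""
    words = content.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    # Characters in every occurrence of an n-gram seen more than once;
    # overlapping n-grams are double-counted here, hence the cap below.
    dupe_chars = sum(sum(len(w) for w in g) for g in ngrams if counts[g] > 1)
    total_chars = sum(len(w) for w in words)
    return min(1.0, dupe_chars / total_chars) if total_chars else 0.0
```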
**Record 8: hexsha `27598eb59ff4e69bffd8baa1d6f496234b1e7adf`**

- size: 6686 · ext: py · lang: Python
- max_stars: path `tests/test_rigid_alignment.py` · repo `Daiver/pygeom_tools` · head `ed89d8cab2d5956c7c680da1ce6335f4c0a31c70` · licenses `["MIT"]` · count 9 · event datetimes 2019-10-29T18:39:47.000Z to 2022-03-18T11:44:12.000Z
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count 1 · event datetimes 2021-06-24T08:34:56.000Z to 2021-06-24T08:34:56.000Z
- content:

```python
import unittest
import numpy as np
import geom_tools
from geom_tools.rigid_alignment import find_rotation_and_translation, find_rotation_and_translation_weighted
class TestRigidAlignment(unittest.TestCase):
def test_find_rotation_and_translation01(self):
src = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
]
dst = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
]
src = np.array(src)
dst = np.array(dst)
transformation = find_rotation_and_translation(src, dst)
res = geom_tools.transform_vertices(transformation, src)
self.assertTrue(geom_tools.utils.is_arrays_equal(res, dst))
def test_find_rotation_and_translation02(self):
src = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 2],
[0, 0, -2],
]
dst = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, -1],
]
ans = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 2],
[0, 0, -2],
]
src = np.array(src)
dst = np.array(dst)
transformation = find_rotation_and_translation(src, dst)
res = geom_tools.transform_vertices(transformation, src)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans, res))
def test_find_rotation_and_translation03(self):
src = [
[1, 0, 0],
[0, 1, 0],
[0, 0, 2],
[0, 0, -2],
]
dst = [
[0, 1, 0],
[-1, 0, 0],
[0, 0, 1],
[0, 0, -1],
]
ans = [
[0, 1, 0],
[-1, 0, 0],
[0, 0, 2],
[0, 0, -2],
]
src = np.array(src)
dst = np.array(dst)
transformation = find_rotation_and_translation(src, dst)
res = geom_tools.transform_vertices(transformation, src)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans, res))
def test_find_rotation_and_translation04(self):
src = [
[1, 2, 3],
[1, 5, 3],
[1, 5, 4],
]
dst = [
[10, 2, 3],
[10, 5, 3],
[10, 5, 4],
]
src = np.array(src, dtype=np.float64)
dst = np.array(dst, dtype=np.float64)
ans_rotation = np.eye(3)
ans_translation = np.array([9, 0, 0])
res_rotation, res_translation = find_rotation_and_translation(src, dst)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_translation, res_translation))
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_rotation, res_rotation))
def test_cov_mat_from_vertices_weighted01(self):
vertices1 = np.array([
[1, 2, 0],
], dtype=np.float32)
vertices2 = np.array([
[3, 5, 0],
], dtype=np.float32)
weights = np.array([
2
])
ans = np.array([
[6, 10, 0],
[12, 20, 0],
[0, 0, 0],
], dtype=np.float32)
res = geom_tools.rigid_alignment.cov_mat_from_vertices_weighted(vertices1, vertices2, weights)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans, res))
def test_find_rotation_and_translation_weighted01(self):
vertices1 = np.array([
[-1, 0, 0],
[0, -1, 0],
[1, 0, 0],
[0, 1, 0]
])
vertices2 = np.array([
[-1, 0, 0],
[0, -1, 0],
[1, 0, 0],
[0, 1, 0]
])
weights = np.array([1, 1, 1, 1])
ans_trans = np.array([0, 0, 0], dtype=np.float32)
ans_rot = np.eye(3)
res_rot, res_trans = geom_tools.rigid_alignment.find_rotation_and_translation_weighted(
vertices1, vertices2, weights
)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_rot, res_rot))
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_trans, res_trans))
def test_find_rotation_and_translation_weighted02(self):
vertices1 = np.array([
[-1, 0, 0],
[0, -1, 0],
[1, 0, 0],
[1, 20, -55],
[0, 1, 0]
])
vertices2 = np.array([
[-1, 0, 0],
[0, -1, 0],
[1, 0, 0],
[100, 2340, 55],
[0, 1, 0]
])
weights = np.array([1, 1, 1, 0, 1])
ans_trans = np.array([0, 0, 0], dtype=np.float32)
ans_rot = np.eye(3)
res_rot, res_trans = geom_tools.rigid_alignment.find_rotation_and_translation_weighted(
vertices1, vertices2, weights
)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_rot, res_rot))
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_trans, res_trans))
def test_find_rotation_and_translation_weighted03(self):
vertices1 = np.array([
[-1, 0, 0.5],
[-2, -4, 5],
[0, -1, 0.5],
[1, 0, 0.5],
[0, 1, 0.5],
[22, -5, 2],
])
vertices2 = np.array([
[0, 2, 1],
[13, 3, 3],
[-2, 0, 1],
[0, -2, 1],
[2, 0, 1],
[16, 55, 33],
])
weights = np.array([1, 0, 1, 1, 1, 0])
ans_trans = np.array([0, 0, 0.5], dtype=np.float32)
ans_rot = np.array([
[0, 1, 0],
[-1, 0, 0],
[0, 0, 1]
], dtype=np.float)
res_rot, res_trans = geom_tools.rigid_alignment.find_rotation_and_translation_weighted(
vertices1, vertices2, weights
)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_rot, res_rot))
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_trans, res_trans))
def test_find_rotation_and_translation_weighted04(self):
src = [
[1, 2, 3],
[1, 5, 3],
[1, 5, 4],
[45, 22, 2]
]
dst = [
[10, 2, 3],
[10, 5, 3],
[10, 5, 4],
[4, 5, 111],
]
src = np.array(src, dtype=np.float64)
dst = np.array(dst, dtype=np.float64)
weights = np.array([1, 1, 1, 0])
ans_rotation = np.eye(3)
ans_translation = np.array([9, 0, 0])
res_rotation, res_translation = find_rotation_and_translation_weighted(src, dst, weights)
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_translation, res_translation))
self.assertTrue(geom_tools.utils.is_arrays_equal(ans_rotation, res_rotation))
if __name__ == '__main__':
unittest.main()
```

- avg_line_length: 31.097674 · max_line_length: 108 · alphanum_fraction: 0.495962
- quality metrics (signal/raw): num_words 843/0 · num_chars 6686/0 · mean_word_length 3.711744/0 · frac_words_unique 0.09134/null · frac_chars_top_2grams 0.042186/0 · frac_chars_top_3grams 0.027804/0 · frac_chars_top_4grams 0.026846/0 · frac_chars_dupe_5grams 0.838926/1 · frac_chars_dupe_6grams 0.82007/1 · frac_chars_dupe_7grams 0.772132/1 · frac_chars_dupe_8grams 0.752956/1 · frac_chars_dupe_9grams 0.747523/1 · frac_chars_dupe_10grams 0.729626/1 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0.087223/0 · frac_chars_whitespace 0.36554/0 · size_file_byte 6686/0 · num_lines 214/0 · num_chars_line_max 109/0 · num_chars_line_mean 31.242991/0 · frac_chars_alphabet 0.650401/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0.665/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.001197/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0.07/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.045/0 · py:cate_var_zero false/0 · py:frac_lines_pass 0/0 · py:frac_lines_import 0.02/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.07/0 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 9: hexsha `27e5c041207bc56a22072339e4cbf39e1270b3f2`**

- size: 233 · ext: py · lang: Python
- max_stars: path `other-materials/python-intro/atom.py` · repo `inferential/drug-computing` · head `25ff2f04b2a1f7cb71c552f62e722edb26cc297f` · licenses `["CC-BY-4.0", "MIT"]` · count 103 · event datetimes 2017-10-21T18:49:01.000Z to 2022-03-24T22:05:21.000Z
- max_issues: path/repo/head as above · licenses `["CC-BY-4.0", "MIT"]` · count 29 · event datetimes 2017-10-23T20:57:17.000Z to 2022-03-15T21:57:09.000Z
- max_forks: path/repo/head as above · licenses `["CC-BY-4.0", "MIT"]` · count 36 · event datetimes 2018-01-18T20:22:29.000Z to 2022-03-16T13:08:09.000Z
- content:

```python
class AtomClass:
def __init__(self, Velocity, Element = 'C', Mass = 12.0):
self.Velocity = Velocity
self.Element = Element
self.Mass = Mass
def Momentum(self):
return self.Velocity * self.Mass
```

- avg_line_length: 29.125 · max_line_length: 61 · alphanum_fraction: 0.613734
- quality metrics (signal/raw): num_words 28/1 · num_chars 233/0 · mean_word_length 4.964286/0 · frac_words_unique 0.464286/null · frac_chars_top_2grams 0.258993/1 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0.017964/0 · frac_chars_whitespace 0.283262/0 · size_file_byte 233/0 · num_lines 7/1 · num_chars_line_max 62/0 · num_chars_line_mean 33.285714/0 · frac_chars_alphabet 0.814371/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.004292/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.285714/1 · py:cate_var_zero false/0 · py:frac_lines_pass 0/0 · py:frac_lines_import 0/0 · py:frac_lines_simplefunc 0.142857/1 · py:score_lines_no_logic 0.571429/1 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 10: hexsha `fd6e3de023ed5b1f472af3d7c5768225c9479062`**

- size: 26 · ext: py · lang: Python
- max_stars: path `drawer/__init__.py` · repo `neuralrefinery/nr-python-client` · head `14d569721079bdf64ba86650cb0286dbcd02eda2` · licenses `["CC0-1.0"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["CC0-1.0"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["CC0-1.0"]` · count null · event datetimes null
- content:

```python
from .drawer import drawer
```

- avg_line_length: 26 · max_line_length: 26 · alphanum_fraction: 0.846154
- quality metrics (signal/raw): num_words 4/1 · num_chars 26/1 · mean_word_length 5.5/0 · frac_words_unique 0.75/null · frac_chars_top_2grams 0/0 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.115385/0 · size_file_byte 26/0 · num_lines 1/1 · num_chars_line_max 26/0 · num_chars_line_mean 26/0 · frac_chars_alphabet 0.956522/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 1/1 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 1/1 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 11: hexsha `e33099bf92e5833943ac56471e47ebe360d26b20`**

- size: 73 · ext: py · lang: Python
- max_stars: path `saopy/tzont/__init__.py` · repo `CityPulse/CP_Resourcemanagement` · head `aa670fa89d5e086a98ade3ccc152518be55abf2e` · licenses `["MIT"]` · count 2 · event datetimes 2016-11-03T14:57:45.000Z to 2019-05-13T13:21:08.000Z
- max_issues: path/repo/head as above · licenses `["MIT"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["MIT"]` · count 1 · event datetimes 2020-07-23T11:27:15.000Z to 2020-07-23T11:27:15.000Z
- content:

```python
import saopy.model
from saopy.model import tzont___TimeZone as TimeZone
```

- avg_line_length: 18.25 · max_line_length: 52 · alphanum_fraction: 0.849315
- quality metrics (signal/raw): num_words 11/1 · num_chars 73/0 · mean_word_length 5.363636/0 · frac_words_unique 0.636364/null · frac_chars_top_2grams 0.338983/1 · frac_chars_top_3grams 0/0 · frac_chars_top_4grams 0/0 · frac_chars_dupe_5grams 0/0 · frac_chars_dupe_6grams 0/0 · frac_chars_dupe_7grams 0/0 · frac_chars_dupe_8grams 0/0 · frac_chars_dupe_9grams 0/0 · frac_chars_dupe_10grams 0/0 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0/0 · frac_chars_whitespace 0.123288/0 · size_file_byte 73/0 · num_lines 3/1 · num_chars_line_max 53/0 · num_chars_line_mean 24.333333/0 · frac_chars_alphabet 0.921875/0 · frac_chars_comments 0/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0/0 · frac_chars_long_word_length 0/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0/0 · py:cate_var_zero true/1 · py:frac_lines_pass 0/0 · py:frac_lines_import 1/1 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 1/1 · py:frac_lines_print 0/0
- effective: 0 · hits: 6
**Record 12: hexsha `8b64196986c464a5aa54044db3ae11c428be7621`**

- size: 16797 · ext: py · lang: Python
- max_stars: path `applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/classDefs_solverWrapper/methodDefs_KratosSolverWrapper/mpi_solve.py` · repo `ma6yu/Kratos` · head `02380412f8a833a2cdda6791e1c7f9c32e088530` · licenses `["BSD-4-Clause"]` · count null · event datetimes null
- max_issues: path/repo/head as above · licenses `["BSD-4-Clause"]` · count null · event datetimes null
- max_forks: path/repo/head as above · licenses `["BSD-4-Clause"]` · count null · event datetimes null
- content:

```python
# Import Python libraries
import time
import pickle
import os
try:
from threadpoolctl import *
except:
pass
# Import XMC, distributed environment
from xmc.distributedEnvironmentFramework import *
from xmc.classDefs_solverWrapper.methodDefs_KratosSolverWrapper.solve import ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality,ExecuteInstanceReadingFromFileAux_Functionality,ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality
try:
computing_procs_mlmc_execute_0 = int(os.environ["computing_procs_mlmc_execute_0"])
except:
computing_procs_mlmc_execute_0 = 1
####################################################################################################
########################################## SERIALIZATION ###########################################
####################################################################################################
@constraint(computing_units="${computing_units_mlmc_execute_0}")
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0)
@ExaquteTask(returns=computing_procs_mlmc_execute_0)
def SerializeMPIModel(pickled_parameters, main_model_part_name, fake_sample_to_serialize, analysis):
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
serialized_parameters = pickle.loads(pickled_parameters)
del pickled_parameters
deserialized_parameters = KratosMultiphysics.Parameters()
serialized_parameters.Load("ParametersSerialization", deserialized_parameters)
# prepare the model to serialize
model = KratosMultiphysics.Model()
fake_sample = fake_sample_to_serialize
deserialized_parameters["solver_settings"]["model_import_settings"]["input_type"].SetString("mdpa")
simulation = analysis(model,deserialized_parameters,fake_sample)
simulation.Initialize()
# reset general flags
simulation.model.GetModelPart(main_model_part_name).ProcessInfo.SetValue(KratosMultiphysics.IS_RESTARTED,True)
# serialize model
serialized_model = KratosMultiphysics.MpiSerializer()
serialized_model.Save("ModelSerialization",simulation.model)
# self.serialized_model.append(serialized_model)
# pickle dataserialized_data
pickled_model = pickle.dumps(serialized_model, 2) # second argument is the protocol and is NECESSARY (according to pybind11 docs)
return pickled_model
####################################################################################################
############################################ WRAPPERS ##############################################
####################################################################################################
def executeInstanceStochasticAdaptiveRefinementAllAtOnce_Wrapper(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,current_contribution):
if (current_index == 0):
qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
elif (current_index == 1):
qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
elif (current_index == 2):
qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev2_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
else:
raise Exception("Level not supported")
qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
return qoi, time_for_qoi
def executeInstanceStochasticAdaptiveRefinementMultipleTasks_Wrapper(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,print_to_file,current_contribution,pickled_mapping_reference_model=None):
if (current_index == 0):
qoi_pickled_current_model_time_for_qoi_list = ExecuteInstanceStochasticAdaptiveRefinementMultipleTasksAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
elif (current_index == 1):
qoi_and_time_list = ExecuteInstanceStochasticAdaptiveRefinementMultipleTasksAuxLev1_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
else:
raise Exception("Level not supported")
qoi, pickled_current_model, time_for_qoi = UnfoldFutureQMT(qoi_pickled_current_model_time_for_qoi_list)
return qoi, pickled_current_model, time_for_qoi
def executeInstanceDeterministicAdaptiveRefinement_Wrapper(current_index,pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,current_contribution):
if (current_index == 0):
qoi_and_time_list = executeInstanceDeterministicAdaptiveRefinementAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
elif (current_index == 1):
qoi_and_time_list = executeInstanceDeterministicAdaptiveRefinementAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
else:
raise Exception("Level not supported")
qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
return qoi, time_for_qoi
def executeInstanceReadingFromFile_Wrapper(current_index,pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,current_contribution):
if (current_index == 0):
qoi_and_time_list = executeInstanceReadingFromFileAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
elif (current_index == 1):
qoi_and_time_list = executeInstanceReadingFromFileAuxLev1_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,"filename_level_"+str(current_index)+"_contribution_"+str(current_contribution)+"_random_variable_"+str(random_variable[0])+".dat")
else:
raise Exception("Level not supported")
qoi, time_for_qoi = UnfoldFutureQT(qoi_and_time_list)
return qoi, time_for_qoi
####################################################################################################
############################################## TASKS ###############################################
####################################################################################################
@ExaquteTask(qoi_and_time_list={Type: COLLECTION_IN, Depth: 2}, returns=2)
def UnfoldFutureQT(qoi_and_time_list):
qoi = qoi_and_time_list[0][0] # get first qoi element (all are equal since they are synchronized)
time_for_qoi = 0.0
for qoi_and_time in qoi_and_time_list:
time_for_qoi += qoi_and_time[1] # sum all times
return qoi, time_for_qoi
@ExaquteTask(qoi_pickled_current_model_time_for_qoi_list={Type: COLLECTION_IN, Depth: 2}, returns=3)
def UnfoldFutureQMT(qoi_pickled_current_model_time_for_qoi_list):
qoi = qoi_pickled_current_model_time_for_qoi_list[0][0] # get first qoi element (all are equal since they are synchronized)
pickled_current_model = qoi_pickled_current_model_time_for_qoi_list[1]
time_for_qoi = 0.0
for qoi_pickled_current_model_time_for_qoi in qoi_pickled_current_model_time_for_qoi_list:
time_for_qoi += qoi_pickled_current_model_time_for_qoi[-1] # sum all times
return qoi, pickled_current_model, time_for_qoi
############################### StochasticAdaptiveRefinementAllAtOnce ##############################
# @ExaquteTask(filename=FILE_OUT,pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units="${computing_units_mlmc_execute_0}")
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@ExaquteTask(pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def ExecuteInstanceStochasticAdaptiveRefinementAllAtOnceAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_analysis,time_for_qoi,mapping_flag,adaptive_refinement_jump_to_finest_level,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = int(os.environ["computing_units_mlmc_execute_0"])
threadpool_limits(limits=open_mp_threads)
except:
open_mp_threads = 1
pickled_coarsest_model = pickled_coarse_model
for current_local_index in range(current_index+1):
if ((adaptive_refinement_jump_to_finest_level is False) or (adaptive_refinement_jump_to_finest_level is True and (current_local_index == 0 or current_local_index == current_index))):
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_coarsest_model,print_to_file,filename)
del(pickled_coarse_model)
pickled_coarse_model = pickled_current_model
del(pickled_current_model)
return qoi,time_for_qoi
############################# StochasticAdaptiveRefinementMultipleTasks ############################
# @ExaquteTask(filename=FILE_OUT,pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units="${computing_units_mlmc_execute_0}")
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, pickled_coarse_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@ExaquteTask(pickled_coarse_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def ExecuteInstanceStochasticAdaptiveRefinementMultipleTasksAuxLev0_Task(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = int(os.environ["computing_units_mlmc_execute_0"])
threadpool_limits(limits=open_mp_threads)
except:
open_mp_threads = 1
qoi,pickled_current_model,time_for_qoi = \
ExecuteInstanceStochasticAdaptiveRefinementAux_Functionality(current_index,pickled_coarse_model,pickled_coarse_project_parameters,pickled_custom_metric_refinement_parameters,pickled_custom_remesh_refinement_parameters,random_variable,current_local_index,current_analysis,time_for_qoi,open_mp_threads,mapping_flag,pickled_mapping_reference_model,print_to_file,filename)
return qoi,pickled_current_model,time_for_qoi
########################################## DeterministicAdaptiveRefinement ########################################
# @ExaquteTask(filename=FILE_OUT,pickled_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units="${computing_units_mlmc_execute_0}")
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, pickled_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@ExaquteTask(pickled_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def executeInstanceDeterministicAdaptiveRefinementAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = int(os.environ["computing_units_mlmc_execute_0"])
threadpool_limits(limits=open_mp_threads)
except:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceDeterministicAdaptiveRefinementAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
########################################## ReadingFromFile #########################################
# @ExaquteTask(filename=FILE_OUT,pickled_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
@constraint(computing_units="${computing_units_mlmc_execute_0}")
@mpi(runner="mpirun", processes=computing_procs_mlmc_execute_0, pickled_model_layout={block_count: computing_procs_mlmc_execute_0, block_length: 1, stride: 1})
@ExaquteTask(pickled_model=COLLECTION_IN, returns=computing_procs_mlmc_execute_0)
def executeInstanceReadingFromFileAuxLev0_Task(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename):
# Import Kratos
import KratosMultiphysics
import KratosMultiphysics.mpi as KratosMPI
from KratosMultiphysics.MultilevelMonteCarloApplication.adaptive_refinement_utilities import AdaptiveRefinement
try:
open_mp_threads = int(os.environ["computing_units_mlmc_execute_0"])
threadpool_limits(limits=open_mp_threads)
except:
open_mp_threads = 1
qoi,time_for_qoi = \
ExecuteInstanceReadingFromFileAux_Functionality(pickled_model,pickled_project_parameters,current_analysis,random_variable,time_for_qoi,mapping_flag,pickled_mapping_reference_model,print_to_file,filename,open_mp_threads)
return qoi,time_for_qoi
```

- avg_line_length: 76.69863 · max_line_length: 536 · alphanum_fraction: 0.794547
- quality metrics (signal/raw): num_words 1902/0 · num_chars 16797/0 · mean_word_length 6.494742/0 · frac_words_unique 0.100946/null · frac_chars_top_2grams 0.026228/0 · frac_chars_top_3grams 0.042095/0 · frac_chars_top_4grams 0.0425/0 · frac_chars_dupe_5grams 0.806849/1 · frac_chars_dupe_6grams 0.785882/1 · frac_chars_dupe_7grams 0.783211/1 · frac_chars_dupe_8grams 0.756658/1 · frac_chars_dupe_9grams 0.730106/1 · frac_chars_dupe_10grams 0.721849/1 · frac_chars_replacement_symbols 0/0 · frac_chars_digital 0.006048/0 · frac_chars_whitespace 0.074656/0 · size_file_byte 16797/0 · num_lines 218/0 · num_chars_line_max 537/0 · num_chars_line_mean 77.050459/0 · frac_chars_alphabet 0.788715/0 · frac_chars_comments 0.063821/0 · cate_xml_start 0/0 · frac_lines_dupe_lines 0.565789/0 · cate_autogen 0/0 · frac_lines_long_string 0/0 · frac_chars_string_length 0.066103/0 · frac_chars_long_word_length 0.024668/0 · frac_lines_string_concat 0/null · cate_encoded_data 0/0 · frac_chars_hex_words 0/0 · frac_lines_prompt_comments 0/0 · frac_lines_assert 0/0 · py:cate_ast 1/0 · py:frac_lines_func_ratio 0.072368/0 · py:cate_var_zero false/0 · py:frac_lines_pass 0.006579/0 · py:frac_lines_import 0.138158/0 · py:frac_lines_simplefunc 0/0 · py:score_lines_no_logic 0.282895/0 · py:frac_lines_print 0.138158/0
- effective: 0 · hits: 6
**Record 13: hexsha `8b7265568526c35191b0cede28aea96cf50c6844`**

- size: 124 · ext: py · lang: Python
- max_stars: path `lab3/text_recognizer/lit_models/__init__.py` · repo `Agyey/fsdl-text-recognizer-2021-labs` · head `4bd85042ab9f6decd78849bb655c197cc13ffc11` · licenses `["MIT"]` · count 402 · event datetimes 2021-01-18T12:14:08.000Z to 2022-03-28T03:41:05.000Z
- max_issues: path/repo/head as above · licenses `["MIT"]` · count 27 · event datetimes 2021-01-21T01:54:30.000Z to 2022-03-29T21:39:41.000Z
- max_forks: path/repo/head as above · licenses `["MIT"]` · count 271 · event datetimes 2021-01-21T18:07:24.000Z to 2022-03-30T12:49:53.000Z
- content:

```python
from .base import BaseLitModel
# Hide lines below until Lab 3
from .ctc import CTCLitModel
# Hide lines above until Lab 3
avg_line_length: 17.714286 | max_line_length: 30 | alphanum_fraction: 0.774194 | qsc_code_num_words: 20 | qsc_code_num_chars: 124 | qsc_code_mean_word_length: 4.8 | qsc_code_frac_words_unique: 0.65 | qsc_code_num_lines: 6 | qsc_code_frac_chars_comments: 0.459677 | … (remaining qsc_* quality-signal columns)

hexsha: 8bce004b983cdd07f804f47b1734ffc4fc5d65ce | size: 192 | ext: py | lang: Python
max_stars_repo_path: crypto-cuck-coin-genesis.py | max_stars_repo_name: alexanderjsingleton/crypto-cuck-coin | max_stars_repo_head_hexsha: 1bb184eff82f3a541391663ebdf82740ac8577cf | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | stars events: 2018-12-01T06:52:59.000Z to 2020-02-13T17:30:07.000Z
max_issues_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_issues_count: null | issues events: null
max_forks_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_forks_count: null | forks events: null
content:
import datetime as date

def create_genesis_block():
    # Manually construct a block with
    # index zero and arbitrary previous hash
    return Block(0, date.datetime.now(), "Genesis Block", "0")
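The snippet above calls a Block class that this file never defines. Below is a minimal sketch of such a class, assuming only what the four-argument call implies (index, timestamp, data, previous hash); the hash_block helper is a hypothetical illustration, not the repository's actual implementation:

import hashlib

class Block:
    def __init__(self, index, timestamp, data, previous_hash):
        self.index = index
        self.timestamp = timestamp
        self.data = data
        self.previous_hash = previous_hash
        self.hash = self.hash_block()  # hash derived from this block's own fields

    def hash_block(self):
        # Concatenate the block's fields and hash them (illustrative scheme).
        payload = "{}{}{}{}".format(self.index, self.timestamp, self.data, self.previous_hash)
        return hashlib.sha256(payload.encode()).hexdigest()

With a Block like this in scope, create_genesis_block() returns a hashed genesis block with index 0 and previous hash "0".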
avg_line_length: 32 | max_line_length: 60 | alphanum_fraction: 0.744792 | qsc_code_num_words: 28 | qsc_code_num_chars: 192 | qsc_code_mean_word_length: 5.035714 | qsc_code_frac_words_unique: 0.75 | qsc_code_num_lines: 6 | qsc_code_frac_chars_comments: 0.364583 | … (remaining qsc_* quality-signal columns)

hexsha: 8bdab21d22bccbf3db0fc329877db4c964bb5537 | size: 305 | ext: py | lang: Python
max_stars_repo_path: runtests.py | max_stars_repo_name: Mishioo/tesliper | max_stars_repo_head_hexsha: 26857e1d46b309ea24ade01059fc92e3a192c471 | max_stars_repo_licenses: ["BSD-2-Clause"] | max_stars_count: 1 | stars events: 2019-01-27T00:03:50.000Z to 2019-01-27T00:03:50.000Z
max_issues_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_issues_count: null | issues events: null
max_forks_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_forks_count: null | forks events: null
content:
import unittest

from test.unit.extraction.gaussian_parser_test import *
from test.unit.extraction.extraction_test import *
from test.unit.glassware_test import *
from test.unit.datawork_test import *
from test.unit.writer_test import *
from test.unit.tesliper_test import *

if __name__ == '__main__':
    # unittest was only reachable via the wildcard imports; it is now imported explicitly above.
    unittest.main()
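An equivalent sketch that drops the wildcard imports and lets unittest discover the same modules (assuming the tests live under test/unit and follow the *_test.py naming used above):

import unittest

if __name__ == '__main__':
    suite = unittest.defaultTestLoader.discover('test/unit', pattern='*_test.py')
    unittest.TextTestRunner().run(suite)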
avg_line_length: 33.888889 | max_line_length: 55 | alphanum_fraction: 0.8 | qsc_code_num_words: 44 | qsc_code_num_chars: 305 | qsc_code_mean_word_length: 5.204545 | qsc_code_frac_words_unique: 0.340909 | qsc_code_num_lines: 9 | qsc_codepython_frac_lines_import: 0.75 | … (remaining qsc_* quality-signal columns)

hexsha: 47494238acbbcaf60a4791356c76abac1e9f349d | size: 79,447 | ext: py | lang: Python
max_stars_repo_path: tests/app/views/test_buyers.py | max_stars_repo_name: pebblecode/cirrus-buyer-frontend | max_stars_repo_head_hexsha: 506c45eab09fa9538c0eb05643e24feecdcca56f | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | stars events: null
max_issues_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_issues_count: null | issues events: null
max_forks_repo_path/name/head_hexsha/licenses: same values as the max_stars columns | max_forks_count: null | forks events: null
content:
# coding: utf-8
from __future__ import unicode_literals
from ...helpers import BaseApplicationTest
from dmapiclient import api_stubs, HTTPError
from dmutils.content_loader import ContentLoader
import mock
from lxml import html
import pytest
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestBuyerDashboard(BaseApplicationTest):
def test_buyer_dashboard(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.find_briefs.return_value = {
"briefs": [
{"status": "draft",
"title": "A draft brief",
"createdAt": "2016-02-02T00:00:00.000000Z",
"frameworkSlug": "digital-outcomes-and-specialists"},
{"status": "live",
"title": "A live brief",
"createdAt": "2016-02-01T00:00:00.000000Z",
"publishedAt": "2016-02-04T12:00:00.000000Z",
"frameworkSlug": "digital-outcomes-and-specialists"},
]
}
res = self.client.get("/buyers")
document = html.fromstring(res.get_data(as_text=True))
assert res.status_code == 200
tables = document.xpath('//table')
draft_row = [cell.text_content().strip() for cell in tables[0].xpath('.//tbody/tr/td')]
assert draft_row[0] == "A draft brief"
assert draft_row[1] == "Tuesday 02 February 2016"
live_row = [cell.text_content().strip() for cell in tables[1].xpath('.//tbody/tr/td')]
assert live_row[0] == "A live brief"
assert live_row[1] == "Thursday 04 February 2016"
@pytest.mark.skip(reason="no counts on dashboard until API response includes them")
def test_closed_brief_response_count(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.find_briefs.return_value = {
"briefs": [
{"status": "closed",
"id": 12,
"title": "A closed brief",
"createdAt": "2016-02-01T00:00:00.000000Z",
"publishedAt": "2016-02-04T12:00:00.000000Z",
"frameworkSlug": "digital-outcomes-and-specialists"},
]
}
data_api_client.find_brief_responses.return_value = {
"links": [],
"briefResponses": [
{"empty": "empty"},
]
}
res = self.client.get("/buyers")
document = html.fromstring(res.get_data(as_text=True))
assert res.status_code == 200
cell = document.xpath(
"//caption[contains(text(), 'Closed requirements')]"
"//following-sibling::tbody/tr[1]/td[last()]"
)[0]
assert "1 responses" in cell.text_content()
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestStartNewBrief(BaseApplicationTest):
def test_show_start_brief_page(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create")
assert res.status_code == 200
def test_404_if_lot_does_not_allow_brief(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False)
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create")
assert res.status_code == 404
def test_404_if_framework_status_is_not_live(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create")
assert res.status_code == 404
def test_404_if_lot_does_not_exist(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-octopuses/create")
assert res.status_code == 404
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestCreateNewBrief(BaseApplicationTest):
def test_create_new_digital_specialists_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create",
data={
"title": "Title"
})
assert res.status_code == 302
data_api_client.create_brief.assert_called_with(
'digital-outcomes-and-specialists',
'digital-specialists',
123,
{'title': "Title"},
page_questions=['title'],
updated_by='buyer@email.com'
)
def test_create_new_digital_outcomes_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True)
]
)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/create",
data={
"title": "Title"
})
assert res.status_code == 302
data_api_client.create_brief.assert_called_with(
'digital-outcomes-and-specialists',
'digital-outcomes',
123,
{'title': "Title"},
page_questions=['title'],
updated_by='buyer@email.com'
)
def test_404_if_lot_does_not_allow_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False)
]
)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create",
data={
"specialistRole": "agileCoach"
})
assert res.status_code == 404
assert not data_api_client.create_brief.called
def test_404_if_framework_status_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create",
data={
"specialistRole": "agileCoach"
})
assert res.status_code == 404
assert not data_api_client.create_brief.called
def test_404_if_lot_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-octopuses/create",
data={
"specialistRole": "agileCoach"
})
assert res.status_code == 404
assert not data_api_client.create_brief.called
def test_400_if_form_error(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.create_brief.side_effect = HTTPError(
mock.Mock(status_code=400),
{"title": "answer_required"})
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/create",
data={
"title": "Title"
})
document = html.fromstring(res.get_data(as_text=True))
assert res.status_code == 400
anchor = document.cssselect('div.validation-masthead a[href="#title"]')
assert len(anchor) == 1
assert "Title" in anchor[0].text_content().strip()
data_api_client.create_brief.assert_called_with(
'digital-outcomes-and-specialists',
'digital-specialists',
123,
{'title': "Title"},
page_questions=['title'],
updated_by='buyer@email.com'
)
class TestEveryDamnPage(BaseApplicationTest):
# @mock.patch("app.buyers.views.buyers.content_loader")
def _load_page(self, url, status_code, method='get', data=None):
data = {} if data is None else data
baseurl = "/buyers/frameworks/digital-outcomes-and-specialists/requirements"
with mock.patch('app.buyers.views.buyers.content_loader') as content_loader, \
mock.patch('app.buyers.views.buyers.data_api_client') as data_api_client:
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
api_stubs.lot(slug='digital-outcomes', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = getattr(self.client, method)(
"{}{}".format(baseurl, url),
data=data)
assert res.status_code == status_code
# These should all work as expected
def test_wrong_lot_get_view_brief_overview(self):
self._load_page("/digital-specialists/1234", 200)
def test_wrong_lot_get_view_section_summary(self):
self._load_page("/digital-specialists/1234/section-1", 200)
def test_wrong_lot_get_edit_brief_question(self):
self._load_page("/digital-specialists/1234/edit/section-1/required1", 200)
def test_wrong_lot_post_edit_brief_question(self):
data = {"required1": True}
self._load_page("/digital-specialists/1234/edit/section-1/required1", 302, method='post', data=data)
def test_wrong_lot_get_view_brief_responses(self):
self._load_page("/digital-specialists/1234/responses", 200)
# get and post are the same for publishing
def test_wrong_lot_post_delete_a_brief(self):
data = {"delete_confirmed": True}
self._load_page("/digital-specialists/1234/delete", 302, method='post', data=data)
# Wrong lots
def test_get_view_brief_overview(self):
self._load_page("/digital-outcomes/1234", 404)
def test_get_view_section_summary(self):
self._load_page("/digital-outcomes/1234/section-1", 404)
def test_get_edit_brief_question(self):
self._load_page("/digital-outcomes/1234/edit/section-1/required1", 404)
def test_post_edit_brief_question(self):
data = {"required1": True}
self._load_page("/digital-outcomes/1234/edit/section-1/required1", 404, method='post', data=data)
def test_get_view_brief_responses(self):
self._load_page("/digital-outcomes/1234/responses", 404)
# get and post are the same for publishing
def test_publish_brief(self):
self._load_page("/digital-outcomes/1234/publish", 404)
def test_post_delete_a_brief(self):
data = {"delete_confirmed": True}
self._load_page("/digital-outcomes/1234/delete", 404, method='post', data=data)
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestEditBriefSubmission(BaseApplicationTest):
def _test_breadcrumbs_on_question_page(self, response, has_summary_page=False, section_name=None):
breadcrumbs = html.fromstring(response.get_data(as_text=True)).xpath(
'//*[@id="global-breadcrumb"]/nav/ol/li'
)
breadcrumbs_we_expect = [
('Digital Marketplace', '/'),
('Your account', '/buyers'),
('I need a thing to do a thing',
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234')
]
if has_summary_page and section_name:
breadcrumbs_we_expect.append((
section_name,
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/{}'.format(
section_name.lower().replace(' ', '-')
)
))
assert len(breadcrumbs) == len(breadcrumbs_we_expect)
for index, link in enumerate(breadcrumbs_we_expect):
assert breadcrumbs[index].find('a').text_content().strip() == link[0]
assert breadcrumbs[index].find('a').get('href').strip() == link[1]
def test_edit_brief_submission(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
assert document.xpath('//h1')[0].text_content().strip() == "Organisation the work is for"
@mock.patch("app.buyers.views.buyers.content_loader")
def test_edit_brief_submission_return_link_to_section_summary_if_section_has_description(
self, content_loader, data_api_client
):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/section-4/optional2")
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
assert document.xpath('//h1')[0].text_content().strip() == "Optional 2"
assert document.xpath(
'//form//div[contains(@class, "secondary-action-link")]/a'
)[0].get('href').strip() == "/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-4" # noqa
self._test_breadcrumbs_on_question_page(response=res, has_summary_page=True, section_name='Section 4')
@mock.patch("app.buyers.views.buyers.content_loader")
def test_edit_brief_submission_return_link_to_section_summary_if_other_questions(self, content_loader,
data_api_client): # noqa
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/section-1/required1")
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
assert document.xpath('//h1')[0].text_content().strip() == "Required 1"
assert document.xpath(
'//form//div[contains(@class, "secondary-action-link")]/a'
)[0].get('href').strip() == "/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-1" # noqa
self._test_breadcrumbs_on_question_page(response=res, has_summary_page=True, section_name='Section 1')
@mock.patch("app.buyers.views.buyers.content_loader")
def test_edit_brief_submission_return_link_to_brief_overview_if_single_question(self, content_loader,
data_api_client): # noqa
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/section-2/required2")
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
assert document.xpath('//h1')[0].text_content().strip() == "Required 2"
assert document.xpath(
'//form//div[contains(@class, "secondary-action-link")]/a'
)[0].get('href').strip() == "/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234" # noqa
self._test_breadcrumbs_on_question_page(response=res, has_summary_page=False)
@mock.patch("app.buyers.views.buyers.content_loader")
def test_edit_brief_submission_multiquestion(self, content_loader, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/edit/section-5/required3") # noqa
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
assert document.xpath('//h1')[0].text_content().strip() == "Required 3"
assert document.xpath(
'//*[@id="required3_1"]//span[contains(@class, "question-heading")]/p'
)[0].text_content().strip() == "Required 3_1"
assert document.xpath(
'//*[@id="required3_2"]//span[contains(@class, "question-heading")]/p'
)[0].text_content().strip() == "Required 3_2"
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 404
def test_404_if_lot_does_not_allow_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False)
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 404
def test_404_if_lot_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-octopuses"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 404
def test_404_if_framework_status_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 404
def test_404_if_brief_has_published_status(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status='published')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/organisation")
assert res.status_code == 404
def test_404_if_section_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/not-a-real-section")
assert res.status_code == 404
def test_404_if_question_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists"
"/1234/edit/description-of-work/not-a-real-question")
assert res.status_code == 404
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestUpdateBriefSubmission(BaseApplicationTest):
def test_update_brief_submission(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/organisation",
data={
"organisation": "GDS"
})
assert res.status_code == 302
data_api_client.update_brief.assert_called_with(
'1234',
{"organisation": "GDS"},
page_questions=['organisation'],
updated_by='buyer@email.com'
)
@mock.patch("app.buyers.views.buyers.content_loader")
def test_post_update_if_multiple_questions_redirects_to_section_summary(self, content_loader, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/section-1/required1",
data={
"required1": True
})
assert res.status_code == 302
data_api_client.update_brief.assert_called_with(
'1234',
{"required1": True},
page_questions=['required1'],
updated_by='buyer@email.com'
)
assert res.headers['Location'].endswith(
'buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-1'
) is True
@mock.patch("app.buyers.views.buyers.content_loader")
def test_post_update_if_section_description_redirects_to_section_summary(self, content_loader, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/section-4/optional2",
data={
"optional2": True
})
assert res.status_code == 302
data_api_client.update_brief.assert_called_with(
'1234',
{"optional2": True},
page_questions=['optional2'],
updated_by='buyer@email.com'
)
assert res.headers['Location'].endswith(
'buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-4'
) is True
@mock.patch("app.buyers.views.buyers.content_loader")
def test_post_update_if_single_question_no_description_redirects_to_overview(self, content_loader, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/section-2/required2",
data={
"required2": True
})
assert res.status_code == 302
data_api_client.update_brief.assert_called_with(
'1234',
{"required2": True},
page_questions=['required2'],
updated_by='buyer@email.com'
)
assert res.headers['Location'].endswith(
'buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234'
) is True
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/organisation",
data={
"organisation": "GDS"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_404_if_lot_does_not_allow_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/organisation",
data={
"title": "A new title"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_404_if_lot_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-octopuses/1234/edit/description-of-work/organisation",
data={
"title": "A new title"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_404_if_framework_status_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='open',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/organisation",
data={
"title": "A new title"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_404_if_brief_is_already_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status='live')
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/organisation",
data={
"title": "A new title"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_404_if_question_does_not_exist(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/description-of-work/some-made-up-question",
data={
"title": "A new title"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestPublishBrief(BaseApplicationTest):
def test_publish_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
brief_json = api_stubs.brief(status="draft")
brief_questions = brief_json['briefs']
brief_questions.update({
'backgroundInformation': 'test background info',
'contractLength': 'A very long time',
'culturalFitCriteria': ['CULTURAL', 'FIT'],
'culturalWeighting': 10,
'essentialRequirements': 'Everything',
'evaluationType': 'test evaluation type',
'existingTeam': 'team team team',
'importantDates': 'Near future',
'numberOfSuppliers': 5,
'location': 'somewhere',
'organisation': 'test organisation',
'priceWeighting': 80,
'specialistRole': 'communicationsManager',
'specialistWork': 'work work work',
'startDate': 'startDate',
'summary': 'blah',
'technicalWeighting': 10,
'workingArrangements': 'arrangements',
'workplaceAddress': 'address',
})
data_api_client.get_brief.return_value = brief_json
res = self.client.post("/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/publish")
assert res.status_code == 302
assert data_api_client.update_brief_status.called
assert res.location == "http://localhost/buyers/frameworks/digital-outcomes-and-specialists/" \
"requirements/digital-specialists/1234"
def test_publish_brief_with_unanswered_required_questions(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status="draft")
res = self.client.post("/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/publish")
assert res.status_code == 400
assert not data_api_client.update_brief_status.called
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/edit/your-organisation",
data={
"organisation": "GDS"
})
assert res.status_code == 404
assert not data_api_client.update_brief.called
def test_publish_button_available_if_questions_answered(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
brief_json = api_stubs.brief(status="draft")
brief_questions = brief_json['briefs']
brief_questions.update({
'backgroundInformation': 'test background info',
'contractLength': 'A very long time',
'culturalFitCriteria': ['CULTURAL', 'FIT'],
'culturalWeighting': 10,
'essentialRequirements': 'Everything',
'evaluationType': 'test evaluation type',
'existingTeam': 'team team team',
'importantDates': 'Near future',
'location': 'somewhere',
'numberOfSuppliers': 3,
'organisation': 'test organisation',
'priceWeighting': 80,
'specialistRole': 'communicationsManager',
'specialistWork': 'work work work',
'startDate': 'startDate',
'summary': 'blah',
'technicalWeighting': 10,
'workingArrangements': 'arrangements',
'workplaceAddress': 'address',
})
data_api_client.get_brief.return_value = brief_json
res = self.client.get("/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/publish")
page_html = res.get_data(as_text=True)
assert res.status_code == 200
assert 'Publish Requirements' in page_html, page_html
def test_publish_button_unavailable_if_questions_not_answered(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status="draft")
res = self.client.get("/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/publish")
page_html = res.get_data(as_text=True)
assert res.status_code == 200
assert 'Publish Requirements' not in page_html
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestDeleteBriefSubmission(BaseApplicationTest):
def test_delete_brief_submission(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/delete"
)
assert res.status_code == 302
assert data_api_client.delete_brief.called
assert res.location == "http://localhost/buyers"
def test_404_if_framework_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='standstill',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/delete",
)
assert res.status_code == 404
assert not data_api_client.delete_brief.called
def test_cannot_delete_live_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status='live')
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/delete",
)
assert res.status_code == 404
assert not data_api_client.delete_brief.called
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True)
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234)
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/"
"digital-specialists/1234/delete",
data={"delete_confirmed": True})
assert res.status_code == 404
@mock.patch('app.buyers.views.buyers.data_api_client')
class TestBriefSummaryPage(BaseApplicationTest):
def test_show_draft_brief_summary_page(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="draft")
brief_json['briefs']['specialistRole'] = 'communicationsManager'
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 200
page_html = res.get_data(as_text=True)
document = html.fromstring(page_html)
assert (document.xpath('//h1')[0]).text_content().strip() == "I need a thing to do a thing"
assert [e.text_content() for e in document.xpath('//main[@id="content"]//ul/li/a')] == [
'title',
'specialist role',
'location',
'description of work',
'shortlist criteria',
'evaluation criteria',
'Review and publish your requirements',
'How to answer supplier questions',
'How to shortlist suppliers',
'How to evaluate suppliers',
'How to award a contract',
]
assert document.xpath('//a[contains(text(), "Delete")]')
def test_show_live_brief_summary_page(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="live")
brief_json['briefs']['publishedAt'] = "2016-04-02T20:10:00.00000Z"
brief_json['briefs']['specialistRole'] = 'communicationsManager'
brief_json['briefs']["clarificationQuestionsAreClosed"] = True
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 200
page_html = res.get_data(as_text=True)
document = html.fromstring(page_html)
assert (document.xpath('//h1')[0]).text_content().strip() == "I need a thing to do a thing"
assert [e.text_content() for e in document.xpath('//main[@id="content"]//ul/li/a')] == [
'View published requirements',
'Publish questions and answers',
'How to answer supplier questions',
'How to shortlist suppliers',
'How to evaluate suppliers',
'How to award a contract',
]
assert not document.xpath('//a[contains(text(), "Delete")]')
def test_show_closed_brief_summary_page(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="closed")
brief_json['briefs']['publishedAt'] = "2016-04-02T20:10:00.00000Z"
brief_json['briefs']['specialistRole'] = 'communicationsManager'
brief_json['briefs']["clarificationQuestionsAreClosed"] = True
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 200
page_html = res.get_data(as_text=True)
document = html.fromstring(page_html)
assert (document.xpath('//h1')[0]).text_content().strip() == "I need a thing to do a thing"
assert [e.text_content() for e in document.xpath('//main[@id="content"]//ul/li/a')] == [
'View published requirements',
'View and shortlist suppliers',
'How to shortlist suppliers',
'How to evaluate suppliers',
'How to award a contract',
]
assert not document.xpath('//a[contains(text(), "Delete")]')
def test_show_clarification_questions_page_for_live_brief_with_no_questions(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="live")
brief_json['briefs']['publishedAt'] = "2016-04-02T20:10:00.00000Z"
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/supplier-questions" # noqa
)
assert res.status_code == 200
page_html = res.get_data(as_text=True)
assert "Supplier questions" in page_html
assert "No questions or answers have been published" in page_html
assert "Answer a supplier question" in page_html
def test_show_clarification_questions_page_for_live_brief_with_one_question(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="live", clarification_questions=[
{"question": "Why is my question a question?",
"answer": "Because",
"publishedAt": "2016-01-01T00:00:00.000000Z"}
])
brief_json['briefs']['publishedAt'] = "2016-04-02T20:10:00.00000Z"
brief_json['briefs']["clarificationQuestionsAreClosed"] = True
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/supplier-questions" # noqa
)
assert res.status_code == 200
page_html = res.get_data(as_text=True)
assert "Supplier questions" in page_html
assert "Why is my question a question?" in page_html
assert "Because" in page_html
assert "Answer a supplier question" in page_html
assert "No questions or answers have been published" not in page_html
def test_404_if_framework_does_not_allow_brief(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 404
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234)
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 404
@mock.patch("app.buyers.views.buyers.content_loader")
def test_links_to_sections_go_to_the_correct_pages_whether_they_be_sections_or_questions(self, content_loader, data_api_client): # noqa
with self.app.app_context():
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief()
content_fixture = ContentLoader('tests/fixtures/content')
content_fixture.load_manifest('dos', 'data', 'edit_brief')
content_loader.get_manifest.return_value = content_fixture.get_manifest('dos', 'edit_brief')
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234"
)
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
section_steps = document.xpath(
'//*[@id="content"]/div/div/ol[contains(@class, "instruction-list")]')
section_1_link = section_steps[0].xpath('li//a[contains(text(), "section 1")]')
section_2_link = section_steps[0].xpath('li//a[contains(text(), "section 2")]')
section_4_link = section_steps[0].xpath('li//a[contains(text(), "section 4")]')
# section with multiple questions
assert section_1_link[0].get('href').strip() == \
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-1'
# section with single question
assert section_2_link[0].get('href').strip() == \
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/edit/section-2/required2' # noqa
# section with single question and a description
assert section_4_link[0].get('href').strip() == \
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/section-4'
@mock.patch("app.buyers.views.buyers.data_api_client")
class TestAddBriefClarificationQuestion(BaseApplicationTest):
def test_show_brief_clarification_question_form(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug="digital-outcomes-and-specialists",
status="live",
lots=[
api_stubs.lot(slug="digital-specialists", allows_brief=True)
])
brief_json = api_stubs.brief(status="live")
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question")
assert res.status_code == 200
def test_add_brief_clarification_question(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug="digital-outcomes-and-specialists",
status="live",
lots=[
api_stubs.lot(slug="digital-specialists", allows_brief=True)
])
brief_json = api_stubs.brief(status="live")
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 302
data_api_client.add_brief_clarification_question.assert_called_with(
"1234", "Why?", "Because", "buyer@email.com")
# test that the redirect ends up on the right page
assert res.headers['Location'].endswith(
'/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/supplier-questions' # noqa
) is True
def test_404_if_framework_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='pending',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief()
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 404
assert not data_api_client.add_brief_clarification_question.called
def test_404_if_framework_does_not_allow_brief(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=False),
]
)
brief_json = api_stubs.brief()
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 404
assert not data_api_client.add_brief_clarification_question.called
def test_404_if_brief_does_not_belong_to_user(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(user_id=234)
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 404
assert not data_api_client.add_brief_clarification_question.called
def test_404_if_brief_is_not_live(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
brief_json = api_stubs.brief(status="draft")
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 404
assert not data_api_client.add_brief_clarification_question.called
def test_validation_error(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug="digital-outcomes-and-specialists",
status="live",
lots=[
api_stubs.lot(slug="digital-specialists", allows_brief=True)
])
brief_json = api_stubs.brief(status="live")
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
data_api_client.add_brief_clarification_question.side_effect = HTTPError(
mock.Mock(status_code=400),
{"question": "answer_required"})
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
document = html.fromstring(res.get_data(as_text=True))
assert res.status_code == 400
assert len(document.cssselect(".validation-message")) == 1, res.get_data(as_text=True)
def test_api_error(self, data_api_client):
self.login_as_buyer()
data_api_client.get_framework.return_value = api_stubs.framework(
slug="digital-outcomes-and-specialists",
status="live",
lots=[
api_stubs.lot(slug="digital-specialists", allows_brief=True)
])
brief_json = api_stubs.brief(status="live")
brief_json['briefs']["clarificationQuestionsAreClosed"] = False
data_api_client.get_brief.return_value = brief_json
data_api_client.add_brief_clarification_question.side_effect = HTTPError(
mock.Mock(status_code=500))
res = self.client.post(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements"
"/digital-specialists/1234/supplier-questions/answer-question",
data={
"question": "Why?",
"answer": "Because",
})
assert res.status_code == 500
@mock.patch("app.buyers.views.buyers.data_api_client")
class TestViewBriefResponsesPage(BaseApplicationTest):
two_good_three_bad_responses = {
"briefResponses": [
{"essentialRequirements": [True, True, True, True, True]},
{"essentialRequirements": [True, False, True, True, True]},
{"essentialRequirements": [True, True, False, False, True]},
{"essentialRequirements": [True, True, True, True, True]},
{"essentialRequirements": [True, True, True, True, False]},
]
}
def test_page_shows_correct_count_of_eligible_suppliers(self, data_api_client):
data_api_client.find_brief_responses.return_value = self.two_good_three_bad_responses
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes")
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1/responses"
)
page = res.get_data(as_text=True)
assert res.status_code == 200
assert "2 suppliers" in page
assert "responded to your requirements and meet all your essential skills and experience." in page
def test_page_does_not_pluralise_for_single_response(self, data_api_client):
data_api_client.find_brief_responses.return_value = {
"briefResponses": [{"essentialRequirements": [True, True, True, True, True]}]
}
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes")
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1/responses"
)
page = res.get_data(as_text=True)
assert res.status_code == 200
assert "1 supplier" in page
assert "responded to your requirements and meets all your essential skills and experience." in page
def test_page_shows_correct_message_if_no_eligible_suppliers(self, data_api_client):
data_api_client.find_brief_responses.return_value = {
"briefResponses": [{"essentialRequirements": [True, False, True, True, True]}]
}
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes")
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses"
)
page = res.get_data(as_text=True)
assert res.status_code == 200
assert "No suppliers met your essential skills and experience requirements." in page
def test_page_shows_csv_download_link_if_brief_closed(self, data_api_client):
data_api_client.find_brief_responses.return_value = self.two_good_three_bad_responses
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes", status='closed')
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses"
)
document = html.fromstring(res.get_data(as_text=True))
csv_link = document.xpath(
'//a[@href="/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses/download"]' # noqa
)[0]
assert res.status_code == 200
assert self._strip_whitespace(csv_link.text_content()) == \
"CSVdocument:Downloadsupplierresponsesto‘Ineedathingtodoathing’"
def test_page_does_not_show_csv_download_link_if_brief_open(self, data_api_client):
data_api_client.find_brief_responses.return_value = self.two_good_three_bad_responses
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes", status='live')
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses"
)
page = res.get_data(as_text=True)
document = html.fromstring(page)
csv_link = document.xpath(
'//a[@href="/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses/download"]' # noqa
)
assert res.status_code == 200
assert len(csv_link) == 0
assert "The file will be available here once applications have closed." in page
def test_404_if_brief_does_not_belong_to_buyer(self, data_api_client):
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes", user_id=234)
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses"
)
assert res.status_code == 404
def test_404_if_lot_does_not_allow_brief(self, data_api_client):
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-outcomes', allows_brief=False),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(lot_slug="digital-outcomes")
self.login_as_buyer()
res = self.client.get(
"/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-outcomes/1234/responses"
)
assert res.status_code == 404
@mock.patch("app.buyers.views.buyers.data_api_client")
class TestDownloadBriefResponsesCsv(BaseApplicationTest):
url = "/buyers/frameworks/digital-outcomes-and-specialists/requirements/digital-specialists/1234/responses" \
"/download"
brief = api_stubs.brief(status='closed')
brief['briefs']['essentialRequirements'] = ["E1", "E2"]
brief['briefs']['niceToHaveRequirements'] = ["Nice1", "Nice2", "Nice3"]
brief_responses = {
"briefResponses": [
{
"supplierName": "Kev's Butties",
"availability": "Next Tuesday",
"dayRate": "£1.49",
"essentialRequirements": [True, True],
"niceToHaveRequirements": [True, False, False],
"respondToEmailAddress": "test1@email.com",
},
{
"supplierName": "Kev's Pies",
"availability": "A week Friday",
"dayRate": "£3.50",
"essentialRequirements": [True, True],
"niceToHaveRequirements": [False, True, True],
"respondToEmailAddress": "test2@email.com",
},
{
"supplierName": "Kev's Doughnuts",
"availability": "As soon as the sugar is delivered",
"dayRate": "£10 a dozen",
"essentialRequirements": [True, False],
"niceToHaveRequirements": [True, True, False],
"respondToEmailAddress": "test3@email.com",
},
{
"supplierName": "Kev's Fried Noodles",
"availability": "After Christmas",
"dayRate": "£12.35",
"essentialRequirements": [False, True],
"niceToHaveRequirements": [True, True, True],
"respondToEmailAddress": "test4@email.com",
},
{
"supplierName": "Kev's Pizza",
"availability": "Within the hour",
"dayRate": "£350",
"essentialRequirements": [False, False],
"niceToHaveRequirements": [False, False, False],
"respondToEmailAddress": "test5@email.com",
},
]
}
tricky_character_responses = {
"briefResponses": [
{
"supplierName": "K,ev’s \"Bu,tties",
"availability": "❝Next — Tuesday❞",
"dayRate": "¥1.49,",
"essentialRequirements": [True, True],
"niceToHaveRequirements": [True, False, False],
"respondToEmailAddress": "test1@email.com",
},
{
"supplierName": "Kev\'s \'Pies",
"availability": ""A week Friday&rdquot;",
"dayRate": "€3.50",
"essentialRequirements": [True, True],
"niceToHaveRequirements": [False, True, True],
"respondToEmailAddress": "te,st2@email.com",
},
]
}
def test_csv_includes_all_eligible_responses_and_no_ineligible_responses(self, data_api_client):
data_api_client.find_brief_responses.return_value = self.brief_responses
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = self.brief
self.login_as_buyer()
res = self.client.get(self.url)
page = res.get_data(as_text=True)
lines = page.split('\n')
# Only the two eligible responses are included
assert len(lines) == 4
assert lines[0] == "Supplier,Date the specialist can start work,Day rate,Nice1,Nice2,Nice3,Email address"
# The response with two nice-to-haves is sorted above the one with only one
assert lines[1] == "Kev's Pies,A week Friday,£3.50,False,True,True,test2@email.com"
assert lines[2] == "Kev's Butties,Next Tuesday,£1.49,True,False,False,test1@email.com"
assert lines[-1] == ""
def test_csv_handles_tricky_characters(self, data_api_client):
data_api_client.find_brief_responses.return_value = self.tricky_character_responses
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = self.brief
self.login_as_buyer()
res = self.client.get(self.url)
page = res.get_data(as_text=True)
lines = page.split('\n')
assert len(lines) == 4
assert lines[0] == "Supplier,Date the specialist can start work,Day rate,Nice1,Nice2,Nice3,Email address"
# The values with internal commas are surrounded by quotes, and all other characters appear as in the data
assert lines[1] == 'Kev\'s \'Pies,&ldquot;A week Friday&rdquot;,€3.50,False,True,True,"te,st2@email.com"'
assert lines[2] == '"K,ev’s ""Bu,tties",❝Next — Tuesday❞,"¥1.49,",True,False,False,test1@email.com'
assert lines[-1] == ""
def test_404_if_brief_does_not_belong_to_buyer(self, data_api_client):
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(user_id=234, status='closed')
self.login_as_buyer()
res = self.client.get(self.url)
assert res.status_code == 404
def test_404_if_brief_is_not_closed(self, data_api_client):
data_api_client.get_framework.return_value = api_stubs.framework(
slug='digital-outcomes-and-specialists',
status='live',
lots=[
api_stubs.lot(slug='digital-specialists', allows_brief=True),
]
)
data_api_client.get_brief.return_value = api_stubs.brief(status='live')
self.login_as_buyer()
res = self.client.get(self.url)
assert res.status_code == 404
| 42.214134
| 145
| 0.622125
| 8,756
| 79,447
| 5.386364
| 0.052193
| 0.036809
| 0.068359
| 0.095923
| 0.894578
| 0.875644
| 0.866548
| 0.856901
| 0.846341
| 0.832496
| 0
| 0.019408
| 0.26648
| 79,447
| 1,881
| 146
| 42.236576
| 0.789639
| 0.008169
| 0
| 0.686324
| 0
| 0.017566
| 0.278262
| 0.187721
| 0
| 0
| 0
| 0
| 0.102258
| 1
| 0.053325
| false
| 0
| 0.005646
| 0
| 0.069636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4774c5004900c2cf17d2ec3e2417bccb96e7d8e3
| 156
|
py
|
Python
|
libft/layers/__init__.py
|
kcosta42/Multilayer_Perceptron
|
51e2f9e0532c6e1e9b826b12171903b780e0ba4f
|
[
"MIT"
] | 1
|
2020-09-26T10:40:45.000Z
|
2020-09-26T10:40:45.000Z
|
libft/layers/__init__.py
|
kcosta42/Multilayer_Perceptron
|
51e2f9e0532c6e1e9b826b12171903b780e0ba4f
|
[
"MIT"
] | null | null | null |
libft/layers/__init__.py
|
kcosta42/Multilayer_Perceptron
|
51e2f9e0532c6e1e9b826b12171903b780e0ba4f
|
[
"MIT"
] | null | null | null |
from libft.layers.dense import Dense
from libft.layers.dropout import Dropout
from libft.layers.input import Input
__all__ = ['Dense', 'Dropout', 'Input']
| 26
| 40
| 0.775641
| 22
| 156
| 5.318182
| 0.363636
| 0.230769
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 156
| 5
| 41
| 31.2
| 0.847826
| 0
| 0
| 0
| 0
| 0
| 0.108974
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
478ca3cd22c91b846ba218186f1fda6eb6db009b
| 39
|
py
|
Python
|
autodeploy/loader/__init__.py
|
kartik4949/AutoDeploy
|
af7b3b32954a574307849ababb05fa2f4a80f52e
|
[
"MIT"
] | 46
|
2021-08-11T12:24:15.000Z
|
2022-01-17T19:34:48.000Z
|
autodeploy/loader/__init__.py
|
kartik4949/AutoDeploy
|
af7b3b32954a574307849ababb05fa2f4a80f52e
|
[
"MIT"
] | 17
|
2021-08-11T16:06:55.000Z
|
2021-10-05T09:44:57.000Z
|
autodeploy/loader/__init__.py
|
kartik4949/AutoDeploy
|
af7b3b32954a574307849ababb05fa2f4a80f52e
|
[
"MIT"
] | 10
|
2021-08-11T15:57:29.000Z
|
2021-12-04T16:44:13.000Z
|
from ._model_loader import ModelLoader
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47abacbc35f7affcecbcb1f9d427052a0d05c0fe
| 42
|
py
|
Python
|
tests/conftest.py
|
tony/libvcs
|
05db3a77b53326502cbb5bc76e8a6985cd271182
|
[
"MIT"
] | 35
|
2016-07-16T21:39:10.000Z
|
2017-11-24T02:52:13.000Z
|
tests/conftest.py
|
tony/libvcs
|
05db3a77b53326502cbb5bc76e8a6985cd271182
|
[
"MIT"
] | 70
|
2016-06-20T06:45:12.000Z
|
2018-03-06T14:57:35.000Z
|
tests/conftest.py
|
tony/libvcs
|
05db3a77b53326502cbb5bc76e8a6985cd271182
|
[
"MIT"
] | 2
|
2016-06-21T13:59:00.000Z
|
2017-05-12T17:49:45.000Z
|
from libvcs.conftest import * # noqa F40
| 21
| 41
| 0.738095
| 6
| 42
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.190476
| 42
| 1
| 42
| 42
| 0.852941
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a14ab1f655bfde79e690e86d8dc662bc454a7f9
| 64
|
py
|
Python
|
multilingual_t5/baseline_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/baseline_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/baseline_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
"""baseline_hi dataset."""
from .baseline_hi import BaselineHi
| 16
| 35
| 0.765625
| 8
| 64
| 5.875
| 0.75
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 64
| 3
| 36
| 21.333333
| 0.824561
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a22a59fd5981f5e8dfc76843d22235b0c0569f2
| 181
|
py
|
Python
|
models/rnn/__init__.py
|
pdeubel/world-models-testing
|
36f2baf79898452e677fe141f11ba434f92e9218
|
[
"MIT"
] | null | null | null |
models/rnn/__init__.py
|
pdeubel/world-models-testing
|
36f2baf79898452e677fe141f11ba434f92e9218
|
[
"MIT"
] | null | null | null |
models/rnn/__init__.py
|
pdeubel/world-models-testing
|
36f2baf79898452e677fe141f11ba434f92e9218
|
[
"MIT"
] | null | null | null |
from models.rnn.base_rnn import BaseRNN, BaseMDNRNN, BaseSimpleRNN
from models.rnn.mdn_rnn import StandardMDNRNN, MDNRNNWithBCE
from models.rnn.lstm import LSTMWithBCE, LSTMWithMSE
| 45.25
| 66
| 0.856354
| 24
| 181
| 6.375
| 0.583333
| 0.196078
| 0.254902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088398
| 181
| 3
| 67
| 60.333333
| 0.927273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a49db9af518a6bc0a0bbe357c509abc28bb297d
| 1,679
|
py
|
Python
|
main.py
|
feliphebueno/Tavern
|
d2026f8a28b898230a3c047be93b189f40305884
|
[
"MIT"
] | null | null | null |
main.py
|
feliphebueno/Tavern
|
d2026f8a28b898230a3c047be93b189f40305884
|
[
"MIT"
] | null | null | null |
main.py
|
feliphebueno/Tavern
|
d2026f8a28b898230a3c047be93b189f40305884
|
[
"MIT"
] | null | null | null |
"""
Project's entry-point
"""
import logging.config
import os
import json
import yaml
from tavern.core import run
token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpYXQiOjE1MzI4MDU4MjYsImlzcyI6Ik9ueXhwcmV2IiwiZXhwIjoxNTMyODQ5MDI2LCJuYmYiOjE1MzI4MDU4MjUsImRhdGEiOnsiYXBwIjp7ImFwaUlkIjoiODNjYzEyYTVmY2M1IiwiYXBwQ29kIjoxMiwiYXBwT3NDb2QiOjEsIm5hbWUiOiJSZWNhZCIsImFwaWtleSI6IjgzY2MxMmE1ZmNjNSIsInNlY3VyaXR5IjoiSSIsImxhdW5jaGVySWdub3JlIjoiTiIsImNhY2hlIjoiSSIsInZlcnNpb24iOiIxLjAuMCJ9LCJ1c2VyIjp7InBmQ29kIjoxLCJ1c3VhcmlvQ29kIjoxLCJvcmdhb0NvZCI6MSwib3JnYW9FbnRpZGFkZUNvZCI6MSwicHJpdkNvZCI6MTUsInN1cGVyVXN1YXJpbyI6IlMiLCJub21lIjoiRkVMSVBIRSBBVUdVU1RPIEJVRU5PIiwidXVpZCI6IjQxNDhkOTM4NWUyMTgxMTQzNmMxZDBmNjQ3NjE5MyIsImxvZ2luIjoiMDI0MDE2MjcxNDYiLCJmb3RvIjoiaHR0cHM6XC9cL3N0b3JhZ2Uub255eGVycC5jb20uYnJcLzhkOTBkNmY1ZDM1YjQ3NzlmMjM1NTY2NDNhM2YyZS5wbmciLCJjZWx1bGFyIjpudWxsLCJlbWFpbCI6bnVsbCwidXNlci1sZW5ndGgiOjgsInBmaWQiOiI0Nzc5MDU1OTMzMDg3MzAyODA5Iiwib2lkIjoiM2E5NWMzZTI0NGUzOTBlN2NhOGNlZDZkM2FiYzBiIiwib2VpZCI6IjVlOTU3NjdmMWFjNTVmODFjNWM0ZGQ5YWUyMTBlMyIsImZ1c29Ib3JhcmlvIjoiLTMiLCJtb2VkYSI6ImJyYXppbGlhbl9yZWFsIiwiaWRpb21hIjoicHQtYnIiLCJkYXRhIjoiYnJhemlsaWFuX2RhdGVfZm9ybWF0IiwibGF1bmNoZXIiOiJfYmxhbmsifX19.dSfTOXt9HDVeqEf4xTER0sN6EXIez2etB1ajZoPRbtY'
# Load the JWTs (app and user)
os.environ.update({
"APP_TOKEN": token,
"USER_TOKEN": token,
"USER_TOKEN_NO_AUTH": token,
"URL_API": "https://protokol-api.onyxapis.com"
})
with open("tests/logging.yaml", "r") as spec_file:
settings = yaml.load(spec_file, Loader=yaml.SafeLoader)
logging.config.dictConfig(settings)
test_info = run('tests/main.tavern.yaml')
print(json.dumps(test_info))
exit(int(test_info['all_passed'] is False))
| 55.966667
| 1,123
| 0.89994
| 88
| 1,679
| 17.034091
| 0.625
| 0.016011
| 0.018679
| 0.02535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078272
| 0.048839
| 1,679
| 29
| 1,124
| 57.896552
| 0.860363
| 0.030971
| 0
| 0
| 0
| 0
| 0.766523
| 0.70105
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0.105263
| 0.263158
| 0
| 0.263158
| 0.052632
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
9a66978d4fea6ce59934d0b9d530cf6749d0f37d
| 386
|
py
|
Python
|
terrascript/provider/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/provider/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/provider/rancher2.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/provider/rancher2.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:25:37 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.provider.rancher2
#
# instead of
#
# >>> import terrascript.provider.rancher.rancher2
#
# This is only available for 'official' and 'partner' providers.
from terrascript.provider.rancher.rancher2 import *
| 25.733333
| 73
| 0.748705
| 49
| 386
| 5.897959
| 0.714286
| 0.262976
| 0.186851
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047761
| 0.132124
| 386
| 14
| 74
| 27.571429
| 0.814925
| 0.797927
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7bd51e3371ba8530272be9c8b4818d2e9097453f
| 161
|
py
|
Python
|
modernquiz/modernquiz/views.py
|
yuriymironov96/modernquiz-redone
|
e7b0cbe6ccdbe7fdec993d01d774e96887274184
|
[
"MIT"
] | null | null | null |
modernquiz/modernquiz/views.py
|
yuriymironov96/modernquiz-redone
|
e7b0cbe6ccdbe7fdec993d01d774e96887274184
|
[
"MIT"
] | 9
|
2017-11-18T14:16:11.000Z
|
2017-12-14T06:39:52.000Z
|
modernquiz/modernquiz/views.py
|
yuriymironov96/modernquiz-redone
|
e7b0cbe6ccdbe7fdec993d01d774e96887274184
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect
from django.core.urlresolvers import reverse_lazy
def redirect_to_home(request):
return redirect(reverse_lazy('home'))
| 32.2
| 49
| 0.826087
| 22
| 161
| 5.863636
| 0.636364
| 0.155039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099379
| 161
| 5
| 50
| 32.2
| 0.889655
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
7bf495220dca25ca1f3238aae36d671f73cb5c41
| 230,382
|
py
|
Python
|
llink/script/llink_gen.py
|
chipsalliance/aib-protocols
|
98858e6707f30ed6ea714598e3e324d754d82be0
|
[
"Apache-2.0"
] | 11
|
2021-09-01T19:48:44.000Z
|
2022-03-10T16:13:59.000Z
|
llink/script/llink_gen.py
|
chipsalliance/aib-protocols
|
98858e6707f30ed6ea714598e3e324d754d82be0
|
[
"Apache-2.0"
] | 86
|
2021-07-16T17:55:30.000Z
|
2022-03-23T20:18:23.000Z
|
llink/script/llink_gen.py
|
chipsalliance/aib-protocols
|
98858e6707f30ed6ea714598e3e324d754d82be0
|
[
"Apache-2.0"
] | 4
|
2021-09-18T03:59:01.000Z
|
2022-01-30T09:14:37.000Z
|
############################################################
##
## Copyright (C) 2021 Eximius Design
##
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
############################################################
from argparse import ArgumentParser
import os
import re
from shutil import copyfile
from shutil import rmtree
import subprocess
import sys
import math
import pprint
from collections import namedtuple
from operator import itemgetter
import llink_dv_packet_postproc
import global_struct
import packetization
import galt
gen_index_msb = global_struct.gen_index_msb
gen_direction = global_struct.gen_direction
gen_llink_concat_credit = global_struct.gen_llink_concat_credit
gen_llink_concat_fifoname = global_struct.gen_llink_concat_fifoname
gen_llink_concat_ovrd = global_struct.gen_llink_concat_ovrd
gen_llink_concat_pushbit = global_struct.gen_llink_concat_pushbit
gen_llink_debug_status = global_struct.gen_llink_debug_status
gen_llink_user_enable = global_struct.gen_llink_user_enable
gen_llink_user_fifoname = global_struct.gen_llink_user_fifoname
gen_llink_user_ready = global_struct.gen_llink_user_ready
gen_llink_user_valid = global_struct.gen_llink_user_valid
print_verilog_assign = global_struct.print_verilog_assign
print_verilog_io_line = global_struct.print_verilog_io_line
print_verilog_logic_line = global_struct.print_verilog_logic_line
print_verilog_regnb = global_struct.print_verilog_regnb
sprint_verilog_assign = global_struct.sprint_verilog_assign
sprint_verilog_case = global_struct.sprint_verilog_case
sprint_verilog_logic_line = global_struct.sprint_verilog_logic_line
##########################################################################################
## Major Structures:
# configuration - dictionary
# - Contains all configuration and calculated data.
# - One element is 'LL_LIST' which points to list_logic_links
#
# list_logic_links - list
# - List of logiclink
#
# logiclink - dictionary
# - contains details about a single Logic Link (name, direction, width, etc).
#
# The code has many options (configurations). The "base" configuration is a
# logic link without packetization, GALT, or Asymmetric Modes. This mode is
# known as "Fixed Allocation" mode.
#
# Note that, internal to this script, we rename several features to shorter names. Specifically:
#
# RSTRUCT = Replicated Struct which is used for Asymmetric Modes
#
# GALT = Alternate Gen - This is used when we have a LLINK that can "dynamically" switch
# between Gen2 and Gen1 operations.
#
# Packetizing is ... packetizing (no rename there)
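#
# Illustrative shapes (editorial sketch; names hypothetical, key lists
# abridged -- the parser below is the authoritative source of fields):
#
#   configuration = {'MODULE': 'my_link', 'NUM_CHAN': 2, 'CHAN_TYPE': 'Gen2',
#                    'TX_RATE': 'Full', 'RX_RATE': 'Full',
#                    'LL_LIST': list_logic_links}
#   logiclink     = {'NAME': 'ch0', 'DIR': 'output', 'WIDTH_MAIN': 40,
#                    'HASVALID': True, 'HASREADY': False,
#                    'SIGNALLIST_MAIN': [...], 'SIGNALLIST_GALT': []}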
##########################################################################################
## FIXME, add marker or strobe disable for GALT mode
##########################################################################################
## parse_config_file
## As the name implies, this function parses the configuration file.
## The result is that the configuration dictionary holds all of the configuration info.
## It also builds up the raw information for each logic link (see the example input below).
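## A hedged example of the expected input (hypothetical names; each line is
## "key value [width [lsbit]]", and { ... } brackets one logic link):
##
##   MODULE my_module
##   NUM_CHAN 2
##   CHAN_TYPE Gen2
##   LLINK ch0
##   {
##   output tx_data 40
##   output tx_push valid
##   }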
def parse_config_file(cfgfile):
if not os.path.exists(cfgfile):
print("ERROR: File {0} does not exists!!!\n".format(cfgfile))
sys.exit(1)
cf = open(cfgfile, "r")
## Initialize variables
linkname = 'null'
mux_mode = 'MAIN'
ll_sig_lsb = 0
configuration = dict()
list_logic_links = list()
# Configure Defaults
configuration["TX_RATE"] = 'Full'
configuration["RX_RATE"] = 'Full'
configuration["TX_DBI_PRESENT"] = False
configuration["RX_DBI_PRESENT"] = False
configuration['TX_USER_MARKER'] = False
configuration['TX_USER_STROBE'] = False
configuration['RX_USER_MARKER'] = False
configuration['RX_USER_STROBE'] = False
configuration['TX_ENABLE_MARKER'] = False
configuration['TX_ENABLE_STROBE'] = False
configuration['RX_ENABLE_MARKER'] = False
configuration['RX_ENABLE_STROBE'] = False
configuration['TX_PERSISTENT_MARKER'] = False
configuration['RX_PERSISTENT_MARKER'] = False
configuration['TX_PERSISTENT_STROBE'] = False
configuration['RX_PERSISTENT_STROBE'] = False
configuration['TX_STROBE_GEN2_LOC'] = 78
configuration['RX_STROBE_GEN2_LOC'] = 79
configuration['TX_MARKER_GEN2_LOC'] = 78
configuration['RX_MARKER_GEN2_LOC'] = 79
configuration['TX_STROBE_GEN1_LOC'] = 38
configuration['RX_STROBE_GEN1_LOC'] = 38
configuration['TX_MARKER_GEN1_LOC'] = 39
configuration['RX_MARKER_GEN1_LOC'] = 39
configuration['TX_STROBE_GEN2_LOC_USER_SPECIFY'] = False
configuration['RX_STROBE_GEN2_LOC_USER_SPECIFY'] = False
configuration['TX_MARKER_GEN2_LOC_USER_SPECIFY'] = False
configuration['RX_MARKER_GEN2_LOC_USER_SPECIFY'] = False
configuration['TX_STROBE_GEN1_LOC_USER_SPECIFY'] = False
configuration['RX_STROBE_GEN1_LOC_USER_SPECIFY'] = False
configuration['TX_MARKER_GEN1_LOC_USER_SPECIFY'] = False
configuration['RX_MARKER_GEN1_LOC_USER_SPECIFY'] = False
configuration['RX_REG_PHY'] = False
configuration['TX_REG_PHY'] = False
configuration['TX_ENABLE_PACKETIZATION'] = False
configuration['RX_ENABLE_PACKETIZATION'] = False
configuration['TX_PACKET_MAX_SIZE'] = 0
configuration['RX_PACKET_MAX_SIZE'] = 0
configuration['GEN2_AS_GEN1_EN'] = False
configuration['TX_SPARE_WIDTH'] = 0
configuration['RX_SPARE_WIDTH'] = 0
configuration['GEN1_USER_CONFIG'] = False
configuration['GEN2_USER_CONFIG'] = False
configuration['REPLICATED_STRUCT'] = False
configuration['RSTRUCT_MULTIPLY_FACTOR'] = 1
for line_no, line in enumerate(cf):
## Remove spaces, empty lines, etc.
line = line.strip("\n")
line = re.sub(r'\t', ' ', line)
line = re.sub(r'\s+', ' ', line)
line = re.sub(r' *$', '', line)
line = re.sub(r'//.*', '', line)
line = line.lstrip()
if re.search("^\s*//", line):
continue
if re.search("^\s*$", line):
continue
if (global_struct.g_CFG_DEBUG):
print ("CFGINPUT " , line)
# Specify defaults for variables
key = 'null'
value = 'null'
width = 'null'
lsbit = '0'
## Each line should be 4 or fewer fields
## This section splits out the fields into 4 values.
if len(line.split(' ')) > 4:
print("ERROR: File {0} has more than 4 arguments on line ".format(cfgfile, repr(line_no+1)))
print (line, line.split(' '))
sys.exit(1)
elif len(line.split(' ')) == 0:
#empty line. Drop
continue
elif len(line.split(' ')) == 1:
key = line
value = 'null'
width = 'null'
lsbit = '0'
elif len(line.split(' ')) == 2:
key,value = line.split(' ')
width = 'null'
lsbit = '0'
elif len(line.split(' ')) == 3:
key,value,width = line.split(' ')
lsbit = '0'
elif len(line.split(' ')) == 4:
key,value,width,lsbit = line.split(' ')
if (global_struct.g_CFG_DEBUG):
print ("NEW: key,value,width,lsbit", key,value,width,lsbit)
if key == "MODULE" or key == "module" :
configuration[key.upper()] = value.lower()
continue
if key == "LLINK" or key == "llink" or key == "name":
linkname = value.lower()
mux_mode = 'MAIN'
continue
if key == "NUM_CHAN" or key == "NUM_PHY": ## NUM_PHY is deprecated but maintained for backware compatablity
if key == "NUM_PHY":
print ("WARNING: NUM_PHY is deprecated. Use NUM_CHAN instead.")
key = "NUM_CHAN"
if int(value) > 24:
print("ERROR: Key {} value {} exceeds max of 24 ".format(key,value))
sys.exit(1)
configuration[key] = int(value)
continue
if key == "CHAN_TYPE" or key == "PHY_TYPE": ## PHY_TYPE is deprecated but maintained for backware compatablity
if key == "PHY_TYPE":
print ("WARNING: PHY_TYPE is deprecated. Use CHAN_TYPE instead.")
key = "CHAN_TYPE"
if value != "Gen1Only" and value != "Gen2Only" and value != "Gen2" and value != "AIBO" and value != "Tiered" :
print("ERROR: Key {} value {} is not Gen1Only or Gen2Only or Gen2 or AIBO or Tiered".format(key,value))
sys.exit(1)
configuration[key] = value
continue
if key == "TX_RATE" or key == "RX_RATE":
if value != "Full" and value != "Half" and value != "Quarter" :
print("ERROR: Key {} value {} is not Full or Half or Quarter ".format(key,value))
sys.exit(1)
configuration[key] = value
continue
if ( key == "PACKETIZATION_PACKING_EN" ) :
if value.lower() != "true" and value.lower() != "false" and value.lower() != "yes" and value.lower() != "no" :
print("ERROR: Key {} value {} is not True or False ".format(key,value))
sys.exit(1)
if value.lower() == "true" or value.lower() == "yes" :
global_struct.g_PACKETIZATION_PACKING_EN = True
else:
global_struct.g_PACKETIZATION_PACKING_EN = False
continue
if ( key == "REPLICATED_STRUCT" or key == "SUPPORT_ASYMMETRIC") :
key = "REPLICATED_STRUCT"
if value.lower() != "true" and value.lower() != "false" and value.lower() != "yes" and value.lower() != "no" :
print("ERROR: Key {} value {} is not True or False ".format(key,value))
sys.exit(1)
if value.lower() == "true" or value.lower() == "yes" :
configuration[key] = True
else:
configuration[key] = False
continue
if ( key == "TX_DBI_PRESENT" or key == "RX_DBI_PRESENT" or
key == "TX_ENABLE_PACKETIZATION" or key == "RX_ENABLE_PACKETIZATION" or
key == "TX_PERSISTENT_STROBE" or key == "RX_PERSISTENT_STROBE" or
key == "TX_PERSISTENT_MARKER" or key == "RX_PERSISTENT_MARKER" or
key == "TX_USER_MARKER" or key == "RX_USER_MARKER" or
key == "TX_USER_STROBE" or key == "RX_USER_STROBE" or
key == "TX_ENABLE_MARKER" or key == "RX_ENABLE_MARKER" or
key == "TX_ENABLE_STROBE" or key == "RX_ENABLE_STROBE" or
key == "TX_REG_PHY" or key == "RX_REG_PHY" ) :
if value.lower() != "true" and value.lower() != "false" and value.lower() != "yes" and value.lower() != "no" :
print("ERROR: Key {} value {} is not True or False ".format(key,value))
sys.exit(1)
if value.lower() == "true" or value.lower() == "yes" :
configuration[key] = True
else:
configuration[key] = False
continue
if ( key == "TX_PACKET_MAX_SIZE" or key == "RX_PACKET_MAX_SIZE" ):
configuration[key] = int(value)
continue
if ( key == "TX_STROBE_GEN2_LOC" or key == "RX_STROBE_GEN2_LOC" or
key == "TX_MARKER_GEN2_LOC" or key == "RX_MARKER_GEN2_LOC" ):
configuration[key+'_USER_SPECIFY'] = True
configuration[key] = int(value)
continue
if ( key == "TX_STROBE_GEN1_LOC" or key == "RX_STROBE_GEN1_LOC" or
key == "TX_MARKER_GEN1_LOC" or key == "RX_MARKER_GEN1_LOC" ):
configuration[key+'_USER_SPECIFY'] = True
configuration[key] = int(value)
continue
if key == "{": ## Begin of a Logic LInk
if linkname == 'null':
print("ERROR: File {0} is missing link name ".format(cfgfile, repr(line_no+1)))
sys.exit(1)
mux_mode = 'MAIN' ## Default unless told otherwise. Note: Gen1Only will still be called Gen2
ll_sig_lsb = 0
logiclink = {'NAME':linkname,
'DIR':'null',
'WIDTH_MAIN':0,
'WIDTH_GALT':0,
'HASVALID':False,
'HASVALID_NOREADY_REPSTRUCT':False,
'HASVALID_NOREADY_NOREP':False,
'HASREADY':False,
'SIGNALLIST_MAIN':[],
'SIGNALLIST_GALT':[] }
continue
if key.upper() == "GEN2_AS_GEN1" or key.upper() == "MAIN": ## Begin of a GALT Section
mux_mode = key.upper()
if mux_mode == "GEN2_AS_GEN1":
mux_mode = 'GALT'
if mux_mode == 'GALT' and logiclink['WIDTH_MAIN'] == 0:
print("ERROR: File {0} is has mux_mode GALT on line {1} before defining MAIN first.".format(cfgfile, repr(line_no+1)))
sys.exit(1)
if configuration['CHAN_TYPE'] != 'Gen2' :
print("ERROR: File {0} is has mux_mode GEN2_AS_GEN1 for MAIN or GALT support on line {1}, but CHAN_TYPE is {2}. CHAN_TYPE must be Gen2 for this feature".format(cfgfile, repr(line_no+1), configuration['CHAN_TYPE']))
sys.exit(1)
ll_sig_lsb = 0
continue
if key == '}': ## End of a Logic Link
## Create enables as the last entry of rep struct LL
if configuration['REPLICATED_STRUCT']:
if logiclink['HASVALID']:
onesignal = {'NAME':"user_enable",
'DIR':logiclink['DIR'],
'TYPE':'rstruct_enable',
'SIGWID':1,
'MSB':0,
'LSB':0 }
logiclink['WIDTH_RX_RSTRUCT'] = logiclink['WIDTH_MAIN'] + onesignal['SIGWID'] #WIDTH_MAIN WIDTH_GALT assigned here
onesignal['LLINDEX_MAIN_LSB'] = ll_sig_lsb * configuration['RSTRUCT_MULTIPLY_FACTOR']
onesignal['LLINDEX_GALT_LSB'] = ll_sig_lsb ## Fixme, maybe we can combine?
ll_sig_lsb += onesignal['SIGWID']
logiclink['SIGNALLIST_'+mux_mode].append(onesignal) # SIGNALLIST_GALT and SIGNALLIST_MAIN assigned here
else:
logiclink['WIDTH_RX_RSTRUCT'] = logiclink['WIDTH_MAIN']
list_logic_links.append(logiclink)
continue
if key == "TX_FIFO_DEPTH" or key == "RX_FIFO_DEPTH":
if int(value) > 255:
print("ERROR: Key {} value {} exceeds max of 255 ".format(key,value))
sys.exit(1)
logiclink[key] = value
continue
if key == "output" or key == "input": # signals
if width == "valid":
logiclink['HASVALID'] = True
onesignal = {'NAME':value,
'DIR':key,
'TYPE':'valid',
'LLINDEX_'+mux_mode:'null',
'SIGWID':1,
'MSB':0,
'LLINDEX_MAIN_LSB':-1,
'LLINDEX_GALT_LSB':-1,
'LSB':-1 }
elif width == "ready":
logiclink['HASREADY'] = True
onesignal = {'NAME':value,
'DIR':key,
'TYPE':'ready',
'LLINDEX_'+mux_mode:'null',
'SIGWID':1,
'MSB':0,
'LLINDEX_MAIN_LSB':-1,
'LLINDEX_GALT_LSB':-1,
'LSB':-1 }
else:
if (width == '0'):
print("ERROR: File {0} on line {1} has invalid width.".format(cfgfile, repr(line_no+1)))
sys.exit(1)
## Convert scalars to buses in replicated struct mode
if configuration['REPLICATED_STRUCT'] and (width == 'null'):
width = 1
if (width == 'null'): # This is for scalars
width = 1
onesignal = {'NAME':value,
'DIR':key,
'TYPE':'signal',
'SIGWID':1,
'MSB':0,
'LSB':-1 }
else:
width = int(width) - int(lsbit)
onesignal = {'NAME':value,
'DIR':key,
'TYPE':'bus',
'SIGWID':width,
'MSB':width -1 + int(lsbit),
'LSB':int(lsbit) }
## If the llink direction is not defined, we use the first signal that is not a valid or ready to determine the direction, then check the rest against it.
if (logiclink['DIR'] == 'null'):
logiclink['DIR'] = key
elif (logiclink['DIR'] != key):
print("ERROR: File {0} on line {1} has mix of inputs or outputs on same logic link.".format(cfgfile, repr(line_no+1)))
sys.exit(1)
logiclink['WIDTH_'+mux_mode] += onesignal['SIGWID'] #WIDTH_MAIN WIDTH_GALT assigned here
onesignal['LLINDEX_MAIN_LSB'] = ll_sig_lsb
onesignal['LLINDEX_GALT_LSB'] = ll_sig_lsb ## Fixme, maybe we can combine?
ll_sig_lsb += onesignal['SIGWID']
if (mux_mode == 'GALT'):
configuration['GEN2_AS_GEN1_EN'] = True
logiclink['SIGNALLIST_'+mux_mode].append(onesignal) # SIGNALLIST_GALT and SIGNALLIST_MAIN assigned here
continue
print("ERROR: Unknown Key '{}' on line {}\n".format(key,int(line_no)+1))
sys.exit(1)
cf.close()
configuration['LL_LIST'] = list_logic_links
return configuration
## parse_config_file
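## Usage sketch (editorial; file name hypothetical):
##   cfg = parse_config_file('my_link.cfg')
##   for llink in cfg['LL_LIST']:
##       print(llink['NAME'], llink['DIR'], llink['WIDTH_MAIN'])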
##########################################################################################
##########################################################################################
## calc_total_llink_data
## Calculates the Needed Logic Link Data
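## Worked example (editorial, hypothetical numbers): an output logic link with
## a 40-bit bus plus valid and ready contributes 41 bits of TX logic-link data
## (40 data + 1 valid) and 1 bit of RX data (the returned ready).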
def calc_total_llink_data(configuration, mux_mode, enable):
TX_LLINK_DATA = 0
RX_LLINK_DATA = 0
if mux_mode == "RSTRUCT":
loc_mux_mode = "MAIN"
loc_print_mux_mode = "RSTRUCT"
else:
loc_mux_mode = mux_mode
loc_print_mux_mode = mux_mode
if enable :
for llink in configuration['LL_LIST']:
current_tx_signals = 0
current_rx_signals = 0
if llink['DIR'] == 'output':
current_tx_signals += llink['WIDTH_'+loc_mux_mode]
if llink['HASVALID']:
current_tx_signals += 1
if llink['HASREADY']:
current_rx_signals += 1
else:
current_rx_signals += llink['WIDTH_'+loc_mux_mode]
if llink['HASVALID']:
current_rx_signals += 1
if llink['HASREADY']:
current_tx_signals += 1
global_struct.g_info_print.append (" LogicLink {:8} {:8} TX {:4} RX {:4}\n".format(loc_print_mux_mode, llink['NAME'], current_tx_signals, current_rx_signals))
TX_LLINK_DATA += current_tx_signals
RX_LLINK_DATA += current_rx_signals
if mux_mode == "RSTRUCT":
if (configuration['TX_RATE'] == "Full"):
configuration['RSTRUCT_MULTIPLY_FACTOR'] = 1
elif (configuration['TX_RATE'] == "Half"):
configuration['RSTRUCT_MULTIPLY_FACTOR'] = 2
elif (configuration['TX_RATE'] == "Quarter"):
configuration['RSTRUCT_MULTIPLY_FACTOR'] = 4
global_struct.g_info_print.append (" {:25} x{} x{}\n".format("RepStruct in {} Mode".format(configuration['TX_RATE']), configuration['RSTRUCT_MULTIPLY_FACTOR'], configuration['RSTRUCT_MULTIPLY_FACTOR']))
global_struct.g_info_print.append (" ------- -------\n")
global_struct.g_info_print.append (" Total {:8} TX {:4} RX {:4}\n".format(loc_print_mux_mode, TX_LLINK_DATA*configuration['RSTRUCT_MULTIPLY_FACTOR'], RX_LLINK_DATA*configuration['RSTRUCT_MULTIPLY_FACTOR']))
else:
global_struct.g_info_print.append (" ------- -------\n")
global_struct.g_info_print.append (" Total {:8} TX {:4} RX {:4}\n".format(loc_print_mux_mode, TX_LLINK_DATA, RX_LLINK_DATA))
global_struct.g_info_print.append ("\n")
if mux_mode == "RSTRUCT":
configuration['TOTAL_TX_LLINK_DATA_'+"RSTRUCT"] = TX_LLINK_DATA # TOTAL_TX_LLINK_DATA_RSTRUCT
configuration['TOTAL_RX_LLINK_DATA_'+"RSTRUCT"] = RX_LLINK_DATA # TOTAL_RX_LLINK_DATA_RSTRUCT
configuration['TOTAL_TX_LLINK_DATA_'+loc_mux_mode] = TX_LLINK_DATA * configuration['RSTRUCT_MULTIPLY_FACTOR']# TOTAL_TX_LLINK_DATA_MAIN TOTAL_TX_LLINK_DATA_GALT
configuration['TOTAL_RX_LLINK_DATA_'+loc_mux_mode] = RX_LLINK_DATA * configuration['RSTRUCT_MULTIPLY_FACTOR']# TOTAL_RX_LLINK_DATA_MAIN TOTAL_RX_LLINK_DATA_GALT
else:
configuration['TOTAL_TX_LLINK_DATA_'+loc_mux_mode] = TX_LLINK_DATA # TOTAL_TX_LLINK_DATA_MAIN TOTAL_TX_LLINK_DATA_GALT
configuration['TOTAL_RX_LLINK_DATA_'+loc_mux_mode] = RX_LLINK_DATA # TOTAL_RX_LLINK_DATA_MAIN TOTAL_RX_LLINK_DATA_GALT
return configuration
## calc_total_llink_data
##########################################################################################
##########################################################################################
## calc_raw_1phydata
## Calculates the amount of data that can be placed on a single channel, ignoring any overhead (markers, strobes, DBI, etc)
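## Worked example (editorial): a Gen2 channel carries 80 raw bits per beat;
## the rate multipliers below scale that to 160 raw bits at Half rate and
## 320 at Quarter rate, before any overhead is subtracted.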
def calc_raw_1phydata(configuration, mux_mode, enable, use_cfg_rate):
TX_RAW1PHY_DATA = 0
RX_RAW1PHY_DATA = 0
RSTRUCT_RAW1PHY_DATA = 0
if mux_mode == "RSTRUCT":
loc_mux_mode = "MAIN"
loc_print_mux_mode = "RSTRUCT"
else:
loc_mux_mode = mux_mode
loc_print_mux_mode = mux_mode
if enable :
if mux_mode == 'RSTRUCT':
tx_rate = configuration['TX_RATE']
rx_rate = configuration['RX_RATE']
elif mux_mode == 'MAIN':
tx_rate = configuration['TX_RATE']
rx_rate = configuration['RX_RATE']
else:
tx_rate = galt.covert_rate_gen2_as_gen1(configuration['TX_RATE'])
rx_rate = galt.covert_rate_gen2_as_gen1(configuration['RX_RATE'])
## Calculate Channel Width
if loc_mux_mode == 'MAIN' :
if configuration['CHAN_TYPE'] == 'Gen2Only' or configuration['CHAN_TYPE'] == 'Gen2':
TX_RAW1PHY_DATA = 80
elif configuration['CHAN_TYPE'] == 'Gen1Only' :
TX_RAW1PHY_DATA = 40
elif configuration['CHAN_TYPE'] == 'AIBO' :
TX_RAW1PHY_DATA = 20
elif configuration['CHAN_TYPE'] == 'Tiered' :
TX_RAW1PHY_DATA = 9999
elif loc_mux_mode == 'GALT' :
if configuration['CHAN_TYPE'] == 'Gen2':
TX_RAW1PHY_DATA = 40
else :
print("ERROR: Unsupported Option for GALT (Gen2asGen1) PhyType = "+configuration['CHAN_TYPE'])
sys.exit(1)
if loc_mux_mode == 'MAIN' :
if configuration['CHAN_TYPE'] == 'Gen2Only' or configuration['CHAN_TYPE'] == 'Gen2':
RSTRUCT_RAW1PHY_DATA = 80
elif configuration['CHAN_TYPE'] == 'Gen1Only' :
RSTRUCT_RAW1PHY_DATA = 40
elif configuration['CHAN_TYPE'] == 'AIBO' :
RSTRUCT_RAW1PHY_DATA = 20
elif configuration['CHAN_TYPE'] == 'Tiered' :
RSTRUCT_RAW1PHY_DATA = 9999
elif loc_mux_mode == 'GALT' :
if configuration['CHAN_TYPE'] == 'Gen2':
RSTRUCT_RAW1PHY_DATA = 40
else :
print("ERROR: Unsupported Option for GALT (Gen2asGen1) PhyType = "+configuration['CHAN_TYPE'])
sys.exit(1)
## By definition, RX PHY = TX PHY
RX_RAW1PHY_DATA = TX_RAW1PHY_DATA
configuration['CHAN_TX_RAW1PHY_BEAT_'+loc_mux_mode] = TX_RAW1PHY_DATA # CHAN_TX_RAW1PHY_BEAT_MAIN or CHAN_TX_RAW1PHY_BEAT_GALT
configuration['CHAN_RX_RAW1PHY_BEAT_'+loc_mux_mode] = RX_RAW1PHY_DATA # CHAN_RX_RAW1PHY_BEAT_MAIN or CHAN_RX_RAW1PHY_BEAT_GALT
if mux_mode == 'RSTRUCT':
configuration['CHAN_TX_RAW1PHY_BEAT_'+'RSTRUCT'] = TX_RAW1PHY_DATA # CHAN_TX_RAW1PHY_DATA_RSTRUCT
configuration['CHAN_RX_RAW1PHY_BEAT_'+'RSTRUCT'] = RX_RAW1PHY_DATA # CHAN_RX_RAW1PHY_DATA_RSTRUCT
if tx_rate == 'Full' :
TX_RAW1PHY_DATA *= 1
elif tx_rate == 'Half' :
TX_RAW1PHY_DATA *= 2
elif tx_rate == 'Quarter' :
TX_RAW1PHY_DATA *= 4
if rx_rate == 'Full' :
RX_RAW1PHY_DATA *= 1
elif rx_rate == 'Half' :
RX_RAW1PHY_DATA *= 2
elif rx_rate == 'Quarter' :
RX_RAW1PHY_DATA *= 4
configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode] = TX_RAW1PHY_DATA # CHAN_TX_RAW1PHY_DATA_MAIN or CHAN_TX_RAW1PHY_DATA_GALT
configuration['CHAN_RX_RAW1PHY_DATA_'+loc_mux_mode] = RX_RAW1PHY_DATA # CHAN_RX_RAW1PHY_DATA_MAIN or CHAN_RX_RAW1PHY_DATA_GALT
if mux_mode == 'RSTRUCT':
configuration['CHAN_TX_RAW1PHY_DATA_'+'RSTRUCT'] = configuration['CHAN_TX_RAW1PHY_BEAT_'+'RSTRUCT'] # CHAN_TX_RAW1PHY_DATA_RSTRUCT
configuration['CHAN_RX_RAW1PHY_DATA_'+'RSTRUCT'] = configuration['CHAN_RX_RAW1PHY_BEAT_'+'RSTRUCT'] # CHAN_RX_RAW1PHY_DATA_RSTRUCT
if enable :
if mux_mode == 'RSTRUCT':
global_struct.g_info_print.append (" RSTRUCT Sub Channel Info\n")
global_struct.g_info_print.append (" Note: RSTRUCT describes the Replicated Struct on a Full rate channel.\n")
global_struct.g_info_print.append (" RSTRUCT will be replicated for {} rate per configuration and that is known as MAIN channel\n".format(configuration['TX_RATE']))
global_struct.g_info_print.append ("\n")
global_struct.g_info_print.append (" {}: Each channel is {} PHY running at {} Rate with {} bits\n".format(loc_print_mux_mode, configuration['CHAN_TYPE'] if loc_mux_mode == 'MAIN' else 'Gen1', "Full", RSTRUCT_RAW1PHY_DATA))
global_struct.g_info_print.append (" {}: {}x channels\n".format(loc_print_mux_mode, configuration['NUM_CHAN']))
global_struct.g_info_print.append (" {}: Total AIB bits is {} bits\n".format(loc_print_mux_mode, configuration['NUM_CHAN'] * RSTRUCT_RAW1PHY_DATA))
global_struct.g_info_print.append("\n")
global_struct.g_info_print.append (" MAIN Channel Info\n")
global_struct.g_info_print.append (" {}: Each channel is {} PHY running at {} Rate with {} bits\n".format('MAIN', configuration['CHAN_TYPE'] if loc_mux_mode == 'MAIN' else 'Gen1', tx_rate, configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode]))
global_struct.g_info_print.append (" {}: {}x channels\n".format('MAIN', configuration['NUM_CHAN']))
global_struct.g_info_print.append (" {}: Total AIB bits is {} bits\n".format('MAIN', configuration['NUM_CHAN'] * configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode]))
else:
if mux_mode == 'MAIN':
global_struct.g_info_print.append (" Channel Info\n")
else:
global_struct.g_info_print.append (" Gen2asGen1 (aka GALT)\n")
if configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode] != configuration['CHAN_RX_RAW1PHY_DATA_'+loc_mux_mode]:
global_struct.g_info_print.append (" TX: Each channel is {} PHY running at {} Rate is {} bits\n".format(configuration['CHAN_TYPE'], tx_rate, configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode]))
global_struct.g_info_print.append (" RX: Each channel is {} PHY running at {} Rate is {} bits\n".format(configuration['CHAN_TYPE'], rx_rate, configuration['CHAN_RX_RAW1PHY_DATA_'+loc_mux_mode]))
else:
global_struct.g_info_print.append (" {}: Each channel is {} PHY running at {} Rate with {} bits\n".format(loc_print_mux_mode, configuration['CHAN_TYPE'] if loc_mux_mode == 'MAIN' else 'Gen1', tx_rate, configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode]))
global_struct.g_info_print.append (" {}: {}x channels\n".format(loc_print_mux_mode, configuration['NUM_CHAN']))
global_struct.g_info_print.append (" {}: Total AIB bits is {} bits\n".format(loc_print_mux_mode, configuration['NUM_CHAN'] * configuration['CHAN_TX_RAW1PHY_DATA_'+loc_mux_mode]))
global_struct.g_info_print.append("\n")
return configuration
## calc_raw_1phydata
##########################################################################################
##########################################################################################
## calc_overhead_1phydata
## Calculates the overhead needed for the design (DBI, markers, etc.)
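## Worked example (editorial): on a Gen2 channel at Half rate (160 raw bits),
## DBI costs 160 // 20 = 8 bits, a persistent strobe costs 1 bit, and a
## persistent marker costs one bit per beat (160 // 80 = 2), leaving 149
## usable data bits on that channel.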
def calc_overhead_1phydata(configuration, mux_mode, enable):
TX_OVERHEAD_BITS = 0
RX_OVERHEAD_BITS = 0
TX_OVERHEAD_BITS_RSTRUCT = 0
RX_OVERHEAD_BITS_RSTRUCT = 0
if enable :
if configuration['TX_DBI_PRESENT'] and (mux_mode == 'MAIN' or mux_mode == 'RSTRUCT') and (configuration['CHAN_TYPE'] == 'Gen2Only' or configuration['CHAN_TYPE'] == 'Gen2'):
TX_OVERHEAD_BITS += configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] // 20
global_struct.g_info_print.append (" TX: DBI enabled adds {} overhead bits per channel\n".format(configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] // 20))
else:
global_struct.g_info_print.append (" TX: No DBI\n")
if configuration['TX_PERSISTENT_STROBE'] and configuration['TX_ENABLE_STROBE']:
TX_OVERHEAD_BITS += 1
global_struct.g_info_print.append (" TX: Persistent Strobe adds {} overhead bits per channel\n".format(1))
else:
global_struct.g_info_print.append (" TX: Strobe is Recoverable or non-existent\n")
if configuration['TX_PERSISTENT_MARKER'] and configuration['TX_ENABLE_MARKER']:
TX_OVERHEAD_BITS += configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] // configuration['CHAN_TX_RAW1PHY_BEAT_'+mux_mode]
global_struct.g_info_print.append (" TX: Persistent Marker adds {} overhead bits per channel\n".format(configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] // configuration['CHAN_TX_RAW1PHY_BEAT_'+mux_mode]))
else:
global_struct.g_info_print.append (" TX: Marker is Recoverable or non-existent\n")
if mux_mode == 'RSTRUCT':
global_struct.g_info_print.append (" TX: Total RSTRUCT overhead bits across {} Full Rate channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * TX_OVERHEAD_BITS))
global_struct.g_info_print.append (" TX: Total RSTRUCT data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] - TX_OVERHEAD_BITS)))
global_struct.g_info_print.append (" TX: Total MAIN overhead bits across {} {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['TX_RATE'], configuration['NUM_CHAN'] * TX_OVERHEAD_BITS * configuration['RSTRUCT_MULTIPLY_FACTOR']))
global_struct.g_info_print.append (" TX: Total MAIN data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_TX_RAW1PHY_DATA_'+'MAIN'] - (TX_OVERHEAD_BITS* configuration['RSTRUCT_MULTIPLY_FACTOR']))))
configuration['CHAN_TX_OVERHEAD_BITS_'+'MAIN'] = TX_OVERHEAD_BITS * configuration['RSTRUCT_MULTIPLY_FACTOR']
else:
global_struct.g_info_print.append (" TX: Total overhead bits across {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * TX_OVERHEAD_BITS))
global_struct.g_info_print.append (" TX: Total data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] - TX_OVERHEAD_BITS)))
global_struct.g_info_print.append("\n")
configuration['CHAN_TX_OVERHEAD_BITS_'+mux_mode] = TX_OVERHEAD_BITS
if enable :
if configuration['RX_DBI_PRESENT'] and (mux_mode == 'MAIN' or mux_mode == 'RSTRUCT') and (configuration['CHAN_TYPE'] == 'Gen2Only' or configuration['CHAN_TYPE'] == 'Gen2'):
RX_OVERHEAD_BITS += configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] // 20
global_struct.g_info_print.append (" RX: DBI enabled adds {} overhead bits per channel\n".format(configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] // 20))
else:
global_struct.g_info_print.append (" RX: No DBI\n")
if configuration['RX_PERSISTENT_STROBE'] and configuration['RX_ENABLE_STROBE']:
RX_OVERHEAD_BITS += 1
global_struct.g_info_print.append (" RX: Persistent Strobe adds {} overhead bits per channel\n".format(1))
else:
global_struct.g_info_print.append (" RX: Strobe is Recoverable or non-existent\n")
if configuration['RX_PERSISTENT_MARKER'] and configuration['RX_ENABLE_MARKER']:
RX_OVERHEAD_BITS += configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] // configuration['CHAN_RX_RAW1PHY_BEAT_'+mux_mode]
global_struct.g_info_print.append (" RX: Persistent Marker adds {} overhead bits per channel\n".format(configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] // configuration['CHAN_RX_RAW1PHY_BEAT_'+mux_mode]))
else:
global_struct.g_info_print.append (" RX: Marker is Recoverable or non-existent\n")
if mux_mode == 'RSTRUCT':
global_struct.g_info_print.append (" RX: Total RSTRUCT overhead bits across {} Full Rate channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * RX_OVERHEAD_BITS))
global_struct.g_info_print.append (" RX: Total RSTRUCT data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] - RX_OVERHEAD_BITS)))
global_struct.g_info_print.append (" RX: Total MAIN overhead bits across {} {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['RX_RATE'], configuration['NUM_CHAN'] * RX_OVERHEAD_BITS * configuration['RSTRUCT_MULTIPLY_FACTOR']))
global_struct.g_info_print.append (" RX: Total MAIN data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_RX_RAW1PHY_DATA_'+'MAIN'] - (RX_OVERHEAD_BITS* configuration['RSTRUCT_MULTIPLY_FACTOR']))))
configuration['CHAN_RX_OVERHEAD_BITS_'+'MAIN'] = RX_OVERHEAD_BITS * configuration['RSTRUCT_MULTIPLY_FACTOR']
else:
global_struct.g_info_print.append (" RX: Total overhead bits across {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * RX_OVERHEAD_BITS))
global_struct.g_info_print.append (" RX: Total data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] - RX_OVERHEAD_BITS)))
global_struct.g_info_print.append("\n")
configuration['CHAN_RX_OVERHEAD_BITS_'+mux_mode] = RX_OVERHEAD_BITS
configuration['CHAN_TX_USEABLE1PHY_DATA_'+mux_mode] = configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] - configuration['CHAN_TX_OVERHEAD_BITS_'+mux_mode] ## CHAN_TX_USEABLE1PHY_DATA_MAIN
configuration['CHAN_RX_USEABLE1PHY_DATA_'+mux_mode] = configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] - configuration['CHAN_RX_OVERHEAD_BITS_'+mux_mode] ## CHAN_RX_USEABLE1PHY_DATA_MAIN
configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode] = configuration['NUM_CHAN'] * configuration['CHAN_TX_USEABLE1PHY_DATA_'+mux_mode] ## TOTAL_TX_USABLE_RAWDATA_MAIN
configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode] = configuration['NUM_CHAN'] * configuration['CHAN_RX_USEABLE1PHY_DATA_'+mux_mode] ## TOTAL_RX_USABLE_RAWDATA_MAIN
configuration['TOTAL_TX_ROUNDUP_BIT_'+mux_mode] = configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode] - configuration['TOTAL_TX_LLINK_DATA_'+mux_mode] ## TOTAL_TX_ROUNDUP_BIT_MAIN, TOTAL_TX_ROUNDUP_BIT_GALT, TOTAL_TX_ROUNDUP_BIT_RSTRUCT defined here
configuration['TOTAL_RX_ROUNDUP_BIT_'+mux_mode] = configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode] - configuration['TOTAL_RX_LLINK_DATA_'+mux_mode] ## TOTAL_RX_ROUNDUP_BIT_MAIN, TOTAL_RX_ROUNDUP_BIT_GALT, TOTAL_RX_ROUNDUP_BIT_RSTRUCT defined here
if mux_mode == 'RSTRUCT':
configuration['CHAN_TX_USEABLE1PHY_DATA_'+'MAIN'] = configuration['CHAN_TX_RAW1PHY_DATA_'+'MAIN'] - configuration['CHAN_TX_OVERHEAD_BITS_'+'MAIN']
configuration['CHAN_RX_USEABLE1PHY_DATA_'+'MAIN'] = configuration['CHAN_RX_RAW1PHY_DATA_'+'MAIN'] - configuration['CHAN_RX_OVERHEAD_BITS_'+'MAIN']
configuration['TOTAL_TX_USABLE_RAWDATA_'+'MAIN'] = configuration['NUM_CHAN'] * configuration['CHAN_TX_USEABLE1PHY_DATA_'+'MAIN']
configuration['TOTAL_RX_USABLE_RAWDATA_'+'MAIN'] = configuration['NUM_CHAN'] * configuration['CHAN_RX_USEABLE1PHY_DATA_'+'MAIN']
configuration['TOTAL_TX_ROUNDUP_BIT_'+'MAIN'] = configuration['TOTAL_TX_USABLE_RAWDATA_'+'MAIN'] - configuration['TOTAL_TX_LLINK_DATA_'+'MAIN'] ## TOTAL_TX_ROUNDUP_BIT_MAIN
configuration['TOTAL_RX_ROUNDUP_BIT_'+'MAIN'] = configuration['TOTAL_RX_USABLE_RAWDATA_'+'MAIN'] - configuration['TOTAL_RX_LLINK_DATA_'+'MAIN'] ## TOTAL_RX_ROUNDUP_BIT_MAIN
if mux_mode == "MAIN":
if global_struct.USE_SPARE_VECTOR:
configuration['TX_SPARE_WIDTH'] = configuration['TOTAL_TX_ROUNDUP_BIT_MAIN']
configuration['RX_SPARE_WIDTH'] = configuration['TOTAL_RX_ROUNDUP_BIT_MAIN']
else:
configuration['TX_SPARE_WIDTH'] = 0
configuration['RX_SPARE_WIDTH'] = 0
return configuration
## calc_overhead_1phydata
##########################################################################################
##########################################################################################
## check_configuration
## Runs some basic sanity checks on the stated configuration looking for errors
## or inconsistencies.
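## (Editorial note: the strobe/marker warnings below do not set err_found;
## generation proceeds using the default locations for the channel type.)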
def check_configuration(configuration, mux_mode):
err_found = False
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['TX_RATE'] == "Quarter":
print("ERROR: Gen1Only does not support TX_RATE = Quarter")
err_found = True
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['RX_RATE'] == "Quarter":
print("ERROR: Gen1Only does not support TX_RATE = Quarter")
err_found = True
if configuration['CHAN_TYPE'] == "Gen2Only" and configuration['TX_STROBE_GEN1_LOC_USER_SPECIFY'] and not configuration['TX_STROBE_GEN2_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for TX_STROBE_GEN1_LOC but not one for TX_STROBE_GEN2_LOC and Channel Type is Gen2Only.\n Ignoring Gen1 settings and using default Gen2 settings.\n")
if configuration['CHAN_TYPE'] == "Gen2Only" and configuration['RX_STROBE_GEN1_LOC_USER_SPECIFY'] and not configuration['RX_STROBE_GEN2_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for RX_STROBE_GEN1_LOC but not one for RX_STROBE_GEN2_LOC and Channel Type is Gen2Only.\n Ignoring Gen1 settings and using default Gen2 settings.\n")
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['TX_STROBE_GEN2_LOC_USER_SPECIFY'] and not configuration['TX_STROBE_GEN1_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for TX_STROBE_GEN2_LOC but not one for TX_STROBE_GEN1_LOC and Channel Type is Gen1Only.\n Ignoring Gen2 settings and using default Gen1 settings.\n")
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['RX_STROBE_GEN2_LOC_USER_SPECIFY'] and not configuration['RX_STROBE_GEN1_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for RX_STROBE_GEN2_LOC but not one for RX_STROBE_GEN1_LOC and Channel Type is Gen1Only.\n Ignoring Gen2 settings and using default Gen1 settings.\n")
if configuration['CHAN_TYPE'] == "Gen2Only" and configuration['TX_MARKER_GEN1_LOC_USER_SPECIFY'] and not configuration['TX_MARKER_GEN2_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for TX_MARKER_GEN1_LOC but not one for TX_MARKER_GEN2_LOC and Channel Type is Gen2Only.\n Ignoring Gen1 settings and using default Gen2 settings.\n")
if configuration['CHAN_TYPE'] == "Gen2Only" and configuration['RX_MARKER_GEN1_LOC_USER_SPECIFY'] and not configuration['RX_MARKER_GEN2_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for RX_MARKER_GEN1_LOC but not one for RX_MARKER_GEN2_LOC and Channel Type is Gen2Only.\n Ignoring Gen1 settings and using default Gen2 settings.\n")
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['TX_MARKER_GEN2_LOC_USER_SPECIFY'] and not configuration['TX_MARKER_GEN1_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for TX_MARKER_GEN2_LOC but not one for TX_MARKER_GEN1_LOC and Channel Type is Gen1Only.\n Ignoring Gen2 settings and using default Gen1 settings.\n")
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration['RX_MARKER_GEN2_LOC_USER_SPECIFY'] and not configuration['RX_MARKER_GEN1_LOC_USER_SPECIFY'] :
print("WARNING: Detected configuration for RX_MARKER_GEN2_LOC but not one for RX_MARKER_GEN1_LOC and Channel Type is Gen1Only.\n Ignoring Gen2 settings and using default Gen1 settings.\n")
if configuration['REPLICATED_STRUCT'] and (configuration['TX_ENABLE_PACKETIZATION'] or configuration['RX_ENABLE_PACKETIZATION']):
print("ERROR: REPLICATED_STRUCT and TX_ENABLE_PACKETIZATION or RX_ENABLE_PACKETIZATION both enabled. This is not supported.\n")
err_found = True
## This looks odd, but we use "GEN2" below, so if we are in Gen1Only, mark the GEN2 strobe with the Gen1 locations
if configuration['CHAN_TYPE'] == "Gen1Only":
configuration['TX_STROBE_GEN2_LOC'] = configuration['TX_STROBE_GEN1_LOC']
configuration['RX_STROBE_GEN2_LOC'] = configuration['RX_STROBE_GEN1_LOC']
configuration['TX_MARKER_GEN2_LOC'] = configuration['TX_MARKER_GEN1_LOC']
configuration['RX_MARKER_GEN2_LOC'] = configuration['RX_MARKER_GEN1_LOC']
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration ['TX_DBI_PRESENT']:
print("INFO: DBI not supported in Gen1. Setting TX_DBI_PRESENT to False\n")
configuration['TX_DBI_PRESENT'] = False
if configuration['CHAN_TYPE'] == "Gen1Only" and configuration ['RX_DBI_PRESENT']:
print("INFO: DBI not supported in Gen1. Setting RX_DBI_PRESENT to False\n")
configuration['RX_DBI_PRESENT'] = False
if err_found:
print("Fix above errors and re-run to continue.")
sys.exit(1)
    ## We shouldn't see a failure if packetization is chosen, so only check the fixed-allocation (non-packetized) case.
if configuration['TX_ENABLE_PACKETIZATION'] == 0 :
if (configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode] < configuration['TOTAL_TX_LLINK_DATA_'+mux_mode]):
print("ERROR: Not enough TX {} AIB Data bits {} for Fixed Allocation of Logic Link TX Data {} bits.\n".format(mux_mode, configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode],configuration['TOTAL_TX_LLINK_DATA_'+mux_mode]))
sys.exit(1)
global_struct.g_info_print.append (" "+mux_mode+" TX needs {:4} bits of data and has {:4} bits available across {}x {} {:} Rate channels so {:4} spare bits\n".format(configuration['TOTAL_TX_LLINK_DATA_'+mux_mode], configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode], configuration['NUM_CHAN'], configuration['CHAN_TYPE'], configuration['TX_RATE'], configuration['TOTAL_TX_ROUNDUP_BIT_'+mux_mode] ))
if (configuration['TOTAL_TX_USABLE_RAWDATA_'+mux_mode] - configuration['TOTAL_TX_LLINK_DATA_'+mux_mode]) > (configuration['CHAN_TX_RAW1PHY_DATA_'+mux_mode] - configuration['CHAN_TX_OVERHEAD_BITS_'+mux_mode]):
global_struct.g_info_print.append (" INFORMATION: At least one full channel unused for TX\n")
if configuration['RX_ENABLE_PACKETIZATION'] == 0 :
if (configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode] < configuration['TOTAL_RX_LLINK_DATA_'+mux_mode]):
print("ERROR: Not enough RX {} AIB Data bits {} for Fixed Allocation of Logic Link RX Data {} bits.\n".format(mux_mode, configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode],configuration['TOTAL_RX_LLINK_DATA_'+mux_mode]))
sys.exit(1)
global_struct.g_info_print.append (" "+mux_mode+" RX needs {:4} bits of data and has {:4} bits available across {}x {} {:} Rate channels so {:4} spare bits\n".format(configuration['TOTAL_RX_LLINK_DATA_'+mux_mode], configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode], configuration['NUM_CHAN'], configuration['CHAN_TYPE'], configuration['RX_RATE'], configuration['TOTAL_RX_ROUNDUP_BIT_'+mux_mode] ))
if (configuration['TOTAL_RX_USABLE_RAWDATA_'+mux_mode] - configuration['TOTAL_RX_LLINK_DATA_'+mux_mode]) > (configuration['CHAN_RX_RAW1PHY_DATA_'+mux_mode] - configuration['CHAN_RX_OVERHEAD_BITS_'+mux_mode]):
global_struct.g_info_print.append (" INFORMATION: At least one full channel unused for RX\n")
global_struct.g_info_print.append("\n")
if mux_mode == "RSTRUCT":
if configuration['TX_ENABLE_PACKETIZATION'] == 0 :
if (configuration['TOTAL_TX_USABLE_RAWDATA_'+'MAIN'] < configuration['TOTAL_TX_LLINK_DATA_'+'MAIN']):
print("ERROR: Not enough TX {} AIB Data bits {} for Fixed Allocation of Logic Link TX Data {} bits.\n".format('MAIN', configuration['TOTAL_TX_USABLE_RAWDATA_'+'MAIN'],configuration['TOTAL_TX_LLINK_DATA_'+'MAIN']))
sys.exit(1)
global_struct.g_info_print.append (" "+'MAIN'+" TX needs {:4} bits of data and has {:4} bits available across {}x {} {:} Rate channels so {:4} spare bits\n".format(configuration['TOTAL_TX_LLINK_DATA_'+'MAIN'], configuration['TOTAL_TX_USABLE_RAWDATA_'+'MAIN'], configuration['NUM_CHAN'], configuration['CHAN_TYPE'], configuration['TX_RATE'], configuration['TOTAL_TX_ROUNDUP_BIT_'+'MAIN'] ))
if configuration['RX_ENABLE_PACKETIZATION'] == 0 :
if (configuration['TOTAL_RX_USABLE_RAWDATA_'+'MAIN'] < configuration['TOTAL_RX_LLINK_DATA_'+'MAIN']):
print("ERROR: Not enough RX {} AIB Data bits {} for Fixed Allocation of Logic Link RX Data {} bits.\n".format('MAIN', configuration['TOTAL_RX_USABLE_RAWDATA_'+'MAIN'],configuration['TOTAL_RX_LLINK_DATA_'+'MAIN']))
sys.exit(1)
global_struct.g_info_print.append (" "+'MAIN'+" RX needs {:4} bits of data and has {:4} bits available across {}x {} {:} Rate channels so {:4} spare bits\n".format(configuration['TOTAL_RX_LLINK_DATA_'+'MAIN'], configuration['TOTAL_RX_USABLE_RAWDATA_'+'MAIN'], configuration['NUM_CHAN'], configuration['CHAN_TYPE'], configuration['RX_RATE'], configuration['TOTAL_RX_ROUNDUP_BIT_'+'MAIN'] ))
global_struct.g_info_print.append("\n")
# Perform Checks
############################################################
############################################################
# Check Strobe // Marker placement
if configuration['TX_ENABLE_MARKER'] == False:
configuration['TX_PERSISTENT_MARKER'] = True
configuration['TX_USER_MARKER'] = False
configuration['TX_MARKER_GEN2_LOC'] = 0
configuration['TX_MARKER_GEN1_LOC'] = 0
if configuration['RX_ENABLE_MARKER'] == False:
configuration['RX_PERSISTENT_MARKER'] = True
configuration['RX_USER_MARKER'] = False
configuration['RX_MARKER_GEN2_LOC'] = 0
configuration['RX_MARKER_GEN1_LOC'] = 0
if configuration['TX_ENABLE_STROBE'] == False:
configuration['TX_PERSISTENT_STROBE'] = True
configuration['TX_USER_STROBE'] = False
configuration['TX_STROBE_GEN2_LOC'] = 0
configuration['TX_STROBE_GEN1_LOC'] = 0
if configuration['RX_ENABLE_STROBE'] == False:
configuration['RX_PERSISTENT_STROBE'] = True
configuration['RX_USER_STROBE'] = False
configuration['RX_STROBE_GEN2_LOC'] = 0
configuration['RX_STROBE_GEN1_LOC'] = 0
if int(configuration['TX_STROBE_GEN2_LOC']) >= int(configuration['CHAN_TX_RAW1PHY_DATA_MAIN']) and configuration['TX_ENABLE_STROBE']:
print ("ERROR TX_STROBE_GEN_LOC = {} is outside TX Channel Width 0-{}".format(configuration['TX_STROBE_GEN2_LOC'], configuration['CHAN_TX_RAW1PHY_DATA_MAIN']-1))
sys.exit(1)
if int(configuration['RX_STROBE_GEN2_LOC']) >= int(configuration['CHAN_RX_RAW1PHY_DATA_MAIN']) and configuration['RX_ENABLE_STROBE']:
print ("ERROR RX_STROBE_GEN_LOC = {} is outside RX Channel Width 0-{}".format(configuration['RX_STROBE_GEN2_LOC'], configuration['CHAN_RX_RAW1PHY_DATA_MAIN']-1))
sys.exit(1)
if int(configuration['TX_MARKER_GEN2_LOC']) >= int(configuration['CHAN_TX_RAW1PHY_BEAT_MAIN']) and configuration['TX_ENABLE_MARKER']:
print ("ERROR TX_MARKER_GEN_LOC = {} is outside TX Full Rate data word which is 0-{}".format(configuration['TX_MARKER_GEN2_LOC'], configuration['CHAN_TX_RAW1PHY_BEAT_MAIN']-1))
sys.exit(1)
if int(configuration['RX_MARKER_GEN2_LOC']) >= int(configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) and configuration['RX_ENABLE_MARKER']:
print ("ERROR RX_MARKER_GEN_LOC = {} is outside RX Full Rate data word which is 0-{}".format(configuration['RX_MARKER_GEN2_LOC'], configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']-1))
sys.exit(1)
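    ## Example (hypothetical values): with CHAN_TX_RAW1PHY_DATA_MAIN == 80, the
    ## legal strobe locations are 0-79, so TX_STROBE_GEN2_LOC == 80 trips the
    ## first check above. The marker checks work the same way but are bounded
    ## by the beat width (CHAN_TX_RAW1PHY_BEAT_MAIN) instead.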
# Check Strobe // Marker placement
############################################################
############################################################
    # Check Strobe // Marker placement // DBI placement don't overlap.
    if configuration['TX_ENABLE_MARKER'] and configuration['TX_DBI_PRESENT']:
        if (configuration['TX_MARKER_GEN2_LOC'] % 80) in (38, 39, 78, 79):
            print ("ERROR TX_MARKER_GEN2_LOC = {} overlaps with DBI".format(configuration['TX_MARKER_GEN2_LOC']))
            sys.exit(1)
    if configuration['RX_ENABLE_MARKER'] and configuration['RX_DBI_PRESENT']:
        if (configuration['RX_MARKER_GEN2_LOC'] % 80) in (38, 39, 78, 79):
            print ("ERROR RX_MARKER_GEN2_LOC = {} overlaps with DBI".format(configuration['RX_MARKER_GEN2_LOC']))
            sys.exit(1)
    if configuration['TX_ENABLE_STROBE'] and configuration['TX_DBI_PRESENT']:
        if (configuration['TX_STROBE_GEN2_LOC'] % 80) in (38, 39, 78, 79):
            print ("ERROR TX_STROBE_GEN2_LOC = {} overlaps with DBI".format(configuration['TX_STROBE_GEN2_LOC']))
            sys.exit(1)
    if configuration['RX_ENABLE_STROBE'] and configuration['RX_DBI_PRESENT']:
        if (configuration['RX_STROBE_GEN2_LOC'] % 80) in (38, 39, 78, 79):
            print ("ERROR RX_STROBE_GEN2_LOC = {} overlaps with DBI".format(configuration['RX_STROBE_GEN2_LOC']))
            sys.exit(1)
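    ## Worked example for the checks above (values taken from the modulo
    ## tests): each 80-bit column reserves bits 38, 39, 78 and 79 for DBI, so
    ## a marker or strobe at absolute location 118 collides because
    ## 118 % 80 == 38.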
if configuration['TX_ENABLE_MARKER'] and configuration['TX_ENABLE_STROBE'] and (configuration['CHAN_TYPE'] == "Gen2Only" or configuration['CHAN_TYPE'] == "Gen2"):
if ((configuration['TX_MARKER_GEN2_LOC'] % 80) == (configuration['TX_STROBE_GEN2_LOC'] % 80)):
print ("ERROR TX_MARKER_GEN2_LOC = {} overlaps with TX_STROBE_GEN2_LOC = {}".format(configuration['TX_MARKER_GEN2_LOC'], configuration['TX_STROBE_GEN2_LOC']))
sys.exit(1)
if configuration['RX_ENABLE_MARKER'] and configuration['RX_ENABLE_STROBE'] and (configuration['CHAN_TYPE'] == "Gen2Only" or configuration['CHAN_TYPE'] == "Gen2"):
if ((configuration['RX_MARKER_GEN2_LOC'] % 80) == (configuration['RX_STROBE_GEN2_LOC'] % 80)):
print ("ERROR RX_MARKER_GEN2_LOC = {} overlaps with RX_STROBE_GEN2_LOC = {}".format(configuration['RX_MARKER_GEN2_LOC'], configuration['RX_STROBE_GEN2_LOC']))
sys.exit(1)
if configuration['TX_ENABLE_MARKER'] and configuration['TX_ENABLE_STROBE'] and (configuration['CHAN_TYPE'] == "Gen1Only" or configuration['CHAN_TYPE'] == "Gen1"):
if ((configuration['TX_MARKER_GEN1_LOC'] % 80) == (configuration['TX_STROBE_GEN1_LOC'] % 80)):
print ("ERROR TX_MARKER_GEN1_LOC = {} overlaps with TX_STROBE_GEN1_LOC = {}".format(configuration['TX_MARKER_GEN1_LOC'], configuration['TX_STROBE_GEN1_LOC']))
sys.exit(1)
if configuration['RX_ENABLE_MARKER'] and configuration['RX_ENABLE_STROBE'] and (configuration['CHAN_TYPE'] == "Gen1Only" or configuration['CHAN_TYPE'] == "Gen1"):
if ((configuration['RX_MARKER_GEN1_LOC'] % 80) == (configuration['RX_STROBE_GEN1_LOC'] % 80)):
print ("ERROR RX_MARKER_GEN1_LOC = {} overlaps with RX_STROBE_GEN1_LOC = {}".format(configuration['RX_MARKER_GEN1_LOC'], configuration['RX_STROBE_GEN1_LOC']))
sys.exit(1)
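    ## Worked example (hypothetical values): TX_MARKER_GEN2_LOC == 5 and
    ## TX_STROBE_GEN2_LOC == 85 still collide even though they differ, because
    ## 5 % 80 == 85 % 80 == 5.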
# Check Strobe // Marker placement
############################################################
## check_configuration
##########################################################################################
##########################################################################################
## calculate_bit_locations
## This is the branching point for Packetization, GALT, RSTRUCT or "normal" Logic Link
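## A short sketch of the dispatch order implied by the if/elif chains below:
## packetization takes precedence, then GALT (GEN2_AS_GEN1_EN), then
## REPLICATED_STRUCT, with fixed allocation as the fallback. TX and RX are
## resolved independently, so e.g. a design could packetize TX while RX uses
## fixed allocation.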
def calculate_bit_locations(configuration):
if global_struct.g_SIGNAL_DEBUG:
print ("SIGNAL_DEBUG: Before calculate_bit_loc")
pprint.pprint (configuration)
if configuration['TX_ENABLE_PACKETIZATION']:
configuration = packetization.calculate_bit_loc_packet(True, configuration)
elif configuration['GEN2_AS_GEN1_EN']:
configuration = galt.calculate_bit_loc_galt(True, configuration)
elif configuration['REPLICATED_STRUCT']:
configuration = calculate_bit_loc_repstruct(True, configuration)
else:
configuration = calculate_bit_loc_fixed_alloc(True, configuration)
if configuration['RX_ENABLE_PACKETIZATION']:
configuration = packetization.calculate_bit_loc_packet(False, configuration)
elif configuration['GEN2_AS_GEN1_EN']:
configuration = galt.calculate_bit_loc_galt(False, configuration)
elif configuration['REPLICATED_STRUCT']:
configuration = calculate_bit_loc_repstruct(False, configuration)
else:
        configuration = calculate_bit_loc_fixed_alloc(False, configuration)
return configuration
## calculate_bit_locations
##########################################################################################
##########################################################################################
## calculate_channel_parameters
## Calculate and print the high-level parameters of the channel / logic link data.
def calculate_channel_parameters(configuration):
############################################################
# Reduce No Ready case to data only
for llink in configuration['LL_LIST']:
if llink['HASVALID'] and not llink['HASREADY']:
if configuration['REPLICATED_STRUCT']:
llink['HASVALID_NOREADY_REPSTRUCT'] = True
# ## Then lets turn the Valid into data
# for sig in llink['SIGNALLIST_MAIN']:
# if sig['TYPE'] == 'valid':
# sig['TYPE'] = 'valid_nordy'
else:
llink['HASVALID'] = False
llink['HASVALID_NOREADY_NOREP'] = True
                ## First, let's find the LLINDEX of the last data bit
ll_sig_lsb = -1
for sig in llink['SIGNALLIST_MAIN']:
if sig['LLINDEX_MAIN_LSB'] >= ll_sig_lsb:
ll_sig_lsb = sig['LLINDEX_MAIN_LSB'] + sig['SIGWID'] - 1
                ## Then let's turn the Valid into data
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid':
sig['TYPE'] = 'signal_valid'
llink['WIDTH_MAIN'] += 1
sig['LLINDEX_MAIN_LSB'] = ll_sig_lsb+1
if configuration['GEN2_AS_GEN1_EN']:
                    ## First, let's find the LLINDEX of the last data bit
ll_sig_lsb = -1
for sig in llink['SIGNALLIST_GALT']:
if sig['LLINDEX_GALT_LSB'] >= ll_sig_lsb:
ll_sig_lsb = sig['LLINDEX_GALT_LSB'] + sig['SIGWID'] - 1
                    ## Then let's turn the Valid into data
for sig in llink['SIGNALLIST_GALT']:
if sig['TYPE'] == 'valid':
sig['TYPE'] = 'signal_valid'
llink['WIDTH_GALT'] += 1
sig['LLINDEX_GALT_LSB'] = ll_sig_lsb+1
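                    ## Example (hypothetical): a 16-bit logic link whose last
                    ## data signal ends at LLINDEX 15 gives ll_sig_lsb == 15,
                    ## so the former valid is re-typed as a 'signal_valid' data
                    ## bit at LLINDEX 16 and the link width grows by one.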
# Reduce No Ready case to data only
############################################################
############################################################
# Calculate Channel Parameters
global_struct.g_info_print.append (" Logic Link Data Info\n")
enable_main = 1 # default, even for Gen1Only
enable_galt = 1
if configuration['CHAN_TYPE'] == 'Gen2Only' or configuration['CHAN_TYPE'] == 'Gen1Only' or configuration['CHAN_TYPE'] == 'AIBO':
enable_galt = 0
if configuration['GEN2_AS_GEN1_EN'] != True:
enable_galt = 0
if configuration['REPLICATED_STRUCT']:
configuration = calc_total_llink_data (configuration, 'RSTRUCT', 1)
configuration = calc_raw_1phydata (configuration, 'RSTRUCT', 1, 1)
configuration = calc_overhead_1phydata (configuration, 'RSTRUCT', 1)
else:
configuration = calc_total_llink_data (configuration, 'MAIN', enable_main)
configuration = calc_total_llink_data (configuration, 'GALT', enable_galt)
if configuration['CHAN_TYPE'] == 'Tiered':
if configuration['TX_PACKET_MAX_SIZE'] == 0:
configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] = configuration['TOTAL_TX_LLINK_DATA_MAIN']
configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] = configuration['TOTAL_TX_LLINK_DATA_MAIN']
else:
configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] = configuration['TX_PACKET_MAX_SIZE']
configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] = configuration['TX_PACKET_MAX_SIZE']
if configuration['RX_PACKET_MAX_SIZE'] == 0:
configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'] = configuration['TOTAL_RX_LLINK_DATA_MAIN']
configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] = configuration['TOTAL_RX_LLINK_DATA_MAIN']
else:
configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'] = configuration['RX_PACKET_MAX_SIZE']
configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] = configuration['RX_PACKET_MAX_SIZE']
global_struct.g_info_print.append (" Channel Info\n")
if configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] != configuration['CHAN_RX_RAW1PHY_DATA_MAIN']:
            global_struct.g_info_print.append ("    TX: Each channel in Tiered Mode is {} bits\n".format(configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
            global_struct.g_info_print.append ("    RX: Each channel in Tiered Mode is {} bits\n".format(configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
else:
            global_struct.g_info_print.append ("    {}: Each channel in Tiered Mode is {} bits\n".format('MAIN', configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_info_print.append (" {}: {}x channels\n".format('MAIN', configuration['NUM_CHAN']))
global_struct.g_info_print.append (" {}: Total AIB bits is {} bits\n".format('MAIN', configuration['NUM_CHAN'] * configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_info_print.append("\n")
global_struct.g_info_print.append (" TX: No DBI\n")
global_struct.g_info_print.append (" TX: Strobe is Recoverable or non-existent\n")
global_struct.g_info_print.append (" TX: Marker is Recoverable or non-existent\n")
global_struct.g_info_print.append (" TX: Total overhead bits across {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * 0))
global_struct.g_info_print.append (" TX: Total data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] - 0)))
global_struct.g_info_print.append("\n")
global_struct.g_info_print.append (" RX: No DBI\n")
global_struct.g_info_print.append (" RX: Strobe is Recoverable or non-existent\n")
global_struct.g_info_print.append (" RX: Marker is Recoverable or non-existent\n")
global_struct.g_info_print.append (" RX: Total overhead bits across {} channels is {}\n".format(configuration['NUM_CHAN'], configuration['NUM_CHAN'] * 0))
global_struct.g_info_print.append (" RX: Total data bits available {}\n".format(configuration['NUM_CHAN'] * (configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] - 0)))
global_struct.g_info_print.append("\n")
configuration['CHAN_TX_OVERHEAD_BITS_MAIN'] = 0
configuration['CHAN_RX_OVERHEAD_BITS_MAIN'] = 0
configuration['CHAN_TX_USEABLE1PHY_DATA_MAIN'] = configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] - configuration['CHAN_TX_OVERHEAD_BITS_MAIN'] ## CHAN_TX_USEABLE1PHY_DATA_MAIN
configuration['CHAN_RX_USEABLE1PHY_DATA_MAIN'] = configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] - configuration['CHAN_RX_OVERHEAD_BITS_MAIN'] ## CHAN_RX_USEABLE1PHY_DATA_MAIN
configuration['TOTAL_TX_USABLE_RAWDATA_MAIN'] = configuration['NUM_CHAN'] * configuration['CHAN_TX_USEABLE1PHY_DATA_MAIN'] ## TOTAL_TX_USABLE_RAWDATA_MAIN
configuration['TOTAL_RX_USABLE_RAWDATA_MAIN'] = configuration['NUM_CHAN'] * configuration['CHAN_RX_USEABLE1PHY_DATA_MAIN'] ## TOTAL_RX_USABLE_RAWDATA_MAIN
configuration['TOTAL_TX_ROUNDUP_BIT_MAIN'] = configuration['TOTAL_TX_USABLE_RAWDATA_MAIN'] - configuration['TOTAL_TX_LLINK_DATA_MAIN'] ## TOTAL_TX_ROUNDUP_BIT_MAIN, TOTAL_TX_ROUNDUP_BIT_GALT, TOTAL_TX_ROUNDUP_BIT_RSTRUCT defined here
configuration['TOTAL_RX_ROUNDUP_BIT_MAIN'] = configuration['TOTAL_RX_USABLE_RAWDATA_MAIN'] - configuration['TOTAL_RX_LLINK_DATA_MAIN'] ## TOTAL_RX_ROUNDUP_BIT_MAIN, TOTAL_RX_ROUNDUP_BIT_GALT, TOTAL_RX_ROUNDUP_BIT_RSTRUCT defined here
configuration['TX_SPARE_WIDTH'] = 0
configuration['RX_SPARE_WIDTH'] = 0
else:
configuration = calc_raw_1phydata (configuration, 'MAIN', enable_main, 1)
configuration = calc_overhead_1phydata (configuration, 'MAIN', enable_main)
configuration = calc_raw_1phydata (configuration, 'GALT', enable_galt, 0)
configuration = calc_overhead_1phydata (configuration, 'GALT', enable_galt)
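        ## Example for the Tiered branch above (hypothetical values): with
        ## TX_PACKET_MAX_SIZE == 0 and TOTAL_TX_LLINK_DATA_MAIN == 148, the
        ## single channel is sized to exactly 148 bits with zero overhead, so
        ## TOTAL_TX_ROUNDUP_BIT_MAIN works out to 0 and no spare bits exist.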
# Calculate Channel Parameters
############################################################
############################################################
# Perform Checks
if configuration['REPLICATED_STRUCT']:
check_configuration(configuration, 'RSTRUCT')
else:
if enable_main:
check_configuration(configuration, 'MAIN')
if enable_galt:
check_configuration(configuration, 'GALT')
return configuration
## calculate_channel_parameters
##########################################################################################
##########################################################################################
## calculate_bit_loc_repstruct
## Bit location calculation for Asymmetric mode (replicated struct, rstruct)
def calculate_bit_loc_repstruct(this_is_tx, configuration):
if this_is_tx:
localdir = "output"
otherdir = "input"
else:
localdir = "input"
otherdir = "output"
    local_index_wid = 0
    tx_print_index_lsb = 0
    tx_local_index_lsb = 0
config_raw1phy_beat = configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if this_is_tx else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']
config_raw1phy_data = configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if this_is_tx else configuration['CHAN_RX_RAW1PHY_DATA_MAIN']
## Define individual replicated struct push or credits
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
if llink['HASVALID']:
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
global_struct.g_concat_code_vector_master_tx.append ( sprint_verilog_logic_line (gen_llink_concat_pushbit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration)) )
global_struct.g_concat_code_vector_slave_rx.append ( sprint_verilog_logic_line (gen_llink_concat_pushbit (llink['NAME'],localdir)+"_r"+str(rstruct_iteration)) )
global_struct.g_concat_code_vector_master_tx.append ( "\n" )
global_struct.g_concat_code_vector_slave_rx.append ( "\n" )
else:
if llink['HASREADY']:
for rstruct_iteration in list (range (0, 4)):
global_struct.g_concat_code_vector_master_rx.append ( sprint_verilog_logic_line (gen_llink_concat_credit (llink['NAME'],localdir)+"_r"+str(rstruct_iteration)) )
global_struct.g_concat_code_vector_slave_tx.append ( sprint_verilog_logic_line (gen_llink_concat_credit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration)) )
global_struct.g_concat_code_vector_master_rx.append ( "\n" )
global_struct.g_concat_code_vector_slave_tx.append ( "\n" )
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
if llink['HASVALID']:
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
global_struct.g_concat_code_vector_master_tx.append ( sprint_verilog_assign (gen_llink_concat_pushbit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration), (gen_llink_concat_pushbit (llink['NAME'],otherdir)) ))
global_struct.g_concat_code_vector_master_tx.append ( "\n" )
global_struct.g_concat_code_vector_slave_rx.append ( " assign {:20} = ".format(gen_llink_concat_pushbit (llink['NAME'],localdir)))
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
global_struct.g_concat_code_vector_slave_rx.append ( "{:20}".format(gen_llink_concat_pushbit (llink['NAME'],localdir)+"_r"+str(rstruct_iteration)) )
if rstruct_iteration != configuration['RSTRUCT_MULTIPLY_FACTOR']-1:
global_struct.g_concat_code_vector_slave_rx.append ( "|\n {:20} ".format(""))
else:
global_struct.g_concat_code_vector_slave_rx.append ( ";\n")
global_struct.g_concat_code_vector_slave_rx.append ( "\n" )
else:
if llink['HASREADY']:
global_struct.g_concat_code_vector_master_rx.append (" // Asymmetric Credit Logic\n")
for rstruct_iteration in list (range (0, 4)):
if rstruct_iteration < configuration['RSTRUCT_MULTIPLY_FACTOR'] and localdir == "input":
global_struct.g_concat_code_vector_master_rx.append ( sprint_verilog_assign (gen_llink_concat_credit (llink['NAME'],localdir), gen_llink_concat_credit (llink['NAME'],localdir)+"_r"+str(rstruct_iteration), index1=gen_index_msb(1, rstruct_iteration) ))
else:
global_struct.g_concat_code_vector_master_rx.append ( sprint_verilog_assign (gen_llink_concat_credit (llink['NAME'],localdir), "1'b0", index1=gen_index_msb(1, rstruct_iteration) ))
global_struct.g_concat_code_vector_master_rx.append ( "\n" )
global_struct.g_concat_code_vector_slave_tx.append (" // Asymmetric Credit Logic\n")
for rstruct_iteration in list (range (0, 4)):
if rstruct_iteration < configuration['RSTRUCT_MULTIPLY_FACTOR']:
global_struct.g_concat_code_vector_slave_tx.append ( sprint_verilog_assign (gen_llink_concat_credit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration), (gen_llink_concat_credit (llink['NAME'],otherdir)), index2=gen_index_msb(1, rstruct_iteration) ))
#if rstruct_iteration == 0:
# global_struct.g_concat_code_vector_slave_tx.append ( sprint_verilog_assign (gen_llink_concat_credit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration), "|"+(gen_llink_concat_credit (llink['NAME'],otherdir)) ))
else:
global_struct.g_concat_code_vector_slave_tx.append ( sprint_verilog_assign (gen_llink_concat_credit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration), "1'b0") )
global_struct.g_concat_code_vector_slave_tx.append ( "\n" )
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
tx_print_index_lsb = rstruct_iteration * config_raw1phy_beat
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
if llink['HASVALID']:
tx_local_index_lsb += 1
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, gen_llink_concat_pushbit (llink['NAME'],otherdir)+"_r"+str(rstruct_iteration), wid1=1, lsb1=tx_print_index_lsb)
if (tx_print_index_lsb % config_raw1phy_beat) == 0:
tx_print_index_lsb += config_raw1phy_data - config_raw1phy_beat
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid' or sig['TYPE'] == 'ready':
continue
if sig['TYPE'] == 'rstruct_enable':
continue
llink_lsb = sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])
lsb2 = sig['LSB'] + (rstruct_iteration * sig['SIGWID'])
for unused1 in list (range (0, sig['SIGWID'])):
#lsb2=sig['LSB'] + (sig['SIGWID']*iteration)
#llink_lsb=sig['LLINDEX_MAIN_LSB'] + (llink['WIDTH_MAIN']*iteration)
tx_local_index_lsb += 1
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, sig['NAME'], wid1=1, lsb1=tx_print_index_lsb, lsb2=lsb2, llink_lsb=llink_lsb, llink_name=llink['NAME'])
if (tx_print_index_lsb % config_raw1phy_beat) == 0:
tx_print_index_lsb += config_raw1phy_data - config_raw1phy_beat
llink_lsb += 1
lsb2 += 1
else:
if llink['HASREADY']:
#global_struct.g_dv_vector_print.append ("assign {}_f = {};\n".format(gen_llink_concat_credit (llink['NAME'],localdir), tx_local_index_lsb))
tx_local_index_lsb += 1
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, gen_llink_concat_credit (llink['NAME'],localdir)+"_r"+str(rstruct_iteration), wid1=1, lsb1=tx_print_index_lsb)
if (tx_print_index_lsb % config_raw1phy_beat) == 0:
tx_print_index_lsb += config_raw1phy_data - config_raw1phy_beat
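                ## Example of the boundary skip above (hypothetical values):
                ## with config_raw1phy_beat == 40 and config_raw1phy_data == 80,
                ## once the print index reaches a multiple of 40 it advances by
                ## a further 80 - 40 == 40, skipping the rest of that channel's
                ## raw data before the next signal is mapped.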
## This fills in the unused data space
if this_is_tx:
local_index_wid = configuration['TOTAL_TX_ROUNDUP_BIT_RSTRUCT']
tx_local_index_lsb += local_index_wid
configuration['TX_SPARE_WIDTH'] = 0
else:
local_index_wid = configuration['TOTAL_RX_ROUNDUP_BIT_RSTRUCT']
tx_local_index_lsb += local_index_wid
configuration['RX_SPARE_WIDTH'] = 0
for unused1 in list (range (0, local_index_wid)):
tx_local_index_lsb += 1
        tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, "1'b0", wid1=1, lsb1=tx_print_index_lsb, lsb2=-1)
if (tx_print_index_lsb % config_raw1phy_beat) == 0:
tx_print_index_lsb += config_raw1phy_data - config_raw1phy_beat
# This is unused for rep struct
# ## This fills in the empty space after the data but before the end of the channel (e.g. DBI)
# local_index_wid = config_raw1phy_beat - tx_local_index_lsb
# tx_local_index_lsb += local_index_wid
#
# for unused1 in list (range (0, local_index_wid)):
# if global_struct.g_SIGNAL_DEBUG:
# print ("Fill in iteration {} for index_lsb {}".format(unused1, tx_local_index_lsb))
# tx_local_index_lsb += 1
# tx_print_index_lsb= print_aib_mapping_text(configuration, localdir,"1'b0", wid1=1, lsb1=tx_print_index_lsb, lsb2=-1)
# if (tx_print_index_lsb % config_raw1phy_beat) == 0:
# tx_print_index_lsb += config_raw1phy_data - config_raw1phy_beat
#
# # local_lsb1 = print_aib_assign_text_check_for_aib_bit (configuration, local_lsb1, use_tx, sysv)
    ## The print vectors were messed up by bit blasting. We'll correct them here.
use_tx = True if localdir == "output" else False
if use_tx:
#global_struct.g_llink_vector_print_tx.clear()
del global_struct.g_llink_vector_print_tx [:]
else:
#global_struct.g_llink_vector_print_rx.clear()
del global_struct.g_llink_vector_print_rx [:]
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
tx_print_index_lsb = rstruct_iteration * config_raw1phy_beat
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
use_tx = True if localdir == "output" else False
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid' or sig['TYPE'] == 'ready':
continue
if use_tx:
if llink_lsb != -1:
global_struct.g_llink_vector_print_tx.append (" assign {0:20} {1:13} = {2:20} {3:13}\n".format(gen_llink_concat_fifoname (llink['NAME'],"input" ), gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])), sig['NAME'], gen_index_msb (sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID']))))
else:
if llink_lsb != -1:
global_struct.g_llink_vector_print_rx.append (" assign {0:20} {1:13} = {2:20} {3:13}\n".format(gen_llink_concat_fifoname (llink['NAME'],"output"), gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])), sig['NAME'], gen_index_msb (sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID']))))
return configuration
## calculate_bit_loc_repstruct
##########################################################################################
##########################################################################################
## calculate_bit_loc_fixed_alloc
## Calculate fixed allocation bit locations
def calculate_bit_loc_fixed_alloc(this_is_tx, configuration):
if this_is_tx:
localdir = "output"
otherdir = "input"
else:
localdir = "input"
otherdir = "output"
    local_index_wid = 0
    tx_print_index_lsb = 0
    rx_print_index_lsb = 0
    tx_local_index_lsb = 0
    rx_local_index_lsb = 0
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
if llink['HASVALID']:
local_index_wid = 1
llink['PUSH_RAW_INDEX_MAIN'] = gen_index_msb(local_index_wid, tx_local_index_lsb)
llink['PUSH_RAW_LSB_MAIN'] = tx_local_index_lsb
tx_local_index_lsb += local_index_wid
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, gen_llink_concat_pushbit (llink['NAME'],otherdir), wid1=1, lsb1=tx_print_index_lsb)
local_index_wid = llink['WIDTH_MAIN']
llink['DATA_RAW_INDEX_MAIN'] = gen_index_msb(local_index_wid, tx_local_index_lsb)
llink['DATA_RAW_LSB_MAIN'] = tx_local_index_lsb
tx_local_index_lsb += local_index_wid
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid' or sig['TYPE'] == 'ready':
continue
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, sig['NAME'], wid1=sig['SIGWID'], lsb1=tx_print_index_lsb, lsb2=sig['LSB'], llink_lsb=sig['LLINDEX_MAIN_LSB'], llink_name=llink['NAME'])
else:
if llink['HASREADY']:
local_index_wid = 1
llink['CREDIT_RAW_INDEX_MAIN'] = gen_index_msb(local_index_wid, tx_local_index_lsb)
llink['CREDIT_RAW_LSB_MAIN'] = tx_local_index_lsb
#global_struct.g_dv_vector_print.append ("assign {}_f = {};\n".format(gen_llink_concat_credit (llink['NAME'],localdir), tx_local_index_lsb))
tx_local_index_lsb += local_index_wid
tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, gen_llink_concat_credit (llink['NAME'],localdir), wid1=1, lsb1=tx_print_index_lsb)
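    ## Example layout (hypothetical): an output link "foo" with
    ## WIDTH_MAIN == 8 starting at raw lsb 0 places its pushbit at bit 0 and
    ## its data at bits [8:1]; an input link with ready then places its credit
    ## at the next raw bit.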
    if this_is_tx:
        local_index_wid = configuration['TOTAL_TX_ROUNDUP_BIT_MAIN']
        tx_local_index_lsb += local_index_wid
        if configuration['TOTAL_TX_ROUNDUP_BIT_MAIN']:
            if global_struct.USE_SPARE_VECTOR:
                tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, "spare_"+localdir, wid1=configuration['TOTAL_TX_ROUNDUP_BIT_MAIN'], lsb1=tx_print_index_lsb, lsb2=0, llink_lsb=0, llink_name="spare")
                configuration['TX_SPARE_WIDTH'] = configuration['TOTAL_TX_ROUNDUP_BIT_MAIN']
            else:
                tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, "1'b0", wid1=configuration['TOTAL_TX_ROUNDUP_BIT_MAIN'], lsb1=tx_print_index_lsb, lsb2=-1)
                configuration['TX_SPARE_WIDTH'] = 0
    else:
        local_index_wid = configuration['TOTAL_RX_ROUNDUP_BIT_MAIN']
        tx_local_index_lsb += local_index_wid
        if configuration['TOTAL_RX_ROUNDUP_BIT_MAIN']:
            if global_struct.USE_SPARE_VECTOR:
                tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, "spare_"+localdir, wid1=configuration['TOTAL_RX_ROUNDUP_BIT_MAIN'], lsb1=tx_print_index_lsb, lsb2=0, llink_lsb=0, llink_name="spare")
                configuration['RX_SPARE_WIDTH'] = configuration['TOTAL_RX_ROUNDUP_BIT_MAIN']
            else:
                tx_print_index_lsb = print_aib_mapping_text(configuration, localdir, "1'b0", wid1=configuration['TOTAL_RX_ROUNDUP_BIT_MAIN'], lsb1=tx_print_index_lsb, lsb2=-1)
                configuration['RX_SPARE_WIDTH'] = 0
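        ## Example (hypothetical): with TOTAL_RX_ROUNDUP_BIT_MAIN == 5 and
        ## global_struct.USE_SPARE_VECTOR set, the 5 leftover raw bits map to a
        ## 5-bit spare vector and RX_SPARE_WIDTH becomes 5; otherwise they are
        ## tied to 1'b0 and RX_SPARE_WIDTH stays 0.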
return configuration
## calculate_bit_loc_fixed_alloc
##########################################################################################
##########################################################################################
## make_name_file
## Generate name files
def make_name_file(configuration):
for direction in ['master', 'slave']:
name_file_name = "{}_{}_name".format(configuration['MODULE'], direction)
file_name = open("{}/{}.sv".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
print_verilog_header(file_name)
file_name.write("module {} (\n".format(name_file_name))
        first_line = True
# List User Signals
for llink in configuration['LL_LIST']:
#if (llink['WIDTH_GALT'] != 0) and (llink['WIDTH_MAIN'] != 0):
# file_name.write("\n // {0} channel\n".format(llink['NAME']))
# for sig_gen2 in llink['SIGNALLIST_MAIN']:
# found_gen1_match = 0;
# for sig_gen1 in llink['SIGNALLIST_GALT']:
# if sig_gen2['NAME'] == sig_gen1['NAME']:
# found_gen1_match = 1
# localdir = gen_direction(name_file_name, sig_gen2['DIR'])
# print_verilog_io_line(file_name, localdir, sig_gen2['NAME'], index=gen_index_msb(sig_gen2['SIGWID'] + sig_gen1['SIGWID'],sig_gen1['LSB'], sysv=False))
# if found_gen1_match == 0:
# localdir = gen_direction(name_file_name, sig_gen2['DIR'])
# print_verilog_io_line(file_name, localdir, sig_gen2['NAME'], index=gen_index_msb(sig_gen2['SIGWID'],sig_gen2['LSB'], sysv=False))
#
#else:
file_name.write("\n // {0} channel\n".format(llink['NAME']))
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == "rstruct_enable" and direction == 'master' : ## Drop the user_enable if in master (slave only)
continue
localdir = gen_direction(name_file_name, sig['DIR'])
print_verilog_io_line(file_name, localdir, sig['NAME'], index=gen_index_msb(sig['SIGWID'] * configuration['RSTRUCT_MULTIPLY_FACTOR'],sig['LSB'], sysv=False))
# List Logic Link Signals
file_name.write("\n // Logic Link Interfaces\n")
for llink in configuration['LL_LIST']:
if first_line:
first_line = False
else:
file_name.write("\n")
localdir = gen_direction(name_file_name, llink['DIR'], True)
if llink['HASVALID']:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_user_valid (llink['NAME'] ))
if localdir == 'output':
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_user_fifoname (llink['NAME'],localdir), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
else:
if configuration['REPLICATED_STRUCT']:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_user_fifoname (llink['NAME'],localdir), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
else:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_user_fifoname (llink['NAME'],localdir), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
if llink['HASREADY']:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], False), gen_llink_user_ready (llink['NAME'] ))
file_name.write("\n")
if llink['HASVALID_NOREADY_NOREP']:
print_verilog_io_line(file_name, "input", "rx_online")
print_verilog_io_line(file_name, "input", "m_gen2_mode", comma=False)
file_name.write("\n);\n")
file_name.write("\n // Connect Data\n")
for llink in configuration['LL_LIST']:
file_name.write("\n")
            localdir = gen_direction(name_file_name, llink['DIR'], True)
if localdir == 'output':
if llink['HASVALID']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid':
print_verilog_assign(file_name, gen_llink_user_valid (llink['NAME']), sig['NAME'])
else:
print_verilog_assign(file_name, gen_llink_user_valid (llink['NAME']), "1'b1", comment=gen_llink_user_valid (llink['NAME']) + " is unused" )
if llink['HASREADY']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'ready':
print_verilog_assign(file_name, sig['NAME'], gen_llink_user_ready (llink['NAME']))
#else:
# file_name.write(" // "+ gen_llink_user_ready (llink['NAME']) +" is unused\n")
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'signal' or sig['TYPE'] == 'signal_valid' or sig['TYPE'] == 'bus':
print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), sig['NAME'], index1=gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])), index2=gen_index_msb(sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID'])))
#if sig['TYPE'] == 'rstruct_enable' and localdir == 'input':
# print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), sig['NAME'], index1=gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + rstruct_iteration + (configuration['RSTRUCT_MULTIPLY_FACTOR'] * llink['WIDTH_MAIN'])), index2=gen_index_msb(sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID'])))
#print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), "'0", index1=gen_index_msb(llink['WIDTH_MAIN']-llink['WIDTH_GALT'], llink['WIDTH_GALT']))
#file_name.write(" assign "+gen_llink_user_fifoname (llink['NAME'], localdir)+" = m_gen2_mode ? "+gen_llink_user_fifoname (llink['NAME'], localdir)+" : "+gen_llink_user_fifoname (llink['NAME'], localdir)+";\n")
else: # if llink['DIR'] == 'output':
if llink['HASVALID']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid':
print_verilog_assign(file_name, sig['NAME'], gen_llink_user_valid (llink['NAME']))
else:
file_name.write(" // "+ gen_llink_user_valid (llink['NAME']) +" is unused\n")
if llink['HASREADY']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'ready':
print_verilog_assign(file_name, gen_llink_user_ready (llink['NAME']), sig['NAME'])
#else:
# print_verilog_assign(file_name, gen_llink_user_ready (llink['NAME']), "1'b1", comment=gen_llink_user_ready (llink['NAME']) + " is unused" )
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'signal' or sig['TYPE'] == 'bus':
print_verilog_assign(file_name, sig['NAME'], gen_llink_user_fifoname (llink['NAME'], localdir), index1=gen_index_msb(sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID'])), index2=gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])))
elif sig['TYPE'] == 'signal_valid':
print_verilog_assign(file_name, sig['NAME'], "rx_online & " + gen_llink_user_fifoname (llink['NAME'], localdir), index1=gen_index_msb(sig['SIGWID'], sig['LSB'] + (rstruct_iteration * sig['SIGWID'])), index2=gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'] + (rstruct_iteration * llink['WIDTH_MAIN'])))
if sig['TYPE'] == 'rstruct_enable' and localdir == 'input':
print_verilog_assign(file_name, sig['NAME'], gen_llink_user_fifoname (llink['NAME'], localdir), index1=gen_index_msb(sig['SIGWID'], sig['LSB'] + rstruct_iteration) , index2=gen_index_msb (sig['SIGWID'], (sig['LLINDEX_MAIN_LSB'] * configuration['RSTRUCT_MULTIPLY_FACTOR']) + rstruct_iteration))
#### for sig in llink['SIGNALLIST_MAIN']:
#### if sig['TYPE'] == 'signal' or sig['TYPE'] == 'bus':
#### print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), sig['NAME'], index1=sig['LLINDEX_MAIN'], index2=gen_index_msb(sig['SIGWID'],sig['LSB']))
####
#### for sig in llink['SIGNALLIST_GALT']:
#### if sig['TYPE'] == 'signal' or sig['TYPE'] == 'bus':
#### print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), sig['NAME'], index1=sig['LLINDEX_GALT'], index2=gen_index_msb(sig['SIGWID'],sig['LSB']))
#### print_verilog_assign(file_name, gen_llink_user_fifoname (llink['NAME'], localdir), "'0", index1=gen_index_msb(llink['WIDTH_MAIN']-llink['WIDTH_GALT'], llink['WIDTH_GALT']))
#### file_name.write(" assign "+gen_llink_user_fifoname (llink['NAME'], localdir)+" = m_gen2_mode ? "+gen_llink_user_fifoname (llink['NAME'], localdir)+" : "+gen_llink_user_fifoname (llink['NAME'], localdir)+";\n")
#### else: # if llink['DIR'] == 'output':
####
#### if llink['HASVALID']:
#### for sig in llink['SIGNALLIST_MAIN']:
#### if sig['TYPE'] == 'valid':
#### print_verilog_assign(file_name, sig['NAME'], gen_llink_user_valid (llink['NAME']))
#### else:
#### file_name.write(" // "+ gen_llink_user_valid (llink['NAME']) +" is unused\n")
####
#### if llink['HASREADY']:
#### for sig in llink['SIGNALLIST_MAIN']:
#### if sig['TYPE'] == 'ready':
#### print_verilog_assign(file_name, gen_llink_user_ready (llink['NAME']), sig['NAME'])
#### else:
#### print_verilog_assign(file_name, gen_llink_user_ready (llink['NAME']), "1'b1", comment=gen_llink_user_ready (llink['NAME']) + " is unused" )
####
#### for sig in llink['SIGNALLIST_MAIN']:
#### if sig['TYPE'] == 'signal' or sig['TYPE'] == 'bus':
#### print_verilog_assign(file_name, sig['NAME'], gen_llink_user_fifoname (llink['NAME'], localdir), index1=gen_index_msb(sig['SIGWID'],sig['LSB']), index2=sig['LLINDEX_MAIN'])
####
#### for sig in llink['SIGNALLIST_GALT']:
#### if sig['TYPE'] == 'signal' or sig['TYPE'] == 'bus':
#### print_verilog_assign(file_name, sig['NAME'], gen_llink_user_fifoname (llink['NAME'], localdir), index1=gen_index_msb(sig['SIGWID'],sig['LSB']), index2=sig['LLINDEX_GALT'])
file_name.write("\n")
file_name.write("endmodule\n")
file_name.close()
return
## make_name_file
##########################################################################################
##########################################################################################
## make_concat_file
## Generate concat file
def make_concat_file(configuration):
for direction in ['master', 'slave']:
name_file_name = "{}_{}_concat".format(configuration['MODULE'], direction)
file_name = open("{}/{}.sv".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
print_verilog_header(file_name)
file_name.write("module {} (\n".format(name_file_name))
# Logic Link Signaling
file_name.write("\n// Data from Logic Links\n")
        if direction == 'master':
            localdir = 'output'
        else:
            localdir = 'input'
for llink in configuration['LL_LIST']:
if configuration['REPLICATED_STRUCT'] and gen_direction(name_file_name, llink['DIR'], False) == "input":
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_concat_fifoname (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
else:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_concat_fifoname (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
print_verilog_io_line(file_name, "output", gen_llink_concat_ovrd (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)))
if llink['HASVALID']:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], True), gen_llink_concat_pushbit (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)) )
if llink['HASREADY']:
if configuration['REPLICATED_STRUCT']:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], False), gen_llink_concat_credit (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)), gen_index_msb(4, sysv=False))
else:
print_verilog_io_line(file_name, gen_direction(name_file_name, llink['DIR'], False), gen_llink_concat_credit (llink['NAME'],gen_direction(name_file_name, llink['DIR'], True)))
file_name.write("\n")
file_name.write("// PHY Interconnect\n")
# Logic Link Inputs
for phy in range(configuration['NUM_CHAN']):
print_verilog_io_line(file_name, "output", "tx_phy{}".format(phy), gen_index_msb(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], sysv=False))
print_verilog_io_line(file_name, "input", "rx_phy{}".format(phy), gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], sysv=False))
file_name.write("\n")
print_verilog_io_line(file_name, "input", "clk_wr")
print_verilog_io_line(file_name, "input", "clk_rd")
print_verilog_io_line(file_name, "input", "rst_wr_n")
print_verilog_io_line(file_name, "input", "rst_rd_n")
file_name.write("\n")
print_verilog_io_line(file_name, "input", "m_gen2_mode")
print_verilog_io_line(file_name, "input", "tx_online")
file_name.write("\n")
#print_verilog_io_line(file_name, "output", "rx_stb_userbit", gen_index_msb(configuration['NUM_CHAN'], sysv=False))
#print_verilog_io_line(file_name, "output", "rx_mrk_userbit", gen_index_msb(configuration['NUM_CHAN'], sysv=False))
print_verilog_io_line(file_name, "input", "tx_stb_userbit")
print_verilog_io_line(file_name, "input", "tx_mrk_userbit", gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'], sysv=False), comma=False)
file_name.write("\n);\n")
file_name.write("\n")
if (configuration['TX_ENABLE_PACKETIZATION'] and direction == 'master') or (configuration['RX_ENABLE_PACKETIZATION'] and direction == 'slave') :
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// TX Packet Section")
file_name.write("\n")
if direction == 'master':
loc_packet_info = global_struct.g_tx_packet_info
loc_packet_code_req = global_struct.g_packet_code_master_req_tx
loc_packet_code_data = global_struct.g_packet_code_master_data_tx
else:
loc_packet_info = global_struct.g_rx_packet_info
loc_packet_code_req = global_struct.g_packet_code_slave_req_tx
loc_packet_code_data = global_struct.g_packet_code_slave_data_tx
print_verilog_logic_line (file_name , "tx_requestor" , index = gen_index_msb ( configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER'] , sysv=False) )
print_verilog_logic_line (file_name , "tx_grant_onehotish" , index = gen_index_msb ( configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER'] , sysv=False) )
print_verilog_logic_line (file_name , "tx_grant_enc_data" , index = gen_index_msb ( configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'] , sysv=False) )
print_verilog_logic_line (file_name , "tx_packet_data" , index = gen_index_msb ( configuration['TX_PACKET_DATAWIDTH'] if direction == 'master' else configuration['RX_PACKET_DATAWIDTH'] , sysv=False) )
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
print_verilog_logic_line(file_name, "tx_packet_data{}".format(enc_index), index = gen_index_msb ( configuration['TX_PACKET_DATAWIDTH'] if direction == 'master' else configuration['RX_PACKET_DATAWIDTH'] , sysv=False) )
file_name.write("\n")
buff_max_value = 0
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
for packet_chunk in entire_packet['LIST']:
if global_struct.g_PACKET_DEBUG:
print ("packet_chunk ***************")
pprint.pprint (packet_chunk)
if packet_chunk['FIRST_PKT'] == False:
print_verilog_logic_line (file_name , gen_llink_concat_pushbit (packet_chunk['CHUNK_NAME'],"input"))
buff_max_value += 1
if buff_max_value>0:
file_name.write("\n")
if int(configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER']) == 1:
file_name.write(" // Corner case of 1 packet, so no meaninful encoding or arbitration\n")
file_name.write(" // Removing round robin arbiter, replacing with single vector.\n")
file_name.write(" assign tx_grant_onehotish = tx_requestor;\n")
file_name.write(" assign tx_grant_enc_data = 1'd0;\n")
else:
file_name.write(" rrarb #(.REQUESTORS({})) rrarb_itx\n".format (int(configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER']) ))
file_name.write(" (// Outputs\n")
file_name.write(" .grant (tx_grant_onehotish),\n")
file_name.write(" // Inputs\n")
file_name.write(" .clk_core (clk_wr),\n")
file_name.write(" .rst_core_n (rst_wr_n),\n")
file_name.write(" .requestor (tx_requestor),\n")
file_name.write(" .advance (1'b1));\n")
file_name.write("\n")
file_name.write(" // This converts from one-hot-ish rrarb output to encoded value\n")
file_name.write(" always_comb\n")
file_name.write(" begin\n")
file_name.write(" case(tx_grant_onehotish)\n")
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH']) == 0:
file_name.write(" {0:2}'b{1:<4} : tx_grant_enc_data ={2:2}'d{1:<4};\n".format(1, 0, 1, 0))
else:
file_name.write(" {0:2}'b{1:0{0}b} : tx_grant_enc_data = {2:2}'d{3:<4};\n".format(configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER'] , 2**enc_index,
configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'] , enc_index ))
file_name.write(" {0:{1}} : tx_grant_enc_data = {2:2}'d{3:<4};\n".format("default", 4+(configuration['TX_PACKET_NUMBER'] if direction == 'master' else configuration['RX_PACKET_NUMBER']) ,
configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'], 0 ))
file_name.write(" endcase\n")
file_name.write(" end\n")
file_name.write("\n")
file_name.write(" // This assigns the data portion of packetizing\n")
file_name.write(" always_comb\n")
file_name.write(" begin\n")
file_name.write(" case(tx_grant_enc_data)\n")
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH']) == 0:
file_name.write(" {0:2}'d{1:<4} : tx_packet_data = tx_packet_data{1};\n".format(1, 0))
else:
file_name.write(" {0:2}'d{1:<4} : tx_packet_data = tx_packet_data{1};\n".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'], enc_index))
file_name.write(" default : tx_packet_data = tx_packet_data{1};\n".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'], enc_index))
file_name.write(" endcase\n")
file_name.write(" end\n")
file_name.write("\n")
file_name.write(" // This controls if we can pop the TX FIFO\n")
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
file_name.write(" assign "+gen_llink_concat_ovrd (llink['NAME'],"input")+" = ")
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
for packet_chunk in entire_packet['LIST']:
if llink['NAME'] == packet_chunk['NAME'] and packet_chunk['LAST_PKT'] == True:
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH']) == 0:
file_name.write("(tx_grant_enc_data == {}'d{}) ? 1'b0 : ".format(1, 0))
else:
file_name.write("(tx_grant_enc_data == {}'d{}) ? 1'b0 : ".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['ENC']))
file_name.write("1'b1;\n")
file_name.write("\n")
file_name.write(" // Request to Arbitrate\n")
for string in loc_packet_code_req:
file_name.write (string)
file_name.write("\n")
add_dly_module = False
for entire_packet in loc_packet_info:
for packet_chunk in entire_packet['LIST']:
if (packet_chunk['PKT_INDEX'] != 0):
add_dly_module = True
if add_dly_module:
file_name.write(" // This adds delay in secondary packets to prevent arbitration corner case\n")
file_name.write(" always_ff @(posedge clk_wr or negedge rst_wr_n)\n")
file_name.write(" if (~rst_wr_n)\n")
file_name.write(" begin\n")
for entire_packet in loc_packet_info:
for packet_chunk in entire_packet['LIST']:
if packet_chunk['PKT_INDEX'] != 0:
file_name.write(" {:20}<= 1'b0;\n".format (gen_llink_concat_pushbit (packet_chunk['CHUNK_NAME'],"input")))
file_name.write(" end\n")
file_name.write(" else\n")
file_name.write(" begin\n")
for entire_packet in loc_packet_info:
for packet_chunk in entire_packet['LIST']:
if packet_chunk['PKT_INDEX'] != 0:
##file_name.write(" {:20}<= {:20};\n".format (gen_llink_concat_pushbit (packet_chunk['CHUNK_NAME'],"input"), gen_llink_concat_pushbit (packet_chunk['NAME'] + str ((packet_chunk['PKT_INDEX']-1) if packet_chunk['PKT_INDEX'] > 1 else "") ,"input")))
file_name.write(" {:20}<= (tx_grant_enc_data == {}'d{}) & {};\n".format(gen_llink_concat_pushbit (packet_chunk['CHUNK_NAME'],"input"), configuration['TX_PACKET_ID_WIDTH'] if direction == 'master' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['ENC']-1, gen_llink_concat_pushbit (packet_chunk['NAME'] + (str ("{0:02d}".format(packet_chunk['PKT_INDEX']-1)) if packet_chunk['PKT_INDEX'] > 1 else "") ,"input")))
file_name.write(" end\n")
file_name.write("\n")
file_name.write(" // Data to Transmit\n")
for string in loc_packet_code_data:
file_name.write (string)
file_name.write("// TX Packet Section\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
else: ## No packetizing
# Logic Link Signaling
            if direction == 'master':
                localdir = 'output'
            else:
                localdir = 'input'
file_name.write("// No TX Packetization, so tie off packetization signals\n")
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
print_verilog_assign(file_name, gen_llink_concat_ovrd (llink['NAME'],"input"), "1'b0")
file_name.write("\n")
if (configuration['TX_ENABLE_PACKETIZATION'] and direction == 'slave') or (configuration['RX_ENABLE_PACKETIZATION'] and direction == 'master') :
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// RX Packet Section\n")
file_name.write("\n")
if direction == 'master':
loc_packet_info = global_struct.g_rx_packet_info
else:
loc_packet_info = global_struct.g_tx_packet_info
print_verilog_logic_line (file_name , "rx_grant_enc_data" , index = gen_index_msb ( configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'] , sysv=False) )
print_verilog_logic_line (file_name , "rx_packet_data" , index = gen_index_msb ( configuration['TX_PACKET_DATAWIDTH'] if direction == 'slave' else configuration['RX_PACKET_DATAWIDTH'] , sysv=False) )
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH']) == 0:
file_name.write("\n")
file_name.write(" // Corner case of 1 packet, so no meaninful encoding\n")
file_name.write(" assign rx_grant_enc_data = 1'd0;\n")
            ## First, we'll check whether we need any buffering and the maximum buffering needed
rx_buffer_size = 0
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
for packet_chunk in entire_packet['LIST']:
if packet_chunk['PKT_INDEX'] > rx_buffer_size:
rx_buffer_size = packet_chunk['PKT_INDEX']
if rx_buffer_size != 0:
for buff in range(rx_buffer_size):
print_verilog_logic_line (file_name , "rx_grant_enc_dly{}_reg".format(buff), index = gen_index_msb ( configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'] , sysv=False) )
for buff in range(rx_buffer_size):
print_verilog_logic_line (file_name , "rx_buffer_dly{}_reg".format(buff), index = gen_index_msb ( configuration['TX_PACKET_DATAWIDTH'] if direction == 'slave' else configuration['RX_PACKET_DATAWIDTH'] , sysv=False) )
file_name.write("\n")
file_name.write(" // This controls if we override the RX Push Bit (if the signal is 0, that is only time Push Bit could be valid)\n")
for llink in configuration['LL_LIST']:
if llink['DIR'] != localdir:
file_name.write(" assign {:20} = ".format(gen_llink_concat_ovrd (llink['NAME'],"output")))
for enc_index,entire_packet in enumerate(sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False)):
for packet_chunk in entire_packet['LIST']:
if llink['NAME'] == packet_chunk['NAME'] and packet_chunk['LAST_PKT'] == True:
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH']) == 0:
file_name.write("(rx_grant_enc_data == {}'d{}) ? 1'b0 : ".format(1, 0))
else:
file_name.write("(rx_grant_enc_data == {}'d{}) ? 1'b0 : ".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['ENC']))
file_name.write("1'b1;\n")
file_name.write("\n")
            ## These dictionaries accumulate the per-logic-link push-bit equations
rx_data_dict = dict()
rx_pushbit_dict = dict()
for entire_packet in sorted (loc_packet_info, key=itemgetter('ENC')):
for packet_chunk in entire_packet['LIST']:
delay_value = packet_chunk['LAST_PKT_INDEX']-packet_chunk['PKT_INDEX']-1
if packet_chunk['NAME'] in rx_pushbit_dict:
if packet_chunk['HASVALID']:
string = rx_pushbit_dict [packet_chunk['NAME']] + " ||\n "
if delay_value == -1: # -1 means live value
string += " ((rx_grant_enc_data == {}'d{}) &&".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['LAST_PKT_ENC'])
string += " ({} [{}] == 1'b1))".format("rx_packet_data", packet_chunk['PUSHBIT_LOC'])
else:
string += " ((rx_grant_enc_data == {}'d{}) &&".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['LAST_PKT_ENC'])
string += " (rx_grant_enc_dly{}_reg == {}'d{}) &&".format(delay_value, configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['ENC'])
string += " ({} [{}] == 1'b1))".format("rx_buffer_dly{}_reg".format(delay_value), packet_chunk['PUSHBIT_LOC'])
rx_pushbit_dict [packet_chunk['NAME']] = string
                    else: ## New entry
if packet_chunk['HASVALID']:
string = ""
if (configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH']) == 0:
string += " ((rx_grant_enc_data == {}'d{}) &&".format(1, packet_chunk['LAST_PKT_ENC'])
string += " ({} [{}] == 1'b1))".format("rx_packet_data", packet_chunk['PUSHBIT_LOC'] )
elif delay_value == -1: # -1 means live value
string += " ((rx_grant_enc_data == {}'d{}) &&".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['LAST_PKT_ENC'])
string += " ({} [{}] == 1'b1))".format("rx_packet_data", packet_chunk['PUSHBIT_LOC'] )
else:
string += " ((rx_grant_enc_data == {}'d{}) &&".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['LAST_PKT_ENC'])
string += " (rx_grant_enc_dly{}_reg == {}'d{}) &&".format(delay_value, configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], packet_chunk['ENC'])
string += " ({} [{}] == 1'b1))".format("rx_buffer_dly{}_reg".format(delay_value), packet_chunk['PUSHBIT_LOC'])
rx_pushbit_dict [packet_chunk['NAME']] = string
if global_struct.g_PACKET_DEBUG:
print ("before // This is RX Push Bit")
pprint.pprint (rx_pushbit_dict)
file_name.write(" // This is RX Push Bit\n")
for rx_pushbit_key in sorted (rx_pushbit_dict.keys()) :
file_name.write(" assign {:20} ={};\n".format(gen_llink_concat_pushbit (rx_pushbit_key,"output"), rx_pushbit_dict[rx_pushbit_key]))
file_name.write("\n")
## This section builds a dict of LL Data, with possibly multiple sources.
            ## It is used for the RX Data assigns later on.
for entire_packet in sorted (loc_packet_info, key=itemgetter('SIZE','PKT_NAME'), reverse=False):
for packet_chunk in entire_packet['LIST']:
rx_data_key = "{}".format(packet_chunk['CHUNK_NAME'])
if rx_data_key in rx_data_dict:
rx_data_dict[rx_data_key] ['ENC'] = rx_data_dict[rx_data_key] ['ENC']+"_"+str(entire_packet['ENC'])
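                        ## ENC accumulates every packet encoding that can carry this chunk,
                        ## joined with "_" (e.g. "0_2"); it is split back into a list below.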
else: ## New entry
rx_element_dict = dict()
rx_element_dict ['ENC'] = str(entire_packet['ENC'])
rx_element_dict ['NAME'] = packet_chunk['NAME']
rx_element_dict ['WIDTH'] = packet_chunk['WIDTH']
rx_element_dict ['LLINK_LSB'] = packet_chunk['LLINK_LSB']
rx_element_dict ['DELAY'] = packet_chunk['LAST_PKT_INDEX'] - packet_chunk['PKT_INDEX']-1 # -1 means live
rx_element_dict ['FIFODATA_LOC'] = packet_chunk['FIFODATA_LOC']
rx_data_dict[rx_data_key] = rx_element_dict
if global_struct.g_PACKET_DEBUG:
print ("before // This is RX Data")
pprint.pprint (rx_data_dict)
file_name.write(" // This is RX Data\n")
for rx_data_key in sorted (rx_data_dict.keys()) :
                enc_list = rx_data_dict[rx_data_key]['ENC'].split("_")
                total_encoding = len(enc_list)-1
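                ## total_encoding is the index of the last encoding: each earlier encoding gets
                ## a "cond ? value :" mux term and the final one becomes the default operand.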
if rx_data_dict[rx_data_key]['WIDTH'] > 0:
file_name.write(" assign {:20} {:13} =".format(gen_llink_concat_fifoname (rx_data_dict[rx_data_key]['NAME'],"output") , gen_index_msb (rx_data_dict[rx_data_key]['WIDTH'], rx_data_dict[rx_data_key]['LLINK_LSB']) ))
for encoding_index, encoding in enumerate(enc_list):
if total_encoding > 0:
if encoding_index != total_encoding:
if (rx_data_dict[rx_data_key]['DELAY'] == -1):
file_name.write(" (rx_grant_enc_data == {}'d{}) ?".format(configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], encoding))
else:
file_name.write(" (rx_grant_enc_dly{}_reg == {}'d{}) ?".format(rx_data_dict[rx_data_key]['DELAY'], configuration['TX_PACKET_ID_WIDTH'] if direction == 'slave' else configuration['RX_PACKET_ID_WIDTH'], encoding))
else:
file_name.write(" ")
if rx_data_dict[rx_data_key]['DELAY'] == -1: # -1 means live value
file_name.write(" {:20} ".format("rx_packet_data" ))
else:
file_name.write(" {:20} ".format("rx_buffer_dly{}_reg".format(rx_data_dict[rx_data_key]['DELAY']) ))
file_name.write("{:13}".format(gen_index_msb(rx_data_dict[rx_data_key]['WIDTH'], rx_data_dict[rx_data_key]['FIFODATA_LOC']) ))
if total_encoding > 0:
if encoding_index != total_encoding:
file_name.write(" :\n ")
else:
file_name.write(" ;\n")
else:
file_name.write(";\n")
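            ## Emit the delay pipeline: stage 0 samples the live grant encoding and packet
            ## data, and each later stage shifts from the previous one.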
if rx_buffer_size != 0:
file_name.write("\n")
file_name.write(" // This is Buffer and Encoding Delay\n")
file_name.write(" always_ff @(posedge clk_wr or negedge rst_wr_n)\n")
file_name.write(" if (~rst_wr_n)\n")
file_name.write(" begin\n")
for buff in range(rx_buffer_size):
print_verilog_regnb (file_name , "rx_grant_enc_dly{}_reg".format(buff) , "'0")
for buff in range(rx_buffer_size):
print_verilog_regnb (file_name , "rx_buffer_dly{}_reg".format(buff) , "'0")
file_name.write(" end\n")
file_name.write(" else\n")
file_name.write(" begin\n")
for buff in range(rx_buffer_size):
if buff == 0:
print_verilog_regnb (file_name , "rx_grant_enc_dly{}_reg".format(buff) , "rx_grant_enc_data")
else:
print_verilog_regnb (file_name , "rx_grant_enc_dly{}_reg".format(buff) , "rx_grant_enc_dly{}_reg".format(buff-1))
for buff in range(rx_buffer_size):
if buff == 0:
print_verilog_regnb (file_name , "rx_buffer_dly{}_reg".format(buff) , "rx_packet_data")
else:
print_verilog_regnb (file_name , "rx_buffer_dly{}_reg".format(buff) , "rx_buffer_dly{}_reg".format(buff-1))
file_name.write(" end\n")
file_name.write("\n")
file_name.write("// RX Packet Section\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
else:
            localdir = 'input' if direction == 'master' else 'output'
file_name.write("// No RX Packetization, so tie off packetization signals\n")
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
print_verilog_assign(file_name, gen_llink_concat_ovrd (llink['NAME'],"output"), "1'b0")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// TX Section\n")
file_name.write("\n")
file_name.write("// TX_CH_WIDTH = {}; // {} running at {} Rate\n".format(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], configuration['CHAN_TYPE'], configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE']))
file_name.write("// TX_DATA_WIDTH = {}; // Usable Data per Channel\n".format(configuration['CHAN_TX_USEABLE1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_USEABLE1PHY_DATA_MAIN'] ))
file_name.write("// TX_PERSISTENT_STROBE = 1'b{};\n".format(int(configuration['TX_PERSISTENT_STROBE'])))
file_name.write("// TX_PERSISTENT_MARKER = 1'b{};\n".format(int(configuration['TX_PERSISTENT_MARKER'])))
file_name.write("// TX_STROBE_GEN2_LOC = 'd{};\n".format(int(configuration['TX_STROBE_GEN2_LOC'])))
file_name.write("// TX_MARKER_GEN2_LOC = 'd{};\n".format(int(configuration['TX_MARKER_GEN2_LOC'])))
file_name.write("// TX_STROBE_GEN1_LOC = 'd{};\n".format(int(configuration['TX_STROBE_GEN1_LOC'])))
file_name.write("// TX_MARKER_GEN1_LOC = 'd{};\n".format(int(configuration['TX_MARKER_GEN1_LOC'])))
file_name.write("// TX_ENABLE_STROBE = 1'b{};\n".format(int(configuration['TX_ENABLE_STROBE'])))
file_name.write("// TX_ENABLE_MARKER = 1'b{};\n".format(int(configuration['TX_ENABLE_MARKER'])))
file_name.write("// TX_DBI_PRESENT = 1'b{};\n".format(int(configuration['TX_DBI_PRESENT'])))
file_name.write("// TX_REG_PHY = 1'b{};\n".format(int(configuration['TX_REG_PHY'])))
file_name.write("\n")
file_name.write(" localparam TX_REG_PHY = 1'b{}; // If set, this enables boundary FF for timing reasons\n".format(int(configuration['TX_REG_PHY'])))
file_name.write("\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_preflop_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
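        ## A strobe/marker must be "recovered" (re-inserted into the TX stream below) when it
        ## is enabled but not persistent, since its bit position is reused for data.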
use_recov_strobe = False
if ((configuration['TX_ENABLE_STROBE'] if direction == 'master' else configuration['RX_ENABLE_STROBE']) == True and
(configuration['TX_PERSISTENT_STROBE'] if direction == 'master' else configuration['RX_PERSISTENT_STROBE']) == False ) :
use_recov_strobe = True
use_recov_marker = False
if ((configuration['TX_ENABLE_MARKER'] if direction == 'master' else configuration['RX_ENABLE_MARKER']) == True and
(configuration['TX_PERSISTENT_MARKER'] if direction == 'master' else configuration['RX_PERSISTENT_MARKER']) == False ) :
use_recov_marker = True
if use_recov_strobe :
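            ## The _galt_ and _final_ staging copies exist only when a Gen2 channel can also
            ## run in Gen1 mode (GEN2_AS_GEN1_EN); _final_ is the m_gen2_mode muxed result.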
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_preflop_recov_strobe_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
if configuration ['GEN2_AS_GEN1_EN']:
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_galt_preflop_recov_strobe_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
print_verilog_logic_line (file_name , "tx_phy_final_preflop_recov_strobe_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
if use_recov_marker:
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_preflop_recov_marker_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
if configuration ['GEN2_AS_GEN1_EN']:
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_galt_preflop_recov_marker_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
print_verilog_logic_line (file_name , "tx_phy_final_preflop_recov_marker_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "tx_phy_flop_{}_reg".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
        if (configuration['TX_SPARE_WIDTH'] if direction == 'master' else configuration['RX_SPARE_WIDTH']) > 0:
print_verilog_logic_line (file_name , "tx_spare_data", index = gen_index_msb (configuration['TX_SPARE_WIDTH'] if direction == 'master' else configuration['RX_SPARE_WIDTH'], sysv=False) )
file_name.write("\n")
file_name.write(" always_ff @(posedge clk_wr or negedge rst_wr_n)\n")
file_name.write(" if (~rst_wr_n)\n")
file_name.write(" begin\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "{}'b0".format(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
file_name.write(" end\n")
file_name.write(" else\n")
file_name.write(" begin\n")
for phy in range(configuration['NUM_CHAN']):
if configuration ['GEN2_AS_GEN1_EN']:
if use_recov_marker:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_final_preflop_recov_marker_{}".format(phy))
elif use_recov_strobe and not use_recov_marker:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_final_preflop_recov_strobe_{}".format(phy))
else:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_preflop_{}".format(phy))
else:
if use_recov_marker:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_preflop_recov_marker_{}".format(phy))
elif use_recov_strobe and not use_recov_marker:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_preflop_recov_strobe_{}".format(phy))
else:
print_verilog_regnb (file_name , "tx_phy_flop_{}_reg".format(phy) , "tx_phy_preflop_{}".format(phy))
file_name.write(" end\n")
file_name.write("\n")
for phy in range(configuration['NUM_CHAN']):
if configuration ['GEN2_AS_GEN1_EN']:
if use_recov_marker:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_final_preflop_recov_marker_{}".format(phy,phy))
elif use_recov_strobe and not use_recov_marker:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_final_preflop_recov_strobe_{}".format(phy,phy))
else:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_preflop_{}".format(phy,phy))
else:
if use_recov_marker:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_preflop_recov_marker_{}".format(phy,phy))
elif use_recov_strobe and not use_recov_marker:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_preflop_recov_strobe_{}".format(phy,phy))
else:
print_verilog_assign(file_name, "tx_phy{}".format(phy), "TX_REG_PHY ? tx_phy_flop_{}_reg : tx_phy_preflop_{}".format(phy,phy))
file_name.write("\n")
##################### Dynamic Gen2/Gen1 section
if configuration ['GEN2_AS_GEN1_EN']:
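            ## m_gen2_mode selects at run time between the Gen2 overlay (tx_phy_preflop_recov_*)
            ## and the Gen1 overlay (tx_phy_galt_preflop_recov_*).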
for phy in range(configuration['NUM_CHAN']):
if use_recov_marker:
print_verilog_assign(file_name, "tx_phy_final_preflop_recov_marker_{}".format(phy), "m_gen2_mode ? tx_phy_preflop_recov_marker_{} : tx_phy_galt_preflop_recov_marker_{}".format(phy,phy))
elif use_recov_strobe and not use_recov_marker:
print_verilog_assign(file_name, "tx_phy_final_preflop_recov_strobe_{}".format(phy), "m_gen2_mode ? tx_phy_preflop_recov_strobe_{} : tx_phy_galt_preflop_recov_strobe_{}".format(phy,phy))
file_name.write("\n")
if use_recov_strobe:
loc_strobe_loc = configuration['TX_STROBE_GEN1_LOC'] if direction == 'master' else configuration['RX_STROBE_GEN1_LOC']
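            ## Three slices per channel: bits below the strobe location pass through, the strobe
            ## bit carries tx_stb_userbit while tx_online is low, and bits above pass through.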
for phy in range(configuration['NUM_CHAN']):
if loc_strobe_loc != 0:
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_strobe_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (loc_strobe_loc, 0) ,
index2=gen_index_msb (loc_strobe_loc, 0) )
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_strobe_{0}".format(phy), "(~tx_online) ? tx_stb_userbit : tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (1, loc_strobe_loc),
index2=gen_index_msb (1, loc_strobe_loc))
if loc_strobe_loc != ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-1):
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_strobe_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-loc_strobe_loc-1, loc_strobe_loc+1) ,
index2=gen_index_msb ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-loc_strobe_loc-1, loc_strobe_loc+1) )
file_name.write("\n")
        ## Note, this is intended to be if, not elif
if use_recov_marker and not use_recov_strobe:
loc_marker_loc = configuration['TX_MARKER_GEN1_LOC'] if direction == 'master' else configuration['RX_MARKER_GEN1_LOC']
            ## marker_count is the number of beats per channel word (Full=1, Half=2, Quarter=4)
            marker_count = {'Half': 2, 'Quarter': 4}.get(configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE'], 1)
for phy in range(configuration['NUM_CHAN']):
for bus_index in range(marker_count):
beat_size = (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * bus_index
beat_msb = ((configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * (bus_index+1)) - 1
if loc_marker_loc != 0:
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (loc_marker_loc , beat_size) ,
index2=gen_index_msb (loc_marker_loc , beat_size) )
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), "(~tx_online) ? tx_mrk_userbit[{1}] : tx_phy_preflop_{0}".format(phy, bus_index),
index1=gen_index_msb (1, loc_marker_loc + beat_size) ,
index2=gen_index_msb (1, loc_marker_loc + beat_size) )
if loc_marker_loc != (beat_msb - beat_size):
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) ,
index2=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) )
file_name.write("\n")
elif use_recov_marker and use_recov_strobe:
loc_marker_loc = configuration['TX_MARKER_GEN1_LOC'] if direction == 'master' else configuration['RX_MARKER_GEN1_LOC']
            ## marker_count is the number of beats per channel word (Full=1, Half=2, Quarter=4)
            marker_count = {'Half': 2, 'Quarter': 4}.get(configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE'], 1)
for phy in range(configuration['NUM_CHAN']):
for bus_index in range(marker_count):
beat_size = (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * bus_index
beat_msb = ((configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * (bus_index+1)) - 1
if loc_marker_loc != 0:
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_recov_strobe_{0}".format(phy),
index1=gen_index_msb (loc_marker_loc , beat_size) ,
index2=gen_index_msb (loc_marker_loc , beat_size) )
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), "(~tx_online) ? tx_mrk_userbit[{1}] : tx_phy_preflop_recov_strobe_{0}".format(phy, bus_index),
index1=gen_index_msb (1, loc_marker_loc + beat_size) ,
index2=gen_index_msb (1, loc_marker_loc + beat_size) )
if loc_marker_loc != (beat_msb - beat_size):
print_verilog_assign(file_name, "tx_phy_galt_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_recov_strobe_{0}".format(phy),
index1=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) ,
index2=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) )
file_name.write("\n")
##################### Normal, non Dynamic Gen2/Gen1 section
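        ## Same overlay as above, but writing tx_phy_preflop_recov_* using the Gen2
        ## strobe/marker locations.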
if use_recov_strobe:
loc_strobe_loc = configuration['TX_STROBE_GEN2_LOC'] if direction == 'master' else configuration['RX_STROBE_GEN2_LOC']
for phy in range(configuration['NUM_CHAN']):
if loc_strobe_loc != 0:
print_verilog_assign(file_name, "tx_phy_preflop_recov_strobe_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (loc_strobe_loc, 0) ,
index2=gen_index_msb (loc_strobe_loc, 0) )
print_verilog_assign(file_name, "tx_phy_preflop_recov_strobe_{0}".format(phy), "(~tx_online) ? tx_stb_userbit : tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (1, loc_strobe_loc),
index2=gen_index_msb (1, loc_strobe_loc))
if loc_strobe_loc != ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-1):
print_verilog_assign(file_name, "tx_phy_preflop_recov_strobe_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-loc_strobe_loc-1, loc_strobe_loc+1) ,
index2=gen_index_msb ((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-loc_strobe_loc-1, loc_strobe_loc+1) )
file_name.write("\n")
        ## Note, this is intended to be if, not elif
if use_recov_marker and not use_recov_strobe:
loc_marker_loc = configuration['TX_MARKER_GEN2_LOC'] if direction == 'master' else configuration['RX_MARKER_GEN2_LOC']
            ## marker_count is the number of beats per channel word (Full=1, Half=2, Quarter=4)
            marker_count = {'Half': 2, 'Quarter': 4}.get(configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE'], 1)
for phy in range(configuration['NUM_CHAN']):
for bus_index in range(marker_count):
beat_size = (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * bus_index
beat_msb = ((configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * (bus_index+1)) - 1
if loc_marker_loc != 0:
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (loc_marker_loc , beat_size) ,
index2=gen_index_msb (loc_marker_loc , beat_size) )
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), "(~tx_online) ? tx_mrk_userbit[{1}] : tx_phy_preflop_{0}".format(phy, bus_index),
index1=gen_index_msb (1, loc_marker_loc + beat_size) ,
index2=gen_index_msb (1, loc_marker_loc + beat_size) )
if loc_marker_loc != (beat_msb - beat_size):
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_{0}".format(phy),
index1=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) ,
index2=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) )
file_name.write("\n")
elif use_recov_marker and use_recov_strobe:
loc_marker_loc = configuration['TX_MARKER_GEN2_LOC'] if direction == 'master' else configuration['RX_MARKER_GEN2_LOC']
            ## marker_count is the number of beats per channel word (Full=1, Half=2, Quarter=4)
            marker_count = {'Half': 2, 'Quarter': 4}.get(configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE'], 1)
for phy in range(configuration['NUM_CHAN']):
for bus_index in range(marker_count):
beat_size = (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * bus_index
beat_msb = ((configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) * (bus_index+1)) - 1
if loc_marker_loc != 0:
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_recov_strobe_{0}".format(phy),
index1=gen_index_msb (loc_marker_loc , beat_size) ,
index2=gen_index_msb (loc_marker_loc , beat_size) )
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), "(~tx_online) ? tx_mrk_userbit[{1}] : tx_phy_preflop_recov_strobe_{0}".format(phy, bus_index),
index1=gen_index_msb (1, loc_marker_loc + beat_size) ,
index2=gen_index_msb (1, loc_marker_loc + beat_size) )
if loc_marker_loc != (beat_msb - beat_size):
print_verilog_assign(file_name, "tx_phy_preflop_recov_marker_{0}".format(phy), " tx_phy_preflop_recov_strobe_{0}".format(phy),
index1=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) ,
index2=gen_index_msb (beat_msb - (loc_marker_loc + beat_size), loc_marker_loc + beat_size + 1) )
file_name.write("\n")
        if (configuration['TX_SPARE_WIDTH'] if direction == 'master' else configuration['RX_SPARE_WIDTH']) > 0:
print_verilog_assign(file_name, "tx_spare_data", "{}'b0".format(configuration['TX_SPARE_WIDTH'] if direction == 'master' else configuration['RX_SPARE_WIDTH']))
file_name.write("\n")
if direction == 'master':
for string in global_struct.g_concat_code_vector_master_tx:
file_name.write (string)
else:
for string in global_struct.g_concat_code_vector_slave_tx:
file_name.write (string)
file_name.write("// TX Section\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// RX Section\n")
file_name.write("\n")
file_name.write("// RX_CH_WIDTH = {}; // {} running at {} Rate\n".format(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'slave' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], configuration['CHAN_TYPE'], configuration['TX_RATE'] if direction == 'slave' else configuration['RX_RATE']))
file_name.write("// RX_DATA_WIDTH = {}; // Usable Data per Channel\n".format(configuration['CHAN_TX_USEABLE1PHY_DATA_MAIN'] if direction == 'slave' else configuration['CHAN_RX_USEABLE1PHY_DATA_MAIN'] ))
file_name.write("// RX_PERSISTENT_STROBE = 1'b{};\n".format(int(configuration['RX_PERSISTENT_STROBE'])))
file_name.write("// RX_PERSISTENT_MARKER = 1'b{};\n".format(int(configuration['RX_PERSISTENT_MARKER'])))
file_name.write("// RX_STROBE_GEN2_LOC = 'd{};\n".format(int(configuration['RX_STROBE_GEN2_LOC'])))
file_name.write("// RX_MARKER_GEN2_LOC = 'd{};\n".format(int(configuration['RX_MARKER_GEN2_LOC'])))
file_name.write("// RX_STROBE_GEN1_LOC = 'd{};\n".format(int(configuration['RX_STROBE_GEN1_LOC'])))
file_name.write("// RX_MARKER_GEN1_LOC = 'd{};\n".format(int(configuration['RX_MARKER_GEN1_LOC'])))
file_name.write("// RX_ENABLE_STROBE = 1'b{};\n".format(int(configuration['RX_ENABLE_STROBE'])))
file_name.write("// RX_ENABLE_MARKER = 1'b{};\n".format(int(configuration['RX_ENABLE_MARKER'])))
file_name.write("// RX_DBI_PRESENT = 1'b{};\n".format(int(configuration['RX_DBI_PRESENT'])))
file_name.write("// RX_REG_PHY = 1'b{};\n".format(int(configuration['RX_REG_PHY'])))
file_name.write("\n")
file_name.write(" localparam RX_REG_PHY = 1'b{}; // If set, this enables boundary FF for timing reasons\n".format(int(configuration['RX_REG_PHY'])))
file_name.write("\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "rx_phy_postflop_{}".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'slave' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
for phy in range(configuration['NUM_CHAN']):
print_verilog_logic_line (file_name , "rx_phy_flop_{}_reg".format(phy) , index = gen_index_msb ( configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'slave' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] , sysv=False) )
        if (configuration['TX_SPARE_WIDTH'] if direction == 'slave' else configuration['RX_SPARE_WIDTH']) > 0:
print_verilog_logic_line (file_name , "rx_spare_data", index = gen_index_msb (configuration['TX_SPARE_WIDTH'] if direction == 'slave' else configuration['RX_SPARE_WIDTH'], sysv=False) )
file_name.write("\n")
file_name.write(" always_ff @(posedge clk_rd or negedge rst_rd_n)\n")
file_name.write(" if (~rst_rd_n)\n")
file_name.write(" begin\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_regnb (file_name , "rx_phy_flop_{}_reg".format(phy) , "{}'b0".format(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'slave' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
file_name.write(" end\n")
file_name.write(" else\n")
file_name.write(" begin\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_regnb (file_name , "rx_phy_flop_{}_reg".format(phy) , "rx_phy{}".format(phy))
file_name.write(" end\n")
file_name.write("\n")
file_name.write("\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_assign(file_name, "rx_phy_postflop_{}".format(phy), "RX_REG_PHY ? rx_phy_flop_{}_reg : rx_phy{}".format(phy,phy))
file_name.write("\n")
if direction == 'master':
for string in global_struct.g_concat_code_vector_master_rx:
file_name.write (string)
else:
for string in global_struct.g_concat_code_vector_slave_rx:
file_name.write (string)
if configuration['REPLICATED_STRUCT']:
for llink in configuration['LL_LIST']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == "rstruct_enable":
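                        ## Fan the per-iteration push bits back into the rstruct enable field
                        ## of the output FIFO data, one bit per replicated structure.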
llink_lsb = sig['LLINDEX_MAIN_LSB'] * configuration['RSTRUCT_MULTIPLY_FACTOR']
for rstruct_iteration in list (range (0, configuration['RSTRUCT_MULTIPLY_FACTOR'])):
file_name.write(" assign {0:20}[{1:4}] = {2};\n".format(gen_llink_concat_fifoname (llink['NAME'],"output" ), llink_lsb, gen_llink_concat_pushbit (llink['NAME'],llink['DIR'])+"_r"+str(rstruct_iteration) ))
llink_lsb += 1
file_name.write("\n")
file_name.write("// RX Section\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("\n")
file_name.write("endmodule\n")
file_name.close()
return
## make_concat_file
##########################################################################################
##########################################################################################
## make_top_file
## Make the top level file
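## The top level instantiates ll_auto_sync, one ll_transmit or ll_receive per
## logic link, and the generated _name and _concat modules.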
def make_top_file(configuration):
for direction in ['master', 'slave']:
name_file_name = "{}_{}_top".format(configuration['MODULE'], direction)
file_name = open("{}/{}.sv".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
print_verilog_header(file_name)
file_name.write("module {} (\n".format(name_file_name))
print_verilog_io_line(file_name, "input", "clk_wr")
print_verilog_io_line(file_name, "input", "rst_wr_n")
file_name.write("\n")
file_name.write(" // Control signals\n")
print_verilog_io_line(file_name, "input", "tx_online")
print_verilog_io_line(file_name, "input", "rx_online")
file_name.write("\n")
        localdir = 'output' if direction == 'master' else 'input'
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
print_verilog_io_line(file_name, "input", "init_{}_credit".format(llink['NAME']), "[7:0]")
file_name.write("\n")
file_name.write(" // PHY Interconnect\n")
for phy in range(configuration['NUM_CHAN']):
print_verilog_io_line(file_name, "output", "tx_phy{0}".format(phy), gen_index_msb(configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], sysv=False))
print_verilog_io_line(file_name, "input", "rx_phy{0}".format(phy), gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], sysv=False))
# List User Signals
for llink in configuration['LL_LIST']:
file_name.write("\n // {0} channel\n".format(llink['NAME']))
for sig_gen2 in llink['SIGNALLIST_MAIN']:
if sig_gen2['TYPE'] == "rstruct_enable" and direction == 'master':
continue
localdir = gen_direction(name_file_name, sig_gen2['DIR'])
print_verilog_io_line(file_name, localdir, sig_gen2['NAME'], index=gen_index_msb(sig_gen2['SIGWID'] * configuration['RSTRUCT_MULTIPLY_FACTOR'],sig_gen2['LSB'], sysv=False))
file_name.write("\n")
file_name.write(" // Debug Status Outputs\n")
for llink in configuration['LL_LIST']:
localdir = gen_direction(name_file_name, llink['DIR'], True)
print_verilog_io_line(file_name, "output", gen_llink_debug_status(llink['NAME'],localdir), "[31:0]")
file_name.write("\n // Configuration\n")
print_verilog_io_line(file_name, "input", "m_gen2_mode")
file_name.write("\n")
if configuration['TX_USER_MARKER'] if direction == 'master' else configuration['RX_USER_MARKER']:
print_verilog_io_line(file_name, "input", "tx_mrk_userbit", gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'], sysv=False))
if configuration['TX_USER_STROBE'] if direction == 'master' else configuration['RX_USER_STROBE']:
print_verilog_io_line(file_name, "input", "tx_stb_userbit")
file_name.write("\n")
print_verilog_io_line(file_name, "input", "delay_x_value", "[15:0]")
print_verilog_io_line(file_name, "input", "delay_y_value", "[15:0]")
print_verilog_io_line(file_name, "input", "delay_z_value", "[15:0]",comma=False)
file_name.write("\n);\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// Interconnect Wires\n")
for llink in configuration['LL_LIST']:
if llink['HASVALID']:
print_verilog_logic_line (file_name , gen_llink_concat_pushbit (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)))
print_verilog_logic_line (file_name , gen_llink_user_valid (llink['NAME'] ))
if configuration['REPLICATED_STRUCT'] and gen_direction(name_file_name, llink['DIR'], False) == "output":
print_verilog_logic_line (file_name , gen_llink_concat_fifoname (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
print_verilog_logic_line (file_name , gen_llink_user_fifoname (llink['NAME'], gen_direction(name_file_name, llink['DIR'], True)), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
else:
print_verilog_logic_line (file_name , gen_llink_concat_fifoname (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
print_verilog_logic_line (file_name , gen_llink_user_fifoname (llink['NAME'], gen_direction(name_file_name, llink['DIR'], True)), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False))
if llink['HASREADY']:
if configuration['REPLICATED_STRUCT'] and gen_direction(name_file_name, llink['DIR'], False) == "input":
print_verilog_logic_line (file_name , gen_llink_concat_credit (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)), gen_index_msb(4, sysv=False))
else:
print_verilog_logic_line (file_name , gen_llink_concat_credit (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)))
print_verilog_logic_line (file_name , gen_llink_user_ready (llink['NAME'] ))
print_verilog_logic_line (file_name , gen_llink_concat_ovrd (llink['NAME'], gen_direction(name_file_name, llink['DIR'], False)))
file_name.write("\n")
print_verilog_logic_line (file_name , "tx_auto_mrk_userbit", gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'], sysv=False) )
print_verilog_logic_line (file_name , "tx_auto_stb_userbit" )
print_verilog_logic_line (file_name , "tx_online_delay" )
print_verilog_logic_line (file_name , "rx_online_delay" )
print_verilog_logic_line (file_name , "rx_online_holdoff" )
if (configuration['TX_USER_MARKER'] if direction == 'master' else configuration['RX_USER_MARKER']) == False:
print_verilog_logic_line (file_name ,"tx_mrk_userbit", gen_index_msb(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'], sysv=False), comment="No TX User Marker, so tie off")
if (configuration['TX_USER_STROBE'] if direction == 'master' else configuration['RX_USER_STROBE']) == False:
print_verilog_logic_line (file_name ,"tx_stb_userbit", comment="No TX User Strobe, so tie off")
if (configuration['TX_USER_MARKER'] if direction == 'master' else configuration['RX_USER_MARKER']) == False:
print_verilog_assign(file_name, "tx_mrk_userbit", "'0")
if (configuration['TX_USER_STROBE'] if direction == 'master' else configuration['RX_USER_STROBE']) == False:
            print_verilog_assign(file_name, "tx_stb_userbit", "'1") ## There is modest value in driving a 1 here.
file_name.write("\n")
file_name.write("// Interconnect Wires\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// Auto Sync\n")
file_name.write("\n")
        ## This is a corner case catcher for recoverable markers but persistent strobes.
        ## This is on the receive side, so we look at the RX configuration when direction is master.
if (not(configuration['RX_PERSISTENT_MARKER'] if direction == 'master' else configuration['TX_PERSISTENT_MARKER']) and
(configuration['RX_PERSISTENT_STROBE'] if direction == 'master' else configuration['TX_PERSISTENT_STROBE']) ):
            gen1_index = 0
            gen2_index = 0
if (configuration['CHAN_TYPE'] == "Gen2Only" or configuration['CHAN_TYPE'] == "Gen2"):
if (configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE']) == 'Quarter':
gen2_index = (configuration['RX_MARKER_GEN2_LOC'] if direction == 'master' else configuration['TX_MARKER_GEN2_LOC']) + 240
elif (configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE']) == 'Half':
gen2_index = (configuration['RX_MARKER_GEN2_LOC'] if direction == 'master' else configuration['TX_MARKER_GEN2_LOC']) + 80
else:
gen2_index = (configuration['RX_MARKER_GEN2_LOC'] if direction == 'master' else configuration['TX_MARKER_GEN2_LOC']) + 0
if (configuration['CHAN_TYPE'] == "Gen1Only" or configuration['CHAN_TYPE'] == "Gen2"):
if (configuration['TX_RATE'] if direction == 'master' else configuration['RX_RATE']) == 'Half':
gen1_index = (configuration['RX_MARKER_GEN1_LOC'] if direction == 'master' else configuration['TX_MARKER_GEN1_LOC']) + 40
else:
gen1_index = (configuration['RX_MARKER_GEN1_LOC'] if direction == 'master' else configuration['TX_MARKER_GEN1_LOC']) + 0
if configuration['GEN2_AS_GEN1_EN']:
print_verilog_assign(file_name, "rx_online_holdoff", " m_gen2_mode ? rx_phy0[{}] : rx_phy0[{}]".format(gen2_index,gen1_index))
elif (configuration['CHAN_TYPE'] == "Gen2Only" or configuration['CHAN_TYPE'] == "Gen2"):
print_verilog_assign(file_name, "rx_online_holdoff", " rx_phy0[{}]".format(gen2_index))
else:
print_verilog_assign(file_name, "rx_online_holdoff", " rx_phy0[{}]".format(gen1_index))
else:
print_verilog_assign(file_name, "rx_online_holdoff", "1'b0")
file_name.write("\n")
file_name.write(" ll_auto_sync #(.MARKER_WIDTH({}),\n".format(configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']))
if configuration['TX_PERSISTENT_MARKER'] if direction == 'master' else configuration['RX_PERSISTENT_MARKER']:
file_name.write(" .PERSISTENT_MARKER(1'b1),\n")
else:
file_name.write(" .PERSISTENT_MARKER(1'b0),\n")
if ((not configuration['TX_USER_MARKER'] and direction == 'master') or
(not configuration['RX_USER_MARKER'] and direction != 'master') ) :
file_name.write(" .NO_MARKER(1'b1),\n")
if configuration['TX_PERSISTENT_STROBE'] if direction == 'master' else configuration['RX_PERSISTENT_STROBE']:
file_name.write(" .PERSISTENT_STROBE(1'b1)) ll_auto_sync_i\n")
else:
file_name.write(" .PERSISTENT_STROBE(1'b0)) ll_auto_sync_i\n")
file_name.write(" (// Outputs\n")
file_name.write(" .tx_online_delay (tx_online_delay),\n")
file_name.write(" .tx_auto_mrk_userbit (tx_auto_mrk_userbit),\n")
file_name.write(" .tx_auto_stb_userbit (tx_auto_stb_userbit),\n")
file_name.write(" .rx_online_delay (rx_online_delay),\n")
file_name.write(" // Inputs\n")
file_name.write(" .clk_wr (clk_wr),\n")
file_name.write(" .rst_wr_n (rst_wr_n),\n")
file_name.write(" .tx_online (tx_online),\n")
file_name.write(" .delay_z_value (delay_z_value[15:0]),\n")
file_name.write(" .delay_y_value (delay_y_value[15:0]),\n")
file_name.write(" .tx_mrk_userbit (tx_mrk_userbit),\n")
file_name.write(" .tx_stb_userbit (tx_stb_userbit),\n")
file_name.write(" .rx_online (rx_online),\n")
file_name.write(" .rx_online_holdoff (rx_online_holdoff),\n")
file_name.write(" .delay_x_value (delay_x_value[15:0]));\n")
file_name.write("\n")
file_name.write("// Auto Sync\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// Logic Link Instantiation\n")
file_name.write("\n")
        localdir = 'output' if direction == 'master' else 'input'
for llink in configuration['LL_LIST']:
if llink['HASVALID_NOREADY_REPSTRUCT']:
file_name.write(" // No AXI Ready, so bypassing main Logic Link FIFO and Credit logic.\n")
if llink['DIR'] == localdir:
print_verilog_assign(file_name, "tx_{0}_data".format(llink['NAME']), "txfifo_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
print_verilog_assign(file_name, "tx_{0}_debug_status".format(llink['NAME']), "{12'h0, tx_online_delay, rx_online_delay, 18'h0} ;", index1=gen_index_msb (32), semicolon=False)
print_verilog_assign(file_name, "tx_{0}_pushbit".format(llink['NAME']), "user_{0}_vld".format(llink['NAME']))
else:
print_verilog_assign(file_name, "rxfifo_{0}_data".format(llink['NAME']), "rx_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
print_verilog_assign(file_name, "rx_{0}_debug_status".format(llink['NAME']), "{12'h0, tx_online_delay, rx_online_delay, 18'h0} ;", index1=gen_index_msb (32), semicolon=False)
print_verilog_assign(file_name, "user_{0}_vld".format(llink['NAME']), "rx_online_delay & rx_{0}_pushbit".format(llink['NAME']))
elif not llink['HASREADY'] and not llink['HASVALID']:
file_name.write(" // No AXI Valid or Ready, so bypassing main Logic Link FIFO and Credit logic.\n")
if llink['DIR'] == localdir:
if configuration['REPLICATED_STRUCT']:
print_verilog_assign(file_name, "tx_{0}_data".format(llink['NAME']), "txfifo_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
else:
print_verilog_assign(file_name, "tx_{0}_data".format(llink['NAME']), "txfifo_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
print_verilog_assign(file_name, "tx_{0}_debug_status".format(llink['NAME']), "{12'h0, tx_online_delay, rx_online_delay, 18'h0} ;", index1=gen_index_msb (32), semicolon=False)
else:
if configuration['REPLICATED_STRUCT']:
print_verilog_assign(file_name, "rxfifo_{0}_data".format(llink['NAME']), "rx_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
else:
print_verilog_assign(file_name, "rxfifo_{0}_data".format(llink['NAME']), "rx_{0}_data".format(llink['NAME']), index1=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']), index2=gen_index_msb (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR']))
print_verilog_assign(file_name, "rx_{0}_debug_status".format(llink['NAME']), "{12'h0, tx_online_delay, rx_online_delay, 18'h0} ;", index1=gen_index_msb (32), semicolon=False)
else:
if llink['DIR'] == localdir:
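                    ## With REPLICATED_STRUCT, ll_transmit is parameterized with ASYMMETRIC_CREDIT
                    ## and a TX_CRED_SIZE equal to the replication multiply factor.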
if configuration['REPLICATED_STRUCT']:
file_name.write(" ll_transmit #(.WIDTH({1}), .DEPTH(8'd{2}), .TX_CRED_SIZE(3'h{3}), .ASYMMETRIC_CREDIT(1'b1), .DEFAULT_TX_CRED(8'd{4})) ll_transmit_i{0}\n".format(llink['NAME'], llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], llink['TX_FIFO_DEPTH'], configuration['RSTRUCT_MULTIPLY_FACTOR'], llink['RX_FIFO_DEPTH']))
else:
file_name.write(" ll_transmit #(.WIDTH({1}), .DEPTH(8'd{2}), .TX_CRED_SIZE(3'h{3}), .ASYMMETRIC_CREDIT(1'b0), .DEFAULT_TX_CRED(8'd{4})) ll_transmit_i{0}\n".format(llink['NAME'], llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], llink['TX_FIFO_DEPTH'], "1", llink['RX_FIFO_DEPTH']))
file_name.write(" (// Outputs\n")
if llink['HASREADY']:
file_name.write(" .user_i_ready (user_{0}_ready),\n".format(llink['NAME']))
else:
file_name.write(" .user_i_ready (),\n")
file_name.write(" .tx_i_data (tx_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
file_name.write(" .tx_i_pushbit (tx_{0}_pushbit),\n".format(llink['NAME']))
file_name.write(" .tx_i_debug_status (tx_{0}_debug_status[31:0]),\n".format(llink['NAME']))
file_name.write(" // Inputs\n")
file_name.write(" .clk_wr (clk_wr),\n")
file_name.write(" .rst_wr_n (rst_wr_n),\n")
if configuration['REPLICATED_STRUCT']:
file_name.write(" .end_of_txcred_coal (tx_mrk_userbit[{}]),\n".format((configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']) - 1))
else:
file_name.write(" .end_of_txcred_coal (1'b1),\n")
file_name.write(" .tx_online (tx_online_delay),\n")
file_name.write(" .rx_online (rx_online_delay),\n")
file_name.write(" .init_i_credit (init_{0}_credit[7:0]),\n".format(llink['NAME']))
file_name.write(" .tx_i_pop_ovrd (tx_{0}_pop_ovrd),\n".format(llink['NAME']))
file_name.write(" .txfifo_i_data (txfifo_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
file_name.write(" .user_i_valid (user_{0}_vld),\n".format(llink['NAME']))
if llink['HASREADY']:
if configuration['REPLICATED_STRUCT'] and gen_direction(name_file_name, llink['DIR'], False) == "input":
file_name.write(" .rx_i_credit (rx_{0}_credit[3:0]));\n".format(llink['NAME']))
else:
file_name.write(" .rx_i_credit ({{3'b0,rx_{0}_credit}}));\n".format(llink['NAME']))
else:
file_name.write(" .rx_i_credit (4'b1));\n")
else:
if configuration['REPLICATED_STRUCT']:
file_name.write(" ll_receive #(.WIDTH({1}), .DEPTH(8'd{2})) ll_receive_i{0}\n".format(llink['NAME'], llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], (int(llink['RX_FIFO_DEPTH']) + configuration['RSTRUCT_MULTIPLY_FACTOR'] - 1) // configuration['RSTRUCT_MULTIPLY_FACTOR']))
file_name.write(" (// Outputs\n")
file_name.write(" .rxfifo_i_data (rxfifo_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
else:
file_name.write(" ll_receive #(.WIDTH({1}), .DEPTH(8'd{2})) ll_receive_i{0}\n".format(llink['NAME'], llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], llink['RX_FIFO_DEPTH']))
file_name.write(" (// Outputs\n")
file_name.write(" .rxfifo_i_data (rxfifo_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
file_name.write(" .user_i_valid (user_{0}_vld),\n".format(llink['NAME']))
if llink['HASREADY']:
file_name.write(" .tx_i_credit (tx_{0}_credit),\n".format(llink['NAME']))
else:
file_name.write(" .tx_i_credit (),\n")
file_name.write(" .rx_i_debug_status (rx_{0}_debug_status[31:0]),\n".format(llink['NAME']))
file_name.write(" // Inputs\n")
file_name.write(" .clk_wr (clk_wr),\n")
file_name.write(" .rst_wr_n (rst_wr_n),\n")
file_name.write(" .rx_online (rx_online_delay),\n")
file_name.write(" .tx_online (tx_online_delay),\n")
file_name.write(" .rx_i_push_ovrd (rx_{0}_push_ovrd),\n".format(llink['NAME']))
if configuration['REPLICATED_STRUCT']:
file_name.write(" .rx_i_data (rx_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
else:
file_name.write(" .rx_i_data (rx_{0}_data[{1}:0]),\n".format(llink['NAME'], (llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])-1))
file_name.write(" .rx_i_pushbit (rx_{0}_pushbit),\n".format(llink['NAME']))
if llink['HASREADY']:
file_name.write(" .user_i_ready (user_{0}_ready));\n".format(llink['NAME']))
else:
file_name.write(" .user_i_ready (1'b1));\n")
file_name.write("\n")
file_name.write("// Logic Link Instantiation\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// User Interface\n")
file_name.write("\n")
file_name.write(" {0}_{1}_name {0}_{1}_name\n".format(configuration['MODULE'], direction))
file_name.write(" (\n")
# List User Signals
for llink in configuration['LL_LIST']:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == "rstruct_enable" and localdir == "output":
continue
file_name.write(" .{2:30} ({2}{1}),\n".format(localdir, gen_index_msb(sig['SIGWID'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sig['LSB'], sysv=False), sig['NAME']))
file_name.write("\n")
# List Logic Link Signals
for llink in configuration['LL_LIST']:
            ## The 'tx' prefix applies to links this side drives; everything else is 'rx'.
            prefix = 'tx' if ((llink['DIR'] == 'output' and direction == 'master') or
                              (llink['DIR'] == 'input' and direction == 'slave')) else 'rx'
if llink['HASVALID']:
file_name.write(" .{0:30} ({0}),\n".format("user_{}_vld".format(llink['NAME'])))
if configuration['REPLICATED_STRUCT'] and localdir == "input":
file_name.write(" .{0:30} ({0}{1}),\n".format("{}fifo_{}_data".format(prefix,llink['NAME']), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False)))
else:
file_name.write(" .{0:30} ({0}{1}),\n".format("{}fifo_{}_data".format(prefix,llink['NAME']), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'], sysv=False)))
if llink['HASREADY']:
file_name.write(" .{0:30} ({0}),\n".format("user_{}_ready".format(llink['NAME'])))
file_name.write("\n")
if llink['HASVALID_NOREADY_NOREP']:
file_name.write(" .{0:30} ({1}),\n".format("rx_online", "rx_online_delay"))
file_name.write(" .{0:30} ({0}{1})\n".format("m_gen2_mode", ""))
file_name.write("\n );")
file_name.write("\n")
file_name.write("// User Interface \n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("// PHY Interface\n")
file_name.write("\n")
file_name.write(" {0}_{1}_concat {0}_{1}_concat\n".format(configuration['MODULE'], direction))
file_name.write(" (\n")
# Logic Link Signaling
        localdir = 'output' if direction == 'master' else 'input'
        prefix_tx = 'tx'
        prefix_rx = 'rx'
for llink in configuration['LL_LIST']:
if llink['DIR'] == localdir:
file_name.write(" .{0:30} ({0}{1}),\n".format("{0}_{1}_data".format(prefix_tx,llink['NAME']), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])))
file_name.write(" .{0:30} ({0}),\n".format("{0}_{1}_pop_ovrd".format(prefix_tx,llink['NAME'])))
if llink['HASVALID']:
file_name.write(" .{0:30} ({0}),\n".format("{0}_{1}_pushbit".format(prefix_tx,llink['NAME'])))
if llink['HASREADY']:
if configuration['REPLICATED_STRUCT']:
file_name.write(" .{0:30} ({0}{1}),\n".format("{0}_{1}_credit".format(prefix_rx,llink['NAME']), gen_index_msb(4)))
else:
file_name.write(" .{0:30} ({0}{1}),\n".format("{0}_{1}_credit".format(prefix_rx,llink['NAME']), "" ))
else:
if configuration['REPLICATED_STRUCT']:
file_name.write(" .{0:30} ({0}{1}),\n".format("{0}_{1}_data".format(prefix_rx,llink['NAME']), gen_index_msb(llink['WIDTH_RX_RSTRUCT'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])))
else:
file_name.write(" .{0:30} ({0}{1}),\n".format("{0}_{1}_data".format(prefix_rx,llink['NAME']), gen_index_msb(llink['WIDTH_MAIN'] * configuration['RSTRUCT_MULTIPLY_FACTOR'])))
file_name.write(" .{0:30} ({0}),\n".format("{0}_{1}_push_ovrd".format(prefix_rx,llink['NAME'])))
if llink['HASVALID']:
file_name.write(" .{0:30} ({0}),\n".format("{0}_{1}_pushbit".format(prefix_rx,llink['NAME'])))
if llink['HASREADY']:
if configuration['REPLICATED_STRUCT']:
                        ## Expand the single credit into a mask with RSTRUCT_MULTIPLY_FACTOR ones.
                        vector = {1: "1", 2: "3", 4: "f"}[configuration['RSTRUCT_MULTIPLY_FACTOR']]
file_name.write(" .{0:30} ({1}),\n".format("{0}_{1}_credit".format(prefix_tx,llink['NAME']) , "{0}_{1}_credit ? 4'h{2} : 4'h0".format(prefix_tx, llink['NAME'], vector) ))
else:
file_name.write(" .{0:30} ({0}),\n".format("{0}_{1}_credit".format(prefix_tx,llink['NAME'])))
file_name.write("\n")
# Logic Link Inputs
for phy in range(configuration['NUM_CHAN']):
localindex = "[{0}:0]".format((configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])-1)
file_name.write(" .{0:30} ({0}{1}),\n".format("tx_phy{}".format(phy), localindex))
localindex = "[{0}:0]".format((configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] if direction == 'master' else configuration['CHAN_TX_RAW1PHY_DATA_MAIN'])-1)
file_name.write(" .{0:30} ({0}{1}),\n".format("rx_phy{}".format(phy), localindex))
file_name.write("\n")
file_name.write(" .{0:30} ({1}),\n".format("clk_wr", "clk_wr"))
file_name.write(" .{0:30} ({1}),\n".format("clk_rd", "clk_wr"))
file_name.write(" .{0:30} ({1}),\n".format("rst_wr_n", "rst_wr_n"))
file_name.write(" .{0:30} ({1}),\n".format("rst_rd_n", "rst_wr_n"))
file_name.write("\n")
file_name.write(" .{0:30} ({0}{1}),\n".format("m_gen2_mode", ""))
file_name.write(" .{0:30} ({1}),\n".format("tx_online", "tx_online_delay"))
file_name.write("\n")
file_name.write(" .{0:30} ({1}),\n".format("tx_stb_userbit", "tx_auto_stb_userbit"))
file_name.write(" .{0:30} ({1})\n".format("tx_mrk_userbit", "tx_auto_mrk_userbit"))
file_name.write("\n")
file_name.write(" );\n")
file_name.write("\n")
file_name.write("// PHY Interface\n")
file_name.write("//////////////////////////////////////////////////////////////////\n")
file_name.write("\n")
file_name.write("\n")
file_name.write("endmodule\n")
file_name.close()
return
## make_top_file
##########################################################################################
##########################################################################################
## make_list_files
## Make the .f files
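## A minimal sketch of the generated .f contents, assuming MODULE is "my_llink"
## and OUTPUT_DIR resolves under ${PROJ_DIR} (names are illustrative only):
##   // Generated Files
##   ${PROJ_DIR}/out/my_llink_master_top.sv
##   ${PROJ_DIR}/out/my_llink_master_concat.sv
##   ${PROJ_DIR}/out/my_llink_master_name.sv
##   // Logic Link files
##   -f ${PROJ_DIR}/llink/rtl/llink.f
##   // Common Files
##   -f ${PROJ_DIR}/common/rtl/common.f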
def make_list_files(configuration):
    path = os.path.realpath(configuration['OUTPUT_DIR'])
    proj_dir = os.getenv("PROJ_DIR")
    if proj_dir:  ## guard: os.getenv returns None if PROJ_DIR is unset
        path = path.replace(proj_dir, "${PROJ_DIR}")
for direction in ['master', 'slave']:
name_file_name = "{}_{}".format(configuration['MODULE'], direction)
file_name = open("{}/{}.f".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
file_name.write("// Generated Files\n")
file_name.write("{}/{}_{}_top.sv \n".format(path,configuration['MODULE'], direction))
file_name.write("{}/{}_{}_concat.sv\n".format(path,configuration['MODULE'], direction))
file_name.write("{}/{}_{}_name.sv \n".format(path,configuration['MODULE'], direction))
file_name.write("\n")
file_name.write("// Logic Link files\n")
file_name.write("-f ${PROJ_DIR}/llink/rtl/llink.f\n")
file_name.write("\n")
file_name.write("// Common Files\n")
file_name.write("-f ${PROJ_DIR}/common/rtl/common.f\n")
file_name.close()
return
## make_list_files
##########################################################################################
##########################################################################################
## make_info_file
## this makes the INFO file, but most of the information has been
## generated elsewhere (e.g. in the g_info_print list)
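## The resulting info file is a sequence of banner-delimited sections:
##   // Data and Channel Size
##   // AXI to Logic Link Data Mapping   (only when at least one direction is
##                                        unpacketized and GEN2_AS_GEN1_EN is off)
##   // Master to Slave Packetization    (when TX_ENABLE_PACKETIZATION)
##   // Slave to Master Packetization    (when RX_ENABLE_PACKETIZATION)
##   // AXI to PHY IF Mapping AXI Manager Transmit / Receive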
def make_info_file(configuration):
name_file_name = "{}_info".format(configuration['MODULE'])
file_name = open("{}/{}.txt".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
print_verilog_header(file_name)
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// Data and Channel Size\n")
for string in global_struct.g_info_print:
file_name.write (string)
file_name.write ("// Data and Channel Size\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
if (not configuration ['TX_ENABLE_PACKETIZATION'] or not configuration ['RX_ENABLE_PACKETIZATION']) and not configuration['GEN2_AS_GEN1_EN']:
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// AXI to Logic Link Data Mapping\n")
file_name.write ("// This AXI Data FIFO packing\n")
for string in global_struct.g_llink_vector_print_tx:
file_name.write (string)
file_name.write ("\n")
for string in global_struct.g_llink_vector_print_rx:
file_name.write (string)
file_name.write ("// AXI to Logic Link Data Mapping\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
if configuration ['TX_ENABLE_PACKETIZATION']:
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// Master to Slave Packetization\n")
for string in global_struct.g_packet_print_tx:
file_name.write (string)
file_name.write ("\n")
file_name.write ("// Master to Slave Packetization\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
if configuration ['RX_ENABLE_PACKETIZATION']:
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// Slave to Master Packetization\n")
for string in global_struct.g_packet_print_rx:
string = re.sub('tx_packet_enc', 'rx_packet_enc', string)
string = re.sub('tx_packet_data', 'rx_packet_data', string)
string = re.sub('tx_packet_common', 'rx_packet_common', string)
string = re.sub('= tx_', '= rx_', string)
file_name.write (string)
file_name.write ("\n")
file_name.write ("// Slave to Master Packetization\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// AXI to PHY IF Mapping AXI Manager Transmit\n")
for string in global_struct.g_debug_raw_data_vector_print_tx:
file_name.write (string)
file_name.write ("// AXI to PHY IF Mapping AXI Manager Transmit\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.write ("// AXI to PHY IF Mapping AXI Manager Receive\n")
for string in global_struct.g_debug_raw_data_vector_print_rx:
file_name.write (string)
file_name.write ("// AXI to PHY IF Mapping AXI Manager Receive\n")
file_name.write ("//////////////////////////////////////////////////////////////////////\n")
file_name.close()
return
## make_info_file
##########################################################################################
##########################################################################################
## print_aib_assign_text_check_for_aib_bit
## Common function used to insert DBI bits, markers, etc.
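## Worked example, assuming TX DBI is enabled: for lsb 38, (38 + 1) % 40 == 39,
## and for lsb 39, (39 + 1) % 40 == 0, so bits 38 and 39 of every 40-bit word
## are claimed as DBI and the data lsb is advanced past them. Strobe and marker
## bits are skipped the same way at their *_GEN2_LOC offsets.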
def print_aib_assign_text_check_for_aib_bit(configuration, local_lsb1, use_tx, sysv = True):
check_for_more_bit = True
starting_lsb = local_lsb1
if global_struct.g_SIGNAL_DEBUG:
print ("entering print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
while (check_for_more_bit):
if global_struct.g_SIGNAL_DEBUG:
print (" executing print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
check_for_more_bit = False
## This stops us from rolling over into the next region
if configuration ['REPLICATED_STRUCT']:
if ((local_lsb1 // (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if use_tx else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'])) !=
(starting_lsb // (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if use_tx else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'])) ):
if global_struct.g_SIGNAL_DEBUG:
print (" early exit print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
continue
if local_lsb1 == (configuration['NUM_CHAN'] * (configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] if use_tx else configuration['CHAN_RX_RAW1PHY_DATA_MAIN'])):
if global_struct.g_SIGNAL_DEBUG:
print (" early exit print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
continue
if use_tx:
if configuration['TX_DBI_PRESENT']:
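                ## (local_lsb1 + 1) % 40 is 0 at bit 39 and 39 at bit 38, i.e. the
                ## two DBI positions of each 40-bit word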
if ((local_lsb1 + 1) % 40) == 0 or ((local_lsb1 + 1) % 40 == 39):
global_struct.g_debug_raw_data_vector_print_tx.append("{0:15} [{1:4}] = 1'b0 // DBI\n".format(" Channel {} TX ".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_tx.append("{0:20} [{1:4}] = 1'b0 ; // DBI\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_rx.append("// DBI = {0:17} [{1:4}];\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_tx_dbi_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
if configuration['TX_ENABLE_STROBE'] and configuration['TX_PERSISTENT_STROBE'] :
if (( local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] == configuration['TX_STROBE_GEN2_LOC']) or
(configuration ['REPLICATED_STRUCT'] and local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'] == configuration['TX_STROBE_GEN2_LOC']) ):
global_struct.g_debug_raw_data_vector_print_tx.append("{0:15} [{1:4}] = 1'b1 // STROBE\n".format(" Channel {} TX ".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_tx.append("{0:20} [{1:4}] = tx_stb_userbit ; // STROBE\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_rx.append("// STROBE = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_tx_strobe_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
elif ((configuration ['REPLICATED_STRUCT'] and local_lsb1 % configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] == configuration['TX_STROBE_GEN2_LOC']) ):
global_struct.g_debug_raw_data_vector_print_tx.append("{0:15} [{1:4}] = 1'b1 // STROBE\n".format(" Channel {} TX ".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_tx.append("{0:20} [{1:4}] = 1'b0 ; // STROBE (unused)\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_rx.append("// STROBE = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_tx_strobe_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
if configuration['TX_ENABLE_MARKER'] and configuration['TX_PERSISTENT_MARKER'] :
if local_lsb1 % configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] == configuration['TX_MARKER_GEN2_LOC']:
global_struct.g_debug_raw_data_vector_print_tx.append("{0:15} [{1:4}] = 1'b0 // MARKER\n".format(" Channel {} TX ".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_tx.append("{0:20} [{1:4}] = tx_mrk_userbit[{2}] ; // MARKER\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], (int(local_lsb1) % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']) // configuration['CHAN_TX_RAW1PHY_BEAT_MAIN']))
global_struct.g_concat_code_vector_slave_rx.append("// MARKER = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_TX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_TX_RAW1PHY_DATA_MAIN'], (int(local_lsb1) % configuration['CHAN_TX_RAW1PHY_DATA_MAIN']) // configuration['CHAN_TX_RAW1PHY_BEAT_MAIN']))
global_struct.g_dv_tx_marker_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
else:
if configuration['RX_DBI_PRESENT'] :
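                ## same two DBI positions as the TX side: bits 38 and 39 of each 40-bit word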
if ((local_lsb1 + 1) % 40) == 0 or ((local_lsb1 + 1) % 40 == 39):
global_struct.g_debug_raw_data_vector_print_rx.append("{0:15} [{1:4}] = 1'b0 // DBI\n".format(" Channel {} RX ".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_rx.append("// DBI = {0:17} [{1:4}];\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_tx.append("{0:20} [{1:4}] = 1'b0 ; // DBI\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_rx_dbi_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
if configuration['RX_ENABLE_STROBE'] and configuration['RX_PERSISTENT_STROBE'] :
if (( local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] == configuration['RX_STROBE_GEN2_LOC']) or
(configuration ['REPLICATED_STRUCT'] and local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'] == configuration['RX_STROBE_GEN2_LOC']) ):
global_struct.g_debug_raw_data_vector_print_rx.append("{0:15} [{1:4}] = 1'b1 // STROBE\n".format(" Channel {} RX ".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_rx.append("// STROBE = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_tx.append("{0:20} [{1:4}] = tx_stb_userbit ; // STROBE\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_rx_strobe_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
elif ((configuration ['REPLICATED_STRUCT'] and local_lsb1 % configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'] == configuration['RX_STROBE_GEN2_LOC']) ):
global_struct.g_debug_raw_data_vector_print_rx.append("{0:15} [{1:4}] = 1'b1 // STROBE\n".format(" Channel {} RX ".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_rx.append("// STROBE = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_slave_tx.append("{0:20} [{1:4}] = 1'b0 ; // STROBE (unused)\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_dv_rx_strobe_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
if configuration['RX_ENABLE_MARKER'] and configuration['RX_PERSISTENT_MARKER'] :
if local_lsb1 % configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'] == configuration['RX_MARKER_GEN2_LOC']:
global_struct.g_debug_raw_data_vector_print_rx.append("{0:15} [{1:4}] = 1'b0 // MARKER\n".format(" Channel {} RX ".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']))
global_struct.g_concat_code_vector_master_rx.append("// MARKER = {0:17} [{1:4}]\n".format("rx_phy_postflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], (int(local_lsb1) % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']) // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']))
global_struct.g_concat_code_vector_slave_tx.append("{0:20} [{1:4}] = tx_mrk_userbit[{2}] ; // MARKER\n".format(" assign tx_phy_preflop_{}".format(int(local_lsb1) // configuration['CHAN_RX_RAW1PHY_DATA_MAIN']), local_lsb1 % configuration['CHAN_RX_RAW1PHY_DATA_MAIN'], (int(local_lsb1) % configuration['CHAN_RX_RAW1PHY_DATA_MAIN']) // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']))
global_struct.g_dv_rx_marker_vector_print.append ("(1<<{}) | ".format(local_lsb1))
local_lsb1 += 1
check_for_more_bit = True
continue
return local_lsb1
## print_aib_assign_text_check_for_aib_bit
##########################################################################################
##########################################################################################
## print_aib_mapping_text
## Prints out AIB signaling
## This is a big function.
## configuration, direction are obvious
## signal2 = user signal
## wid1 = width of signal (may be less than entire signal)
## lsb1 = lsbit position of AIB line when viewed as long data vector
## lsb2 = lsbit position of signal2 (-1 means it is a scalar)
## llink_lsb = starting position inside the Logic Link (-1 means not part of logic link data)
## llink_name = logic link name (e.g. AR, awbus, etc.)
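## A hedged usage sketch (signal name, width, and llink name are illustrative):
##   lsb1 = print_aib_mapping_text(configuration, "output", "user_arvalid",
##                                 1, lsb1, -1, llink_lsb, "ar")
## maps one TX bit into the raw AIB data vector and returns the next free lsb,
## skipping any DBI/strobe/marker positions along the way.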
def print_aib_mapping_text(configuration, direction, signal2, wid1, lsb1, lsb2 = -1, llink_lsb=-1, llink_name=""):
sysv = global_struct.g_USE_SYSTEMV_INDEXING
use_tx = True if direction == "output" else False
## This section prints:
## // AXI to Logic Link Data Mapping
if use_tx:
if llink_lsb != -1:
global_struct.g_llink_vector_print_tx.append (" assign {0:20} {1:13} = {2:20} {3:13}\n".format(gen_llink_concat_fifoname (llink_name,"input" ), gen_index_msb (wid1, llink_lsb, sysv), signal2, gen_index_msb (wid1, lsb2, sysv)))
else:
if llink_lsb != -1:
global_struct.g_llink_vector_print_rx.append (" assign {0:20} {1:13} = {2:20} {3:13}\n".format(gen_llink_concat_fifoname (llink_name,"output"), gen_index_msb (wid1, llink_lsb, sysv), signal2, gen_index_msb (wid1, lsb2, sysv)))
local_lsb1 = lsb1
local_lsb2 = lsb2
    if configuration ['REPLICATED_STRUCT'] and False:  ## intentionally disabled branch
tx_chan_width = configuration['CHAN_TX_RAW1PHY_DATA_RSTRUCT']
rx_chan_width = configuration['CHAN_RX_RAW1PHY_DATA_RSTRUCT']
else:
tx_chan_width = configuration['CHAN_TX_RAW1PHY_DATA_MAIN']
rx_chan_width = configuration['CHAN_RX_RAW1PHY_DATA_MAIN']
enable_galt = configuration['GEN2_AS_GEN1_EN']
for each_bit in list (range (0, int(wid1))):
if use_tx:
## TX and RX Section for RTL
            ## Update: there are 2 DBI bits, so we need to do this twice in both places.
local_lsb1 = print_aib_assign_text_check_for_aib_bit (configuration, local_lsb1, use_tx, sysv)
if llink_lsb == -1:
global_struct.g_concat_code_vector_master_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20} ;\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, signal2, llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_slave_rx.append(" assign {2:20} = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, re.sub("^tx_", "rx_", signal2), llink_lsb))
else:
global_struct.g_concat_code_vector_slave_rx.append("// {2:20} = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, "nc", llink_lsb))
elif local_lsb2 == -1:
global_struct.g_concat_code_vector_master_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20}[{3:4}] ;\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, gen_llink_concat_fifoname (llink_name,"input" ), llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_slave_rx.append(" assign {2:20}[{3:4}] = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, gen_llink_concat_fifoname (llink_name,"output" ), llink_lsb))
else:
global_struct.g_concat_code_vector_master_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20}[{3:4}] ;\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, gen_llink_concat_fifoname (llink_name,"input" ), llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_slave_rx.append(" assign {2:20}[{3:4}] = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // tx_chan_width, local_lsb1 % tx_chan_width, gen_llink_concat_fifoname (llink_name,"output" ), llink_lsb))
## DV Vectors
if signal2 != "1'b0":
if llink_lsb == -1:
global_struct.g_dv_vector_print.append ("{:20} = {:4};\n".format( "tx_{}_f".format(signal2), local_lsb1))
elif local_lsb2 == -1:
global_struct.g_dv_vector_print.append ("{0:20} = {2:4}; {3:20}[{4:4}] = {2:4};\n".format("tx_{}_f".format(signal2), local_lsb2, local_lsb1, gen_llink_concat_fifoname (llink_name,"input") +"_f", llink_lsb))
else:
global_struct.g_dv_vector_print.append ("{0:20}[{1:4}] = {2:4}; {3:20}[{4:4}] = {2:4};\n".format("tx_{}_f".format(signal2), local_lsb2, local_lsb1, gen_llink_concat_fifoname (llink_name,"input") +"_f", llink_lsb))
llink_lsb+=1
## AXI to PHY IF Mapping AXI Manager Transmit
rec_strobe_or_marker_str = ""
if configuration['TX_ENABLE_STROBE'] and configuration['TX_PERSISTENT_STROBE'] == False and configuration['TX_STROBE_GEN2_LOC'] == local_lsb1 % tx_chan_width:
rec_strobe_or_marker_str = " // RECOVERED_STROBE"
if configuration['TX_ENABLE_MARKER'] and configuration['TX_PERSISTENT_MARKER'] == False and configuration['TX_MARKER_GEN2_LOC'] == local_lsb1 % configuration['CHAN_TX_RAW1PHY_BEAT_MAIN']:
rec_strobe_or_marker_str = " // RECOVERED_MARKER [{0}]".format((local_lsb1 % tx_chan_width) // configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'])
global_struct.g_debug_raw_data_vector_print_tx.append("{0:15} [{1:4}] = ".format(" Channel {} TX ".format(local_lsb1 // tx_chan_width), local_lsb1 % tx_chan_width))
if local_lsb2 != -1:
global_struct.g_debug_raw_data_vector_print_tx.append("{0:20} [{1:4}]{2}\n".format(signal2, local_lsb2,rec_strobe_or_marker_str))
local_lsb2 += 1
else:
global_struct.g_debug_raw_data_vector_print_tx.append("{0:20}{2}\n".format(signal2, local_lsb2, rec_strobe_or_marker_str))
local_lsb1 += 1
            ## There is a weird corner case where all the "valid" data has been sent
            ## but strobes, markers, and DBI bits remain, so we have to do this "twice":
            ## once before and once after the data.
            ## Update: there are 2 DBI bits, so we need to do this twice in both places.
if configuration ['REPLICATED_STRUCT']:
if 0 == (local_lsb1 % (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if use_tx else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'])):
if global_struct.g_SIGNAL_DEBUG:
print (" skip print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
continue
local_lsb1 = print_aib_assign_text_check_for_aib_bit (configuration, local_lsb1, use_tx, sysv)
else:
## RX and TX Section for RTL
            ## Update: there are 2 DBI bits, so we need to do this twice in both places.
local_lsb1 = print_aib_assign_text_check_for_aib_bit (configuration, local_lsb1, use_tx, sysv)
if llink_lsb == -1:
global_struct.g_concat_code_vector_slave_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20} ;\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, re.sub("^rx_", "tx_", signal2), llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_master_rx.append(" assign {2:20} = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, signal2, llink_lsb))
else:
global_struct.g_concat_code_vector_master_rx.append("// {2:20} = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, "nc", llink_lsb))
elif local_lsb2 == -1:
global_struct.g_concat_code_vector_slave_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20}[{3:4}] ;\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, gen_llink_concat_fifoname (llink_name,"input" ), llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_master_rx.append(" assign {2:20}[{3:4}] = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, gen_llink_concat_fifoname (llink_name,"output" ), llink_lsb))
else:
global_struct.g_concat_code_vector_slave_tx.append(" assign tx_phy_preflop_{0} [{1:4}] = {2:20}[{3:4}] ;\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, gen_llink_concat_fifoname (llink_name,"input" ), llink_lsb))
if signal2 != "1'b0":
global_struct.g_concat_code_vector_master_rx.append(" assign {2:20}[{3:4}] = rx_phy_postflop_{0} [{1:4}];\n".format(int(local_lsb1) // rx_chan_width, local_lsb1 % rx_chan_width, gen_llink_concat_fifoname (llink_name,"output" ), llink_lsb))
## DV Vectors
if signal2 != "1'b0":
if local_lsb2 != -1:
global_struct.g_dv_vector_print.append ("{0:20}[{1:4}] = {2:4}; {3:20}[{4:4}] = {2:4};\n".format("rx_{}_f".format(signal2), local_lsb2, local_lsb1, gen_llink_concat_fifoname (llink_name,"output") +"_f", llink_lsb))
llink_lsb+=1
else:
global_struct.g_dv_vector_print.append ("{:20} = {:4};\n".format( "rx_{}_f".format(signal2), local_lsb1))
rec_strobe_or_marker_str = ""
if configuration['RX_ENABLE_STROBE'] and configuration['RX_PERSISTENT_STROBE'] == False and configuration['RX_STROBE_GEN2_LOC'] == local_lsb1 % rx_chan_width:
rec_strobe_or_marker_str = " // RECOVERED_STROBE"
if configuration['RX_ENABLE_MARKER'] and configuration['RX_PERSISTENT_MARKER'] == False and configuration['RX_MARKER_GEN2_LOC'] == local_lsb1 % configuration['CHAN_RX_RAW1PHY_BEAT_MAIN']:
rec_strobe_or_marker_str = " // RECOVERED_MARKER [{0}]".format((local_lsb1 % rx_chan_width) // configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'])
global_struct.g_debug_raw_data_vector_print_rx.append("{0:15} [{1:4}] = ".format(" Channel {} RX ".format(local_lsb1 // rx_chan_width), local_lsb1 % rx_chan_width))
if local_lsb2 != -1:
global_struct.g_debug_raw_data_vector_print_rx.append("{0:20} [{1:4}]{2}\n".format(signal2, local_lsb2, rec_strobe_or_marker_str))
local_lsb2 += 1
else:
global_struct.g_debug_raw_data_vector_print_rx.append("{0:20}{2}\n".format(signal2, local_lsb2, rec_strobe_or_marker_str))
local_lsb1 += 1
            ## There is a weird corner case where all the "valid" data has been sent
            ## but strobes, markers, and DBI bits remain, so we have to do this "twice":
            ## once before and once after the data.
            ## Update: there are 2 DBI bits, so we need to do this twice in both places.
if configuration ['REPLICATED_STRUCT']:
if 0 == (local_lsb1 % (configuration['CHAN_TX_RAW1PHY_BEAT_MAIN'] if use_tx else configuration['CHAN_RX_RAW1PHY_BEAT_MAIN'])):
if global_struct.g_SIGNAL_DEBUG:
print (" skip print_aib_assign_text_check_for_aib_bit for {} for lsb {}".format("TX" if use_tx else "RX", local_lsb1))
continue
local_lsb1 = print_aib_assign_text_check_for_aib_bit (configuration, local_lsb1, use_tx, sysv)
return local_lsb1
## print_aib_mapping_text
##########################################################################################
##########################################################################################
## make_dv_file
## deprecated
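## The DV vector strings are recorded from the master's perspective, so the
## slave file swaps every tx_ prefix with rx_ (via the sentinel substitution
## below) before writing them out.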
def make_dv_file(configuration):
for direction in ['master', 'slave']:
name_file_name = "{}_{}_rawdata_map".format(configuration['MODULE'], direction)
file_name = open("{}/{}.svi".format(configuration['OUTPUT_DIR'], name_file_name), "w+")
print_verilog_header(file_name)
for string in global_struct.g_dv_vector_print:
string = re.sub('^tx_tx_', 'tx_', string)
string = re.sub('^rx_rx_', 'rx_', string)
            if direction == 'slave':
                ## Swap tx_/rx_ prefixes via an unlikely sentinel string so the
                ## two substitutions do not clobber each other
                string = re.sub('^tx_', 'UnL1ke1ySt!nG', string)
                string = re.sub(' tx_', ' UnL1ke1ySt!nG', string)
                string = re.sub('^rx_', 'tx_', string)
                string = re.sub(' rx_', ' tx_', string)
                string = re.sub('UnL1ke1ySt!nG', 'rx_', string)
file_name.write (string)
file_name.write ("\n")
if direction == 'master':
file_name.write ("tx_dbi_bit_f = ")
else:
file_name.write ("rx_dbi_bit_f = ")
for string in global_struct.g_dv_tx_dbi_vector_print:
file_name.write (string)
file_name.write ("0;\n")
if direction == 'master':
file_name.write ("rx_dbi_bit_f = ")
else:
file_name.write ("tx_dbi_bit_f = ")
for string in global_struct.g_dv_rx_dbi_vector_print:
file_name.write (string)
file_name.write ("0;\n")
if direction == 'master':
file_name.write ("tx_strobe_bit_f = ")
else:
file_name.write ("rx_strobe_bit_f = ")
for string in global_struct.g_dv_tx_strobe_vector_print:
file_name.write (string)
file_name.write ("0;\n")
if direction == 'master':
file_name.write ("rx_strobe_bit_f = ")
else:
file_name.write ("tx_strobe_bit_f = ")
for string in global_struct.g_dv_rx_strobe_vector_print:
file_name.write (string)
file_name.write ("0;\n")
if direction == 'master':
file_name.write ("tx_marker_bit_f = ")
else:
file_name.write ("rx_marker_bit_f = ")
for string in global_struct.g_dv_tx_marker_vector_print:
file_name.write (string)
file_name.write ("0;\n")
if direction == 'master':
file_name.write ("rx_marker_bit_f = ")
else:
file_name.write ("tx_marker_bit_f = ")
for string in global_struct.g_dv_rx_marker_vector_print:
file_name.write (string)
file_name.write ("0;\n")
file_name.close()
return
## make_dv_file
##########################################################################################
##########################################################################################
## print_logic_links
## Prints out information about the data structure inside the logic links
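## Example of the report this prints (names and widths are illustrative only):
##   Logic Link: ar master output data 42 bits
##        : VALID user_arvalid
##        : READY user_arready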
def print_logic_links(configuration):
for llink in configuration['LL_LIST']:
print ("")
print ("Logic Link: {0} master {1} data {2} bits".format(llink['NAME'], llink['DIR'], llink['WIDTH_MAIN']))
if llink['HASVALID'] == False:
print (" : No Valid")
else:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'valid':
print (" : VALID {0}".format(sig['NAME']))
if llink['HASREADY'] == False:
print (" : No Ready")
else:
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'ready':
print (" : READY {0}".format(sig['NAME']))
if len(llink['SIGNALLIST_MAIN']) != 0 and len(llink['SIGNALLIST_GALT']) != 0:
print (" MAIN Signaling data width {} bits".format (llink['WIDTH_MAIN']))
for sig in llink['SIGNALLIST_MAIN']:
if sig['TYPE'] == 'signal' or sig['TYPE'] == 'signal_valid':
print (" : {0:20} {1:<8} {2}_data {3}".format(sig['NAME'], " ", llink['NAME'], gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'])))
elif sig['TYPE'] == 'bus':
print (" : {0:20} {1:<8} {2}_data {3}".format(sig['NAME'], "[{}:{}]".format(sig['MSB'],sig['LSB']), llink['NAME'], gen_index_msb (sig['SIGWID'], sig['LLINDEX_MAIN_LSB'])))
if len(llink['SIGNALLIST_MAIN']) != 0 and len(llink['SIGNALLIST_GALT']) != 0:
print (" GALT Signaling data width {} bits".format (llink['WIDTH_GALT']))
for sig in llink['SIGNALLIST_GALT']:
if sig['TYPE'] == 'signal' or sig['TYPE'] == 'signal_valid':
print (" : {0:20} {1:<8} {2}_data {3}".format(sig['NAME'], " ", llink['NAME'], gen_index_msb (sig['SIGWID'], sig['LLINDEX_GALT_LSB'])))
elif sig['TYPE'] == 'bus':
print (" : {0:20} {1:<8} {2}_data {3}".format(sig['NAME'], "[{}:{}]".format(sig['MSB'],sig['LSB']), llink['NAME'], gen_index_msb (sig['SIGWID'], sig['LLINDEX_GALT_LSB'])))
print ("\n")
return
## print_logic_links
##########################################################################################
##########################################################################################
## print_verilog_header
def print_verilog_header(file_name):
file_name.write ("////////////////////////////////////////////////////////////\n")
file_name.write ("//\n")
file_name.write ("// (C) Copyright 2021 Eximius Design\n")
file_name.write ("//\n")
file_name.write ("// Licensed under the Apache License, Version 2.0 (the \"License\");\n")
file_name.write ("// you may not use this file except in compliance with the License.\n")
file_name.write ("// You may obtain a copy of the License at\n")
file_name.write ("//\n")
file_name.write ("// http://www.apache.org/licenses/LICENSE-2.0\n")
file_name.write ("//\n")
file_name.write ("// Unless required by applicable law or agreed to in writing, software\n")
file_name.write ("// distributed under the License is distributed on an \"AS IS\" BASIS,\n")
file_name.write ("// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n")
file_name.write ("// See the License for the specific language governing permissions and\n")
file_name.write ("// limitations under the License.\n")
file_name.write ("////////////////////////////////////////////////////////////\n")
file_name.write ("\n")
## print_verilog_header
##########################################################################################
def main():
## Initialize Global Variables / Structs
global_struct.clear_global_variables()
parser = ArgumentParser(description='Logic Link Generation Script.')
parser.add_argument('--cfg', type=str, required=True, help='config file for logic link')
parser.add_argument('--odir', type=str, required=False, help='location to write output files (default is ./module_name)')
parser.add_argument('--cfg_debug', required=False, help='print config file debug info', action="store_true")
parser.add_argument('--signal_debug', required=False, help='print signal processing debug info', action="store_true")
parser.add_argument('--packet_debug', required=False, help='print copious packet debug info', action="store_true")
    parser.add_argument('--sysv_indexing', type=lambda v: str(v).lower() not in ('false', '0'), default=True, required=False, help='Set to True to use SystemVerilog indexing. Set to False to use traditional bus indexing.')
args = parser.parse_args()
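    ## Example invocation (the script filename here is an assumption):
    ##   python llink_gen.py --cfg axi_mm.cfg --odir ./generated --signal_debug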
if (args.cfg_debug):
global_struct.g_CFG_DEBUG = True
if (args.signal_debug):
global_struct.g_SIGNAL_DEBUG = True
    if (args.packet_debug):
        global_struct.g_PACKET_DEBUG = True
    ## Propagate the command-line indexing choice
    global_struct.g_USE_SYSTEMV_INDEXING = args.sysv_indexing
configuration = parse_config_file(args.cfg)
if configuration['REPLICATED_STRUCT']:
orig_module = configuration['MODULE']
for rate in ['Full', 'Half', 'Quarter']:
global_struct.clear_global_variables()
configuration = parse_config_file(args.cfg)
## Skip quarter rate versions if we are in Gen1
if configuration['CHAN_TYPE'] == "Gen1Only" and rate == "Quarter":
continue
            if args.odir is None:
args.odir = configuration['MODULE']
configuration['TX_RATE'] = rate
configuration['RX_RATE'] = rate
configuration['MODULE'] = orig_module +"_"+rate.lower()
if not os.path.exists(args.odir):
os.makedirs(args.odir)
configuration['OUTPUT_DIR'] = args.odir
configuration = calculate_channel_parameters(configuration)
if global_struct.g_SIGNAL_DEBUG:
print_logic_links(configuration)
configuration = calculate_bit_locations(configuration)
make_name_file(configuration)
make_concat_file(configuration)
make_top_file(configuration)
make_list_files(configuration)
make_info_file(configuration)
#make_dv_file(configuration)
print ("Asymmetric Master and Slave with {:10} rate generated with base module name {:30} in this directory {}".format(rate, configuration['MODULE'], args.odir))
else:
        if args.odir is None:
args.odir = configuration['MODULE']
if not os.path.exists(args.odir):
os.makedirs(args.odir)
configuration['OUTPUT_DIR'] = args.odir
configuration = calculate_channel_parameters(configuration)
if global_struct.g_SIGNAL_DEBUG:
print_logic_links(configuration)
configuration = calculate_bit_locations(configuration)
make_name_file(configuration)
make_concat_file(configuration)
make_top_file(configuration)
make_list_files(configuration)
make_info_file(configuration)
#make_dv_file(configuration)
print ("Files generated here: {}".format(args.odir))
#if (configuration['TX_ENABLE_PACKETIZATION'] or configuration['RX_ENABLE_PACKETIZATION']):
#llink_dv_packet_postproc.generate_dv_packet("{}/{}_info.txt".format(args.odir,configuration['MODULE']), args.odir)
if __name__ == "__main__":
main()
##########################################################################################
## Next file in this dump: Bin/apropiacion.py
## from mfneirae/CvLAC-Complete (blob d05ac23fa7dcc2d33d5b37d5d5ac2a64b1e1f1a0,
## repo head a7e4f3c93a3f22b732fbb4670fe294a9ec3030ab), MIT license
##########################################################################################
#
#
# #############################################################################
# Copyright (c) 2018 Universidad Nacional de Colombia All Rights Reserved.
#
# This work was made as a development to improve data collection
# for self-assessment and accreditation processes in the Vicedeanship
# of academic affairs in the Engineering Faculty of the Universidad
# Nacional de Colombia and is licensed under a Creative Commons
# Attribution-NonCommercial - ShareAlike 4.0 International License
# and MIT Licence.
#
# by Manuel Embus.
#
# For more information write me to jai@mfneirae.com
# Or visit my webpage at https://mfneirae.com/
# #############################################################################
#
#
def evenextract():
from settings import my_url, name, doc, last, RH, COD_PRODUCTO
import init, bs4, logging, sys, re
global conteventos
LOG_FILENAME = './Logs/Registros.log'
logging.basicConfig(filename=LOG_FILENAME,level=logging.DEBUG,
format = "%(asctime)s:%(levelname)s:%(message)s")
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
if len(sys.argv) > 1:
level_name = sys.argv[1]
level = LEVELS.get(level_name, logging.NOTSET)
logging.basicConfig(level=level)
from urllib.request import urlopen as uReq
from bs4 import BeautifulSoup as soup
uClient = uReq(my_url)
page_html = uClient.read()
uClient.close()
all = 0
a = 0
x = 0
y = 0
conteventos = 0
auto = ""
vincula = ""
insti = ""
vinculain = ""
page_soup = soup(page_html,"html.parser")
containers = page_soup.findAll("table")
for a in range(0,len(containers)):
buscaeventos = containers[a].h3
#print(buscaeventos)
try:
if buscaeventos.text == "Eventos científicos":
all = a
#print(all)
break
except AttributeError:
pass
if all != 0:
containerb = containers[all]
container = containerb.findAll("table")
for x in range(0, len(container)):
cont = container[x]
info_evento = cont.td.text
            # Event name
index1 = info_evento.find("Nombre del evento:") + 18
index2 = info_evento.find("Tipo de evento:")
NombreEvento = info_evento[index1:index2]
            # Event type
index1 = info_evento.find("Tipo de evento:") + 15
index2 = info_evento.find(" Ámbito:")
TipoEvento = info_evento[index1:index2]
if TipoEvento.strip() == "Otro":
TipoEvento = "1"
elif TipoEvento.strip() == "Taller":
TipoEvento = "2"
elif TipoEvento.strip() == "Congreso":
TipoEvento = "3"
elif TipoEvento.strip() == "Encuentro":
TipoEvento = "4"
elif TipoEvento.strip() == "Seminario":
TipoEvento = "5"
elif TipoEvento.strip() == "Simposio":
TipoEvento = "6"
else:
logging.critical('Añadir a Tipo_Evento: ' + TipoEvento)
print ("ALERTA: Revisar el archivo Registros.log")
            # Scope (ambito)
index1 = info_evento.find("\xa0\r\n Ámbito: ") + 51
index2 = info_evento.find("\xa0 \r\n Realizado el:")
Ambito = info_evento[index1:index2]
            # Start and end dates
index1 = info_evento.find("Realizado el:") + 13
index2 = index1 + 4
AnoEventoini = info_evento[index1:index2]
if AnoEventoini == "," or AnoEventoini == ",\xa0\r\n":
MesEventoini = ""
AnoEventoini = ""
FechaEventoini = ""
MesEventofin = ""
AnoEventofin = ""
FechaEventofin = ""
else:
index1 = index1 + 5
index2 = index1 + 2
MesEventoini = info_evento[index1:index2]
index1 = info_evento.find("Realizado el:") + 13
index2 = index1 + 10
FechaEventoini = info_evento[index1:index2]
index1 = info_evento.find(",",index1,len(info_evento)) + 48
index2 = index1 + 4
AnoEventofin = info_evento[index1:index2]
if AnoEventofin == " \xa0\r\n" or AnoEventofin == ",":
MesEventofin = ""
AnoEventofin = ""
FechaEventofin = ""
else:
index1 = index1 + 5
index2 = index1 + 2
MesEventofin = info_evento[index1:index2]
index1 = info_evento.find("Realizado el:") + 13
index1 = info_evento.find(",",index1,len(info_evento)) + 48
index2 = index1 + 10
FechaEventofin = info_evento[index1:index2]
            # Event location
index1 = info_evento.find(" \xa0\r\n en ") + 51
index2 = info_evento.find(" \xa0 - \xa0\r\n")
LugarEvento = info_evento[index1:index2]
b_eventos = cont.findAll("td")
            # Authors
autores = b_eventos[3].findAll("li")
            if len(autores) == 0:
                auto = ""
                vincula = ""
else:
for z in range(0, len(autores)):
autor = autores[z].text
index1 = autor.find("Nombre:") + 8
index2= autor.find("\r\n Rol en el evento: ")
if len(auto) == 0:
auto = autor[index1:index2]
else:
auto = auto + ", " + autor[index1:index2]
index1 = autor.find("Rol en el evento: ") + 18
index2= autor.find("\r\n ",index1,len(autor))
if len(vincula) == 0:
vincula = autor[index1:index2]
else:
vincula = vincula + ", " + autor[index1:index2]
            # Institutions
Instituciones = b_eventos[2].findAll("li")
            if len(Instituciones) == 0:
                insti = ""
                vinculain = ""
else:
for z in range(0, len(Instituciones)):
institu = Instituciones[z].text
index1 = institu.find("Nombre de la institución:") + 25
index2= institu.find("\r\n Tipo de vinculación")
if len(insti) == 0:
insti = institu[index1:index2]
else:
insti = insti + ", " + institu[index1:index2]
index1 = institu.find("Tipo de vinculación") + 19
index2 = institu.find("'",index1,len(institu))
if len(vinculain) == 0:
vinculain = institu[index1:index2]
else:
vinculain = vinculain + ", " + institu[index1:index2]
            # Associated products
productos = b_eventos[1].findAll("li")
if len(productos) == 0:
init.rel_persona_producto_colciencias.append(str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ "0" + ";"\
+ "" + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',TipoEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Ambito.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',auto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vincula.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',insti.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vinculain.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.colciencias_apropiacion.append(str(RH) + str(COD_PRODUCTO) + ";"\
+ str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.inrel_personas_producto_colciencias.append( \
"REPLACE INTO `uapa_db`.`rel_personas_producto_colciencias`(`cod_rel_per_prod_col`,`cod_producto`,`cod_rh`,`cod_tipo_producto`,`nombre_producto`,`evento_asociado`,`datos_complementarios`,`lugar`,`ano`,`ambito`,`palabras_clave`,`areas`,`sectores`,`coautores`,`vincula_coautores`,`editorial`,`volumen`,`paginas`,`doi`,`finalidad`,`instituciones_asociadas`,`tipo_vinculacion_institucion`) VALUES"
+ "('"+ str(RH) + str(COD_PRODUCTO) + "',"
+ str(COD_PRODUCTO) + ","\
+ "'" + str(RH) + "',"\
+ "0" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Ambito.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',auto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vincula.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',insti.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vinculain.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "');\n")
init.incolciencias_apropiacion.append( \
"REPLACE INTO `uapa_db`.`colciencias_apropiacion`(`cod_colciencias_apropiacion`,`cod_rh`,`cod_rel_per_prod_col`,`fecha_ini`,`fecha_fin`,`cod_tipo_evento`) VALUES"
+ "('" + str(COD_PRODUCTO) + "',"\
+ "'" + str(RH) + "',"\
+ "'" + str(RH) + str(COD_PRODUCTO) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ");\n")
COD_PRODUCTO = COD_PRODUCTO + 1
else:
for y in range(0, len(productos)):
prod = productos[y].text
index1 = prod.find("Nombre del producto:") + 20
index2 = prod.find("Tipo de producto:")
NombreProducto = prod[index1:index2]
index1 = prod.find("Tipo de producto:") + 17
index2 = prod.find("\r\n",index1,len(prod))
Tipopub = prod[index1:index2]
if Tipopub == "Producción bibliográfica - Trabajos en eventos (Capítulos de memoria) - Completo":
Tipopub = "2"
elif Tipopub == "Producción técnica - Presentación de trabajo - Comunicación":
Tipopub = "3"
elif Tipopub == "Demás trabajos - Demás trabajos - Póster":
Tipopub = "4"
elif Tipopub == "Producción técnica - Presentación de trabajo - Conferencia":
Tipopub = "5"
elif Tipopub == "Producción técnica - Presentación de trabajo - Ponencia":
Tipopub = "6"
elif Tipopub == "Producción bibliográfica - Trabajos en eventos (Capítulos de memoria) - Resumen":
Tipopub = "12"
elif Tipopub == "Producción técnica - Presentación de trabajo - Congreso":
Tipopub = "13"
elif Tipopub == "Producción técnica - Presentación de trabajo - Simposio":
Tipopub = "14"
elif Tipopub == "Producción técnica - Presentación de trabajo - Seminario":
Tipopub = "15"
elif Tipopub == "Producción técnica - Presentación de trabajo - Otro":
Tipopub = "16"
                    else:
                        logging.critical('Añadir a Tipo_Producto: ' + Tipopub)
                        print ("ALERTA: Revisar el archivo Registros.log")
init.rel_persona_producto_colciencias.append(str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Tipopub.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreProducto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',TipoEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Ambito.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',auto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vincula.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ "" + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',insti.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vinculain.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.inrel_personas_producto_colciencias.append( \
"REPLACE INTO `uapa_db`.`rel_personas_producto_colciencias`(`cod_rel_per_prod_col`,`cod_producto`,`cod_rh`,`cod_tipo_producto`,`nombre_producto`,`evento_asociado`,`datos_complementarios`,`lugar`,`ano`,`ambito`,`palabras_clave`,`areas`,`sectores`,`coautores`,`vincula_coautores`,`editorial`,`volumen`,`paginas`,`doi`,`finalidad`,`instituciones_asociadas`,`tipo_vinculacion_institucion`) VALUES"
+ "('"+ str(RH) + str(COD_PRODUCTO) + "',"
+ str(COD_PRODUCTO) + ","\
+ "'" + str(RH) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Tipopub.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreProducto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Ambito.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',auto.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vincula.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',insti.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',vinculain.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "');\n")
init.colciencias_apropiacion.append(str(RH) + str(COD_PRODUCTO) + ";"\
+ str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
            + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',TipoEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.incolciencias_apropiacion.append( \
"REPLACE INTO `uapa_db`.`colciencias_apropiacion`(`cod_colciencias_apropiacion`,`cod_rh`,`cod_rel_per_prod_col`,`fecha_ini`,`fecha_fin`,`cod_tipo_evento`) VALUES"
+ "('" + str(COD_PRODUCTO) + "',"\
+ "'" + str(RH) + "',"\
+ "'" + str(RH) + str(COD_PRODUCTO) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventoini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEventofin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',TipoEvento.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ");\n")
COD_PRODUCTO = COD_PRODUCTO + 1
auto = ""
vincula = ""
insti = ""
vinculain = ""
else:
logging.info(' El Docente ' + name + ' ' + last + ' no tiene Eventos Asociados')
conteventos = [COD_PRODUCTO]
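# The per-field sanitizing chain used throughout these extractors is repeated
# verbatim for every column. A minimal helper sketch that performs the same
# cleanup once (the name clean_field and its placement here are illustrative,
# not part of the original script):
import re

def clean_field(value):
    # strip quotes and outer whitespace, protect the CSV separator, drop
    # newlines, collapse runs of spaces, then apply the same character filter
    value = value.replace('"', "").replace("'", "").strip()
    value = value.replace(";", "|").replace("\r\n", "").replace("\n", "").replace("\r", "")
    value = re.sub(' +', ' ', value)
    return re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]', r'', value)
# each CSV column above could then be written as clean_field(TipoEvento) + ";"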
def estrategiaextract():
from settings import my_url, name, doc, last, RH, COD_PRODUCTO
import init, bs4, logging, sys, re
from urllib.request import urlopen as uReq
from bs4 import BeautifulSoup as soup
global contEstrategia
uClient = uReq(my_url)
page_html = uClient.read()
uClient.close()
all = 0
a = 0
x = 0
y = 0
auto = ""
vincula = ""
insti = ""
vinculain = ""
page_soup = soup(page_html,"html.parser")
containers = page_soup.findAll("table")
for a in range(0,len(containers)):
buscaEstrategias = containers[a].h3
#print(buscaEstrategias)
try:
if buscaEstrategias.text == "Estrategias pedagógicas para el fomento a la CTI":
all = a
#print(all)
break
except AttributeError:
pass
if all != 0:
containerb = containers[all]
container = containerb.findAll("blockquote")
for x in range(0, len(container)):
cont = container[x]
info_Estrategia = cont.text
#Name of the strategy
index1 = info_Estrategia.find("Nombre de la Estrategia ") + 26
index2 = info_Estrategia.find("\xa0\r\n Inicio en")
NombreEstrategia = info_Estrategia[index1:index2]
#Start and end dates
index1 = info_Estrategia.find("\xa0\r\n Inicio en") + 44
index1 = info_Estrategia.find(" - ",index1,len(info_Estrategia)) + 3
index2 = index1 + 4
AnoEstrategiaini = info_Estrategia[index1:index2]
if AnoEstrategiaini == "," or AnoEstrategiaini == ",\xa0\r\n":
MesEstrategiaini = ""
AnoEstrategiaini = ""
FechaEstrategiaini = ""
MesEstrategiafin = ""
AnoEstrategiafin = ""
FechaEstrategiafin = ""
else:
index1 = info_Estrategia.find("\xa0\r\n Inicio en") + 44
index2 = info_Estrategia.find(" - ")
MesEstrategiaini = info_Estrategia[index1:index2]
index1 = info_Estrategia.find("\xa0\r\n Inicio en") + 44
index2 = info_Estrategia.find(",\xa0\r\n Finalizó en")
FechaEstrategiaini = info_Estrategia[index1:index2]
index1 = info_Estrategia.find(",\xa0\r\n Finalizó en :") + 49
index1 = info_Estrategia.find(" - ",index1,len(info_Estrategia)) + 3
index2 = info_Estrategia.find(",\xa0 \t\t\t\r\n")
AnoEstrategiafin = info_Estrategia[index1:index2]
if AnoEstrategiafin == "" or AnoEstrategiafin == "":
MesEstrategiafin = ""
AnoEstrategiafin = ""
FechaEstrategiafin = ""
else:
index1 = info_Estrategia.find(",\xa0\r\n Finalizó en :") + 49
index2 = info_Estrategia.find(" - ",index1,len(info_Estrategia))
MesEstrategiafin = info_Estrategia[index1:index2]
index1 = info_Estrategia.find(",\xa0\r\n Finalizó en :") + 49
index2 = index1 + 10
index2 = info_Estrategia.find(",\xa0 \t\t\t\r\n")
FechaEstrategiafin = info_Estrategia[index1:index2]
init.rel_persona_producto_colciencias.append(str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',"7".replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEstrategia.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "0" + ","\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "\n")
init.inrel_personas_producto_colciencias.append( \
"REPLACE INTO `uapa_db`.`rel_personas_producto_colciencias`(`cod_rel_per_prod_col`,`cod_producto`,`cod_rh`,`cod_tipo_producto`,`nombre_producto`,`evento_asociado`,`datos_complementarios`,`lugar`,`ano`,`ambito`,`palabras_clave`,`areas`,`sectores`,`coautores`,`vincula_coautores`,`editorial`,`volumen`,`paginas`,`doi`,`finalidad`,`instituciones_asociadas`,`tipo_vinculacion_institucion`) VALUES"
+ "('"+ str(RH) + str(COD_PRODUCTO) + "',"
+ str(COD_PRODUCTO) + ","\
+ "'" + str(RH) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',"7".replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',NombreEstrategia.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ");\n")
init.colciencias_apropiacion.append(str(RH) + str(COD_PRODUCTO) + ";"\
+ str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEstrategiafin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoEstrategiafin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesEstrategiafin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.incolciencias_apropiacion.append( \
"REPLACE INTO `uapa_db`.`colciencias_apropiacion`(`cod_colciencias_apropiacion`,`cod_rh`,`cod_rel_per_prod_col`,`fecha_ini`,`fecha_fin`,`cod_tipo_evento`) VALUES"
+ "('" + str(COD_PRODUCTO) + "',"\
+ "'" + str(RH) + "',"\
+ "'" + str(RH) + str(COD_PRODUCTO) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEstrategiaini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaEstrategiafin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "0" + ");\n")
COD_PRODUCTO = COD_PRODUCTO + 1
else:
logging.info(' El Docente ' + name + ' ' + last + ' no tiene Estrategias Asociadas')
contEstrategia = [COD_PRODUCTO]
def redesextract():
from settings import my_url, name, doc, last, RH, COD_PRODUCTO
import init, bs4, logging, sys, re
from urllib.request import urlopen as uReq
from bs4 import BeautifulSoup as soup
global contredes
uClient = uReq(my_url)
page_html = uClient.read()
uClient.close()
all = 0
a = 0
x = 0
y = 0
auto = ""
vincula = ""
insti = ""
vinculain = ""
page_soup = soup(page_html,"html.parser")
containers = page_soup.findAll("table")
for a in range(0,len(containers)):
buscaReds = containers[a].h3
#print(buscaReds)
try:
if buscaReds.text == "Redes de conocimiento especializado":
all = a
#print(all)
break
except AttributeError:
pass
if all != 0:
containerb = containers[all]
container = containerb.findAll("blockquote")
for x in range(0, len(container)):
cont = container[x]
info_red = cont.text
#Name of the network
index1 = info_red.find("Nombre de la red ") + 17
index2 = info_red.find("\xa0\r\n Tipo de red")
Nombrered = info_red[index1:index2]
# Network type
index1 = info_red.find("Tipo de red") + 11
index2 = info_red.find(",\xa0\r\n Creada el:")
Tipored = info_red[index1:index2]
# Network location
index1 = info_red.find("\xa0\r\n en ") + 42
index2 = info_red.find(" \xa0 \r\n")
LugarRed = info_red[index1:index2]
#Start and end dates
index1 = info_red.find("Creada el:") + 10
index2 = index1 + 4
AnoRedini = info_red[index1:index2]
if AnoRedini == "," or AnoRedini == ",\xa0\r\n":
MesRedini = ""
AnoRedini = ""
FechaRedini = ""
MesRedfin = ""
AnoRedfin = ""
FechaRedfin = ""
else:
index1 = index1 + 5
index2 = index1 + 2
MesRedini = info_red[index1:index2]
index1 = info_red.find("Creada el:") + 10
index2 = index1 + 10
FechaRedini = info_red[index1:index2]
index1 = info_red.find(",",index1,index1 + 58) + 40
index2 = index1 + 4
AnoRedfin = info_red[index1:index2]
if AnoRedfin == " " or AnoRedfin == ",":
MesRedfin = ""
AnoRedfin = ""
FechaRedfin = ""
else:
index1 = index1 + 5
index2 = index1 + 2
MesRedfin = info_red[index1:index2]
index1 = info_red.find("Creada el:") + 10
index1 = info_red.find(",",index1,index1 + 58) + 40
index2 = index1 + 10
FechaRedfin = info_red[index1:index2]
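# note: the CSV row below tags the network with product type "1", while the SQL REPLACE further down uses "7"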
init.rel_persona_producto_colciencias.append(str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',"1".replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Nombrered.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "0" + ","\
+ "" + ";"\
+ "" + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarRed.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "" + ";"\
+ "\n")
init.inrel_personas_producto_colciencias.append( \
"REPLACE INTO `uapa_db`.`rel_personas_producto_colciencias`(`cod_rel_per_prod_col`,`cod_producto`,`cod_rh`,`cod_tipo_producto`,`nombre_producto`,`evento_asociado`,`datos_complementarios`,`lugar`,`ano`,`ambito`,`palabras_clave`,`areas`,`sectores`,`coautores`,`vincula_coautores`,`editorial`,`volumen`,`paginas`,`doi`,`finalidad`,`instituciones_asociadas`,`tipo_vinculacion_institucion`) VALUES"
+ "('"+ str(RH) + str(COD_PRODUCTO) + "',"
+ str(COD_PRODUCTO) + ","\
+ "'" + str(RH) + "',"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',"7".replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',Nombrered.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',LugarRed.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ","\
+ "null" + ");\n")
init.colciencias_apropiacion.append(str(RH) + str(COD_PRODUCTO) + ";"\
+ str(RH) + ";"\
+ str(COD_PRODUCTO) + ";"\
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaRedini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoRedini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesRedini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaRedfin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',AnoRedfin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',MesRedfin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + ";" \
+ "\n")
init.incolciencias_apropiacion.append( \
"REPLACE INTO `uapa_db`.`colciencias_apropiacion`(`cod_colciencias_apropiacion`,`cod_rh`,`cod_rel_per_prod_col`,`fecha_ini`,`fecha_fin`,`cod_tipo_evento`) VALUES"
+ "('" + str(COD_PRODUCTO) + "',"\
+ "'" + str(RH) + "',"\
+ "'" + str(RH) + str(COD_PRODUCTO) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaRedini.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "'" + re.sub(r'[^A-Za-z0-9éèáàéñèíìúùó ò]',r'',re.sub(' +',' ',FechaRedfin.replace('"',"").replace("'","").strip().replace(";" , "|").replace("\r\n","").replace("\n","").replace("\r",""))) + "',"\
+ "0" + ");\n")
COD_PRODUCTO = COD_PRODUCTO + 1
else:
logging.info(' El Docente ' + name + ' ' + last + ' no tiene Redes Asociadas')
contredes = [COD_PRODUCTO]
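# The REPLACE statements above are assembled by string concatenation, which is
# why every field needs the regex scrubbing. A minimal parameterized sketch of
# the same insert, assuming a DB-API cursor with %s placeholders (e.g. PyMySQL
# or mysql-connector); the helper name and the connection setup are
# illustrative, not part of the original script:
def replace_apropiacion(cursor, cod_producto, rh, fecha_ini, fecha_fin, tipo_evento):
    # the driver quotes every parameter, so the per-field cleanup chain is no
    # longer needed for safety, only for normalization
    cursor.execute(
        "REPLACE INTO `uapa_db`.`colciencias_apropiacion`"
        "(`cod_colciencias_apropiacion`,`cod_rh`,`cod_rel_per_prod_col`,"
        "`fecha_ini`,`fecha_fin`,`cod_tipo_evento`)"
        " VALUES (%s,%s,%s,%s,%s,%s)",
        (cod_producto, rh, str(rh) + str(cod_producto), fecha_ini, fecha_fin, tipo_evento),
    )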
| 67.054348
| 413
| 0.447838
| 4,103
| 43,183
| 4.638557
| 0.078479
| 0.044136
| 0.045082
| 0.030895
| 0.801019
| 0.779739
| 0.765185
| 0.73513
| 0.730559
| 0.717055
| 0
| 0.017482
| 0.288655
| 43,183
| 643
| 414
| 67.158631
| 0.60209
| 0.021374
| 0
| 0.658333
| 0
| 0.006667
| 0.208469
| 0.095414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005
| false
| 0.005
| 0.02
| 0
| 0.025
| 0.003333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d05c8d8b0b5e7ac5e270736ae84d4cc49f52efa5
| 33
|
py
|
Python
|
src_nlp/tensorflow/toward_control/vocab/imdb/__init__.py
|
ashishpatel26/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | 1
|
2019-02-12T09:22:00.000Z
|
2019-02-12T09:22:00.000Z
|
src_nlp/tensorflow/toward_control/vocab/imdb/__init__.py
|
loopzxl/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | null | null | null |
src_nlp/tensorflow/toward_control/vocab/imdb/__init__.py
|
loopzxl/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | 1
|
2020-10-15T21:34:17.000Z
|
2020-10-15T21:34:17.000Z
|
from .imdb_vocab import IMDBVocab
| 33
| 33
| 0.878788
| 5
| 33
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d07d0a776fb56851c974190631a3ab8fe5ce45c0
| 2,782
|
py
|
Python
|
tests/tensor/mul_test.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
tests/tensor/mul_test.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
tests/tensor/mul_test.py
|
kbrodt/tor4
|
d09740b746c534e67a72f492c7c03654f5888a46
|
[
"MIT"
] | null | null | null |
from tor4 import tensor
def test_tensor_mul_with_scalar():
    a = tensor(data=[1, 2, 3])
    am2 = a * 2
    assert am2.tolist() == [2, 4, 6]
    assert not am2.requires_grad

def test_tensor_rmul_with_scalar():
    a = tensor(data=[1, 2, 3])
    am3 = 3 * a
    assert am3.tolist() == [3, 6, 9]
    assert not am3.requires_grad

def test_tensor_mul():
    a = tensor(data=[1, 2, 3])
    b = tensor(data=[-1, 3, 1])
    amb = a * b
    assert amb.tolist() == [-1, 6, 3]
    assert not amb.requires_grad

def test_tensor_mul_backward():
    a = tensor(data=[1, 2, 3])
    b = tensor(data=[-1, 3, 1.0], requires_grad=True)
    amb = a * b
    amb.backward(tensor([1, 2, 3]))
    assert amb.tolist() == [-1, 6, 3]
    assert not a.requires_grad
    assert b.requires_grad
    assert amb.requires_grad
    assert a.grad is None
    assert b.grad.tolist() == [1, 4, 9]

def test_tensor_rmul_backward():
    a = tensor(data=[1, 2, 3.0], requires_grad=True)
    b = tensor(data=[-1, 3, 1])
    amb = a * b
    amb.backward(tensor([3, 2, 1]))
    assert amb.tolist() == [-1, 6, 3]
    assert a.requires_grad
    assert not b.requires_grad
    assert amb.requires_grad
    assert a.grad.tolist() == [-3, 6, 1]
    assert b.grad is None

def test_tensor_imul_backward():
    a = tensor(data=[1, 2, 3.0], requires_grad=True)
    b = tensor(data=[-1, 3, 1])
    try:
        a *= b
        raise AssertionError()
    except RuntimeError:
        assert True

def test_tensor_mul_broadcast_backward():
    a = tensor(data=[[1, 2, 3], [1, 1, 2]])
    b = tensor(data=[-1, 3, 1.0], requires_grad=True)
    amb = a * b
    amb.backward(tensor([[1, 1, 1], [1, 1, 1]]))
    assert amb.tolist() == [[-1, 6, 3], [-1, 3, 2]]
    assert not a.requires_grad
    assert b.requires_grad
    assert amb.requires_grad
    assert a.grad is None
    assert b.grad.tolist() == [2, 3, 5]

def test_tensor_mul_broadcast2_backward():
    a = tensor(data=[[1, 2, 3], [1, 1, 2]])
    b = tensor(data=[[-1, 3, 1.0]], requires_grad=True)
    amb = a * b
    amb.backward(tensor([[1, 1, 1], [1, 1, 1]]))
    assert amb.tolist() == [[-1, 6, 3], [-1, 3, 2]]
    assert not a.requires_grad
    assert b.requires_grad
    assert amb.requires_grad
    assert a.grad is None
    assert b.grad.tolist() == [[2, 3, 5]]

def test_tensor_mul_broadcast3_backward():
    a = tensor(data=[[[1, 2, 3], [1, 1, 2]], [[1, 2, 3], [1, 1, 2]]])
    b = tensor(data=[[1], [0.0]], requires_grad=True)
    amb = a * b
    amb.backward(tensor([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]]))
    assert amb.tolist() == [[[1, 2, 3], [0, 0, 0]], [[1, 2, 3], [0, 0, 0]]]
    assert not a.requires_grad
    assert b.requires_grad
    assert amb.requires_grad
    assert a.grad is None
    assert b.grad.tolist() == [[12], [8]]
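# The three broadcast tests above all exercise the same rule: a gradient
# flowing into an operand that was broadcast must be summed back over the
# broadcast axes. A hedged NumPy sketch of that reduction (tor4's internal
# helper is not visible in this file, so the name and shape handling here
# are illustrative):
import numpy as np

def unbroadcast(grad, shape):
    # sum away leading axes that broadcasting prepended
    while grad.ndim > len(shape):
        grad = grad.sum(axis=0)
    # then sum over axes that were stretched from size 1, keeping dims
    for axis, size in enumerate(shape):
        if size == 1:
            grad = grad.sum(axis=axis, keepdims=True)
    return grad

# matches test_tensor_mul_broadcast_backward: d(a*b)/db = a, reduced to b's shape
assert unbroadcast(np.array([[1, 2, 3], [1, 1, 2]]), (3,)).tolist() == [2, 3, 5]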
| 26
| 75
| 0.57225
| 467
| 2,782
| 3.291221
| 0.094218
| 0.032531
| 0.035133
| 0.039037
| 0.802863
| 0.786597
| 0.731945
| 0.710475
| 0.649967
| 0.643461
| 0
| 0.077398
| 0.242991
| 2,782
| 106
| 76
| 26.245283
| 0.652422
| 0
| 0
| 0.5375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.475
| 1
| 0.1125
| false
| 0
| 0.0125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d0825522ea7c61126f8928391eb70a380b2dcfc1
| 28
|
py
|
Python
|
Python/Chapter6/package1/c4.py
|
MyHeartWillGoOnWendy/frontend-notes
|
40a823b968c91b7a2a40bbf17519ca23bdc1e215
|
[
"MIT"
] | 8
|
2019-01-07T14:21:46.000Z
|
2020-05-29T07:33:40.000Z
|
Python/Chapter6/package1/c4.py
|
wkl007/frontend-notes
|
c8c5d2fd281a9353885548d57602641dd3820ae6
|
[
"MIT"
] | null | null | null |
Python/Chapter6/package1/c4.py
|
wkl007/frontend-notes
|
c8c5d2fd281a9353885548d57602641dd3820ae6
|
[
"MIT"
] | 4
|
2019-06-02T08:04:05.000Z
|
2021-12-22T05:39:46.000Z
|
import t
print(t.sys.path)
| 7
| 17
| 0.714286
| 6
| 28
| 3.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 3
| 18
| 9.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
d0e5b7f5656176d949dc4f1467de893f543272a1
| 242
|
py
|
Python
|
src/exceptions.py
|
Chisanan232/LWTs
|
da91a1636325f141a7b7f96132f2391a5e973549
|
[
"Apache-2.0"
] | 1
|
2022-03-18T15:21:02.000Z
|
2022-03-18T15:21:02.000Z
|
src/exceptions.py
|
Chisanan232/LWTs
|
da91a1636325f141a7b7f96132f2391a5e973549
|
[
"Apache-2.0"
] | null | null | null |
src/exceptions.py
|
Chisanan232/LWTs
|
da91a1636325f141a7b7f96132f2391a5e973549
|
[
"Apache-2.0"
] | null | null | null |
class DeviceModelDoesnotExistException(Exception):
    def __str__(self):
        return "Target device model doesn't exist."

class ParameterCannotBeNone(Exception):
    def __str__(self):
        return "Parameter cannot all be None."
| 18.615385
| 51
| 0.714876
| 25
| 242
| 6.6
| 0.76
| 0.145455
| 0.181818
| 0.230303
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206612
| 242
| 12
| 52
| 20.166667
| 0.859375
| 0
| 0
| 0.333333
| 0
| 0
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ef877f32f0711c105853e72a3f783372419f7222
| 217
|
py
|
Python
|
tests/test_signature.py
|
liushengli2020/AnEventApi
|
31f1fac2cefec82503f96ae520d9d156dcef49d6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_signature.py
|
liushengli2020/AnEventApi
|
31f1fac2cefec82503f96ae520d9d156dcef49d6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_signature.py
|
liushengli2020/AnEventApi
|
31f1fac2cefec82503f96ae520d9d156dcef49d6
|
[
"Apache-2.0"
] | null | null | null |
from eventapp.services.signature_util import generate_signature
def test_quit_event_failed(client, app):
    assert generate_signature('123', 'abc') == '8f16771f9f8851b26f4d460fa17de93e2711c7e51337cb8a608a0f81e1c1b6ae'
| 72.333333
| 112
| 0.857143
| 21
| 217
| 8.571429
| 0.857143
| 0.188889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216749
| 0.064516
| 217
| 3
| 112
| 72.333333
| 0.669951
| 0
| 0
| 0
| 1
| 0
| 0.321101
| 0.293578
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
4be623b445ec525331e29cad329516230bb52806
| 90
|
py
|
Python
|
Temp_F_to_C.py
|
juliaviolet/Python_Bootcamp_Jos-_Padilla
|
0a061283edcb7b33d5a7e165e8811ee61d694515
|
[
"MIT"
] | null | null | null |
Temp_F_to_C.py
|
juliaviolet/Python_Bootcamp_Jos-_Padilla
|
0a061283edcb7b33d5a7e165e8811ee61d694515
|
[
"MIT"
] | null | null | null |
Temp_F_to_C.py
|
juliaviolet/Python_Bootcamp_Jos-_Padilla
|
0a061283edcb7b33d5a7e165e8811ee61d694515
|
[
"MIT"
] | null | null | null |
def to_celsius(x):
    return (x - 32) * 5 / 9

for x in range(0, 101, 10):
    print(x, to_celsius(x))
| 18
| 25
| 0.655556
| 21
| 90
| 2.714286
| 0.714286
| 0.315789
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12987
| 0.144444
| 90
| 4
| 26
| 22.5
| 0.61039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
4bec5d650297a051bcf67d0dfde5e15ac5c7b75c
| 248
|
py
|
Python
|
tools/esp_prov/security/__init__.py
|
fbucafusco/esp-idf
|
c2ccc383dae2a47c2c2dc8c7ad78175a3fd11361
|
[
"Apache-2.0"
] | null | null | null |
tools/esp_prov/security/__init__.py
|
fbucafusco/esp-idf
|
c2ccc383dae2a47c2c2dc8c7ad78175a3fd11361
|
[
"Apache-2.0"
] | null | null | null |
tools/esp_prov/security/__init__.py
|
fbucafusco/esp-idf
|
c2ccc383dae2a47c2c2dc8c7ad78175a3fd11361
|
[
"Apache-2.0"
] | null | null | null |
# SPDX-FileCopyrightText: 2018-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#
from .security0 import * # noqa: F403, F401
from .security1 import * # noqa: F403, F401
from .security2 import * # noqa: F403, F401
| 31
| 71
| 0.725806
| 33
| 248
| 5.454545
| 0.666667
| 0.166667
| 0.233333
| 0.3
| 0.244444
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149038
| 0.16129
| 248
| 7
| 72
| 35.428571
| 0.716346
| 0.629032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4bedd61391cc20127dbffd9653579357f217fdd9
| 77
|
py
|
Python
|
Project_Codev0.1/Class-diagram_Classes/class User_Preference.py
|
cyberseihis/Wallsource
|
4bd981e75c3ebf97c9673ffb80147ef2bdf7d61a
|
[
"MIT"
] | null | null | null |
Project_Codev0.1/Class-diagram_Classes/class User_Preference.py
|
cyberseihis/Wallsource
|
4bd981e75c3ebf97c9673ffb80147ef2bdf7d61a
|
[
"MIT"
] | null | null | null |
Project_Codev0.1/Class-diagram_Classes/class User_Preference.py
|
cyberseihis/Wallsource
|
4bd981e75c3ebf97c9673ffb80147ef2bdf7d61a
|
[
"MIT"
] | null | null | null |
class User_Preference:
    def __get__(self, name):
        return self.name
| 25.666667
| 29
| 0.675325
| 10
| 77
| 4.7
| 0.8
| 0.340426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246753
| 77
| 3
| 30
| 25.666667
| 0.810345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
4bff3034903b61ed6ec4f12df2b3fe0be1014015
| 65
|
py
|
Python
|
src/__init__.py
|
tbaudier/gaga
|
87ec65450b9048ddf87cfa22a53ecba12fd019bd
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
tbaudier/gaga
|
87ec65450b9048ddf87cfa22a53ecba12fd019bd
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
tbaudier/gaga
|
87ec65450b9048ddf87cfa22a53ecba12fd019bd
|
[
"Apache-2.0"
] | null | null | null |
# import files
from .gaga import *
from .gaga_helpers import *
| 10.833333
| 27
| 0.723077
| 9
| 65
| 5.111111
| 0.555556
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 65
| 5
| 28
| 13
| 0.884615
| 0.184615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ef6702ff39cc4b1869ac17f6bc76152290b0c4f8
| 210
|
py
|
Python
|
mtpm/utils/__init__.py
|
Gregory-Eales/mban
|
d8b35db51c7e601b1db777d9a80343600374250b
|
[
"Apache-2.0"
] | 1
|
2021-04-01T13:56:38.000Z
|
2021-04-01T13:56:38.000Z
|
mtpm/utils/__init__.py
|
Gregory-Eales/multi-task-policy-modularization
|
d8b35db51c7e601b1db777d9a80343600374250b
|
[
"Apache-2.0"
] | null | null | null |
mtpm/utils/__init__.py
|
Gregory-Eales/multi-task-policy-modularization
|
d8b35db51c7e601b1db777d9a80343600374250b
|
[
"Apache-2.0"
] | null | null | null |
from .graph import *
from .image import *
from .dir import *
from .log import *
from .seed import *
from .train_loop import *
from .experiment_loop import *
from .data import *
from .multi_task_wrapper import *
| 23.333333
| 33
| 0.747619
| 31
| 210
| 4.935484
| 0.451613
| 0.522876
| 0.183007
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 210
| 9
| 33
| 23.333333
| 0.874286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32272258671efe15d18cf721dc18d0f0eedf61f9
| 339
|
py
|
Python
|
Exercicios-mundo-3/desafio109/teste.py
|
talitadeoa/Exercicios-Python
|
6ffac5b403ef4636d8b7b37aba7998dade8a88b8
|
[
"MIT"
] | null | null | null |
Exercicios-mundo-3/desafio109/teste.py
|
talitadeoa/Exercicios-Python
|
6ffac5b403ef4636d8b7b37aba7998dade8a88b8
|
[
"MIT"
] | null | null | null |
Exercicios-mundo-3/desafio109/teste.py
|
talitadeoa/Exercicios-Python
|
6ffac5b403ef4636d8b7b37aba7998dade8a88b8
|
[
"MIT"
] | null | null | null |
import moeda
p = float(input('Digite o preço: R$'))
print(f'A metade de {moeda.moeda(p)} é {moeda.metade(p,True)}')
print(f'O dobro de {moeda.moeda(p)} é {moeda.dobro(p,True)}')
print(f'Aumentado 13% de {moeda.moeda(p)}, temos {moeda.aumentar(p,13,True)}')
print(f'Diminuindo 14% de {moeda.moeda(p)}, temos {moeda.diminuir(p,14,True)}')
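# teste.py imports a local moeda module that is not included in this file. A
# hedged stub consistent with the calls above (signatures inferred from usage
# only; the real exercise module may differ):
# --- sketch of moeda.py ---
def moeda(preco):
    return f'R${preco:.2f}'

def metade(preco, formato=False):
    r = preco / 2
    return moeda(r) if formato else r

def dobro(preco, formato=False):
    r = preco * 2
    return moeda(r) if formato else r

def aumentar(preco, taxa, formato=False):
    r = preco + preco * taxa / 100
    return moeda(r) if formato else r

def diminuir(preco, taxa, formato=False):
    r = preco - preco * taxa / 100
    return moeda(r) if formato else r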
| 37.666667
| 79
| 0.678466
| 63
| 339
| 3.650794
| 0.380952
| 0.130435
| 0.208696
| 0.226087
| 0.365217
| 0.365217
| 0
| 0
| 0
| 0
| 0
| 0.02623
| 0.100295
| 339
| 9
| 79
| 37.666667
| 0.727869
| 0
| 0
| 0
| 0
| 0.333333
| 0.764012
| 0.286136
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
324713177bfed4170d705490d88d509ee01c5c89
| 27
|
py
|
Python
|
template.py
|
teetangh/Kaustav-Kaggle-Workspace
|
1f226b469cc20edb7bf6a2cbf024260e40ca8b18
|
[
"MIT"
] | null | null | null |
template.py
|
teetangh/Kaustav-Kaggle-Workspace
|
1f226b469cc20edb7bf6a2cbf024260e40ca8b18
|
[
"MIT"
] | null | null | null |
template.py
|
teetangh/Kaustav-Kaggle-Workspace
|
1f226b469cc20edb7bf6a2cbf024260e40ca8b18
|
[
"MIT"
] | null | null | null |
print("This is a Template")
| 27
| 27
| 0.740741
| 5
| 27
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 27
| 1
| 27
| 27
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
328ec1bff3706d0e39894795d1ff349214f7a3f2
| 165
|
py
|
Python
|
OmniDB/OmniDB_app/views/__init__.py
|
ziodave/OmniDB
|
27e857ec10c401ee16f85d8705db3bcdd4222aff
|
[
"MIT"
] | null | null | null |
OmniDB/OmniDB_app/views/__init__.py
|
ziodave/OmniDB
|
27e857ec10c401ee16f85d8705db3bcdd4222aff
|
[
"MIT"
] | null | null | null |
OmniDB/OmniDB_app/views/__init__.py
|
ziodave/OmniDB
|
27e857ec10c401ee16f85d8705db3bcdd4222aff
|
[
"MIT"
] | null | null | null |
from . import login, connections, users, workspace, tree, tree_snippets, tree_postgresql, tree_oracle, tree_mysql, tree_mariadb, monitor_dashboard, plugins, polling
| 82.5
| 164
| 0.824242
| 21
| 165
| 6.190476
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09697
| 165
| 1
| 165
| 165
| 0.872483
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32a626409791aaf66c11fea2190c048f3f2227d4
| 98
|
py
|
Python
|
search/tests.py
|
RossBrunton/BMAT
|
5e102935cc6166f4d8ea13051769787c47303153
|
[
"MIT"
] | null | null | null |
search/tests.py
|
RossBrunton/BMAT
|
5e102935cc6166f4d8ea13051769787c47303153
|
[
"MIT"
] | 47
|
2015-09-02T10:22:41.000Z
|
2021-06-10T19:15:00.000Z
|
search/tests.py
|
RossBrunton/BMAT
|
5e102935cc6166f4d8ea13051769787c47303153
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth.models import User
import json
| 19.6
| 43
| 0.826531
| 15
| 98
| 5.4
| 0.733333
| 0.246914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 98
| 4
| 44
| 24.5
| 0.94186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32a934d958e9e267a75747fc575dc868fbeeccb2
| 38
|
py
|
Python
|
jdaviz/configs/imviz/plugins/links_control/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 55
|
2019-05-24T18:53:05.000Z
|
2022-03-14T08:45:52.000Z
|
jdaviz/configs/imviz/plugins/links_control/__init__.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 1,105
|
2019-05-09T15:17:35.000Z
|
2022-03-31T21:22:18.000Z
|
jdaviz/configs/imviz/plugins/links_control/__init__.py
|
rosteen/jdaviz
|
e02c08d68ef71c5e40600785f46e65e5ae95e236
|
[
"MIT",
"BSD-3-Clause"
] | 49
|
2019-05-07T18:05:42.000Z
|
2022-03-22T15:15:34.000Z
|
from .links_control import * # noqa
| 19
| 37
| 0.710526
| 5
| 38
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 38
| 1
| 38
| 38
| 0.866667
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32b250b7514ed4c2cb906a3570ba00587a51c3b9
| 32
|
py
|
Python
|
pynemo/core/base/model/__init__.py
|
SSripilaipong/pynemo
|
f4dedd2599ec78b2ffe73f55b1d2b8b5da1b1e7f
|
[
"MIT"
] | null | null | null |
pynemo/core/base/model/__init__.py
|
SSripilaipong/pynemo
|
f4dedd2599ec78b2ffe73f55b1d2b8b5da1b1e7f
|
[
"MIT"
] | null | null | null |
pynemo/core/base/model/__init__.py
|
SSripilaipong/pynemo
|
f4dedd2599ec78b2ffe73f55b1d2b8b5da1b1e7f
|
[
"MIT"
] | null | null | null |
from .node import NodeModelBase
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
087ff6b94264780b84b0d85ecd99ae0971b7e0b8
| 13,235
|
py
|
Python
|
src/analysis_helper.py
|
augmento-ai/quant-reseach
|
6b3bc4c01a8d533dfa1826d59aa90fbc4c6f98cd
|
[
"MIT"
] | 56
|
2019-06-14T18:05:28.000Z
|
2022-01-24T15:32:40.000Z
|
src/analysis_helper.py
|
augmento-ai/quant-reseach
|
6b3bc4c01a8d533dfa1826d59aa90fbc4c6f98cd
|
[
"MIT"
] | 1
|
2020-04-01T09:31:04.000Z
|
2020-04-01T12:32:31.000Z
|
src/analysis_helper.py
|
augmento-ai/quant-reseach
|
6b3bc4c01a8d533dfa1826d59aa90fbc4c6f98cd
|
[
"MIT"
] | 33
|
2019-06-19T13:27:31.000Z
|
2022-01-25T23:57:17.000Z
|
import numpy as np
import numba as nb
@nb.jit("(f8[:])(f8[:], f8[:])", nopython=True, nogil=True, cache=True)
def nb_safe_divide(a, b):
# divide each element in a by each element in b
# if element b == 0.0, return element = 0.0
c = np.zeros(a.shape[0], dtype=np.float64)
for i in range(a.shape[0]):
if b[i] != 0.0:
c[i] = a[i] / b[i]
return c
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_average(arr, window_size):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size-1) * arr[0], arr))
# for each output element, find the mean of the last few input elements
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
out_arr[i] = np.mean(new_arr[i : i + window_size])
return out_arr
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_sd(arr, window_size):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size-1) * arr[0], arr))
# for each output element, find the mean and std of the last few
# input elements, and standardise the input element by the mean and std of the window
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
num = new_arr[i+window_size-1] - np.mean(new_arr[i : i + window_size-1])
denom = np.std(new_arr[i : i + window_size-1])
if denom != 0.0:
out_arr[i] = num / denom
return out_arr
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_sd_rand(arr, window_size_rand):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size_rand-1) * arr[0], arr))
# for each output element, find the mean and std of the last few
# input elements, and standardise the input element by the mean and std of the window
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
window_size_std = 1.0
window_size = round(np.random.normal(window_size_rand, window_size_std))
num = new_arr[i+window_size-1] - np.mean(new_arr[i : i + window_size-1])
denom = np.std(new_arr[i : i + window_size-1])
if denom != 0.0:
out_arr[i] = num / denom
return out_arr
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_norm(arr, window_size):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size-1) * arr[0], arr))
# for each output element, find the mean and std of the last few
# input elements, and standardise the input element by the mean and std of the window
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
num = new_arr[i+window_size-1] - np.mean(new_arr[i : i + window_size])
denom = np.max(np.abs(new_arr[i : i + window_size] - np.mean(new_arr[i : i + window_size])))
if denom != 0.0:
out_arr[i] = num / denom
return out_arr
@nb.jit("(f8[:])(f8[:], i8, f8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_norm_rand(arr, window_size_rand, peturb):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size_rand-1) * arr[0], arr))
index_new = window_size_rand
# for each output element, find the mean and std of the last few
# input elements, and standardise the input element by the mean and std of the window
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
window_size_std = peturb * np.float64(window_size_rand)
window_size = round(np.random.normal(window_size_rand, window_size_std))
i_end_new = i + window_size_rand
i_start_new = i_end_new - window_size
if i_start_new < 0:
i_start_new = 0
out_arr[i] = np.mean(new_arr[i_start_new : i_end_new])
#print(out_arr[i-1:i+1])
#num = new_arr[i+window_size-1] - np.mean(new_arr[i : i + window_size])
#denom = np.max(np.abs(new_arr[i : i + window_size] - np.mean(new_arr[i : i + window_size])))
#if denom != 0.0:
# out_arr[i] = num / denom
return out_arr
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, parallel=False)
def nb_causal_rolling_average(arr, window_size):
# create an output array
out_arr = np.zeros(arr.shape[0])
# create an array from the input array, with added space for the rolling window
new_arr = np.hstack((np.ones(window_size-1) * arr[0], arr))
# for each output element, find the mean of the last few input elements
#for i in nb.prange(out_arr.shape[0]):
for i in range(out_arr.shape[0]):
out_arr[i] = np.mean(new_arr[i : i + window_size])
return out_arr
#@nb.jit("(f8[:])(f8[:], f8[:], i8, i8, f8)", nopython=True, nogil=True)
def nb_calc_sentiment_score_rand_b(sent_a, sent_b, ra_win_size_short, ra_win_size_long,peturb):
# example method for creating a stationary sentiment score based on Augmento data
# compare the raw sentiment values
sent_ratio = nb_safe_divide(sent_a, sent_b)
# smooth the sentiment ratio
sent_ratio_short = nb_causal_rolling_norm_rand(sent_ratio, ra_win_size_short, peturb)
sent_ratio_long = nb_causal_rolling_norm_rand(sent_ratio, ra_win_size_long, peturb)
# create a stationary(ish) representation of the smoothed sentiment ratio
sent_score = sent_ratio_short - sent_ratio_long
return sent_score
@nb.jit("(f8[:])(f8[:], f8[:], i8, i8, f8)", nopython=True, nogil=True)
def nb_calc_sentiment_score_rand_a(sent_a, sent_b, ra_win_size, std_win_size, peturb):
# example method for creating a stationary sentiment score based on Augmento data
# compare the raw sentiment values
sent_ratio = nb_safe_divide(sent_a, sent_b)
# smooth the sentiment ratio
sent_ratio_smooth = nb_causal_rolling_norm_rand(sent_ratio, ra_win_size, peturb)
# create a stationary(ish) representation of the smoothed sentiment ratio
sent_score = nb_causal_rolling_sd(sent_ratio_smooth, std_win_size)
return sent_score
@nb.jit("(f8[:])(f8[:], f8[:], i8, i8)", nopython=True, nogil=True)
def nb_calc_sentiment_score_a(sent_a, sent_b, ra_win_size, std_win_size):
# example method for creating a stationary sentiment score based on Augmento data
# compare the raw sentiment values
sent_ratio = nb_safe_divide(sent_a, sent_b)
# smooth the sentiment ratio
sent_ratio_smooth = nb_causal_rolling_average(sent_ratio, ra_win_size)
# create a stationary(ish) representation of the smoothed sentiment ratio
sent_score = nb_causal_rolling_sd(sent_ratio_smooth, std_win_size)
return sent_score
@nb.jit("(f8[:])(f8[:], f8[:], i8, i8)", nopython=True, nogil=True)
def nb_calc_sentiment_score_b(sent_a, sent_b, ra_win_size_short, ra_win_size_long):
# example method for creating a stationary sentiment score based on Augmento data
# compare the raw sentiment values
sent_ratio = nb_safe_divide(sent_a, sent_b)
# smooth the sentiment ratio
sent_ratio_short = nb_causal_rolling_average(sent_ratio, ra_win_size_short)
sent_ratio_long = nb_causal_rolling_average(sent_ratio, ra_win_size_long)
# create a stationary(ish) representation of the smoothed sentiment ratio
sent_score = sent_ratio_short - sent_ratio_long
return sent_score
@nb.jit("(f8[:])(f8[:], f8[:], i8, i8)", nopython=True, nogil=True)
def nb_calc_sentiment_score_c(sent_a, sent_b, ra_win_size, std_win_size):
# example method for creating a stationary sentiment score based on Augmento data
# compare the raw sentiment values
sent_ratio = nb_safe_divide(sent_a, sent_b)
# smooth the sentiment ratio
sent_ratio_smooth = nb_causal_rolling_average(sent_ratio, ra_win_size)
# create a stationary(ish) representation of the smoothed sentiment ratio
sent_score = nb_causal_rolling_norm(sent_ratio_smooth, std_win_size)
return sent_score
@nb.jit("(f8[:])(f8[:], f8[:], f8, f8)", nopython=True, nogil=True, cache=True)
def nb_backtest_a(price, sent_score, start_pnl, buy_sell_fee):
# example backtest with approximate model for long/short contracts
# create an array to hold our pnl, and set the first value
pnl = np.zeros(price.shape, dtype=np.float64)
pnl[0] = start_pnl
# for each step, run the market model
for i_p in range(1, price.shape[0]):
# if sentiment score is positive, simulate long position
# else if sentiment score is negative, simulate short position
# else if the sentiment score is 0.0, hold
# (note that this is a very approximate market simulation!)
n_sample_delay = 2
if i_p < n_sample_delay:
pnl[i_p] = pnl[i_p-1]
if sent_score[i_p-n_sample_delay] > 0.0:
pnl[i_p] = (price[i_p] / price[i_p-1]) * pnl[i_p-1]
elif sent_score[i_p-n_sample_delay] <= 0.0:
pnl[i_p] = (price[i_p-1] / price[i_p]) * pnl[i_p-1]
elif sent_score[i_p-n_sample_delay] == 0.0:
pnl[i_p] = pnl[i_p-1]
# simulate a trade fee if we cross from long to short, or visa versa
if i_p > 1 and np.sign(sent_score[i_p-1]) != np.sign(sent_score[i_p-2]):
pnl[i_p] = pnl[i_p] - (buy_sell_fee * pnl[i_p])
return pnl
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, cache=True)
def moving_average(arr, window):
# output array
ma_arr = np.zeros(arr.shape[0])
# add space for rolling window
new_arr = np.hstack((np.ones(window-1) * arr[0], arr))
# calculate moving average
#for i in nb.prange(arr.shape[0]):
for i in range(arr.shape[0]):
num = new_arr[i+window-1] - np.mean(new_arr[i : i+window-1])
denom = np.std(new_arr[i : i + window-1])
if denom != 0.0:
ma_arr[i] = num / denom
return ma_arr
#@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, cache=True)
#def signal_ma(positive, negative, short, long):
@nb.jit("(f8[:])(f8[:], f8[:], f8[:], f8, f8, f8)",nopython=True, nogil=True,cache=True)
def sma_crossover_backtest(price, leading_arr, lagging_arr, start_pnl, buy_sell_fee, threshold=0.0):
# create an array to hold our pnl, and set the first value
pnl = np.zeros(price.shape, dtype=np.float64)
pnl[0] = start_pnl
# BUY if Leading SMA is above Lagging SMA by some threshold.
# SELL if Leading SMA is below Lagging SMA by some threshold.
sent_signal = leading_arr - lagging_arr
# for each step, run the market model
for i_p in range(1, price.shape[0]):
if sent_signal[i_p-1] > threshold:
pnl[i_p] = (price[i_p] / price[i_p-1]) * pnl[i_p-1]
elif sent_signal[i_p-1] < threshold:
pnl[i_p] = (price[i_p-1] / price[i_p]) * pnl[i_p-1]
elif sent_signal[i_p-1] == threshold:
pnl[i_p] = pnl[i_p-1]
# simulate a trade fee if we cross from long to short, or visa versa
if i_p > 1 and np.sign(sent_signal[i_p-1]) != np.sign(sent_signal[i_p-2]):
pnl[i_p] = pnl[i_p] - (buy_sell_fee * pnl[i_p])
return pnl
#@nb.jit("(f8[:])(f8[:], f8[:], i8)", nopython=True, nogil=True, cache=True)
#def forward_volume(volume_data, price_data, threshold=2000000):
# price_rate_change = np.full(len(volume_data), np.nan)
# for i in range(len(volume_data)):
# sum_volume = 0
# for j in range(len(price_data)):
# sum_volume += price_data[j]
# if sum_volume >= threshold:
# price_rate_change[i] = (price_data[j] - price_data[i])/price_data[i]
# break
@nb.jit("(f8[:])(f8[:], f8[:], i8)", nopython=True, nogil=True, cache=True)
def forward_volume(volume_data, price_data, threshold=2000000):
price_rate_change = np.zeros(len(price_data))
for i in range((len(volume_data))):
j = i+1
sum_volume = 0.0
while (sum_volume < threshold) & (j < len(price_rate_change)):
sum_volume += volume_data[j]
if sum_volume >= threshold:
price_rate_change[i] = (price_data[j]-price_data[i])/price_data[i]
j += 1
return price_rate_change
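# note: forward_volume is redefined immediately below with a float threshold in
# its jit signature, so only the second definition survives at import time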
@nb.jit("(f8[:])(f8[:], f8[:], f8)", nopython=True, nogil=True, cache=True)
def forward_volume(volume_data, price_data, threshold):
price_rate_change = np.zeros(len(price_data))
for i in range((len(volume_data))):
j = i+1
sum_volume = 0.0
while (sum_volume < threshold) & (j < len(price_rate_change)):
sum_volume += volume_data[j]
if sum_volume >= threshold:
price_rate_change[i] = (price_data[j]-price_data[i])/price_data[i]
j += 1
return price_rate_change
@nb.jit("(f8[:])(f8[:], i8)", nopython=True, nogil=True, cache=True)
def volume_normalized(volume_data, n_hours):
norm_volume = np.zeros(len(volume_data))
start = 0
for i in range(n_hours,len(volume_data), n_hours):
for j in range(start,i):
norm_volume[j] = volume_data[j]/np.sum(volume_data[start:i])
start = i
return norm_volume
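# A hedged usage sketch for the helpers above; the synthetic series, window
# sizes and fee are illustrative, not taken from the original repository:
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    n = 1000
    price = 100.0 + np.cumsum(rng.normal(0.0, 0.5, n))    # synthetic price walk
    pos = rng.random(n)                                    # stand-in "bullish" counts
    neg = rng.random(n)                                    # stand-in "bearish" counts
    score = nb_calc_sentiment_score_a(pos, neg, 48, 168)   # smooth, then standardise
    pnl = nb_backtest_a(price, score, 1.0, 0.00075)        # toy long/short backtest
    print(pnl[-1])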
| 34.646597
| 100
| 0.694371
| 2,317
| 13,235
| 3.752266
| 0.075097
| 0.009892
| 0.021739
| 0.020704
| 0.878652
| 0.854037
| 0.837359
| 0.829998
| 0.824707
| 0.807453
| 0
| 0.020446
| 0.175897
| 13,235
| 381
| 101
| 34.737533
| 0.776657
| 0.34031
| 0
| 0.596591
| 0
| 0
| 0.047233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102273
| false
| 0
| 0.011364
| 0
| 0.215909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0897ac8d30a10c6801761a0537049ba99f80a78b
| 130
|
py
|
Python
|
leo/test/unittest/at-path-test2.py
|
ATikhonov2/leo-editor
|
225aac990a9b2804aaa9dea29574d6e072e30474
|
[
"MIT"
] | 2
|
2020-01-19T18:11:05.000Z
|
2020-01-19T18:12:07.000Z
|
leo/test/unittest/at-path-test2.py
|
ATikhonov2/leo-editor
|
225aac990a9b2804aaa9dea29574d6e072e30474
|
[
"MIT"
] | 1
|
2020-01-15T01:57:04.000Z
|
2020-01-15T01:57:04.000Z
|
leo/test/unittest/at-path-test2.py
|
ATikhonov2/leo-editor
|
225aac990a9b2804aaa9dea29574d6e072e30474
|
[
"MIT"
] | null | null | null |
#@+leo-ver=5-thin
#@+node:ekr.20120228145505.4841: * @thin at-path-test2.py
#@@language python
# unittest/at-path-test2.py
#@-leo
| 21.666667
| 57
| 0.7
| 21
| 130
| 4.333333
| 0.714286
| 0.131868
| 0.241758
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 0.076923
| 130
| 5
| 58
| 26
| 0.583333
| 0.923077
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
08c0e4b1a24ff06b1927f74ae22be97bc5c81a4b
| 51
|
py
|
Python
|
Analysis/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Analysis/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Analysis/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | 10
|
2019-03-22T15:30:12.000Z
|
2021-02-10T02:55:50.000Z
|
from . import Simulations
from . import TimeSeries
| 17
| 25
| 0.803922
| 6
| 51
| 6.833333
| 0.666667
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 51
| 2
| 26
| 25.5
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3eb76f4c89e63d51da9a625483cad9361a9cbfe3
| 79
|
py
|
Python
|
darwinpush/__init__.py
|
fasteroute/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 3
|
2015-08-15T15:38:06.000Z
|
2019-08-06T11:09:32.000Z
|
darwinpush/__init__.py
|
grundleborg/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 34
|
2015-07-22T13:47:16.000Z
|
2015-08-12T17:40:23.000Z
|
darwinpush/__init__.py
|
grundleborg/darwinpush
|
c919049e076cbdf61007fc9cc1c5a0271cde7929
|
[
"Apache-2.0"
] | 1
|
2015-08-30T15:26:24.000Z
|
2015-08-30T15:26:24.000Z
|
from darwinpush.client import Client
from darwinpush.listener import Listener
| 19.75
| 40
| 0.860759
| 10
| 79
| 6.8
| 0.5
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 79
| 3
| 41
| 26.333333
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ed12b82c6539de77044647c95b5edac683366a1
| 185
|
py
|
Python
|
app/routes/__init__.py
|
Hoybaby/Python-Project1
|
33336c7303fa4397d3db9d511d1104f01ab32363
|
[
"MIT"
] | null | null | null |
app/routes/__init__.py
|
Hoybaby/Python-Project1
|
33336c7303fa4397d3db9d511d1104f01ab32363
|
[
"MIT"
] | 5
|
2021-03-13T21:40:51.000Z
|
2021-03-17T04:36:19.000Z
|
app/routes/__init__.py
|
Hoybaby/python-newsfeed
|
33336c7303fa4397d3db9d511d1104f01ab32363
|
[
"MIT"
] | null | null | null |
from .home import bp as home
from .dashboard import bp as dashboard
from .api import bp as api
# the .home syntax direct the program to find the module name home then import BP routes.
| 37
| 89
| 0.778378
| 34
| 185
| 4.235294
| 0.5
| 0.222222
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 185
| 4
| 90
| 46.25
| 0.96
| 0.47027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb0492803526a96f8ae0fbb2042c0b01d2ed476f
| 620
|
py
|
Python
|
tests/test_utils.py
|
RustyBower/PoshC2
|
6cc2675aae59a7000d558a113f6db0d09bba3736
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_utils.py
|
RustyBower/PoshC2
|
6cc2675aae59a7000d558a113f6db0d09bba3736
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_utils.py
|
RustyBower/PoshC2
|
6cc2675aae59a7000d558a113f6db0d09bba3736
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from poshc2.Utils import validate_sleep_time
def test_validate_sleep_time():
    assert validate_sleep_time("5h") is not None
    assert validate_sleep_time("4m") is not None
    assert validate_sleep_time("3s ") is not None
    assert validate_sleep_time(" 5000h ") is not None
    assert validate_sleep_time(" 999 s ") is None
    assert validate_sleep_time("999 s") is None
    assert validate_sleep_time("999d") is None
    assert validate_sleep_time("s") is None
    assert validate_sleep_time("asdf") is None
    assert validate_sleep_time("") is None
    assert validate_sleep_time(None) is None
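# A hedged sketch of a checker consistent with the assertions above; the real
# poshc2.Utils.validate_sleep_time is not shown in this file, so this is an
# inference from the test cases only:
import re

def validate_sleep_time_sketch(value):
    # digits immediately followed by s/m/h, with optional surrounding spaces
    if value is None:
        return None
    m = re.fullmatch(r"\s*\d+[smh]\s*", value)
    return m.group(0).strip() if m else None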
| 36.470588
| 53
| 0.748387
| 96
| 620
| 4.552083
| 0.229167
| 0.386728
| 0.505721
| 0.578947
| 0.71167
| 0.71167
| 0.512586
| 0.212815
| 0.212815
| 0.212815
| 0
| 0.033333
| 0.177419
| 620
| 17
| 54
| 36.470588
| 0.823529
| 0
| 0
| 0.142857
| 0
| 0
| 0.057971
| 0
| 0
| 0
| 0
| 0
| 0.785714
| 1
| 0.071429
| true
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eb13414f089f2dda50f397a346ac462c8dfe14d6
| 68
|
py
|
Python
|
source/utils/bp4dbg/adios2/bp4dbg/__init__.py
|
yunai2384/ADIOS2
|
c88fd748720dfdfb0d7f8a529d7838ea86ecfa65
|
[
"ECL-2.0",
"Apache-2.0"
] | 190
|
2017-04-05T20:16:22.000Z
|
2022-03-30T20:26:01.000Z
|
source/utils/bp4dbg/adios2/bp4dbg/__init__.py
|
yunai2384/ADIOS2
|
c88fd748720dfdfb0d7f8a529d7838ea86ecfa65
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,514
|
2017-02-03T16:19:17.000Z
|
2022-03-29T16:36:48.000Z
|
source/utils/bp4dbg/adios2/bp4dbg/__init__.py
|
yunai2384/ADIOS2
|
c88fd748720dfdfb0d7f8a529d7838ea86ecfa65
|
[
"ECL-2.0",
"Apache-2.0"
] | 114
|
2016-12-06T16:47:45.000Z
|
2022-02-01T19:56:01.000Z
|
from .data import *
from .idxtable import *
from .metadata import *
| 17
| 23
| 0.735294
| 9
| 68
| 5.555556
| 0.555556
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 68
| 3
| 24
| 22.666667
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb308f9e04b38457f8d85407e08b4afc6e1811ef
| 129
|
py
|
Python
|
core/models/__init__.py
|
darakudou/setlist_forecast
|
21bcad5b86be84223ae9c643552cdcb9d58bb9c2
|
[
"MIT"
] | null | null | null |
core/models/__init__.py
|
darakudou/setlist_forecast
|
21bcad5b86be84223ae9c643552cdcb9d58bb9c2
|
[
"MIT"
] | null | null | null |
core/models/__init__.py
|
darakudou/setlist_forecast
|
21bcad5b86be84223ae9c643552cdcb9d58bb9c2
|
[
"MIT"
] | null | null | null |
from .idol import *
from .music import *
from .tweet import *
from .calender import *
from .live import *
from .setlist import *
| 18.428571
| 23
| 0.72093
| 18
| 129
| 5.166667
| 0.444444
| 0.537634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 129
| 6
| 24
| 21.5
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de3c21b3902b3e3e73de21e84ca913a62a69a202
| 41
|
py
|
Python
|
cpgames/modules/core/ski/__init__.py
|
Wasabii88/Games
|
33262ca1958207a24e57e3532feded7e275b1dd1
|
[
"MIT"
] | 1
|
2022-01-09T03:06:46.000Z
|
2022-01-09T03:06:46.000Z
|
cpgames/modules/core/ski/__init__.py
|
beiwei365/Games
|
f6499f378802d3212a08aeca761191b58714b7f0
|
[
"MIT"
] | null | null | null |
cpgames/modules/core/ski/__init__.py
|
beiwei365/Games
|
f6499f378802d3212a08aeca761191b58714b7f0
|
[
"MIT"
] | null | null | null |
'''initialize'''
from .ski import SkiGame
| 20.5
| 24
| 0.731707
| 5
| 41
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 2
| 24
| 20.5
| 0.810811
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de572abbf75e1f871aff8bcdd9231d802598c095
| 222
|
py
|
Python
|
monero_glue/compat/gc.py
|
ph4r05/monero-agent
|
0bac0e6f33142b2bb885565bfd1ef8ac04559280
|
[
"MIT"
] | 20
|
2018-04-05T22:06:10.000Z
|
2021-09-18T10:43:44.000Z
|
monero_glue/compat/gc.py
|
ph4r05/monero-agent
|
0bac0e6f33142b2bb885565bfd1ef8ac04559280
|
[
"MIT"
] | null | null | null |
monero_glue/compat/gc.py
|
ph4r05/monero-agent
|
0bac0e6f33142b2bb885565bfd1ef8ac04559280
|
[
"MIT"
] | 5
|
2018-08-06T15:06:04.000Z
|
2021-07-16T01:58:43.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Dusan Klinec, ph4r05, 2018
def collect(*args, **kwargs):
    pass

def mem_free(*args, **kwargs):
    return 1000

def mem_alloc(*args, **kwargs):
    return 100
| 13.875
| 36
| 0.621622
| 31
| 222
| 4.387097
| 0.741935
| 0.220588
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 0.202703
| 222
| 15
| 37
| 14.8
| 0.683616
| 0.346847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.166667
| 0
| 0.333333
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
|
0
| 6
|
deb2fc3ce955c69b8956358fde1af9c73b549f7d
| 4,807
|
py
|
Python
|
ibm_whcs_sdk/insights_for_medical_literature/__init__.py
|
paul-felt/whcs-python-sdk
|
4b668a9f2d60b89c133adee644d1b5ff25b41228
|
[
"Apache-2.0"
] | 5
|
2020-04-09T14:50:01.000Z
|
2022-01-10T23:27:33.000Z
|
ibm_whcs_sdk/insights_for_medical_literature/__init__.py
|
paul-felt/whcs-python-sdk
|
4b668a9f2d60b89c133adee644d1b5ff25b41228
|
[
"Apache-2.0"
] | 21
|
2020-04-08T10:43:57.000Z
|
2021-12-03T21:48:29.000Z
|
ibm_whcs_sdk/insights_for_medical_literature/__init__.py
|
paul-felt/whcs-python-sdk
|
4b668a9f2d60b89c133adee644d1b5ff25b41228
|
[
"Apache-2.0"
] | 6
|
2020-04-08T18:28:03.000Z
|
2021-04-05T16:37:45.000Z
|
# Copyright 2018 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .insights_for_medical_literature_v1 import InsightsForMedicalLiteratureServiceV1
from .insights_for_medical_literature_v1 import AggregationModel
from .insights_for_medical_literature_v1 import Aggregations
from .insights_for_medical_literature_v1 import AnnotationModel
from .insights_for_medical_literature_v1 import ArtifactModel
from .insights_for_medical_literature_v1 import Attribute
from .insights_for_medical_literature_v1 import AttributeEntry
from .insights_for_medical_literature_v1 import Attributes
from .insights_for_medical_literature_v1 import Backend
from .insights_for_medical_literature_v1 import BooleanConcepts
from .insights_for_medical_literature_v1 import BooleanOperands
from .insights_for_medical_literature_v1 import BoolOperand
from .insights_for_medical_literature_v1 import CategoriesModel
from .insights_for_medical_literature_v1 import Category
from .insights_for_medical_literature_v1 import CommonDataModel
from .insights_for_medical_literature_v1 import Concept
from .insights_for_medical_literature_v1 import ConceptInfoModel
from .insights_for_medical_literature_v1 import ConceptListModel
from .insights_for_medical_literature_v1 import ConceptModel
from .insights_for_medical_literature_v1 import Concepts
from .insights_for_medical_literature_v1 import CorporaConfigModel
from .insights_for_medical_literature_v1 import CorpusModel
from .insights_for_medical_literature_v1 import CorpusInfoModel
#from .insights_for_medical_literature_v1 import CorpusProvider
from .insights_for_medical_literature_v1 import Count
from .insights_for_medical_literature_v1 import DataModel
from .insights_for_medical_literature_v1 import DateHistograms
from .insights_for_medical_literature_v1 import DictionaryEntry
from .insights_for_medical_literature_v1 import Documents
from .insights_for_medical_literature_v1 import EntryModel
from .insights_for_medical_literature_v1 import FieldOptions
from .insights_for_medical_literature_v1 import GetDocumentInfoResponse
from .insights_for_medical_literature_v1 import HistogramData
from .insights_for_medical_literature_v1 import HitCount
from .insights_for_medical_literature_v1 import MetadataFields
from .insights_for_medical_literature_v1 import MetadataModel
from .insights_for_medical_literature_v1 import Message
from .insights_for_medical_literature_v1 import Order
from .insights_for_medical_literature_v1 import Passage
from .insights_for_medical_literature_v1 import Passages
from .insights_for_medical_literature_v1 import PassagesModel
from .insights_for_medical_literature_v1 import PossibleValues
from .insights_for_medical_literature_v1 import Qualifier
from .insights_for_medical_literature_v1 import Query
from .insights_for_medical_literature_v1 import Range
from .insights_for_medical_literature_v1 import RangeModel
from .insights_for_medical_literature_v1 import Ranges
from .insights_for_medical_literature_v1 import RankedDocLinks
from .insights_for_medical_literature_v1 import RankedDocument
from .insights_for_medical_literature_v1 import RelatedConceptModel
from .insights_for_medical_literature_v1 import RelatedConceptsModel
from .insights_for_medical_literature_v1 import RelationModel
from .insights_for_medical_literature_v1 import ReturnsModel
from .insights_for_medical_literature_v1 import SearchableConcept
from .insights_for_medical_literature_v1 import SearchMatchesModel
from .insights_for_medical_literature_v1 import SearchModel
from .insights_for_medical_literature_v1 import SentenceModel
from .insights_for_medical_literature_v1 import ServiceStatus
from .insights_for_medical_literature_v1 import SortEntry
from .insights_for_medical_literature_v1 import StringBuilder
from .insights_for_medical_literature_v1 import Supports
from .insights_for_medical_literature_v1 import TextSpan
from .insights_for_medical_literature_v1 import Title
from .insights_for_medical_literature_v1 import Typeahead
from .insights_for_medical_literature_v1 import TypesModel
from .insights_for_medical_literature_v1 import UnstructuredModel
from .insights_for_medical_literature_v1 import Values
from .insights_for_medical_literature_v1 import YearAndHits
from .insights_for_medical_literature_v1 import IMLException
| 57.22619
| 85
| 0.898689
| 644
| 4,807
| 6.279503
| 0.217391
| 0.204748
| 0.255935
| 0.375371
| 0.682493
| 0.682493
| 0.682493
| 0
| 0
| 0
| 0
| 0.017615
| 0.078843
| 4,807
| 83
| 86
| 57.915663
| 0.895664
| 0.129811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.044118
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
724a971c225812f89543b939b7a66083af18e5f1
| 27
|
py
|
Python
|
src/tfluna/__init__.py
|
clementnuss/tfluna-python
|
7f4588c1ce270447fc026d927e06789f06da0c76
|
[
"MIT"
] | 1
|
2021-05-24T12:29:15.000Z
|
2021-05-24T12:29:15.000Z
|
src/tfluna/__init__.py
|
clementnuss/tfluna-python
|
7f4588c1ce270447fc026d927e06789f06da0c76
|
[
"MIT"
] | null | null | null |
src/tfluna/__init__.py
|
clementnuss/tfluna-python
|
7f4588c1ce270447fc026d927e06789f06da0c76
|
[
"MIT"
] | 1
|
2021-05-24T12:32:02.000Z
|
2021-05-24T12:32:02.000Z
|
from .tfluna import TfLuna
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0e3c9874a39deb6444b03eec71ff0fb4184f31b
| 72
|
py
|
Python
|
continual_learning/eval/metrics/__init__.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
continual_learning/eval/metrics/__init__.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
continual_learning/eval/metrics/__init__.py
|
jaryP/ContinualAI
|
7d9b7614066d219ebd72049692da23ad6ec132b0
|
[
"MIT"
] | null | null | null |
from .base import ClassificationMetric, ContinualLearningMetric, Metric
| 36
| 71
| 0.875
| 6
| 72
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 72
| 1
| 72
| 72
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0ef35d15d21a7df0e4c0c4595fa7eed92c29a4a
| 47
|
py
|
Python
|
nam/models/activation/__init__.py
|
mrahman93/nam
|
1a2f286a87ffa024040e3330088b4a375700c1c6
|
[
"MIT"
] | 15
|
2021-03-26T16:00:44.000Z
|
2022-03-26T07:43:10.000Z
|
src/baseline/nam/models/activation/__init__.py
|
fau-is/gam_comparison
|
c47e8f8ced281e0a71b7959a211cb5b289ac7606
|
[
"MIT"
] | 6
|
2021-01-03T22:55:54.000Z
|
2022-03-11T02:50:38.000Z
|
src/baseline/nam/models/activation/__init__.py
|
fau-is/gam_comparison
|
c47e8f8ced281e0a71b7959a211cb5b289ac7606
|
[
"MIT"
] | 9
|
2021-02-08T18:45:52.000Z
|
2022-03-18T19:42:57.000Z
|
from .exu import ExU
from .relu import LinReLU
| 15.666667
| 25
| 0.787234
| 8
| 47
| 4.625
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 26
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0fc2357e36f3822593fc1ed854c0204525f5fcb
| 46,407
|
py
|
Python
|
mabs/utils/reproblems.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 1
|
2021-08-02T10:29:35.000Z
|
2021-08-02T10:29:35.000Z
|
mabs/utils/reproblems.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 10
|
2020-03-14T07:39:34.000Z
|
2021-11-03T22:55:28.000Z
|
mabs/utils/reproblems.py
|
ryanstwrt/multi_agent_blackboard_system
|
b8f6ab71dfe0742a6f690de19b97d10504fc1768
|
[
"MIT"
] | 1
|
2021-07-18T14:43:10.000Z
|
2021-07-18T14:43:10.000Z
|
#!/usr/bin/env python
"""
A real-world multi-objective problem suite (the RE benchmark set)
Reference:
Ryoji Tanabe, Hisao Ishibuchi, "An Easy-to-use Real-world Multi-objective Problem Suite" Applied Soft Computing. 89: 106078 (2020)
Copyright (c) 2020 Ryoji Tanabe
I re-implemented the RE problem set by referring to its C source code (reproblem.c). While variables directly copied from the C source code are written in CamelCase, the other variables are written in snake_case. It is somewhat awkward.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
import os
def get_problem(name, set_random_seed=False):
    benchmark = {'re21': RE21(),
                 're22': RE22(),
                 're23': RE23(),
                 're24': RE24(),
                 're25': RE25(),
                 're31': RE31(),
                 're32': RE32(),
                 're33': RE33(),
                 're34': RE34(),
                 're35': RE35(),
                 're36': RE36(),
                 're37': RE37(),
                 're41': RE41(),
                 're42': RE42(),
                 're61': RE61(),
                 're91': RE91(set_random_seed=set_random_seed),
                 'cre21': CRE21(),
                 'cre22': CRE22(),
                 'cre23': CRE23(),
                 'cre24': CRE24(),
                 'cre25': CRE25(),
                 'cre31': CRE31(),
                 'cre32': CRE32(),
                 'cre51': CRE51(),}
    return benchmark[name]
class RE21():
    def __init__(self):
        self.problem_name = 'RE21'
        self.n_objectives = 2
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 0
        F = 10.0
        sigma = 10.0
        tmp_val = F / sigma
        self.ubound = np.full(self.n_variables, 3 * tmp_val)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = tmp_val
        self.lbound[1] = np.sqrt(2.0) * tmp_val
        self.lbound[2] = np.sqrt(2.0) * tmp_val
        self.lbound[3] = tmp_val

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        F = 10.0
        sigma = 10.0
        E = 2.0 * 1e5
        L = 200.0
        f[0] = L * ((2 * x1) + np.sqrt(2.0) * x2 + np.sqrt(x3) + x4)
        f[1] = ((F * L) / E) * ((2.0 / x1) + (2.0 * np.sqrt(2.0) / x2) - (2.0 * np.sqrt(2.0) / x3) + (2.0 / x4))
        return f
class RE22():
    def __init__(self):
        self.problem_name = 'RE22'
        self.n_objectives = 2
        self.n_variables = 3
        self.n_constraints = 0
        self.n_original_constraints = 2
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.2
        self.lbound[1] = 0.0
        self.lbound[2] = 0.0
        self.ubound[0] = 15
        self.ubound[1] = 20
        self.ubound[2] = 40
        self.feasible_vals = np.array([0.20, 0.31, 0.40, 0.44, 0.60, 0.62, 0.79, 0.80, 0.88, 0.93, 1.0, 1.20, 1.24, 1.32, 1.40, 1.55, 1.58, 1.60, 1.76, 1.80, 1.86, 2.0, 2.17, 2.20, 2.37, 2.40, 2.48, 2.60, 2.64, 2.79, 2.80, 3.0, 3.08, 3.10, 3.16, 3.41, 3.52, 3.60, 3.72, 3.95, 3.96, 4.0, 4.03, 4.20, 4.34, 4.40, 4.65, 4.74, 4.80, 4.84, 5.0, 5.28, 5.40, 5.53, 5.72, 6.0, 6.16, 6.32, 6.60, 7.11, 7.20, 7.80, 7.90, 8.0, 8.40, 8.69, 9.0, 9.48, 10.27, 11.0, 11.06, 11.85, 12.0, 13.0, 14.0, 15.0])

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x = [float(x1) for x1 in x]
        # Reference: getNearestValue_sample2.py (https://gist.github.com/icchi-h/1d0bb1c52ebfdd31f14b3e811328390a)
        idx = np.abs(np.asarray(self.feasible_vals) - x[0]).argmin()
        x1 = self.feasible_vals[idx]
        x2 = x[1]
        x3 = x[2]
        # First original objective function
        f[0] = (29.4 * x1) + (0.6 * x2 * x3)
        # Original constraint functions
        g[0] = (x1 * x3) - 7.735 * ((x1 * x1) / x2) - 180.0
        g[1] = 4.0 - (x3 / x2)
        g = np.where(g < 0, -g, 0)
        f[1] = g[0] + g[1]
        f = np.array([float(x) for x in f])
        return f
class RE23():
    def __init__(self):
        self.problem_name = 'RE23'
        self.n_objectives = 2
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 3
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 1
        self.lbound[1] = 1
        self.lbound[2] = 10
        self.lbound[3] = 10
        self.ubound[0] = 100
        self.ubound[1] = 100
        self.ubound[2] = 200
        self.ubound[3] = 240

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = 0.0625 * int(np.round(x[0]))
        x2 = 0.0625 * int(np.round(x[1]))
        x3 = x[2]
        x4 = x[3]
        # First original objective function
        f[0] = (0.6224 * x1 * x3 * x4) + (1.7781 * x2 * x3 * x3) + (3.1661 * x1 * x1 * x4) + (19.84 * x1 * x1 * x3)
        # Original constraint functions
        g[0] = x1 - (0.0193 * x3)
        g[1] = x2 - (0.00954 * x3)
        g[2] = (np.pi * x3 * x3 * x4) + ((4.0/3.0) * (np.pi * x3 * x3 * x3)) - 1296000
        g = np.where(g < 0, -g, 0)
        f[1] = g[0] + g[1] + g[2]
        return f
class RE24():
    def __init__(self):
        self.problem_name = 'RE24'
        self.n_objectives = 2
        self.n_variables = 2
        self.n_constraints = 0
        self.n_original_constraints = 4
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.5
        self.lbound[1] = 0.5
        self.ubound[0] = 4
        self.ubound[1] = 50

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        # First original objective function
        f[0] = x1 + (120 * x2)
        E = 700000
        sigma_b_max = 700
        tau_max = 450
        delta_max = 1.5
        sigma_k = (E * x1 * x1) / 100
        sigma_b = 4500 / (x1 * x2)
        tau = 1800 / x2
        delta = (56.2 * 10000) / (E * x1 * x2 * x2)
        g[0] = 1 - (sigma_b / sigma_b_max)
        g[1] = 1 - (tau / tau_max)
        g[2] = 1 - (delta / delta_max)
        g[3] = 1 - (sigma_b / sigma_k)
        g = np.where(g < 0, -g, 0)
        f[1] = g[0] + g[1] + g[2] + g[3]
        return f
class RE25():
    def __init__(self):
        self.problem_name = 'RE25'
        self.n_objectives = 2
        self.n_variables = 3
        self.n_constraints = 0
        self.n_original_constraints = 6
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 1
        self.lbound[1] = 0.6
        self.lbound[2] = 0.09
        self.ubound[0] = 70
        self.ubound[1] = 3
        self.ubound[2] = 0.5
        self.feasible_vals = np.array([0.009, 0.0095, 0.0104, 0.0118, 0.0128, 0.0132, 0.014, 0.015, 0.0162, 0.0173, 0.018, 0.02, 0.023, 0.025, 0.028, 0.032, 0.035, 0.041, 0.047, 0.054, 0.063, 0.072, 0.08, 0.092, 0.105, 0.12, 0.135, 0.148, 0.162, 0.177, 0.192, 0.207, 0.225, 0.244, 0.263, 0.283, 0.307, 0.331, 0.362, 0.394, 0.4375, 0.5])

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = np.round(x[0])
        x2 = x[1]
        # Reference: getNearestValue_sample2.py (https://gist.github.com/icchi-h/1d0bb1c52ebfdd31f14b3e811328390a)
        idx = np.abs(np.asarray(self.feasible_vals) - x[2]).argmin()
        x3 = self.feasible_vals[idx]
        # first original objective function
        f[0] = (np.pi * np.pi * x2 * x3 * x3 * (x1 + 2)) / 4.0
        # constraint functions
        Cf = ((4.0 * (x2 / x3) - 1) / (4.0 * (x2 / x3) - 4)) + (0.615 * x3 / x2)
        Fmax = 1000.0
        S = 189000.0
        G = 11.5 * 1e+6
        K = (G * x3 * x3 * x3 * x3) / (8 * x1 * x2 * x2 * x2)
        lmax = 14.0
        lf = (Fmax / K) + 1.05 * (x1 + 2) * x3
        dmin = 0.2
        Dmax = 3
        Fp = 300.0
        sigmaP = Fp / K
        sigmaPM = 6
        sigmaW = 1.25
        g[0] = -((8 * Cf * Fmax * x2) / (np.pi * x3 * x3 * x3)) + S
        g[1] = -lf + lmax
        g[2] = -3 + (x2 / x3)
        g[3] = -sigmaP + sigmaPM
        g[4] = -sigmaP - ((Fmax - Fp) / K) - 1.05 * (x1 + 2) * x3 + lf
        g[5] = sigmaW - ((Fmax - Fp) / K)
        g = np.where(g < 0, -g, 0)
        f[1] = g[0] + g[1] + g[2] + g[3] + g[4] + g[5]
        return f
class RE31():
    def __init__(self):
        self.problem_name = 'RE31'
        self.n_objectives = 3
        self.n_variables = 3
        self.n_constraints = 0
        self.n_original_constraints = 3
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.00001
        self.lbound[1] = 0.00001
        self.lbound[2] = 1.0
        self.ubound[0] = 100.0
        self.ubound[1] = 100.0
        self.ubound[2] = 3.0

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        # First original objective function
        f[0] = x1 * np.sqrt(16.0 + (x3 * x3)) + x2 * np.sqrt(1.0 + x3 * x3)
        # Second original objective function
        f[1] = (20.0 * np.sqrt(16.0 + (x3 * x3))) / (x1 * x3)
        # Constraint functions
        g[0] = 0.1 - f[0]
        g[1] = 100000.0 - f[1]
        g[2] = 100000 - ((80.0 * np.sqrt(1.0 + x3 * x3)) / (x3 * x2))
        g = np.where(g < 0, -g, 0)
        f[2] = g[0] + g[1] + g[2]
        return f
class RE32():
    def __init__(self):
        self.problem_name = 'RE32'
        self.n_objectives = 3
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 4
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.125
        self.lbound[1] = 0.1
        self.lbound[2] = 0.1
        self.lbound[3] = 0.125
        self.ubound[0] = 5.0
        self.ubound[1] = 10.0
        self.ubound[2] = 10.0
        self.ubound[3] = 5.0

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        P = 6000
        L = 14
        E = 30 * 1e6
        # // deltaMax = 0.25
        G = 12 * 1e6
        tauMax = 13600
        sigmaMax = 30000
        # First original objective function
        f[0] = (1.10471 * x1 * x1 * x2) + (0.04811 * x3 * x4) * (14.0 + x2)
        # Second original objective function
        f[1] = (4 * P * L * L * L) / (E * x4 * x3 * x3 * x3)
        # Constraint functions
        M = P * (L + (x2 / 2))
        tmpVar = ((x2 * x2) / 4.0) + np.power((x1 + x3) / 2.0, 2)
        R = np.sqrt(tmpVar)
        tmpVar = ((x2 * x2) / 12.0) + np.power((x1 + x3) / 2.0, 2)
        J = 2 * np.sqrt(2) * x1 * x2 * tmpVar
        tauDashDash = (M * R) / J
        tauDash = P / (np.sqrt(2) * x1 * x2)
        tmpVar = tauDash * tauDash + ((2 * tauDash * tauDashDash * x2) / (2 * R)) + (tauDashDash * tauDashDash)
        tau = np.sqrt(tmpVar)
        sigma = (6 * P * L) / (x4 * x3 * x3)
        tmpVar = 4.013 * E * np.sqrt((x3 * x3 * x4 * x4 * x4 * x4 * x4 * x4) / 36.0) / (L * L)
        tmpVar2 = (x3 / (2 * L)) * np.sqrt(E / (4 * G))
        PC = tmpVar * (1 - tmpVar2)
        g[0] = tauMax - tau
        g[1] = sigmaMax - sigma
        g[2] = x4 - x1
        g[3] = PC - P
        g = np.where(g < 0, -g, 0)
        f[2] = g[0] + g[1] + g[2] + g[3]
        return f
class RE33():
    def __init__(self):
        self.problem_name = 'RE33'
        self.n_objectives = 3
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 4
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 55
        self.lbound[1] = 75
        self.lbound[2] = 1000
        self.lbound[3] = 11
        self.ubound[0] = 80
        self.ubound[1] = 110
        self.ubound[2] = 3000
        self.ubound[3] = 20

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        # First original objective function
        f[0] = 4.9 * 1e-5 * (x2 * x2 - x1 * x1) * (x4 - 1.0)
        # Second original objective function
        f[1] = ((9.82 * 1e6) * (x2 * x2 - x1 * x1)) / (x3 * x4 * (x2 * x2 * x2 - x1 * x1 * x1))
        # Reformulated objective functions
        g[0] = (x2 - x1) - 20.0
        g[1] = 0.4 - (x3 / (3.14 * (x2 * x2 - x1 * x1)))
        g[2] = 1.0 - (2.22 * 1e-3 * x3 * (x2 * x2 * x2 - x1 * x1 * x1)) / np.power((x2 * x2 - x1 * x1), 2)
        g[3] = (2.66 * 1e-2 * x3 * x4 * (x2 * x2 * x2 - x1 * x1 * x1)) / (x2 * x2 - x1 * x1) - 900.0
        g = np.where(g < 0, -g, 0)
        f[2] = g[0] + g[1] + g[2] + g[3]
        return f
class RE34():
    def __init__(self):
        self.problem_name = 'RE34'
        self.n_objectives = 3
        self.n_variables = 5
        self.n_constraints = 0
        self.n_original_constraints = 0
        self.lbound = np.full(self.n_variables, 1)
        self.ubound = np.full(self.n_variables, 3)

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        x5 = x[4]
        f[0] = 1640.2823 + (2.3573285 * x1) + (2.3220035 * x2) + (4.5688768 * x3) + (7.7213633 * x4) + (4.4559504 * x5)
        f[1] = 6.5856 + (1.15 * x1) - (1.0427 * x2) + (0.9738 * x3) + (0.8364 * x4) - (0.3695 * x1 * x4) + (0.0861 * x1 * x5) + (0.3628 * x2 * x4) - (0.1106 * x1 * x1) - (0.3437 * x3 * x3) + (0.1764 * x4 * x4)
        f[2] = -0.0551 + (0.0181 * x1) + (0.1024 * x2) + (0.0421 * x3) - (0.0073 * x1 * x2) + (0.024 * x2 * x3) - (0.0118 * x2 * x4) - (0.0204 * x3 * x4) - (0.008 * x3 * x5) - (0.0241 * x2 * x2) + (0.0109 * x4 * x4)
        return f
class RE35():
    def __init__(self):
        self.problem_name = 'RE35'
        self.n_objectives = 3
        self.n_variables = 7
        self.n_constraints = 0
        self.n_original_constraints = 11
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[1] = 2.6
        self.lbound[2] = 0.7
        self.lbound[0] = 17
        self.lbound[3] = 7.3
        self.lbound[4] = 7.3
        self.lbound[5] = 2.9
        self.lbound[6] = 5.0
        self.ubound[1] = 3.6
        self.ubound[2] = 0.8
        self.ubound[0] = 28
        self.ubound[3] = 8.3
        self.ubound[4] = 8.3
        self.ubound[5] = 3.9
        self.ubound[6] = 5.5

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[1]
        x2 = x[2]
        x3 = np.round(x[0])
        x4 = x[3]
        x5 = x[4]
        x6 = x[5]
        x7 = x[6]
        # First original objective function (weight)
        f[0] = 0.7854 * x1 * (x2 * x2) * (((10.0 * x3 * x3) / 3.0) + (14.933 * x3) - 43.0934) - 1.508 * x1 * (x6 * x6 + x7 * x7) + 7.477 * (x6 * x6 * x6 + x7 * x7 * x7) + 0.7854 * (x4 * x6 * x6 + x5 * x7 * x7)
        # Second original objective function (stress)
        tmpVar = np.power((745.0 * x4) / (x2 * x3), 2.0) + 1.69 * 1e7
        f[1] = np.sqrt(tmpVar) / (0.1 * x6 * x6 * x6)
        # Constraint functions
        g[0] = -(1.0 / (x1 * x2 * x2 * x3)) + 1.0 / 27.0
        g[1] = -(1.0 / (x1 * x2 * x2 * x3 * x3)) + 1.0 / 397.5
        g[2] = -(x4 * x4 * x4) / (x2 * x3 * x6 * x6 * x6 * x6) + 1.0 / 1.93
        g[3] = -(x5 * x5 * x5) / (x2 * x3 * x7 * x7 * x7 * x7) + 1.0 / 1.93
        g[4] = -(x2 * x3) + 40.0
        g[5] = -(x1 / x2) + 12.0
        g[6] = -5.0 + (x1 / x2)
        g[7] = -1.9 + x4 - 1.5 * x6
        g[8] = -1.9 + x5 - 1.1 * x7
        g[9] = -f[1] + 1300.0
        tmpVar = np.power((745.0 * x5) / (x2 * x3), 2.0) + 1.575 * 1e8
        g[10] = -np.sqrt(tmpVar) / (0.1 * x7 * x7 * x7) + 1100.0
        g = np.where(g < 0, -g, 0)
        f[2] = g[0] + g[1] + g[2] + g[3] + g[4] + g[5] + g[6] + g[7] + g[8] + g[9] + g[10]
        return f
class RE36():
    def __init__(self):
        self.problem_name = 'RE36'
        self.n_objectives = 3
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 1
        self.lbound = np.full(self.n_variables, 12)
        self.ubound = np.full(self.n_variables, 60)

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        # all the four variables must be integer values
        x1 = np.round(x[0])
        x2 = np.round(x[1])
        x3 = np.round(x[2])
        x4 = np.round(x[3])
        # First original objective function
        f[0] = np.abs(6.931 - ((x3 / x1) * (x4 / x2)))
        # Second original objective function (the maximum value among the four variables)
        l = [x1, x2, x3, x4]
        f[1] = max(l)
        g[0] = 0.5 - (f[0] / 6.931)
        g = np.where(g < 0, -g, 0)
        f[2] = g[0]
        return f
class RE37():
    def __init__(self):
        self.problem_name = 'RE37'
        self.n_objectives = 3
        self.n_variables = 4
        self.n_constraints = 0
        self.n_original_constraints = 0
        self.lbound = np.full(self.n_variables, 0)
        self.ubound = np.full(self.n_variables, 1)

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        xAlpha = x[0]
        xHA = x[1]
        xOA = x[2]
        xOPTT = x[3]
        # f1 (TF_max)
        f[0] = 0.692 + (0.477 * xAlpha) - (0.687 * xHA) - (0.080 * xOA) - (0.0650 * xOPTT) - (0.167 * xAlpha * xAlpha) - (0.0129 * xHA * xAlpha) + (0.0796 * xHA * xHA) - (0.0634 * xOA * xAlpha) - (0.0257 * xOA * xHA) + (0.0877 * xOA * xOA) - (0.0521 * xOPTT * xAlpha) + (0.00156 * xOPTT * xHA) + (0.00198 * xOPTT * xOA) + (0.0184 * xOPTT * xOPTT)
        # f2 (X_cc)
        f[1] = 0.153 - (0.322 * xAlpha) + (0.396 * xHA) + (0.424 * xOA) + (0.0226 * xOPTT) + (0.175 * xAlpha * xAlpha) + (0.0185 * xHA * xAlpha) - (0.0701 * xHA * xHA) - (0.251 * xOA * xAlpha) + (0.179 * xOA * xHA) + (0.0150 * xOA * xOA) + (0.0134 * xOPTT * xAlpha) + (0.0296 * xOPTT * xHA) + (0.0752 * xOPTT * xOA) + (0.0192 * xOPTT * xOPTT)
        # f3 (TT_max)
        f[2] = 0.370 - (0.205 * xAlpha) + (0.0307 * xHA) + (0.108 * xOA) + (1.019 * xOPTT) - (0.135 * xAlpha * xAlpha) + (0.0141 * xHA * xAlpha) + (0.0998 * xHA * xHA) + (0.208 * xOA * xAlpha) - (0.0301 * xOA * xHA) - (0.226 * xOA * xOA) + (0.353 * xOPTT * xAlpha) - (0.0497 * xOPTT * xOA) - (0.423 * xOPTT * xOPTT) + (0.202 * xHA * xAlpha * xAlpha) - (0.281 * xOA * xAlpha * xAlpha) - (0.342 * xHA * xHA * xAlpha) - (0.245 * xHA * xHA * xOA) + (0.281 * xOA * xOA * xHA) - (0.184 * xOPTT * xOPTT * xAlpha) - (0.281 * xHA * xAlpha * xOA)
        return f
class RE41():
    def __init__(self):
        self.problem_name = 'RE41'
        self.n_objectives = 4
        self.n_variables = 7
        self.n_constraints = 0
        self.n_original_constraints = 10
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 0.5
        self.lbound[1] = 0.45
        self.lbound[2] = 0.5
        self.lbound[3] = 0.5
        self.lbound[4] = 0.875
        self.lbound[5] = 0.4
        self.lbound[6] = 0.4
        self.ubound[0] = 1.5
        self.ubound[1] = 1.35
        self.ubound[2] = 1.5
        self.ubound[3] = 1.5
        self.ubound[4] = 2.625
        self.ubound[5] = 1.2
        self.ubound[6] = 1.2

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        x5 = x[4]
        x6 = x[5]
        x7 = x[6]
        # First original objective function
        f[0] = 1.98 + 4.9 * x1 + 6.67 * x2 + 6.98 * x3 + 4.01 * x4 + 1.78 * x5 + 0.00001 * x6 + 2.73 * x7
        # Second original objective function
        f[1] = 4.72 - 0.5 * x4 - 0.19 * x2 * x3
        # Third original objective function
        Vmbp = 10.58 - 0.674 * x1 * x2 - 0.67275 * x2
        Vfd = 16.45 - 0.489 * x3 * x7 - 0.843 * x5 * x6
        f[2] = 0.5 * (Vmbp + Vfd)
        # Constraint functions
        g[0] = 1 - (1.16 - 0.3717 * x2 * x4 - 0.0092928 * x3)
        g[1] = 0.32 - (0.261 - 0.0159 * x1 * x2 - 0.06486 * x1 - 0.019 * x2 * x7 + 0.0144 * x3 * x5 + 0.0154464 * x6)
        g[2] = 0.32 - (0.214 + 0.00817 * x5 - 0.045195 * x1 - 0.0135168 * x1 + 0.03099 * x2 * x6 - 0.018 * x2 * x7 + 0.007176 * x3 + 0.023232 * x3 - 0.00364 * x5 * x6 - 0.018 * x2 * x2)
        g[3] = 0.32 - (0.74 - 0.61 * x2 - 0.031296 * x3 - 0.031872 * x7 + 0.227 * x2 * x2)
        g[4] = 32 - (28.98 + 3.818 * x3 - 4.2 * x1 * x2 + 1.27296 * x6 - 2.68065 * x7)
        g[5] = 32 - (33.86 + 2.95 * x3 - 5.057 * x1 * x2 - 3.795 * x2 - 3.4431 * x7 + 1.45728)
        g[6] = 32 - (46.36 - 9.9 * x2 - 4.4505 * x1)
        g[7] = 4 - f[1]
        g[8] = 9.9 - Vmbp
        g[9] = 15.7 - Vfd
        g = np.where(g < 0, -g, 0)
        f[3] = g[0] + g[1] + g[2] + g[3] + g[4] + g[5] + g[6] + g[7] + g[8] + g[9]
        return f
class RE42():
    def __init__(self):
        self.problem_name = 'RE42'
        self.n_objectives = 4
        self.n_variables = 6
        self.n_constraints = 0
        self.n_original_constraints = 9
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 150.0
        self.lbound[1] = 20.0
        self.lbound[2] = 13.0
        self.lbound[3] = 10.0
        self.lbound[4] = 14.0
        self.lbound[5] = 0.63
        self.ubound[0] = 274.32
        self.ubound[1] = 32.31
        self.ubound[2] = 25.0
        self.ubound[3] = 11.71
        self.ubound[4] = 18.0
        self.ubound[5] = 0.75

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        # NOT g
        constraintFuncs = np.zeros(self.n_original_constraints)
        x_L = x[0]
        x_B = x[1]
        x_D = x[2]
        x_T = x[3]
        x_Vk = x[4]
        x_CB = x[5]
        displacement = 1.025 * x_L * x_B * x_T * x_CB
        V = 0.5144 * x_Vk
        g = 9.8065
        Fn = V / np.power(g * x_L, 0.5)
        a = (4977.06 * x_CB * x_CB) - (8105.61 * x_CB) + 4456.51
        b = (-10847.2 * x_CB * x_CB) + (12817.0 * x_CB) - 6960.32
        power = (np.power(displacement, 2.0/3.0) * np.power(x_Vk, 3.0)) / (a + (b * Fn))
        outfit_weight = 1.0 * np.power(x_L, 0.8) * np.power(x_B, 0.6) * np.power(x_D, 0.3) * np.power(x_CB, 0.1)
        steel_weight = 0.034 * np.power(x_L, 1.7) * np.power(x_B, 0.7) * np.power(x_D, 0.4) * np.power(x_CB, 0.5)
        machinery_weight = 0.17 * np.power(power, 0.9)
        light_ship_weight = steel_weight + outfit_weight + machinery_weight
        ship_cost = 1.3 * ((2000.0 * np.power(steel_weight, 0.85)) + (3500.0 * outfit_weight) + (2400.0 * np.power(power, 0.8)))
        capital_costs = 0.2 * ship_cost
        DWT = displacement - light_ship_weight
        running_costs = 40000.0 * np.power(DWT, 0.3)
        round_trip_miles = 5000.0
        sea_days = (round_trip_miles / 24.0) * x_Vk
        handling_rate = 8000.0
        daily_consumption = ((0.19 * power * 24.0) / 1000.0) + 0.2
        fuel_price = 100.0
        fuel_cost = 1.05 * daily_consumption * sea_days * fuel_price
        port_cost = 6.3 * np.power(DWT, 0.8)
        fuel_carried = daily_consumption * (sea_days + 5.0)
        miscellaneous_DWT = 2.0 * np.power(DWT, 0.5)
        cargo_DWT = DWT - fuel_carried - miscellaneous_DWT
        port_days = 2.0 * ((cargo_DWT / handling_rate) + 0.5)
        RTPA = 350.0 / (sea_days + port_days)
        voyage_costs = (fuel_cost + port_cost) * RTPA
        annual_costs = capital_costs + running_costs + voyage_costs
        annual_cargo = cargo_DWT * RTPA
        f[0] = annual_costs / annual_cargo
        f[1] = light_ship_weight
        # f_2 is treated as a minimization problem
        f[2] = -annual_cargo
        # Reformulated objective functions
        constraintFuncs[0] = (x_L / x_B) - 6.0
        constraintFuncs[1] = -(x_L / x_D) + 15.0
        constraintFuncs[2] = -(x_L / x_T) + 19.0
        constraintFuncs[3] = 0.45 * np.power(DWT, 0.31) - x_T
        constraintFuncs[4] = 0.7 * x_D + 0.7 - x_T
        constraintFuncs[5] = 50000.0 - DWT
        constraintFuncs[6] = DWT - 3000.0
        constraintFuncs[7] = 0.32 - Fn
        KB = 0.53 * x_T
        BMT = ((0.085 * x_CB - 0.002) * x_B * x_B) / (x_T * x_CB)
        KG = 1.0 + 0.52 * x_D
        constraintFuncs[8] = (KB + BMT - KG) - (0.07 * x_B)
        constraintFuncs = np.where(constraintFuncs < 0, -constraintFuncs, 0)
        f[3] = constraintFuncs[0] + constraintFuncs[1] + constraintFuncs[2] + constraintFuncs[3] + constraintFuncs[4] + constraintFuncs[5] + constraintFuncs[6] + constraintFuncs[7] + constraintFuncs[8]
        return f
class RE61():
    def __init__(self):
        self.problem_name = 'RE61'
        self.n_objectives = 6
        self.n_variables = 3
        self.n_constraints = 0
        self.n_original_constraints = 7
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 0.01
        self.lbound[1] = 0.01
        self.lbound[2] = 0.01
        self.ubound[0] = 0.45
        self.ubound[1] = 0.10
        self.ubound[2] = 0.10

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        # First original objective function
        f[0] = 106780.37 * (x[1] + x[2]) + 61704.67
        # Second original objective function
        f[1] = 3000 * x[0]
        # Third original objective function
        f[2] = 305700 * 2289 * x[1] / np.power(0.06*2289, 0.65)
        # Fourth original objective function
        f[3] = 250 * 2289 * np.exp(-39.75*x[1]+9.9*x[2]+2.74)
        # Fifth original objective function
        f[4] = 25 * (1.39 /(x[0]*x[1]) + 4940*x[2] -80)
        # Constraint functions
        g[0] = 1 - (0.00139/(x[0]*x[1])+4.94*x[2]-0.08)
        g[1] = 1 - (0.000306/(x[0]*x[1])+1.082*x[2]-0.0986)
        g[2] = 50000 - (12.307/(x[0]*x[1]) + 49408.24*x[2]+4051.02)
        g[3] = 16000 - (2.098/(x[0]*x[1])+8046.33*x[2]-696.71)
        g[4] = 10000 - (2.138/(x[0]*x[1])+7883.39*x[2]-705.04)
        g[5] = 2000 - (0.417*x[0]*x[1] + 1721.26*x[2]-136.54)
        g[6] = 550 - (0.164/(x[0]*x[1])+631.13*x[2]-54.48)
        g = np.where(g < 0, -g, 0)
        f[5] = g[0] + g[1] + g[2] + g[3] + g[4] + g[5] + g[6]
        return f
class RE91():
    def __init__(self, set_random_seed=False):
        self.problem_name = 'RE91'
        self.n_objectives = 9
        self.n_variables = 7
        self.n_constraints = 0
        self.n_original_constraints = 0
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 0.5
        self.lbound[1] = 0.45
        self.lbound[2] = 0.5
        self.lbound[3] = 0.5
        self.lbound[4] = 0.875
        self.lbound[5] = 0.4
        self.lbound[6] = 0.4
        self.ubound[0] = 1.5
        self.ubound[1] = 1.35
        self.ubound[2] = 1.5
        self.ubound[3] = 1.5
        self.ubound[4] = 2.625
        self.ubound[5] = 1.2
        self.ubound[6] = 1.2
        if set_random_seed:
            np.random.seed(seed=0)

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        x5 = x[4]
        x6 = x[5]
        x7 = x[6]
        # stochastic variables
        x8 = 0.006 * (np.random.normal(0, 1)) + 0.345
        x9 = 0.006 * (np.random.normal(0, 1)) + 0.192
        x10 = 10 * (np.random.normal(0, 1)) + 0.0
        x11 = 10 * (np.random.normal(0, 1)) + 0.0
        # First function
        f[0] = 1.98 + 4.9 * x1 + 6.67 * x2 + 6.98 * x3 + 4.01 * x4 + 1.75 * x5 + 0.00001 * x6 + 2.73 * x7
        # Second function
        f[1] = max(0.0, (1.16 - 0.3717 * x2 * x4 - 0.00931 * x2 * x10 - 0.484 * x3 * x9 + 0.01343 * x6 * x10)/1.0)
        # Third function
        f[2] = max(0.0, (0.261 - 0.0159 * x1 * x2 - 0.188 * x1 * x8 - 0.019 * x2 * x7 + 0.0144 * x3 * x5 + 0.87570001 * x5 * x10 + 0.08045 * x6 * x9 + 0.00139 * x8 * x11 + 0.00001575 * x10 * x11)/0.32)
        # Fourth function
        f[3] = max(0.0, (0.214 + 0.00817 * x5 - 0.131 * x1 * x8 - 0.0704 * x1 * x9 + 0.03099 * x2 * x6 - 0.018 * x2 * x7 + 0.0208 * x3 * x8 + 0.121 * x3 * x9 - 0.00364 * x5 * x6 + 0.0007715 * x5 * x10 - 0.0005354 * x6 * x10 + 0.00121 * x8 * x11 + 0.00184 * x9 * x10 - 0.018 * x2 * x2)/0.32)
        # Fifth function
        f[4] = max(0.0, (0.74 - 0.61 * x2 - 0.163 * x3 * x8 + 0.001232 * x3 * x10 - 0.166 * x7 * x9 + 0.227 * x2 * x2)/0.32)
        # Sixth function
        tmp = ((28.98 + 3.818 * x3 - 4.2 * x1 * x2 + 0.0207 * x5 * x10 + 6.63 * x6 * x9 - 7.77 * x7 * x8 + 0.32 * x9 * x10) + (33.86 + 2.95 * x3 + 0.1792 * x10 - 5.057 * x1 * x2 - 11 * x2 * x8 - 0.0215 * x5 * x10 - 9.98 * x7 * x8 + 22 * x8 * x9) + (46.36 - 9.9 * x2 - 12.9 * x1 * x8 + 0.1107 * x3 * x10))/3
        f[5] = max(0.0, tmp/32)
        # Seventh function
        f[6] = max(0.0, (4.72 - 0.5 * x4 - 0.19 * x2 * x3 - 0.0122 * x4 * x10 + 0.009325 * x6 * x10 + 0.000191 * x11 * x11)/4.0)
        # Eighth function
        f[7] = max(0.0, (10.58 - 0.674 * x1 * x2 - 1.95 * x2 * x8 + 0.02054 * x3 * x10 - 0.0198 * x4 * x10 + 0.028 * x6 * x10)/9.9)
        # Ninth function
        f[8] = max(0.0, (16.45 - 0.489 * x3 * x7 - 0.843 * x5 * x6 + 0.0432 * x9 * x10 - 0.0556 * x9 * x11 - 0.000786 * x11 * x11)/15.7)
        return f
class CRE21():
    def __init__(self):
        self.problem_name = 'CRE21'
        self.n_objectives = 2
        self.n_variables = 3
        self.n_constraints = 3
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.00001
        self.lbound[1] = 0.00001
        self.lbound[2] = 1.0
        self.ubound[0] = 100.0
        self.ubound[1] = 100.0
        self.ubound[2] = 3.0

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        # First original objective function
        f[0] = x1 * np.sqrt(16.0 + (x3 * x3)) + x2 * np.sqrt(1.0 + x3 * x3)
        # Second original objective function
        f[1] = (20.0 * np.sqrt(16.0 + (x3 * x3))) / (x1 * x3)
        # Constraint functions
        g[0] = 0.1 - f[0]
        g[1] = 100000.0 - f[1]
        g[2] = 100000 - ((80.0 * np.sqrt(1.0 + x3 * x3)) / (x3 * x2))
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE22():
    def __init__(self):
        self.problem_name = 'CRE22'
        self.n_objectives = 2
        self.n_variables = 4
        self.n_constraints = 4
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 0.125
        self.lbound[1] = 0.1
        self.lbound[2] = 0.1
        self.lbound[3] = 0.125
        self.ubound[0] = 5.0
        self.ubound[1] = 10.0
        self.ubound[2] = 10.0
        self.ubound[3] = 5.0

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        P = 6000
        L = 14
        E = 30 * 1e6
        # // deltaMax = 0.25
        G = 12 * 1e6
        tauMax = 13600
        sigmaMax = 30000
        # First original objective function
        f[0] = (1.10471 * x1 * x1 * x2) + (0.04811 * x3 * x4) * (14.0 + x2)
        # Second original objective function
        f[1] = (4 * P * L * L * L) / (E * x4 * x3 * x3 * x3)
        # Constraint functions
        M = P * (L + (x2 / 2))
        tmpVar = ((x2 * x2) / 4.0) + np.power((x1 + x3) / 2.0, 2)
        R = np.sqrt(tmpVar)
        tmpVar = ((x2 * x2) / 12.0) + np.power((x1 + x3) / 2.0, 2)
        J = 2 * np.sqrt(2) * x1 * x2 * tmpVar
        tauDashDash = (M * R) / J
        tauDash = P / (np.sqrt(2) * x1 * x2)
        tmpVar = tauDash * tauDash + ((2 * tauDash * tauDashDash * x2) / (2 * R)) + (tauDashDash * tauDashDash)
        tau = np.sqrt(tmpVar)
        sigma = (6 * P * L) / (x4 * x3 * x3)
        tmpVar = 4.013 * E * np.sqrt((x3 * x3 * x4 * x4 * x4 * x4 * x4 * x4) / 36.0) / (L * L)
        tmpVar2 = (x3 / (2 * L)) * np.sqrt(E / (4 * G))
        PC = tmpVar * (1 - tmpVar2)
        g[0] = tauMax - tau
        g[1] = sigmaMax - sigma
        g[2] = x4 - x1
        g[3] = PC - P
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE23():
    def __init__(self):
        self.problem_name = 'CRE23'
        self.n_objectives = 2
        self.n_variables = 4
        self.n_constraints = 4
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 55
        self.lbound[1] = 75
        self.lbound[2] = 1000
        self.lbound[3] = 11
        self.ubound[0] = 80
        self.ubound[1] = 110
        self.ubound[2] = 3000
        self.ubound[3] = 20

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        # First original objective function
        f[0] = 4.9 * 1e-5 * (x2 * x2 - x1 * x1) * (x4 - 1.0)
        # Second original objective function
        f[1] = ((9.82 * 1e6) * (x2 * x2 - x1 * x1)) / (x3 * x4 * (x2 * x2 * x2 - x1 * x1 * x1))
        # Reformulated objective functions
        g[0] = (x2 - x1) - 20.0
        g[1] = 0.4 - (x3 / (3.14 * (x2 * x2 - x1 * x1)))
        g[2] = 1.0 - (2.22 * 1e-3 * x3 * (x2 * x2 * x2 - x1 * x1 * x1)) / np.power((x2 * x2 - x1 * x1), 2)
        g[3] = (2.66 * 1e-2 * x3 * x4 * (x2 * x2 * x2 - x1 * x1 * x1)) / (x2 * x2 - x1 * x1) - 900.0
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE24():
    def __init__(self):
        self.problem_name = 'CRE24'
        self.n_objectives = 2
        self.n_variables = 7
        self.n_constraints = 11
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 2.6
        self.lbound[1] = 0.7
        self.lbound[2] = 17
        self.lbound[3] = 7.3
        self.lbound[4] = 7.3
        self.lbound[5] = 2.9
        self.lbound[6] = 5.0
        self.ubound[0] = 3.6
        self.ubound[1] = 0.8
        self.ubound[2] = 28
        self.ubound[3] = 8.3
        self.ubound[4] = 8.3
        self.ubound[5] = 3.9
        self.ubound[6] = 5.5

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = np.round(x[2])
        x4 = x[3]
        x5 = x[4]
        x6 = x[5]
        x7 = x[6]
        # First original objective function (weight)
        f[0] = 0.7854 * x1 * (x2 * x2) * (((10.0 * x3 * x3) / 3.0) + (14.933 * x3) - 43.0934) - 1.508 * x1 * (x6 * x6 + x7 * x7) + 7.477 * (x6 * x6 * x6 + x7 * x7 * x7) + 0.7854 * (x4 * x6 * x6 + x5 * x7 * x7)
        # Second original objective function (stress)
        tmpVar = np.power((745.0 * x4) / (x2 * x3), 2.0) + 1.69 * 1e7
        f[1] = np.sqrt(tmpVar) / (0.1 * x6 * x6 * x6)
        # Constraint functions
        g[0] = -(1.0 / (x1 * x2 * x2 * x3)) + 1.0 / 27.0
        g[1] = -(1.0 / (x1 * x2 * x2 * x3 * x3)) + 1.0 / 397.5
        g[2] = -(x4 * x4 * x4) / (x2 * x3 * x6 * x6 * x6 * x6) + 1.0 / 1.93
        g[3] = -(x5 * x5 * x5) / (x2 * x3 * x7 * x7 * x7 * x7) + 1.0 / 1.93
        g[4] = -(x2 * x3) + 40.0
        g[5] = -(x1 / x2) + 12.0
        g[6] = -5.0 + (x1 / x2)
        g[7] = -1.9 + x4 - 1.5 * x6
        g[8] = -1.9 + x5 - 1.1 * x7
        g[9] = -f[1] + 1300.0
        tmpVar = np.power((745.0 * x5) / (x2 * x3), 2.0) + 1.575 * 1e8
        g[10] = -np.sqrt(tmpVar) / (0.1 * x7 * x7 * x7) + 1100.0
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE25():
    def __init__(self):
        self.problem_name = 'CRE25'
        self.n_objectives = 2
        self.n_variables = 4
        self.n_constraints = 1
        self.lbound = np.full(self.n_variables, 12)
        self.ubound = np.full(self.n_variables, 60)

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        # all the four variables must be integer values
        x1 = np.round(x[0])
        x2 = np.round(x[1])
        x3 = np.round(x[2])
        x4 = np.round(x[3])
        # First original objective function
        f[0] = np.abs(6.931 - ((x3 / x1) * (x4 / x2)))
        # Second original objective function (the maximum value among the four variables)
        l = [x1, x2, x3, x4]
        f[1] = max(l)
        g[0] = 0.5 - (f[0] / 6.931)
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE31():
    def __init__(self):
        self.problem_name = 'CRE31'
        self.n_objectives = 3
        self.n_variables = 7
        self.n_constraints = 10
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 0.5
        self.lbound[1] = 0.45
        self.lbound[2] = 0.5
        self.lbound[3] = 0.5
        self.lbound[4] = 0.875
        self.lbound[5] = 0.4
        self.lbound[6] = 0.4
        self.ubound[0] = 1.5
        self.ubound[1] = 1.35
        self.ubound[2] = 1.5
        self.ubound[3] = 1.5
        self.ubound[4] = 2.625
        self.ubound[5] = 1.2
        self.ubound[6] = 1.2

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        x1 = x[0]
        x2 = x[1]
        x3 = x[2]
        x4 = x[3]
        x5 = x[4]
        x6 = x[5]
        x7 = x[6]
        # First original objective function
        f[0] = 1.98 + 4.9 * x1 + 6.67 * x2 + 6.98 * x3 + 4.01 * x4 + 1.78 * x5 + 0.00001 * x6 + 2.73 * x7
        # Second original objective function
        f[1] = 4.72 - 0.5 * x4 - 0.19 * x2 * x3
        # Third original objective function
        Vmbp = 10.58 - 0.674 * x1 * x2 - 0.67275 * x2
        Vfd = 16.45 - 0.489 * x3 * x7 - 0.843 * x5 * x6
        f[2] = 0.5 * (Vmbp + Vfd)
        # Constraint functions
        g[0] = 1 - (1.16 - 0.3717 * x2 * x4 - 0.0092928 * x3)
        g[1] = 0.32 - (0.261 - 0.0159 * x1 * x2 - 0.06486 * x1 - 0.019 * x2 * x7 + 0.0144 * x3 * x5 + 0.0154464 * x6)
        g[2] = 0.32 - (0.214 + 0.00817 * x5 - 0.045195 * x1 - 0.0135168 * x1 + 0.03099 * x2 * x6 - 0.018 * x2 * x7 + 0.007176 * x3 + 0.023232 * x3 - 0.00364 * x5 * x6 - 0.018 * x2 * x2)
        g[3] = 0.32 - (0.74 - 0.61 * x2 - 0.031296 * x3 - 0.031872 * x7 + 0.227 * x2 * x2)
        g[4] = 32 - (28.98 + 3.818 * x3 - 4.2 * x1 * x2 + 1.27296 * x6 - 2.68065 * x7)
        g[5] = 32 - (33.86 + 2.95 * x3 - 5.057 * x1 * x2 - 3.795 * x2 - 3.4431 * x7 + 1.45728)
        g[6] = 32 - (46.36 - 9.9 * x2 - 4.4505 * x1)
        g[7] = 4 - f[1]
        g[8] = 9.9 - Vmbp
        g[9] = 15.7 - Vfd
        g = np.where(g < 0, -g, 0)
        return f, g
class CRE32():
    def __init__(self):
        self.problem_name = 'CRE32'
        self.n_objectives = 3
        self.n_variables = 6
        self.n_constraints = 9
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 150.0
        self.lbound[1] = 20.0
        self.lbound[2] = 13.0
        self.lbound[3] = 10.0
        self.lbound[4] = 14.0
        self.lbound[5] = 0.63
        self.ubound[0] = 274.32
        self.ubound[1] = 32.31
        self.ubound[2] = 25.0
        self.ubound[3] = 11.71
        self.ubound[4] = 18.0
        self.ubound[5] = 0.75

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        # NOT g
        constraintFuncs = np.zeros(self.n_constraints)
        x_L = x[0]
        x_B = x[1]
        x_D = x[2]
        x_T = x[3]
        x_Vk = x[4]
        x_CB = x[5]
        displacement = 1.025 * x_L * x_B * x_T * x_CB
        V = 0.5144 * x_Vk
        g = 9.8065
        Fn = V / np.power(g * x_L, 0.5)
        a = (4977.06 * x_CB * x_CB) - (8105.61 * x_CB) + 4456.51
        b = (-10847.2 * x_CB * x_CB) + (12817.0 * x_CB) - 6960.32
        power = (np.power(displacement, 2.0/3.0) * np.power(x_Vk, 3.0)) / (a + (b * Fn))
        outfit_weight = 1.0 * np.power(x_L, 0.8) * np.power(x_B, 0.6) * np.power(x_D, 0.3) * np.power(x_CB, 0.1)
        steel_weight = 0.034 * np.power(x_L, 1.7) * np.power(x_B, 0.7) * np.power(x_D, 0.4) * np.power(x_CB, 0.5)
        machinery_weight = 0.17 * np.power(power, 0.9)
        light_ship_weight = steel_weight + outfit_weight + machinery_weight
        ship_cost = 1.3 * ((2000.0 * np.power(steel_weight, 0.85)) + (3500.0 * outfit_weight) + (2400.0 * np.power(power, 0.8)))
        capital_costs = 0.2 * ship_cost
        DWT = displacement - light_ship_weight
        running_costs = 40000.0 * np.power(DWT, 0.3)
        round_trip_miles = 5000.0
        sea_days = (round_trip_miles / 24.0) * x_Vk
        handling_rate = 8000.0
        daily_consumption = ((0.19 * power * 24.0) / 1000.0) + 0.2
        fuel_price = 100.0
        fuel_cost = 1.05 * daily_consumption * sea_days * fuel_price
        port_cost = 6.3 * np.power(DWT, 0.8)
        fuel_carried = daily_consumption * (sea_days + 5.0)
        miscellaneous_DWT = 2.0 * np.power(DWT, 0.5)
        cargo_DWT = DWT - fuel_carried - miscellaneous_DWT
        port_days = 2.0 * ((cargo_DWT / handling_rate) + 0.5)
        RTPA = 350.0 / (sea_days + port_days)
        voyage_costs = (fuel_cost + port_cost) * RTPA
        annual_costs = capital_costs + running_costs + voyage_costs
        annual_cargo = cargo_DWT * RTPA
        f[0] = annual_costs / annual_cargo
        f[1] = light_ship_weight
        # f_2 is treated as a minimization problem
        f[2] = -annual_cargo
        # Reformulated objective functions
        constraintFuncs[0] = (x_L / x_B) - 6.0
        constraintFuncs[1] = -(x_L / x_D) + 15.0
        constraintFuncs[2] = -(x_L / x_T) + 19.0
        constraintFuncs[3] = 0.45 * np.power(DWT, 0.31) - x_T
        constraintFuncs[4] = 0.7 * x_D + 0.7 - x_T
        constraintFuncs[5] = 50000.0 - DWT
        constraintFuncs[6] = DWT - 3000.0
        constraintFuncs[7] = 0.32 - Fn
        KB = 0.53 * x_T
        BMT = ((0.085 * x_CB - 0.002) * x_B * x_B) / (x_T * x_CB)
        KG = 1.0 + 0.52 * x_D
        constraintFuncs[8] = (KB + BMT - KG) - (0.07 * x_B)
        constraintFuncs = np.where(constraintFuncs < 0, -constraintFuncs, 0)
        return f, constraintFuncs
class CRE51():
    def __init__(self):
        self.problem_name = 'CRE51'
        self.n_objectives = 5
        self.n_variables = 3
        self.n_constraints = 7
        self.lbound = np.zeros(self.n_variables)
        self.ubound = np.zeros(self.n_variables)
        self.lbound[0] = 0.01
        self.lbound[1] = 0.01
        self.lbound[2] = 0.01
        self.ubound[0] = 0.45
        self.ubound[1] = 0.10
        self.ubound[2] = 0.10

    def evaluate(self, x):
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_constraints)
        # First original objective function
        f[0] = 106780.37 * (x[1] + x[2]) + 61704.67
        # Second original objective function
        f[1] = 3000 * x[0]
        # Third original objective function
        f[2] = 305700 * 2289 * x[1] / np.power(0.06*2289, 0.65)
        # Fourth original objective function
        f[3] = 250 * 2289 * np.exp(-39.75*x[1]+9.9*x[2]+2.74)
        # Fifth original objective function
        f[4] = 25 * (1.39 /(x[0]*x[1]) + 4940*x[2] -80)
        # Constraint functions
        g[0] = 1 - (0.00139/(x[0]*x[1])+4.94*x[2]-0.08)
        g[1] = 1 - (0.000306/(x[0]*x[1])+1.082*x[2]-0.0986)
        g[2] = 50000 - (12.307/(x[0]*x[1]) + 49408.24*x[2]+4051.02)
        g[3] = 16000 - (2.098/(x[0]*x[1])+8046.33*x[2]-696.71)
        g[4] = 10000 - (2.138/(x[0]*x[1])+7883.39*x[2]-705.04)
        g[5] = 2000 - (0.417*x[0]*x[1] + 1721.26*x[2]-136.54)
        g[6] = 550 - (0.164/(x[0]*x[1])+631.13*x[2]-54.48)
        g = np.where(g < 0, -g, 0)
        return f, g
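# Usage sketch (an illustrative addition, not part of the original file): with
# the definitions above, a problem is obtained by name and evaluated at a point
# inside its box constraints, e.g.:
#
#   problem = get_problem('re21')
#   x = (problem.lbound + problem.ubound) / 2.0
#   f = problem.evaluate(x)   # RE* problems return f; CRE* problems return (f, g)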
| 36.369122
| 536
| 0.483203
| 7,362
| 46,407
| 2.967672
| 0.091687
| 0.041651
| 0.042796
| 0.046686
| 0.788539
| 0.782406
| 0.740663
| 0.721668
| 0.710134
| 0.705969
| 0
| 0.186441
| 0.353804
| 46,407
| 1,276
| 537
| 36.369122
| 0.542117
| 0.080419
| 0
| 0.759507
| 0
| 0
| 0.004889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05036
| false
| 0
| 0.002056
| 0
| 0.102775
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
19db0aca374fded6aea18efcee5e860c5595c849
| 4,059
|
py
|
Python
|
tests/test_urls_endpoints.py
|
victorskl/gen3-indexd
|
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
|
[
"Apache-2.0"
] | 11
|
2018-05-31T06:29:44.000Z
|
2020-10-21T14:09:36.000Z
|
tests/test_urls_endpoints.py
|
victorskl/gen3-indexd
|
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
|
[
"Apache-2.0"
] | 171
|
2017-11-13T16:56:35.000Z
|
2022-03-29T19:37:35.000Z
|
tests/test_urls_endpoints.py
|
victorskl/gen3-indexd
|
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
|
[
"Apache-2.0"
] | 25
|
2018-03-06T19:03:24.000Z
|
2021-11-27T19:39:49.000Z
|
import random

import pytest

from tests.test_client import get_doc


@pytest.fixture(scope="function")
def test_data(client, user):
    system_random = random.SystemRandom()
    url_x_count = system_random.randint(2, 5)
    url_x_type = url_x_count
    url_x = "s3://awesome-x/bucket/key"
    versioned_count = system_random.randint(5, 10)
    for _ in range(versioned_count):
        doc = get_doc(has_urls_metadata=True, has_version=True)
        if url_x_type > 0:
            doc["urls"].append(url_x)
            doc["urls_metadata"][url_x] = {"state": "uploaded"}
            url_x_type -= 1
        print(doc)
        res = client.post("/index/", json=doc, headers=user)
        assert res.status_code == 200
        rec = client.get("/index/", json=doc, headers=user)
        assert rec.status_code == 200
    url_x_type = url_x_count
    unversioned_count = system_random.randint(6, 10)
    for _ in range(unversioned_count):
        doc = get_doc(has_urls_metadata=True)
        if url_x_type > 0:
            doc["urls"].append(url_x)
            doc["urls_metadata"][url_x] = {"state": "uploaded"}
            url_x_type -= 1
        print(doc)
        res = client.post("/index/", json=doc, headers=user)
        assert res.status_code == 200
        rec = client.get("/index/", json=doc, headers=user)
        assert rec.status_code == 200
    return url_x_count, versioned_count, unversioned_count


def test_query_urls(client, test_data):
    """
    Args:
        client (test fixture)
        test_data (tuple[int, int, int])
    """
    url_x_count, versioned_count, unversioned_count = test_data
    # test get all
    res = client.get("/_query/urls/q")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == versioned_count + unversioned_count
    # test list versioned urls
    res = client.get("/_query/urls/q?versioned=true")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == versioned_count
    # test list un versioned
    res = client.get("/_query/urls/q?versioned=false")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == unversioned_count
    # test exclude url
    res = client.get("/_query/urls/q?exclude=awesome-x")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == versioned_count + unversioned_count - 2 * url_x_count
    # test include
    res = client.get("/_query/urls/q?include=awesome-x")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == 2 * url_x_count
    # test include and exclude
    res = client.get("/_query/urls/q?include=endpointurl&exclude=awesome-x")
    assert res.status_code == 200
    urls_list = res.json
    print(urls_list)
    assert len(urls_list) == versioned_count + unversioned_count - 2 * url_x_count


def test_query_urls_metadata(client, test_data):
    """
    Args:
        client (test fixture)
        test_data (tuple[int, int, int])
    """
    url_x_count, _, unversioned_count = test_data
    # test get all
    res = client.get("_query/urls/metadata/q?key=state&value=uploaded&url=awesome-x")
    assert res.status_code == 200
    urls_list = res.json
    assert len(urls_list) == 2 * url_x_count
    # test list versioned urls
    res = client.get(
        "_query/urls/metadata/q?key=state&value=uploaded&url=awesome-x&versioned=True"
    )
    assert res.status_code == 200
    urls_list = res.json
    assert len(urls_list) == url_x_count
    # test list un versioned
    res = client.get(
        "_query/urls/metadata/q?key=state&value=uploaded&url=endpointurl&versioned=False"
    )
    assert res.status_code == 200
    urls_list = res.json
    assert len(urls_list) == unversioned_count
    # test unknown state
    res = client.get("_query/urls/metadata/q?key=state&value=uploadedx&url=awesome-x")
    assert res.status_code == 200
    urls_list = res.json
    assert len(urls_list) == 0
| 31.465116
| 89
| 0.660261
| 579
| 4,059
| 4.392055
| 0.129534
| 0.081793
| 0.071569
| 0.089658
| 0.854109
| 0.833661
| 0.804168
| 0.745969
| 0.721589
| 0.714117
| 0
| 0.019017
| 0.222715
| 4,059
| 128
| 90
| 31.710938
| 0.787005
| 0.082286
| 0
| 0.606742
| 0
| 0.044944
| 0.160262
| 0.130281
| 0
| 0
| 0
| 0
| 0.269663
| 1
| 0.033708
| false
| 0
| 0.033708
| 0
| 0.078652
| 0.089888
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
19eee05fd1b7f614b250386649f3a046fa4ede59
| 45
|
py
|
Python
|
carsus/io/kurucz/__init__.py
|
parikshit14/carsus
|
3f67e8068829829361d7b1da9020e1fde9dcac2e
|
[
"BSD-3-Clause"
] | 21
|
2016-06-01T16:12:03.000Z
|
2022-02-04T09:03:38.000Z
|
carsus/io/kurucz/__init__.py
|
parikshit14/carsus
|
3f67e8068829829361d7b1da9020e1fde9dcac2e
|
[
"BSD-3-Clause"
] | 149
|
2016-05-03T17:50:42.000Z
|
2022-03-25T14:48:51.000Z
|
carsus/io/kurucz/__init__.py
|
parikshit14/carsus
|
3f67e8068829829361d7b1da9020e1fde9dcac2e
|
[
"BSD-3-Clause"
] | 34
|
2016-05-03T16:39:11.000Z
|
2022-02-03T16:39:49.000Z
|
from .gfall import GFALLReader, GFALLIngester
| 45
| 45
| 0.866667
| 5
| 45
| 7.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
df9fae53d97b2763e95c92f4276f8e2ccf159e28
| 163
|
py
|
Python
|
blog/admin.py
|
netocraft/web-netocraft
|
87e35ea1ea23b8a75eeabb33fc10aac57cccb2ac
|
[
"Unlicense"
] | null | null | null |
blog/admin.py
|
netocraft/web-netocraft
|
87e35ea1ea23b8a75eeabb33fc10aac57cccb2ac
|
[
"Unlicense"
] | null | null | null |
blog/admin.py
|
netocraft/web-netocraft
|
87e35ea1ea23b8a75eeabb33fc10aac57cccb2ac
|
[
"Unlicense"
] | null | null | null |
from django.contrib import admin
from .models import Post, Profile, Relacion
admin.site.register(Post)
admin.site.register(Profile)
admin.site.register(Relacion)
| 23.285714
| 43
| 0.815951
| 23
| 163
| 5.782609
| 0.478261
| 0.203008
| 0.383459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08589
| 163
| 6
| 44
| 27.166667
| 0.892617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
dfee6cca1d75f229a1c3968fa75634388ce223f1
| 32
|
py
|
Python
|
python/lib/flight_recorder/__init__.py
|
ajragusa/OpenFlow-Flight-Recorder
|
437fbdf72fe18aa9af020630fa46e3f801adb59f
|
[
"Apache-2.0"
] | null | null | null |
python/lib/flight_recorder/__init__.py
|
ajragusa/OpenFlow-Flight-Recorder
|
437fbdf72fe18aa9af020630fa46e3f801adb59f
|
[
"Apache-2.0"
] | null | null | null |
python/lib/flight_recorder/__init__.py
|
ajragusa/OpenFlow-Flight-Recorder
|
437fbdf72fe18aa9af020630fa46e3f801adb59f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import mongo
| 8
| 17
| 0.71875
| 5
| 32
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 3
| 18
| 10.666667
| 0.821429
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dff865151655263c0851cf98b07e4c00487a7d92
| 72
|
py
|
Python
|
apex_utils/apex/__init__.py
|
ViugiNick/sentiment-discovery
|
c781b1236a52a981af40733de13ea1598d4255d9
|
[
"BSD-3-Clause"
] | 1
|
2018-10-16T10:56:47.000Z
|
2018-10-16T10:56:47.000Z
|
apex_utils/apex/__init__.py
|
atsnova/sentiment-discovery
|
7f5ab28918a6fc29318a30f557b9454f0f5cc26a
|
[
"BSD-3-Clause"
] | null | null | null |
apex_utils/apex/__init__.py
|
atsnova/sentiment-discovery
|
7f5ab28918a6fc29318a30f557b9454f0f5cc26a
|
[
"BSD-3-Clause"
] | 1
|
2019-03-13T11:43:13.000Z
|
2019-03-13T11:43:13.000Z
|
from . import RNN
from . import reparameterization
#from . import utils
| 18
| 32
| 0.777778
| 9
| 72
| 6.222222
| 0.555556
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 3
| 33
| 24
| 0.933333
| 0.263889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f04a82b7a12234e59d660fa5330cb058565f182
| 49
|
py
|
Python
|
inferlo/interop/__init__.py
|
InferLO/inferlo
|
a65efce721d7f99d2f274dd94a1aaf7ca159e944
|
[
"Apache-2.0"
] | 1
|
2022-01-27T18:44:07.000Z
|
2022-01-27T18:44:07.000Z
|
inferlo/interop/__init__.py
|
InferLO/inferlo
|
a65efce721d7f99d2f274dd94a1aaf7ca159e944
|
[
"Apache-2.0"
] | 3
|
2022-01-23T18:02:30.000Z
|
2022-01-27T23:10:51.000Z
|
inferlo/interop/__init__.py
|
InferLO/inferlo
|
a65efce721d7f99d2f274dd94a1aaf7ca159e944
|
[
"Apache-2.0"
] | 1
|
2021-09-03T06:12:57.000Z
|
2021-09-03T06:12:57.000Z
|
from .libdai.libdai_interop import LibDaiInterop
| 24.5
| 48
| 0.877551
| 6
| 49
| 7
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a054f6a58f3694fb4eafcb7758f9c2e1b97db060
| 190
|
py
|
Python
|
text.py
|
zachmerrill/pyrdle
|
573035cecbe3ee5cae36562a2e3b53ea4f2950a0
|
[
"MIT"
] | null | null | null |
text.py
|
zachmerrill/pyrdle
|
573035cecbe3ee5cae36562a2e3b53ea4f2950a0
|
[
"MIT"
] | null | null | null |
text.py
|
zachmerrill/pyrdle
|
573035cecbe3ee5cae36562a2e3b53ea4f2950a0
|
[
"MIT"
] | null | null | null |
class Text:
RED = '\033[1;31;1m'
GREEN = '\033[1;32;1m'
YELLOW = '\033[1;33;1m'
UNDERLINE = '\033[4m'
@staticmethod
def apply(text, fmt):
# Prepend the ANSI format code, then reset styling at the end.
return fmt + text + '\033[0m'
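# Hypothetical usage sketch (not part of the original file):
# print(Text.apply('warning', Text.YELLOW)) would print 'warning' in yellow
# and then reset the terminal styling.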
| 21.111111
| 40
| 0.515789
| 29
| 190
| 3.37931
| 0.655172
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208633
| 0.268421
| 190
| 8
| 41
| 23.75
| 0.496403
| 0
| 0
| 0
| 0
| 0
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
a0a8199f74e27a27e32250a2c661d417ee347afc
| 38
|
py
|
Python
|
perses/bias/__init__.py
|
schallerdavid/perses
|
58bd6e626e027879e136f56e175683893e016f8c
|
[
"MIT"
] | 99
|
2016-01-19T18:10:37.000Z
|
2022-03-26T02:43:08.000Z
|
perses/bias/__init__.py
|
schallerdavid/perses
|
58bd6e626e027879e136f56e175683893e016f8c
|
[
"MIT"
] | 878
|
2015-09-18T19:25:30.000Z
|
2022-03-31T02:33:04.000Z
|
perses/bias/__init__.py
|
schallerdavid/perses
|
58bd6e626e027879e136f56e175683893e016f8c
|
[
"MIT"
] | 30
|
2015-09-21T15:26:35.000Z
|
2022-01-10T20:07:24.000Z
|
from perses.bias.bias_engine import *
| 19
| 37
| 0.815789
| 6
| 38
| 5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2655acfcc3779e4b713b207120598cd7e55c1f55
| 183
|
py
|
Python
|
examples/underscored/if_expr.py
|
doboy/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 7
|
2016-09-23T00:44:05.000Z
|
2021-10-04T21:19:12.000Z
|
examples/underscored/if_expr.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 1
|
2016-09-23T00:45:05.000Z
|
2019-02-16T19:05:37.000Z
|
examples/underscored/if_expr.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 3
|
2016-09-23T01:13:15.000Z
|
2018-07-20T21:22:17.000Z
|
# print(3 if False else 5 if True else 5)
# print(3 if True else 5)
(___, ____) = (3, 5)
(_, __) = (False, True)
print(___ if _ else ____ if __ else ____)
print(___ if __ else ____)
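# As the commented lines above indicate, this prints 5 and then 3:
# _ is False and __ is True, while ___ and ____ hold 3 and 5.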
| 22.875
| 42
| 0.644809
| 28
| 183
| 3.035714
| 0.25
| 0.176471
| 0.188235
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050725
| 0.245902
| 183
| 7
| 43
| 26.142857
| 0.565217
| 0.349727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
cd5039f2e87fc3585f066cdff018fb581e015680
| 26
|
py
|
Python
|
utils/models/vnet/__init__.py
|
bhklab/ptl-oar-segmentation
|
354c3ee7f042a025f74e210a7b8462beac9b727d
|
[
"Apache-2.0"
] | 3
|
2022-01-18T19:25:46.000Z
|
2022-02-05T18:53:24.000Z
|
utils/models/vnet/__init__.py
|
bhklab/ptl-oar-segmentation
|
354c3ee7f042a025f74e210a7b8462beac9b727d
|
[
"Apache-2.0"
] | null | null | null |
utils/models/vnet/__init__.py
|
bhklab/ptl-oar-segmentation
|
354c3ee7f042a025f74e210a7b8462beac9b727d
|
[
"Apache-2.0"
] | null | null | null |
from .model import VNet3D
| 13
| 25
| 0.807692
| 4
| 26
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cd6326eea52f1280b2978f885564e60a956b3dbb
| 4,169
|
py
|
Python
|
numlab/builtin/nl_float.py
|
jmorgadov/NumLab
|
96a3771837b87132674e65ec3bb1f0ab5f5f089f
|
[
"MIT"
] | 9
|
2022-01-19T22:40:58.000Z
|
2022-02-24T02:39:51.000Z
|
numlab/builtin/nl_float.py
|
jmorgadov/NumLab
|
96a3771837b87132674e65ec3bb1f0ab5f5f089f
|
[
"MIT"
] | 41
|
2021-11-09T18:22:10.000Z
|
2022-02-06T19:04:23.000Z
|
numlab/builtin/nl_float.py
|
jmorgadov/NumLab
|
96a3771837b87132674e65ec3bb1f0ab5f5f089f
|
[
"MIT"
] | null | null | null |
import numlab.exceptions as excpt
from numlab.lang.type import Instance, Type
nl_bool = Type.get("bool")
nl_str = Type.get("str")
nl_int = Type.get("int")
nl_float = Type.get("float")
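# Each nl__*__ function below is registered on the nl_float type via the
# @nl_float.method(...) decorator, mirroring the corresponding Python dunder
# method for NumLab's float instances.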
@nl_float.method("__new__")
def nl__new__(value: float):
_inst = Instance(nl_float)
_inst.set("value", float(value))
return _inst
@nl_float.method("__bool__")
def nl__bool__(self: Instance):
return nl_bool(self.get("value") != 0)
@nl_float.method("__add__")
def nl__add__(self, other: Instance):
if other.type.subtype(nl_float):
return Type.resolve_type(self.get("value") + other.get("value"))
raise excpt.InvalidTypeError("Can't add float to non-float")
@nl_float.method("__iadd__")
def nl__iadd__(self, other: Instance):
if other.type.subtype(nl_float):
self.set("value", self.get("value") + other.get("value"))
return self
raise excpt.InvalidTypeError("Can't add float to non-float")
@nl_float.method("__sub__")
def nl__sub__(self, other: Instance):
if other.type.subtype(nl_float):
return Type.resolve_type(self.get("value") - other.get("value"))
raise excpt.InvalidTypeError("Can't subtract float from non-float")
@nl_float.method("__isub__")
def nl__isub__(self, other: Instance):
if other.type.subtype(nl_float):
self.set("value", self.get("value") - other.get("value"))
return self
raise excpt.InvalidTypeError("Can't subtract float from non-float")
@nl_float.method("__mul__")
def nl__mul__(self, other: Instance):
if other.type.subtype(nl_float):
return Type.resolve_type(self.get("value") * other.get("value"))
raise excpt.InvalidTypeError("Can't multiply float by non-float")
@nl_float.method("__imul__")
def nl__imul__(self, other: Instance):
if other.type.subtype(nl_float):
self.set("value", self.get("value") * other.get("value"))
return self
raise excpt.InvalidTypeError("Can't multiply float by non-float")
@nl_float.method("__pow__")
def nl__pow__(self, other: Instance):
if other.type.subtype(nl_int):
return Type.resolve_type(self.get("value") ** other.get("value"))
raise excpt.InvalidTypeError("Can't raise float to non-int")
@nl_float.method("__truediv__")
def nl__div__(self, other: Instance):
if other.type.subtype(nl_float):
return Type.resolve_type(self.get("value") / other.get("value"))
raise excpt.InvalidTypeError("Can't divide float by non-float")
@nl_float.method("__idiv__")
def nl__idiv__(self, other: Instance):
if other.type.subtype(nl_float):
self.set("value", self.get("value") / other.get("value"))
return self
raise excpt.InvalidTypeError("Can't divide float by non-float")
@nl_float.method("__eq__")
def nl__eq__(self, other: Instance):
if other.type.subtype(nl_float):
return nl_bool(self.get("value") == other.get("value"))
raise excpt.InvalidTypeError("Can't compare float to non-float")
@nl_float.method("__lt__")
def nl__lt__(self, other: Instance):
if other.type.subtype(nl_float):
return nl_bool(self.get("value") < other.get("value"))
raise excpt.InvalidTypeError("Can't compare float to non-float")
@nl_float.method("__gt__")
def nl__gt__(self, other: Instance):
if other.type.subtype(nl_float):
return nl_bool(self.get("value") > other.get("value"))
raise excpt.InvalidTypeError("Can't compare float to non-float")
@nl_float.method("__le__")
def nl__le__(self, other: Instance):
if other.type.subtype(nl_float):
return nl_bool(self.get("value") <= other.get("value"))
raise excpt.InvalidTypeError("Can't compare float to non-float")
@nl_float.method("__ge__")
def nl__ge__(self, other: Instance):
if other.type.subtype(nl_float):
return nl_bool(self.get("value") >= other.get("value"))
raise excpt.InvalidTypeError("Can't compare float to non-float")
@nl_float.method("__str__")
def nl__str__(self):
return nl_str(str(self.get("value")))
@nl_float.method("__repr__")
def nl__repr__(self):
return nl_str(str(self.get("value")))
@nl_float.method("__hash__")
def nl__hash__(self):
return hash(self.get("value"))
| 30.430657
| 73
| 0.694651
| 614
| 4,169
| 4.381107
| 0.091205
| 0.088476
| 0.091822
| 0.09368
| 0.80632
| 0.80632
| 0.797398
| 0.797398
| 0.783643
| 0.783643
| 0
| 0.000282
| 0.149676
| 4,169
| 136
| 74
| 30.654412
| 0.758533
| 0
| 0
| 0.329897
| 0
| 0
| 0.187335
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.195876
| false
| 0
| 0.020619
| 0.041237
| 0.412371
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cd8c0526118e240bd53b4ac841a553998f597dbf
| 114
|
py
|
Python
|
galaxies/__init__.py
|
philrosenfield/ResolvedStellarPops
|
ab24083ae5080545165ccf7589d5a22c7989ce75
|
[
"BSD-3-Clause"
] | null | null | null |
galaxies/__init__.py
|
philrosenfield/ResolvedStellarPops
|
ab24083ae5080545165ccf7589d5a22c7989ce75
|
[
"BSD-3-Clause"
] | null | null | null |
galaxies/__init__.py
|
philrosenfield/ResolvedStellarPops
|
ab24083ae5080545165ccf7589d5a22c7989ce75
|
[
"BSD-3-Clause"
] | null | null | null |
from .galaxies import *
from .galaxy import *
from .simgalaxy import *
from .starpop import *
from .asts import *
| 19
| 24
| 0.736842
| 15
| 114
| 5.6
| 0.466667
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 114
| 5
| 25
| 22.8
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
26aeefdeb957d1c03fe74be87e56e607bab2767a
| 27
|
py
|
Python
|
application/controllers/admin/content/__init__.py
|
mutalisk999/bibi
|
2e0043d207c83ef7fc55d20b9c0a1b7e493a551c
|
[
"Apache-2.0"
] | 1,037
|
2017-03-24T11:18:55.000Z
|
2022-03-19T14:02:27.000Z
|
application/controllers/admin/content/__init__.py
|
spacecode-live/An-e-commerce-fullstack-solution-for-Flask-
|
28130a18a73afbb99bf850e857dd1f14c08fbca1
|
[
"Apache-2.0"
] | 8
|
2017-03-26T02:53:24.000Z
|
2018-09-14T03:18:26.000Z
|
application/controllers/admin/content/__init__.py
|
spacecode-live/An-e-commerce-fullstack-solution-for-Flask-
|
28130a18a73afbb99bf850e857dd1f14c08fbca1
|
[
"Apache-2.0"
] | 365
|
2017-03-24T11:29:02.000Z
|
2021-11-24T03:14:19.000Z
|
from . import banner, item
| 13.5
| 26
| 0.740741
| 4
| 27
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 27
| 1
| 27
| 27
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
26ba57057bc810a77a4b6afad58790f177b93c3c
| 39
|
py
|
Python
|
airtrack/src/__init__.py
|
ckarageorgkaneen/airtrack-pybpod
|
86cad41dbea4f7ba496868d171758c348ed7c1f2
|
[
"MIT"
] | 1
|
2021-09-16T17:42:29.000Z
|
2021-09-16T17:42:29.000Z
|
airtrack/src/__init__.py
|
ckarageorgkaneen/airtrack-pybpod
|
86cad41dbea4f7ba496868d171758c348ed7c1f2
|
[
"MIT"
] | 12
|
2021-08-01T17:50:27.000Z
|
2021-08-08T17:33:58.000Z
|
airtrack/src/__init__.py
|
ckarageorgkaneen/airtrack
|
86cad41dbea4f7ba496868d171758c348ed7c1f2
|
[
"MIT"
] | null | null | null |
from airtrack.src.base import Airtrack
| 19.5
| 38
| 0.846154
| 6
| 39
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
26ee52424e72655b49fed742330b608438aa1b1b
| 325
|
py
|
Python
|
torchcv/models/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 433
|
2017-11-30T15:46:58.000Z
|
2022-01-16T08:06:11.000Z
|
torchcv/models/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 51
|
2018-01-29T15:14:33.000Z
|
2021-08-23T12:02:18.000Z
|
fpn-hoi/torchcv/models/__init__.py
|
TheFairBear/Box-Attention-SSD-HOI
|
6101e209a709899c5645342784c8f451028ff46e
|
[
"MIT"
] | 92
|
2018-01-20T07:45:36.000Z
|
2021-05-28T10:43:53.000Z
|
from torchcv.models.ssd.net import SSD300, SSD512
from torchcv.models.ssd.box_coder import SSDBoxCoder
from torchcv.models.fpnssd.net import FPNSSD512
from torchcv.models.fpnssd.box_coder import FPNSSDBoxCoder
from torchcv.models.retinanet.net import RetinaNet
from torchcv.models.retinanet.box_coder import RetinaBoxCoder
| 36.111111
| 61
| 0.858462
| 46
| 325
| 6
| 0.347826
| 0.23913
| 0.369565
| 0.144928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030201
| 0.083077
| 325
| 8
| 62
| 40.625
| 0.895973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f81a51389e1496b1ca5a707c01207f83600ed046
| 19
|
py
|
Python
|
ulmo/twc/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 123
|
2015-01-29T12:35:52.000Z
|
2021-12-15T21:09:33.000Z
|
ulmo/twc/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 107
|
2015-01-05T17:56:22.000Z
|
2021-11-19T22:46:23.000Z
|
ulmo/twc/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 49
|
2015-02-15T18:11:34.000Z
|
2022-01-25T14:25:32.000Z
|
from . import kbdi
| 9.5
| 18
| 0.736842
| 3
| 19
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 19
| 1
| 19
| 19
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f849f0a4fd0cf9b1dc3b650cc5ef05d2c55a030a
| 8,875
|
py
|
Python
|
tests/engine/block_layout/test_block_non_replaced_normal_flow.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 71
|
2015-04-13T09:44:14.000Z
|
2019-03-24T01:03:02.000Z
|
tests/engine/block_layout/test_block_non_replaced_normal_flow.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 35
|
2019-05-06T15:26:09.000Z
|
2022-03-28T06:30:33.000Z
|
tests/engine/block_layout/test_block_non_replaced_normal_flow.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 139
|
2015-05-30T18:37:43.000Z
|
2019-03-27T17:14:05.000Z
|
from colosseum.constants import AUTO, BLOCK, RTL, SOLID
from colosseum.declaration import CSS
from ...utils import LayoutTestCase, TestNode
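# Each test builds a single block-level TestNode, runs the layout engine on
# it, and asserts the computed border, padding, and content boxes; the
# expected sizes imply a root viewport 1024 pixels wide.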
class WidthTests(LayoutTestCase):
def test_no_horizontal_properties(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, height=10)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (1024, 10)},
'padding_box': {'position': (0, 0), 'size': (1024, 10)},
'content': {'position': (0, 0), 'size': (1024, 10)},
}
)
def test_left_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, height=10, margin_left=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (1024, 10)},
'padding_box': {'position': (0, 0), 'size': (1024, 10)},
'content': {'position': (0, 0), 'size': (1024, 10)},
}
)
def test_right_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, height=10, margin_right=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (1024, 10)},
'padding_box': {'position': (0, 0), 'size': (1024, 10)},
'content': {'position': (0, 0), 'size': (1024, 10)},
}
)
def test_left_and_right_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, height=10, margin_left=AUTO, margin_right=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (1024, 10)},
'padding_box': {'position': (0, 0), 'size': (1024, 10)},
'content': {'position': (0, 0), 'size': (1024, 10)},
}
)
def test_width(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=50, height=10)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (50, 10)},
'padding_box': {'position': (0, 0), 'size': (50, 10)},
'content': {'position': (0, 0), 'size': (50, 10)},
}
)
def test_width_auto_left_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=50, height=10, margin_left=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (974, 0), 'size': (50, 10)},
'padding_box': {'position': (974, 0), 'size': (50, 10)},
'content': {'position': (974, 0), 'size': (50, 10)},
}
)
def test_width_auto_right_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=50, height=10, margin_right=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (50, 10)},
'padding_box': {'position': (0, 0), 'size': (50, 10)},
'content': {'position': (0, 0), 'size': (50, 10)},
}
)
def test_width_auto_left_and_right_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=50, height=10, margin_left=AUTO, margin_right=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (487, 0), 'size': (50, 10)},
'padding_box': {'position': (487, 0), 'size': (50, 10)},
'content': {'position': (487, 0), 'size': (50, 10)},
}
)
def test_width_fixed_left_and_right_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=50, height=10, margin_left=30, margin_right=40)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (30, 0), 'size': (50, 10)},
'padding_box': {'position': (30, 0), 'size': (50, 10)},
'content': {'position': (30, 0), 'size': (50, 10)},
}
)
def test_width_fixed_left_and_right_margin_rtl(self):
node = TestNode(
name='div', style=CSS(
display=BLOCK, width=50, height=10,
margin_left=30, margin_right=40, direction=RTL
)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (934, 0), 'size': (50, 10)},
'padding_box': {'position': (934, 0), 'size': (50, 10)},
'content': {'position': (934, 0), 'size': (50, 10)},
}
)
def test_width_exceeds_parent(self):
node = TestNode(
name='div', style=CSS(
display=BLOCK, width=500, height=20,
padding=50, border_width=60, border_style=SOLID,
margin=70
)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (70, 70), 'size': (720, 240)},
'padding_box': {'position': (130, 130), 'size': (600, 120)},
'content': {'position': (180, 180), 'size': (500, 20)},
}
)
def test_width_exceeds_parent_auto_left_and_right_margins(self):
node = TestNode(
name='div', style=CSS(
display=BLOCK, width=500, height=20,
padding=50, border_width=60, border_style=SOLID,
margin_left=AUTO, margin_right=AUTO
)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (152, 0), 'size': (720, 240)},
'padding_box': {'position': (212, 60), 'size': (600, 120)},
'content': {'position': (262, 110), 'size': (500, 20)},
}
)
class HeightTests(LayoutTestCase):
def test_no_vertical_properties(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=10)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (10, 0)},
'padding_box': {'position': (0, 0), 'size': (10, 0)},
'content': {'position': (0, 0), 'size': (10, 0)},
}
)
def test_height(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=10, height=50)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (10, 50)},
'padding_box': {'position': (0, 0), 'size': (10, 50)},
'content': {'position': (0, 0), 'size': (10, 50)},
}
)
def test_height_auto_top_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=10, height=50, margin_top=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (10, 50)},
'padding_box': {'position': (0, 0), 'size': (10, 50)},
'content': {'position': (0, 0), 'size': (10, 50)},
}
)
def test_height_auto_bottom_margin(self):
node = TestNode(
name='div', style=CSS(display=BLOCK, width=10, height=50, margin_bottom=AUTO)
)
self.layout_node(node)
self.assertLayout(
node,
{
'tag': 'div',
'border_box': {'position': (0, 0), 'size': (10, 50)},
'padding_box': {'position': (0, 0), 'size': (10, 50)},
'content': {'position': (0, 0), 'size': (10, 50)},
}
)
| 30.393836
| 106
| 0.45093
| 896
| 8,875
| 4.321429
| 0.081473
| 0.055527
| 0.077479
| 0.108471
| 0.913481
| 0.890754
| 0.851756
| 0.796229
| 0.789773
| 0.789773
| 0
| 0.078799
| 0.380845
| 8,875
| 291
| 107
| 30.498282
| 0.625842
| 0
| 0
| 0.475
| 0
| 0
| 0.131606
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.066667
| false
| 0
| 0.0125
| 0
| 0.0875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8822ea5dc4fe4359c8f052bccc79640e5ed2e42
| 147
|
py
|
Python
|
src/TowerDefence/Command/quit_page.py
|
sevashasla/TowerDefence
|
73625d88cdb70d4c026d6f452604d193bc32c127
|
[
"MIT"
] | null | null | null |
src/TowerDefence/Command/quit_page.py
|
sevashasla/TowerDefence
|
73625d88cdb70d4c026d6f452604d193bc32c127
|
[
"MIT"
] | null | null | null |
src/TowerDefence/Command/quit_page.py
|
sevashasla/TowerDefence
|
73625d88cdb70d4c026d6f452604d193bc32c127
|
[
"MIT"
] | null | null | null |
from .command import Command
class QuitPageCommand(Command):
def __init__(self):
pass
def __str__(self) -> str:
return "Quit current page"
| 16.333333
| 31
| 0.734694
| 19
| 147
| 5.263158
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170068
| 147
| 8
| 32
| 18.375
| 0.819672
| 0
| 0
| 0
| 0
| 0
| 0.115646
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
3e4006535c22a3266fb2c587c6b313b8d0d471b7
| 258
|
py
|
Python
|
guniflask/config/__init__.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | 12
|
2018-09-06T06:14:59.000Z
|
2021-04-18T06:30:44.000Z
|
guniflask/config/__init__.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | null | null | null |
guniflask/config/__init__.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | 2
|
2019-09-08T22:01:26.000Z
|
2020-08-03T07:23:29.000Z
|
from .app_settings import Settings
from .app_settings import settings
from .env import app_name_from_env
from .env import load_app_env
from .env import set_app_default_env
from .load_utils import load_app_settings
from .load_utils import load_profile_config
| 32.25
| 43
| 0.864341
| 44
| 258
| 4.704545
| 0.272727
| 0.135266
| 0.188406
| 0.202899
| 0.521739
| 0.299517
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108527
| 258
| 7
| 44
| 36.857143
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e42e6811af2695a3e6cd8a4af32166ddc34ffa4
| 194
|
py
|
Python
|
Python Programs/InheritanceExample/ChineseChef.py
|
JCharlieDev/Python
|
d213c6cb60110156b19d96d8bb9b809e69e89ce5
|
[
"MIT"
] | null | null | null |
Python Programs/InheritanceExample/ChineseChef.py
|
JCharlieDev/Python
|
d213c6cb60110156b19d96d8bb9b809e69e89ce5
|
[
"MIT"
] | null | null | null |
Python Programs/InheritanceExample/ChineseChef.py
|
JCharlieDev/Python
|
d213c6cb60110156b19d96d8bb9b809e69e89ce5
|
[
"MIT"
] | null | null | null |
from Chef import Chef
class ChineseChef(Chef):
def MakeFriedRice(self):
print("The chef makes Fried Rice")
def MakeSpecialdish(self):
print("The chef makes Dumplings")
| 21.555556
| 42
| 0.680412
| 24
| 194
| 5.5
| 0.625
| 0.136364
| 0.181818
| 0.242424
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231959
| 194
| 9
| 43
| 21.555556
| 0.885906
| 0
| 0
| 0
| 0
| 0
| 0.251282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
3e66c516484bcd5f263b5f90dd3c006eee5e3a4c
| 28
|
py
|
Python
|
marginal_finder/__init__.py
|
pjamesjoyce/marginal-finder
|
a2b3d69774e4e8669fa72a0160accf4419fb3b81
|
[
"BSD-3-Clause"
] | null | null | null |
marginal_finder/__init__.py
|
pjamesjoyce/marginal-finder
|
a2b3d69774e4e8669fa72a0160accf4419fb3b81
|
[
"BSD-3-Clause"
] | null | null | null |
marginal_finder/__init__.py
|
pjamesjoyce/marginal-finder
|
a2b3d69774e4e8669fa72a0160accf4419fb3b81
|
[
"BSD-3-Clause"
] | null | null | null |
from .market_finder import *
| 28
| 28
| 0.821429
| 4
| 28
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e4250108c88a5e10b7fc1807726acfe3685f025f
| 24,196
|
py
|
Python
|
AppDB/appscale/datastore/fdb/stats/entities.py
|
loftwah/appscale
|
586fc1347ebc743d7a632de698f4dbfb09ae38d6
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppDB/appscale/datastore/fdb/stats/entities.py
|
loftwah/appscale
|
586fc1347ebc743d7a632de698f4dbfb09ae38d6
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppDB/appscale/datastore/fdb/stats/entities.py
|
loftwah/appscale
|
586fc1347ebc743d7a632de698f4dbfb09ae38d6
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
"""
Each stat kind is populated from one or more stat sections (which are described
in the containers module).
Ns_Kind_CompositeIndex -> composite-indexes
Kind_CompositeIndex -> composite-indexes
Ns_Kind_IsRootEntity -> entities + builtin-indexes
Ns_Kind_NotRootEntity -> entities + builtin-indexes
Kind_IsRootEntity -> entities + builtin-indexes
Kind_NotRootEntity -> entities + builtin-indexes
Ns_PropertyType_PropertyName_Kind -> entity-properties + index-properties
Ns_PropertyName_Kind -> entity-properties + index-properties
Ns_PropertyType_Kind -> entity-properties + index-properties
PropertyType_PropertyName_Kind -> entity-properties + index-properties
Ns_PropertyType -> entity-properties + index-properties
PropertyName_Kind -> entity-properties + index-properties
PropertyType_Kind -> entity-properties + index-properties
PropertyType -> entity-properties + index-properties
Ns_Kind -> entities + builtin-indexes + composite-indexes
Kind -> entities + builtin-indexes + composite-indexes
Namespace -> entities + builtin-indexes + composite-indexes
Ns_Total -> entities + builtin-indexes + composite-indexes
Total -> entities + builtin-indexes + composite-indexes
"""
import datetime
import logging
import sys
import time
from collections import defaultdict
import six
from appscale.common.unpackaged import APPSCALE_PYTHON_APPSERVER
from appscale.datastore.fdb.stats.containers import CountBytes, StatsPropTypes
sys.path.append(APPSCALE_PYTHON_APPSERVER)
from google.appengine.datastore import entity_pb
# The value the datastore uses to populate the meaning field for timestamps.
GD_WHEN = 7
logger = logging.getLogger(__name__)
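# fill_entity builds one entity_pb.EntityProto for a stat record: the key is
# either named (name) or numeric (id_), and datetime property values are
# stored as microsecond timestamps flagged with the GD_WHEN meaning.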
def fill_entity(project_id, kind, properties, name=None, id_=None,
namespace=''):
entity = entity_pb.EntityProto()
key = entity.mutable_key()
key.set_app(project_id)
if namespace:
key.set_name_space(namespace)
path = key.mutable_path()
element = path.add_element()
element.set_type(kind)
if name is not None:
element.set_name(name)
else:
element.set_id(id_)
group = entity.mutable_entity_group()
group.add_element().CopyFrom(element)
for prop_name, value in six.iteritems(properties):
prop = entity.add_property()
prop.set_name(prop_name)
prop.set_multiple(False)
value_pb = prop.mutable_value()
if isinstance(value, datetime.datetime):
value_pb.set_int64value(
int(time.mktime(value.timetuple()) * 1000000 + value.microsecond))
prop.set_meaning(GD_WHEN)
elif isinstance(value, int):
value_pb.set_int64value(value)
else:
value_pb.set_stringvalue(value.encode('utf-8'))
return entity
def fill_entities(project_id, project_stats, timestamp):
entities = []
composite_stats = project_stats.composite_stats.stats
stats_kind = u'__Stat_Ns_Kind_CompositeIndex__'
for namespace, by_index in six.iteritems(composite_stats):
for (index_id, kind), fields in six.iteritems(by_index):
name = u'_'.join([kind, six.text_type(index_id)])
props = {'index_id': index_id, 'kind_name': kind, 'timestamp': timestamp,
'count': fields.count, 'bytes': fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name,
namespace=namespace))
stats_kind = u'__Stat_Kind_CompositeIndex__'
composite_stats_by_index = defaultdict(CountBytes)
for namespace, by_index in six.iteritems(composite_stats):
for key, fields in six.iteritems(by_index):
composite_stats_by_index[key] += fields
for (index_id, kind), fields in six.iteritems(composite_stats_by_index):
name = u'_'.join([kind, six.text_type(index_id)])
props = {'index_id': index_id, 'kind_name': kind, 'timestamp': timestamp,
'count': fields.count, 'bytes': fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name))
entity_stats = project_stats.entity_stats
stats_kind = u'__Stat_Ns_Kind_IsRootEntity__'
for namespace, by_kind in six.iteritems(entity_stats.entities_root):
for kind, entity_fields in six.iteritems(by_kind):
builtin_fields = entity_stats.builtin_indexes_root[namespace][kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind,
namespace=namespace))
stats_kind = u'__Stat_Ns_Kind_NotRootEntity__'
for namespace, by_kind in six.iteritems(entity_stats.entities_notroot):
for kind, entity_fields in six.iteritems(by_kind):
builtin_fields = entity_stats.builtin_indexes_notroot[namespace][kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind,
namespace=namespace))
stats_kind = u'__Stat_Ns_Kind__'
entity_stats_by_ns_kind = defaultdict(lambda: defaultdict(CountBytes))
for namespace, by_kind in six.iteritems(entity_stats.entities_root):
for kind, fields in six.iteritems(by_kind):
entity_stats_by_ns_kind[namespace][kind] += fields
for namespace, by_kind in six.iteritems(entity_stats.entities_notroot):
for kind, fields in six.iteritems(by_kind):
entity_stats_by_ns_kind[namespace][kind] += fields
builtin_stats_by_ns_kind = defaultdict(lambda: defaultdict(CountBytes))
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_root):
for kind, fields in six.iteritems(by_kind):
builtin_stats_by_ns_kind[namespace][kind] += fields
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_notroot):
for kind, fields in six.iteritems(by_kind):
builtin_stats_by_ns_kind[namespace][kind] += fields
composite_stats_by_ns_kind = defaultdict(lambda: defaultdict(CountBytes))
for namespace, by_index in six.iteritems(composite_stats):
for (index_id, kind), fields in six.iteritems(by_index):
composite_stats_by_ns_kind[namespace][kind] += fields
for namespace, by_kind in six.iteritems(entity_stats_by_ns_kind):
for kind, entity_fields in six.iteritems(by_kind):
builtin_fields = builtin_stats_by_ns_kind[namespace][kind]
composite_fields = composite_stats_by_ns_kind[namespace][kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'composite_index_count': composite_fields.count,
'composite_index_bytes': composite_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes +
composite_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind,
namespace=namespace))
stats_kind = u'__Stat_Kind_IsRootEntity__'
root_entity_stats_by_kind = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.entities_root):
for kind, fields in six.iteritems(by_kind):
root_entity_stats_by_kind[kind] += fields
root_builtin_stats_by_kind = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_root):
for kind, fields in six.iteritems(by_kind):
root_builtin_stats_by_kind[kind] += fields
for kind, entity_fields in six.iteritems(root_entity_stats_by_kind):
builtin_fields = root_builtin_stats_by_kind[kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind))
stats_kind = u'__Stat_Kind_NotRootEntity__'
notroot_entity_stats_by_kind = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.entities_notroot):
for kind, fields in six.iteritems(by_kind):
notroot_entity_stats_by_kind[kind] += fields
notroot_builtin_stats_by_kind = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_notroot):
for kind, fields in six.iteritems(by_kind):
notroot_builtin_stats_by_kind[kind] += fields
for kind, entity_fields in six.iteritems(notroot_entity_stats_by_kind):
builtin_fields = notroot_builtin_stats_by_kind[kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind))
stats_kind = u'__Stat_Kind__'
entity_stats_by_kind = defaultdict(CountBytes)
for kind, fields in six.iteritems(root_entity_stats_by_kind):
entity_stats_by_kind[kind] += fields
for kind, fields in six.iteritems(notroot_entity_stats_by_kind):
entity_stats_by_kind[kind] += fields
builtin_stats_by_kind = defaultdict(CountBytes)
for kind, fields in six.iteritems(root_builtin_stats_by_kind):
builtin_stats_by_kind[kind] += fields
for kind, fields in six.iteritems(notroot_builtin_stats_by_kind):
builtin_stats_by_kind[kind] += fields
composite_stats_by_kind = defaultdict(CountBytes)
for (index_id, kind), fields in six.iteritems(composite_stats_by_index):
composite_stats_by_kind[kind] += fields
for kind, entity_fields in six.iteritems(entity_stats_by_kind):
builtin_fields = builtin_stats_by_kind[kind]
composite_fields = composite_stats_by_kind[kind]
props = {'kind_name': kind, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'composite_index_count': composite_fields.count,
'composite_index_bytes': composite_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes +
composite_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, kind))
stats_kind = u'__Stat_Namespace__'
composite_stats_by_ns = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(composite_stats):
composite_stats_by_ns[namespace] += sum(six.itervalues(by_kind),
CountBytes())
entity_stats_by_ns = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.entities_root):
entity_stats_by_ns[namespace] += sum(six.itervalues(by_kind), CountBytes())
for namespace, by_kind in six.iteritems(entity_stats.entities_notroot):
entity_stats_by_ns[namespace] += sum(six.itervalues(by_kind), CountBytes())
builtin_stats_by_ns = defaultdict(CountBytes)
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_root):
builtin_stats_by_ns[namespace] += sum(six.itervalues(by_kind), CountBytes())
for namespace, by_kind in six.iteritems(entity_stats.builtin_indexes_notroot):
builtin_stats_by_ns[namespace] += sum(six.itervalues(by_kind), CountBytes())
for namespace, entity_fields in six.iteritems(entity_stats_by_ns):
builtin_fields = builtin_stats_by_ns[namespace]
composite_fields = composite_stats_by_ns[namespace]
props = {'subject_namespace': namespace, 'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'composite_index_count': composite_fields.count,
'composite_index_bytes': composite_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes +
composite_fields.bytes}
if namespace:
entities.append(fill_entity(project_id, stats_kind, props, namespace))
else:
entities.append(fill_entity(project_id, stats_kind, props, id_=1))
stats_kind = u'__Stat_Ns_Total__'
name = u'total_entity_usage'
for namespace, entity_fields in six.iteritems(entity_stats_by_ns):
builtin_fields = builtin_stats_by_ns[namespace]
composite_fields = composite_stats_by_ns[namespace]
props = {'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'composite_index_count': composite_fields.count,
'composite_index_bytes': composite_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes +
composite_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name,
namespace=namespace))
stats_kind = u'__Stat_Total__'
name = u'total_entity_usage'
entity_fields = sum(six.itervalues(entity_stats_by_ns), CountBytes())
builtin_fields = sum(six.itervalues(builtin_stats_by_ns), CountBytes())
composite_fields = sum(six.itervalues(composite_stats_by_ns), CountBytes())
props = {'timestamp': timestamp,
'builtin_index_count': builtin_fields.count,
'builtin_index_bytes': builtin_fields.bytes,
'count': entity_fields.count, 'entity_bytes': entity_fields.bytes,
'composite_index_count': composite_fields.count,
'composite_index_bytes': composite_fields.bytes,
'bytes': entity_fields.bytes + builtin_fields.bytes +
composite_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name))
prop_stats = project_stats.property_stats
stats_kind = u'__Stat_Ns_PropertyType_PropertyName_Kind__'
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
type_name = StatsPropTypes.NAMES[prop_type]
for prop_name, entity_fields in six.iteritems(by_name):
name = u'_'.join([type_name, prop_name, kind])
index_fields = prop_stats.index_stats[namespace][kind][prop_type]\
[prop_name]
props = {'kind_name': kind, 'timestamp': timestamp,
'property_type': type_name, 'property_name': prop_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name,
namespace=namespace))
stats_kind = u'__Stat_Ns_PropertyType_Kind__'
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
type_name = StatsPropTypes.NAMES[prop_type]
name = u'_'.join([type_name, kind])
entity_fields = sum(six.itervalues(by_name), CountBytes())
index_fields = sum(
six.itervalues(prop_stats.index_stats[namespace][kind][prop_type]),
CountBytes())
props = {'kind_name': kind, 'timestamp': timestamp,
'property_type': type_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name,
namespace=namespace))
stats_kind = u'__Stat_Ns_PropertyName_Kind__'
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
combined_entities = defaultdict(CountBytes)
combined_indexes = defaultdict(CountBytes)
for prop_type, by_name in six.iteritems(by_type):
for prop_name, fields in six.iteritems(by_name):
combined_entities[prop_name] += fields
combined_indexes[prop_name] += prop_stats.index_stats[namespace]\
[kind][prop_type][prop_name]
for prop_name, entity_fields in six.iteritems(combined_entities):
name = u'_'.join([prop_name, kind])
index_fields = combined_indexes[prop_name]
props = {'kind_name': kind, 'timestamp': timestamp,
'property_name': prop_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name,
namespace=namespace))
stats_kind = u'__Stat_Ns_PropertyType__'
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
combined_entities = defaultdict(CountBytes)
combined_indexes = defaultdict(CountBytes)
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
combined_entities[prop_type] += sum(
six.itervalues(by_name), CountBytes())
combined_indexes[prop_type] += sum(
six.itervalues(prop_stats.index_stats[namespace][kind][prop_type]),
CountBytes())
for prop_type, entity_fields in six.iteritems(combined_entities):
type_name = StatsPropTypes.NAMES[prop_type]
index_fields = combined_indexes[prop_type]
props = {'timestamp': timestamp, 'property_type': type_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, type_name,
namespace=namespace))
stats_kind = u'__Stat_PropertyName_Kind__'
combined_entities = defaultdict(lambda: defaultdict(CountBytes))
combined_indexes = defaultdict(lambda: defaultdict(CountBytes))
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
for prop_name, fields in six.iteritems(by_name):
combined_entities[prop_name][kind] += fields
combined_indexes[prop_name][kind] += prop_stats.index_stats\
[namespace][kind][prop_type][prop_name]
for prop_name, by_kind in six.iteritems(combined_entities):
for kind, entity_fields in six.iteritems(by_kind):
index_fields = combined_indexes[prop_name][kind]
name = u'_'.join([prop_name, kind])
props = {'timestamp': timestamp, 'kind_name': kind,
'property_name': prop_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name))
stats_kind = u'__Stat_PropertyType_Kind__'
combined_entities = defaultdict(lambda: defaultdict(CountBytes))
combined_indexes = defaultdict(lambda: defaultdict(CountBytes))
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
combined_entities[prop_type][kind] += sum(six.itervalues(by_name),
CountBytes())
combined_indexes[prop_type][kind] += sum(
six.itervalues(prop_stats.index_stats[namespace][kind][prop_type]),
CountBytes())
for prop_type, by_kind in six.iteritems(combined_entities):
type_name = StatsPropTypes.NAMES[prop_type]
for kind, entity_fields in six.iteritems(by_kind):
index_fields = combined_indexes[prop_type][kind]
name = u'_'.join([type_name, kind])
props = {'timestamp': timestamp, 'kind_name': kind,
'property_type': type_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name))
stats_kind = u'__Stat_PropertyType_PropertyName_Kind__'
entity_props_by_type_name_kind = defaultdict(
lambda: defaultdict(lambda: defaultdict(CountBytes)))
index_props_by_type_name_kind = defaultdict(
lambda: defaultdict(lambda: defaultdict(CountBytes)))
for namespace, by_kind in six.iteritems(prop_stats.entity_stats):
for kind, by_type in six.iteritems(by_kind):
for prop_type, by_name in six.iteritems(by_type):
for prop_name, entity_fields in six.iteritems(by_name):
entity_props_by_type_name_kind[prop_type][prop_name][kind] += \
entity_fields
index_props_by_type_name_kind[prop_type][prop_name][kind] += \
prop_stats.index_stats[namespace][kind][prop_type][prop_name]
for prop_type, by_name in six.iteritems(entity_props_by_type_name_kind):
type_name = StatsPropTypes.NAMES[prop_type]
for prop_name, by_kind in six.iteritems(by_name):
for kind, entity_fields in six.iteritems(by_kind):
index_fields = index_props_by_type_name_kind[prop_type][prop_name][kind]
name = u'_'.join([type_name, prop_name, kind])
props = {'timestamp': timestamp, 'kind_name': kind,
'property_type': type_name, 'property_name': prop_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, name))
stats_kind = u'__Stat_PropertyType__'
for prop_type, by_name in six.iteritems(entity_props_by_type_name_kind):
type_name = StatsPropTypes.NAMES[prop_type]
entity_fields = sum(
(sum(six.itervalues(by_kind), CountBytes())
for by_kind in six.itervalues(by_name)), CountBytes())
index_fields = sum(
(sum(six.itervalues(by_kind), CountBytes())
for by_kind in six.itervalues(index_props_by_type_name_kind[prop_type])),
CountBytes())
props = {'timestamp': timestamp, 'property_type': type_name,
'builtin_index_count': index_fields.count,
'builtin_index_bytes': index_fields.bytes,
'count': entity_fields.count,
'entity_bytes': entity_fields.bytes,
'bytes': entity_fields.bytes + index_fields.bytes}
entities.append(fill_entity(project_id, stats_kind, props, type_name))
return entities
| 48.782258
| 80
| 0.704497
| 3,013
| 24,196
| 5.291736
| 0.041487
| 0.031234
| 0.070246
| 0.036126
| 0.888673
| 0.861139
| 0.819995
| 0.796726
| 0.770823
| 0.7463
| 0
| 0.000723
| 0.199289
| 24,196
| 495
| 81
| 48.880808
| 0.822236
| 0.051579
| 0
| 0.649758
| 0
| 0
| 0.09782
| 0.026896
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004831
| false
| 0
| 0.021739
| 0
| 0.031401
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e46d9a6d63997ac96a71dd00e0fff5f78c1de048
| 2,187
|
py
|
Python
|
course5/model.py
|
GT-AcerZhang/ReinforcementLearning
|
77347af3202051e8661a22cf2f955cbbca1472af
|
[
"Apache-2.0"
] | 21
|
2020-11-06T07:05:23.000Z
|
2021-09-04T04:45:18.000Z
|
course5/model.py
|
GT-AcerZhang/ReinforcementLearning
|
77347af3202051e8661a22cf2f955cbbca1472af
|
[
"Apache-2.0"
] | 1
|
2020-11-19T09:40:50.000Z
|
2020-11-20T01:00:30.000Z
|
course5/model.py
|
GT-AcerZhang/ReinforcementLearning
|
77347af3202051e8661a22cf2f955cbbca1472af
|
[
"Apache-2.0"
] | 3
|
2021-03-09T02:57:05.000Z
|
2021-05-04T05:24:09.000Z
|
import parl
from parl import layers
class Model(parl.Model):
def __init__(self, act_dim):
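# Four stride-2 conv layers form a shared feature trunk; a 512-unit fully
# connected layer then feeds two heads: policy logits (act_dim) and a
# scalar state value.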
self.conv1 = layers.conv2d(num_filters=32, filter_size=3, stride=2, padding=1, act='relu')
self.conv2 = layers.conv2d(num_filters=32, filter_size=3, stride=2, padding=1, act='relu')
self.conv3 = layers.conv2d(num_filters=32, filter_size=3, stride=2, padding=1, act='relu')
self.conv4 = layers.conv2d(num_filters=32, filter_size=3, stride=2, padding=1, act='relu')
self.fc = layers.fc(size=512, act='relu')
self.policy_fc = layers.fc(size=act_dim)
self.value_fc = layers.fc(size=1)
def policy(self, obs):
"""
Args:
obs: input image, with shape [N, C, H, W]
Returns:
policy_logits: N * ACTION_DIM
"""
conv1 = self.conv1(obs)
conv2 = self.conv2(conv1)
conv3 = self.conv3(conv2)
conv4 = self.conv4(conv3)
flatten = layers.flatten(conv4, axis=1)
fc_output = self.fc(flatten)
policy_logits = self.policy_fc(fc_output)
return policy_logits
def value(self, obs):
"""
Args:
obs: input image, with shape [N, C, H, W]
Returns:
values: N
"""
conv1 = self.conv1(obs)
conv2 = self.conv2(conv1)
conv3 = self.conv3(conv2)
conv4 = self.conv4(conv3)
flatten = layers.flatten(conv4, axis=1)
fc_output = self.fc(flatten)
values = self.value_fc(fc_output)
values = layers.squeeze(values, axes=[1])
return values
def policy_and_value(self, obs):
"""
Args:
obs: input image, with shape [N, C, H, W]
Returns:
policy_logits: N * ACTION_DIM
values: N
"""
conv1 = self.conv1(obs)
conv2 = self.conv2(conv1)
conv3 = self.conv3(conv2)
conv4 = self.conv4(conv3)
flatten = layers.flatten(conv4, axis=1)
fc_output = self.fc(flatten)
policy_logits = self.policy_fc(fc_output)
values = self.value_fc(fc_output)
values = layers.squeeze(values, axes=[1])
return policy_logits, values
| 27.683544
| 98
| 0.572474
| 285
| 2,187
| 4.263158
| 0.175439
| 0.046091
| 0.045267
| 0.072428
| 0.807407
| 0.807407
| 0.807407
| 0.807407
| 0.807407
| 0.807407
| 0
| 0.048026
| 0.304984
| 2,187
| 78
| 99
| 28.038462
| 0.751316
| 0.112026
| 0
| 0.585366
| 0
| 0
| 0.011161
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.04878
| 0
| 0.243902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e4b4811a628951cfbfcb8ab4e5bc2ec79854c3e7
| 21
|
py
|
Python
|
batchglm/api/models/__init__.py
|
SabrinaRichter/batchglm
|
2da429f895f7eb577a835da334f4ae146a9422ce
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/api/models/__init__.py
|
SabrinaRichter/batchglm
|
2da429f895f7eb577a835da334f4ae146a9422ce
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/api/models/__init__.py
|
SabrinaRichter/batchglm
|
2da429f895f7eb577a835da334f4ae146a9422ce
|
[
"BSD-3-Clause"
] | null | null | null |
from . import glm_nb
| 10.5
| 20
| 0.761905
| 4
| 21
| 3.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e4f6daab7f763498f7398f221af9ff2d630eeac2
| 111
|
py
|
Python
|
app/language_features/importing/test_imported.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | null | null | null |
app/language_features/importing/test_imported.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | null | null | null |
app/language_features/importing/test_imported.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | 2
|
2021-06-03T02:59:49.000Z
|
2021-06-14T20:42:12.000Z
|
import imported
# only works if no __init__.py in this dir
def test_doit():
assert imported.doit() == 999
| 18.5
| 42
| 0.711712
| 18
| 111
| 4.111111
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033708
| 0.198198
| 111
| 5
| 43
| 22.2
| 0.797753
| 0.36036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
90036aa5f697c75eee23f19d736aff8964f98217
| 166
|
py
|
Python
|
lib/python2.7/site-packages/braintree/exceptions/unexpected_error.py
|
ervinpepic/E-commerce
|
2c15255d1730728cf35c166b9f88cffcb99f5323
|
[
"MIT"
] | 182
|
2015-01-09T05:26:46.000Z
|
2022-03-16T14:10:06.000Z
|
lib/python2.7/site-packages/braintree/exceptions/unexpected_error.py
|
ervinpepic/E-commerce
|
2c15255d1730728cf35c166b9f88cffcb99f5323
|
[
"MIT"
] | 95
|
2015-02-24T23:29:56.000Z
|
2022-03-13T03:27:58.000Z
|
lib/python2.7/site-packages/braintree/exceptions/unexpected_error.py
|
ervinpepic/E-commerce
|
2c15255d1730728cf35c166b9f88cffcb99f5323
|
[
"MIT"
] | 93
|
2015-02-19T17:59:06.000Z
|
2022-03-19T17:01:25.000Z
|
from braintree.exceptions.braintree_error import BraintreeError
class UnexpectedError(BraintreeError):
""" Raised for unknown or unexpected errors. """
pass
| 27.666667
| 63
| 0.783133
| 17
| 166
| 7.588235
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 166
| 5
| 64
| 33.2
| 0.908451
| 0.240964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
900714459ee5149c86db44d4526c589074f03350
| 121
|
py
|
Python
|
scripts/mat_animation/__init__.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
scripts/mat_animation/__init__.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
scripts/mat_animation/__init__.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
# from .robot_model import Plotting
from .MatGymLikeENv import Plotting, PlottingComplex, MatTiagoEnvComplex, MatTiagoEnv
| 60.5
| 85
| 0.859504
| 12
| 121
| 8.583333
| 0.75
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 121
| 2
| 85
| 60.5
| 0.936364
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
901f29a3e8ad2425afab26192199682bdfcc85ed
| 58
|
py
|
Python
|
config/mylabs_c/client/image/__init__.py
|
happyfaults/pymylabs
|
d5ef98b3422cb0f58cd4fa63de6c3756eba7cb16
|
[
"MIT"
] | null | null | null |
config/mylabs_c/client/image/__init__.py
|
happyfaults/pymylabs
|
d5ef98b3422cb0f58cd4fa63de6c3756eba7cb16
|
[
"MIT"
] | null | null | null |
config/mylabs_c/client/image/__init__.py
|
happyfaults/pymylabs
|
d5ef98b3422cb0f58cd4fa63de6c3756eba7cb16
|
[
"MIT"
] | null | null | null |
from .. import Interactor
class App(Interactor):
pass
| 14.5
| 25
| 0.724138
| 7
| 58
| 6
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 58
| 4
| 26
| 14.5
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
9047ae9677b2b05ad8923352b65e5d86e629447b
| 28
|
py
|
Python
|
venv/Lib/site-packages/psychopy/tests/test_all_visual/__init__.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/psychopy/tests/test_all_visual/__init__.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/psychopy/tests/test_all_visual/__init__.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
from psychopy import visual
| 14
| 27
| 0.857143
| 4
| 28
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f4d0620423efd2d11c2a6568d4b52636aabae2a
| 75,499
|
py
|
Python
|
src/original_variant_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
src/original_variant_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
src/original_variant_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
"""
Created by Aslan Satary Dizaji (a.satarydizaji@eni-g.de)
"""
import copy
import gc
import glob
import numpy as np
import os
import random
import scipy.io
import shutil
import time
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.optim
import torchvision.transforms as transforms
from PIL import Image
from scipy.spatial.distance import pdist, squareform
from sklearn.decomposition import PCA
from torch.hub import load_state_dict_from_url
from intrinsic_dimension_2NN import estimate
from layer_rotation import layer_rotation
from mutual_info_EDGE import EDGE
from original_variant_AlexNet_model import DNNforVPL
from reading_stimuli import reading_stimuli
# The pretrained weights of AlexNet
model_urls = {'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth'}
pretrained_dict = load_state_dict_from_url(model_urls['alexnet'])
### A class for formatting different metrics of accuracy during training and transfer
class AverageMeter(object):
"""Computes and stores the average and current values"""
def __init__(self, name, fmt = ':f'):
self.name = name
self.fmt = fmt
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n = 1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def __str__(self):
fmtstr = '{name} {val' + self.fmt + '}'
if self.name == 'Accuracy':
self.__dict__['val'] = self.val.item()
self.__dict__['avg'] = self.avg.item()
self.__dict__['sum'] = self.sum.item()
output = fmtstr.format(**self.__dict__)
else:
output = fmtstr.format(**self.__dict__)
return output
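# Minimal sanity sketch for AverageMeter (illustrative, not part of the
# original script): two updates, 1.0 weighted by n = 2 and then 4.0,
# give sum = 6.0 and count = 3, hence avg = 2.0.
#     meter = AverageMeter('Loss', ':.4e')
#     meter.update(1.0, n = 2)
#     meter.update(4.0)
#     assert meter.avg == 2.0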
### A class for showing a progress bar during training and transfer
class ProgressMeter(object):
def __init__(self, num_batches, meters, prefix = ""):
self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
self.meters = meters
self.prefix = prefix
def display(self, batch):
entries = [self.prefix + self.batch_fmtstr.format(batch)]
entries += [str(meter) for meter in self.meters]
print('\t'.join(entries))
def _get_batch_fmtstr(self, num_batches):
num_digits = len(str(num_batches // 1))
fmt = '{:' + str(num_digits) + 'd}'
return '[' + fmt + '/' + fmt.format(num_batches) + ']'
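# Note: num_batches // 1 above is a no-op; the expression just counts the
# digits of num_batches. For example, with num_batches = 180 this yields
# fmt = '{:3d}' and a batch format string of '[{:3d}/180]'.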
### A function for computing accuracy during training and transfer
def accuracy(output, target, topk = 1):
"""Computes the accuracy over the top1 predictions"""
with torch.no_grad():
batch_size = target.size(0)
_, pred = output.topk(1, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
correct_k = correct[:1].view(-1).float().sum(0, keepdim = True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
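# Worked example for accuracy() (illustrative, not from the original
# script): with two samples whose logits both favor class 1 and targets
# [1, 0], only the first prediction matches, so top-1 accuracy is 50%.
#     logits = torch.tensor([[0.1, 0.9], [0.2, 0.8]])
#     targets = torch.tensor([1, 0])
#     accuracy(logits, targets)  # -> [tensor([50.])]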
### A function for adjusting the learning rate during training
def adjust_learning_rate(optimizer, session, lr):
"""Sets the learning rate to the initial LR decayed by 2 every 1 session"""
lr = lr * (0.5 ** (session))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
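# Worked example: with the initial lr = 1e-5 used below, session 0 keeps
# 1e-5, session 1 halves it to 5e-6, and session 2 gives 2.5e-6, since
# the decayed rate is lr * 0.5 ** session.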
### A function for saving the checkpoints during training
def save_checkpoint(state, is_best, group, filename):
""" Saves the checkpoints during training """
torch.save(state, filename)
if is_best:
shutil.copyfile(filename, 'DNNforVPL_best_' + group + '.pth.tar')
### A function which performs different experiments with the original variant of AlexNet
def original_variant_alexnet(parent_folder = 'Original Variant of Alexnet_New Results', number_simulation = 10, number_PCA_component = 20, num_sample_artiphysiology = 1000):
### Initializing the main variables
x_sample_artiphysiology_index = np.zeros((num_sample_artiphysiology, 3), dtype = np.int64)
for i in range(0, num_sample_artiphysiology):
x_sample_artiphysiology_index[i, 0] = random.randrange(1)
x_sample_artiphysiology_index[i, 1] = random.randrange(20)
x_sample_artiphysiology_index[i, 2] = random.randrange(180)
number_group = 4
number_layer = 5
number_layer_freeze = 6
all_simulation_training_accuracy = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_transfer_accuracy = np.zeros((number_simulation, number_group, number_layer_freeze, 10), dtype = np.float32)
all_simulation_all_MI_original = np.zeros((number_simulation, number_group, number_layer, number_layer_freeze), dtype = np.float32)
all_simulation_all_MI_noise = np.zeros((number_simulation, number_group, number_layer, number_layer_freeze), dtype = np.float32)
all_simulation_all_ID = np.zeros((number_simulation, number_group, number_layer, number_layer_freeze, 19), dtype = np.float32)
all_x_sample_ID = np.zeros((number_simulation, number_group), dtype = np.float32)
all_simulation_training_accuracy_permuted = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_all_ID_permuted = np.zeros((number_simulation, number_group, number_layer, number_layer_freeze, 19), dtype = np.float32)
all_PCA_explained_variance_layer_1 = np.zeros((number_simulation, number_group, number_layer_freeze, number_PCA_component), dtype = np.float32)
all_PCA_explained_variance_layer_2 = np.zeros((number_simulation, number_group, number_layer_freeze, number_PCA_component), dtype = np.float32)
all_PCA_explained_variance_layer_3 = np.zeros((number_simulation, number_group, number_layer_freeze, number_PCA_component), dtype = np.float32)
all_PCA_explained_variance_layer_4 = np.zeros((number_simulation, number_group, number_layer_freeze, number_PCA_component), dtype = np.float32)
all_PCA_explained_variance_layer_5 = np.zeros((number_simulation, number_group, number_layer_freeze, number_PCA_component), dtype = np.float32)
all_simulation_weight_change_layer_1 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_weight_change_layer_2 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_weight_change_layer_3 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_weight_change_layer_4 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_weight_change_layer_5 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_layer_rotation_layer_1 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_layer_rotation_layer_2 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_layer_rotation_layer_3 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_layer_rotation_layer_4 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
all_simulation_layer_rotation_layer_5 = np.zeros((number_simulation, number_group, number_layer_freeze, 180), dtype = np.float32)
os.mkdir(parent_folder)
for simulation_counter in range(number_simulation):
print('Simulation: ', simulation_counter + 1)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1))
group_counter = -1
for group_training in ['group1', 'group2', 'group3', 'group4']:
gc.collect()
best_acc1 = 0
group_counter = group_counter + 1
print('Group: ', group_training)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training)
### Training Stimuli
# The structure of image names in different groups
if group_training == 'group1':
SF_training = [170]
Ori_training = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group2':
SF_training = [53, 170, 276]
Ori_training = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group3':
SF_training = [170]
Ori_training = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
elif group_training == 'group4':
SF_training = [53, 170, 276]
Ori_training = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
# Reading all images
if group_training == 'group1' or group_training == 'group2':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/training_groups1&2/*.TIFF')
elif group_training == 'group3' or group_training == 'group4':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/training_groups3&4/*.TIFF')
file_names = [os.path.basename(x) for x in file_name_paths]
x_val_training, y_val_training, z_val_training, x_tensor_training, y_tensor_training = reading_stimuli(file_names = file_names, file_name_paths = file_name_paths, orientation = Ori_training, spatial_frequency = SF_training)
x_tensor_training = torch.stack(x_tensor_training)
y_tensor_training = torch.stack(y_tensor_training)
print(x_tensor_training.shape, y_tensor_training.shape)
### SF Transfer Stimuli
# The structure of image names in different groups
if group_training == 'group1':
group_transfer = 'group1'
SF_transfer = [96]
Ori_transfer = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group2':
group_transfer = 'group2'
SF_transfer = [96]
Ori_transfer = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group3':
group_transfer = 'group3'
SF_transfer = [96]
Ori_transfer = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
elif group_training == 'group4':
group_transfer = 'group4'
SF_transfer = [96]
Ori_transfer = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
# Reading all images
if group_transfer == 'group1' or group_transfer == 'group2':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/transferSF_groups1&2/*.TIFF')
elif group_transfer == 'group3' or group_transfer == 'group4':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/transferSF_groups3&4/*.TIFF')
file_names = [os.path.basename(x) for x in file_name_paths]
x_val_transfer, y_val_transfer, z_val_transfer, x_tensor_transfer, y_tensor_transfer = reading_stimuli(file_names = file_names, file_name_paths = file_name_paths, orientation = Ori_transfer, spatial_frequency = SF_transfer)
x_tensor_transfer = torch.stack(x_tensor_transfer)
y_tensor_transfer = torch.stack(y_tensor_transfer)
print(x_tensor_transfer.shape, y_tensor_transfer.shape)
layer_freeze_counter = -1
for layer_freeze in [None, 0, 3, 6, 8, 10]:
layer_freeze_counter = layer_freeze_counter + 1
print('Frozen Layer: ', layer_freeze)
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
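# (The shortest side is scaled to 256 with the aspect ratio preserved,
# matching standard ImageNet-style preprocessing ahead of the 224 x 224
# center crop below.)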
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_training) * len(Ori_training)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Select GPU
global device
gpu = 0
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Use GPU: {} for training".format(gpu))
# Load the PyTorch model
model = DNNforVPL()
model_dict = model.state_dict()
# Filter out unnecessary keys
pretrained_dict_model = {k : v for k, v in pretrained_dict.items() if k in model_dict}
# Overwrite entries in the existing state dict
model_dict.update(pretrained_dict_model)
# Load the new state dict
model.load_state_dict(model_dict)
# Initialize by zero the weights of the fully-connected layer of the model
nn.init.zeros_(model.classifier[0].weight)
nn.init.zeros_(model.classifier[0].bias)
# Set all the parameters of the model to be trainable
for param in model.parameters():
param.requires_grad = True
if layer_freeze is not None:
model.features[layer_freeze].weight.requires_grad = False
model.features[layer_freeze].bias.requires_grad = False
# Send the model to GPU/CPU
model = model.to(device)
# Model summary
print(model)
cudnn.benchmark = True
### Extracting the activations of convolutional layers of the network per transfer stimulus before training
# The indices of consecutive convolutional layers: (0, 3, 6, 8, 10)
# The sizes of consecutive convolutional layers: (55, 27, 13, 13, 13)
# The positions of central units of consecutive convolutional layers: (27, 13, 6, 6, 6)
# The number of channels of consecutive convolutional layers: (64, 192, 384, 256, 256)
if layer_freeze is None:
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/before_training')
saving_folder = parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/before_training'
# The target stimuli
feature_sample_artiphysiology = np.zeros((num_sample_artiphysiology, 3), dtype = np.int64)
all_x_sample = np.zeros((num_sample_artiphysiology, 3, 224, 224), dtype = np.float32)
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
all_unit_activity_Conv2d_3 = np.zeros((num_sample_artiphysiology, 384, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_4 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_5 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
unit_activity_layer_11 = model.features[11](unit_activity_layer_10)
unit_activity_layer_12 = model.features[12](unit_activity_layer_11)
all_x_sample[i, :] = x_sample.detach().cpu().clone().numpy()
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
# Saving the properties of sample stimuli used for calculating intrinsic dimension
scipy.io.savemat(saving_folder + '/feature_sample_artiphysiology.mat', mdict = {'feature_sample_artiphysiology': feature_sample_artiphysiology})
### Calculating the intrinsic dimension
all_x_sample_ID[simulation_counter, group_counter] = estimate(squareform(pdist(all_x_sample.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 0, layer_freeze_counter, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 1, layer_freeze_counter, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 2, layer_freeze_counter, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 3, layer_freeze_counter, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 4, layer_freeze_counter, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 0, layer_freeze_counter, 0] = all_simulation_all_ID[simulation_counter, group_counter, 0, layer_freeze_counter, 0]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 1, layer_freeze_counter, 0] = all_simulation_all_ID[simulation_counter, group_counter, 1, layer_freeze_counter, 0]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 2, layer_freeze_counter, 0] = all_simulation_all_ID[simulation_counter, group_counter, 2, layer_freeze_counter, 0]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 3, layer_freeze_counter, 0] = all_simulation_all_ID[simulation_counter, group_counter, 3, layer_freeze_counter, 0]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 4, layer_freeze_counter, 0] = all_simulation_all_ID[simulation_counter, group_counter, 4, layer_freeze_counter, 0]
# Define the main learning parameters
lr = 0.00001
momentum = 0.9
weight_decay = 0.0001
# Define the loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda(gpu)
optimizer = torch.optim.SGD(model.parameters(), lr, momentum = momentum, weight_decay = weight_decay)
# Save the initial weights of the convolutional layers of the model
Conv2d_1_0 = copy.deepcopy(model.features[0].weight)
Conv2d_2_0 = copy.deepcopy(model.features[3].weight)
Conv2d_3_0 = copy.deepcopy(model.features[6].weight)
Conv2d_4_0 = copy.deepcopy(model.features[8].weight)
Conv2d_5_0 = copy.deepcopy(model.features[10].weight)
# Define the main training parameters
start_session = 0
sessions = 1
z_val_shuffle = copy.deepcopy(z_val_training)
for i in range(len(SF_training)):
for j in range(len(Ori_training)):
random.shuffle(z_val_shuffle[i, j, :])
for session in range(start_session, sessions):
# Adjust the learning rate
adjust_learning_rate(optimizer, session, lr)
# Train on the training set
epochs = 180
ID_counter = 0
for epoch in range(epochs):
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, epoch])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_train = torch.index_select(x_tensor_training, 0, indices)
y_train = torch.index_select(y_tensor_training, 0, indices)
y_train = y_train.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(epochs, [batch_time, losses, top1], prefix = ("Training >>> Session: " + str(session) + " Epoch: [{}]").format(epoch))
# Switch to training mode
model.train()
with torch.set_grad_enabled(True):
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_train = x_train.cuda(gpu)
y_train = y_train.cuda(gpu)
# Compute output
output = model(x_train, x_ref)
loss = criterion(output, y_train)
# Measure accuracy and record loss
acc1 = accuracy(output, y_train, topk = 1)
losses.update(loss.item(), x_train.size(0))
top1.update(acc1[0], x_train.size(0))
# Compute gradient and perform SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Save the validation accuracy for plotting
all_simulation_training_accuracy[simulation_counter, group_counter, layer_freeze_counter, epoch] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(epoch)
# Remember the best accuracy
is_best = all_simulation_training_accuracy[simulation_counter, group_counter, layer_freeze_counter, epoch] >= best_acc1
best_acc1 = max(all_simulation_training_accuracy[simulation_counter, group_counter, layer_freeze_counter, epoch], best_acc1)
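# The ten assignments below track two per-layer diagnostics each epoch:
# the relative Frobenius-norm weight change, ||W_t - W_0||_F / ||W_t||_F,
# computed against the deep-copied initial weights Conv2d_*_0, and the
# layer rotation (angular deviation) between current and initial weights.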
all_simulation_weight_change_layer_1[simulation_counter, group_counter, layer_freeze_counter, epoch] = (torch.pow(torch.sum(torch.pow(model.features[0].weight - Conv2d_1_0, 2)), 0.5) / torch.pow(torch.sum(torch.pow(model.features[0].weight, 2)), 0.5)).item()
all_simulation_weight_change_layer_2[simulation_counter, group_counter, layer_freeze_counter, epoch] = (torch.pow(torch.sum(torch.pow(model.features[3].weight - Conv2d_2_0, 2)), 0.5) / torch.pow(torch.sum(torch.pow(model.features[3].weight, 2)), 0.5)).item()
all_simulation_weight_change_layer_3[simulation_counter, group_counter, layer_freeze_counter, epoch] = (torch.pow(torch.sum(torch.pow(model.features[6].weight - Conv2d_3_0, 2)), 0.5) / torch.pow(torch.sum(torch.pow(model.features[6].weight, 2)), 0.5)).item()
all_simulation_weight_change_layer_4[simulation_counter, group_counter, layer_freeze_counter, epoch] = (torch.pow(torch.sum(torch.pow(model.features[8].weight - Conv2d_4_0, 2)), 0.5) / torch.pow(torch.sum(torch.pow(model.features[8].weight, 2)), 0.5)).item()
all_simulation_weight_change_layer_5[simulation_counter, group_counter, layer_freeze_counter, epoch] = (torch.pow(torch.sum(torch.pow(model.features[10].weight - Conv2d_5_0, 2)), 0.5) / torch.pow(torch.sum(torch.pow(model.features[10].weight, 2)), 0.5)).item()
all_simulation_layer_rotation_layer_1[simulation_counter, group_counter, layer_freeze_counter, epoch] = layer_rotation(model.features[0].weight, Conv2d_1_0)
all_simulation_layer_rotation_layer_2[simulation_counter, group_counter, layer_freeze_counter, epoch] = layer_rotation(model.features[3].weight, Conv2d_2_0)
all_simulation_layer_rotation_layer_3[simulation_counter, group_counter, layer_freeze_counter, epoch] = layer_rotation(model.features[6].weight, Conv2d_3_0)
all_simulation_layer_rotation_layer_4[simulation_counter, group_counter, layer_freeze_counter, epoch] = layer_rotation(model.features[8].weight, Conv2d_4_0)
all_simulation_layer_rotation_layer_5[simulation_counter, group_counter, layer_freeze_counter, epoch] = layer_rotation(model.features[10].weight, Conv2d_5_0)
if (layer_freeze is None or layer_freeze == 0 or layer_freeze == 10) and epoch % 10 == 0:
ID_counter = ID_counter + 1
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
unit_activity_layer_11 = model.features[11](unit_activity_layer_10)
unit_activity_layer_12 = model.features[12](unit_activity_layer_11)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
### Calculating the intrinsic dimension
all_simulation_all_ID[simulation_counter, group_counter, 0, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 1, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 2, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 3, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[simulation_counter, group_counter, 4, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
# Save the checkpoint
save_checkpoint({
'session': session + 1,
'state_dict': model.state_dict(),
'best_acc1': best_acc1,
'optimizer': optimizer.state_dict(),
}, is_best, group_training, 'DNNforVPL_' + group_training + '.pth.tar')
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_transfer) * len(Ori_transfer)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Select GPU
gpu = 0
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Use GPU: {} for transfer".format(gpu))
# Freeze all the parameters of the model for transfer evaluation
for param in model.parameters():
param.requires_grad = False
# Send the model to GPU/CPU
model = model.to(device)
# Model summary
print(model)
cudnn.benchmark = True
# Define the main validation parameters
start_session = 0
sessions = 10
for session in range(start_session, sessions):
z_val_shuffle = copy.deepcopy(z_val_transfer)
for j in range(len(SF_transfer)):
for k in range(len(Ori_transfer)):
random.shuffle(z_val_shuffle[j, k, :])
# Evaluate on the validation set
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, session])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_valid = torch.index_select(x_tensor_transfer, 0, indices)
y_valid = torch.index_select(y_tensor_transfer, 0, indices)
y_valid = y_valid.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(1, [batch_time, losses, top1], prefix = ("Transfer >>> Session: " + str(session) + " Epoch: [{}]").format(1))
# Switch to evaluating mode
model.eval()
with torch.no_grad():
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_valid = x_valid.cuda(gpu)
y_valid = y_valid.cuda(gpu)
# Compute output
output = model(x_valid, x_ref)
loss = criterion(output, y_valid)
# Measure accuracy and record loss
acc1 = accuracy(output, y_valid, topk = 1)
losses.update(loss.item(), x_valid.size(0))
top1.update(acc1[0], x_valid.size(0))
# Save the validation accuracy for plotting
all_simulation_transfer_accuracy[simulation_counter, group_counter, layer_freeze_counter, session - start_session] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(1)
# Remember the best accuracy
is_best = all_simulation_transfer_accuracy[simulation_counter, group_counter, layer_freeze_counter, session - start_session] >= best_acc1
best_acc1 = max(all_simulation_transfer_accuracy[simulation_counter, group_counter, layer_freeze_counter, session - start_session], best_acc1)
### Extracting the activations of convolutional layers of the network per transfer stimulus after training
# The indices of consecutive convolutional layers: (0, 3, 6, 8, 10)
# The sizes of consecutive convolutional layers: (55, 27, 13, 13, 13)
# The positions of central units of consecutive convolutional layers: (27, 13, 6, 6, 6)
# The number of channels of consecutive convolutional layers: (64, 192, 384, 256, 256)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/after_training_' + str(layer_freeze))
saving_folder = parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/after_training_' + str(layer_freeze)
# The target stimuli
feature_sample_artiphysiology = np.zeros((num_sample_artiphysiology, 3), dtype = np.int64)
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
all_unit_activity_Conv2d_3 = np.zeros((num_sample_artiphysiology, 384, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_4 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_5 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
unit_activity_layer_11 = model.features[11](unit_activity_layer_10)
unit_activity_layer_12 = model.features[12](unit_activity_layer_11)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
# Saving the properties of sample stimuli used for calculating intrinsic dimension
scipy.io.savemat(saving_folder + '/feature_sample_artiphysiology.mat', mdict = {'feature_sample_artiphysiology': feature_sample_artiphysiology})
### Calculating the variance explained by PCA
PCA_layer_1 = PCA(n_components = number_PCA_component).fit(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1))
PCA_layer_2 = PCA(n_components = number_PCA_component).fit(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1))
PCA_layer_3 = PCA(n_components = number_PCA_component).fit(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1))
PCA_layer_4 = PCA(n_components = number_PCA_component).fit(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1))
PCA_layer_5 = PCA(n_components = number_PCA_component).fit(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1))
all_PCA_explained_variance_layer_1[simulation_counter, group_counter, layer_freeze_counter, :] = PCA_layer_1.explained_variance_ratio_
all_PCA_explained_variance_layer_2[simulation_counter, group_counter, layer_freeze_counter, :] = PCA_layer_2.explained_variance_ratio_
all_PCA_explained_variance_layer_3[simulation_counter, group_counter, layer_freeze_counter, :] = PCA_layer_3.explained_variance_ratio_
all_PCA_explained_variance_layer_4[simulation_counter, group_counter, layer_freeze_counter, :] = PCA_layer_4.explained_variance_ratio_
all_PCA_explained_variance_layer_5[simulation_counter, group_counter, layer_freeze_counter, :] = PCA_layer_5.explained_variance_ratio_
### Calculating the mutual information of original and nuisance stimuli with layers' activities
# The indices of consecutive convolutional layers: (0, 3, 6, 8, 10)
# The sizes of consecutive convolutional layers: (55, 27, 13, 13, 13)
# The positions of central units of consecutive convolutional layers: (27, 13, 6, 6, 6)
# The number of channels of consecutive convolutional layers: (64, 192, 384, 256, 256)
phase_count = 20
counter = -1
x_tensor_training_original = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 3, 224, 224), dtype = np.float32)
x_tensor_training_noise = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 3, 224, 224), dtype = np.float32)
all_unit_activity_MI_Conv2d_1 = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 64, 55, 55), dtype = np.float32)
all_unit_activity_MI_Conv2d_2 = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 192, 27, 27), dtype = np.float32)
all_unit_activity_MI_Conv2d_3 = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 384, 13, 13), dtype = np.float32)
all_unit_activity_MI_Conv2d_4 = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 256, 13, 13), dtype = np.float32)
all_unit_activity_MI_Conv2d_5 = np.zeros((len(SF_training) * len(Ori_training) * phase_count, 256, 13, 13), dtype = np.float32)
for i in range(len(SF_training)):
for j in range(len(Ori_training)):
phase = np.random.permutation(180)[:phase_count]
for k in range(phase_count):
counter = counter + 1
indices_training_1 = torch.tensor(z_val_training[i, j, phase[k]], dtype = torch.long)
indices_training_2 = torch.tensor(z_val_training[int(len(SF_training) / 2 + 0.5) - 1, j, phase[k]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_training, 0, indices_training_1)
x_sample = x_sample.cuda(gpu)
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
unit_activity_layer_11 = model.features[11](unit_activity_layer_10)
unit_activity_layer_12 = model.features[12](unit_activity_layer_11)
x_tensor_training_original[counter, :] = torch.index_select(x_tensor_training, 0, indices_training_1).detach().cpu().clone().numpy()
x_tensor_training_noise[counter, :] = (torch.index_select(x_tensor_training, 0, indices_training_1) - torch.index_select(x_tensor_training, 0, indices_training_2)).cuda(gpu)[0].detach().cpu().clone().numpy()
all_unit_activity_MI_Conv2d_1[counter, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_MI_Conv2d_2[counter, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_MI_Conv2d_3[counter, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_MI_Conv2d_4[counter, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_MI_Conv2d_5[counter, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
### Calculating the mutual information between the original stimuli and the layers' activities
all_simulation_all_MI_original[simulation_counter, group_counter, 0, layer_freeze_counter] = EDGE(x_tensor_training_original.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_1.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_original[simulation_counter, group_counter, 1, layer_freeze_counter] = EDGE(x_tensor_training_original.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_2.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_original[simulation_counter, group_counter, 2, layer_freeze_counter] = EDGE(x_tensor_training_original.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_3.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_original[simulation_counter, group_counter, 3, layer_freeze_counter] = EDGE(x_tensor_training_original.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_4.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_original[simulation_counter, group_counter, 4, layer_freeze_counter] = EDGE(x_tensor_training_original.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_5.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
### Calculating the mutual information between the nuisance stimuli and the layers' activities
all_simulation_all_MI_noise[simulation_counter, group_counter, 0, layer_freeze_counter] = EDGE(x_tensor_training_noise.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_1.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_noise[simulation_counter, group_counter, 1, layer_freeze_counter] = EDGE(x_tensor_training_noise.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_2.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_noise[simulation_counter, group_counter, 2, layer_freeze_counter] = EDGE(x_tensor_training_noise.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_3.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_noise[simulation_counter, group_counter, 3, layer_freeze_counter] = EDGE(x_tensor_training_noise.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_4.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
all_simulation_all_MI_noise[simulation_counter, group_counter, 4, layer_freeze_counter] = EDGE(x_tensor_training_noise.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1), all_unit_activity_MI_Conv2d_5.mean(axis = 1).reshape(len(SF_training) * len(Ori_training) * phase_count, -1),
U = 10, gamma = [1, 1], epsilon_vector = 'range', eps_range_factor = 0.1, normalize_epsilon = False, ensemble_estimation = 'median', L_ensemble = 5, hashing = 'p-stable', stochastic = False)
### Training with Permuted Labels
if layer_freeze is None or layer_freeze == 0 or layer_freeze == 10:
print('Training with Permuted Labels')
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_training) * len(Ori_training)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Load the PyTorch model
model = DNNforVPL()
model_dict = model.state_dict()
# Filter out unnecessary keys
pretrained_dict_model = {k : v for k, v in pretrained_dict.items() if k in model_dict}
# Overwrite entries in the existing state dict
model_dict.update(pretrained_dict_model)
# Load the new state dict
model.load_state_dict(model_dict)
# Initialize by zero the weights of the fully-connected layer of the model
nn.init.zeros_(model.classifier[0].weight)
nn.init.zeros_(model.classifier[0].bias)
# Set all the parameters of the model to be trainable
for param in model.parameters():
param.requires_grad = True
if layer_freeze is not None:
model.features[layer_freeze].weight.requires_grad = False
model.features[layer_freeze].bias.requires_grad = False
# Send the model to GPU/CPU
model = model.to(device)
cudnn.benchmark = True
# Define the main learning parameters
lr = 0.00001
momentum = 0.9
weight_decay = 0.0001
# Define the loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda(gpu)
optimizer = torch.optim.SGD(model.parameters(), lr, momentum = momentum, weight_decay = weight_decay)
# Define the main training parameters
start_session = 0
sessions = 1
# Random permutation of labels
y_tensor_training_permuted = copy.deepcopy(y_tensor_training)
idx = torch.randperm(y_tensor_training_permuted.nelement())
y_tensor_training_permuted = y_tensor_training_permuted.view(-1)[idx].view(y_tensor_training_permuted.size())
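# The permutation flattens the label tensor, shuffles every entry with a
# single torch.randperm over nelement(), and restores the original shape,
# preserving class frequencies while destroying the image-label pairing.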
for session in range(start_session, sessions):
# Adjust the learning rate
adjust_learning_rate(optimizer, session, lr)
# Train on the training set
epochs = 180
ID_counter = 0
for epoch in range(epochs):
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, epoch])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_train = torch.index_select(x_tensor_training, 0, indices)
y_train = torch.index_select(y_tensor_training_permuted, 0, indices)
y_train = y_train.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(epochs, [batch_time, losses, top1], prefix = ("Training >>> Session: " + str(session) + " Epoch: [{}]").format(epoch))
# Switch to training mode
model.train()
with torch.set_grad_enabled(True):
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_train = x_train.cuda(gpu)
y_train = y_train.cuda(gpu)
# Compute output
output = model(x_train, x_ref)
loss = criterion(output, y_train)
# Measure accuracy and record loss
acc1 = accuracy(output, y_train, topk = 1)
losses.update(loss.item(), x_train.size(0))
top1.update(acc1[0], x_train.size(0))
# Compute gradient and perform SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Save the validation accuracy for plotting
all_simulation_training_accuracy_permuted[simulation_counter, group_counter, layer_freeze_counter, epoch] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(epoch)
# Remember the best accuracy
is_best = all_simulation_training_accuracy_permuted[simulation_counter, group_counter, layer_freeze_counter, epoch] >= best_acc1
best_acc1 = max(all_simulation_training_accuracy_permuted[simulation_counter, group_counter, layer_freeze_counter, epoch], best_acc1)
if epoch % 10 == 0:
ID_counter = ID_counter + 1
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
unit_activity_layer_11 = model.features[11](unit_activity_layer_10)
unit_activity_layer_12 = model.features[12](unit_activity_layer_11)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
### Calculating the intrinsic dimension
all_simulation_all_ID_permuted[simulation_counter, group_counter, 0, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 1, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 2, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 3, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[simulation_counter, group_counter, 4, layer_freeze_counter, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
### Saving the main variables
scipy.io.savemat(parent_folder + '/all_simulation_training_accuracy.mat', mdict = {'all_simulation_training_accuracy': all_simulation_training_accuracy})
scipy.io.savemat(parent_folder + '/all_simulation_transfer_accuracy.mat', mdict = {'all_simulation_transfer_accuracy': all_simulation_transfer_accuracy})
scipy.io.savemat(parent_folder + '/all_simulation_all_MI_original.mat', mdict = {'all_simulation_all_MI_original': all_simulation_all_MI_original})
scipy.io.savemat(parent_folder + '/all_simulation_all_MI_noise.mat', mdict = {'all_simulation_all_MI_noise': all_simulation_all_MI_noise})
scipy.io.savemat(parent_folder + '/all_simulation_all_ID.mat', mdict = {'all_simulation_all_ID': all_simulation_all_ID})
scipy.io.savemat(parent_folder + '/all_x_sample_ID.mat', mdict = {'all_x_sample_ID': all_x_sample_ID})
scipy.io.savemat(parent_folder + '/all_simulation_training_accuracy_permuted.mat', mdict = {'all_simulation_training_accuracy_permuted': all_simulation_training_accuracy_permuted})
scipy.io.savemat(parent_folder + '/all_simulation_all_ID_permuted.mat', mdict = {'all_simulation_all_ID_permuted': all_simulation_all_ID_permuted})
scipy.io.savemat(parent_folder + '/all_PCA_explained_variance_layer_1.mat', mdict = {'all_PCA_explained_variance_layer_1': all_PCA_explained_variance_layer_1})
scipy.io.savemat(parent_folder + '/all_PCA_explained_variance_layer_2.mat', mdict = {'all_PCA_explained_variance_layer_2': all_PCA_explained_variance_layer_2})
scipy.io.savemat(parent_folder + '/all_PCA_explained_variance_layer_3.mat', mdict = {'all_PCA_explained_variance_layer_3': all_PCA_explained_variance_layer_3})
scipy.io.savemat(parent_folder + '/all_PCA_explained_variance_layer_4.mat', mdict = {'all_PCA_explained_variance_layer_4': all_PCA_explained_variance_layer_4})
scipy.io.savemat(parent_folder + '/all_PCA_explained_variance_layer_5.mat', mdict = {'all_PCA_explained_variance_layer_5': all_PCA_explained_variance_layer_5})
scipy.io.savemat(parent_folder + '/all_simulation_weight_change_layer_1.mat', mdict = {'all_simulation_weight_change_layer_1': all_simulation_weight_change_layer_1})
scipy.io.savemat(parent_folder + '/all_simulation_weight_change_layer_2.mat', mdict = {'all_simulation_weight_change_layer_2': all_simulation_weight_change_layer_2})
scipy.io.savemat(parent_folder + '/all_simulation_weight_change_layer_3.mat', mdict = {'all_simulation_weight_change_layer_3': all_simulation_weight_change_layer_3})
scipy.io.savemat(parent_folder + '/all_simulation_weight_change_layer_4.mat', mdict = {'all_simulation_weight_change_layer_4': all_simulation_weight_change_layer_4})
scipy.io.savemat(parent_folder + '/all_simulation_weight_change_layer_5.mat', mdict = {'all_simulation_weight_change_layer_5': all_simulation_weight_change_layer_5})
scipy.io.savemat(parent_folder + '/all_simulation_layer_rotation_layer_1.mat', mdict = {'all_simulation_layer_rotation_layer_1': all_simulation_layer_rotation_layer_1})
scipy.io.savemat(parent_folder + '/all_simulation_layer_rotation_layer_2.mat', mdict = {'all_simulation_layer_rotation_layer_2': all_simulation_layer_rotation_layer_2})
scipy.io.savemat(parent_folder + '/all_simulation_layer_rotation_layer_3.mat', mdict = {'all_simulation_layer_rotation_layer_3': all_simulation_layer_rotation_layer_3})
scipy.io.savemat(parent_folder + '/all_simulation_layer_rotation_layer_4.mat', mdict = {'all_simulation_layer_rotation_layer_4': all_simulation_layer_rotation_layer_4})
scipy.io.savemat(parent_folder + '/all_simulation_layer_rotation_layer_5.mat', mdict = {'all_simulation_layer_rotation_layer_5': all_simulation_layer_rotation_layer_5})
| 73.873777
| 332
| 0.575782
| 8,447
| 75,499
| 4.799929
| 0.058364
| 0.065113
| 0.062893
| 0.042915
| 0.881366
| 0.856702
| 0.827722
| 0.799359
| 0.787989
| 0.763448
| 0
| 0.050448
| 0.335223
| 75,499
| 1,022
| 333
| 73.873777
| 0.757382
| 0.066743
| 0
| 0.540212
| 0
| 0
| 0.047491
| 0.028922
| 0
| 0
| 0
| 0
| 0.004552
| 1
| 0.016692
| false
| 0
| 0.034901
| 0
| 0.059181
| 0.021244
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5fabd5552c09f91e70c56d781a728f2617fb24ed
| 82
|
py
|
Python
|
common/utility/utils.py
|
klo9klo9kloi/win_det_heatmaps
|
fc427bcd593831d627698455b8917eb37add3f6e
|
[
"MIT"
] | 29
|
2020-07-27T10:49:09.000Z
|
2022-03-17T02:15:03.000Z
|
common/utility/utils.py
|
klo9klo9kloi/win_det_heatmaps
|
fc427bcd593831d627698455b8917eb37add3f6e
|
[
"MIT"
] | 6
|
2020-09-30T01:51:34.000Z
|
2022-01-02T08:00:22.000Z
|
common/utility/utils.py
|
klo9klo9kloi/win_det_heatmaps
|
fc427bcd593831d627698455b8917eb37add3f6e
|
[
"MIT"
] | 10
|
2020-07-31T00:43:38.000Z
|
2022-03-07T02:45:25.000Z
|
import numpy as np
def float2int(val):
    return tuple(int(x + 0.5) for x in val)  # add 0.5 then truncate: rounds half up, correct only for non-negative values
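For example (hypothetical inputs; note the truncation caveat above):

float2int((2.3, 4.7))  # -> (2, 5)
float2int((-2.3,))     # -> (-1,), not the (-2,) that true rounding would give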
| 20.5
| 42
| 0.682927
| 17
| 82
| 3.294118
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 0.207317
| 82
| 4
| 42
| 20.5
| 0.815385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3958cb8da7a86fb5d6568774e7d976d0016c1756
| 45
|
py
|
Python
|
nextcord/ext/help/__init__.py
|
ooliver1/nextcord-ext-help
|
2dd740263b67a629573e3afca33415c02e5cef07
|
[
"MIT"
] | null | null | null |
nextcord/ext/help/__init__.py
|
ooliver1/nextcord-ext-help
|
2dd740263b67a629573e3afca33415c02e5cef07
|
[
"MIT"
] | null | null | null |
nextcord/ext/help/__init__.py
|
ooliver1/nextcord-ext-help
|
2dd740263b67a629573e3afca33415c02e5cef07
|
[
"MIT"
] | null | null | null |
from .faked import *
from .help import setup
| 15
| 23
| 0.755556
| 7
| 45
| 4.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 24
| 22.5
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39958a2f9454b11953ae9627305219796c22e628
| 366
|
py
|
Python
|
src/pipeline/report.py
|
alphagov-mirror/govuk-accessibility-reports
|
88204c03e273fff76b67ab0730a44869f02e28c9
|
[
"MIT"
] | 7
|
2021-02-18T07:15:25.000Z
|
2021-06-28T07:58:04.000Z
|
src/pipeline/report.py
|
alphagov-mirror/govuk-accessibility-reports
|
88204c03e273fff76b67ab0730a44869f02e28c9
|
[
"MIT"
] | 5
|
2021-01-25T18:41:30.000Z
|
2022-03-04T17:36:17.000Z
|
src/pipeline/report.py
|
alphagov-mirror/govuk-accessibility-reports
|
88204c03e273fff76b67ab0730a44869f02e28c9
|
[
"MIT"
] | 3
|
2020-12-14T17:35:58.000Z
|
2021-04-10T20:11:26.000Z
|
class Report:
def __init__(self, report):
self.report = report
@property
def name(self):
return self.report['name']
@property
def filename(self):
return self.report['filename']
@property
def klass(self):
return self.report['class']
@property
def skip(self):
return self.report['skip']
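A hypothetical usage sketch, assuming each report is configured as a plain dict carrying the four keys the properties read (the values here are made up for illustration):

report = Report({'name': 'headings', 'filename': 'headings.csv',
                 'class': 'HeadingsReport', 'skip': False})
print(report.name, report.skip)  # headings False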
| 17.428571
| 38
| 0.584699
| 41
| 366
| 5.121951
| 0.268293
| 0.285714
| 0.266667
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297814
| 366
| 20
| 39
| 18.3
| 0.817121
| 0
| 0
| 0.266667
| 0
| 0
| 0.057377
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.266667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
39a467ca49358ae4b3c8418d912f1fd981e7eea0
| 22
|
py
|
Python
|
pyjq/__init__.py
|
spraakbanken/pyjq
|
14d7348f450e3838876149a7891b977a22907e60
|
[
"MIT"
] | null | null | null |
pyjq/__init__.py
|
spraakbanken/pyjq
|
14d7348f450e3838876149a7891b977a22907e60
|
[
"MIT"
] | null | null | null |
pyjq/__init__.py
|
spraakbanken/pyjq
|
14d7348f450e3838876149a7891b977a22907e60
|
[
"MIT"
] | null | null | null |
from .cli import cli
| 7.333333
| 20
| 0.727273
| 4
| 22
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 22
| 2
| 21
| 11
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
39e69a4d5c96f5d122d55a7d967d38228b257aee
| 33
|
py
|
Python
|
example/__init__.py
|
elpeix/kaa
|
b840613cb5eba876d937faf32031651332e5b5f6
|
[
"MIT"
] | null | null | null |
example/__init__.py
|
elpeix/kaa
|
b840613cb5eba876d937faf32031651332e5b5f6
|
[
"MIT"
] | null | null | null |
example/__init__.py
|
elpeix/kaa
|
b840613cb5eba876d937faf32031651332e5b5f6
|
[
"MIT"
] | null | null | null |
from .server import SampleServer
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f2f935cc189228d08887a78168888bdb20e65598
| 1,605
|
py
|
Python
|
exercise/venv/lib/python3.7/site-packages/sqreen/frameworks/blank.py
|
assuzzanne/my-sqreen
|
81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b
|
[
"MIT"
] | null | null | null |
exercise/venv/lib/python3.7/site-packages/sqreen/frameworks/blank.py
|
assuzzanne/my-sqreen
|
81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b
|
[
"MIT"
] | 1
|
2021-06-02T00:27:34.000Z
|
2021-06-02T00:27:34.000Z
|
exercise/venv/lib/python3.7/site-packages/sqreen/frameworks/blank.py
|
assuzzanne/notifications-dispatcher-api
|
81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2017, 2018, 2019 Sqreen. All rights reserved.
# Please refer to our terms for more information:
#
# https://www.sqreen.io/terms.html
#
""" Blank request for callbacks needing a request when no one is present
"""
from .base import BaseRequest, BaseResponse
class BlankRequest(BaseRequest):
@property
def raw_headers(self):
return {}
@property
def raw_client_ip(self):
return None
@property
def client_user_agent(self):
return None
@property
def cookies_params(self):
return {}
@property
def form_params(self):
return {}
@property
def hostname(self):
return None
@property
def method(self):
return None
@property
def path(self):
return None
@property
def query_params(self):
return {}
@property
def query_params_values(self):
return []
@property
def referer(self):
return None
@property
def remote_port(self):
return None
@property
def remote_addr(self):
return None
@property
def scheme(self):
return None
@property
def server_port(self):
return None
@property
def view_params(self):
return {}
@property
def json_params(self):
return {}
class BlankResponse(BaseResponse):
@property
def status_code(self):
return None
@property
def content_type(self):
return None
@property
def content_length(self):
return None
| 16.546392
| 72
| 0.602492
| 181
| 1,605
| 5.243094
| 0.414365
| 0.231823
| 0.191781
| 0.278188
| 0.465753
| 0.167545
| 0
| 0
| 0
| 0
| 0
| 0.015399
| 0.31215
| 1,605
| 96
| 73
| 16.71875
| 0.844203
| 0.150779
| 0
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.31746
| false
| 0
| 0.015873
| 0.31746
| 0.68254
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f2fd564d0c65db51adcfff29238d633e38e228e9
| 61
|
py
|
Python
|
farmer/ncc/history/__init__.py
|
tamahassam/farmer
|
512c6fcd5dc5aa223a0fad02527d8000a4cc9ab4
|
[
"Apache-2.0"
] | 10
|
2019-04-04T07:32:47.000Z
|
2021-01-07T00:40:50.000Z
|
farmer/ncc/history/__init__.py
|
tamahassam/farmer
|
512c6fcd5dc5aa223a0fad02527d8000a4cc9ab4
|
[
"Apache-2.0"
] | 59
|
2019-04-18T05:44:31.000Z
|
2021-05-02T10:33:02.000Z
|
farmer/ncc/history/__init__.py
|
tamahassam/farmer
|
512c6fcd5dc5aa223a0fad02527d8000a4cc9ab4
|
[
"Apache-2.0"
] | 4
|
2020-01-23T14:01:43.000Z
|
2021-02-11T04:16:14.000Z
|
from .plot_history import plot_history
from .history import *
| 30.5
| 38
| 0.836066
| 9
| 61
| 5.444444
| 0.444444
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114754
| 61
| 2
| 39
| 30.5
| 0.907407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
844004c0ed2b9c61553e93cd2db618db002fadfc
| 148
|
py
|
Python
|
fortnox/objects/__init__.py
|
andreask/fortnox-python
|
51998d87fb6ca9da954c72f626265eff28667ded
|
[
"MIT"
] | null | null | null |
fortnox/objects/__init__.py
|
andreask/fortnox-python
|
51998d87fb6ca9da954c72f626265eff28667ded
|
[
"MIT"
] | null | null | null |
fortnox/objects/__init__.py
|
andreask/fortnox-python
|
51998d87fb6ca9da954c72f626265eff28667ded
|
[
"MIT"
] | null | null | null |
from .financial_year import FinancialYear
from .voucher import Voucher
from .voucher_row import VoucherRow
from .voucher_series import VoucherSeries
| 37
| 41
| 0.871622
| 19
| 148
| 6.631579
| 0.526316
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101351
| 148
| 4
| 42
| 37
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
844a9a5f5f40929a97641d4191e551ecb723a82a
| 194
|
py
|
Python
|
dwtools3/django/seo/urls.py
|
bazzisoft/dwtools3
|
ed7b457290ca940b6e53ab56df26ece42afc9928
|
[
"MIT"
] | 1
|
2019-09-03T10:42:16.000Z
|
2019-09-03T10:42:16.000Z
|
dwtools3/django/seo/urls.py
|
bazzisoft/dwtools3
|
ed7b457290ca940b6e53ab56df26ece42afc9928
|
[
"MIT"
] | null | null | null |
dwtools3/django/seo/urls.py
|
bazzisoft/dwtools3
|
ed7b457290ca940b6e53ab56df26ece42afc9928
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^admin/seo/metatags-admin-redirect/$', views.seo_metatags_admin_redirect, name='seo_metatags_admin_redirect'),
]
| 24.25
| 120
| 0.768041
| 27
| 194
| 5.296296
| 0.518519
| 0.230769
| 0.335664
| 0.503497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108247
| 194
| 7
| 121
| 27.714286
| 0.82659
| 0
| 0
| 0
| 0
| 0
| 0.324742
| 0.324742
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
844d90a08eff9a759e56a549bf673b42c7fd0b82
| 2,709
|
py
|
Python
|
tests/test_losses.py
|
alexkyllo/torch-wtte
|
916a4641cb8dacabcb54c22363347cd36c18c041
|
[
"MIT"
] | 1
|
2021-07-26T23:07:49.000Z
|
2021-07-26T23:07:49.000Z
|
tests/test_losses.py
|
alexkyllo/torch-wtte-rnn
|
916a4641cb8dacabcb54c22363347cd36c18c041
|
[
"MIT"
] | null | null | null |
tests/test_losses.py
|
alexkyllo/torch-wtte-rnn
|
916a4641cb8dacabcb54c22363347cd36c18c041
|
[
"MIT"
] | 1
|
2022-03-08T13:17:00.000Z
|
2022-03-08T13:17:00.000Z
|
import torch
from torch_wtte import losses
def test_log_likelihood_discrete():
"""Test that the discrete version of the log-likelihood function
returns the expected result.
"""
tte = torch.tensor([[6, 5, 4, 3, 2], [5, 4, 3, 2, 1]])
uncensored = torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]])
alpha = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [0.99, 0.99, 0.99, 0.99, 0.99]])
beta = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [1.1, 1.1, 1.1, 1.1, 1.1]])
loss_values = losses.log_likelihood_discrete(tte, uncensored, alpha, beta)
# results from wtte-rnn package
expected = torch.tensor(
[
[-6.09469436, -5.24937802, -4.38501338, -3.49575045, -2.95522717],
[-6.24961923, -4.96687732, -3.71907366, -2.5180084, -1.3889585],
]
)
eq_t = torch.isclose(
loss_values,
expected,
)
assert eq_t.all()
def test_log_likelihood_continuous():
"""Test that the discrete version of the log-likelihood function
returns the expected result.
"""
tte = torch.tensor([[6, 5, 4, 3, 2], [5, 4, 3, 2, 1]])
uncensored = torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]])
alpha = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [0.99, 0.99, 0.99, 0.99, 0.99]])
beta = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [1.1, 1.1, 1.1, 1.1, 1.1]])
loss_values = losses.log_likelihood_continuous(tte, uncensored, alpha, beta)
# results from wtte-rnn package
expected = torch.tensor(
[
[-3.91260149, -3.24213782, -2.59143362, -1.97701222, -2.0516759],
[-4.06163704, -3.01458314, -2.07075338, -1.29854872, -0.90475116],
]
)
eq_t = torch.isclose(
loss_values,
expected,
)
assert eq_t.all()
def test_loss_fn():
"""Test that the discrete version of the loss function
returns the expected result.
"""
tte = torch.tensor([[6, 5, 4, 3, 2], [5, 4, 3, 2, 1]])
uncensored = torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]])
alpha = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [0.99, 0.99, 0.99, 0.99, 0.99]])
beta = torch.tensor([[0.9, 0.9, 0.9, 0.9, 0.9], [1.1, 1.1, 1.1, 1.1, 1.1]])
inputs = torch.stack([alpha, beta], axis=-1)
target = torch.stack([tte, uncensored], axis=-1)
loss_values = losses.weibull_censored_nll_loss(inputs, target, discrete=True, reduction=None)
# results from wtte-rnn package
expected = torch.tensor(
[
[6.09469436, 5.24937802, 4.38501338, 3.49575045, 2.95522717],
[6.24961923, 4.96687732, 3.71907366, 2.5180084, 1.3889585],
]
)
eq_t = torch.isclose(
loss_values,
expected,
)
assert eq_t.all()
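Comparing the expected tensors in test_log_likelihood_discrete and test_loss_fn, the discrete loss appears to be the element-wise negation of the discrete log-likelihood. A minimal check of that relationship, reusing the tensors already defined in test_loss_fn (an observation from the fixtures above, not a documented API guarantee):

nll = losses.weibull_censored_nll_loss(inputs, target, discrete=True, reduction=None)
ll = losses.log_likelihood_discrete(tte, uncensored, alpha, beta)
assert torch.isclose(nll, -ll).all()  # loss == negated log-likelihood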
| 35.644737
| 97
| 0.567737
| 442
| 2,709
| 3.41629
| 0.167421
| 0.063576
| 0.077483
| 0.07947
| 0.772848
| 0.772848
| 0.772848
| 0.752318
| 0.752318
| 0.752318
| 0
| 0.226027
| 0.245478
| 2,709
| 75
| 98
| 36.12
| 0.51272
| 0.130306
| 0
| 0.490909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 1
| 0.054545
| false
| 0
| 0.036364
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fff28ddeddbb9bbda241b51d0ca371212304dd55
| 27
|
py
|
Python
|
anybar/__init__.py
|
philipbl/pyAnyBar
|
5614cbeba90cd24d1fd1c553f8495e78795cdcd8
|
[
"MIT"
] | 55
|
2015-04-09T18:06:51.000Z
|
2021-11-08T11:51:11.000Z
|
anybar/__init__.py
|
philipbl/pyAnyBar
|
5614cbeba90cd24d1fd1c553f8495e78795cdcd8
|
[
"MIT"
] | 4
|
2016-06-14T06:59:45.000Z
|
2021-06-04T16:56:34.000Z
|
anybar/__init__.py
|
philipbl/pyAnyBar
|
5614cbeba90cd24d1fd1c553f8495e78795cdcd8
|
[
"MIT"
] | 12
|
2016-06-27T08:04:02.000Z
|
2021-06-04T12:07:41.000Z
|
from .anybar import AnyBar
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
081cb607f57c37515e3a62379a8cd441762e5e1b
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/poetry/console/commands/self/self.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/poetry/console/commands/self/self.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/poetry/console/commands/self/self.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/49/7f/25/1bf09512d6075ebaac8cfbd7a1476311aaa51d4e25c6e82b8887527c29
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
082d30e548ca8a0143cc901d604e13bf6a4b861d
| 121
|
py
|
Python
|
fashionnets/models/layer/PassThroughLayer.py
|
NiklasHoltmeyer/FashionNets
|
918e57f122b8cfa36dba1d0b993c763ba35ac815
|
[
"MIT"
] | null | null | null |
fashionnets/models/layer/PassThroughLayer.py
|
NiklasHoltmeyer/FashionNets
|
918e57f122b8cfa36dba1d0b993c763ba35ac815
|
[
"MIT"
] | null | null | null |
fashionnets/models/layer/PassThroughLayer.py
|
NiklasHoltmeyer/FashionNets
|
918e57f122b8cfa36dba1d0b993c763ba35ac815
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
class PassThroughLayer(tf.keras.layers.Layer):
def call(self, inputs):
return inputs
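A quick usage sketch (hypothetical tensor, eager execution assumed) confirming the layer is an identity:

layer = PassThroughLayer()
x = tf.constant([1.0, 2.0, 3.0])
assert bool((layer(x) == x).numpy().all())  # output is the input, unchanged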
| 24.2
| 46
| 0.727273
| 16
| 121
| 5.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190083
| 121
| 5
| 47
| 24.2
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|