hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c7209b532d579886815f75841b84b9e39b79ecd9
| 185
|
py
|
Python
|
malcolm/modules/pandablocks/blocks/__init__.py
|
hir12111/pymalcolm
|
689542711ff903ee99876c40fc0eae8015e13314
|
[
"Apache-2.0"
] | 11
|
2016-10-04T23:11:39.000Z
|
2022-01-25T15:44:43.000Z
|
malcolm/modules/pandablocks/blocks/__init__.py
|
hir12111/pymalcolm
|
689542711ff903ee99876c40fc0eae8015e13314
|
[
"Apache-2.0"
] | 153
|
2016-06-01T13:31:02.000Z
|
2022-03-31T11:17:18.000Z
|
malcolm/modules/pandablocks/blocks/__init__.py
|
hir12111/pymalcolm
|
689542711ff903ee99876c40fc0eae8015e13314
|
[
"Apache-2.0"
] | 16
|
2016-06-10T13:45:27.000Z
|
2020-10-24T13:45:04.000Z
|
from malcolm.yamlutil import check_yaml_names, make_block_creator
panda_manager_block = make_block_creator(__file__, "panda_manager_block.yaml")
__all__ = check_yaml_names(globals())
| 30.833333
| 78
| 0.848649
| 26
| 185
| 5.269231
| 0.576923
| 0.131387
| 0.20438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075676
| 185
| 5
| 79
| 37
| 0.80117
| 0
| 0
| 0
| 0
| 0
| 0.12973
| 0.12973
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c73a88d98d25716fc2dfa6bc55ef50cde57e22e6
| 2,070
|
py
|
Python
|
src/hub/dataload/sources/drugcentral/__init__.py
|
ravila4/mychem.info
|
9b63b5f0957b5e7b252ca8122734a363905036b3
|
[
"Apache-2.0"
] | 10
|
2017-07-24T11:45:27.000Z
|
2022-02-14T13:42:36.000Z
|
src/hub/dataload/sources/drugcentral/__init__.py
|
ravila4/mychem.info
|
9b63b5f0957b5e7b252ca8122734a363905036b3
|
[
"Apache-2.0"
] | 92
|
2017-06-22T16:49:20.000Z
|
2022-03-24T20:50:01.000Z
|
src/hub/dataload/sources/drugcentral/__init__.py
|
ravila4/mychem.info
|
9b63b5f0957b5e7b252ca8122734a363905036b3
|
[
"Apache-2.0"
] | 11
|
2017-06-12T18:31:35.000Z
|
2022-01-31T02:56:52.000Z
|
file_path_pharma_class = '/home/kevinxin/drug/drugcentral/2018.6/pharma_class.csv'
#df_drugcentral_pharma_class = pd.read_csv(file_path_pharma_class, sep=",", names=['_id', 'struc_id', 'role', 'description', 'code', 'source'])
file_path_faers = '/home/kevinxin/drug/drugcentral/2018.6/faers.csv'
#df_drugcentral_faers = pd.read_csv(file_path_faers, sep=",", names=['_id', 'struc_id', 'meddra_term', 'meddra_code', 'level', 'llr', 'llr_threshold', 'drug_ae', 'drug_no_ae', 'no_drug_ae', 'no_drug_no_ar'])
file_path_act = '/home/kevinxin/drug/drugcentral/2018.6/act_table_full.csv'
#df_drugcentral_act = pd.read_csv(file_path_act, sep=",", names=["act_id", "struct_id", "target_id", "target_name", "target_class", "accession", "gene", "swissprot", "act_value", "act_unit", "act_type", "act_comment", "act_source", "relation", "moa", "moa_source", "act_source_url", "moa_source_url", "action_type", "first_in_class", "tdl", "act_ref_id", "moa_ref_id", "organism"])
file_path_omop = '/home/kevinxin/drug/drugcentral/2018.6/omop_relationship.csv'
#df_omop = pd.read_csv(file_path_omop, sep=",", names=['_id', 'struct_id', 'concept_id', 'relationship_name', 'concept_name', 'umls_cui', 'snomed_full_name', 'cui_semantic_type', 'snomed_conceptid'])
file_path_approval = '/home/kevinxin/drug/drugcentral/2018.6/approval.csv'
#df_drug_approval = pd.read_csv(file_path_approval, sep=",", names=['_id', 'struct_id', 'approval', 'type', 'applicant', 'orphan'])
file_path_synonym = '/home/kevinxin/drug/drugcentral/2018.6/synonyms.csv'
#df_synonym = pd.read_csv(file_path_synonym, sep=",", names=["_id", "struct_id", 'synonym', 'pref', 'parent', 's2'])
file_path_structure = '/home/kevinxin/drug/drugcentral/2018.6/structures.smiles.tsv'
#df_structure = pd.read_csv(file_path_structure, sep="\t")
file_path_identifier = '/home/kevinxin/drug/drugcentral/2018.6/identifiers.csv'
#df_identifier = pd.read_csv(file_path_identifiers, sep=",", names=["_id", "identifier", "id_type", "struct_id", "parent"])
file_path_drug_dosage = '/home/kevinxin/drug/drugcentral/2018.6/drug_dosage.csv'
| 108.947368
| 381
| 0.744444
| 309
| 2,070
| 4.605178
| 0.249191
| 0.095573
| 0.101195
| 0.170766
| 0.359803
| 0.202389
| 0
| 0
| 0
| 0
| 0
| 0.023675
| 0.061353
| 2,070
| 18
| 382
| 115
| 0.708698
| 0.652174
| 0
| 0
| 0
| 0
| 0.68917
| 0.68917
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c73c1a6cd3ac6f3e26a4d0988c95ddf54bee7445
| 239
|
py
|
Python
|
migrating-from-python/duck.py
|
bgmerrell/presentations
|
5f16e1b86e1b1c68095da238eb67c2872502ca9a
|
[
"CC0-1.0"
] | 1
|
2015-02-01T17:49:25.000Z
|
2015-02-01T17:49:25.000Z
|
migrating-from-python/duck.py
|
bgmerrell/presentations
|
5f16e1b86e1b1c68095da238eb67c2872502ca9a
|
[
"CC0-1.0"
] | null | null | null |
migrating-from-python/duck.py
|
bgmerrell/presentations
|
5f16e1b86e1b1c68095da238eb67c2872502ca9a
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
class Cow(object):
pass
class Fox(object):
def get_noise(self):
return "Ring-ding-ding-ding-dingeringeding"
def ask(obj):
print('What goes "{}?"'.format(obj.get_noise()))
ask(Fox())
ask(Cow())
| 15.933333
| 52
| 0.631799
| 35
| 239
| 4.257143
| 0.657143
| 0.107383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175732
| 239
| 14
| 53
| 17.071429
| 0.756345
| 0.083682
| 0
| 0
| 0
| 0
| 0.224771
| 0.155963
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0
| 0.111111
| 0.555556
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
c74e47844115c898131e124d105addae2df08e38
| 116
|
py
|
Python
|
libcpab/tensorflow/__init__.py
|
SkafteNicki/tf_cpab_lib
|
aabfd9eaa75d9e492294dbd5625921c08d7cdb83
|
[
"MIT"
] | 35
|
2018-09-18T20:29:30.000Z
|
2022-03-08T07:31:04.000Z
|
libcpab/tensorflow/__init__.py
|
SkafteNicki/tf_cpab_lib
|
aabfd9eaa75d9e492294dbd5625921c08d7cdb83
|
[
"MIT"
] | 10
|
2019-04-23T16:09:06.000Z
|
2021-08-18T07:46:57.000Z
|
libcpab/tensorflow/__init__.py
|
SkafteNicki/tf_cpab_lib
|
aabfd9eaa75d9e492294dbd5625921c08d7cdb83
|
[
"MIT"
] | 8
|
2019-02-20T09:24:11.000Z
|
2021-09-12T14:25:27.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 16 15:26:48 2018
@author: nsde
"""
#%%
import tensorflow as tf
#%%
| 10.545455
| 35
| 0.577586
| 18
| 116
| 3.722222
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 0.206897
| 116
| 11
| 36
| 10.545455
| 0.586957
| 0.663793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c766d5d29b23efc789b8100f29a072afd215b061
| 151
|
py
|
Python
|
less-code/version-2/nfs-keep.py
|
tebeka/pythonwise
|
56f8cb7aa792c3ad6a3dc754e15f6a04890d694a
|
[
"BSD-3-Clause"
] | 21
|
2016-11-16T20:08:56.000Z
|
2021-12-11T23:13:05.000Z
|
less-code/version-2/nfs-keep.py
|
tebeka/pythonwise
|
56f8cb7aa792c3ad6a3dc754e15f6a04890d694a
|
[
"BSD-3-Clause"
] | 1
|
2020-10-05T08:35:31.000Z
|
2020-10-05T08:35:31.000Z
|
less-code/version-2/nfs-keep.py
|
tebeka/pythonwise
|
56f8cb7aa792c3ad6a3dc754e15f6a04890d694a
|
[
"BSD-3-Clause"
] | 8
|
2016-11-12T22:54:55.000Z
|
2021-02-10T10:46:23.000Z
|
#!/usr/bin/env python
'''Keep NFS mount "warm", called from crontab'''
from os import listdir
if __name__ == "__main__":
listdir("/path/to/nfs")
| 18.875
| 48
| 0.668874
| 22
| 151
| 4.227273
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15894
| 151
| 7
| 49
| 21.571429
| 0.732283
| 0.417219
| 0
| 0
| 0
| 0
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c76f0cb28918816040c55aee09e62b2ae1399500
| 184
|
py
|
Python
|
bin/raptiformica_spawn.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 21
|
2016-09-04T11:27:31.000Z
|
2019-10-30T08:23:14.000Z
|
bin/raptiformica_spawn.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 5
|
2017-09-17T15:59:37.000Z
|
2018-02-03T14:53:32.000Z
|
bin/raptiformica_spawn.py
|
vdloo/raptiformica
|
e2807e5e913312034161efcbd74525a4b15b37e7
|
[
"MIT"
] | 2
|
2017-11-21T18:14:51.000Z
|
2017-11-22T01:20:45.000Z
|
#!/usr/bin/env python3
from raptiformica.cli import spawn
if __name__ == '__main__':
spawn()
else:
raise RuntimeError("This script is an entry point and can not be imported")
| 23
| 79
| 0.722826
| 27
| 184
| 4.62963
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006623
| 0.179348
| 184
| 7
| 80
| 26.285714
| 0.821192
| 0.11413
| 0
| 0
| 0
| 0
| 0.376543
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c79185abacc6d2ffde2596eab2365cd825a52f6d
| 126
|
py
|
Python
|
router.py
|
Zechee/flask-commodity
|
14f586f918a4a84ee9f8a79dc4bea1bb323b2d05
|
[
"MIT"
] | null | null | null |
router.py
|
Zechee/flask-commodity
|
14f586f918a4a84ee9f8a79dc4bea1bb323b2d05
|
[
"MIT"
] | null | null | null |
router.py
|
Zechee/flask-commodity
|
14f586f918a4a84ee9f8a79dc4bea1bb323b2d05
|
[
"MIT"
] | null | null | null |
# from resources.session import UserInfo
def router(api) -> None:
# api.add_resource(UserInfo, "/userinfo")
pass
| 21
| 45
| 0.674603
| 15
| 126
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206349
| 126
| 6
| 46
| 21
| 0.84
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
c7a150ef88886b0f78dee609109b058dd4425e84
| 861
|
py
|
Python
|
melange/domain_event_bus/domain_subscriber.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
melange/domain_event_bus/domain_subscriber.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
melange/domain_event_bus/domain_subscriber.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
""" Subscriber super-class """
from .domain_event_bus import DomainEvent
class DomainEventHandler:
def process(self, event):
"""
Called by the domain_event_bus.
:param event: The event object
:type event: EventMessage or subclass of event
This method implements the logic for processing the event. This method should not block for
long time as that will affect the performance of the domain_event_bus.
"""
pass
def listens_to(self):
return []
def listen(self):
from melange.domain_event_bus import DomainEventBus
DomainEventBus.instance().subscribe(self)
return self
def accepts(self, event):
return isinstance(event, DomainEvent) \
and (self.listens_to() == [] or any(isinstance(event, e) for e in self.listens_to()))
| 29.689655
| 100
| 0.655052
| 106
| 861
| 5.216981
| 0.509434
| 0.079566
| 0.101266
| 0.072333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 861
| 28
| 101
| 30.75
| 0.877778
| 0.344948
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0.076923
| 0.153846
| 0.153846
| 0.769231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
c7b2e65a5cedfd9a74e4d807aa42fa9eb1c6af0d
| 77
|
py
|
Python
|
api/pipwatch_api/core/__init__.py
|
AleksanderGondek/pipwatch
|
bff0c5198d6a433a7ca9d03c82dae981d8b85dd1
|
[
"Apache-2.0"
] | 1
|
2018-11-29T16:24:25.000Z
|
2018-11-29T16:24:25.000Z
|
api/pipwatch_api/core/__init__.py
|
AleksanderGondek/pipwatch
|
bff0c5198d6a433a7ca9d03c82dae981d8b85dd1
|
[
"Apache-2.0"
] | null | null | null |
api/pipwatch_api/core/__init__.py
|
AleksanderGondek/pipwatch
|
bff0c5198d6a433a7ca9d03c82dae981d8b85dd1
|
[
"Apache-2.0"
] | null | null | null |
"""This package contains common modules that are used throughout the api."""
| 38.5
| 76
| 0.766234
| 11
| 77
| 5.363636
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 77
| 1
| 77
| 77
| 0.893939
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c7b55ad6fc4a78a02176e663d89674618916ef52
| 152
|
py
|
Python
|
surfer/gradient/__init__.py
|
Cryoris/surfer
|
2f8faef420b769ee92abe1562381402d2d6e34ad
|
[
"Apache-2.0"
] | 2
|
2021-07-14T19:16:44.000Z
|
2021-08-20T20:18:13.000Z
|
surfer/gradient/__init__.py
|
Cryoris/surfer
|
2f8faef420b769ee92abe1562381402d2d6e34ad
|
[
"Apache-2.0"
] | null | null | null |
surfer/gradient/__init__.py
|
Cryoris/surfer
|
2f8faef420b769ee92abe1562381402d2d6e34ad
|
[
"Apache-2.0"
] | null | null | null |
from .forward_gradient import ForwardGradient
from .reverse_gradient import ReverseGradient
__all__ = [
"ForwardGradient",
"ReverseGradient"
]
| 19
| 45
| 0.782895
| 13
| 152
| 8.692308
| 0.615385
| 0.247788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151316
| 152
| 7
| 46
| 21.714286
| 0.875969
| 0
| 0
| 0
| 0
| 0
| 0.197368
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c7c5168a419a38b08bb04ad129dd4aed03eba234
| 84,402
|
py
|
Python
|
pysnmp_mibs/IPSEC-SPD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 6
|
2017-04-21T13:48:08.000Z
|
2022-01-06T19:42:52.000Z
|
pysnmp_mibs/IPSEC-SPD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 1
|
2020-05-05T16:42:25.000Z
|
2020-05-05T16:42:25.000Z
|
pysnmp_mibs/IPSEC-SPD-MIB.py
|
jackjack821/pysnmp-mibs
|
9835ea0bb2420715caf4ee9aaa07d59bb263acd6
|
[
"BSD-2-Clause"
] | 6
|
2020-02-08T20:28:49.000Z
|
2021-09-14T13:36:46.000Z
|
#
# PySNMP MIB module IPSEC-SPD-MIB (http://pysnmp.sf.net)
# ASN.1 source http://mibs.snmplabs.com:80/asn1/IPSEC-SPD-MIB
# Produced by pysmi-0.0.7 at Sun Feb 14 00:18:27 2016
# On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose
# Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52)
#
( Integer, OctetString, ObjectIdentifier, ) = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
( IfDirection, diffServMIBMultiFieldClfrGroup, diffServMultiFieldClfrNextFree, ) = mibBuilder.importSymbols("DIFFSERV-MIB", "IfDirection", "diffServMIBMultiFieldClfrGroup", "diffServMultiFieldClfrNextFree")
( InterfaceIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
( InetAddress, InetAddressType, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
( ModuleCompliance, NotificationGroup, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
( Bits, Counter64, TimeTicks, iso, ModuleIdentity, mib_2, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, ObjectIdentity, Integer32, MibIdentifier, Counter32, Unsigned32, IpAddress, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter64", "TimeTicks", "iso", "ModuleIdentity", "mib-2", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "ObjectIdentity", "Integer32", "MibIdentifier", "Counter32", "Unsigned32", "IpAddress")
( StorageType, TruthValue, TimeStamp, RowStatus, VariablePointer, DisplayString, TextualConvention, ) = mibBuilder.importSymbols("SNMPv2-TC", "StorageType", "TruthValue", "TimeStamp", "RowStatus", "VariablePointer", "DisplayString", "TextualConvention")
spdMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 153)).setRevisions(("2007-02-07 00:00",))
if mibBuilder.loadTexts: spdMIB.setLastUpdated('200702070000Z')
if mibBuilder.loadTexts: spdMIB.setOrganization('IETF IP Security Policy Working Group')
if mibBuilder.loadTexts: spdMIB.setContactInfo('Michael Baer\n P.O. Box 72682\n Davis, CA 95617\n Phone: +1 530 902 3131\n Email: baerm@tislabs.com\n\n Ricky Charlet\n Email: rcharlet@alumni.calpoly.edu\n\n Wes Hardaker\n Sparta, Inc.\n P.O. Box 382\n Davis, CA 95617\n Phone: +1 530 792 1913\n Email: hardaker@tislabs.com\n\n Robert Story\n Revelstone Software\n PO Box 1812\n\n\n\n Tucker, GA 30085\n Phone: +1 770 617 3722\n Email: rstory@ipsp.revelstone.com\n\n Cliff Wang\n ARO\n 4300 S. Miami Blvd.\n Durham, NC 27703\n E-Mail: cliffwangmail@yahoo.com')
if mibBuilder.loadTexts: spdMIB.setDescription('This MIB module defines configuration objects for managing\n IPsec Security Policies. In general, this MIB can be\n implemented anywhere IPsec security services exist (e.g.,\n bump-in-the-wire, host, gateway, firewall, router, etc.).\n\n Copyright (C) The IETF Trust (2007). This version of\n this MIB module is part of RFC 4807; see the RFC itself for\n full legal notices.')
spdConfigObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 1))
spdNotificationObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 2))
spdConformanceObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 3))
spdActions = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 4))
class SpdBooleanOperator(Integer32, TextualConvention):
subtypeSpec = Integer32.subtypeSpec+ConstraintsUnion(SingleValueConstraint(1, 2,))
namedValues = NamedValues(("or", 1), ("and", 2),)
class SpdAdminStatus(Integer32, TextualConvention):
subtypeSpec = Integer32.subtypeSpec+ConstraintsUnion(SingleValueConstraint(1, 2,))
namedValues = NamedValues(("enabled", 1), ("disabled", 2),)
class SpdIPPacketLogging(Integer32, TextualConvention):
displayHint = 'd'
subtypeSpec = Integer32.subtypeSpec+ValueRangeConstraint(-1,65535)
class SpdTimePeriod(OctetString, TextualConvention):
    """Octet-string textual convention (display hint '31t'), length 0..31."""
    displayHint = '31t'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 31)
# -- System-wide default policy group names (read-write scalars).
spdLocalConfigObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 1, 1))
spdIngressPolicyGroupName = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0,32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: spdIngressPolicyGroupName.setDescription("This object indicates the global system policy group that\n is to be applied on ingress packets (i.e., arriving at an\n interface from a network) when a given endpoint does not\n contain a policy definition in the spdEndpointToGroupTable.\n Its value can be used as an index into the\n spdGroupContentsTable to retrieve a list of policies. A\n zero length string indicates that no system-wide policy exists\n and the default policy of 'drop' SHOULD be executed for\n ingress packets until one is imposed by either this object\n or by the endpoint processing a given packet.\n\n This object MUST be persistent")
spdEgressPolicyGroupName = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0,32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: spdEgressPolicyGroupName.setDescription("This object indicates the policy group containing the\n global system policy that is to be applied on egress\n packets (i.e., packets leaving an interface and entering a\n network) when a given endpoint does not contain a policy\n definition in the spdEndpointToGroupTable. Its value can\n be used as an index into the spdGroupContentsTable to\n retrieve a list of policies. A zero length string\n indicates that no system-wide policy exists and the default\n policy of 'drop' SHOULD be executed for egress packets\n until one is imposed by either this object or by the\n endpoint processing a given packet.\n\n This object MUST be persistent")
# -- spdEndpointToGroupTable: maps (direction, interface) -> policy group name.
spdEndpointToGroupTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 2), )
if mibBuilder.loadTexts: spdEndpointToGroupTable.setDescription('This table maps policies (groupings) onto an endpoint\n (interface). A policy group assigned to an endpoint is then\n used to control access to the network traffic passing\n through that endpoint.\n\n\n\n\n If an endpoint has been configured with a policy group and\n no rule within that policy group matches that packet, the\n default action in this case SHALL be to drop the packet.\n\n If no policy group has been assigned to an endpoint, then\n the policy group specified by spdIngressPolicyGroupName MUST\n be used on traffic inbound from the network through that\n endpoint, and the policy group specified by\n spdEgressPolicyGroupName MUST be used for traffic outbound\n to the network through that endpoint.')
# Row is indexed by direction then interface index.
spdEndpointToGroupEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 2, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdEndGroupDirection"), (0, "IPSEC-SPD-MIB", "spdEndGroupInterface"))
if mibBuilder.loadTexts: spdEndpointToGroupEntry.setDescription('A mapping assigning a policy group to an endpoint.')
spdEndGroupDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 1), IfDirection())
if mibBuilder.loadTexts: spdEndGroupDirection.setDescription('This object indicates which direction of packets crossing\n the interface are associated with which spdEndGroupName\n object. Ingress packets, or packets into the device match\n when this value is inbound(1). Egress packets or packets\n out of the device match when this value is outbound(2).')
spdEndGroupInterface = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 2), InterfaceIndex())
if mibBuilder.loadTexts: spdEndGroupInterface.setDescription("This value matches the IF-MIB's ifTable's ifIndex column\n and indicates the interface associated with a given\n endpoint. This object can be used to uniquely identify an\n endpoint that a set of policy groups are applied to.")
spdEndGroupName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdEndGroupName.setDescription('The policy group name to apply at this endpoint. The\n value of the spdEndGroupName object is then used as an\n index into the spdGroupContentsTable to come up with a list\n of rules that MUST be applied at this endpoint.')
spdEndGroupLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdEndGroupLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdEndGroupStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdEndGroupStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdEndGroupRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 2, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdEndGroupRowStatus.setDescription("This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n This object is considered 'notReady' and MUST NOT be set to\n active until one or more active rows exist within the\n spdGroupContentsTable for the group referenced by the\n spdEndGroupName object.")
# -- spdGroupContentsTable: ordered rules/subgroups that make up a policy group.
spdGroupContentsTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 3), )
if mibBuilder.loadTexts: spdGroupContentsTable.setDescription("This table contains a list of rules and/or subgroups\n contained within a given policy group. For a given value\n of spdGroupContName, the set of rows sharing that value\n forms a 'group'. The rows in a group MUST be processed\n according to the value of the spdGroupContPriority object\n in each row. The processing MUST be executed starting with\n the lowest value of spdGroupContPriority and in ascending\n order thereafter.\n\n If an action is executed as the result of the processing of\n a row in a group, the processing of further rows in that\n group MUST stop. Iterating to the next policy group row by\n finding the next largest spdGroupContPriority object SHALL\n only be done if no actions were run while processing the\n current row for a given packet.")
# Row is indexed by group name then priority (processing order).
spdGroupContentsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 3, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdGroupContName"), (0, "IPSEC-SPD-MIB", "spdGroupContPriority"))
if mibBuilder.loadTexts: spdGroupContentsEntry.setDescription('Defines a given sub-component within a policy group. A\n sub-component is either a rule or another group as\n indicated by spdGroupContComponentType and referenced by\n spdGroupContComponentName.')
spdGroupContName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdGroupContName.setDescription("The administrative name of the group associated with this\n row. A 'group' is formed by all the rows in this table that\n have the same value of this object.")
spdGroupContPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,65535)))
if mibBuilder.loadTexts: spdGroupContPriority.setDescription('The priority (sequence number) of the sub-component in\n a group that this row represents. This value indicates\n the order that each row of this table MUST be processed\n from low to high. For example, a row with a priority of 0\n is processed before a row with a priority of 1, a 1 before\n a 2, etc.')
# Default filter points at spdTrueFilterInstance (always-true filter).
spdGroupContFilter = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 3), VariablePointer().clone((1, 3, 6, 1, 2, 1, 153, 1, 7, 1, 0))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdGroupContFilter.setDescription('spdGroupContFilter points to a filter that is evaluated\n to determine whether the spdGroupContComponentName within\n this row is exercised. Managers can use this object to\n classify groups of rules, or subgroups, together in order to\n achieve a greater degree of control and optimization over\n the execution order of the items within the group. If the\n\n\n\n filter evaluates to false, the rule or subgroup will be\n skipped and the next rule or subgroup will be evaluated\n instead. This value can be used to indicate a scalar or\n row in a table. When indicating a row in a table, this\n value MUST point to the first column instance in that row.\n\n An example usage of this object would be to limit a\n group of rules to executing only when the IP packet\n being processed is designated to be processed by IKE.\n This effectively creates a group of IKE-specific rules.\n\n The following tables and scalars can be pointed to by this\n column. All but diffServMultiFieldClfrTable are defined in\n this MIB:\n\n diffServMultiFieldClfrTable\n spdIpOffsetFilterTable\n spdTimeFilterTable\n spdCompoundFilterTable\n spdTrueFilter\n spdIpsoHeaderFilterTable\n\n Implementations MAY choose to provide support for other\n filter tables or scalars.\n\n If this column is set to a VariablePointer value, which\n references a non-existent row in an otherwise supported\n table, the inconsistentName exception MUST be returned. If\n the table or scalar pointed to by the VariablePointer is\n not supported at all, then an inconsistentValue exception\n MUST be returned.\n\n If, during packet processing, a row in this table is applied\n to a packet and the value of this column in that row\n references a non-existent or non-supported object, the\n packet MUST be dropped.')
spdGroupContComponentType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2,))).clone(namedValues=NamedValues(("group", 1), ("rule", 2),)).clone('rule')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdGroupContComponentType.setDescription('Indicates whether the spdGroupContComponentName object\n is the name of another group defined within the\n spdGroupContentsTable or is the name of a rule defined\n\n\n\n within the spdRuleDefinitionTable.')
spdGroupContComponentName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdGroupContComponentName.setDescription('The name of the policy rule or subgroup contained within\n this row, as indicated by the spdGroupContComponentType\n object.')
spdGroupContLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 6), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdGroupContLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem,\n this object SHOULD have a zero value.')
spdGroupContStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 7), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdGroupContStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdGroupContRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 3, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdGroupContRowStatus.setDescription("This object indicates the conceptual status of this row.\n\n\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n This object MUST NOT be set to active until the row to\n which the spdGroupContComponentName points to exists and is\n active.\n\n If active, this object MUST remain active unless one of the\n following two conditions are met:\n\n I. No active row in spdEndpointToGroupTable exists that\n references this row's group (i.e., indicate this row's\n spdGroupContName).\n\n II. Or at least one other active row in this table has a\n matching spdGroupContName.\n\n If neither condition is met, an attempt to set this row to\n something other than active MUST result in an\n inconsistentValue error.")
# -- spdRuleDefinitionTable: binds a filter pointer to an action pointer.
spdRuleDefinitionTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 4), )
if mibBuilder.loadTexts: spdRuleDefinitionTable.setDescription('This table defines a rule by associating a filter\n or a set of filters to an action to be executed.')
spdRuleDefinitionEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 4, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdRuleDefName"))
if mibBuilder.loadTexts: spdRuleDefinitionEntry.setDescription('A row defining a particular rule definition. A rule\n definition binds a filter pointer to an action pointer.')
spdRuleDefName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdRuleDefName.setDescription('spdRuleDefName is the administratively assigned name of\n the rule referred to by the spdGroupContComponentName\n object.')
spdRuleDefDescription = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefDescription.setDescription('A user defined string. This field MAY be used for\n administrative tracking purposes.')
spdRuleDefFilter = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 3), VariablePointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefFilter.setDescription('spdRuleDefFilter points to a filter that is used to\n evaluate whether the action associated with this row is\n executed or not. The action will only execute if the\n filter referenced by this object evaluates to TRUE after\n first applying any negation required by the\n spdRuleDefFilterNegated object.\n\n The following tables and scalars can be pointed to by this\n column. All but diffServMultiFieldClfrTable are defined in\n this MIB. Implementations MAY choose to provide support\n for other filter tables or scalars as well:\n\n diffServMultiFieldClfrTable\n\n\n\n spdIpOffsetFilterTable\n spdTimeFilterTable\n spdCompoundFilterTable\n spdTrueFilter\n\n If this column is set to a VariablePointer value, which\n references a non-existent row in an otherwise supported\n table, the inconsistentName exception MUST be returned. If\n the table or scalar pointed to by the VariablePointer is\n not supported at all, then an inconsistentValue exception\n MUST be returned.\n\n If, during packet processing, this column has a value that\n references a non-existent or non-supported object, the\n packet MUST be dropped.')
# Defaults to 'false' (filter result is not negated).
spdRuleDefFilterNegated = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 4), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefFilterNegated.setDescription('spdRuleDefFilterNegated specifies whether or not the results of\n the filter referenced by the spdRuleDefFilter object is\n negated.')
spdRuleDefAction = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 5), VariablePointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefAction.setDescription('This column points to the action to be taken. It MAY,\n but is not limited to, point to a row in one of the\n following tables:\n\n spdCompoundActionTable\n ipsaSaPreconfiguredActionTable\n ipiaIkeActionTable\n ipiaIpsecActionTable\n\n It MAY also point to one of the scalar objects beneath\n spdStaticActions.\n\n If this object is set to a pointer to a row in an\n unsupported (or unknown) table, an inconsistentValue\n\n\n\n error MUST be returned.\n\n If this object is set to point to a non-existent row in an\n otherwise supported table, an inconsistentName error MUST\n be returned.\n\n If, during packet processing, this column has a value that\n references a non-existent or non-supported object, the\n packet MUST be dropped.')
# Defaults to 'enabled' per the SpdAdminStatus TC.
spdRuleDefAdminStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 6), SpdAdminStatus().clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefAdminStatus.setDescription("Indicates whether the current rule definition is considered\n active. If the value is enabled, the rule MUST be evaluated\n when processing packets. If the value is disabled, the\n packet processing MUST continue as if this rule's filter\n had effectively failed.")
spdRuleDefLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 7), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdRuleDefLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdRuleDefStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a\n storage type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n\n\n\n to be writable.')
spdRuleDefRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 4, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdRuleDefRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n This object MUST NOT be set to active until the containing\n conditions, filters, and actions have been defined. Once\n active, it MUST remain active until no active\n policyGroupContents entries are referencing it. A failed\n attempt to do so MUST return an inconsistentValue error.')
# -- spdCompoundFilterTable: AND/OR composition of sub-filters.
spdCompoundFilterTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 5), )
if mibBuilder.loadTexts: spdCompoundFilterTable.setDescription('A table defining compound filters and their associated\n parameters. A row in this table can be pointed to by a\n spdRuleDefFilter object.')
spdCompoundFilterEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 5, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdCompFiltName"))
if mibBuilder.loadTexts: spdCompoundFilterEntry.setDescription('An entry in the spdCompoundFilterTable. Each entry in this\n table represents a compound filter. A filter defined by\n this table is considered to have a TRUE return value if and\n only if:\n\n spdCompFiltLogicType is AND and all of the sub-filters\n associated with it, as defined in the spdSubfiltersTable,\n are all true themselves (after applying any required\n\n\n\n negation, as defined by the ficFilterIsNegated object).\n\n spdCompFiltLogicType is OR and at least one of the\n sub-filters associated with it, as defined in the\n spdSubfiltersTable, is true itself (after applying any\n required negation, as defined by the ficFilterIsNegated\n object.')
spdCompFiltName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdCompFiltName.setDescription('A user definable string. This value is used as an index\n into this table.')
spdCompFiltDescription = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompFiltDescription.setDescription('A user definable string. This field MAY be used for\n your administrative tracking purposes.')
# Defaults to 'and' per the SpdBooleanOperator TC.
spdCompFiltLogicType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 3), SpdBooleanOperator().clone('and')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompFiltLogicType.setDescription('Indicates whether the sub-component filters of this\n compound filter are functionally ANDed or ORed together.')
spdCompFiltLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdCompFiltLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdCompFiltStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompFiltStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a\n storage type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdCompFiltRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 5, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompFiltRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n Once active, it MUST NOT have its value changed if any\n active rows in the spdRuleDefinitionTable are currently\n pointing at this row.')
# -- spdSubfiltersTable: members of each compound filter, ordered by priority.
spdSubfiltersTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 6), )
if mibBuilder.loadTexts: spdSubfiltersTable.setDescription('This table defines a list of filters contained within a\n given compound filter defined in the\n spdCompoundFilterTable.')
# Row is indexed by the parent compound filter name plus priority.
spdSubfiltersEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 6, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdCompFiltName"), (0, "IPSEC-SPD-MIB", "spdSubFiltPriority"))
if mibBuilder.loadTexts: spdSubfiltersEntry.setDescription('An entry in the spdSubfiltersTable. There is an entry in\n this table for each sub-filter of all compound filters\n present in the spdCompoundFilterTable.')
spdSubFiltPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,65535)))
if mibBuilder.loadTexts: spdSubFiltPriority.setDescription('The priority of a given filter within a compound filter.\n The order of execution is from lowest to highest priority\n value (i.e., priority 0 before priority 1, 1 before 2,\n etc.). Implementations MAY choose to follow this ordering,\n as set by the manager that created the rows. This can allow\n a manager to intelligently construct filter lists such that\n faster filters are evaluated first.')
spdSubFiltSubfilter = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 2), VariablePointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubFiltSubfilter.setDescription('The OID of the contained filter. The value of this\n object is a VariablePointer that references the filter to\n be included in this compound filter.\n\n The following tables and scalars can be pointed to by this\n column. All but diffServMultiFieldClfrTable are defined in\n this MIB. Implementations MAY choose to provide support\n for other filter tables or scalars as well:\n\n diffServMultiFieldClfrTable\n spdIpsoHeaderFilterTable\n spdIpOffsetFilterTable\n spdTimeFilterTable\n spdCompoundFilterTable\n spdTrueFilter\n\n If this column is set to a VariablePointer value that\n references a non-existent row in an otherwise supported\n table, the inconsistentName exception MUST be returned. If\n the table or scalar pointed to by the VariablePointer is\n not supported at all, then an inconsistentValue exception\n MUST be returned.\n\n If, during packet processing, this column has a value that\n references a non-existent or non-supported object, the\n packet MUST be dropped.')
# Defaults to 'false' (sub-filter result is not negated).
spdSubFiltSubfilterIsNegated = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 3), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubFiltSubfilterIsNegated.setDescription('Indicates whether or not the result of applying this sub-filter\n is negated.')
spdSubFiltLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdSubFiltLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n\n\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdSubFiltStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubFiltStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a\n storage type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdSubFiltRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 6, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubFiltRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n This object cannot be made active until a filter\n referenced by the spdSubFiltSubfilter object is both\n defined and active. An attempt to do so MUST result in\n an inconsistentValue error.\n\n If active, this object MUST remain active unless one of the\n following two conditions are met:\n\n I. No active row in the SpdCompoundFilterTable exists\n that has a matching spdCompFiltName.\n\n II. Or, at least one other active row in this table has a\n matching spdCompFiltName.\n\n If neither condition is met, an attempt to set this row to\n something other than active MUST result in an\n inconsistentValue error.')
# -- Static filters: spdTrueFilter always evaluates true; its instance OID
# -- (…,1,7,1,0) is the default value of spdGroupContFilter.
spdStaticFilters = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 1, 7))
spdTrueFilter = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 7, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdTrueFilter.setDescription('This scalar indicates a (automatic) true result for\n a filter. That is, this is a filter that is always\n true; it is useful for adding as a default filter for a\n default action or a set of actions.')
spdTrueFilterInstance = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 1, 7, 1, 0))
# -- spdIpOffsetFilterTable: byte-offset comparison filters on raw IP packets.
spdIpOffsetFilterTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 8), )
if mibBuilder.loadTexts: spdIpOffsetFilterTable.setDescription('This table contains a list of filter definitions to be\n used within the spdRuleDefinitionTable or the\n spdSubfiltersTable.\n\n This type of filter is used to compare an administrator\n specified octet string to the octets at a particular\n location in a packet.')
spdIpOffsetFilterEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 8, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdIpOffFiltName"))
if mibBuilder.loadTexts: spdIpOffsetFilterEntry.setDescription('A definition of a particular filter.')
spdIpOffFiltName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdIpOffFiltName.setDescription('The administrative name for this filter.')
spdIpOffFiltOffset = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0,65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpOffFiltOffset.setDescription("This is the byte offset from the front of the entire IP\n packet where the value or arithmetic comparison is done. A\n value of '0' indicates the first byte of the packet header.\n If this value is greater than the length of the packet, the\n filter represented by this row should be considered to\n fail.")
# spdIpOffFiltType: comparison operator applied to the packet bytes at
# spdIpOffFiltOffset (equal/notEqual/arithmetic <,>=,>,<=).
# FIX: in the original generated source the setDescription() string literal
# was split across two physical lines mid-literal, which is a Python syntax
# error; the statement is rejoined into a single valid line here. The string
# content itself is unchanged.
spdIpOffFiltType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6,))).clone(namedValues=NamedValues(("equal", 1), ("notEqual", 2), ("arithmeticLess", 3), ("arithmeticGreaterOrEqual", 4), ("arithmeticGreater", 5), ("arithmeticLessOrEqual", 6),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpOffFiltType.setDescription("This defines the various tests that are used when\n evaluating a given filter.\n\n The various tests definable in this table are as follows:\n\n equal:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', matches\n\n\n\n a value in the packet starting at the given offset in\n the packet and comparing the entire OCTET STRING of\n 'spdIpOffFiltValue'. Any values compared this way are\n assumed to be unsigned integer values in network byte\n order of the same length as 'spdIpOffFiltValue'.\n\n notEqual:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', does\n not match a value in the packet starting at the given\n offset in the packet and comparing to the entire OCTET\n STRING of 'spdIpOffFiltValue'. Any values compared\n this way are assumed to be unsigned integer values in\n network byte order of the same length as\n 'spdIpOffFiltValue'.\n\n arithmeticLess:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', is\n arithmetically less than ('<') the value starting at\n the given offset within the packet. The value in the\n packet is assumed to be an unsigned integer in network\n byte order of the same length as 'spdIpOffFiltValue'.\n\n arithmeticGreaterOrEqual:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', is\n arithmetically greater than or equal to ('>=') the\n value starting at the given offset within the packet.\n The value in the packet is assumed to be an unsigned\n integer in network byte order of the same length as\n 'spdIpOffFiltValue'.\n\n arithmeticGreater:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', is\n arithmetically greater than ('>') the value starting at\n the given offset within the packet. The value in the\n packet is assumed to be an unsigned integer in network\n byte order of the same length as 'spdIpOffFiltValue'.\n\n arithmeticLessOrEqual:\n - Tests if the OCTET STRING, 'spdIpOffFiltValue', is\n arithmetically less than or equal to ('<=') the value\n starting at the given offset within the packet. The\n value in the packet is assumed to be an unsigned\n integer in network byte order of the same length as\n 'spdIpOffFiltValue'.")
# -- Remaining columns of spdIpOffsetFilterTable.
spdIpOffFiltValue = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1,1024))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpOffFiltValue.setDescription('spdIpOffFiltValue is used for match comparisons of a\n packet at spdIpOffFiltOffset.')
spdIpOffFiltLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdIpOffFiltLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdIpOffFiltStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 6), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpOffFiltStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a\n storage type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdIpOffFiltRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 8, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpOffFiltRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n If active, this object MUST remain active if it is\n\n\n\n referenced by an active row in another table. An attempt\n to set it to anything other than active while it is\n referenced by an active row in another table MUST result in\n an inconsistentValue error.')
# --- spdTimeFilterTable (OID ...153.1.9) ---
# Filters that enable/disable policies based on a valid time range. A row is
# 'true' only when ALL of its time-matching columns are true (logical AND).
# Indexed by spdTimeFiltName (not-accessible index column, hence no MaxAccess).
spdTimeFilterTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 9), )
if mibBuilder.loadTexts: spdTimeFilterTable.setDescription('Defines a table of filters that can be used to\n effectively enable or disable policies based on a valid\n time range.')
spdTimeFilterEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 9, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdTimeFiltName"))
if mibBuilder.loadTexts: spdTimeFilterEntry.setDescription("A row describing a given time frame for which a policy\n is filtered on to activate or deactivate the rule.\n\n If all the column objects in a row are true for the current\n time, the row evaluates as 'true'. More explicitly, the\n time matching column objects in a row MUST be logically\n ANDed together to form the boolean true/false for the row.")
spdTimeFiltName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdTimeFiltName.setDescription('An administratively assigned name for this filter.')
# Default period 'THISANDPRIOR/THISANDFUTURE' means "always true" until set.
spdTimeFiltPeriod = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 2), SpdTimePeriod().clone('THISANDPRIOR/THISANDFUTURE')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltPeriod.setDescription("The valid time period for this filter. This column is\n considered 'true' if the current time is within the range of\n this object.")
# spdTimeFiltMonthOfYearMask (OID ...153.1.9.1.3): BITS mask of acceptable
# months; 'true' when the current month's bit is set.
# NOTE(review): the generated code chained two .clone(namedValues=...) calls
# carrying the *identical* january..december mapping; the duplicate clone was
# pure redundant work, so a single clone is kept — same resulting syntax.
spdTimeFiltMonthOfYearMask = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 3), Bits().clone(namedValues=NamedValues(("january", 0), ("february", 1), ("march", 2), ("april", 3), ("may", 4), ("june", 5), ("july", 6), ("august", 7), ("september", 8), ("october", 9), ("november", 10), ("december", 11),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltMonthOfYearMask.setDescription("A bit mask that indicates acceptable months of the year.\n This column evaluates to 'true' if the current month's bit\n is set.")
# spdTimeFiltDayOfMonthMask (OID ...153.1.9.1.4): fixed-length 8-octet (64-bit)
# mask — bits 1-31 count days from the start of the month, bits 32-62 from the
# end; last two bits must be zero. Default hex fffffffffffffffe = every day.
spdTimeFiltDayOfMonthMask = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8,8)).setFixedLength(8).clone(hexValue="fffffffffffffffe")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltDayOfMonthMask.setDescription("Defines which days of the month the current time is\n valid for. It is a sequence of 64 BITS, where each BIT\n represents a corresponding day of the month in forward or\n reverse order. Starting from the left-most bit, the first\n 31 bits identify the day of the month, counting from the\n beginning of the month. The following 31 bits (bits 32-62)\n indicate the day of the month, counting from the end of the\n\n\n\n month. For months with fewer than 31 days, the bits that\n correspond to the non-existent days of that month are\n ignored (e.g., for non-leap year Februarys, bits 29-31 and\n 60-62 are ignored).\n\n This column evaluates to 'true' if the current day of the\n month's bit is set.\n\n For example, a value of 0X'80 00 00 01 00 00 00 00'\n indicates that this column evaluates to true on the first\n and last days of the month.\n\n The last two bits in the string MUST be zero.")
# spdTimeFiltDayOfWeekMask (OID ...153.1.9.1.5): BITS mask of acceptable days
# of the week; 'true' when the current weekday's bit is set.
# NOTE(review): the generated code chained two .clone(namedValues=...) calls;
# only the second clone's NamedValues takes effect, so the redundant first
# clone was dropped and the effective (second) mapping retained. Both mappings
# assign the same bit numbers (sunday=0 .. saturday=6), only listed in a
# different order, so the wire syntax is unchanged.
spdTimeFiltDayOfWeekMask = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 5), Bits().clone(namedValues=NamedValues(("monday", 1), ("tuesday", 2), ("wednesday", 3), ("thursday", 4), ("friday", 5), ("saturday", 6), ("sunday", 0),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltDayOfWeekMask.setDescription("A bit mask that defines which days of the week that the current\n time is valid for. This column evaluates to 'true' if the\n current day of the week's bit is set.")
# --- spdTimeFilterTable, remaining columns (OID ...153.1.9.1.6-9) ---
# Time-of-day window (date parts of the SpdTimePeriod default are ignored;
# default 000000/240000 covers the whole day), plus the standard LastChanged /
# StorageType / RowStatus housekeeping columns.
spdTimeFiltTimeOfDayMask = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 6), SpdTimePeriod().clone('00000000T000000/00000000T240000')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltTimeOfDayMask.setDescription("Indicates the start and end time of the day for which this\n filter evaluates to true. The date portions of the\n spdTimePeriod TC are ignored for purposes of evaluating this\n mask, and only the time-specific portions are used.\n\n This column evaluates to 'true' if the current time of day\n is within the range of the start and end times of the day\n indicated by this object.")
spdTimeFiltLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 7), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdTimeFiltLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdTimeFiltStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdTimeFiltRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 9, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdTimeFiltRowStatus.setDescription('This object indicates the conceptual status of this\n row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n If active, this object MUST remain active if it is\n referenced by an active row in another table. An attempt\n to set it to anything other than active while it is\n referenced by an active row in another table MUST result in\n an inconsistentValue error.')
# --- spdIpsoHeaderFilterTable (OID ...153.1.10) ---
# IPSO (RFC 1108) header filters: which IPSO field to test (classification
# and/or protection authority) and the required field value. Indexed by
# spdIpsoHeadFiltName. Enumeration values below come from RFC 1108, which is
# why protectionAuthority legitimately uses the value 0.
spdIpsoHeaderFilterTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 10), )
if mibBuilder.loadTexts: spdIpsoHeaderFilterTable.setDescription('This table contains a list of IPSO header filter\n definitions to be used within the spdRuleDefinitionTable or\n the spdSubfiltersTable. IPSO headers and their values are\n described in RFC 1108.')
spdIpsoHeaderFilterEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 10, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdIpsoHeadFiltName"))
if mibBuilder.loadTexts: spdIpsoHeaderFilterEntry.setDescription('A definition of a particular filter.')
spdIpsoHeadFiltName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdIpsoHeadFiltName.setDescription('The administrative name for this filter.')
spdIpsoHeadFiltType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 2), Bits().clone(namedValues=NamedValues(("classificationLevel", 0), ("protectionAuthority", 1),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpsoHeadFiltType.setDescription('This object indicates which of the IPSO header field a\n packet is filtered on for this row. If this object is set\n to classification(0), the spdIpsoHeadFiltClassification\n\n\n\n object indicates how the packet is filtered. If this object\n is set to protectionAuthority(1), the\n spdIpsoHeadFiltProtectionAuth object indicates how the\n packet is filtered.')
spdIpsoHeadFiltClassification = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(61, 90, 150, 171,))).clone(namedValues=NamedValues(("topSecret", 61), ("secret", 90), ("confidential", 150), ("unclassified", 171),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpsoHeadFiltClassification.setDescription("This object indicates the IPSO classification header field\n value that the packet MUST have for this row to evaluate to\n 'true'.\n\n The values of these enumerations are defined by RFC 1108.")
spdIpsoHeadFiltProtectionAuth = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4,))).clone(namedValues=NamedValues(("genser", 0), ("siopesi", 1), ("sci", 2), ("nsa", 3), ("doe", 4),))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpsoHeadFiltProtectionAuth.setDescription("This object indicates the IPSO protection authority header\n field value that the packet MUST have for this row to\n evaluate to 'true'.\n\n The values of these enumerations are defined by RFC 1108.\n Hence the reason the SMIv2 convention of not using 0 in\n enumerated lists is violated here.")
spdIpsoHeadFiltLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdIpsoHeadFiltLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdIpsoHeadFiltStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 6), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpsoHeadFiltStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdIpsoHeadFiltRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 10, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdIpsoHeadFiltRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n However, this object MUST NOT be set to active if the\n requirements of the spdIpsoHeadFiltType object are not met.\n Specifically, if the spdIpsoHeadFiltType bit for\n classification(0) is set, the spdIpsoHeadFiltClassification\n column MUST have a valid value for the row status to be set\n to active. If the spdIpsoHeadFiltType bit for\n protectionAuthority(1) is set, the\n spdIpsoHeadFiltProtectionAuth column MUST have a valid\n value for the row status to be set to active.\n\n If active, this object MUST remain active if it is\n referenced by an active row in another table. An attempt\n to set it to anything other than active while it is\n referenced by an active row in another table MUST result in\n an inconsistentValue error.')
# --- spdCompoundActionTable (OID ...153.1.11) ---
# Groups multiple sub-actions (rows of spdSubactionsTable keyed by
# spdCompActName) under one rule action; spdCompActExecutionStrategy (default
# doUntilSuccess) governs how sub-action results are combined.
spdCompoundActionTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 11), )
if mibBuilder.loadTexts: spdCompoundActionTable.setDescription('Table used to allow multiple actions to be associated\n with a rule. It uses the spdSubactionsTable to do this.\n The rows from spdSubactionsTable that are partially indexed\n by spdCompActName form the set of compound actions to be\n performed. The spdCompActExecutionStrategy column in this\n table indicates how those actions are processed.')
spdCompoundActionEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 11, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdCompActName"))
if mibBuilder.loadTexts: spdCompoundActionEntry.setDescription('A row in the spdCompoundActionTable.')
spdCompActName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 11, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1,32)))
if mibBuilder.loadTexts: spdCompActName.setDescription('This is an administratively assigned name of this\n compound action.')
spdCompActExecutionStrategy = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3,))).clone(namedValues=NamedValues(("doAll", 1), ("doUntilSuccess", 2), ("doUntilFailure", 3),)).clone('doUntilSuccess')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompActExecutionStrategy.setDescription('This object indicates how the sub-actions are executed\n based on the success of the actions as they finish\n executing.\n\n\n\n doAll - run each sub-action regardless of the\n exit status of the previous action.\n This parent action is always\n considered to have acted successfully.\n\n doUntilSuccess - run each sub-action until one succeeds,\n at which point stop processing the\n sub-actions within this parent\n compound action. If one of the\n sub-actions did execute successfully,\n this parent action is also considered\n to have executed successfully.\n\n doUntilFailure - run each sub-action until one fails,\n at which point stop processing the\n sub-actions within this compound\n action. If any sub-action fails, the\n result of this parent action is\n considered to have failed.')
spdCompActLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 11, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdCompActLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdCompActStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 11, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompActStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdCompActRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 11, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdCompActRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n Once a row in the spdCompoundActionTable has been made\n active, this object MUST NOT be set to destroy without\n first destroying all the contained rows listed in the\n spdSubactionsTable.')
# --- spdSubactionsTable (OID ...153.1.12) ---
# The sub-actions inside a compound action, doubly indexed by
# (spdCompActName, spdSubActPriority); lowest priority executes first.
# spdSubActSubActionName is a RowPointer-style reference to the action row.
spdSubactionsTable = MibTable((1, 3, 6, 1, 2, 1, 153, 1, 12), )
if mibBuilder.loadTexts: spdSubactionsTable.setDescription('This table contains a list of the sub-actions within a\n given compound action. Compound actions executing these\n actions MUST execute them in series based on the\n spdSubActPriority value, with the lowest value executing\n first.')
spdSubactionsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 153, 1, 12, 1), ).setIndexNames((0, "IPSEC-SPD-MIB", "spdCompActName"), (0, "IPSEC-SPD-MIB", "spdSubActPriority"))
if mibBuilder.loadTexts: spdSubactionsEntry.setDescription('A row containing a reference to a given compound-action\n sub-action.')
spdSubActPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 12, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0,65535)))
if mibBuilder.loadTexts: spdSubActPriority.setDescription('The priority of a given sub-action within a compound\n action. The order in which sub-actions MUST be executed\n are based on the value from this column, with the lowest\n numeric value executing first (i.e., priority 0 before\n priority 1, 1 before 2, etc.).')
spdSubActSubActionName = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 12, 1, 2), VariablePointer()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubActSubActionName.setDescription('This column points to the action to be taken. It MAY,\n but is not limited to, point to a row in one of the\n following tables:\n\n spdCompoundActionTable - Allowing recursion\n ipsaSaPreconfiguredActionTable\n ipiaIkeActionTable\n ipiaIpsecActionTable\n\n It MAY also point to one of the scalar objects beneath\n spdStaticActions.\n\n If this object is set to a pointer to a row in an\n unsupported (or unknown) table, an inconsistentValue\n error MUST be returned.\n\n If this object is set to point to a non-existent row in\n an otherwise supported table, an inconsistentName error\n MUST be returned.\n\n If, during packet processing, this column has a value that\n references a non-existent or non-supported object, the\n packet MUST be dropped.')
spdSubActLastChanged = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 12, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdSubActLastChanged.setDescription('The value of sysUpTime when this row was last modified\n or created either through SNMP SETs or by some other\n external means.\n\n If this row has not been modified since the last\n re-initialization of the network management subsystem, this\n object SHOULD have a zero value.')
spdSubActStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 12, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubActStorageType.setDescription('The storage type for this row. Rows in this table that\n were created through an external process MAY have a storage\n type of readOnly or permanent.\n\n For a storage type of permanent, none of the columns have\n to be writable.')
spdSubActRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 153, 1, 12, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: spdSubActRowStatus.setDescription('This object indicates the conceptual status of this row.\n\n The value of this object has no effect on whether other\n objects in this conceptual row can be modified.\n\n If active, this object MUST remain active unless one of the\n following two conditions are met. An attempt to set it to\n anything other than active while the following conditions\n are not met MUST result in an inconsistentValue error. The\n two conditions are:\n\n I. No active row in the spdCompoundActionTable exists\n which has a matching spdCompActName.\n\n II. Or, at least one other active row in this table has a\n matching spdCompActName.')
# --- spdStaticActions (OID ...153.1.13) ---
# Four read-only scalars acting as well-known action OIDs that rules can point
# at: drop / accept, each with and without logging. The Integer32(1..1)
# syntax makes each scalar a constant placeholder.
spdStaticActions = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 1, 13))
spdDropAction = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 13, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdDropAction.setDescription('This scalar indicates that a packet MUST be dropped\n and SHOULD NOT have action/packet logging.')
spdDropActionLog = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 13, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdDropActionLog.setDescription('This scalar indicates that a packet MUST be dropped\n and SHOULD have action/packet logging.')
spdAcceptAction = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 13, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdAcceptAction.setDescription('This Scalar indicates that a packet MUST be accepted\n (pass-through) and SHOULD NOT have action/packet logging.')
spdAcceptActionLog = MibScalar((1, 3, 6, 1, 2, 1, 153, 1, 13, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: spdAcceptActionLog.setDescription('This scalar indicates that a packet MUST be accepted\n (pass-through) and SHOULD have action/packet logging.')
# --- Notification objects and types (OID ...153.2) ---
# accessible-for-notify variables carried in the two NOTIFICATION-TYPEs below:
# spdActionNotification (an action with logging fired) and
# spdPacketNotification (a packet traversed a logged SA; additionally carries
# spdPacketPart, a size-limited prefix of the triggering packet).
spdNotificationVariables = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 2, 1))
spdNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 2, 0))
spdActionExecuted = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 1), VariablePointer()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdActionExecuted.setDescription('Points to the action instance that was executed that\n resulted in the notification being sent.')
spdIPEndpointAddType = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 2), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPEndpointAddType.setDescription('Contains the address type for the interface that the\n notification triggering packet is passing through.')
spdIPEndpointAddress = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 3), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPEndpointAddress.setDescription('Contains the interface address for the interface that the\n notification triggering packet is passing through.\n\n The format of this object is specified by the\n spdIPEndpointAddType object.')
spdIPSourceType = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 4), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPSourceType.setDescription('Contains the source address type of the packet that\n\n\n\n triggered the notification.')
spdIPSourceAddress = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 5), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPSourceAddress.setDescription('Contains the source address of the packet that\n triggered the notification.\n\n The format of this object is specified by the\n spdIPSourceType object.')
spdIPDestinationType = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 6), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPDestinationType.setDescription('Contains the destination address type of the packet\n that triggered the notification.')
spdIPDestinationAddress = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 7), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdIPDestinationAddress.setDescription('Contains the destination address of the packet that\n triggered the notification.\n\n The format of this object is specified by the\n spdIPDestinationType object.')
spdPacketDirection = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 8), IfDirection()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdPacketDirection.setDescription('Indicates if the packet that triggered the action in\n questions was ingress (inbound) or egress (outbound).')
spdPacketPart = MibScalar((1, 3, 6, 1, 2, 1, 153, 2, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0,65535))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: spdPacketPart.setDescription("spdPacketPart is the front part of the full IP packet that\n triggered this notification. The initial size limit is\n determined by the smaller of the size, indicated by:\n\n I. The value of the object with the TC syntax\n 'SpdIPPacketLogging' that indicated the packet SHOULD be\n logged and\n\n II. The size of the triggering packet.\n\n The final limit is determined by the SNMP packet size when\n sending the notification. The maximum size that can be\n included will be the smaller of the initial size, given the\n above, and the length that will fit in a single SNMP\n notification packet after the rest of the notification's\n objects and any other necessary packet data (headers encoding,\n etc.) have been included in the packet.")
spdActionNotification = NotificationType((1, 3, 6, 1, 2, 1, 153, 2, 0, 1)).setObjects(*(("IPSEC-SPD-MIB", "spdActionExecuted"), ("IPSEC-SPD-MIB", "spdIPEndpointAddType"), ("IPSEC-SPD-MIB", "spdIPEndpointAddress"), ("IPSEC-SPD-MIB", "spdIPSourceType"), ("IPSEC-SPD-MIB", "spdIPSourceAddress"), ("IPSEC-SPD-MIB", "spdIPDestinationType"), ("IPSEC-SPD-MIB", "spdIPDestinationAddress"), ("IPSEC-SPD-MIB", "spdPacketDirection"),))
if mibBuilder.loadTexts: spdActionNotification.setDescription('Notification that an action was executed by a rule.\n Only actions with logging enabled will result in this\n notification getting sent. The object includes the\n spdActionExecuted object, which will indicate which action\n was executed within the scope of the rule. Additionally,\n the spdIPSourceType, spdIPSourceAddress,\n spdIPDestinationType, and spdIPDestinationAddress objects\n are included to indicate the packet source and destination\n of the packet that triggered the action. Finally, the\n spdIPEndpointAddType, spdIPEndpointAddress, and\n spdPacketDirection objects indicate which interface the\n executed action was associated with, and if the packet was\n ingress or egress through the endpoint.\n\n A spdActionNotification SHOULD be limited to a maximum of\n one notification sent per minute for any action\n notifications that do not have any other configuration\n controlling their send rate.\n\n\n\n Note that compound actions with multiple executed\n sub-actions may result in multiple notifications being sent\n from a single rule execution.')
spdPacketNotification = NotificationType((1, 3, 6, 1, 2, 1, 153, 2, 0, 2)).setObjects(*(("IPSEC-SPD-MIB", "spdActionExecuted"), ("IPSEC-SPD-MIB", "spdIPEndpointAddType"), ("IPSEC-SPD-MIB", "spdIPEndpointAddress"), ("IPSEC-SPD-MIB", "spdIPSourceType"), ("IPSEC-SPD-MIB", "spdIPSourceAddress"), ("IPSEC-SPD-MIB", "spdIPDestinationType"), ("IPSEC-SPD-MIB", "spdIPDestinationAddress"), ("IPSEC-SPD-MIB", "spdPacketDirection"), ("IPSEC-SPD-MIB", "spdPacketPart"),))
if mibBuilder.loadTexts: spdPacketNotification.setDescription("Notification that a packet passed through a Security\n Association (SA). Only SAs created by actions with packet\n logging enabled will result in this notification getting\n sent. The objects sent MUST include the spdActionExecuted,\n which will indicate which action was executed within the\n scope of the rule. Additionally, the spdIPSourceType,\n spdIPSourceAddress, spdIPDestinationType, and\n spdIPDestinationAddress objects MUST be included to\n indicate the packet source and destination of the packet\n that triggered the action. The spdIPEndpointAddType,\n spdIPEndpointAddress, and spdPacketDirection objects are\n included to indicate which endpoint the packet was\n associated with. Finally, spdPacketPart is included to\n enable sending a variable sized part of the front of the\n packet with the size dependent on the value of the object of\n TC syntax 'SpdIPPacketLogging', which indicated that logging\n should be done.\n\n A spdPacketNotification SHOULD be limited to a maximum of\n one notification sent per minute for any action\n notifications that do not have any other configuration\n controlling their send rate.\n\n An action notification SHOULD be limited to a maximum of\n one notification sent per minute for any action\n notifications that do not have any other configuration\n controlling their send rate.")
# --- Conformance section (OID ...153.3) ---
# Module-compliance statements (full read-create vs read-only, plus logging)
# and the object groups they cite. Group membership lists mirror the columns
# defined earlier in this module.
spdCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 3, 1))
spdGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 153, 3, 2))
spdRuleFilterFullCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 153, 3, 1, 1)).setObjects(*(("IPSEC-SPD-MIB", "spdEndpointGroup"), ("IPSEC-SPD-MIB", "spdGroupContentsGroup"), ("IPSEC-SPD-MIB", "spdRuleDefinitionGroup"), ("IPSEC-SPD-MIB", "spdStaticFilterGroup"), ("IPSEC-SPD-MIB", "spdStaticActionGroup"), ("IPSEC-SPD-MIB", "diffServMIBMultiFieldClfrGroup"), ("IPSEC-SPD-MIB", "spdIpsecSystemPolicyNameGroup"), ("IPSEC-SPD-MIB", "spdCompoundFilterGroup"), ("IPSEC-SPD-MIB", "spdIPOffsetFilterGroup"), ("IPSEC-SPD-MIB", "spdTimeFilterGroup"), ("IPSEC-SPD-MIB", "spdIpsoHeaderFilterGroup"), ("IPSEC-SPD-MIB", "spdCompoundActionGroup"),))
if mibBuilder.loadTexts: spdRuleFilterFullCompliance.setDescription('The compliance statement for SNMP entities that include\n an IPsec MIB implementation with Endpoint, Rules, and\n filters support.\n\n When this MIB is implemented with support for read-create,\n then such an implementation can claim full compliance. Such\n devices can then be both monitored and configured with this\n MIB.')
spdLoggingCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 153, 3, 1, 2)).setObjects(*(("IPSEC-SPD-MIB", "spdActionLoggingObjectGroup"), ("IPSEC-SPD-MIB", "spdActionNotificationGroup"),))
if mibBuilder.loadTexts: spdLoggingCompliance.setDescription('The compliance statement for SNMP entities that support\n sending notifications when actions are invoked.')
spdRuleFilterReadOnlyCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 153, 3, 1, 3)).setObjects(*(("IPSEC-SPD-MIB", "spdEndpointGroup"), ("IPSEC-SPD-MIB", "spdGroupContentsGroup"), ("IPSEC-SPD-MIB", "spdRuleDefinitionGroup"), ("IPSEC-SPD-MIB", "spdStaticFilterGroup"), ("IPSEC-SPD-MIB", "spdStaticActionGroup"), ("IPSEC-SPD-MIB", "diffServMIBMultiFieldClfrGroup"), ("IPSEC-SPD-MIB", "spdIpsecSystemPolicyNameGroup"), ("IPSEC-SPD-MIB", "spdCompoundFilterGroup"), ("IPSEC-SPD-MIB", "spdIPOffsetFilterGroup"), ("IPSEC-SPD-MIB", "spdTimeFilterGroup"), ("IPSEC-SPD-MIB", "spdIpsoHeaderFilterGroup"), ("IPSEC-SPD-MIB", "spdCompoundActionGroup"),))
if mibBuilder.loadTexts: spdRuleFilterReadOnlyCompliance.setDescription('The compliance statement for SNMP entities that include\n an IPsec MIB implementation with Endpoint, Rules, and\n filters support.\n\n If this MIB is implemented without support for read-create\n (i.e., in read-only), it is not in full compliance, but it\n can claim read-only compliance. Such a device can then be\n monitored, but cannot be configured with this MIB.')
spdEndpointGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 1)).setObjects(*(("IPSEC-SPD-MIB", "spdEndGroupName"), ("IPSEC-SPD-MIB", "spdEndGroupLastChanged"), ("IPSEC-SPD-MIB", "spdEndGroupStorageType"), ("IPSEC-SPD-MIB", "spdEndGroupRowStatus"),))
if mibBuilder.loadTexts: spdEndpointGroup.setDescription('This group is made up of objects from the IPsec Policy\n Endpoint Table.')
spdGroupContentsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 2)).setObjects(*(("IPSEC-SPD-MIB", "spdGroupContComponentType"), ("IPSEC-SPD-MIB", "spdGroupContFilter"), ("IPSEC-SPD-MIB", "spdGroupContComponentName"), ("IPSEC-SPD-MIB", "spdGroupContLastChanged"), ("IPSEC-SPD-MIB", "spdGroupContStorageType"), ("IPSEC-SPD-MIB", "spdGroupContRowStatus"),))
if mibBuilder.loadTexts: spdGroupContentsGroup.setDescription('This group is made up of objects from the IPsec Policy\n Group Contents Table.')
spdIpsecSystemPolicyNameGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 3)).setObjects(*(("IPSEC-SPD-MIB", "spdIngressPolicyGroupName"), ("IPSEC-SPD-MIB", "spdEgressPolicyGroupName"),))
if mibBuilder.loadTexts: spdIpsecSystemPolicyNameGroup.setDescription('This group is made up of objects represent the System\n Policy Group Names.')
spdRuleDefinitionGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 4)).setObjects(*(("IPSEC-SPD-MIB", "spdRuleDefDescription"), ("IPSEC-SPD-MIB", "spdRuleDefFilter"), ("IPSEC-SPD-MIB", "spdRuleDefFilterNegated"), ("IPSEC-SPD-MIB", "spdRuleDefAction"), ("IPSEC-SPD-MIB", "spdRuleDefAdminStatus"), ("IPSEC-SPD-MIB", "spdRuleDefLastChanged"), ("IPSEC-SPD-MIB", "spdRuleDefStorageType"), ("IPSEC-SPD-MIB", "spdRuleDefRowStatus"),))
if mibBuilder.loadTexts: spdRuleDefinitionGroup.setDescription('This group is made up of objects from the IPsec Policy Rule\n Definition Table.')
spdCompoundFilterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 5)).setObjects(*(("IPSEC-SPD-MIB", "spdCompFiltDescription"), ("IPSEC-SPD-MIB", "spdCompFiltLogicType"), ("IPSEC-SPD-MIB", "spdCompFiltLastChanged"), ("IPSEC-SPD-MIB", "spdCompFiltStorageType"), ("IPSEC-SPD-MIB", "spdCompFiltRowStatus"), ("IPSEC-SPD-MIB", "spdSubFiltSubfilter"), ("IPSEC-SPD-MIB", "spdSubFiltSubfilterIsNegated"), ("IPSEC-SPD-MIB", "spdSubFiltLastChanged"), ("IPSEC-SPD-MIB", "spdSubFiltStorageType"), ("IPSEC-SPD-MIB", "spdSubFiltRowStatus"),))
if mibBuilder.loadTexts: spdCompoundFilterGroup.setDescription('This group is made up of objects from the IPsec Policy\n Compound Filter Table and Sub-Filter Table Group.')
spdStaticFilterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 6)).setObjects(*(("IPSEC-SPD-MIB", "spdTrueFilter"),))
if mibBuilder.loadTexts: spdStaticFilterGroup.setDescription('The static filter group. Currently this is just a true\n filter.')
spdIPOffsetFilterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 7)).setObjects(*(("IPSEC-SPD-MIB", "spdIpOffFiltOffset"), ("IPSEC-SPD-MIB", "spdIpOffFiltType"), ("IPSEC-SPD-MIB", "spdIpOffFiltValue"), ("IPSEC-SPD-MIB", "spdIpOffFiltLastChanged"), ("IPSEC-SPD-MIB", "spdIpOffFiltStorageType"), ("IPSEC-SPD-MIB", "spdIpOffFiltRowStatus"),))
if mibBuilder.loadTexts: spdIPOffsetFilterGroup.setDescription('This group is made up of objects from the IPsec Policy IP\n Offset Filter Table.')
spdTimeFilterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 8)).setObjects(*(("IPSEC-SPD-MIB", "spdTimeFiltPeriod"), ("IPSEC-SPD-MIB", "spdTimeFiltMonthOfYearMask"), ("IPSEC-SPD-MIB", "spdTimeFiltDayOfMonthMask"), ("IPSEC-SPD-MIB", "spdTimeFiltDayOfWeekMask"), ("IPSEC-SPD-MIB", "spdTimeFiltTimeOfDayMask"), ("IPSEC-SPD-MIB", "spdTimeFiltLastChanged"), ("IPSEC-SPD-MIB", "spdTimeFiltStorageType"), ("IPSEC-SPD-MIB", "spdTimeFiltRowStatus"),))
if mibBuilder.loadTexts: spdTimeFilterGroup.setDescription('This group is made up of objects from the IPsec Policy Time\n Filter Table.')
spdIpsoHeaderFilterGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 9)).setObjects(*(("IPSEC-SPD-MIB", "spdIpsoHeadFiltType"), ("IPSEC-SPD-MIB", "spdIpsoHeadFiltClassification"), ("IPSEC-SPD-MIB", "spdIpsoHeadFiltProtectionAuth"), ("IPSEC-SPD-MIB", "spdIpsoHeadFiltLastChanged"), ("IPSEC-SPD-MIB", "spdIpsoHeadFiltStorageType"), ("IPSEC-SPD-MIB", "spdIpsoHeadFiltRowStatus"),))
if mibBuilder.loadTexts: spdIpsoHeaderFilterGroup.setDescription('This group is made up of objects from the IPsec Policy IPSO\n Header Filter Table.')
spdStaticActionGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 10)).setObjects(*(("IPSEC-SPD-MIB", "spdDropAction"), ("IPSEC-SPD-MIB", "spdAcceptAction"), ("IPSEC-SPD-MIB", "spdDropActionLog"), ("IPSEC-SPD-MIB", "spdAcceptActionLog"),))
if mibBuilder.loadTexts: spdStaticActionGroup.setDescription('This group is made up of objects from the IPsec Policy\n Static Actions.')
spdCompoundActionGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 11)).setObjects(*(("IPSEC-SPD-MIB", "spdCompActExecutionStrategy"), ("IPSEC-SPD-MIB", "spdCompActLastChanged"), ("IPSEC-SPD-MIB", "spdCompActStorageType"), ("IPSEC-SPD-MIB", "spdCompActRowStatus"), ("IPSEC-SPD-MIB", "spdSubActSubActionName"), ("IPSEC-SPD-MIB", "spdSubActLastChanged"), ("IPSEC-SPD-MIB", "spdSubActStorageType"), ("IPSEC-SPD-MIB", "spdSubActRowStatus"),))
if mibBuilder.loadTexts: spdCompoundActionGroup.setDescription('The IPsec Policy Compound Action Table and Actions In\n\n\n\n Compound Action Table Group.')
spdActionLoggingObjectGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 12)).setObjects(*(("IPSEC-SPD-MIB", "spdActionExecuted"), ("IPSEC-SPD-MIB", "spdIPEndpointAddType"), ("IPSEC-SPD-MIB", "spdIPEndpointAddress"), ("IPSEC-SPD-MIB", "spdIPSourceType"), ("IPSEC-SPD-MIB", "spdIPSourceAddress"), ("IPSEC-SPD-MIB", "spdIPDestinationType"), ("IPSEC-SPD-MIB", "spdIPDestinationAddress"), ("IPSEC-SPD-MIB", "spdPacketDirection"), ("IPSEC-SPD-MIB", "spdPacketPart"),))
if mibBuilder.loadTexts: spdActionLoggingObjectGroup.setDescription('This group is made up of all the Notification objects for\n this MIB.')
spdActionNotificationGroup = NotificationGroup((1, 3, 6, 1, 2, 1, 153, 3, 2, 13)).setObjects(*(("IPSEC-SPD-MIB", "spdActionNotification"), ("IPSEC-SPD-MIB", "spdPacketNotification"),))
if mibBuilder.loadTexts: spdActionNotificationGroup.setDescription('This group is made up of all the Notifications for this MIB.')
mibBuilder.exportSymbols("IPSEC-SPD-MIB", spdRuleDefFilter=spdRuleDefFilter, spdEndpointToGroupTable=spdEndpointToGroupTable, spdIpOffFiltValue=spdIpOffFiltValue, spdCompoundFilterTable=spdCompoundFilterTable, spdRuleDefRowStatus=spdRuleDefRowStatus, spdSubFiltRowStatus=spdSubFiltRowStatus, spdStaticActions=spdStaticActions, spdEndGroupInterface=spdEndGroupInterface, spdRuleFilterReadOnlyCompliance=spdRuleFilterReadOnlyCompliance, spdCompoundFilterGroup=spdCompoundFilterGroup, spdEndpointGroup=spdEndpointGroup, spdEgressPolicyGroupName=spdEgressPolicyGroupName, spdGroupContentsTable=spdGroupContentsTable, spdSubFiltSubfilter=spdSubFiltSubfilter, spdIpsoHeadFiltClassification=spdIpsoHeadFiltClassification, spdGroupContStorageType=spdGroupContStorageType, spdCompliances=spdCompliances, spdCompFiltName=spdCompFiltName, spdRuleDefName=spdRuleDefName, spdIPOffsetFilterGroup=spdIPOffsetFilterGroup, spdSubfiltersTable=spdSubfiltersTable, SpdIPPacketLogging=SpdIPPacketLogging, spdTimeFilterEntry=spdTimeFilterEntry, spdTimeFilterTable=spdTimeFilterTable, spdGroupContPriority=spdGroupContPriority, spdTimeFiltStorageType=spdTimeFiltStorageType, spdSubactionsEntry=spdSubactionsEntry, spdActions=spdActions, spdIpsoHeaderFilterGroup=spdIpsoHeaderFilterGroup, spdTimeFiltLastChanged=spdTimeFiltLastChanged, spdRuleDefinitionEntry=spdRuleDefinitionEntry, spdGroupContComponentName=spdGroupContComponentName, spdTimeFiltDayOfMonthMask=spdTimeFiltDayOfMonthMask, spdStaticFilters=spdStaticFilters, spdSubFiltPriority=spdSubFiltPriority, spdRuleDefLastChanged=spdRuleDefLastChanged, spdNotificationVariables=spdNotificationVariables, spdSubFiltSubfilterIsNegated=spdSubFiltSubfilterIsNegated, spdIpsoHeadFiltType=spdIpsoHeadFiltType, PYSNMP_MODULE_ID=spdMIB, spdIpsoHeadFiltName=spdIpsoHeadFiltName, spdRuleDefAction=spdRuleDefAction, spdGroupContComponentType=spdGroupContComponentType, spdActionLoggingObjectGroup=spdActionLoggingObjectGroup, spdIpsoHeaderFilterTable=spdIpsoHeaderFilterTable, 
spdStaticActionGroup=spdStaticActionGroup, spdIpOffsetFilterEntry=spdIpOffsetFilterEntry, spdEndGroupLastChanged=spdEndGroupLastChanged, spdRuleDefinitionTable=spdRuleDefinitionTable, spdCompFiltLogicType=spdCompFiltLogicType, spdIpOffFiltRowStatus=spdIpOffFiltRowStatus, spdNotificationObjects=spdNotificationObjects, spdRuleDefinitionGroup=spdRuleDefinitionGroup, spdTimeFiltRowStatus=spdTimeFiltRowStatus, spdIpOffFiltOffset=spdIpOffFiltOffset, spdCompFiltDescription=spdCompFiltDescription, spdGroupContentsGroup=spdGroupContentsGroup, spdMIB=spdMIB, spdConformanceObjects=spdConformanceObjects, spdEndGroupDirection=spdEndGroupDirection, spdEndGroupName=spdEndGroupName, spdCompoundFilterEntry=spdCompoundFilterEntry, spdIpsoHeadFiltRowStatus=spdIpsoHeadFiltRowStatus, spdTimeFilterGroup=spdTimeFilterGroup, spdRuleDefAdminStatus=spdRuleDefAdminStatus, spdIpOffFiltLastChanged=spdIpOffFiltLastChanged, spdCompoundActionGroup=spdCompoundActionGroup, spdSubActStorageType=spdSubActStorageType, spdIpOffFiltName=spdIpOffFiltName, spdSubfiltersEntry=spdSubfiltersEntry, SpdBooleanOperator=SpdBooleanOperator, spdEndGroupStorageType=spdEndGroupStorageType, spdIPSourceType=spdIPSourceType, spdCompActLastChanged=spdCompActLastChanged, spdGroupContentsEntry=spdGroupContentsEntry, spdPacketDirection=spdPacketDirection, spdRuleDefStorageType=spdRuleDefStorageType, spdIPDestinationType=spdIPDestinationType, spdActionNotificationGroup=spdActionNotificationGroup, spdTrueFilter=spdTrueFilter, spdSubActRowStatus=spdSubActRowStatus, spdPacketPart=spdPacketPart, spdTimeFiltDayOfWeekMask=spdTimeFiltDayOfWeekMask, spdCompActName=spdCompActName, spdIpOffFiltType=spdIpOffFiltType, spdIpOffFiltStorageType=spdIpOffFiltStorageType, spdRuleDefDescription=spdRuleDefDescription, spdIpOffsetFilterTable=spdIpOffsetFilterTable, spdIPEndpointAddType=spdIPEndpointAddType, spdEndGroupRowStatus=spdEndGroupRowStatus, spdSubActPriority=spdSubActPriority, spdLocalConfigObjects=spdLocalConfigObjects, 
spdDropActionLog=spdDropActionLog, spdTimeFiltTimeOfDayMask=spdTimeFiltTimeOfDayMask, spdSubActLastChanged=spdSubActLastChanged, spdDropAction=spdDropAction, spdTrueFilterInstance=spdTrueFilterInstance, spdConfigObjects=spdConfigObjects, spdActionNotification=spdActionNotification, spdIpsecSystemPolicyNameGroup=spdIpsecSystemPolicyNameGroup, spdActionExecuted=spdActionExecuted, SpdTimePeriod=SpdTimePeriod, spdCompActRowStatus=spdCompActRowStatus, spdSubactionsTable=spdSubactionsTable, spdNotifications=spdNotifications, spdCompFiltLastChanged=spdCompFiltLastChanged, SpdAdminStatus=SpdAdminStatus, spdGroupContRowStatus=spdGroupContRowStatus, spdIPEndpointAddress=spdIPEndpointAddress, spdEndpointToGroupEntry=spdEndpointToGroupEntry, spdCompoundActionTable=spdCompoundActionTable, spdLoggingCompliance=spdLoggingCompliance, spdIpsoHeadFiltLastChanged=spdIpsoHeadFiltLastChanged, spdGroups=spdGroups, spdIpsoHeadFiltProtectionAuth=spdIpsoHeadFiltProtectionAuth, spdGroupContFilter=spdGroupContFilter, spdIpsoHeadFiltStorageType=spdIpsoHeadFiltStorageType, spdSubActSubActionName=spdSubActSubActionName, spdTimeFiltName=spdTimeFiltName, spdCompActExecutionStrategy=spdCompActExecutionStrategy, spdRuleFilterFullCompliance=spdRuleFilterFullCompliance, spdStaticFilterGroup=spdStaticFilterGroup, spdAcceptAction=spdAcceptAction, spdIPSourceAddress=spdIPSourceAddress, spdTimeFiltPeriod=spdTimeFiltPeriod, spdRuleDefFilterNegated=spdRuleDefFilterNegated, spdAcceptActionLog=spdAcceptActionLog, spdCompActStorageType=spdCompActStorageType, spdPacketNotification=spdPacketNotification, spdGroupContLastChanged=spdGroupContLastChanged, spdSubFiltLastChanged=spdSubFiltLastChanged, spdSubFiltStorageType=spdSubFiltStorageType, spdIPDestinationAddress=spdIPDestinationAddress, spdCompoundActionEntry=spdCompoundActionEntry, spdCompFiltStorageType=spdCompFiltStorageType, spdGroupContName=spdGroupContName, spdIpsoHeaderFilterEntry=spdIpsoHeaderFilterEntry, 
spdTimeFiltMonthOfYearMask=spdTimeFiltMonthOfYearMask, spdCompFiltRowStatus=spdCompFiltRowStatus, spdIngressPolicyGroupName=spdIngressPolicyGroupName)
| 285.141892
| 6,085
| 0.726322
| 10,909
| 84,402
| 5.619122
| 0.087451
| 0.005481
| 0.006998
| 0.008875
| 0.483254
| 0.439975
| 0.408914
| 0.384036
| 0.350805
| 0.334883
| 0
| 0.032154
| 0.178657
| 84,402
| 295
| 6,086
| 286.108475
| 0.852098
| 0.003578
| 0
| 0.007042
| 0
| 0.320423
| 0.609994
| 0.044975
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.021127
| 0.035211
| 0
| 0.077465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c7dbde5654ffe423fdffc4b6fd4b0a4178b3f731
| 2,481
|
py
|
Python
|
src/api/api_lists/routes/lists.py
|
rrickgauer/lists
|
371de6af332789ef386392fd24857702794d05a6
|
[
"Apache-2.0"
] | null | null | null |
src/api/api_lists/routes/lists.py
|
rrickgauer/lists
|
371de6af332789ef386392fd24857702794d05a6
|
[
"Apache-2.0"
] | 67
|
2021-12-14T22:30:56.000Z
|
2022-03-15T18:28:27.000Z
|
src/api/api_lists/routes/lists.py
|
rrickgauer/lists
|
371de6af332789ef386392fd24857702794d05a6
|
[
"Apache-2.0"
] | null | null | null |
"""
********************************************************************************************
Module: lists
Prefix: /lists
********************************************************************************************
"""
from uuid import UUID
import flask
from ..common import security
from ..services import lists as list_services, items as item_services
# module blueprint
bp_lists = flask.Blueprint('lists', __name__)
#------------------------------------------------------
# Retrieve all lists
#------------------------------------------------------
@bp_lists.get('')
@security.login_required
def getAll():
    """Return every list, filtered by the request's query-string arguments."""
    query_params = flask.request.args.to_dict()
    return list_services.getAllLists(query_params)
#------------------------------------------------------
# Retrieve a single list
#------------------------------------------------------
@bp_lists.get('<uuid:list_id>')
@security.login_required
def get(list_id: UUID):
    """Return the single list identified by *list_id*."""
    return list_services.getList(list_id)
#------------------------------------------------------
# Create a new list
#------------------------------------------------------
@bp_lists.post('')
@security.login_required
def post():
    """Create a new list from the submitted form fields."""
    form_fields = flask.request.form.to_dict()
    return list_services.createList(form_fields)
#------------------------------------------------------
# Update an existing list
# Or Create a new list with the provided list id from the url
#------------------------------------------------------
@bp_lists.put('<uuid:list_id>')
@security.login_required
def put(list_id: UUID):
    """Update the list with *list_id*, or create it under that id (upsert)."""
    form_fields = flask.request.form.to_dict()
    return list_services.updateList(list_id, form_fields)
#------------------------------------------------------
# Delete an existing list
#------------------------------------------------------
@bp_lists.delete('<uuid:list_id>')
@security.login_required
def delete(list_id: UUID):
    """Delete the list identified by *list_id*."""
    return list_services.deleteList(list_id)
#------------------------------------------------------
# Retrieve the items for a single list
#------------------------------------------------------
@bp_lists.get('<uuid:list_id>/items')
@security.login_required
def getListItems(list_id: UUID):
    """Return the items belonging to the single list *list_id*."""
    wanted_lists = [list_id]
    return item_services.getItems(wanted_lists)
#------------------------------------------------------
# Clone the given list
#------------------------------------------------------
@bp_lists.post('<uuid:list_id>/clones')
@security.login_required
def cloneList(list_id: UUID):
    """Clone the list *list_id* using the submitted form fields."""
    form_fields = flask.request.form.to_dict()
    return list_services.cloneListResponse(list_id, form_fields)
| 32.220779
| 92
| 0.461104
| 226
| 2,481
| 4.845133
| 0.261062
| 0.087671
| 0.134247
| 0.153425
| 0.314155
| 0.294064
| 0.191781
| 0.056621
| 0.056621
| 0
| 0
| 0
| 0.083837
| 2,481
| 76
| 93
| 32.644737
| 0.481742
| 0.492543
| 0
| 0.205882
| 0
| 0
| 0.071603
| 0.017087
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205882
| false
| 0
| 0.117647
| 0.176471
| 0.529412
| 0.029412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
c7dde4a56cce7a557fbc05a4f351239e805001b4
| 191
|
py
|
Python
|
Numpy/null_vector_of_size_20.py
|
abhishekkulkarni24/Machine-Learning
|
8bf49a5c9c669ce078250f164376606546c9e81f
|
[
"MIT"
] | 1
|
2021-03-18T14:25:33.000Z
|
2021-03-18T14:25:33.000Z
|
Numpy/null_vector_of_size_20.py
|
abhishekkulkarni24/Machine-Learning
|
8bf49a5c9c669ce078250f164376606546c9e81f
|
[
"MIT"
] | null | null | null |
Numpy/null_vector_of_size_20.py
|
abhishekkulkarni24/Machine-Learning
|
8bf49a5c9c669ce078250f164376606546c9e81f
|
[
"MIT"
] | null | null | null |
# Create a null (all-zeros) vector of size 20 and show the output.
import numpy as np

# np.zeros defaults to dtype float64; removed the stray C-style semicolon.
vector = np.zeros(20)
print(vector)
'''
Output
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
'''
| 14.692308
| 61
| 0.565445
| 42
| 191
| 2.571429
| 0.428571
| 0.351852
| 0.5
| 0.62963
| 0.185185
| 0.185185
| 0.185185
| 0.185185
| 0.185185
| 0.185185
| 0
| 0.165517
| 0.240838
| 191
| 13
| 62
| 14.692308
| 0.57931
| 0.272251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
c7ecbbad250ba26c9f31ce83f498f4dd6edcfa33
| 88
|
py
|
Python
|
server/config/keys.example.py
|
vincenzodistasio97/excel-to-json
|
e367d4db4134dc676344b2b9fb2443300bd3c9c7
|
[
"MIT"
] | 6
|
2018-03-26T13:20:31.000Z
|
2020-05-02T03:14:34.000Z
|
server/config/keys.example.py
|
vincenzodistasio97/excel-to-json
|
e367d4db4134dc676344b2b9fb2443300bd3c9c7
|
[
"MIT"
] | null | null | null |
server/config/keys.example.py
|
vincenzodistasio97/excel-to-json
|
e367d4db4134dc676344b2b9fb2443300bd3c9c7
|
[
"MIT"
] | 2
|
2019-12-31T06:12:46.000Z
|
2022-02-26T09:52:57.000Z
|
# Placeholder MongoDB connection settings — replace with real values before use.
keys = {
    'MONGO_DBNAME': 'your_database_name',
    'MONGO_URI': 'your_mongo_uri_name',
}
| 17.6
| 39
| 0.704545
| 12
| 88
| 4.583333
| 0.583333
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 88
| 4
| 40
| 22
| 0.723684
| 0
| 0
| 0
| 0
| 0
| 0.659091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
400578dfbd4cd8f31d0dd1dbbe5c091beb1aea93
| 405
|
py
|
Python
|
build/lib/ally/requests/account_holdings.py
|
anthonymorast/AllyInvest.py
|
e3824e7d426328198dd4a51b2b861ce902dcbe2c
|
[
"MIT"
] | 13
|
2020-02-05T17:47:32.000Z
|
2022-02-23T02:54:09.000Z
|
ally/requests/account_holdings.py
|
jpwatt/AllyInvest.py
|
3f763bd24be777f1e009160d70e0901ed9676666
|
[
"MIT"
] | 4
|
2019-11-22T16:34:00.000Z
|
2022-02-06T15:44:57.000Z
|
ally/requests/account_holdings.py
|
jpwatt/AllyInvest.py
|
3f763bd24be777f1e009160d70e0901ed9676666
|
[
"MIT"
] | 2
|
2021-03-01T03:31:31.000Z
|
2021-06-23T18:17:28.000Z
|
from .request import *
from ..responses.account_holdings import *
class AccountHoldingsRequest(Request):
    """Request object for fetching an account's holdings.

    Stores the target account id and delegates the actual API call to
    ``ally_api.get_account_holdings`` when executed.
    """

    def __init__(self, account_id, response_format='json'):
        # response_format defaults to 'json' and is forwarded to the base Request.
        super().__init__(response_format)
        self.account_id = account_id

    def execute(self, ally_api):
        # Wraps the raw API result in an AccountHoldingsResponse.
        # NOTE(review): assumes ally_api exposes get_account_holdings(account_id) — confirm.
        return AccountHoldingsResponse(self.account_id, self.response_format, ally_api.get_account_holdings(self.account_id))
| 36.818182
| 125
| 0.760494
| 49
| 405
| 5.857143
| 0.44898
| 0.156794
| 0.181185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145679
| 405
| 10
| 126
| 40.5
| 0.82948
| 0
| 0
| 0
| 0
| 0
| 0.009877
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.125
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
401859d7c0d9a8d3f279e4d5647fbeebac9f4876
| 154
|
py
|
Python
|
biflorica-recorder/api/__init__.py
|
nbohorquez/biflorica-recorder
|
32a0228af2cf052dc710562dc75aede838b3a4d5
|
[
"Apache-2.0"
] | null | null | null |
biflorica-recorder/api/__init__.py
|
nbohorquez/biflorica-recorder
|
32a0228af2cf052dc710562dc75aede838b3a4d5
|
[
"Apache-2.0"
] | null | null | null |
biflorica-recorder/api/__init__.py
|
nbohorquez/biflorica-recorder
|
32a0228af2cf052dc710562dc75aede838b3a4d5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from session import APISession
from objects import Handbook, Messages, Requests, Log, Filters, MarketPlace
| 25.666667
| 75
| 0.74026
| 20
| 154
| 5.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007519
| 0.136364
| 154
| 5
| 76
| 30.8
| 0.849624
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
40273abd9e00f82b983d231bdc8aec6b70f95232
| 360
|
py
|
Python
|
tracardi/domain/entity.py
|
bytepl/tracardi
|
e8fa4684fa6bd3d05165fe48aa925fc6c1e74923
|
[
"MIT"
] | 153
|
2021-11-02T00:35:41.000Z
|
2022-03-25T16:37:44.000Z
|
tracardi/domain/entity.py
|
bytepl/tracardi
|
e8fa4684fa6bd3d05165fe48aa925fc6c1e74923
|
[
"MIT"
] | 243
|
2021-10-17T17:00:22.000Z
|
2022-03-28T10:13:34.000Z
|
tracardi/domain/entity.py
|
bytepl/tracardi
|
e8fa4684fa6bd3d05165fe48aa925fc6c1e74923
|
[
"MIT"
] | 14
|
2021-10-17T11:39:04.000Z
|
2022-03-14T14:44:02.000Z
|
from typing import Optional
from uuid import uuid4
from pydantic import BaseModel
from tracardi.domain.value_object.storage_info import StorageInfo
class Entity(BaseModel):
    """A minimal identified domain object: just a string id."""

    id: str

    @staticmethod
    def new() -> 'Entity':
        """Build an Entity with a freshly generated UUID4 string id."""
        generated_id = str(uuid4())
        return Entity(id=generated_id)

    @staticmethod
    def storage_info() -> Optional[StorageInfo]:
        """Base entities expose no storage information."""
        return None
| 21.176471
| 65
| 0.711111
| 43
| 360
| 5.883721
| 0.55814
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006993
| 0.205556
| 360
| 16
| 66
| 22.5
| 0.877622
| 0
| 0
| 0.166667
| 0
| 0
| 0.016667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
40365cdff96689c1c97efcb00e356b27954cbf32
| 1,532
|
py
|
Python
|
piwebasync/exceptions.py
|
newvicx/piwebasync
|
fc0d159aa4b99667777f428a090fe7a102481fea
|
[
"MIT"
] | null | null | null |
piwebasync/exceptions.py
|
newvicx/piwebasync
|
fc0d159aa4b99667777f428a090fe7a102481fea
|
[
"MIT"
] | 2
|
2022-03-02T17:42:21.000Z
|
2022-03-29T19:24:01.000Z
|
piwebasync/exceptions.py
|
newvicx/piwebasync
|
fc0d159aa4b99667777f428a090fe7a102481fea
|
[
"MIT"
] | null | null | null |
class PIWebAsyncException(Exception):
    """Root of the piwebasync exception hierarchy.

    All the subclasses below previously defined ``__init__(self, *args)``
    bodies that only forwarded to ``super().__init__(*args)`` — identical to
    the inherited default, so they have been removed (behavior unchanged).
    ``HTTPStatusError`` keeps its explicit ``message`` parameter because its
    signature differs from the ``*args`` default.
    """


class APIException(PIWebAsyncException):
    """Error related to the API layer."""


class SerializationError(APIException):
    """Error while serializing or deserializing API data."""


class HTTPClientError(PIWebAsyncException):
    """Error originating in the HTTP client."""


class HTTPStatusError(HTTPClientError):
    """HTTP error carrying a single message argument."""

    def __init__(self, message) -> None:
        super().__init__(message)


class WebsocketClientError(PIWebAsyncException):
    """Error originating in the websocket client."""


class ChannelClosed(WebsocketClientError):
    """Base error for a closed channel."""


class ChannelClosedError(ChannelClosed):
    """Channel closed variant (error case)."""


class ChannelClosedOK(ChannelClosed):
    """Channel closed variant (normal case)."""


class ChannelUpdateError(WebsocketClientError):
    """Error while updating a channel."""


class ChannelRollback(WebsocketClientError):
    """Signals a channel rollback."""


class WatchdogTimeout(WebsocketClientError):
    """Watchdog timeout in the websocket client."""
| 26.877193
| 49
| 0.655352
| 143
| 1,532
| 6.34965
| 0.153846
| 0.092511
| 0.145374
| 0.181718
| 0.694934
| 0.694934
| 0.694934
| 0.694934
| 0.694934
| 0.694934
| 0
| 0
| 0.213446
| 1,532
| 56
| 50
| 27.357143
| 0.753527
| 0
| 0
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
404f8e6d7d8c0e8a1b4ee2f85ead4178d58cdf46
| 151
|
py
|
Python
|
pt/__init__.py
|
quantmind/pulsar-twitter
|
b9f7f2ef2e5ea216e0dc2b223468491a25f805a0
|
[
"BSD-3-Clause"
] | 1
|
2016-10-08T22:46:15.000Z
|
2016-10-08T22:46:15.000Z
|
pt/__init__.py
|
quantmind/pulsar-twitter
|
b9f7f2ef2e5ea216e0dc2b223468491a25f805a0
|
[
"BSD-3-Clause"
] | null | null | null |
pt/__init__.py
|
quantmind/pulsar-twitter
|
b9f7f2ef2e5ea216e0dc2b223468491a25f805a0
|
[
"BSD-3-Clause"
] | 2
|
2017-02-27T10:27:52.000Z
|
2020-07-04T14:32:25.000Z
|
"""Twitter consumer for pulsar queue"""
from .version import get_version
# Version tuple: (major, minor, micro, release level, serial).
VERSION = (0, 1, 0, 'final', 0)
# Version string produced by the project's get_version helper from VERSION.
__version__ = get_version(VERSION, __file__)
| 25.166667
| 44
| 0.728477
| 21
| 151
| 4.761905
| 0.619048
| 0.2
| 0.34
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0.139073
| 151
| 5
| 45
| 30.2
| 0.738462
| 0.218543
| 0
| 0
| 0
| 0
| 0.044643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
4078682c19694d387b0abcd4892f767e6ce140f7
| 58
|
py
|
Python
|
src/gateau_api/scripts/__init__.py
|
k2bd/gateau-api
|
b7fb1db8a9b6346b3ee7ebf4500d923996990e8b
|
[
"MIT"
] | null | null | null |
src/gateau_api/scripts/__init__.py
|
k2bd/gateau-api
|
b7fb1db8a9b6346b3ee7ebf4500d923996990e8b
|
[
"MIT"
] | 12
|
2022-02-03T20:36:05.000Z
|
2022-03-27T11:29:06.000Z
|
src/gateau_api/scripts/__init__.py
|
k2bd/gateau-api
|
b7fb1db8a9b6346b3ee7ebf4500d923996990e8b
|
[
"MIT"
] | null | null | null |
"""
These scripts can be run to cause one-off effects
"""
| 14.5
| 49
| 0.689655
| 10
| 58
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 58
| 3
| 50
| 19.333333
| 0.851064
| 0.844828
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
40a90e4c0d3ccab05a8a0c12303db72e63fbd692
| 308
|
py
|
Python
|
test/test_uw_atg_wx.py
|
jsheedy/uw_atg_wx
|
4dfb2d184e4c746d5796197115648c3351c4ffe9
|
[
"WTFPL"
] | null | null | null |
test/test_uw_atg_wx.py
|
jsheedy/uw_atg_wx
|
4dfb2d184e4c746d5796197115648c3351c4ffe9
|
[
"WTFPL"
] | null | null | null |
test/test_uw_atg_wx.py
|
jsheedy/uw_atg_wx
|
4dfb2d184e4c746d5796197115648c3351c4ffe9
|
[
"WTFPL"
] | null | null | null |
import unittest
from uw_atg_wx import get_obs_from_uw_atg, get_obs
import unittest
class TestDispatcher(unittest.TestCase):
    """Tests for the uw_atg_wx observation fetchers.

    NOTE(review): test_get_obs_from_uw_atg calls get_obs_from_uw_atg()
    directly, which appears to hit a live data source — confirm whether
    network access is intended in this test suite.
    """

    def setUp(self):
        super().setUp()

    def test_get_obs_from_uw_atg(self):
        # Expects the fetch to yield more than one observation.
        response = get_obs_from_uw_atg()
        self.assertGreater(len(list(response)), 1)
| 18.117647
| 50
| 0.717532
| 45
| 308
| 4.555556
| 0.466667
| 0.117073
| 0.17561
| 0.17561
| 0.258537
| 0.185366
| 0
| 0
| 0
| 0
| 0
| 0.004032
| 0.194805
| 308
| 16
| 51
| 19.25
| 0.822581
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
40ad297a3f4e11b89990d53e7b6ea82fd62022d0
| 131
|
py
|
Python
|
src/files/apps.py
|
yigitgenc/bulkstash
|
44ed74cd5c70e8b833736d594d3bbb5e5293d37c
|
[
"MIT"
] | 11
|
2017-11-24T12:38:48.000Z
|
2019-08-31T14:29:11.000Z
|
src/files/apps.py
|
yigitgenc/kuzgun.io
|
44ed74cd5c70e8b833736d594d3bbb5e5293d37c
|
[
"MIT"
] | null | null | null |
src/files/apps.py
|
yigitgenc/kuzgun.io
|
44ed74cd5c70e8b833736d594d3bbb5e5293d37c
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class FilesConfig(AppConfig):
    """
    App config for files app.
    """

    # Dotted label Django uses to register this application.
    name = 'files'
| 14.555556
| 33
| 0.633588
| 15
| 131
| 5.533333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259542
| 131
| 8
| 34
| 16.375
| 0.85567
| 0.19084
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
40bce13b15eda374153741ed5965e7d5f816f601
| 954
|
py
|
Python
|
observatorio/dados/migrations/0003_auto_20200930_1624.py
|
guerrasao/Observatorio-Socioeconomico-da-COVID-19
|
15457859092a41e539e57af6cc1bc875f3fbdf93
|
[
"MIT"
] | null | null | null |
observatorio/dados/migrations/0003_auto_20200930_1624.py
|
guerrasao/Observatorio-Socioeconomico-da-COVID-19
|
15457859092a41e539e57af6cc1bc875f3fbdf93
|
[
"MIT"
] | null | null | null |
observatorio/dados/migrations/0003_auto_20200930_1624.py
|
guerrasao/Observatorio-Socioeconomico-da-COVID-19
|
15457859092a41e539e57af6cc1bc875f3fbdf93
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-09-30 19:24
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine `nome_normalizado` as a
    non-editable SlugField(max_length=255) on the estado, municipio,
    pais and variavel models."""

    dependencies = [
        ('dados', '0002_auto_20200930_1623'),
    ]

    operations = [
        migrations.AlterField(
            model_name='estado',
            name='nome_normalizado',
            field=models.SlugField(editable=False, max_length=255),
        ),
        migrations.AlterField(
            model_name='municipio',
            name='nome_normalizado',
            field=models.SlugField(editable=False, max_length=255),
        ),
        migrations.AlterField(
            model_name='pais',
            name='nome_normalizado',
            field=models.SlugField(editable=False, max_length=255),
        ),
        migrations.AlterField(
            model_name='variavel',
            name='nome_normalizado',
            field=models.SlugField(editable=False, max_length=255),
        ),
    ]
| 28.058824
| 67
| 0.589099
| 93
| 954
| 5.88172
| 0.44086
| 0.146252
| 0.182815
| 0.212066
| 0.627057
| 0.627057
| 0.627057
| 0.627057
| 0.627057
| 0.627057
| 0
| 0.064371
| 0.29979
| 954
| 33
| 68
| 28.909091
| 0.754491
| 0.04717
| 0
| 0.592593
| 1
| 0
| 0.131202
| 0.025358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
40c1caf757098231c8114a1a90280569071da06c
| 191
|
py
|
Python
|
pyteste.py
|
alissonn17/createdInsite
|
e79d707103efc3f8624d82cf6b5001f58b14dbc8
|
[
"MIT"
] | null | null | null |
pyteste.py
|
alissonn17/createdInsite
|
e79d707103efc3f8624d82cf6b5001f58b14dbc8
|
[
"MIT"
] | null | null | null |
pyteste.py
|
alissonn17/createdInsite
|
e79d707103efc3f8624d82cf6b5001f58b14dbc8
|
[
"MIT"
] | null | null | null |
# Created first archive in github
# Fix: the input() prompt string was missing its closing quote
# ('Digite um número)) — a SyntaxError that prevented the file from parsing.
x = int(input('Digite um número'))
print('\033[3;33m=-' * 20)
if x >= 2:
    print(f'sim, {x} é maior que 2')
else:
    print(f'não, {x} não é maior que 2')
| 19.1
| 40
| 0.60733
| 37
| 191
| 3.135135
| 0.675676
| 0.103448
| 0.155172
| 0.172414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 0.209424
| 191
| 9
| 41
| 21.222222
| 0.695364
| 0.167539
| 0
| 0
| 0
| 0
| 0.382166
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
40dc537fac669f08af5635577d6e8e84d26383a1
| 262
|
py
|
Python
|
isic/core/templatetags/display.py
|
ImageMarkup/isic
|
607b2b103d0d2a67adb61f8ea88f1461c85ec8f3
|
[
"Apache-2.0"
] | null | null | null |
isic/core/templatetags/display.py
|
ImageMarkup/isic
|
607b2b103d0d2a67adb61f8ea88f1461c85ec8f3
|
[
"Apache-2.0"
] | 18
|
2021-06-10T05:14:34.000Z
|
2022-03-22T02:15:59.000Z
|
isic/core/templatetags/display.py
|
ImageMarkup/isic
|
607b2b103d0d2a67adb61f8ea88f1461c85ec8f3
|
[
"Apache-2.0"
] | null | null | null |
from django import template
from django.contrib.auth.models import User
from isic.core.utils.display import user_nicename as user_nicename_util
# Template library instance; Django discovers filters via this `register` name.
register = template.Library()


@register.filter
def user_nicename(user: User):
    """Template filter delegating to the shared user_nicename display helper."""
    return user_nicename_util(user)
| 21.833333
| 71
| 0.812977
| 38
| 262
| 5.447368
| 0.526316
| 0.231884
| 0.154589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118321
| 262
| 11
| 72
| 23.818182
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.428571
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
40e107c0e9324e6210ac4c553067ebc8a607623c
| 114
|
py
|
Python
|
wtpy/apps/datahelper/__init__.py
|
NoTravel/wtpy
|
bd48db23ed6eb3157fc97d298b47279c0733d197
|
[
"MIT"
] | 164
|
2020-06-18T01:47:31.000Z
|
2022-03-30T09:19:42.000Z
|
wtpy/apps/datahelper/__init__.py
|
NoTravel/wtpy
|
bd48db23ed6eb3157fc97d298b47279c0733d197
|
[
"MIT"
] | 20
|
2020-12-02T02:57:55.000Z
|
2022-03-30T05:25:14.000Z
|
wtpy/apps/datahelper/__init__.py
|
NoTravel/wtpy
|
bd48db23ed6eb3157fc97d298b47279c0733d197
|
[
"MIT"
] | 66
|
2020-09-08T03:21:24.000Z
|
2022-03-29T08:39:55.000Z
|
from .DHFactory import DHFactory
from .db.MysqlHelper import MysqlHelper
__all__ = ["DHFactory","MysqlHelper"]
| 28.5
| 40
| 0.780702
| 12
| 114
| 7.083333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 4
| 41
| 28.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
904f87e19b58c8ac66dd2a2b6a601944b775808e
| 56
|
py
|
Python
|
motion/neuro/trctst.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 23
|
2021-04-02T09:02:04.000Z
|
2022-03-22T05:31:03.000Z
|
motion/neuro/trctst.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 35
|
2021-04-12T09:41:05.000Z
|
2022-03-26T13:32:46.000Z
|
motion/neuro/trctst.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 16
|
2021-03-30T11:55:45.000Z
|
2022-03-30T07:10:59.000Z
|
import torch
import numpy as np
script = np.loadtxt('')
| 14
| 23
| 0.732143
| 9
| 56
| 4.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 56
| 4
| 23
| 14
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
90685b1696895c798240304f9b67e5574a0f62f9
| 187
|
py
|
Python
|
Community/manage_records/manage_records_config.py
|
spenney-bc/gateway-workflows
|
0311a9224b2d53c01689eb6a9a0a593177abed63
|
[
"Apache-2.0"
] | 43
|
2017-12-04T17:38:24.000Z
|
2021-12-29T09:17:17.000Z
|
Community/manage_records/manage_records_config.py
|
spenney-bc/gateway-workflows
|
0311a9224b2d53c01689eb6a9a0a593177abed63
|
[
"Apache-2.0"
] | 49
|
2017-12-07T21:02:29.000Z
|
2022-02-04T22:27:16.000Z
|
Community/manage_records/manage_records_config.py
|
spenney-bc/gateway-workflows
|
0311a9224b2d53c01689eb6a9a0a593177abed63
|
[
"Apache-2.0"
] | 82
|
2017-12-04T17:56:00.000Z
|
2021-12-29T09:17:21.000Z
|
# Copyright 2020 BlueCat Networks. All rights reserved.
# Default Configuration ID
DEFAULT_CONFIG_NAME = 'Default'
# Default View ID
DEFAULT_VIEW_NAME = 'test1'
DEFAULT_VIEW_ID = 100910
| 23.375
| 55
| 0.796791
| 25
| 187
| 5.72
| 0.6
| 0.230769
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068323
| 0.139037
| 187
| 7
| 56
| 26.714286
| 0.819876
| 0.502674
| 0
| 0
| 0
| 0
| 0.134831
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
90941c0171c2bdaa283f653d538fe5fd2483e421
| 201
|
py
|
Python
|
s3imageservice/cms/serializers/upload.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 7
|
2018-07-23T16:35:07.000Z
|
2021-12-22T16:42:08.000Z
|
s3imageservice/cms/serializers/upload.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 13
|
2019-01-07T22:42:30.000Z
|
2022-02-27T10:53:02.000Z
|
s3imageservice/cms/serializers/upload.py
|
The-Politico/django-s3-image-service
|
da8afd36b82c031ed09d881e7b3de013061e9ccd
|
[
"MIT"
] | 2
|
2020-07-27T02:25:59.000Z
|
2020-10-17T20:54:22.000Z
|
from s3imageservice.models import Upload
from rest_framework import serializers
class UploadSerializer(serializers.ModelSerializer):
class Meta:
model = Upload
fields = "__all__"
| 22.333333
| 52
| 0.751244
| 20
| 201
| 7.3
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 0.199005
| 201
| 8
| 53
| 25.125
| 0.900621
| 0
| 0
| 0
| 0
| 0
| 0.034826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
90a41aac26f8c1f84abf44952e23ad00b5f46fd3
| 3,016
|
py
|
Python
|
chainer_chemistry/dataset/converters.py
|
jo7ueb/chainer-chemistry
|
74286aef6b53f272d1677b8e226bb7200a3922a8
|
[
"MIT"
] | 1
|
2019-06-19T00:05:59.000Z
|
2019-06-19T00:05:59.000Z
|
chainer_chemistry/dataset/converters.py
|
jo7ueb/chainer-chemistry
|
74286aef6b53f272d1677b8e226bb7200a3922a8
|
[
"MIT"
] | null | null | null |
chainer_chemistry/dataset/converters.py
|
jo7ueb/chainer-chemistry
|
74286aef6b53f272d1677b8e226bb7200a3922a8
|
[
"MIT"
] | 1
|
2019-05-23T12:25:57.000Z
|
2019-05-23T12:25:57.000Z
|
import chainer
def concat_mols(batch, device=None, padding=0):
"""Concatenates a list of molecules into array(s).
This function converts an "array of tuples" into a "tuple of arrays".
Specifically, given a list of examples each of which consists of
a list of elements, this function first makes an array
by taking the element in the same position from each example
and concatenates them along the newly-inserted first axis
(called `batch dimension`) into one array.
It repeats this for all positions and returns the resulting arrays.
The output type depends on the type of examples in ``batch``.
For instance, consider each example consists of two arrays ``(x, y)``.
Then, this function concatenates ``x`` 's into one array, and ``y`` 's
into another array, and returns a tuple of these two arrays. Another
example: consider each example is a dictionary of two entries whose keys
are ``'x'`` and ``'y'``, respectively, and values are arrays. Then, this
function concatenates ``x`` 's into one array, and ``y`` 's into another
array, and returns a dictionary with two entries ``x`` and ``y`` whose
values are the concatenated arrays.
When the arrays to concatenate have different shapes, the behavior depends
on the ``padding`` value. If ``padding`` is ``None``, it raises an error.
Otherwise, it builds an array of the minimum shape that the
contents of all arrays can be substituted to. The padding value is then
used to the extra elements of the resulting arrays.
The current implementation is identical to
:func:`~chainer.dataset.concat_examples` of Chainer, except the default
value of the ``padding`` option is changed to ``0``.
.. admonition:: Example
>>> import numpy
>>> from chainer_chemistry.dataset.converters import concat_mols
>>> x0 = numpy.array([1, 2])
>>> x1 = numpy.array([4, 5, 6])
>>> dataset = [x0, x1]
>>> results = concat_mols(dataset)
>>> print(results)
[[1 2 0]
[4 5 6]]
.. seealso:: :func:`chainer.dataset.concat_examples`
Args:
batch (list):
A list of examples. This is typically given by a dataset
iterator.
device (int):
Device ID to which each array is sent. Negative value
indicates the host memory (CPU). If it is omitted, all arrays are
left in the original device.
padding:
Scalar value for extra elements. If this is None (default),
an error is raised on shape mismatch. Otherwise, an array of
minimum dimensionalities that can accommodate all arrays is
created, and elements outside of the examples are padded by this
value.
Returns:
Array, a tuple of arrays, or a dictionary of arrays:
The type depends on the type of each example in the batch.
"""
return chainer.dataset.concat_examples(batch, device, padding=padding)
| 43.710145
| 78
| 0.666114
| 429
| 3,016
| 4.666667
| 0.34965
| 0.00999
| 0.013986
| 0.041958
| 0.127872
| 0.095904
| 0.073926
| 0.073926
| 0.073926
| 0.073926
| 0
| 0.007576
| 0.255968
| 3,016
| 68
| 79
| 44.352941
| 0.884581
| 0.87931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
90c96dedac77955005f32239c78c3e5ce67c94ee
| 202
|
py
|
Python
|
models/cls/_registry.py
|
luost26/Equivariant-OrientedMP
|
597f9c4ace953929e5eefef84e4c840d6636b818
|
[
"MIT"
] | 5
|
2022-03-26T07:08:21.000Z
|
2022-03-31T12:23:40.000Z
|
models/cls/_registry.py
|
luost26/Equivariant-OrientedMP
|
597f9c4ace953929e5eefef84e4c840d6636b818
|
[
"MIT"
] | null | null | null |
models/cls/_registry.py
|
luost26/Equivariant-OrientedMP
|
597f9c4ace953929e5eefef84e4c840d6636b818
|
[
"MIT"
] | null | null | null |
_MODEL_DICT = {}
def register_model(name):
def decorator(cls):
_MODEL_DICT[name] = cls
return cls
return decorator
def get_model(cfg):
return _MODEL_DICT[cfg.type](cfg)
| 14.428571
| 37
| 0.648515
| 27
| 202
| 4.555556
| 0.407407
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252475
| 202
| 13
| 38
| 15.538462
| 0.81457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0
| 0.125
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
90cf0983f5f1dd249044178ca0f81b1b0472a250
| 912
|
py
|
Python
|
PythonApp/lib/tracerdev/rtl.py
|
IMULMUL/tracer
|
221ae67558fa08a8a45ddd82c237417c0085b5a5
|
[
"MIT"
] | 51
|
2017-12-18T20:49:36.000Z
|
2022-02-19T13:28:51.000Z
|
PythonApp/lib/tracerdev/rtl.py
|
IMULMUL/tracer
|
221ae67558fa08a8a45ddd82c237417c0085b5a5
|
[
"MIT"
] | 1
|
2018-05-15T19:46:47.000Z
|
2018-05-15T19:59:25.000Z
|
PythonApp/lib/tracerdev/rtl.py
|
IMULMUL/tracer
|
221ae67558fa08a8a45ddd82c237417c0085b5a5
|
[
"MIT"
] | 8
|
2018-05-07T13:59:53.000Z
|
2022-01-20T02:06:00.000Z
|
#===============================================================================
# Imports
#===============================================================================
import sys
import textwrap
from tracer.sourcefile import HeaderFile
#===============================================================================
# Helpers
#===============================================================================
#===============================================================================
# Classes
#===============================================================================
class RtlSource:
header = None
source = None
class RtlSource(SourceFile):
def sync(self):
pass
class RtlHeader(HeaderFile):
def sync(self):
pass
class RtlHeader(HeaderFile):
def sync(self):
pass
# vim:set ts=8 sw=4 sts=4 tw=80 et :
| 26.823529
| 80
| 0.277412
| 50
| 912
| 5.06
| 0.6
| 0.083004
| 0.130435
| 0.177866
| 0.367589
| 0.367589
| 0.367589
| 0.367589
| 0.367589
| 0.367589
| 0
| 0.006536
| 0.161184
| 912
| 33
| 81
| 27.636364
| 0.324183
| 0.632675
| 0
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
2902e4f3c4bda11ed90898a9e667b2cdf8b88694
| 67
|
py
|
Python
|
magicmirror/tools/limit/__init__.py
|
memirror/magicMirror
|
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
|
[
"MIT"
] | 5
|
2021-09-03T03:06:51.000Z
|
2022-03-22T07:48:22.000Z
|
magicmirror/tools/limit/__init__.py
|
xiaodongxiexie/magicMirror
|
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
|
[
"MIT"
] | null | null | null |
magicmirror/tools/limit/__init__.py
|
xiaodongxiexie/magicMirror
|
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: xiaodong
# @Date : 2021/6/1
| 13.4
| 23
| 0.507463
| 9
| 67
| 3.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 0.238806
| 67
| 4
| 24
| 16.75
| 0.529412
| 0.850746
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
291e3493dbe704f23652c7482a0d4a13bf18e84c
| 104
|
py
|
Python
|
registration/apps.py
|
jorgesaw/robotkmarket
|
d13c97dcd80ffd2a8cebcad674318e2fcf95ad89
|
[
"MIT"
] | null | null | null |
registration/apps.py
|
jorgesaw/robotkmarket
|
d13c97dcd80ffd2a8cebcad674318e2fcf95ad89
|
[
"MIT"
] | 9
|
2019-09-20T00:47:58.000Z
|
2022-02-10T12:54:13.000Z
|
registration/apps.py
|
jorgesaw/robotkmarket
|
d13c97dcd80ffd2a8cebcad674318e2fcf95ad89
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
name = 'registration'
| 17.333333
| 37
| 0.740385
| 10
| 104
| 7.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 104
| 5
| 38
| 20.8
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
293d6d78d12c360cd881cba6c693b0a709633e14
| 33
|
py
|
Python
|
autokey/Scripts/numpad/dot.py
|
Djuuu/dotfiles
|
98a1165a3be675a0cd142ddac316f6138563c1a5
|
[
"MIT"
] | 1
|
2020-10-18T15:43:41.000Z
|
2020-10-18T15:43:41.000Z
|
autokey/Scripts/numpad/dot.py
|
Djuuu/dotfiles
|
98a1165a3be675a0cd142ddac316f6138563c1a5
|
[
"MIT"
] | null | null | null |
autokey/Scripts/numpad/dot.py
|
Djuuu/dotfiles
|
98a1165a3be675a0cd142ddac316f6138563c1a5
|
[
"MIT"
] | 1
|
2020-04-20T05:36:18.000Z
|
2020-04-20T05:36:18.000Z
|
keyboard.send_keys("<shift>+;")
| 11
| 31
| 0.666667
| 4
| 33
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 33
| 2
| 32
| 16.5
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0.28125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
2959f48fe5a4ee64234dabf251481852cb6f16c4
| 347
|
py
|
Python
|
0x07-python-test_driven_development/101-lazy_matrix_mul.py
|
gogomillan/holbertonschool-higher_level_programming
|
1549ffc4fdc284271684321ff6edd882a314193a
|
[
"MIT"
] | null | null | null |
0x07-python-test_driven_development/101-lazy_matrix_mul.py
|
gogomillan/holbertonschool-higher_level_programming
|
1549ffc4fdc284271684321ff6edd882a314193a
|
[
"MIT"
] | null | null | null |
0x07-python-test_driven_development/101-lazy_matrix_mul.py
|
gogomillan/holbertonschool-higher_level_programming
|
1549ffc4fdc284271684321ff6edd882a314193a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
"""Module for multiplies 2 matrices by using the module NumPy
"""
def lazy_matrix_mul(m_a, m_b):
"""function that multiplies 2 matrices by using the module NumPy:
Args:
m_a (list int or float): Matrix a
m_b (list int or float): Matrix b
"""
import numpy as np
return np.matmul(m_a, m_b)
| 21.6875
| 70
| 0.648415
| 59
| 347
| 3.677966
| 0.525424
| 0.02765
| 0.041475
| 0.193548
| 0.552995
| 0.368664
| 0.368664
| 0.368664
| 0
| 0
| 0
| 0.011628
| 0.256484
| 347
| 15
| 71
| 23.133333
| 0.829457
| 0.642651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
464b91856de445c5f09047b0881ee9a2d256659f
| 246
|
py
|
Python
|
tests/testapp/testapp/index/__init__.py
|
avanov/Rhetoric
|
c446f6b17dc9cf614f43c1d1be3722bb42db5123
|
[
"MIT"
] | 9
|
2015-04-26T20:51:39.000Z
|
2021-11-05T23:35:37.000Z
|
tests/testapp/testapp/index/__init__.py
|
avanov/Rhetoric
|
c446f6b17dc9cf614f43c1d1be3722bb42db5123
|
[
"MIT"
] | 2
|
2015-01-27T19:33:42.000Z
|
2015-05-29T21:38:23.000Z
|
tests/testapp/testapp/index/__init__.py
|
avanov/Rhetoric
|
c446f6b17dc9cf614f43c1d1be3722bb42db5123
|
[
"MIT"
] | 5
|
2015-06-06T21:39:59.000Z
|
2021-11-05T23:35:41.000Z
|
def includeme(config):
config.add_route('index.dashboard', '/dashboard')
config.add_route('index.dashboard.api', '/api/dashboard/')
config.add_route('index.versions', '/versions')
config.add_route('index.json_body', '/json-body')
| 41
| 62
| 0.703252
| 31
| 246
| 5.419355
| 0.354839
| 0.214286
| 0.333333
| 0.452381
| 0.553571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105691
| 246
| 5
| 63
| 49.2
| 0.763636
| 0
| 0
| 0
| 0
| 0
| 0.434959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
46a4b997aa2cc99b9ae7e2c51f0d26076c2f89ff
| 887
|
py
|
Python
|
core/migrations/0005_auto_20210707_0111.py
|
Rakib01/blood_lagbe
|
367ad7991573f633386cc68bf5008a67d91955a7
|
[
"Apache-2.0"
] | 3
|
2021-04-24T16:30:09.000Z
|
2021-06-19T08:02:22.000Z
|
core/migrations/0005_auto_20210707_0111.py
|
Rakib01/blood_lagbe
|
367ad7991573f633386cc68bf5008a67d91955a7
|
[
"Apache-2.0"
] | 16
|
2021-04-24T07:44:34.000Z
|
2021-04-28T17:12:25.000Z
|
core/migrations/0005_auto_20210707_0111.py
|
Rakib01/blood_lagbe
|
367ad7991573f633386cc68bf5008a67d91955a7
|
[
"Apache-2.0"
] | 4
|
2021-04-24T23:42:51.000Z
|
2021-06-20T16:53:00.000Z
|
# Generated by Django 3.2 on 2021-07-06 19:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20210706_1858'),
]
operations = [
migrations.RemoveField(
model_name='union',
name='upazila',
),
migrations.RemoveField(
model_name='upazila',
name='district',
),
migrations.RemoveField(
model_name='village',
name='union',
),
migrations.DeleteModel(
name='District',
),
migrations.DeleteModel(
name='Division',
),
migrations.DeleteModel(
name='Union',
),
migrations.DeleteModel(
name='Upazila',
),
migrations.DeleteModel(
name='Village',
),
]
| 21.634146
| 45
| 0.503946
| 68
| 887
| 6.485294
| 0.485294
| 0.238095
| 0.283447
| 0.204082
| 0.154195
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054845
| 0.383315
| 887
| 40
| 46
| 22.175
| 0.751371
| 0.048478
| 0
| 0.470588
| 1
| 0
| 0.119952
| 0.027316
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3b18c628f4ddb1e5dd787a38eda66cd60c58da4
| 27
|
py
|
Python
|
kivy_garden/splittergrid/_version.py
|
kivy-garden/splittergrid
|
5164cdf31bd77d79b6cdea922d36726296c79b40
|
[
"MIT"
] | 7
|
2020-02-17T17:36:48.000Z
|
2020-09-14T14:15:18.000Z
|
kivy_garden/splittergrid/_version.py
|
kivy-garden/splittergrid
|
5164cdf31bd77d79b6cdea922d36726296c79b40
|
[
"MIT"
] | null | null | null |
kivy_garden/splittergrid/_version.py
|
kivy-garden/splittergrid
|
5164cdf31bd77d79b6cdea922d36726296c79b40
|
[
"MIT"
] | null | null | null |
__version__ = '1.0.1-dev0'
| 13.5
| 26
| 0.666667
| 5
| 27
| 2.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0.111111
| 27
| 1
| 27
| 27
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d3bdde28a3e4638ba4a077ec57cb7d9ae9eee578
| 253
|
py
|
Python
|
vsrl/verifier/keymaerax_pretty_printer.py
|
IBM/vsrl-framework
|
42e0853bffb5efbb66cd97178aff9e10ad18c5a9
|
[
"MIT"
] | 44
|
2020-07-08T20:24:11.000Z
|
2022-03-10T03:38:12.000Z
|
vsrl/verifier/keymaerax_pretty_printer.py
|
ThyLor/vsrl-framework
|
673001eabcd310699d980ad442f3856f60f6e1cf
|
[
"MIT"
] | 5
|
2020-07-10T14:36:05.000Z
|
2021-07-22T16:38:42.000Z
|
vsrl/verifier/keymaerax_pretty_printer.py
|
ThyLor/vsrl-framework
|
673001eabcd310699d980ad442f3856f60f6e1cf
|
[
"MIT"
] | 12
|
2020-07-08T15:38:23.000Z
|
2022-03-10T03:39:23.000Z
|
#
# Copyright (C) 2020 IBM. All Rights Reserved.
#
# See LICENSE.txt file in the root directory
# of this source tree for licensing information.
#
from vsrl.verifier.expr import Expression, pp
def keymaerax_pp(e: Expression) -> str:
return pp(e)
| 19.461538
| 48
| 0.727273
| 38
| 253
| 4.815789
| 0.894737
| 0.032787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019324
| 0.181818
| 253
| 12
| 49
| 21.083333
| 0.864734
| 0.529644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
3113d934b934bc1bbf8a7da2d8ba9cea96c2ef92
| 60
|
py
|
Python
|
cry/aesnoc/src/secret.py
|
wani-hackase/wanictf2021-writeup
|
67193041a413ab01bb7cfff2618db12a7e525e16
|
[
"MIT"
] | 3
|
2021-11-07T16:05:33.000Z
|
2021-11-08T03:59:20.000Z
|
cry/aesnoc/src/secret.py
|
wani-hackase/wanictf2021-writeup
|
67193041a413ab01bb7cfff2618db12a7e525e16
|
[
"MIT"
] | null | null | null |
cry/aesnoc/src/secret.py
|
wani-hackase/wanictf2021-writeup
|
67193041a413ab01bb7cfff2618db12a7e525e16
|
[
"MIT"
] | 3
|
2021-11-07T11:36:58.000Z
|
2022-03-09T13:55:04.000Z
|
flag = b"FLAG{Wh47_h4pp3n$_1f_y0u_kn0w_the_la5t_bl0ck___?}"
| 30
| 59
| 0.833333
| 11
| 60
| 3.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.05
| 60
| 1
| 60
| 60
| 0.54386
| 0
| 0
| 0
| 0
| 0
| 0.816667
| 0.816667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
312921d11efe108ad84f9d79532f7bbf1c3a7d8d
| 15,021
|
py
|
Python
|
tests/legacy/objects/repository/test_repository.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 8
|
2019-07-24T18:14:25.000Z
|
2022-03-01T18:33:53.000Z
|
tests/legacy/objects/repository/test_repository.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 33
|
2019-12-18T22:15:06.000Z
|
2022-03-30T06:08:38.000Z
|
tests/legacy/objects/repository/test_repository.py
|
timmo001/aiogithubapi
|
9d33bad77e49f8ee720bcd81c2cbab8a4cf8ebac
|
[
"MIT"
] | 14
|
2019-09-02T17:50:16.000Z
|
2022-03-14T10:30:37.000Z
|
# pylint: disable=missing-docstring, redefined-outer-name, unused-import
import datetime
import json
import pytest
from aiogithubapi import AIOGitHubAPIException, AIOGitHubAPINotModifiedException, GitHub
from tests.common import TOKEN
from tests.legacy.responses.base import base_response
from tests.legacy.responses.branch import branch_response
from tests.legacy.responses.contents import (
contents_file_response,
contents_list_response,
)
from tests.legacy.responses.issue_fixture import issue_response
from tests.legacy.responses.releases import releases_response
from tests.legacy.responses.repository_fixture import repository_response
from tests.legacy.responses.tree import tree_response
@pytest.mark.asyncio
async def test_get_repository(aresponses, repository_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
assert repository.description == "This your first repo!"
assert repository.id == 1296269
assert repository.name == "Hello-World"
assert repository.full_name == "octocat/Hello-World"
assert repository.pushed_at == datetime.datetime(2011, 1, 26, 19, 6, 43)
assert not repository.archived
assert repository.topics == ["octocat", "atom", "electron", "api"]
assert not repository.fork
assert repository.forks_count == 9
assert repository.default_branch == "main"
assert repository.last_commit is None
assert repository.homepage == "https://github.com"
assert repository.stargazers_count == 80
assert repository.watchers_count == 80
assert repository.owner.login == "octocat"
@pytest.mark.asyncio
async def test_set_last_commit(aresponses, repository_response, branch_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/branches/main",
"get",
aresponses.Response(
text=json.dumps(branch_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/branches/main",
"get",
aresponses.Response(status=304),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
assert repository.last_commit is None
await repository.set_last_commit()
assert repository.last_commit == "7fd1a60"
with pytest.raises(AIOGitHubAPINotModifiedException):
await repository.set_last_commit(github.client.last_response.etag)
@pytest.mark.asyncio
async def test_get_contents_file(aresponses, repository_response, contents_file_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/contents/README.md",
"get",
aresponses.Response(
text=json.dumps(contents_file_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
contents = await repository.get_contents("README.md", "main")
assert contents.name == "README.md"
@pytest.mark.asyncio
async def test_get_contents_list(aresponses, repository_response, contents_list_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/contents/",
"get",
aresponses.Response(
text=json.dumps(contents_list_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
contents = await repository.get_contents("", "main")
assert len(contents) == 2
@pytest.mark.asyncio
async def test_get_tree(aresponses, repository_response, tree_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/git/trees/main",
"get",
aresponses.Response(
text=json.dumps(tree_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
contents = await repository.get_tree("main")
assert contents[0].full_path == "subdir/file.txt"
with pytest.raises(AIOGitHubAPIException):
await repository.get_tree()
@pytest.mark.asyncio
async def test_get_rendered_contents(aresponses, repository_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/contents/README.md",
"get",
aresponses.Response(
text="test",
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/contents/README.md",
"get",
aresponses.Response(status=304),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
rendered = await repository.get_rendered_contents("README.md", "main")
assert rendered == "test"
with pytest.raises(AIOGitHubAPINotModifiedException):
await repository.get_rendered_contents(
"README.md", "main", etag=github.client.last_response.etag
)
@pytest.mark.asyncio
async def test_get_releases(aresponses, repository_response, releases_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/releases",
"get",
aresponses.Response(
text=json.dumps(releases_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/releases",
"get",
aresponses.Response(
text=json.dumps(releases_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/releases",
"get",
aresponses.Response(
text=json.dumps(releases_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/releases",
"get",
aresponses.Response(status=304),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
releases = await repository.get_releases()
assert len(releases) == 1
releases = await repository.get_releases(prerelease=True)
assert len(releases) == 2
assert releases[0].tag_name == "v1.0.0"
assert not releases[0].prerelease
releases = await repository.get_releases(prerelease=True, returnlimit=1)
assert len(releases) == 1
with pytest.raises(AIOGitHubAPINotModifiedException):
await repository.get_releases(etag=github.client.last_response.as_dict()["etag"])
@pytest.mark.asyncio
async def test_get_issue(aresponses, repository_response, issue_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/issues/1",
"get",
aresponses.Response(
text=json.dumps(issue_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/issues/1",
"get",
aresponses.Response(status=304),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
issue = await repository.get_issue(1)
assert issue.title == "Found a bug"
with pytest.raises(AIOGitHubAPINotModifiedException):
await repository.get_issue(1, etag=github.client.last_response.etag)
@pytest.mark.asyncio
async def test_create_issue(aresponses, repository_response):
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World",
"get",
aresponses.Response(
text=json.dumps(repository_response),
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
aresponses.add(
"api.github.com",
"/repos/octocat/Hello-World/issues",
"post",
aresponses.Response(
status=200,
headers={
"X-RateLimit-Remaining": "1337",
"Content-Type": "application/json",
"Etag": "xyz..zyx",
},
),
)
async with GitHub(TOKEN) as github:
repository = await github.get_repo("octocat/Hello-World")
data = {
"title": "test",
"body": "body",
"state": "closed",
"milestone": "v1.0",
"labels": ["test"],
"assignees": ["octocat"],
}
await repository.create_issue(**data)
@pytest.mark.asyncio
async def test_get_last_commit(aresponses, repository_response, branch_response):
    """get_last_commit resolves the branch head and honours ETag 304s."""
    api_headers = {
        "X-RateLimit-Remaining": "1337",
        "Content-Type": "application/json",
        "Etag": "xyz..zyx",
    }
    # Repository lookup.
    aresponses.add(
        "api.github.com",
        "/repos/octocat/Hello-World",
        "get",
        aresponses.Response(
            text=json.dumps(repository_response),
            status=200,
            headers=dict(api_headers),
        ),
    )
    # First branch fetch returns the branch payload...
    aresponses.add(
        "api.github.com",
        "/repos/octocat/Hello-World/branches/main",
        "get",
        aresponses.Response(
            text=json.dumps(branch_response),
            status=200,
            headers=dict(api_headers),
        ),
    )
    # ...the second (conditional) fetch answers 304 Not Modified.
    aresponses.add(
        "api.github.com",
        "/repos/octocat/Hello-World/branches/main",
        "get",
        aresponses.Response(status=304),
    )
    async with GitHub(TOKEN) as github:
        repository = await github.get_repo("octocat/Hello-World")
        # No commit cached before the first call.
        assert repository.last_commit is None
        commit = await repository.get_last_commit()
        assert commit.sha == "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d"
        with pytest.raises(AIOGitHubAPINotModifiedException):
            await repository.get_last_commit(etag=github.client.last_response.etag)
| 31.959574
| 93
| 0.55955
| 1,435
| 15,021
| 5.767944
| 0.099652
| 0.04591
| 0.075994
| 0.069107
| 0.800894
| 0.750151
| 0.729733
| 0.66123
| 0.639966
| 0.639966
| 0
| 0.022434
| 0.314493
| 15,021
| 469
| 94
| 32.027719
| 0.781393
| 0.00466
| 0
| 0.697727
| 0
| 0
| 0.213325
| 0.090708
| 0
| 0
| 0
| 0
| 0.065909
| 1
| 0
| false
| 0
| 0.027273
| 0
| 0.027273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
315e27f8a9a155a4890fd0cabc15015336210bde
| 3,608
|
py
|
Python
|
code/plot_silhouette.py
|
PaulRenauld/k-mean-cuda
|
3a905e875e3f2f80e4087ef142d100c7c1d86180
|
[
"MIT"
] | null | null | null |
code/plot_silhouette.py
|
PaulRenauld/k-mean-cuda
|
3a905e875e3f2f80e4087ef142d100c7c1d86180
|
[
"MIT"
] | null | null | null |
code/plot_silhouette.py
|
PaulRenauld/k-mean-cuda
|
3a905e875e3f2f80e4087ef142d100c7c1d86180
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import matplotlib.pyplot as plt
def main(output: str):
    """Plot silhouette score against cluster count k.

    Parses lines of the form ``silhouette with k=<k>: <score>`` from
    *output* (the captured stdout of the clustering run) and shows a
    k-vs-score line plot.

    Fixes over the original: the parameter is no longer rebound to an
    unrelated list, and blank or non-matching lines are skipped instead
    of crashing the int/float conversion.
    """
    prefix = "silhouette with k="
    values = []
    for raw_line in output.splitlines():
        line = raw_line.strip()
        if not line.startswith(prefix):
            # Blank line or unrelated log output — ignore it.
            continue
        k_text, _, score_text = line[len(prefix):].partition(':')
        values.append((int(k_text), float(score_text)))
    xs = [k for (k, _) in values]
    ys = [score for (_, score) in values]
    plt.plot(xs, ys, 'bo-')
    plt.title('Figure 1: Silhouette value for a dataset of 10000 points')
    plt.xlabel('Number of clusters k')
    plt.ylabel('Silhouette value')
    plt.show()
if __name__ == '__main__':
    # Captured stdout of the k-means benchmark run; each line reports the
    # silhouette score for one value of k on a 10000-point dataset.
    output = """
    silhouette with k=2: 0.346227
    silhouette with k=3: 0.470481
    silhouette with k=4: 0.417261
    silhouette with k=5: 0.409835
    silhouette with k=6: 0.456681
    silhouette with k=7: 0.448866
    silhouette with k=8: 0.472404
    silhouette with k=9: 0.427348
    silhouette with k=10: 0.416935
    silhouette with k=11: 0.475667
    silhouette with k=12: 0.476308
    silhouette with k=13: 0.464758
    silhouette with k=14: 0.470616
    silhouette with k=15: 0.497664
    silhouette with k=16: 0.486602
    silhouette with k=17: 0.493796
    silhouette with k=18: 0.493715
    silhouette with k=19: 0.491342
    silhouette with k=20: 0.502917
    silhouette with k=21: 0.517567
    silhouette with k=22: 0.507803
    silhouette with k=23: 0.496292
    silhouette with k=24: 0.48864
    silhouette with k=25: 0.49081
    silhouette with k=26: 0.482454
    silhouette with k=27: 0.476064
    silhouette with k=28: 0.472313
    silhouette with k=29: 0.483676
    silhouette with k=30: 0.480141
    silhouette with k=31: 0.477628
    silhouette with k=32: 0.469208
    silhouette with k=33: 0.468222
    silhouette with k=34: 0.473008
    silhouette with k=35: 0.473488
    silhouette with k=36: 0.462661
    silhouette with k=37: 0.458961
    silhouette with k=38: 0.460186
    silhouette with k=39: 0.460809
    silhouette with k=40: 0.448713
    silhouette with k=41: 0.444547
    silhouette with k=42: 0.43751
    silhouette with k=43: 0.432112
    silhouette with k=44: 0.419957
    silhouette with k=45: 0.415264
    silhouette with k=46: 0.414678
    silhouette with k=47: 0.415762
    silhouette with k=48: 0.410349
    silhouette with k=49: 0.40355
    silhouette with k=50: 0.396805
    silhouette with k=51: 0.397055
    silhouette with k=52: 0.39367
    silhouette with k=53: 0.394573
    silhouette with k=54: 0.382966
    silhouette with k=55: 0.375492
    silhouette with k=56: 0.373873
    silhouette with k=57: 0.363668
    silhouette with k=58: 0.373386
    silhouette with k=59: 0.370099
    silhouette with k=60: 0.367258
    silhouette with k=61: 0.367424
    silhouette with k=62: 0.362858
    silhouette with k=63: 0.366425
    silhouette with k=64: 0.360038
    silhouette with k=65: 0.356889
    silhouette with k=66: 0.354824
    silhouette with k=67: 0.356824
    silhouette with k=68: 0.351318
    silhouette with k=69: 0.352354
    silhouette with k=70: 0.347736
    silhouette with k=71: 0.348015
    silhouette with k=72: 0.345109
    silhouette with k=73: 0.355291
    silhouette with k=74: 0.353053
    silhouette with k=75: 0.353703
    silhouette with k=76: 0.348955
    silhouette with k=77: 0.343844
    silhouette with k=78: 0.339369
    silhouette with k=79: 0.338756
    silhouette with k=80: 0.339398
    silhouette with k=81: 0.339631
    silhouette with k=82: 0.339216
    silhouette with k=83: 0.340668
    silhouette with k=84: 0.341916
    silhouette with k=85: 0.343993
    silhouette with k=86: 0.34329
    silhouette with k=87: 0.344199
    silhouette with k=88: 0.341271
    silhouette with k=89: 0.337846
    silhouette with k=90: 0.340286
    silhouette with k=91: 0.340816
    silhouette with k=92: 0.339894
    silhouette with k=93: 0.33923
    silhouette with k=94: 0.338258
    silhouette with k=95: 0.337707
    silhouette with k=96: 0.335981
    silhouette with k=97: 0.336288
    silhouette with k=98: 0.336502
    silhouette with k=99: 0.335928
    """
    main(output)
| 30.319328
| 88
| 0.744734
| 673
| 3,608
| 3.977712
| 0.364042
| 0.517744
| 0.554725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.283172
| 0.14357
| 3,608
| 118
| 89
| 30.576271
| 0.583172
| 0.005543
| 0
| 0
| 0
| 0
| 0.878695
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.00885
| 0
| 0.017699
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
31624640156e8bb225b0b56809b1df9d7072c927
| 188
|
py
|
Python
|
robot_tests/libraries/fixture_library.py
|
frogshead/TestArchiver
|
26fb787a63aba5099724d5afa99f7403d57238ce
|
[
"Apache-2.0"
] | 24
|
2019-10-18T19:36:45.000Z
|
2021-11-12T11:17:45.000Z
|
robot_tests/libraries/fixture_library.py
|
frogshead/TestArchiver
|
26fb787a63aba5099724d5afa99f7403d57238ce
|
[
"Apache-2.0"
] | 29
|
2019-08-01T05:34:16.000Z
|
2021-11-05T10:44:29.000Z
|
robot_tests/libraries/fixture_library.py
|
frogshead/TestArchiver
|
26fb787a63aba5099724d5afa99f7403d57238ce
|
[
"Apache-2.0"
] | 19
|
2019-10-10T08:54:38.000Z
|
2021-12-18T14:35:53.000Z
|
class Interactions(object):
    """Robot Framework fixture library with a single stub keyword."""

    # One library instance is shared by all tests in a suite.
    ROBOT_LIBRARY_SCOPE = 'TEST SUITE'

    def __init__(self):
        # No interaction mode is selected until something sets one.
        self.mode = None

    @staticmethod
    def interact_with_sut():
        """Stub keyword: pretend to talk to the system under test."""
        return "foo"
| 15.666667
| 38
| 0.632979
| 21
| 188
| 5.285714
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276596
| 188
| 11
| 39
| 17.090909
| 0.816176
| 0
| 0
| 0
| 0
| 0
| 0.069519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
317f9d22d5a970384fc60efd1a96b525aab120f9
| 85
|
py
|
Python
|
segfault_strategies/lahni/apps.py
|
SDharan93/DementiaHack2017_backend
|
3f0ca36895017fff6ae06c4eb29923299f0a9b3f
|
[
"MIT"
] | null | null | null |
segfault_strategies/lahni/apps.py
|
SDharan93/DementiaHack2017_backend
|
3f0ca36895017fff6ae06c4eb29923299f0a9b3f
|
[
"MIT"
] | null | null | null |
segfault_strategies/lahni/apps.py
|
SDharan93/DementiaHack2017_backend
|
3f0ca36895017fff6ae06c4eb29923299f0a9b3f
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class LahniConfig(AppConfig):
    """Django application configuration for the 'lahni' app."""
    # Dotted module path registered with Django's app registry.
    name = 'lahni'
| 14.166667
| 33
| 0.741176
| 10
| 85
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 85
| 5
| 34
| 17
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
31842c3dae9958e743a8351a0edeaa632f6a575d
| 182
|
py
|
Python
|
run_socket_server.py
|
siAyush/raven-distribution-framework
|
9478238f136d3ba3ae2e01b5ba86e77282bac708
|
[
"MIT"
] | 32
|
2020-06-30T09:39:48.000Z
|
2022-02-14T08:33:09.000Z
|
run_socket_server.py
|
siAyush/raven-distribution-framework
|
9478238f136d3ba3ae2e01b5ba86e77282bac708
|
[
"MIT"
] | 3
|
2021-05-05T14:35:15.000Z
|
2022-02-15T10:25:59.000Z
|
run_socket_server.py
|
siAyush/raven-distribution-framework
|
9478238f136d3ba3ae2e01b5ba86e77282bac708
|
[
"MIT"
] | 18
|
2020-06-30T22:12:11.000Z
|
2022-02-15T10:00:00.000Z
|
from aiohttp import web
# We kick off our server
from ravsock.socketio_server import app
if __name__ == '__main__':
    # Entry point: serve the ravsock Socket.IO aiohttp app on port 9999.
    print("Starting server...")
    web.run_app(app, port=9999)
| 20.222222
| 39
| 0.71978
| 27
| 182
| 4.481481
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.175824
| 182
| 8
| 40
| 22.75
| 0.78
| 0.120879
| 0
| 0
| 0
| 0
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
31873216f0a4188329be7d5dda06ce4db3f78b9f
| 112
|
py
|
Python
|
online_shop/coupon/forms.py
|
djangojeng-e/djangoproejcts
|
1efc3bc04a4a1bef039c906584cfecf0231c177f
|
[
"MIT"
] | null | null | null |
online_shop/coupon/forms.py
|
djangojeng-e/djangoproejcts
|
1efc3bc04a4a1bef039c906584cfecf0231c177f
|
[
"MIT"
] | null | null | null |
online_shop/coupon/forms.py
|
djangojeng-e/djangoproejcts
|
1efc3bc04a4a1bef039c906584cfecf0231c177f
|
[
"MIT"
] | null | null | null |
from django import forms
class AddCouponForm(forms.Form):
    """Form for entering a coupon code at checkout."""
    # Free-text coupon code; validation against stored coupons happens
    # in the view, not here.
    code = forms.CharField(label='Your Coupon Code')
| 22.4
| 52
| 0.758929
| 15
| 112
| 5.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 112
| 5
| 52
| 22.4
| 0.885417
| 0
| 0
| 0
| 0
| 0
| 0.141593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
319cda203267a8d883e3f4dbdd6a23cab5feb578
| 104
|
py
|
Python
|
metaci/testresults/apps.py
|
sfdc-qbranch/MetaCI
|
78ac0d2bccd2db381998321ebd71029dd5d9ab39
|
[
"BSD-3-Clause"
] | 48
|
2018-10-24T14:52:06.000Z
|
2022-03-25T21:14:50.000Z
|
metaci/testresults/apps.py
|
sfdc-qbranch/MetaCI
|
78ac0d2bccd2db381998321ebd71029dd5d9ab39
|
[
"BSD-3-Clause"
] | 2,034
|
2018-10-31T20:59:16.000Z
|
2022-03-22T21:38:03.000Z
|
metaci/testresults/apps.py
|
sfdc-qbranch/MetaCI
|
78ac0d2bccd2db381998321ebd71029dd5d9ab39
|
[
"BSD-3-Clause"
] | 27
|
2018-12-24T18:16:23.000Z
|
2021-12-15T17:57:27.000Z
|
from django.apps import AppConfig
class TestResultsConfig(AppConfig):
    """Django application configuration for the testresults app."""
    # Dotted module path registered with Django's app registry.
    name = "metaci.testresults"
| 17.333333
| 35
| 0.778846
| 11
| 104
| 7.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 104
| 5
| 36
| 20.8
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
31c64e20488f9dc82cebb6b13ac8e7bff86eb49e
| 333
|
py
|
Python
|
hassio-google-drive-backup/backup/ha/__init__.py
|
voxipbx/hassio-addons
|
ddf2f1745f4240c2905f9f9d7616a927fb8934e4
|
[
"MIT"
] | 1
|
2022-01-22T18:27:04.000Z
|
2022-01-22T18:27:04.000Z
|
hassio-google-drive-backup/backup/ha/__init__.py
|
voxipbx/hassio-addons
|
ddf2f1745f4240c2905f9f9d7616a927fb8934e4
|
[
"MIT"
] | 19
|
2021-09-29T06:13:25.000Z
|
2022-03-08T06:13:49.000Z
|
hassio-google-drive-backup/backup/ha/__init__.py
|
voxipbx/hassio-addons
|
ddf2f1745f4240c2905f9f9d7616a927fb8934e4
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .hasource import HaSource, HABackup, PendingBackup, SOURCE_HA
from .haupdater import HaUpdater
from .harequests import HaRequests, EVENT_BACKUP_END, EVENT_BACKUP_START, VERSION_BACKUP_PATH
from .backupname import BackupName, BACKUP_NAME_KEYS
from .password import Password
from .addon_stopper import AddonStopper
| 41.625
| 93
| 0.852853
| 43
| 333
| 6.372093
| 0.55814
| 0.080292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003344
| 0.102102
| 333
| 7
| 94
| 47.571429
| 0.913043
| 0.036036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
31e035b6dcc3de8812d4cdef53fc14618668f605
| 5,462
|
py
|
Python
|
tests/test_cli.py
|
brsynth/rpreactor
|
37c649d27f0c9f08cf7281b711d1f27b8cd6d775
|
[
"MIT"
] | 4
|
2021-01-20T19:42:38.000Z
|
2022-02-11T13:54:49.000Z
|
tests/test_cli.py
|
brsynth/rpreactor
|
37c649d27f0c9f08cf7281b711d1f27b8cd6d775
|
[
"MIT"
] | 9
|
2020-09-09T12:59:55.000Z
|
2021-05-06T16:15:24.000Z
|
tests/test_cli.py
|
brsynth/rpreactor
|
37c649d27f0c9f08cf7281b711d1f27b8cd6d775
|
[
"MIT"
] | null | null | null |
import pytest
from rpreactor import cli
import json
@pytest.fixture
def result_example():
    """Return a (raw results, expected JSON text) pair for jsonify tests.

    Note that the expected JSON has no 'product_list' key, while the raw
    input does — cli.jsonify is expected to drop it.
    """
    # Raw result records as produced by the rule-application step.
    result_input = [{
        'rule_id': '0',
        'substrate_id': '0',
        'product_list':
            [['RDKIT_DUMMY', 'RDKIT_DUMMY']],
        'product_inchikeys':
            [['CSZRNWHGZPKNKY-UHFFFAOYSA-N', 'QGWNDRXFNXRZMB-UHFFFAOYSA-N']],
        'product_inchis':
            [['InChI=1S/C3H7O6P/c1-2(3(4)5)9-10(6,7)8/h2H,1H3,(H,4,5)(H2,6,7,8)', 'InChI=1S/C10H15N5O11P2/c11-10-13-7-4(8(18)14-10)12-2-15(7)9-6(17)5(16)3(25-9)1-24-28(22,23)26-27(19,20)21/h2-3,5-6,9,16-17H,1H2,(H,22,23)(H2,19,20,21)(H3,11,13,14,18)']],
        'product_smiles':
            [['[H]OC(=O)C([H])(OP(=O)(O[H])O[H])C([H])([H])[H]', '[H]N=c1nc(O[H])c2nc([H])n(C3([H])OC([H])(C([H])([H])OP(=O)(O[H])OP(=O)(O[H])O[H])C([H])(O[H])C3([H])O[H])c2n1[H]']]
    }]
    # The serialized form jsonify is expected to produce.
    result_json = """
    [
        {
            "rule_id": "0",
            "substrate_id": "0",
            "product_inchikeys": [
                [
                    "CSZRNWHGZPKNKY-UHFFFAOYSA-N",
                    "QGWNDRXFNXRZMB-UHFFFAOYSA-N"
                ]
            ],
            "product_inchis": [
                [
                    "InChI=1S/C3H7O6P/c1-2(3(4)5)9-10(6,7)8/h2H,1H3,(H,4,5)(H2,6,7,8)",
                    "InChI=1S/C10H15N5O11P2/c11-10-13-7-4(8(18)14-10)12-2-15(7)9-6(17)5(16)3(25-9)1-24-28(22,23)26-27(19,20)21/h2-3,5-6,9,16-17H,1H2,(H,22,23)(H2,19,20,21)(H3,11,13,14,18)"
                ]
            ],
            "product_smiles": [
                [
                    "[H]OC(=O)C([H])(OP(=O)(O[H])O[H])C([H])([H])[H]",
                    "[H]N=c1nc(O[H])c2nc([H])n(C3([H])OC([H])(C([H])([H])OP(=O)(O[H])OP(=O)(O[H])O[H])C([H])(O[H])C3([H])O[H])c2n1[H]"
                ]
            ]
        }
    ]
    """
    return result_input, result_json
def test_jsonify(result_example):
    """cli.jsonify must serialize results to the expected JSON document."""
    raw_results, expected_json = result_example
    produced = json.loads(cli.jsonify(raw_results))
    assert produced == json.loads(expected_json)
def test_write(tmp_path):
    """cli.write must write the given text verbatim to ofile."""
    payload = 'Hello, world'
    target = tmp_path / 'ofile.json'
    cli.write(payload, ofile=target)
    with open(target) as handle:
        assert handle.read() == payload
def test_build_arg_parser(mocker):
    """__build_arg_parser must accept all three invocation forms."""
    # No arguments: parsing succeeds, but no subcommand handler is bound,
    # so accessing args.func raises AttributeError.
    args = ['prog']
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    args = parser.parse_args()
    with pytest.raises(AttributeError):
        args.func()
    # inline mode: rule and substrate given directly on the command line.
    args = ['prog', 'inline', '--rsmarts', 'DUMMY_RSMARTS', '--inchi', 'DUMMY_INCHI']
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    parser.parse_args()
    # infile mode: rule and substrate read from files.
    args = ['prog', 'infile', '--rfile', 'DUMMY_RFILE', '--cfile', 'DUMMY_CFILE']
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    parser.parse_args()
def test_inline_mode(mocker):
    """__inline_mode must apply a reaction SMARTS to an InChI substrate."""
    substrate_inchi = 'InChI=1S/C3H6O3/c1-2(4)3(5)6/h2,4H,1H3,(H,5,6)'
    reaction_smarts = '([#8&v2:1](-[#6&v4:2](-[#6&v4:3](-[#8&v2:4]-[#1&v1:5])=[#8&v2:6])(-[#6&v4:7](-[#1&v1:8])(-[#1&v1:9])-[#1&v1:10])-[#1&v1:11])-[#1&v1:12])>>([#15&v5](=[#8&v2])(-[#8&v2]-[#1&v1])(-[#8&v2]-[#1&v1])-[#8&v2:1]-[#6&v4:2](-[#6&v4:3](-[#8&v2:4]-[#1&v1:5])=[#8&v2:6])(-[#6&v4:7](-[#1&v1:8])(-[#1&v1:9])-[#1&v1:10])-[#1&v1:11].[#7&v3](=[#6&v4]1:[#7&v3]:[#6&v4](-[#8&v2]-[#1&v1]):[#6&v4]2:[#7&v3]:[#6&v4](-[#1&v1]):[#7&v3](-[#6&v4]3(-[#1&v1])-[#8&v2]-[#6&v4](-[#6&v4](-[#8&v2]-[#15&v5](=[#8&v2])(-[#8&v2]-[#1&v1])-[#8&v2]-[#15&v5](-[#8&v2]-[#1&v1:12])(=[#8&v2])-[#8&v2]-[#1&v1])(-[#1&v1])-[#1&v1])(-[#1&v1])-[#6&v4](-[#8&v2]-[#1&v1])(-[#1&v1])-[#6&v4]-3(-[#8&v2]-[#1&v1])-[#1&v1]):[#6&v4]:2:[#7&v3]:1-[#1&v1])-[#1&v1])'
    # Without ID: rule and chemical IDs default.
    args = ['prog', 'inline', '--rsmarts', reaction_smarts, '--inchi', substrate_inchi]
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    args = parser.parse_args()
    cli.__inline_mode(args)
    # With IDs: explicit rule and chemical identifiers are accepted.
    args = ['prog', 'inline', '--rsmarts', reaction_smarts, '--rid', 'rid', '--inchi', substrate_inchi, '--cid', 'cid']
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    args = parser.parse_args()
    cli.__inline_mode(args)
def test_infile_mode(mocker, tmpdir):
    """__infile_mode must apply rules read from TSV files.

    Writes a one-row chemical file and a one-row rule file, then invokes
    the CLI's infile mode against them.
    """
    substrate_inchi = 'InChI=1S/C3H6O3/c1-2(4)3(5)6/h2,4H,1H3,(H,5,6)'
    reaction_smarts = '([#8&v2:1](-[#6&v4:2](-[#6&v4:3](-[#8&v2:4]-[#1&v1:5])=[#8&v2:6])(-[#6&v4:7](-[#1&v1:8])(-[#1&v1:9])-[#1&v1:10])-[#1&v1:11])-[#1&v1:12])>>([#15&v5](=[#8&v2])(-[#8&v2]-[#1&v1])(-[#8&v2]-[#1&v1])-[#8&v2:1]-[#6&v4:2](-[#6&v4:3](-[#8&v2:4]-[#1&v1:5])=[#8&v2:6])(-[#6&v4:7](-[#1&v1:8])(-[#1&v1:9])-[#1&v1:10])-[#1&v1:11].[#7&v3](=[#6&v4]1:[#7&v3]:[#6&v4](-[#8&v2]-[#1&v1]):[#6&v4]2:[#7&v3]:[#6&v4](-[#1&v1]):[#7&v3](-[#6&v4]3(-[#1&v1])-[#8&v2]-[#6&v4](-[#6&v4](-[#8&v2]-[#15&v5](=[#8&v2])(-[#8&v2]-[#1&v1])-[#8&v2]-[#15&v5](-[#8&v2]-[#1&v1:12])(=[#8&v2])-[#8&v2]-[#1&v1])(-[#1&v1])-[#1&v1])(-[#1&v1])-[#6&v4](-[#8&v2]-[#1&v1])(-[#1&v1])-[#6&v4]-3(-[#8&v2]-[#1&v1])-[#1&v1]):[#6&v4]:2:[#7&v3]:1-[#1&v1])-[#1&v1])'
    # BUGFIX: the data rows previously separated value and ID with '\n'
    # while the header used '\t', producing malformed TSV files.
    with open(f'{tmpdir}/cfile.tsv', 'w') as ofh:
        ofh.writelines([
            'inchi\tchem_id\n',
            f'{substrate_inchi}\tcid\n'
        ])
    with open(f'{tmpdir}/rfile.tsv', 'w') as ofh:
        ofh.writelines([
            'rule_smarts\trule_id\n',
            f'{reaction_smarts}\trid\n'
        ])
    args = ['prog', 'infile', '--rfile', f'{tmpdir}/rfile.tsv', '--cfile', f'{tmpdir}/cfile.tsv']
    mocker.patch('sys.argv', args)
    parser = cli.__build_arg_parser()
    args = parser.parse_args()
    cli.__infile_mode(args)
| 46.683761
| 730
| 0.506591
| 973
| 5,462
| 2.750257
| 0.141829
| 0.06278
| 0.029895
| 0.035874
| 0.680867
| 0.680867
| 0.638266
| 0.62145
| 0.62145
| 0.62145
| 0
| 0.141259
| 0.165324
| 5,462
| 116
| 731
| 47.086207
| 0.445712
| 0.010253
| 0
| 0.30303
| 0
| 0.121212
| 0.612778
| 0.458333
| 0
| 0
| 0
| 0
| 0.020202
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.10101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9ecd14d646846fb92aeac5e19e0e39749bd0933e
| 318
|
py
|
Python
|
chat/serializers.py
|
BickySamourai/djreact
|
cea500cb3dc841100cc058110d7e2c6d813ca8b8
|
[
"MIT"
] | 1
|
2018-12-05T11:21:50.000Z
|
2018-12-05T11:21:50.000Z
|
chat/serializers.py
|
floriansollami/djreact
|
cea500cb3dc841100cc058110d7e2c6d813ca8b8
|
[
"MIT"
] | 2
|
2020-02-11T23:28:33.000Z
|
2020-06-05T19:36:41.000Z
|
chat/serializers.py
|
BickySamourai/djreact
|
cea500cb3dc841100cc058110d7e2c6d813ca8b8
|
[
"MIT"
] | 1
|
2018-12-10T10:32:23.000Z
|
2018-12-10T10:32:23.000Z
|
from rest_framework import serializers
from .models import Chatroom,Message
class ChatroomSerializer(serializers.ModelSerializer):
    """Serialize every field of the Chatroom model."""

    class Meta:
        model = Chatroom
        fields = '__all__'
class MessageSerializer(serializers.ModelSerializer):
    """Serialize every field of the Message model."""

    class Meta:
        model = Message
        fields = '__all__'
| 24.461538
| 54
| 0.72956
| 30
| 318
| 7.433333
| 0.533333
| 0.233184
| 0.278027
| 0.313901
| 0.358744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207547
| 318
| 13
| 55
| 24.461538
| 0.884921
| 0
| 0
| 0.4
| 0
| 0
| 0.043887
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9ed5833524bd46370d1a792ca01402bfd1886f31
| 220
|
py
|
Python
|
graphite_feeder/handler/event/appliance/sprinkler/duration.py
|
majamassarini/automate-graphite-feeder
|
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
|
[
"MIT"
] | null | null | null |
graphite_feeder/handler/event/appliance/sprinkler/duration.py
|
majamassarini/automate-graphite-feeder
|
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
|
[
"MIT"
] | null | null | null |
graphite_feeder/handler/event/appliance/sprinkler/duration.py
|
majamassarini/automate-graphite-feeder
|
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
|
[
"MIT"
] | null | null | null |
import home
from graphite_feeder.handler.event.appliance.int import Handler as Parent
class Handler(Parent):
    """Graphite feed handler for sprinkler duration events."""
    # Event class this handler is dispatched for.
    KLASS = home.appliance.sprinkler.event.duration.Event
    # NOTE(review): presumably the graph title rendered by the parent
    # int-event handler — confirm against Parent's use of TITLE.
    TITLE = "Sprinkling time duration (seconds)"
| 22
| 73
| 0.777273
| 28
| 220
| 6.071429
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140909
| 220
| 9
| 74
| 24.444444
| 0.899471
| 0
| 0
| 0
| 0
| 0
| 0.154545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9ed67e9f0ee77cc64725e04bc6d1196191ff021f
| 230
|
py
|
Python
|
altium/soup.py
|
jimmyswimmy/AltiumDesignerLibrary
|
6c751a20c6d7c0ea3b05bb897fa2d2f1b41a7ac3
|
[
"MIT"
] | 2
|
2018-10-02T20:49:08.000Z
|
2021-03-25T15:45:20.000Z
|
altium/soup.py
|
normaldotcom/AltiumDesignerLibrary
|
6c751a20c6d7c0ea3b05bb897fa2d2f1b41a7ac3
|
[
"MIT"
] | null | null | null |
altium/soup.py
|
normaldotcom/AltiumDesignerLibrary
|
6c751a20c6d7c0ea3b05bb897fa2d2f1b41a7ac3
|
[
"MIT"
] | 1
|
2019-01-29T19:54:09.000Z
|
2019-01-29T19:54:09.000Z
|
import urllib.request
from bs4 import BeautifulSoup
def soup(url):
    """Fetch *url* over HTTP and return its body parsed by BeautifulSoup.

    Fixes a resource leak: the original never closed the urlopen
    response; the with-statement closes it deterministically.
    """
    with urllib.request.urlopen(url) as response:
        # NOTE(review): no explicit parser is passed, so bs4 guesses one
        # (and emits GuessedAtParserWarning); consider
        # BeautifulSoup(..., "html.parser") for deterministic parsing.
        return BeautifulSoup(response.read())
def make_digikey_url(part_number):
    """Build a Digi-Key search URL for *part_number*. Not implemented yet."""
    pass
def get_digikey_info(part_number):
    """Look up Digi-Key info for *part_number*. Not implemented yet."""
    pass
| 19.166667
| 61
| 0.734783
| 31
| 230
| 5.258065
| 0.612903
| 0.159509
| 0.171779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005319
| 0.182609
| 230
| 11
| 62
| 20.909091
| 0.861702
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.25
| 0.25
| 0.125
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
9ee2cdc6477790ec06acad7de7b0db12580bd5d2
| 20,243
|
py
|
Python
|
python/ray/tune/tests/test_function_api.py
|
ruiminshen/ray
|
b3564cda3d214cd19e9ea3804c21a46d5e14a914
|
[
"Apache-2.0"
] | 3
|
2020-12-03T17:48:45.000Z
|
2022-01-22T08:09:46.000Z
|
python/ray/tune/tests/test_function_api.py
|
mfitton/ray
|
fece8db70d703da1aad192178bd50923e83cc99a
|
[
"Apache-2.0"
] | 72
|
2021-02-06T08:07:16.000Z
|
2022-03-26T07:17:49.000Z
|
python/ray/tune/tests/test_function_api.py
|
mfitton/ray
|
fece8db70d703da1aad192178bd50923e83cc99a
|
[
"Apache-2.0"
] | 2
|
2021-05-05T21:05:16.000Z
|
2021-06-22T21:16:03.000Z
|
import json
import os
import sys
import shutil
import tempfile
import unittest
import ray
import ray.cloudpickle as cloudpickle
from ray.rllib import _register_all
from ray import tune
from ray.tune.logger import NoopLogger
from ray.tune.utils.trainable import TrainableUtil
from ray.tune.function_runner import with_parameters, wrap_function, \
FuncCheckpointUtil
from ray.tune.result import DEFAULT_METRIC, TRAINING_ITERATION
def creator_generator(logdir):
    """Return a logger_creator callable that places NoopLoggers in *logdir*."""

    def _make_noop_logger(config):
        # Only the placement directory matters; NoopLogger discards results.
        return NoopLogger(config, logdir)

    return _make_noop_logger
class FuncCheckpointUtilTest(unittest.TestCase):
    """Unit tests for FuncCheckpointUtil's checkpoint-directory helpers."""

    def setUp(self):
        # Fresh scratch directory per test; removed in tearDown.
        self.logdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.logdir)

    def testEmptyCheckpoint(self):
        # A freshly created "null" checkpoint dir must be recognized as null.
        checkpoint_dir = FuncCheckpointUtil.mk_null_checkpoint_dir(self.logdir)
        assert FuncCheckpointUtil.is_null_checkpoint(checkpoint_dir)

    def testTempCheckpointDir(self):
        # A freshly created temp checkpoint dir must be recognized as temp.
        checkpoint_dir = FuncCheckpointUtil.mk_temp_checkpoint_dir(self.logdir)
        assert FuncCheckpointUtil.is_temp_checkpoint_dir(checkpoint_dir)

    def testConvertTempToPermanent(self):
        # Promoting a temp dir to a permanent step-4 checkpoint must yield a
        # discoverable, existing, non-temp directory...
        checkpoint_dir = FuncCheckpointUtil.mk_temp_checkpoint_dir(self.logdir)
        new_checkpoint_dir = FuncCheckpointUtil.create_perm_checkpoint(
            checkpoint_dir, self.logdir, step=4)
        assert new_checkpoint_dir == TrainableUtil.find_checkpoint_dir(
            new_checkpoint_dir)
        assert os.path.exists(new_checkpoint_dir)
        assert not FuncCheckpointUtil.is_temp_checkpoint_dir(
            new_checkpoint_dir)

        # ...and a subsequent temp dir must not collide with it.
        tmp_checkpoint_dir = FuncCheckpointUtil.mk_temp_checkpoint_dir(
            self.logdir)
        assert tmp_checkpoint_dir != new_checkpoint_dir
class FunctionCheckpointingTest(unittest.TestCase):
    """Save/restore behavior of function trainables built by wrap_function."""

    def setUp(self):
        # Scratch logdir plus a logger_creator placing NoopLoggers in it.
        self.logdir = tempfile.mkdtemp()
        self.logger_creator = creator_generator(self.logdir)

    def tearDown(self):
        shutil.rmtree(self.logdir)

    def testCheckpointReuse(self):
        """Test that repeated save/restore never reuses same checkpoint dir."""

        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                # After a restore, exactly one checkpoint file may be present.
                count = sum("checkpoint-" in path
                            for path in os.listdir(checkpoint_dir))
                assert count == 1, os.listdir(checkpoint_dir)

            for step in range(20):
                with tune.checkpoint_dir(step=step) as checkpoint_dir:
                    path = os.path.join(checkpoint_dir,
                                        "checkpoint-{}".format(step))
                    open(path, "a").close()
                tune.report(test=step)

        wrapped = wrap_function(train)
        checkpoint = None
        # Five generations of trainables, each restored from the previous
        # one's disk checkpoint and trained two more iterations.
        # NOTE(review): the inner loop reuses loop variable `i`.
        for i in range(5):
            new_trainable = wrapped(logger_creator=self.logger_creator)
            if checkpoint:
                new_trainable.restore(checkpoint)
            for i in range(2):
                result = new_trainable.train()
            checkpoint = new_trainable.save()
            new_trainable.stop()
        assert result[TRAINING_ITERATION] == 10

    def testCheckpointReuseObject(self):
        """Test that repeated save/restore never reuses same checkpoint dir."""

        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                # After a restore, exactly one checkpoint file may be present.
                count = sum("checkpoint-" in path
                            for path in os.listdir(checkpoint_dir))
                assert count == 1, os.listdir(checkpoint_dir)

            for step in range(20):
                with tune.checkpoint_dir(step=step) as checkpoint_dir:
                    path = os.path.join(checkpoint_dir,
                                        "checkpoint-{}".format(step))
                    open(path, "a").close()
                tune.report(test=step)

        wrapped = wrap_function(train)
        checkpoint = None
        # Same as testCheckpointReuse, but using in-memory object
        # checkpoints instead of on-disk ones.
        for i in range(5):
            new_trainable = wrapped(logger_creator=self.logger_creator)
            if checkpoint:
                new_trainable.restore_from_object(checkpoint)
            for i in range(2):
                result = new_trainable.train()
            checkpoint = new_trainable.save_to_object()
            new_trainable.stop()
        self.assertTrue(result[TRAINING_ITERATION] == 10)

    def testCheckpointReuseObjectWithoutTraining(self):
        """Test that repeated save/restore never reuses same checkpoint dir."""

        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                count = sum("checkpoint-" in path
                            for path in os.listdir(checkpoint_dir))
                assert count == 1, os.listdir(checkpoint_dir)

            for step in range(20):
                with tune.checkpoint_dir(step=step) as checkpoint_dir:
                    path = os.path.join(checkpoint_dir,
                                        "checkpoint-{}".format(step))
                    open(path, "a").close()
                tune.report(test=step)

        wrapped = wrap_function(train)
        # Train two iterations, then snapshot to an object checkpoint.
        new_trainable = wrapped(logger_creator=self.logger_creator)
        for i in range(2):
            result = new_trainable.train()
        checkpoint = new_trainable.save_to_object()
        new_trainable.stop()

        # Restoring without training must not corrupt the checkpoint.
        new_trainable2 = wrapped(logger_creator=self.logger_creator)
        new_trainable2.restore_from_object(checkpoint)
        new_trainable2.stop()

        # Restoring again and training once continues from iteration 2.
        new_trainable2 = wrapped(logger_creator=self.logger_creator)
        new_trainable2.restore_from_object(checkpoint)
        result = new_trainable2.train()
        new_trainable2.stop()
        self.assertTrue(result[TRAINING_ITERATION] == 3)

    def testReuseNullCheckpoint(self):
        # Trainable that never writes its own checkpoints.
        def train(config, checkpoint_dir=None):
            assert not checkpoint_dir
            for step in range(10):
                tune.report(test=step)

        # Create checkpoint
        wrapped = wrap_function(train)
        checkpoint = None
        new_trainable = wrapped(logger_creator=self.logger_creator)
        new_trainable.train()
        checkpoint = new_trainable.save()
        new_trainable.stop()

        # Use the checkpoint a couple of times
        for i in range(3):
            new_trainable = wrapped(logger_creator=self.logger_creator)
            new_trainable.restore(checkpoint)
            new_trainable.stop()

        # Make sure the result is still good
        new_trainable = wrapped(logger_creator=self.logger_creator)
        new_trainable.restore(checkpoint)
        result = new_trainable.train()
        checkpoint = new_trainable.save()
        new_trainable.stop()
        self.assertTrue(result[TRAINING_ITERATION] == 1)

    def testMultipleNullCheckpoints(self):
        # Repeated save/restore of a trainable with no user checkpoints
        # must keep reporting a single training iteration per generation.
        def train(config, checkpoint_dir=None):
            assert not checkpoint_dir
            for step in range(10):
                tune.report(test=step)

        wrapped = wrap_function(train)
        checkpoint = None
        for i in range(5):
            new_trainable = wrapped(logger_creator=self.logger_creator)
            if checkpoint:
                new_trainable.restore(checkpoint)
            result = new_trainable.train()
            checkpoint = new_trainable.save()
            new_trainable.stop()
        self.assertTrue(result[TRAINING_ITERATION] == 1)

    def testMultipleNullMemoryCheckpoints(self):
        # Same as testMultipleNullCheckpoints, with object checkpoints.
        def train(config, checkpoint_dir=None):
            assert not checkpoint_dir
            for step in range(10):
                tune.report(test=step)

        wrapped = wrap_function(train)
        checkpoint = None
        for i in range(5):
            new_trainable = wrapped(logger_creator=self.logger_creator)
            if checkpoint:
                new_trainable.restore_from_object(checkpoint)
            result = new_trainable.train()
            checkpoint = new_trainable.save_to_object()
            new_trainable.stop()
        assert result[TRAINING_ITERATION] == 1

    def testFunctionNoCheckpointing(self):
        # save/restore must work even when the function itself writes
        # nothing into the checkpoint directory.
        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                assert os.path.exists(checkpoint_dir)
            for step in range(10):
                tune.report(test=step)

        wrapped = wrap_function(train)

        new_trainable = wrapped(logger_creator=self.logger_creator)
        result = new_trainable.train()
        checkpoint = new_trainable.save()
        new_trainable.stop()

        new_trainable2 = wrapped(logger_creator=self.logger_creator)
        new_trainable2.restore(checkpoint)
        result = new_trainable2.train()
        self.assertEquals(result[TRAINING_ITERATION], 1)
        checkpoint = new_trainable2.save()
        new_trainable2.stop()

    def testFunctionRecurringSave(self):
        """This tests that save and restore are commutative."""

        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                assert os.path.exists(checkpoint_dir)
            for step in range(10):
                # Checkpoint every third step only.
                if step % 3 == 0:
                    with tune.checkpoint_dir(step=step) as checkpoint_dir:
                        path = os.path.join(checkpoint_dir, "checkpoint")
                        with open(path, "w") as f:
                            f.write(json.dumps({"step": step}))
                tune.report(test=step)

        wrapped = wrap_function(train)

        new_trainable = wrapped(logger_creator=self.logger_creator)
        new_trainable.train()
        # Object save followed by object restore, then a disk save.
        checkpoint_obj = new_trainable.save_to_object()
        new_trainable.restore_from_object(checkpoint_obj)
        checkpoint = new_trainable.save()
        new_trainable.stop()

        new_trainable2 = wrapped(logger_creator=self.logger_creator)
        new_trainable2.restore(checkpoint)
        new_trainable2.train()
        new_trainable2.stop()

    def testFunctionImmediateSave(self):
        """This tests that save and restore are commutative."""

        def train(config, checkpoint_dir=None):
            if checkpoint_dir:
                assert os.path.exists(checkpoint_dir)
            for step in range(10):
                with tune.checkpoint_dir(step=step) as checkpoint_dir:
                    print(checkpoint_dir)
                    path = os.path.join(checkpoint_dir,
                                        "checkpoint-{}".format(step))
                    open(path, "w").close()
                tune.report(test=step)

        wrapped = wrap_function(train)
        new_trainable = wrapped(logger_creator=self.logger_creator)
        new_trainable.train()
        new_trainable.train()
        checkpoint_obj = new_trainable.save_to_object()
        new_trainable.stop()

        new_trainable2 = wrapped(logger_creator=self.logger_creator)
        new_trainable2.restore_from_object(checkpoint_obj)
        # Saving immediately after restore must be allowed.
        checkpoint_obj = new_trainable2.save_to_object()
        new_trainable2.train()
        result = new_trainable2.train()
        # One temp dir exists while the trainable is live, none after stop.
        assert sum("tmp" in path for path in os.listdir(self.logdir)) == 1
        new_trainable2.stop()
        assert sum("tmp" in path for path in os.listdir(self.logdir)) == 0
        assert result[TRAINING_ITERATION] == 4
class FunctionApiTest(unittest.TestCase):
def setUp(self):
ray.init(num_cpus=4, num_gpus=0, object_store_memory=150 * 1024 * 1024)
def tearDown(self):
ray.shutdown()
_register_all() # re-register the evicted objects
def testCheckpointError(self):
def train(config, checkpoint_dir=False):
pass
with self.assertRaises(ValueError):
tune.run(train, checkpoint_freq=1)
with self.assertRaises(ValueError):
tune.run(train, checkpoint_at_end=True)
def testCheckpointFunctionAtEnd(self):
def train(config, checkpoint_dir=False):
for i in range(10):
tune.report(test=i)
with tune.checkpoint_dir(step=10) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write("hello")
[trial] = tune.run(train).trials
assert os.path.exists(os.path.join(trial.checkpoint.value, "ckpt.log"))
def testCheckpointFunctionAtEndContext(self):
def train(config, checkpoint_dir=False):
for i in range(10):
tune.report(test=i)
with tune.checkpoint_dir(step=10) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write("hello")
[trial] = tune.run(train).trials
assert os.path.exists(os.path.join(trial.checkpoint.value, "ckpt.log"))
def testVariousCheckpointFunctionAtEnd(self):
def train(config, checkpoint_dir=False):
for i in range(10):
with tune.checkpoint_dir(step=i) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write("hello")
tune.report(test=i)
with tune.checkpoint_dir(step=i) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log2")
with open(checkpoint_path, "w") as f:
f.write("goodbye")
[trial] = tune.run(train, keep_checkpoints_num=3).trials
assert os.path.exists(
os.path.join(trial.checkpoint.value, "ckpt.log2"))
def testReuseCheckpoint(self):
def train(config, checkpoint_dir=None):
itr = 0
if checkpoint_dir:
with open(os.path.join(checkpoint_dir, "ckpt.log"), "r") as f:
itr = int(f.read()) + 1
for i in range(itr, config["max_iter"]):
with tune.checkpoint_dir(step=i) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write(str(i))
tune.report(test=i, training_iteration=i)
[trial] = tune.run(
train,
config={
"max_iter": 5
},
).trials
last_ckpt = trial.checkpoint.value
assert os.path.exists(os.path.join(trial.checkpoint.value, "ckpt.log"))
analysis = tune.run(train, config={"max_iter": 10}, restore=last_ckpt)
trial_dfs = list(analysis.trial_dataframes.values())
assert len(trial_dfs[0]["training_iteration"]) == 5
def testRetry(self):
def train(config, checkpoint_dir=None):
restored = bool(checkpoint_dir)
itr = 0
if checkpoint_dir:
with open(os.path.join(checkpoint_dir, "ckpt.log"), "r") as f:
itr = int(f.read()) + 1
for i in range(itr, 10):
if i == 5 and not restored:
raise Exception("try to fail me")
with tune.checkpoint_dir(step=i) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write(str(i))
tune.report(test=i, training_iteration=i)
analysis = tune.run(train, max_failures=3)
last_ckpt = analysis.trials[0].checkpoint.value
assert os.path.exists(os.path.join(last_ckpt, "ckpt.log"))
trial_dfs = list(analysis.trial_dataframes.values())
assert len(trial_dfs[0]["training_iteration"]) == 10
def testEnabled(self):
def train(config, checkpoint_dir=None):
is_active = tune.is_session_enabled()
if is_active:
tune.report(active=is_active)
return is_active
assert train({}) is False
analysis = tune.run(train)
t = analysis.trials[0]
assert t.last_result["active"]
def testBlankCheckpoint(self):
def train(config, checkpoint_dir=None):
restored = bool(checkpoint_dir)
itr = 0
if checkpoint_dir:
with open(os.path.join(checkpoint_dir, "ckpt.log"), "r") as f:
itr = int(f.read()) + 1
for i in range(itr, 10):
if i == 5 and not restored:
raise Exception("try to fail me")
with tune.checkpoint_dir(step=itr) as checkpoint_dir:
checkpoint_path = os.path.join(checkpoint_dir, "ckpt.log")
with open(checkpoint_path, "w") as f:
f.write(str(i))
tune.report(test=i, training_iteration=i)
analysis = tune.run(train, max_failures=3)
trial_dfs = list(analysis.trial_dataframes.values())
assert len(trial_dfs[0]["training_iteration"]) == 10
def testWithParameters(self):
class Data:
def __init__(self):
self.data = [0] * 500_000
data = Data()
data.data[100] = 1
def train(config, data=None):
data.data[101] = 2 # Changes are local
tune.report(metric=len(data.data), hundred=data.data[100])
trial_1, trial_2 = tune.run(
with_parameters(train, data=data), num_samples=2).trials
self.assertEquals(data.data[101], 0)
self.assertEquals(trial_1.last_result["metric"], 500_000)
self.assertEquals(trial_1.last_result["hundred"], 1)
self.assertEquals(trial_2.last_result["metric"], 500_000)
self.assertEquals(trial_2.last_result["hundred"], 1)
# With checkpoint dir parameter
def train(config, checkpoint_dir="DIR", data=None):
data.data[101] = 2 # Changes are local
tune.report(metric=len(data.data), cp=checkpoint_dir)
trial_1, trial_2 = tune.run(
with_parameters(train, data=data), num_samples=2).trials
self.assertEquals(data.data[101], 0)
self.assertEquals(trial_1.last_result["metric"], 500_000)
self.assertEquals(trial_1.last_result["cp"], "DIR")
self.assertEquals(trial_2.last_result["metric"], 500_000)
self.assertEquals(trial_2.last_result["cp"], "DIR")
def testWithParameters2(self):
class Data:
def __init__(self):
import numpy as np
self.data = np.random.rand((2 * 1024 * 1024))
def train(config, data=None):
tune.report(metric=len(data.data))
trainable = tune.with_parameters(train, data=Data())
dumped = cloudpickle.dumps(trainable)
assert sys.getsizeof(dumped) < 100 * 1024
def testReturnAnonymous(self):
def train(config):
return config["a"]
trial_1, trial_2 = tune.run(
train, config={
"a": tune.grid_search([4, 8])
}).trials
self.assertEquals(trial_1.last_result[DEFAULT_METRIC], 4)
self.assertEquals(trial_2.last_result[DEFAULT_METRIC], 8)
def testReturnSpecific(self):
def train(config):
return {"m": config["a"]}
trial_1, trial_2 = tune.run(
train, config={
"a": tune.grid_search([4, 8])
}).trials
self.assertEquals(trial_1.last_result["m"], 4)
self.assertEquals(trial_2.last_result["m"], 8)
def testYieldAnonymous(self):
def train(config):
for i in range(10):
yield config["a"] + i
trial_1, trial_2 = tune.run(
train, config={
"a": tune.grid_search([4, 8])
}).trials
self.assertEquals(trial_1.last_result[DEFAULT_METRIC], 4 + 9)
self.assertEquals(trial_2.last_result[DEFAULT_METRIC], 8 + 9)
def testYieldSpecific(self):
def train(config):
for i in range(10):
yield {"m": config["a"] + i}
trial_1, trial_2 = tune.run(
train, config={
"a": tune.grid_search([4, 8])
}).trials
self.assertEquals(trial_1.last_result["m"], 4 + 9)
self.assertEquals(trial_2.last_result["m"], 8 + 9)
| 37.837383
| 79
| 0.602035
| 2,313
| 20,243
| 5.088629
| 0.098573
| 0.118182
| 0.028547
| 0.036703
| 0.791674
| 0.761427
| 0.730501
| 0.710365
| 0.685641
| 0.65616
| 0
| 0.017436
| 0.300203
| 20,243
| 534
| 80
| 37.90824
| 0.813427
| 0.024008
| 0
| 0.675991
| 0
| 0
| 0.022357
| 0
| 0
| 0
| 0
| 0
| 0.130536
| 1
| 0.13986
| false
| 0.002331
| 0.034965
| 0.006993
| 0.198135
| 0.002331
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9eeea9e80fd6b80375d04873a065d8585fabd984
| 540
|
py
|
Python
|
app/main/errors.py
|
kungfumas/imageretrieval4
|
f62ab54f550cbeafe9d55fcdb098cf7a2cb43d87
|
[
"MIT"
] | 36
|
2020-02-25T14:36:08.000Z
|
2022-03-05T10:14:39.000Z
|
app/main/errors.py
|
kungfumas/imageretrieval4
|
f62ab54f550cbeafe9d55fcdb098cf7a2cb43d87
|
[
"MIT"
] | 4
|
2020-01-28T22:17:46.000Z
|
2021-10-12T22:52:34.000Z
|
app/main/errors.py
|
kungfumas/imageretrieval4
|
f62ab54f550cbeafe9d55fcdb098cf7a2cb43d87
|
[
"MIT"
] | 5
|
2020-03-26T18:43:20.000Z
|
2022-03-08T05:51:56.000Z
|
# -*- coding: utf-8 -*-
from flask import render_template
from flask_wtf.csrf import CSRFError
from . import main
@main.errorhandler(CSRFError)
def handle_csrf_error(e):
return render_template('csrf_error.html', reason=e.description), 400
@main.app_errorhandler(403)
def forbidden(e):
return render_template('403.html'), 403
@main.app_errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(e):
return render_template('500.html'), 500
| 20
| 72
| 0.751852
| 78
| 540
| 5
| 0.410256
| 0.179487
| 0.133333
| 0.215385
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065678
| 0.125926
| 540
| 26
| 73
| 20.769231
| 0.760593
| 0.038889
| 0
| 0
| 0
| 0
| 0.075435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0.2
| 0.266667
| 0.733333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
7308dae0b4961f627ee40af66b03e3368ec616da
| 184
|
py
|
Python
|
server/data_server/errors.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | 6
|
2015-04-03T02:25:28.000Z
|
2021-11-17T21:42:59.000Z
|
server/data_server/errors.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | 3
|
2020-09-11T12:54:50.000Z
|
2020-09-11T12:55:01.000Z
|
server/data_server/errors.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | 3
|
2018-12-07T07:04:37.000Z
|
2022-02-22T05:28:16.000Z
|
#!/usr/bin/env python
"""Exceptions for the distributed data servers."""
class DataServerError(Exception):
"""Raised when some error condition happens in a data server."""
pass
| 20.444444
| 66
| 0.728261
| 24
| 184
| 5.583333
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 184
| 8
| 67
| 23
| 0.858974
| 0.673913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
731fd0c9a7bc10cd3600ce369d4c7052e09d2f8f
| 294
|
py
|
Python
|
Trakttv.bundle/Contents/Libraries/Shared/oem_framework/plugin.py
|
disrupted/Trakttv.bundle
|
24712216c71f3b22fd58cb5dd89dad5bb798ed60
|
[
"RSA-MD"
] | 1,346
|
2015-01-01T14:52:24.000Z
|
2022-03-28T12:50:48.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/oem_framework/plugin.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 474
|
2015-01-01T10:27:46.000Z
|
2022-03-21T12:26:16.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/oem_framework/plugin.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 191
|
2015-01-02T18:27:22.000Z
|
2022-03-29T10:49:48.000Z
|
class Plugin(object):
__key__ = None
__priority__ = 0
_client = None
# TODO should this be changed to support both client and database updater plugins?
def initialize(self, client):
self._client = client
@property
def available(self):
return True
| 21
| 86
| 0.656463
| 35
| 294
| 5.228571
| 0.771429
| 0.10929
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004739
| 0.282313
| 294
| 13
| 87
| 22.615385
| 0.862559
| 0.272109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 1
| 0.222222
| false
| 0
| 0
| 0.111111
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
731ffd5d4d9c5501f8052f597d4329b143562a83
| 268
|
py
|
Python
|
openlobby/core/admin.py
|
openlobby/openlobby-server
|
b7a1a2b73e903c4da57970926844b0639dce5aae
|
[
"MIT"
] | 7
|
2017-11-23T15:24:50.000Z
|
2018-11-29T21:47:55.000Z
|
openlobby/core/admin.py
|
openlobby/openlobby-server
|
b7a1a2b73e903c4da57970926844b0639dce5aae
|
[
"MIT"
] | 20
|
2018-02-21T22:25:42.000Z
|
2020-06-05T17:22:36.000Z
|
openlobby/core/admin.py
|
openlobby/openlobby-server
|
b7a1a2b73e903c4da57970926844b0639dce5aae
|
[
"MIT"
] | 3
|
2018-03-08T10:05:01.000Z
|
2018-08-16T14:36:28.000Z
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import OpenIdClient, User
@admin.register(OpenIdClient)
class OpenIdClientAdmin(admin.ModelAdmin):
pass
@admin.register(User)
class MyUserAdmin(UserAdmin):
pass
| 17.866667
| 47
| 0.794776
| 32
| 268
| 6.65625
| 0.5
| 0.093897
| 0.159624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126866
| 268
| 14
| 48
| 19.142857
| 0.910256
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.222222
| 0.333333
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
733c6ad093448f51c6140a40515cb11c6cd5b41f
| 124
|
py
|
Python
|
{{cookiecutter.repo_name}}/src/{{cookiecutter.package_name}}/__main__.py
|
k-dominik/cookiecutter-python-conda
|
d68f112da2bc7966e980935ceee353c3b4892e3c
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/src/{{cookiecutter.package_name}}/__main__.py
|
k-dominik/cookiecutter-python-conda
|
d68f112da2bc7966e980935ceee353c3b4892e3c
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/src/{{cookiecutter.package_name}}/__main__.py
|
k-dominik/cookiecutter-python-conda
|
d68f112da2bc7966e980935ceee353c3b4892e3c
|
[
"MIT"
] | null | null | null |
def main():
print("Implement {{ cookiecutter.package_name }}'s main function!")
if __name__ == "__main__":
main()
| 17.714286
| 71
| 0.645161
| 14
| 124
| 5.071429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185484
| 124
| 6
| 72
| 20.666667
| 0.70297
| 0
| 0
| 0
| 0
| 0
| 0.532258
| 0.201613
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
733decdb39001347341307799871535981e1dd05
| 1,192
|
py
|
Python
|
syft/frameworks/torch/tensors/crypten/syft_crypten.py
|
ribhu97/PySyft
|
ee36eb95a01f4e6736e87b8856a3e7788054ac30
|
[
"Apache-2.0"
] | 1
|
2021-04-09T10:23:06.000Z
|
2021-04-09T10:23:06.000Z
|
syft/frameworks/torch/tensors/crypten/syft_crypten.py
|
ribhu97/PySyft
|
ee36eb95a01f4e6736e87b8856a3e7788054ac30
|
[
"Apache-2.0"
] | null | null | null |
syft/frameworks/torch/tensors/crypten/syft_crypten.py
|
ribhu97/PySyft
|
ee36eb95a01f4e6736e87b8856a3e7788054ac30
|
[
"Apache-2.0"
] | null | null | null |
from syft.generic.frameworks.hook import hook_args
from syft.generic.tensor import AbstractTensor
from syft.generic.frameworks.hook.trace import tracer
class SyftCrypTensor(AbstractTensor):
def __init__(
self, owner=None, id=None, tensor=None, tags: set = None, description: str = None,
):
super().__init__(id=id, owner=owner, tags=tags, description=description)
self.tensor = tensor
def get_class_attributes(self):
"""
Specify all the attributes need to build a wrapper correctly when returning a response,
"""
# TODO: what we should return specific for this one?
return {}
@property
def data(self):
return self
@data.setter
def data(self, new_data):
self.tensor = new_data.child
return self
@tracer(method_name="add")
def add(self, other):
return SyftCrypTensor(tensor=self.tensor)
__add__ = add
__radd__ = add
@tracer(method_name="get_plain_text")
def get_plain_text(self, dst=None):
return SyftCrypTensor(tensor=self.tensor)
### Register the tensor with hook_args.py ###
hook_args.default_register_tensor(SyftCrypTensor)
| 27.090909
| 95
| 0.677852
| 150
| 1,192
| 5.18
| 0.426667
| 0.05148
| 0.057915
| 0.06435
| 0.16731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.229027
| 1,192
| 43
| 96
| 27.72093
| 0.845484
| 0.14849
| 0
| 0.148148
| 0
| 0
| 0.017276
| 0
| 0
| 0
| 0
| 0.023256
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.111111
| 0.62963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
733f0d74afec0767d284bc087e66f7dbbb92d872
| 4,279
|
py
|
Python
|
robot_api/CRC16.py
|
DeerKK/Deformable-Modeling
|
97b14be152e78f44dd6e783059bc5380a3a74a68
|
[
"MIT"
] | 4
|
2020-11-16T16:06:08.000Z
|
2022-03-30T03:53:54.000Z
|
robot_api/CRC16.py
|
DeerKK/Deformable-Modeling
|
97b14be152e78f44dd6e783059bc5380a3a74a68
|
[
"MIT"
] | null | null | null |
robot_api/CRC16.py
|
DeerKK/Deformable-Modeling
|
97b14be152e78f44dd6e783059bc5380a3a74a68
|
[
"MIT"
] | null | null | null |
#obtained from https://www.digi.com/resources/documentation/digidocs/90001537/references/r_python_crc16_modbus.htm
#* File: CRC16.PY
#* CRC-16 (reverse) table lookup for Modbus or DF1
#*
INITIAL_MODBUS = 0xFFFF
INITIAL_DF1 = 0x0000
table = (
0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040 )
def calcByte( ch, crc):
"""Given a new Byte and previous CRC, Calc a new CRC-16"""
if type(ch) == type("c"):
by = ord( ch)
else:
by = ch
crc = (crc >> 8) ^ table[(crc ^ by) & 0xFF]
return (crc & 0xFFFF)
def calcString( st, crc):
"""Given a bunary string and starting CRC, Calc a final CRC-16 """
for ch in st:
crc = (crc >> 8) ^ table[(crc ^ ord(ch)) & 0xFF]
return crc
if __name__ == '__main__':
# test Modbus
print "testing Modbus messages with crc16.py"
print "test case #1:",
crc = INITIAL_MODBUS
st = "\xEA\x03\x00\x00\x00\x64"
for ch in st:
crc = calcByte( ch, crc)
if crc != 0x3A53:
print "BAD - ERROR - FAILED!",
print "expect:0x3A53 but saw 0x%x" % crc
else:
print "Ok"
print "test case #2:",
st = "\x4b\x03\x00\x2c\x00\x37"
crc = calcString( st, INITIAL_MODBUS)
if crc != 0xbfcb:
print "BAD - ERROR - FAILED! ",
print "expect:0xBFCB but saw 0x%x" % crc
else:
print "Ok"
print "test case #3:",
st = "\x0d\x01\x00\x62\x00\x33"
crc = calcString( st, INITIAL_MODBUS)
if crc != 0x0ddd:
print "BAD - ERROR - FAILED!",
print "expect:0x0DDD but saw 0x%x" % crc
else:
print "Ok"
print
print "testing DF1 messages with crc16.py"
print "test case #1:",
st = "\x07\x11\x41\x00\x53\xB9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
# DF1 uses same algorithm - just starts with CRC=0x0000 instead of 0xFFFF
# note: <DLE><STX> and the <DLE> of the <DLE><ETX> pair NOT to be included
crc = calcString( st, INITIAL_DF1)
crc = calcByte( "\x03", crc) # final ETX added
if crc != 0x4C6B:
print "BAD - ERROR - FAILED!",
print "expect:0x4C6B but saw 0x%x" % crc
else:
print "Ok"
# end file
| 40.367925
| 115
| 0.649217
| 568
| 4,279
| 4.860915
| 0.649648
| 0.028251
| 0.035857
| 0.039116
| 0.167331
| 0.148859
| 0.105397
| 0.081492
| 0.049258
| 0.039116
| 0
| 0.350602
| 0.224118
| 4,279
| 106
| 116
| 40.367925
| 0.481024
| 0.08904
| 0
| 0.223529
| 0
| 0.011765
| 0.130292
| 0.039334
| 0
| 0
| 0.439771
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.223529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
73537f66718e43516311ff5113615d6f98d96acf
| 736
|
py
|
Python
|
ex19a.py
|
arunkumarang/python
|
1960e285dfe2ef54d2e3ab37584bfef8b24ecca9
|
[
"Apache-2.0"
] | null | null | null |
ex19a.py
|
arunkumarang/python
|
1960e285dfe2ef54d2e3ab37584bfef8b24ecca9
|
[
"Apache-2.0"
] | null | null | null |
ex19a.py
|
arunkumarang/python
|
1960e285dfe2ef54d2e3ab37584bfef8b24ecca9
|
[
"Apache-2.0"
] | null | null | null |
def delivery_fooditems_and_food(apple, mango, rice, dal, milk, curd):
print "Hi da, I got %d fresh apples & %d mangos" % (apple, mango)
print "As per list, I bought %d kg rice & %d kg dal" % (rice, dal)
print "You mentioned to get %d litre milk & %d litre curd" % (milk, curd)
print "Call Bigbasket customer care and order these items!!!!!\n"
print "Just passing as numbers:"
delivery_fooditems_and_food(12, 7, 5, 3, 2, 1)
print "Just passing through variables:"
apple = 22
mango = 5
rice = 10
dal = 20
milk = 7
curd = 3
delivery_fooditems_and_food(apple, mango, rice, dal, milk, curd)
print "Passing as variable with numbers:"
delivery_fooditems_and_food(apple+100, mango+70, rice+15, dal+25, milk+2, curd+4)
| 32
| 81
| 0.692935
| 124
| 736
| 4.016129
| 0.475806
| 0.136546
| 0.160643
| 0.192771
| 0.351406
| 0.216867
| 0.216867
| 0.216867
| 0.216867
| 0.216867
| 0
| 0.045226
| 0.188859
| 736
| 22
| 82
| 33.454545
| 0.788945
| 0
| 0
| 0
| 0
| 0
| 0.379592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.176471
| 0
| null | null | 0.411765
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 4
|
735dd6fc47c0c2a15df6c4faf729b23f21858f13
| 234
|
py
|
Python
|
8kyu/color_ghost.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | 1
|
2018-12-02T23:04:38.000Z
|
2018-12-02T23:04:38.000Z
|
8kyu/color_ghost.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | null | null | null |
8kyu/color_ghost.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | null | null | null |
# http://www.codewars.com/kata/53f1015fa9fe02cbda00111a/
import random
def get_color():
return random.choice(["white", "yellow", "purple", "red"])
class Ghost(object):
def __init__(self):
self.color = get_color()
| 18
| 62
| 0.67094
| 28
| 234
| 5.392857
| 0.785714
| 0.10596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071795
| 0.166667
| 234
| 12
| 63
| 19.5
| 0.702564
| 0.230769
| 0
| 0
| 0
| 0
| 0.11236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
73638bbc64e7244617537d274ed910412476195c
| 4,437
|
py
|
Python
|
functions_engraving_study.py
|
clch1234/Hyperspectral-Imaging
|
ee390c678df0c09b3ca050770d3e3d90484c4835
|
[
"MIT"
] | null | null | null |
functions_engraving_study.py
|
clch1234/Hyperspectral-Imaging
|
ee390c678df0c09b3ca050770d3e3d90484c4835
|
[
"MIT"
] | null | null | null |
functions_engraving_study.py
|
clch1234/Hyperspectral-Imaging
|
ee390c678df0c09b3ca050770d3e3d90484c4835
|
[
"MIT"
] | null | null | null |
from numpy import *
from scipy import heaviside
import scipy.integrate as integrate
""" Mode shapes """
length = 1 # We need to normalize for this to work !
beta1 = pi * 0.59686 / length
C1 = (cos(beta1*length)+cosh(beta1*length))/(sin(beta1*length)+sinh(beta1*length))
beta2 = pi * 1.49418 / length
C2 = (cos(beta2*length)+cosh(beta2*length))/(sin(beta2*length)+sinh(beta2*length))
def phi1(y): # Mode shape for mode 1
return cosh(beta1*y) - cos(beta1*y) + C1 * (sin(beta1*y)-sinh(beta1*y))
def phi1_pp(y): # Second derivative
return beta1**2 * (cosh(beta1*y) + cos(beta1*y) - C1 * (sin(beta1*y) + sinh(beta1*y)))
def phi2(y): # Mode shape for mode 2
return cosh(beta2*y) - cos(beta2*y) + C2 * (sin(beta2*y)-sinh(beta2*y))
def phi2_pp(y): # Second derivative
return beta2**2 * (cosh(beta2*y) + cos(beta2*y) - C2 * (sin(beta2*y) + sinh(beta2*y)))
"""
indexes 01 : clamped end to free end (y = 0 to y = 1)
indexes 10 : free end to clamped end (y = 1 to y = 0)
"""
alphaE = -1.2e-4 # 1/E dE/dT
int1a = integrate.quad(lambda y:phi1_pp(y)**2, 0, 1)[0]
int1b = integrate.quad(lambda y:phi1(y)**2, 0, 1)[0]
int2a = integrate.quad(lambda y:phi2_pp(y)**2, 0, 1)[0]
int2b = integrate.quad(lambda y:phi2(y)**2, 0, 1)[0]
""" Direction and mode invariant : temperature """
def dT(y, y_s, dT_max):
# Here T_max is a global value, the temperature reached when y_s = 1 !
if y_s == 0:
return 0
else:
return y/y_s * (dT_max*y_s) * heaviside(-(y - y_s), .5) + (dT_max*y_s) * heaviside(y - y_s, .5)
""" First Mechanical mode """
# Direction invariant : temperature
def dOmega1_T(y_s, dT_max):
intT = integrate.quad(lambda y:dT(y, y_s, dT_max)*phi1_pp(y)**2, 0, 1)[0]
return .5*alphaE*intT/int1a
# Sens base -> bout
def dR_01(y, y_s, dR0):
return dR0 * (heaviside(y - y_s, 1) - 1)
def dOmega1_R_01(y_s, dR0):
int_a = integrate.quad(lambda y:dR_01(y, y_s, dR0)*phi1_pp(y)**2, 0, 1)[0]
int_b = integrate.quad(lambda y:dR_01(y, y_s, dR0)*phi1(y)**2, 0, 1)[0]
return 2*int_a/int1a - int_b/int1b
def dOmega1_01(y_s, dR0, dT_max):
return dOmega1_T(y_s, dT_max) + dOmega1_R_01(y_s, dR0)
# Sens bout -> base
def dR_10(y, y_s, dR0):
return - dR0 * heaviside(y - y_s, 1)
def dOmega1_R_10(y_s, dR0):
int_a = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi1_pp(y)**2, 0, 1)[0]
int_b = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi1(y)**2, 0, 1)[0]
return 2*int_a/int1a - int_b/int1b
def dOmega1_10(y_s, dR0, dT_max):
return dOmega1_T(y_s, dT_max) + dOmega1_R_10(y_s, dR0)
""" Second mechanical mode """
# Direction invariant : temperature
def dOmega2_T(y_s, dT_max):
intT = integrate.quad(lambda y:dT(y, y_s, dT_max)*phi2_pp(y)**2, 0, 1)[0]
return .5*alphaE*intT/int1a
def dOmega2_R_01(y_s, dR0):
int_a = integrate.quad(lambda y:dR_01(y, y_s, dR0)*phi2_pp(y)**2, 0, 1)[0]
int_b = integrate.quad(lambda y:dR_01(y, y_s, dR0)*phi2(y)**2, 0, 1)[0]
return 2*int_a/int2a - int_b/int2b
def dOmega2_01(y_s, dR0, dT_max):
return dOmega2_T(y_s, dT_max) + dOmega2_R_01(y_s, dR0)
def dOmega2_R_10(y_s, dR0):
int_a = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi2_pp(y)**2, 0, 1)[0]
int_b = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi2(y)**2, 0, 1)[0]
return 2*int_a/int2a - int_b/int2b
def dOmega2_10(y_s, dR0, dT_max):
return dOmega2_T(y_s, dT_max) + dOmega2_R_10(y_s, dR0)
""" Density change """
# Direction invariant : temperature
def dOmega1_T(y_s, dT_max):
intT = integrate.quad(lambda y:dT(y, y_s, dT_max)*phi1_pp(y)**2, 0, 1)[0]
return .5*alphaE*intT/int1a
# Sens base -> bout
def drho_01(y, y_s, drho0):
return drho0 * (heaviside(y - y_s, 1) - 1)
def dOmega1_rho_01(y_s, drho0):
int_b = integrate.quad(lambda y:dR_01(y, y_s, drho0)*phi1(y)**2, 0, 1)[0]
return .5 * int_b/int1b
def dOmega1_density_01(y_s, dR0, dT_max):
return dOmega1_T(y_s, dT_max) + dOmega1_R_01(y_s, dR0)
# Sens bout -> base
def dR_10(y, y_s, dR0):
return - dR0 * heaviside(y - y_s, 1)
def dOmega1_R_10(y_s, dR0):
int_a = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi1_pp(y)**2, 0, 1)[0]
int_b = integrate.quad(lambda y:dR_10(y, y_s, dR0)*phi1(y)**2, 0, 1)[0]
return 2*int_a/int1a - int_b/int1b
def dOmega1_10(y_s, dR0, dT_max):
return dOmega1_T(y_s, dT_max) + dOmega1_R_10(y_s, dR0)
| 37.601695
| 104
| 0.633762
| 875
| 4,437
| 3.021714
| 0.107429
| 0.043116
| 0.056732
| 0.136157
| 0.772315
| 0.696293
| 0.64826
| 0.630862
| 0.617625
| 0.602496
| 0
| 0.091113
| 0.196078
| 4,437
| 117
| 105
| 37.923077
| 0.650126
| 0.08384
| 0
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.038462
| 0.179487
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
7dfb99fae642274c00138799db547b5d4df0c53a
| 2,277
|
py
|
Python
|
models/unused/model_upper_limits.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
models/unused/model_upper_limits.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
models/unused/model_upper_limits.py
|
nyck33/bioactive_dash
|
94c4e57467d88c5ed0d1b97821c5505f662468ff
|
[
"MIT"
] | null | null | null |
"""
infant, child, male, female, preg, lactation for all plus macroUpper
so 6 * 5 classes + 3 for macroDistRange of 1-3, 4-18 and adults
total 33 classes = 33 collections
"""
from flask import current_app as app
from cnf.models.model_macronutrients_distrange import NutrientsDocument
# Just a shorthand
db = app.db # MongoEngine(cnf) in main.py
class UpperElementsRDI(NutrientsDocument):
meta = {
'collection': 'upperMineralsRDI'
}
# mg/d = m, ug/d = u, g/d = g
arsenic = db.StringField(default='')
boron = db.StringField(default='')
calcium = db.StringField(default='') # m
chromium = db.StringField(default='') # u
copper = db.StringField(default="") # u
fluoride = db.StringField(default='') # m
iodine = db.StringField(default='') # u
iron = db.StringField(default='') # m
magnesium = db.StringField(default='') # m
manganese = db.StringField(default='') # u
molybdenum = db.StringField(default="") # u
nickel = db.StringField(default='') #mg
phosphorus = db.StringField(default='') # m
potassium = db.StringField(default="")
selenium = db.StringField(default='') # u
silicon = db.StringField(default='')
sulfate = db.StringField(default='')
vanadium = db.StringField(default='')
zinc = db.StringField(default='') # m
sodium = db.StringField(default='') # m
chloride = db.StringField(default='') # g
class UpperVitaminsRDI(NutrientsDocument):
meta = {
'collection': 'upperVitaminsRDI'
}
# mg/d = m, ug/d = u, g/d = g
vitaminA = db.StringField(default='') # u
vitaminC = db.StringField(default='') # m
vitaminD = db.StringField(default="") # u
vitaminE = db.StringField(default='') # m
vitaminK = db.StringField(default='') # u
thiamin = db.StringField(default='') # m
riboflavin = db.StringField(default='') # m
niacin = db.StringField(default='') # m
vitaminB6 = db.StringField(default="") # m
folate = db.StringField(default='') # u
vitaminB12 = db.StringField(default='') # u
pantothenicAcid = db.StringField(default='') # m
biotin = db.StringField(default='') # m
choline = db.StringField(default='') # m
carotenoids = db.StringField(default='') #all ND
| 39.258621
| 71
| 0.640316
| 260
| 2,277
| 5.596154
| 0.369231
| 0.321649
| 0.494845
| 0.230928
| 0.01512
| 0.01512
| 0.01512
| 0.01512
| 0.01512
| 0
| 0
| 0.008306
| 0.206851
| 2,277
| 58
| 72
| 39.258621
| 0.797342
| 0.145806
| 0
| 0.042553
| 0
| 0
| 0.027297
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.042553
| 0
| 0.893617
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
7dfea149f800779332c99932b8e1cc92bf95880c
| 255
|
py
|
Python
|
setigen/voltage/__init__.py
|
RocketRoss/setigen
|
fde4434094b5cc0d095c341d0b90e8f65c9e9215
|
[
"MIT"
] | null | null | null |
setigen/voltage/__init__.py
|
RocketRoss/setigen
|
fde4434094b5cc0d095c341d0b90e8f65c9e9215
|
[
"MIT"
] | null | null | null |
setigen/voltage/__init__.py
|
RocketRoss/setigen
|
fde4434094b5cc0d095c341d0b90e8f65c9e9215
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
from .data_stream import *
from .antenna import *
from .polyphase_filterbank import *
from .quantization import *
from .waterfall import *
from .level_utils import *
from .backend import *
| 25.5
| 64
| 0.8
| 32
| 255
| 6.09375
| 0.53125
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 255
| 9
| 65
| 28.333333
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.125
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7dff4de995b677855c6aef2e0aa5307e06979f88
| 197
|
py
|
Python
|
references/ati_cnn/crnn_keras.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | 9
|
2021-06-26T03:00:55.000Z
|
2022-03-03T13:43:00.000Z
|
references/ati_cnn/crnn_keras.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | 1
|
2021-10-01T09:29:30.000Z
|
2021-10-02T03:41:55.000Z
|
references/ati_cnn/crnn_keras.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | 2
|
2021-04-28T03:13:11.000Z
|
2021-05-15T14:15:34.000Z
|
import argparse
import numpy as np
import keras
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Embedding, LSTM, Bidirectional
| 28.142857
| 71
| 0.84264
| 27
| 197
| 6.148148
| 0.62963
| 0.162651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121827
| 197
| 6
| 72
| 32.833333
| 0.959538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b40d22f8edcb0d534b378a7226d123b6201d5df4
| 7,662
|
py
|
Python
|
lib/sqlalchemy/util/__init__.py
|
zcattacz/sqlalchemy
|
882afe4c3718f741800809003c6c465d835de213
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/util/__init__.py
|
zcattacz/sqlalchemy
|
882afe4c3718f741800809003c6c465d835de213
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/util/__init__.py
|
zcattacz/sqlalchemy
|
882afe4c3718f741800809003c6c465d835de213
|
[
"MIT"
] | null | null | null |
# util/__init__.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from collections import defaultdict as defaultdict
from functools import partial as partial
from functools import update_wrapper as update_wrapper
from ._collections import coerce_generator_arg as coerce_generator_arg
from ._collections import coerce_to_immutabledict as coerce_to_immutabledict
from ._collections import column_dict as column_dict
from ._collections import column_set as column_set
from ._collections import EMPTY_DICT as EMPTY_DICT
from ._collections import EMPTY_SET as EMPTY_SET
from ._collections import FacadeDict as FacadeDict
from ._collections import flatten_iterator as flatten_iterator
from ._collections import has_dupes as has_dupes
from ._collections import has_intersection as has_intersection
from ._collections import IdentitySet as IdentitySet
from ._collections import ImmutableContainer as ImmutableContainer
from ._collections import immutabledict as immutabledict
from ._collections import ImmutableProperties as ImmutableProperties
from ._collections import LRUCache as LRUCache
from ._collections import merge_lists_w_ordering as merge_lists_w_ordering
from ._collections import ordered_column_set as ordered_column_set
from ._collections import OrderedDict as OrderedDict
from ._collections import OrderedIdentitySet as OrderedIdentitySet
from ._collections import OrderedProperties as OrderedProperties
from ._collections import OrderedSet as OrderedSet
from ._collections import PopulateDict as PopulateDict
from ._collections import Properties as Properties
from ._collections import ScopedRegistry as ScopedRegistry
from ._collections import sort_dictionary as sort_dictionary
from ._collections import ThreadLocalRegistry as ThreadLocalRegistry
from ._collections import to_column_set as to_column_set
from ._collections import to_list as to_list
from ._collections import to_set as to_set
from ._collections import unique_list as unique_list
from ._collections import UniqueAppender as UniqueAppender
from ._collections import update_copy as update_copy
from ._collections import WeakPopulateDict as WeakPopulateDict
from ._collections import WeakSequence as WeakSequence
from ._preloaded import preload_module as preload_module
from ._preloaded import preloaded as preloaded
from .compat import arm as arm
from .compat import b as b
from .compat import b64decode as b64decode
from .compat import b64encode as b64encode
from .compat import cmp as cmp
from .compat import cpython as cpython
from .compat import dataclass_fields as dataclass_fields
from .compat import decode_backslashreplace as decode_backslashreplace
from .compat import dottedgetter as dottedgetter
from .compat import has_refcount_gc as has_refcount_gc
from .compat import inspect_getfullargspec as inspect_getfullargspec
from .compat import local_dataclass_fields as local_dataclass_fields
from .compat import osx as osx
from .compat import py38 as py38
from .compat import py39 as py39
from .compat import pypy as pypy
from .compat import win32 as win32
from .concurrency import await_fallback as await_fallback
from .concurrency import await_only as await_only
from .concurrency import greenlet_spawn as greenlet_spawn
from .concurrency import is_exit_exception as is_exit_exception
from .deprecations import became_legacy_20 as became_legacy_20
from .deprecations import deprecated as deprecated
from .deprecations import deprecated_cls as deprecated_cls
from .deprecations import deprecated_params as deprecated_params
from .deprecations import deprecated_property as deprecated_property
from .deprecations import moved_20 as moved_20
from .deprecations import warn_deprecated as warn_deprecated
from .langhelpers import add_parameter_text as add_parameter_text
from .langhelpers import as_interface as as_interface
from .langhelpers import asbool as asbool
from .langhelpers import asint as asint
from .langhelpers import assert_arg_type as assert_arg_type
from .langhelpers import attrsetter as attrsetter
from .langhelpers import bool_or_str as bool_or_str
from .langhelpers import chop_traceback as chop_traceback
from .langhelpers import class_hierarchy as class_hierarchy
from .langhelpers import classproperty as classproperty
from .langhelpers import clsname_as_plain_name as clsname_as_plain_name
from .langhelpers import coerce_kw_type as coerce_kw_type
from .langhelpers import constructor_copy as constructor_copy
from .langhelpers import constructor_key as constructor_key
from .langhelpers import counter as counter
from .langhelpers import create_proxy_methods as create_proxy_methods
from .langhelpers import decode_slice as decode_slice
from .langhelpers import decorator as decorator
from .langhelpers import dictlike_iteritems as dictlike_iteritems
from .langhelpers import duck_type_collection as duck_type_collection
from .langhelpers import ellipses_string as ellipses_string
from .langhelpers import EnsureKWArg as EnsureKWArg
from .langhelpers import format_argspec_init as format_argspec_init
from .langhelpers import format_argspec_plus as format_argspec_plus
from .langhelpers import generic_fn_descriptor as generic_fn_descriptor
from .langhelpers import generic_repr as generic_repr
from .langhelpers import get_annotations as get_annotations
from .langhelpers import get_callable_argspec as get_callable_argspec
from .langhelpers import get_cls_kwargs as get_cls_kwargs
from .langhelpers import get_func_kwargs as get_func_kwargs
from .langhelpers import getargspec_init as getargspec_init
from .langhelpers import has_compiled_ext as has_compiled_ext
from .langhelpers import HasMemoized as HasMemoized
from .langhelpers import hybridmethod as hybridmethod
from .langhelpers import hybridproperty as hybridproperty
from .langhelpers import inject_docstring_text as inject_docstring_text
from .langhelpers import iterate_attributes as iterate_attributes
from .langhelpers import map_bits as map_bits
from .langhelpers import md5_hex as md5_hex
from .langhelpers import memoized_instancemethod as memoized_instancemethod
from .langhelpers import memoized_property as memoized_property
from .langhelpers import MemoizedSlots as MemoizedSlots
from .langhelpers import method_is_overridden as method_is_overridden
from .langhelpers import methods_equivalent as methods_equivalent
from .langhelpers import (
monkeypatch_proxied_specials as monkeypatch_proxied_specials,
)
from .langhelpers import non_memoized_property as non_memoized_property
from .langhelpers import NoneType as NoneType
from .langhelpers import only_once as only_once
from .langhelpers import (
parse_user_argument_for_enum as parse_user_argument_for_enum,
)
from .langhelpers import PluginLoader as PluginLoader
from .langhelpers import portable_instancemethod as portable_instancemethod
from .langhelpers import quoted_token_parser as quoted_token_parser
from .langhelpers import safe_reraise as safe_reraise
from .langhelpers import set_creation_order as set_creation_order
from .langhelpers import string_or_unprintable as string_or_unprintable
from .langhelpers import symbol as symbol
from .langhelpers import TypingOnly as TypingOnly
from .langhelpers import (
unbound_method_to_callable as unbound_method_to_callable,
)
from .langhelpers import walk_subclasses as walk_subclasses
from .langhelpers import warn as warn
from .langhelpers import warn_exception as warn_exception
from .langhelpers import warn_limited as warn_limited
from .langhelpers import wrap_callable as wrap_callable
| 52.479452
| 76
| 0.87053
| 1,052
| 7,662
| 6.077947
| 0.206274
| 0.147795
| 0.206913
| 0.018768
| 0.053488
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005565
| 0.108718
| 7,662
| 145
| 77
| 52.841379
| 0.930737
| 0.029235
| 0
| 0.022059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007353
| 1
| 0
| true
| 0
| 0.955882
| 0
| 0.955882
| 0.007353
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b4412e67ee8491d8b9b9df4f77c7da475ed17e89
| 86
|
py
|
Python
|
present_date1.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
present_date1.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
present_date1.py
|
pavaniuriti2000/python_files
|
89e3c6bd95af6756b49bc8ee1fa74f900ea6c681
|
[
"MIT"
] | null | null | null |
# Show the current calendar date on stdout.
import datetime

current_date = datetime.date.today()
print(current_date)
| 17.2
| 34
| 0.825581
| 13
| 86
| 5.307692
| 0.615385
| 0.318841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081395
| 86
| 4
| 35
| 21.5
| 0.873418
| 0.151163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b44927f3fb2ed8bb7bb8a4898d99b5da64aa52b4
| 232
|
py
|
Python
|
config.py
|
MiyukiKun/telegram-rename-bot
|
68ecc3be1ee600fae115f5085ce4f460d8301648
|
[
"MIT"
] | 5
|
2021-06-29T17:34:48.000Z
|
2021-11-25T14:53:12.000Z
|
config.py
|
MiyukiKun/telegram-rename-bot
|
68ecc3be1ee600fae115f5085ce4f460d8301648
|
[
"MIT"
] | 1
|
2021-06-29T17:33:16.000Z
|
2021-11-30T04:29:33.000Z
|
config.py
|
MiyukiKun/telegram-rename-bot
|
68ecc3be1ee600fae115f5085ce4f460d8301648
|
[
"MIT"
] | 4
|
2021-06-29T17:34:54.000Z
|
2022-02-20T16:14:58.000Z
|
import os
from telethon import TelegramClient
# Telegram bot credentials, read from the environment at import time.
# NOTE(review): os.environ.get returns None for an unset variable, so a
# missing API_ID/API_HASH/BOT_TOKEN is silently passed through to
# TelegramClient -- confirm the deployment always sets all three.
api_id = os.environ.get('API_ID')
api_hash = os.environ.get('API_HASH')
bot_token = os.environ.get('BOT_TOKEN')
# Build the client and sign in with the bot token.
# NOTE(review): .start() appears to connect/log in immediately, giving this
# module network side effects on import -- verify before reusing elsewhere.
bot = TelegramClient('bot', api_id, api_hash).start(bot_token=bot_token)
| 29
| 72
| 0.771552
| 39
| 232
| 4.333333
| 0.333333
| 0.189349
| 0.213018
| 0.177515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090517
| 232
| 8
| 72
| 29
| 0.800948
| 0
| 0
| 0
| 0
| 0
| 0.111588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b4530a8f2f0b9f9534dde5ab29e038c835b5e169
| 1,409
|
py
|
Python
|
catalyst/metrics/functional/__init__.py
|
tadejsv/catalyst
|
2553ce8fd7cecc025ad88819aea73faf8abb229b
|
[
"Apache-2.0"
] | 206
|
2018-10-05T19:16:47.000Z
|
2019-01-19T21:10:41.000Z
|
catalyst/metrics/functional/__init__.py
|
tadejsv/catalyst
|
2553ce8fd7cecc025ad88819aea73faf8abb229b
|
[
"Apache-2.0"
] | 20
|
2018-10-07T06:30:49.000Z
|
2019-01-17T17:26:15.000Z
|
catalyst/metrics/functional/__init__.py
|
tadejsv/catalyst
|
2553ce8fd7cecc025ad88819aea73faf8abb229b
|
[
"Apache-2.0"
] | 22
|
2018-10-06T12:34:08.000Z
|
2019-01-10T16:00:48.000Z
|
# flake8: noqa
from catalyst.metrics.functional._misc import (
check_consistent_length,
process_multilabel_components,
process_recsys_components,
get_binary_statistics,
get_multiclass_statistics,
get_multilabel_statistics,
get_default_topk,
)
from catalyst.metrics.functional._accuracy import accuracy, multilabel_accuracy
from catalyst.metrics.functional._auc import auc, binary_auc
from catalyst.metrics.functional._average_precision import (
average_precision,
mean_average_precision,
binary_average_precision,
)
from catalyst.metrics.functional._classification import precision_recall_fbeta_support
from catalyst.metrics.functional._cmc_score import (
cmc_score,
cmc_score_count,
masked_cmc_score,
)
from catalyst.metrics.functional._f1_score import f1_score, fbeta_score
from catalyst.metrics.functional._focal import (
sigmoid_focal_loss,
reduced_focal_loss,
)
from catalyst.metrics.functional._hitrate import hitrate
from catalyst.metrics.functional._mrr import reciprocal_rank, mrr
from catalyst.metrics.functional._ndcg import dcg, ndcg
from catalyst.metrics.functional._precision import precision
from catalyst.metrics.functional._r2_squared import r2_squared
from catalyst.metrics.functional._recall import recall
from catalyst.metrics.functional._segmentation import (
iou,
dice,
trevsky,
get_segmentation_statistics,
)
| 33.547619
| 86
| 0.822569
| 169
| 1,409
| 6.514793
| 0.301775
| 0.163488
| 0.258856
| 0.395095
| 0.13079
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004032
| 0.119943
| 1,409
| 41
| 87
| 34.365854
| 0.883871
| 0.008517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.384615
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b463119de18ae2f0118b217cad4e38761e433f7a
| 90
|
py
|
Python
|
element/apps.py
|
rackis/kakaobot_hyoammeal
|
f7e0fa1180bbdee5a33f3ab8ffba5139459a74d6
|
[
"MIT"
] | null | null | null |
element/apps.py
|
rackis/kakaobot_hyoammeal
|
f7e0fa1180bbdee5a33f3ab8ffba5139459a74d6
|
[
"MIT"
] | null | null | null |
element/apps.py
|
rackis/kakaobot_hyoammeal
|
f7e0fa1180bbdee5a33f3ab8ffba5139459a74d6
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class KakaobotConfig(AppConfig):
    """Django application configuration for the 'element' app."""
    # Dotted path / label of the app this config applies to.
    name = 'element'
| 15
| 33
| 0.755556
| 10
| 90
| 6.8
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 90
| 5
| 34
| 18
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b4849f4782fd52b9684c25d4b6c13549a4223c19
| 42
|
py
|
Python
|
disp/cli/__init__.py
|
zhubonan/disp
|
c36da25fc97e38111a6786df24957f01ba7f30f3
|
[
"MIT"
] | 1
|
2022-03-01T03:44:43.000Z
|
2022-03-01T03:44:43.000Z
|
disp/cli/__init__.py
|
zhubonan/disp
|
c36da25fc97e38111a6786df24957f01ba7f30f3
|
[
"MIT"
] | null | null | null |
disp/cli/__init__.py
|
zhubonan/disp
|
c36da25fc97e38111a6786df24957f01ba7f30f3
|
[
"MIT"
] | null | null | null |
"""
Commandline interface sub-pacakge
"""
| 10.5
| 33
| 0.714286
| 4
| 42
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 3
| 34
| 14
| 0.810811
| 0.785714
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
81f1aaadd0e850c875963d3cc4a1c89953e0b941
| 149
|
py
|
Python
|
slides/birdhouse-architecture/myplot.py
|
Zeitsperre/birdhouse-docs
|
1789c524ea19de38c0ebd920fe9817323e86e565
|
[
"Apache-2.0"
] | null | null | null |
slides/birdhouse-architecture/myplot.py
|
Zeitsperre/birdhouse-docs
|
1789c524ea19de38c0ebd920fe9817323e86e565
|
[
"Apache-2.0"
] | null | null | null |
slides/birdhouse-architecture/myplot.py
|
Zeitsperre/birdhouse-docs
|
1789c524ea19de38c0ebd920fe9817323e86e565
|
[
"Apache-2.0"
] | null | null | null |
def myplot(nc_file, variable):
    """Plot *variable* of *nc_file* and return the output image name.

    Parameters
    ----------
    nc_file : application/netcdf
        Path to the input netCDF file.
    variable : string
        Name of the variable to plot.

    Returns
    -------
    str
        Name of the rendered plot image, ``'plot.png'``.
    """
    # Placeholder used in the architecture slides.  The original body was
    # ``return plot.png`` which referenced an undefined name `plot` and
    # raised NameError on every call; return the filename string instead.
    return 'plot.png'
| 18.625
| 30
| 0.644295
| 19
| 149
| 4.894737
| 0.631579
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.268456
| 149
| 7
| 31
| 21.285714
| 0.853211
| 0.456376
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
c30a012e1f395f5dae0233bde64b070345b85e58
| 821
|
py
|
Python
|
wirecamel/lib/style.py
|
Querdos/wirecamel
|
87c249bcaf969ad09ee6d1dba9f4a7f3343e1723
|
[
"MIT"
] | 3
|
2018-04-24T06:16:25.000Z
|
2018-11-06T06:09:24.000Z
|
wirecamel/lib/style.py
|
aeliant/wirecamel
|
87c249bcaf969ad09ee6d1dba9f4a7f3343e1723
|
[
"MIT"
] | null | null | null |
wirecamel/lib/style.py
|
aeliant/wirecamel
|
87c249bcaf969ad09ee6d1dba9f4a7f3343e1723
|
[
"MIT"
] | 2
|
2018-04-24T06:16:26.000Z
|
2018-11-02T03:02:59.000Z
|
# coding=utf-8
# HEADER = '\033[95m'
# OKBLUE = '\033[94m'
# Console styling helpers: each one prints *text* wrapped in ANSI escape
# codes (colours, underline, bold) to stdout.

def checked(text):
    # Blue "+" bullet: step completed.
    print(f"[\033[94m+\033[0m] {text}")


def not_checked(text):
    # Red "×" bullet: step failed.
    print(f"[\033[91m×\033[0m] {text}")


def loading(text):
    # Yellow "*" bullet: step in progress.
    print(f"[\033[93m*\033[0m] {text}")


def underline(text):
    print(f"\033[4m{text}\033[0m")


def bold(text):
    print(f"\033[1m{text}\033[0m")


def fail(text):
    # Red text followed by an extra blank line.
    print(f"\033[91m{text}\033[0m\n")


def success(text):
    print(f"\033[92m{text}\033[0m")


def warning(text):
    print(f"\033[93m{text}\033[0m")
# Printing function
def print_call_info(return_code, process_name, text):
    """Report the outcome of an external call.

    Parameters
    ----------
    return_code : int
        Exit status of the call; non-zero means failure.
    process_name : str
        Name shown in the failure message.
    text : str
        Success message printed when *return_code* is 0.

    Exits the whole program with status 1 on failure.
    """
    if return_code != 0:
        # Fix typo in the user-facing message: "occured" -> "occurred".
        not_checked("A problem occurred with {}. Aborting".format(process_name))
        exit(1)
    else:
        checked(text)
| 17.847826
| 79
| 0.613886
| 122
| 821
| 4.07377
| 0.377049
| 0.144869
| 0.193159
| 0.211268
| 0.307847
| 0.120724
| 0.120724
| 0.120724
| 0
| 0
| 0
| 0.12188
| 0.170524
| 821
| 45
| 80
| 18.244444
| 0.606461
| 0.085262
| 0
| 0
| 0
| 0
| 0.245308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.409091
| false
| 0
| 0
| 0
| 0.409091
| 0.409091
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
c32955074144d92fe395b39419d6f2eca54440f6
| 19,363
|
py
|
Python
|
Scripts/R1/plot_BoxPlots_ModelParametersEpochs.py
|
zmlabe/InternalSignal
|
2ac31bc7a0c445ed9fa609f3c7f9800bec7b4aed
|
[
"MIT"
] | 3
|
2020-08-27T08:07:34.000Z
|
2021-10-06T08:47:42.000Z
|
Scripts/R1/plot_BoxPlots_ModelParametersEpochs.py
|
zmlabe/InternalSignal
|
2ac31bc7a0c445ed9fa609f3c7f9800bec7b4aed
|
[
"MIT"
] | 1
|
2022-03-25T02:14:19.000Z
|
2022-03-25T02:14:19.000Z
|
Scripts/R1/plot_BoxPlots_ModelParametersEpochs.py
|
zmlabe/InternalSignal
|
2ac31bc7a0c445ed9fa609f3c7f9800bec7b4aed
|
[
"MIT"
] | 3
|
2021-02-12T19:18:20.000Z
|
2021-03-28T08:38:56.000Z
|
"""
Plot results of the ANNs with different hyperparameters
Reference : Barnes et al. [2020, JAMES] and Barnes et al. [2019, GRL]
Author : Zachary M. Labe
Date : 25 March 2021
Version : R1
"""
### Import packages
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as stats
### Plotting defaults
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
def adjust_spines(ax, spines):
    """Show only the axis spines named in *spines*, offset outward.

    Spines listed in *spines* are pushed 5 points outward; every other
    spine is hidden.  Tick marks are kept only on the sides whose spine
    remains visible.
    """
    visible = set(spines)
    for name, spine in ax.spines.items():
        if name in visible:
            spine.set_position(('outward', 5))  # push shown spines outward
        else:
            spine.set_color('none')             # hide everything else

    # y ticks only when the left spine is shown
    if 'left' in visible:
        ax.yaxis.set_ticks_position('left')
    else:
        ax.yaxis.set_ticks([])

    # x ticks only when the bottom spine is shown
    if 'bottom' in visible:
        ax.xaxis.set_ticks_position('bottom')
    else:
        ax.xaxis.set_ticks([])
###############################################################################
###############################################################################
###############################################################################
### Data preliminaries
directorydataLLS = '/Users/zlabe/Data/LENS/SINGLE/'
directorydataLLL = '/Users/zlabe/Data/LENS/monthly'
directorydataBB = '/Users/zlabe/Data/BEST/'
directorydataEE = '/Users/zlabe/Data/ERA5/'
directorydataoutput = '/Users/zlabe/Documents/Research/InternalSignal/Data/FINAL/R1/Parameters/'
directoryfigure = '/Users/zlabe/Desktop/PAPER/R1/'
datasetsingle = ['XGHG','XAER','lens']
simuname = [r'AER+',r'GHG+',r'ALL']
simu = ['XGHG','XAER','lens']
seasons = ['annual']
timexghg = np.arange(1920,2080+1,1)
timexaer = np.arange(1920,2080+1,1)
timelens = np.arange(1920,2080+1,1)
yearsall = [timexghg,timexaer,timelens]
directoriesall = [directorydataLLS,directorydataLLS,directorydataLLL]
### Parameters (setting random seeds everwhere for 10 iterations (counter))
l2_try = [0.0001,0.001,0.01,0.1,1,5]
epochs_try = [100,500,1500]
nodes_try = [8,20]
layers_try8 = [8,8],[8,8,8]
layers_try20 = [20,20],[20,20,20],[20,20,20,20]
layers_tryall = np.append(layers_try8,layers_try20)
SAMPLEQ = 10
###############################################################################
# for lay in range(len(layers_tryall)):
# for epo in range(len(epochs_try)):
# for ridg in range(len(l2_try)):
# for isample in range(SAMPLEQ):
###############################################################################
###############################################################################
###############################################################################
###############################################################################
### Read in data
for mm in range(len(simu)):
slopetrain = np.load(directorydataoutput + '%s_slopeTrain_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
slopetest = np.load(directorydataoutput + '%s_slopeTest_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
r2train = np.load(directorydataoutput + '%s_r2Train_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
r2test = np.load(directorydataoutput + '%s_r2Test_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
rmsePRE = np.load(directorydataoutput + '%s_rmsePRE_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
rmsePOS = np.load(directorydataoutput + '%s_rmsePOST_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
rmseALL = np.load(directorydataoutput + '%s_rmseYEARS_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
maePRE = np.load(directorydataoutput + '%s_maePRE_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
maePOS = np.load(directorydataoutput + '%s_maePOST_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
maeALL = np.load(directorydataoutput + '%s_maeYEARS_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
lay = np.load(directorydataoutput + '%s_numOfLayers_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
epo = np.load(directorydataoutput + '%s_numofEpochs_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
L2s = np.load(directorydataoutput + '%s_L2_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
isample = np.load(directorydataoutput + '%s_numOfSeedSamples_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
enstype = np.load(directorydataoutput + '%s_lensType_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
segseed = np.load(directorydataoutput + '%s_EnsembleSegmentSeed_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
annseed =np.load(directorydataoutput + '%s_annSegmentSeed_annual_R1.npy' % simu[mm],
allow_pickle=True).squeeze().reshape(len(layers_tryall),len(epochs_try),len(l2_try),SAMPLEQ)
###############################################################################
###############################################################################
###############################################################################
### Align correct distributions
slopetestd = np.swapaxes(slopetest,1,2)
r2testd = np.swapaxes(r2test,1,2)
rmsePREd = np.swapaxes(rmsePRE,1,2)
rmsePOSd = np.swapaxes(rmsePOS,1,2)
rmseALLd = np.swapaxes(rmseALL,1,2)
maePREd = np.swapaxes(maePRE,1,2)
maePOSd = np.swapaxes(maePOS,1,2)
maeALLd = np.swapaxes(maeALL,1,2)
layd = np.swapaxes(lay,1,2)
epod = np.swapaxes(epo,1,2)
L2sd = np.swapaxes(L2s,1,2)
isampled = np.swapaxes(isample,1,2)
enstyped = np.swapaxes(enstype,1,2)
segseedd = np.swapaxes(segseed,1,2)
annseedd = np.swapaxes(annseed,1,2)
###############################################################################
###############################################################################
###############################################################################
### Select epochs to show changes in distributions
comp8_1 = maePREd[0,1,:,:]
comp8_2 = maePOSd[0,1,:,:]
comp20_1 = maePREd[2,1,:,:]
comp20_2 = maePOSd[2,1,:,:]
comp8_1m = maePREd[0,2,:,:]
comp8_2m = maePOSd[0,2,:,:]
comp20_1m = maePREd[2,2,:,:]
comp20_2m = maePOSd[2,2,:,:]
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
### Graph for RMSE 8 layers
pree = comp8_1.transpose() # 8x8
post = comp8_2.transpose() # 8x8
fig = plt.figure(figsize=(10,4))
ax = plt.subplot(141)
adjust_spines(ax, ['left', 'bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('none')
ax.spines['left'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
ax.tick_params(axis="x",which="both",bottom = False,top=False,
labelbottom=False)
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.7)
def set_box_color(bp, color):
    # Style one boxplot dict (as returned by plt.boxplot): boxes and
    # whiskers in *color*, caps hidden (white, fully transparent),
    # medians drawn in white.
    plt.setp(bp['boxes'],color=color)
    plt.setp(bp['whiskers'], color=color,linewidth=1.5)
    plt.setp(bp['caps'], color='w',alpha=0)
    plt.setp(bp['medians'], color='w',linewidth=1)
positionspre = np.array(range(pree.shape[1]))*2.0-0.3
positionspost = np.array(range(post.shape[1]))*2.0+0.3
bpl = plt.boxplot(pree,positions=positionspre,widths=0.5,
patch_artist=True,sym='')
bpr = plt.boxplot(post,positions=positionspost, widths=0.5,
patch_artist=True,sym='')
# Modify boxes
cpree = 'deepskyblue'
cpost = 'indianred'
set_box_color(bpl,cpree)
set_box_color(bpr,cpost)
plt.plot([], c=cpree, label=r'\textbf{BEFORE 1980}')
plt.plot([], c=cpost, label=r'\textbf{AFTER 1980}')
l = plt.legend(shadow=False,fontsize=10,loc='upper center',
fancybox=True,frameon=False,ncol=2,bbox_to_anchor=(2.8,1.16),
labelspacing=1,columnspacing=1,handletextpad=0.4)
for i in range(pree.shape[1]):
y = pree[:,i]
x = np.random.normal(positionspre[i], 0.04, size=len(y))
plt.plot(x, y,color='darkblue', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
for i in range(post.shape[1]):
y = post[:,i]
x = np.random.normal(positionspost[i], 0.04, size=len(y))
plt.plot(x, y,color='darkred', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
plt.ylabel(r'\textbf{\underline{MAE} for layers = %s using %s}' % (layers_tryall[0],simuname[mm]),color='dimgrey',fontsize=8)
plt.yticks(np.arange(0,61,5),list(map(str,np.round(np.arange(0,61,5),2))),
fontsize=6)
plt.ylim([0,15])
plt.text(-0.2,0.2,r'\textbf{100}',fontsize=8,color='k',
ha='left',va='center')
plt.text(2.02,0.2,r'\textbf{500}',fontsize=8,color='k',
ha='center',va='center')
plt.text(4.61,0.2,r'\textbf{1500}',fontsize=8,color='k',
ha='right',va='center')
plt.xlabel(r'\textbf{Epochs for L$_{2}$=%s}' % l2_try[1],color='dimgrey',fontsize=8)
###########################################################################
###############################################################################
### Graph for RMSE 20 layers
pree = comp20_1.transpose() # 20x20
post = comp20_2.transpose() # 20x20
ax = plt.subplot(142)
adjust_spines(ax, ['left', 'bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('none')
ax.spines['left'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
ax.tick_params(axis="x",which="both",bottom = False,top=False,
labelbottom=False)
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.7)
def set_box_color(bp, color):
    # Style one boxplot dict (as returned by plt.boxplot): boxes and
    # whiskers in *color*, caps hidden (white, fully transparent),
    # medians drawn in white.
    plt.setp(bp['boxes'],color=color)
    plt.setp(bp['whiskers'], color=color,linewidth=1.5)
    plt.setp(bp['caps'], color='w',alpha=0)
    plt.setp(bp['medians'], color='w',linewidth=1)
positionspre = np.array(range(pree.shape[1]))*2.0-0.3
positionspost = np.array(range(post.shape[1]))*2.0+0.3
bpl = plt.boxplot(pree,positions=positionspre,widths=0.5,
patch_artist=True,sym='')
bpr = plt.boxplot(post,positions=positionspost, widths=0.5,
patch_artist=True,sym='')
# Modify boxes
cpree = 'deepskyblue'
cpost = 'indianred'
set_box_color(bpl,cpree)
set_box_color(bpr,cpost)
plt.plot([], c=cpree, label=r'\textbf{BEFORE 1980}')
plt.plot([], c=cpost, label=r'\textbf{AFTER 1980}')
for i in range(pree.shape[1]):
y = pree[:,i]
x = np.random.normal(positionspre[i], 0.04, size=len(y))
plt.plot(x, y,color='darkblue', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
for i in range(post.shape[1]):
y = post[:,i]
x = np.random.normal(positionspost[i], 0.04, size=len(y))
plt.plot(x, y,color='darkred', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
plt.ylabel(r'\textbf{\underline{MAE} for layers = %s using %s}' % (layers_tryall[2],simuname[mm]),color='dimgrey',fontsize=8)
plt.yticks(np.arange(0,61,5),list(map(str,np.round(np.arange(0,61,5),2))),
fontsize=6)
plt.ylim([0,15])
plt.text(-0.2,0.2,r'\textbf{100}',fontsize=8,color='k',
ha='left',va='center')
plt.text(2.02,0.2,r'\textbf{500}',fontsize=8,color='k',
ha='center',va='center')
plt.text(4.61,0.2,r'\textbf{1500}',fontsize=8,color='k',
ha='right',va='center')
plt.xlabel(r'\textbf{Epochs for L$_{2}$=%s}' % l2_try[1],color='dimgrey',fontsize=8)
###########################################################################
###########################################################################
### Graph for RMSE 8 layers
# NOTE(review): despite the "8 layers" header this panel labels itself with
# layers_tryall[0] below -- confirm which setting comp8_* actually holds.
pree = comp8_1m.transpose() # 8x8
post = comp8_2m.transpose() # 8x8
ax = plt.subplot(143)
adjust_spines(ax, ['left', 'bottom'])
# Hide every spine except a thick grey left axis.
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('none')
ax.spines['left'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
# Suppress x ticks; group labels are drawn manually with plt.text below.
ax.tick_params(axis="x",which="both",bottom = False,top=False,
               labelbottom=False)
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.7)
def set_box_color(bp, color):
    # Color boxes/whiskers, hide the caps and draw medians in white.
    # (Re-defined identically for every panel in this script.)
    plt.setp(bp['boxes'],color=color)
    plt.setp(bp['whiskers'], color=color,linewidth=1.5)
    plt.setp(bp['caps'], color='w',alpha=0)
    plt.setp(bp['medians'], color='w',linewidth=1)
# Offset the pre/post boxes left/right of each group position.
positionspre = np.array(range(pree.shape[1]))*2.0-0.3
positionspost = np.array(range(post.shape[1]))*2.0+0.3
bpl = plt.boxplot(pree,positions=positionspre,widths=0.5,
                  patch_artist=True,sym='')
bpr = plt.boxplot(post,positions=positionspost, widths=0.5,
                  patch_artist=True,sym='')
# Modify boxes
cpree = 'deepskyblue'
cpost = 'indianred'
set_box_color(bpl,cpree)
set_box_color(bpr,cpost)
# Empty plots exist only to create legend entries for the two box colors.
plt.plot([], c=cpree, label=r'\textbf{BEFORE 1980}')
plt.plot([], c=cpost, label=r'\textbf{AFTER 1980}')
# Overlay the raw scores as jittered points on top of each box.
for i in range(pree.shape[1]):
    y = pree[:,i]
    x = np.random.normal(positionspre[i], 0.04, size=len(y))
    plt.plot(x, y,color='darkblue', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
for i in range(post.shape[1]):
    y = post[:,i]
    x = np.random.normal(positionspost[i], 0.04, size=len(y))
    plt.plot(x, y,color='darkred', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
plt.ylabel(r'\textbf{\underline{MAE} for layers = %s using %s}' % (layers_tryall[0],simuname[mm]),color='dimgrey',fontsize=8)
plt.yticks(np.arange(0,61,5),list(map(str,np.round(np.arange(0,61,5),2))),
           fontsize=6)
plt.ylim([0,15])
# Manual x-axis annotations for the epoch groups (100/500/1500).
plt.text(-0.2,0.2,r'\textbf{100}',fontsize=8,color='k',
         ha='left',va='center')
plt.text(2.02,0.2,r'\textbf{500}',fontsize=8,color='k',
         ha='center',va='center')
plt.text(4.61,0.2,r'\textbf{1500}',fontsize=8,color='k',
         ha='right',va='center')
plt.xlabel(r'\textbf{Epochs for L$_{2}$=%s}' % l2_try[2],color='dimgrey',fontsize=8)
###########################################################################
###############################################################################
### Graph for RMSE 8 layers
# NOTE(review): header says "8 layers" but the data below is the 20x20
# configuration -- presumably a copy/paste leftover; confirm.
pree = comp20_1m.transpose() # 20x20
post = comp20_2m.transpose() # 20x20
ax = plt.subplot(144)
adjust_spines(ax, ['left', 'bottom'])
# Hide every spine except a thick grey left axis.
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('none')
ax.spines['left'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
# Suppress x ticks; group labels are drawn manually with plt.text below.
ax.tick_params(axis="x",which="both",bottom = False,top=False,
               labelbottom=False)
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.7)
def set_box_color(bp, color):
    # Color boxes/whiskers, hide the caps and draw medians in white.
    # (Re-defined identically for every panel in this script.)
    plt.setp(bp['boxes'],color=color)
    plt.setp(bp['whiskers'], color=color,linewidth=1.5)
    plt.setp(bp['caps'], color='w',alpha=0)
    plt.setp(bp['medians'], color='w',linewidth=1)
# Offset the pre/post boxes left/right of each group position.
positionspre = np.array(range(pree.shape[1]))*2.0-0.3
positionspost = np.array(range(post.shape[1]))*2.0+0.3
bpl = plt.boxplot(pree,positions=positionspre,widths=0.5,
                  patch_artist=True,sym='')
bpr = plt.boxplot(post,positions=positionspost, widths=0.5,
                  patch_artist=True,sym='')
# Modify boxes
cpree = 'deepskyblue'
cpost = 'indianred'
set_box_color(bpl,cpree)
set_box_color(bpr,cpost)
# Empty plots exist only to create legend entries for the two box colors.
plt.plot([], c=cpree, label=r'\textbf{BEFORE 1980}')
plt.plot([], c=cpost, label=r'\textbf{AFTER 1980}')
# Overlay the raw scores as jittered points on top of each box.
for i in range(pree.shape[1]):
    y = pree[:,i]
    x = np.random.normal(positionspre[i], 0.04, size=len(y))
    plt.plot(x, y,color='darkblue', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
for i in range(post.shape[1]):
    y = post[:,i]
    x = np.random.normal(positionspost[i], 0.04, size=len(y))
    plt.plot(x, y,color='darkred', alpha=0.5,zorder=10,marker='.',linewidth=0,markersize=5,markeredgewidth=0)
plt.ylabel(r'\textbf{\underline{MAE} for layers = %s using %s}' % (layers_tryall[2],simuname[mm]),color='dimgrey',fontsize=8)
plt.yticks(np.arange(0,61,5),list(map(str,np.round(np.arange(0,61,5),2))),
           fontsize=6)
plt.ylim([0,15])
# Manual x-axis annotations for the epoch groups (100/500/1500).
plt.text(-0.2,0.2,r'\textbf{100}',fontsize=8,color='k',
         ha='left',va='center')
plt.text(2.02,0.2,r'\textbf{500}',fontsize=8,color='k',
         ha='center',va='center')
plt.text(4.61,0.2,r'\textbf{1500}',fontsize=8,color='k',
         ha='right',va='center')
plt.xlabel(r'\textbf{Epochs for L$_{2}$=%s}' % l2_try[2],color='dimgrey',fontsize=8)
plt.subplots_adjust(wspace=0.5)
# Write the assembled 1x4 figure to the configured figure directory.
plt.savefig(directoryfigure + '%s_MAEForHyperparameters_Epochs.png' % (simu[mm]),dpi=300)
| 46.997573
| 129
| 0.557507
| 2,521
| 19,363
| 4.188417
| 0.12852
| 0.018562
| 0.025571
| 0.04186
| 0.713515
| 0.708306
| 0.703192
| 0.700066
| 0.698362
| 0.698362
| 0
| 0.044061
| 0.174818
| 19,363
| 412
| 130
| 46.997573
| 0.616786
| 0.039663
| 0
| 0.677741
| 0
| 0
| 0.133984
| 0.046237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016611
| false
| 0
| 0.009967
| 0
| 0.026578
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c32ca018ca5ae255d412cc113d76f93f8a421a29
| 169
|
py
|
Python
|
OCT23/14.py
|
Razdeep/PythonSnippets
|
76f9313894f511c487a99bc38bdf0fe5e594caf5
|
[
"MIT"
] | null | null | null |
OCT23/14.py
|
Razdeep/PythonSnippets
|
76f9313894f511c487a99bc38bdf0fe5e594caf5
|
[
"MIT"
] | null | null | null |
OCT23/14.py
|
Razdeep/PythonSnippets
|
76f9313894f511c487a99bc38bdf0fe5e594caf5
|
[
"MIT"
] | null | null | null |
# Demonstrate numpy's ptp ("peak to peak") reduction, i.e. max - min.
import numpy as np

# Same 3x3 matrix of 1..9 as before, built from a range instead of literals.
a = np.arange(1, 10).reshape(3, 3)
# Whole-array range, then per-row (axis=1) and per-column (axis=0) ranges.
print(np.ptp(a))
print(np.ptp(a, axis=1))
print(np.ptp(a, axis=0))
| 21.125
| 37
| 0.621302
| 40
| 169
| 2.625
| 0.6
| 0.2
| 0.285714
| 0.314286
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073333
| 0.112426
| 169
| 8
| 38
| 21.125
| 0.626667
| 0.248521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.6
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
c342a7924d46b2ffe53d80897c3e4ab8bb1df9f5
| 324
|
py
|
Python
|
infobasenutritiondb/api/admin.py
|
bfssi-forest-dussault/infobasenutritiondb
|
7e0a70ac5c9d77ea2655135e77085b95b2b05134
|
[
"MIT"
] | null | null | null |
infobasenutritiondb/api/admin.py
|
bfssi-forest-dussault/infobasenutritiondb
|
7e0a70ac5c9d77ea2655135e77085b95b2b05134
|
[
"MIT"
] | null | null | null |
infobasenutritiondb/api/admin.py
|
bfssi-forest-dussault/infobasenutritiondb
|
7e0a70ac5c9d77ea2655135e77085b95b2b05134
|
[
"MIT"
] | null | null | null |
"""Admin-site registrations for the infobase nutrition API models."""
from django.contrib import admin
from infobasenutritiondb.api import models

# Register each model with the default ModelAdmin in one pass.
for _model in (
    models.AdequacyValueReference,
    models.AgeGroup,
    models.IntakeDistributionCoordinates,
    models.Region,
    models.Nutrient,
):
    admin.site.register(_model)
| 32.4
| 57
| 0.851852
| 39
| 324
| 7.076923
| 0.435897
| 0.163043
| 0.307971
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058642
| 324
| 9
| 58
| 36
| 0.904918
| 0.080247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c36107061721feb5b0aeecae487ddd041ec37fad
| 4,077
|
py
|
Python
|
text_normalizer/library/test/test_punctuation_mapping.py
|
Yoctol/text-normalizer
|
3609c10cd229c08b4623531e82d2292fc370734c
|
[
"MIT"
] | 16
|
2018-04-16T08:59:34.000Z
|
2021-01-21T06:05:45.000Z
|
text_normalizer/library/test/test_punctuation_mapping.py
|
Yoctol/text-normalizer
|
3609c10cd229c08b4623531e82d2292fc370734c
|
[
"MIT"
] | 12
|
2018-04-16T06:54:07.000Z
|
2020-04-03T19:36:07.000Z
|
text_normalizer/library/test/test_punctuation_mapping.py
|
Yoctol/text-normalizer
|
3609c10cd229c08b4623531e82d2292fc370734c
|
[
"MIT"
] | 3
|
2019-06-03T02:21:46.000Z
|
2021-09-16T11:57:03.000Z
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from ..punctuation_mapping import (
full_punctuation_mapping_text_normalizer,
simplified_punctuation_mapping_text_normalizer,
)
class PunctuationMappingTextNormalizerTestCase(TestCase):
    """Round-trip tests for the punctuation-mapping text normalizers."""

    def run_test(self, test_cases, normalizer):
        # Each test case is a (raw, normalized) pair: check that
        # normalize() yields the normalized form, then that denormalize()
        # restores the raw form from the normalized text plus `meta`.
        for test_case in test_cases:
            with self.subTest(test_case=test_case):
                revised_sentence, meta = normalizer.normalize(
                    sentence=test_case[0],
                )
                self.assertEqual(
                    test_case[1],
                    revised_sentence,
                )
                recovered_sentence = normalizer.denormalize(
                    sentence=test_case[1],
                    meta=meta,
                )
                self.assertEqual(
                    test_case[0],
                    recovered_sentence,
                )

    def test_full_punctuation_mapping_text_normalizer(self):
        # Full-width / CJK punctuation mapped to ASCII equivalents.
        test_cases = [
            (
                "符號, 、 。 . ? ! ~ $ % @ & # * ‧ , 、 。 . ? ! ~ $ % @ & # * ‧",
                "符號, , . . ? ! ~ $ % @ & # * . , , . . ? ! ~ $ % @ & # * .",
            ),
            (
                "; ︰ … ﹐ ﹒ ˙ · ﹔ ﹕ ‘ ’ “ ” 〝 〞 ; ︰ … ﹐ ﹒ ˙ · ﹔ ﹕ ‘ ’ “ ” 〝 〞",
                "; : ... , . . . ; : \" \" \" \" \" \" ; : ... , . . . ; : \" \" \" \" \" \"",
            ),
            (
                "括號符號; 〔 〕 【 】 ﹝ ﹞ 〈 〉 ﹙ ﹚ 《 》 ( ) ; 〔 〕 【 】 ﹝ ﹞ 〈 〉 ﹙ ﹚ 《 》 ( )",
                "括號符號; [ ] [ ] [ ] < > ( ) < > ( ) ; [ ] [ ] [ ] < > ( ) < > ( )",
            ),
            (
                "{ } ﹛ ﹜ 『 』 「 」 < > ≦ ≧ ﹤ ﹥ { } ﹛ ﹜ 『 』 「 」 < > ≦ ≧ ﹤ ﹥",
                "{ } { } \" \" \" \" < > < > < > { } { } \" \" \" \" < > < > < >",
            ),
            (
                "括號符號; ︵ ︶ ︷ ︸ ︹ ︺ ︻ ︼ ︽ ︾ ︿ ﹀ ﹁ ﹂ ﹃ ﹄ ︵ ︶ ︷ ︸ ︹ ︺ ︻ ︼ ︽ ︾ ︿ ﹀ ﹁ ﹂ ﹃ ﹄",
                "括號符號; ( ) { } [ ] [ ] < > < > \" \" \" \" ( ) { } [ ] [ ] < > < > \" \" \" \"",
            ),
            (
                "線段符號; ﹣ ﹦ ≡ | ∣ ∥ – ︱ — ︳ ╴ ¯  ̄ ﹉ ﹣ ﹦ ≡ | ∣ ∥ – ︱ — ︳ ╴ ¯  ̄ ﹉",
                "線段符號; - = = | | / - | - | - - - - - = = | | / - | - | - - - -",
            ),
            (
                "﹊ ﹍ ﹎ ﹋ ﹌ ﹏ ︴ ﹨ ∕ ╲ ╱ \ / ﹊ ﹍ ﹎ ﹋ ﹌ ﹏ ︴ ﹨ ∕ ╲ ╱ \ /",
                "- _ _ - - _ | \ / \ / \ / - _ _ - - _ | \ / \ / \ /",
            ),
            (
                "+ + + ﹢ * * × ╳",
                "+ + + + * * * *",
            ),
        ]
        self.run_test(test_cases, normalizer=full_punctuation_mapping_text_normalizer)

    def test_simplified_punctuation_mapping_text_normalizer(self):
        # Simplified mapping: brackets collapse to parentheses and line
        # marks to commas/hyphens (coarser than the full mapping above).
        test_cases = [
            (
                "符號, 、 。 . ? ! ~ $ % @ & # * ‧ , 、 。 . ? ! ~ $ % @ & # * ‧",
                "符號, , . . ? ! - $ % @ & # * . , , . . ? ! - $ % @ & # * .",
            ),
            (
                "; ︰ … ﹐ ﹒ ˙ · ﹔ ﹕ ‘ ’ “ ” 〝 〞 ︰ … ﹐ ﹒ ˙ · ﹔ ﹕ ‘ ’ “ ” 〝 〞",
                "; : ... , . . . ; : \" \" \" \" \" \" : ... , . . . ; : \" \" \" \" \" \"",
            ),
            (
                "括號符號; 〔 〕 【 】 ﹝ ﹞ 〈 〉 ﹙ ﹚ 《 》 ( ) 〔 〕 【 】 ﹝ ﹞ 〈 〉 ﹙ ﹚ 《 》 ( )",
                "括號符號; ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( ) ( )",
            ),
            (
                "{ } ﹛ ﹜ 『 』 「 」 < > ≦ ≧ ﹤ ﹥ { } ﹛ ﹜ 『 』 「 」 < > ≦ ≧ ﹤ ﹥",
                "( ) ( ) \" \" \" \" ( ) ( ) ( ) ( ) ( ) \" \" \" \" ( ) ( ) ( )",
            ),
            (
                "括號符號; ︵ ︶ ︷ ︸ ︹ ︺ ︻ ︼ ︽ ︾ ︿ ﹀ ﹁ ﹂ ﹃ ﹄ ︵ ︶ ︷ ︸ ︹ ︺ ︻ ︼ ︽ ︾ ︿ ﹀ ﹁ ﹂ ﹃ ﹄",
                "括號符號; ( ) ( ) ( ) ( ) ( ) ( ) \" \" \" \" ( ) ( ) ( ) ( ) ( ) ( ) \" \" \" \"",
            ),
            (
                "線段符號; ﹣ ﹦ ≡ | ∣ ∥ – ︱ — ︳ ╴ ¯  ̄ ﹉ ﹣ ﹦ ≡ | ∣ ∥ – ︱ — ︳ ╴ ¯  ̄ ﹉",
                "線段符號; - = = , , , - , - , - - - - - = = , , , - , - , - - - -",
            ),
            (
                "﹊ ﹍ ﹎ ﹋ ﹌ ﹏ ︴ ﹨ ∕ ╲ ╱ \ / ﹊ ﹍ ﹎ ﹋ ﹌ ﹏ ︴ ﹨ ∕ ╲ ╱ \ /",
                "- _ _ - - _ , , , , , , , - _ _ - - _ , , , , , , ,",
            ),
        ]
        self.run_test(test_cases, normalizer=simplified_punctuation_mapping_text_normalizer)
| 40.77
| 97
| 0.208487
| 406
| 4,077
| 2.64532
| 0.211823
| 0.022346
| 0.022346
| 0.178771
| 0.610801
| 0.540037
| 0.396648
| 0.396648
| 0.396648
| 0.396648
| 0
| 0.002565
| 0.521952
| 4,077
| 99
| 98
| 41.181818
| 0.403284
| 0.005151
| 0
| 0.315217
| 0
| 0.23913
| 0.404045
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.032609
| false
| 0
| 0.021739
| 0
| 0.065217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c361818f27dee178909360229c1b9ffeba99a258
| 214
|
py
|
Python
|
rundbconnector.py
|
cambridgeltl/chat
|
98eaa1da042a149496d2d31373fc7265cdb3043d
|
[
"MIT"
] | 9
|
2017-07-15T08:45:22.000Z
|
2021-07-06T08:32:03.000Z
|
rundbconnector.py
|
cambridgeltl/chat
|
98eaa1da042a149496d2d31373fc7265cdb3043d
|
[
"MIT"
] | 6
|
2017-05-05T13:11:51.000Z
|
2019-01-25T22:46:30.000Z
|
rundbconnector.py
|
cambridgeltl/chat
|
98eaa1da042a149496d2d31373fc7265cdb3043d
|
[
"MIT"
] | 3
|
2018-03-16T14:11:52.000Z
|
2020-03-04T02:08:31.000Z
|
#!/usr/bin/env python
# Launch the dbconnector web app (presumably a Flask application, given the
# app.run(debug=..., use_reloader=...) signature -- confirm against the
# dbconnector package).
from dbconnector import app
from dbconnector import config

if config.DEBUG:
    # Debug mode: keep the configured port but disable the auto-reloader so
    # the process is not forked twice.
    app.run(debug=True, port=config.PORT, use_reloader=False)
else:
    # Non-debug: bind on all interfaces at the configured port.
    app.run(host='0.0.0.0', port=config.PORT)
| 21.4
| 61
| 0.733645
| 36
| 214
| 4.333333
| 0.555556
| 0.038462
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021505
| 0.130841
| 214
| 9
| 62
| 23.777778
| 0.817204
| 0.093458
| 0
| 0
| 0
| 0
| 0.036269
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6f19e2a6135eacbb701298371fd9ffb6555157b2
| 923
|
py
|
Python
|
tests/test_extract_num.py
|
dheepdatascigit/pdclean
|
688185d70511094d24db8c7e3043150ea266ce08
|
[
"MIT"
] | null | null | null |
tests/test_extract_num.py
|
dheepdatascigit/pdclean
|
688185d70511094d24db8c7e3043150ea266ce08
|
[
"MIT"
] | null | null | null |
tests/test_extract_num.py
|
dheepdatascigit/pdclean
|
688185d70511094d24db8c7e3043150ea266ce08
|
[
"MIT"
] | null | null | null |
import pytest
from pdtools import pdtools
def test_extractnum_pnum():
    """Positive numbers: extract_num with first/sum/mean selection logic."""
    test_str = "xyz share was 98.14 and yesterday was 97.3 and day before was 98 dollars"
    assert pdtools.extract_num(test_str, select_logic='first') == 98.14
    assert pdtools.extract_num(test_str, select_logic='sum') == 293.44
    # Mean is rounded to two decimals before comparing (avoids float noise).
    assert round(pdtools.extract_num(test_str, select_logic='mean'), 2) == 97.81
    # Default (no select_logic) returns all extracted numbers.
    assert len(pdtools.extract_num(test_str)) == 3
    assert 97.34 not in pdtools.extract_num(test_str)
def test_extractnum_nnum():
    """Signed numbers: extract_num must honour leading - and + signs."""
    test_str = "todays weather was -40.30 but yesterday for +10.30 so it had 20 degres change"
    assert pdtools.extract_num(test_str, select_logic='first') == -40.3
    # Sum is rounded to the nearest int: -40.3 + 10.3 + 20 -> -10.
    assert round(pdtools.extract_num(test_str, select_logic='sum')) == -10
    assert round(pdtools.extract_num(test_str, select_logic='mean'), 2) == -3.33
    assert len(pdtools.extract_num(test_str)) == 3, "passed"
    assert 97.34 not in pdtools.extract_num(test_str)
| 48.578947
| 91
| 0.75948
| 157
| 923
| 4.261147
| 0.343949
| 0.125561
| 0.254111
| 0.313901
| 0.647235
| 0.647235
| 0.647235
| 0.647235
| 0.475336
| 0.269058
| 0
| 0.063882
| 0.118093
| 923
| 18
| 92
| 51.277778
| 0.757985
| 0
| 0
| 0.125
| 0
| 0
| 0.193933
| 0
| 0
| 0
| 0
| 0
| 0.625
| 1
| 0.125
| false
| 0.0625
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
6f3549a27878723bb42cdc586b8a1c0f5d1f5f22
| 111
|
py
|
Python
|
{{cookiecutter.github_repository_name}}/{{cookiecutter.app_name}}/utils/apps.py
|
hhuseyinpay/cookiecutter-django-rest
|
89280ee1ff2845b29bbd7ce8f7f375f099f77a8e
|
[
"MIT"
] | null | null | null |
{{cookiecutter.github_repository_name}}/{{cookiecutter.app_name}}/utils/apps.py
|
hhuseyinpay/cookiecutter-django-rest
|
89280ee1ff2845b29bbd7ce8f7f375f099f77a8e
|
[
"MIT"
] | null | null | null |
{{cookiecutter.github_repository_name}}/{{cookiecutter.app_name}}/utils/apps.py
|
hhuseyinpay/cookiecutter-django-rest
|
89280ee1ff2845b29bbd7ce8f7f375f099f77a8e
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class UtilsConfig(AppConfig):
    """Django app configuration for the project's `utils` app."""
    # The dotted path is rendered by cookiecutter at project-generation time.
    name = '{{cookiecutter.app_name}}.utils'
| 18.5
| 44
| 0.747748
| 13
| 111
| 6.307692
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 111
| 5
| 45
| 22.2
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0.279279
| 0.279279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6f4f3ec9e44d4ddfa8b7138c8b2c2aa94ed063bf
| 4,414
|
py
|
Python
|
etc/pending_ugens/DynKlang.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 191
|
2015-11-13T02:28:42.000Z
|
2022-03-29T10:26:44.000Z
|
etc/pending_ugens/DynKlang.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 130
|
2016-01-04T16:59:02.000Z
|
2022-02-26T15:37:20.000Z
|
etc/pending_ugens/DynKlang.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 22
|
2016-05-04T10:32:16.000Z
|
2022-02-26T19:22:45.000Z
|
import collections
from supriya.enums import CalculationRate
from supriya.synthdefs import UGen
class DynKlang(UGen):
    """
    ::

        >>> dyn_klang = supriya.ugens.DynKlang.ar(
        ...     freqoffset=0,
        ...     freqscale=1,
        ...     specifications_array_ref=specifications_array_ref,
        ...     )
        >>> dyn_klang
        DynKlang.ar()

    """

    ### CLASS VARIABLES ###

    __documentation_section__ = None

    # FIX(review): the original passed three bare strings positionally to
    # OrderedDict, which raises ``TypeError: expected at most 1 arguments``
    # at class-definition time.  Map each ordered input name to its default
    # value instead, matching the other supriya UGen stubs.  The defaults
    # mirror the keyword defaults of __init__/ar/kr below.
    _ordered_input_names = collections.OrderedDict([
        ('specifications_array_ref', None),
        ('freqscale', 1),
        ('freqoffset', 0),
    ])

    _valid_calculation_rates = None

    ### INITIALIZER ###

    def __init__(
        self,
        calculation_rate=None,
        freqoffset=0,
        freqscale=1,
        specifications_array_ref=None,
    ):
        """Initialize a DynKlang with the given rate and inputs."""
        UGen.__init__(
            self,
            calculation_rate=calculation_rate,
            freqoffset=freqoffset,
            freqscale=freqscale,
            specifications_array_ref=specifications_array_ref,
        )

    ### PUBLIC METHODS ###

    @classmethod
    def ar(
        cls,
        freqoffset=0,
        freqscale=1,
        specifications_array_ref=None,
    ):
        """
        Constructs an audio-rate DynKlang.

        ::

            >>> dyn_klang = supriya.ugens.DynKlang.ar(
            ...     freqoffset=0,
            ...     freqscale=1,
            ...     specifications_array_ref=specifications_array_ref,
            ...     )
            >>> dyn_klang
            DynKlang.ar()

        Returns ugen graph.
        """
        # Imported for its registration side effects, as in the other stubs.
        import supriya.synthdefs  # noqa: F401
        # Use the CalculationRate imported at module top instead of reaching
        # through the ``supriya`` package namespace.
        calculation_rate = CalculationRate.AUDIO
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            freqoffset=freqoffset,
            freqscale=freqscale,
            specifications_array_ref=specifications_array_ref,
        )
        return ugen

    @classmethod
    def kr(
        cls,
        freqoffset=0,
        freqscale=1,
        specifications_array_ref=None,
    ):
        """
        Constructs a control-rate DynKlang.

        ::

            >>> dyn_klang = supriya.ugens.DynKlang.kr(
            ...     freqoffset=0,
            ...     freqscale=1,
            ...     specifications_array_ref=specifications_array_ref,
            ...     )
            >>> dyn_klang
            DynKlang.kr()

        Returns ugen graph.
        """
        # Imported for its registration side effects, as in the other stubs.
        import supriya.synthdefs  # noqa: F401
        calculation_rate = CalculationRate.CONTROL
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            freqoffset=freqoffset,
            freqscale=freqscale,
            specifications_array_ref=specifications_array_ref,
        )
        return ugen

    # def new1(): ...

    ### PUBLIC PROPERTIES ###

    @property
    def freqoffset(self):
        """
        Gets `freqoffset` input of DynKlang.

        ::

            >>> dyn_klang = supriya.ugens.DynKlang.ar(
            ...     freqoffset=0,
            ...     freqscale=1,
            ...     specifications_array_ref=specifications_array_ref,
            ...     )
            >>> dyn_klang.freqoffset
            0.0

        Returns ugen input.
        """
        index = self._ordered_input_names.index('freqoffset')
        return self._inputs[index]

    @property
    def freqscale(self):
        """
        Gets `freqscale` input of DynKlang.

        ::

            >>> dyn_klang = supriya.ugens.DynKlang.ar(
            ...     freqoffset=0,
            ...     freqscale=1,
            ...     specifications_array_ref=specifications_array_ref,
            ...     )
            >>> dyn_klang.freqscale
            1.0

        Returns ugen input.
        """
        index = self._ordered_input_names.index('freqscale')
        return self._inputs[index]

    @property
    def specifications_array_ref(self):
        """
        Gets `specifications_array_ref` input of DynKlang.

        ::

            >>> dyn_klang = supriya.ugens.DynKlang.ar(
            ...     freqoffset=0,
            ...     freqscale=1,
            ...     specifications_array_ref=specifications_array_ref,
            ...     )
            >>> dyn_klang.specifications_array_ref

        Returns ugen input.
        """
        index = self._ordered_input_names.index('specifications_array_ref')
        return self._inputs[index]
| 24.797753
| 75
| 0.53353
| 366
| 4,414
| 6.139344
| 0.161202
| 0.219849
| 0.254562
| 0.084112
| 0.724967
| 0.724967
| 0.696484
| 0.676903
| 0.655986
| 0.635069
| 0
| 0.008256
| 0.368826
| 4,414
| 177
| 76
| 24.937853
| 0.798277
| 0.381287
| 0
| 0.608696
| 0
| 0
| 0.039216
| 0.021888
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.072464
| 0
| 0.289855
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6f5a83c36a5392ffe6d065b4ff5cf79258fa4fe0
| 60
|
py
|
Python
|
main.py
|
JohnnyChangCheng/FinanceTool
|
67240b62e3f13d2bef2209c976ff7267dfcaf27c
|
[
"MIT"
] | null | null | null |
main.py
|
JohnnyChangCheng/FinanceTool
|
67240b62e3f13d2bef2209c976ff7267dfcaf27c
|
[
"MIT"
] | null | null | null |
main.py
|
JohnnyChangCheng/FinanceTool
|
67240b62e3f13d2bef2209c976ff7267dfcaf27c
|
[
"MIT"
] | null | null | null |
# Entry point: run the market crawler.
from market import Market

crawler = Market()
# twse: presumably scrapes the Taiwan Stock Exchange -- confirm in market.py.
crawler.twse()
| 15
| 25
| 0.766667
| 8
| 60
| 5.75
| 0.625
| 0.565217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 60
| 4
| 26
| 15
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
489a84885b280b1903424467fb485fe82dfb5f93
| 1,005
|
py
|
Python
|
survol/sources_types/sqlserver/table/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | null | null | null |
survol/sources_types/sqlserver/table/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | null | null | null |
survol/sources_types/sqlserver/table/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Sqlserver table
"""
import sys
import lib_common
from sources_types.sqlserver import dsn as sqlserver_dsn
from sources_types.sqlserver import schema as sqlserver_schema
def AddInfo(grph,node,entity_ids_arr):
    """Link this sqlserver table node to its parent schema node in the graph.

    entity_ids_arr holds the entity ids in order: [0] DSN name, [1] schema name.
    """
    # TODO: It would still be better to pass AddInfo a dict rather than an
    # array.  (Translated from the original French comment.)
    dsnNam = entity_ids_arr[0]
    schemaNam = entity_ids_arr[1]
    nodeSchema = sqlserver_schema.MakeUri(dsnNam,schemaNam)
    grph.add( ( nodeSchema, lib_common.MakeProp("sqlserver table"), node ) )
def EntityOntology():
    """Return the ontology of a sqlserver table: (DSN property, "Schema", "Table")."""
    return ( [sqlserver_dsn.CgiPropertyDsn(), "Schema", "Table"], )
# Beware of the possible confusion with normal users.
def MakeUri(dsnNam,schemaName,tableName):
    """Build the URI identifying one sqlserver table from DSN, schema and table names."""
    # sys.stderr.write("sqlserver/table tableName=%s\n"%tableName)
    return lib_common.gUriGen.UriMakeFromDict("sqlserver/table", { sqlserver_dsn.CgiPropertyDsn() : dsnNam, "Schema" : schemaName, "Table" : tableName } )
def EntityName(entity_ids_arr):
    """Return the display name "dsn.schema.table" for a sqlserver table entity."""
    # Index the three components explicitly so a short array still raises
    # IndexError, exactly as the original concatenation did.
    dsn_part = entity_ids_arr[0]
    schema_part = entity_ids_arr[1]
    table_part = entity_ids_arr[2]
    return ".".join((dsn_part, schema_part, table_part))
| 35.892857
| 151
| 0.765174
| 136
| 1,005
| 5.477941
| 0.455882
| 0.084564
| 0.112752
| 0.067114
| 0.083221
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00565
| 0.119403
| 1,005
| 27
| 152
| 37.222222
| 0.836158
| 0.21194
| 0
| 0
| 0
| 0
| 0.069054
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 1
| 0.266667
| false
| 0
| 0.266667
| 0.2
| 0.733333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
48a247462e3491bc7c1dcf33af67ced10784f5ea
| 129
|
py
|
Python
|
WANNRelease/WANN/visualize.py
|
freeknijweide/brain-tokyo-workshop
|
3a34bd46fe99bba165760cc1f1850e8737d43b6c
|
[
"Apache-2.0"
] | null | null | null |
WANNRelease/WANN/visualize.py
|
freeknijweide/brain-tokyo-workshop
|
3a34bd46fe99bba165760cc1f1850e8737d43b6c
|
[
"Apache-2.0"
] | null | null | null |
WANNRelease/WANN/visualize.py
|
freeknijweide/brain-tokyo-workshop
|
3a34bd46fe99bba165760cc1f1850e8737d43b6c
|
[
"Apache-2.0"
] | null | null | null |
# Render a saved WANN champion network for the CIFAR-10 classifier task.
import matplotlib.pyplot as plt
from vis.viewInd import viewInd

# viewInd draws the individual onto the current matplotlib figure;
# plt.show() then displays it.
viewInd("champions/cifar10_classifier.out","cifar10")
plt.show()
| 25.8
| 53
| 0.813953
| 18
| 129
| 5.777778
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033613
| 0.077519
| 129
| 5
| 54
| 25.8
| 0.840336
| 0
| 0
| 0
| 0
| 0
| 0.3
| 0.246154
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
48a3548e3564797fd43ae0a3cfb00bc239c829ce
| 513
|
py
|
Python
|
tests/dummy_view.py
|
stevecassidy/signbank-dictionary
|
9a2b9b953d17a186b53161be039366f2d3d1e30d
|
[
"BSD-3-Clause"
] | 2
|
2017-01-23T11:28:21.000Z
|
2018-05-27T00:43:01.000Z
|
tests/dummy_view.py
|
stevecassidy/signbank-dictionary
|
9a2b9b953d17a186b53161be039366f2d3d1e30d
|
[
"BSD-3-Clause"
] | null | null | null |
tests/dummy_view.py
|
stevecassidy/signbank-dictionary
|
9a2b9b953d17a186b53161be039366f2d3d1e30d
|
[
"BSD-3-Clause"
] | 1
|
2022-03-22T05:11:01.000Z
|
2022-03-22T05:11:01.000Z
|
'''
The dictionary app, in various places, tries to resolve urls defined in
the feedback app. Since the dictionary app is independent from the
feedback app, it cannot go to the feedback app to resolve these urls. Instead,
I re-define the feedback urls referenced by the dictionary app in
test/test_urls.py. These urls require a view, and this file provides that view.
'''
from django.http import HttpResponse
def dummy(request):
    """Placeholder view backing the feedback urls re-declared in test/test_urls.py."""
    return HttpResponse("Django's independent app idea doesn't work.")
| 34.2
| 79
| 0.760234
| 82
| 513
| 4.743902
| 0.597561
| 0.113111
| 0.123393
| 0.092545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183236
| 513
| 14
| 80
| 36.642857
| 0.928401
| 0.709552
| 0
| 0
| 0
| 0
| 0.304965
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
48c5aa2025d2e34fa0e56c6939cfb884cff68f74
| 360
|
py
|
Python
|
microsoft_problems/problem_8.py
|
loftwah/Daily-Coding-Problem
|
0327f0b4f69ef419436846c831110795c7a3c1fe
|
[
"MIT"
] | 129
|
2018-10-14T17:52:29.000Z
|
2022-01-29T15:45:57.000Z
|
microsoft_problems/problem_8.py
|
loftwah/Daily-Coding-Problem
|
0327f0b4f69ef419436846c831110795c7a3c1fe
|
[
"MIT"
] | 2
|
2019-11-30T23:28:23.000Z
|
2020-01-03T16:30:32.000Z
|
microsoft_problems/problem_8.py
|
loftwah/Daily-Coding-Problem
|
0327f0b4f69ef419436846c831110795c7a3c1fe
|
[
"MIT"
] | 60
|
2019-02-21T09:18:31.000Z
|
2022-03-25T21:01:04.000Z
|
"""This problem was asked by Microsoft.
Given an array of numbers, find the length of the longest increasing subsequence in the array.
The subsequence does not necessarily have to be contiguous.
For example, given the array [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15],
the longest increasing subsequence has length 6: it is 0, 2, 6, 9, 11, 15.
"""
| 60
| 95
| 0.708333
| 67
| 360
| 3.80597
| 0.686567
| 0.078431
| 0.156863
| 0.243137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106529
| 0.191667
| 360
| 6
| 96
| 60
| 0.769759
| 0.980556
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48ea8a6df87379f369d698632031ce760c94224b
| 329
|
py
|
Python
|
Client/views.py
|
jsteerv/Test_TicketSystem
|
c666ac1ed8ee75bba66aaff51dec38d74e73b5f3
|
[
"MIT"
] | null | null | null |
Client/views.py
|
jsteerv/Test_TicketSystem
|
c666ac1ed8ee75bba66aaff51dec38d74e73b5f3
|
[
"MIT"
] | 2
|
2020-02-11T23:46:26.000Z
|
2020-06-05T17:40:24.000Z
|
Client/views.py
|
jsteerv/Test_TicketSystem
|
c666ac1ed8ee75bba66aaff51dec38d74e73b5f3
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
def customer(request):
    """Render the customer-facing ticket page."""
    template_context = {}
    return render(request, 'Client/customer.html', template_context)
def agent(request):
    """Render the agent-facing ticket page."""
    template_context = {}
    return render(request, 'Client/agent.html', template_context)
| 23.5
| 59
| 0.74772
| 40
| 329
| 6.125
| 0.5
| 0.122449
| 0.163265
| 0.212245
| 0.318367
| 0.318367
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151976
| 329
| 13
| 60
| 25.307692
| 0.878136
| 0
| 0
| 0.222222
| 0
| 0
| 0.112462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
48f2442386725dcf9275dd815c2192dd0c4441ba
| 44
|
py
|
Python
|
tests/integration/common/__init__.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 8
|
2019-01-29T15:19:39.000Z
|
2020-07-16T03:55:36.000Z
|
tests/integration/common/__init__.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 46
|
2019-02-08T14:23:11.000Z
|
2021-04-06T13:45:10.000Z
|
tests/integration/common/__init__.py
|
rkm/bluebird
|
2325ebb151724d4444c092c095a040d7365dda79
|
[
"MIT"
] | 3
|
2019-05-06T14:18:07.000Z
|
2021-06-17T10:39:59.000Z
|
"""
Integration tests for any simulator
"""
| 11
| 35
| 0.704545
| 5
| 44
| 6.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 3
| 36
| 14.666667
| 0.837838
| 0.795455
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
48f6b2f59dffc69b82c1633e379b9f29af9bb4de
| 9,756
|
py
|
Python
|
models/layers/mesh_conv_legacy.py
|
eldadp100/The-Mesh-Transformer
|
b3ab18f774251feff1093040dfdcf7b836a43505
|
[
"MIT"
] | 1
|
2020-12-13T09:34:04.000Z
|
2020-12-13T09:34:04.000Z
|
models/layers/mesh_conv_legacy.py
|
eldadp100/The-Mesh-Transformer
|
b3ab18f774251feff1093040dfdcf7b836a43505
|
[
"MIT"
] | null | null | null |
models/layers/mesh_conv_legacy.py
|
eldadp100/The-Mesh-Transformer
|
b3ab18f774251feff1093040dfdcf7b836a43505
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
class MeshLinearLayer(nn.Module):
    """Apply one shared linear layer to the per-edge features of each mesh.

    forward() expects x shaped (batch, edges, input_size) and returns a
    tensor shaped (batch, edges, output_size).
    """

    def __init__(self, input_size, output_size, bias=True):
        super(MeshLinearLayer, self).__init__()
        self.lin = nn.Linear(input_size, output_size, bias=bias)
        self.out_size = output_size

    def forward(self, x):
        # x = x.squeeze(-1)
        # FIX(review): allocate the output on the input's device/dtype;
        # the original torch.empty(...) used CPU/float32 defaults, which
        # fails for CUDA (or non-default-dtype) inputs.
        out = torch.empty(x.shape[0], x.shape[1], self.out_size,
                          device=x.device, dtype=x.dtype)
        for i in range(x.shape[0]):
            out[i] = self.lin(x[i])
        return out
class MeshEdgeEmbeddingLayer(nn.Module):
    """Embed raw per-edge features with a learned linear projection.

    Based on the squeeze/permute chain, forward() appears to expect x
    shaped (batch, channels, edges, 1) and returns
    (batch, embedding_size, edges) -- confirm against callers.
    Very important - who said that a-c is meaningfull at first layer...
    """

    def __init__(self, input_size, embedding_size, bias=True):
        super(MeshEdgeEmbeddingLayer, self).__init__()
        self.lin = nn.Linear(input_size, embedding_size, bias=bias)

    def forward(self, x):
        # Drop the trailing singleton axis and move channels last so the
        # linear layer maps over the channel dimension.
        features = x.squeeze(-1).permute(0, 2, 1)
        embedded = self.lin(features)
        # Restore the channels-first layout.
        return embedded.permute(0, 2, 1)
#
# class MeshConv(nn.Module):
# """ Computes convolution between edges and 4 incident (1-ring) edge neighbors
# in the forward pass takes:
# x: edge features (Batch x Features x Edges)
# mesh: list of mesh data-structure (len(mesh) == Batch)
# and applies convolution
# """
#
# def __init__(self, edge_in_feat, edge_out_feat, kernel_size=1, bias=True):
# super(MeshConv, self).__init__()
# # we support only kernel_size=1...
# self.lin = MeshLinearLayer(edge_in_feat, edge_out_feat, bias=bias)
#
# def __call__(self, edge_f, mesh):
# return self.forward(edge_f, mesh)
#
# def forward(self, x, mesh):
# device = x.device
# x = x.squeeze(-1)
# G = torch.cat([self.pad_gemm(i, x.shape[2], x.device) for i in mesh], 0)
# # build 'neighborhood image' and apply convolution
# G = self.create_GeMM(x, G)
# G = G.permute(0, 2, 1, 3)
# x = G.reshape(G.shape[0], G.shape[1], -1)
# x = self.lin(x)
# return x.permute(0, 2, 1).unsqueeze(-1).to(device)
#
# def flatten_gemm_inds(self, Gi):
# (b, ne, nn) = Gi.shape
# ne += 1
# batch_n = torch.floor(torch.arange(b * ne, device=Gi.device).float() / ne).view(b, ne)
# add_fac = batch_n * ne
# add_fac = add_fac.view(b, ne, 1)
# add_fac = add_fac.repeat(1, 1, nn)
# # flatten Gi
# Gi = Gi.float() + add_fac[:, 1:, :]
# return Gi
#
# def create_GeMM(self, x, Gi):
# """ gathers the edge features (x) with from the 1-ring indices (Gi)
# applys symmetric functions to handle order invariance
# returns a 'fake image' which can use 2d convolution on
# output dimensions: Batch x Channels x Edges x 5
# """
# Gishape = Gi.shape
# # pad the first row of every sample in batch with zeros
# padding = torch.zeros((x.shape[0], x.shape[1], 1), requires_grad=True, device=x.device)
# # padding = padding.to(x.device)
# x = torch.cat((padding, x), dim=2)
# Gi = Gi + 1 # shift
#
# # first flatten indices
# Gi_flat = self.flatten_gemm_inds(Gi)
# Gi_flat = Gi_flat.view(-1).long()
# #
# odim = x.shape
# x = x.permute(0, 2, 1).contiguous()
# x = x.view(odim[0] * odim[2], odim[1])
#
# f = torch.index_select(x, dim=0, index=Gi_flat)
# f = f.view(Gishape[0], Gishape[1], Gishape[2], -1)
# f = f.permute(0, 3, 1, 2)
#
# # apply the symmetric functions for an equivariant conv
# x_1 = f[:, :, :, 1] + f[:, :, :, 3]
# x_2 = f[:, :, :, 2] + f[:, :, :, 4]
# x_3 = torch.abs(f[:, :, :, 1] - f[:, :, :, 3])
# x_4 = torch.abs(f[:, :, :, 2] - f[:, :, :, 4])
# # f = torch.stack([f[:, :, :, 0], x_1, x_2, x_3, x_4], dim=3)
# f = sum([f[:, :, :, i] for i in range(5)]) / 5.0
# return f.unsqueeze(-1)
#
# def pad_gemm(self, m, xsz, device):
# """ extracts one-ring neighbors (4x) -> m.gemm_edges
# which is of size #edges x 4
# add the edge_id itself to make #edges x 5
# then pad to desired size e.g., xsz x 5
# """
# padded_gemm = torch.tensor(m.gemm_edges, device=device).float()
# padded_gemm = padded_gemm.requires_grad_()
# padded_gemm = torch.cat((torch.arange(m.edges_count, device=device).float().unsqueeze(1), padded_gemm), dim=1)
# # pad using F
# padded_gemm = F.pad(padded_gemm, (0, 0, 0, xsz - m.edges_count), "constant", 0)
# padded_gemm = padded_gemm.unsqueeze(0)
# return padded_gemm
## Legacy
# class MeshConv(nn.Module):
# """
# Computes convolution between edges and 4 incident (1-ring) edge neighbors
# in the forward pass takes:
# x: edge features (Batch x Features x Edges)
# mesh: list of mesh data-structure (len(mesh) == Batch)
# and applies convolution
# """
#
# # TODO: separate SA layer and Conv layer - copy the methods of conv to SA.
# def __init__(self, in_size, out_size, bias=True, kernel_size=1):
# super(MeshConv, self).__init__()
# self.kernel_size = kernel_size # which ring
# self.fc = nn.Linear(in_size, out_size, bias=bias)
#
# def __call__(self, edge_f, mesh):
# return self.forward(edge_f, mesh)
#
# def forward(self, x, mesh):
# # attention phase (split later - easy)
# # treat each mesh by it self for now. The sequnce is the edges!!! (already sorted)
# # take window of 40 (better to depend on kernel_size but it isn't a must) and compute it...
#
# print(x.shape)
# batch_size = x.shape[0]
# num_edges = x.shape[1]
# num_features = x.shape[2]
# x = x.squeeze(-1)
# G = torch.cat([self.pad_gemm(i, x.shape[2], x.device) for i in mesh], 0)
# # build 'neighborhood' and apply convolution - similar to GCN conv
# f = self.create_GeMM(x, G)
# x_1 = f[:, :, :, 1] + f[:, :, :, 3]
# x_2 = f[:, :, :, 2] + f[:, :, :, 4]
# x_3 = torch.abs(f[:, :, :, 1] - f[:, :, :, 3])
# x_4 = torch.abs(f[:, :, :, 2] - f[:, :, :, 4])
# f = torch.stack([f[:, :, :, 0], x_1, x_2, x_3, x_4], dim=3)
# x = G.reshape(x.shape[0], -1)
#
# print(x.shape)
# x = self.fc(x)
# x = x.reshape(x.shape[0], 1, -1) # TODO: replace with unsqueeze(1)
# print(x)
# return x
#
# def flatten_gemm_inds(self, Gi):
# (b, ne, nn) = Gi.shape
# ne += 1
# batch_n = torch.floor(torch.arange(b * ne, device=Gi.device).float() / ne).view(b, ne)
# add_fac = batch_n * ne
# add_fac = add_fac.view(b, ne, 1)
# add_fac = add_fac.repeat(1, 1, nn)
# # flatten Gi
# Gi = Gi.float() + add_fac[:, 1:, :]
# return Gi
#
# def create_GeMM(self, x, Gi):
# """ gathers the edge features (x) with from the 1-ring indices (Gi)
# applys symmetric functions to handle order invariance
# returns a 'fake image' which can use 2d convolution on
# output dimensions: Batch x Channels x Edges x 5
# """
# Gishape = Gi.shape
# # pad the first row of every sample in batch with zeros
# padding = torch.zeros((x.shape[0], x.shape[1], 1), requires_grad=True, device=x.device)
# # padding = padding.to(x.device)
# x = torch.cat((padding, x), dim=2)
# Gi = Gi + 1 # shift
#
# # first flatten indices
# Gi_flat = self.flatten_gemm_inds(Gi)
# Gi_flat = Gi_flat.view(-1).long()
# #
# odim = x.shape
# x = x.permute(0, 2, 1).contiguous()
# x = x.view(odim[0] * odim[2], odim[1])
#
# f = torch.index_select(x, dim=0, index=Gi_flat)
# f = f.view(Gishape[0], Gishape[1], Gishape[2], -1)
# f = f.permute(0, 3, 1, 2)
#
# # _f = f[:, :, :, 0]
# # if self.kernel_size == 1:
# # for i in range(1, 5):
# # _f += f[:, :, :, i]
# # _f /= 5.0
# # elif self.kernel_size == 2:
# # if 8: # TODO: detect if ring is 8 edges or 12 edges
# # for i in range(1, 5):
# # _f += f[:, :, :, i]
# # _f /= 8.0
# # elif 12:
# # for i in range(1, 5):
# # _f += f[:, :, :, i]
# # _f /= 12.0
# # else:
# # raise Exception(f"kernel size={self.kernel_size} not supported. kernel_size should be 1 or 2")
#
# # apply the symmetric functions for an equivariant conv
# x_1 = f[:, :, :, 1] + f[:, :, :, 3]
# x_2 = f[:, :, :, 2] + f[:, :, :, 4]
# x_3 = torch.abs(f[:, :, :, 1] - f[:, :, :, 3])
# x_4 = torch.abs(f[:, :, :, 2] - f[:, :, :, 4])
# f = torch.stack([f[:, :, :, 0], x_1, x_2, x_3, x_4], dim=3)
# # print(_f.shape)
# # return _f
# return f
#
# def pad_gemm(self, m, xsz, device):
# """ extracts one-ring neighbors (4x) -> m.gemm_edges
# which is of size #edges x 4
# add the edge_id itself to make #edges x 5
# then pad to desired size e.g., xsz x 5
# """
# padded_gemm = torch.tensor(m.gemm_edges, device=device).float()
# padded_gemm = padded_gemm.requires_grad_()
# padded_gemm = torch.cat((torch.arange(m.edges_count, device=device).float().unsqueeze(1), padded_gemm), dim=1)
# # pad using F
# padded_gemm = F.pad(padded_gemm, (0, 0, 0, xsz - m.edges_count), "constant", 0)
# padded_gemm = padded_gemm.unsqueeze(0)
# return padded_gemm
#
| 34.473498
| 120
| 0.527778
| 1,409
| 9,756
| 3.518808
| 0.149752
| 0.040339
| 0.009883
| 0.012102
| 0.752521
| 0.708955
| 0.69524
| 0.69524
| 0.672247
| 0.672247
| 0
| 0.030925
| 0.310578
| 9,756
| 282
| 121
| 34.595745
| 0.706215
| 0.857421
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003546
| 0
| 1
| 0.190476
| false
| 0
| 0.142857
| 0
| 0.52381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
5b07e68516001752a6feeb460c3254842b727074
| 299
|
py
|
Python
|
viberio/types/messages/picture_message.py
|
vorlov-bash/viberio
|
8bea3bb4f39f68e9b83f1d1876d293b641ee6a06
|
[
"MIT"
] | 18
|
2019-02-20T15:24:21.000Z
|
2022-03-24T12:48:30.000Z
|
viberio/types/messages/picture_message.py
|
vorlov-bash/viberio
|
8bea3bb4f39f68e9b83f1d1876d293b641ee6a06
|
[
"MIT"
] | null | null | null |
viberio/types/messages/picture_message.py
|
vorlov-bash/viberio
|
8bea3bb4f39f68e9b83f1d1876d293b641ee6a06
|
[
"MIT"
] | 10
|
2019-04-05T17:09:03.000Z
|
2021-09-17T17:17:08.000Z
|
import attr
from .message import TypedMessage
@attr.s
class PictureMessage(TypedMessage):
type: str = attr.ib(default='picture')
text: str = attr.ib(default=None)
media: str = attr.ib(default=None)
thumbnail: str = attr.ib(default=None)
file_name: str = attr.ib(default=None)
| 23
| 42
| 0.698997
| 42
| 299
| 4.952381
| 0.452381
| 0.168269
| 0.216346
| 0.384615
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 299
| 12
| 43
| 24.916667
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.023411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.222222
| 0
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
5b0832506bd3d3f4c39d9d63e37f0e61307be13f
| 146
|
py
|
Python
|
WeatherPy/api_keys.py
|
cphyland/python-api-challenge
|
f4c0aa702e526416a748a6d4052e581068a23a87
|
[
"ADSL"
] | null | null | null |
WeatherPy/api_keys.py
|
cphyland/python-api-challenge
|
f4c0aa702e526416a748a6d4052e581068a23a87
|
[
"ADSL"
] | null | null | null |
WeatherPy/api_keys.py
|
cphyland/python-api-challenge
|
f4c0aa702e526416a748a6d4052e581068a23a87
|
[
"ADSL"
] | null | null | null |
# OpenWeatherMap API Key
weather_api_key = "d3bc8c276acb9f0ad7660e4353716e90"
# Google API Key
g_key = "AIzaSyDibsDuYSaDy7VFjswzeuRETk5kiCuRBhs"
| 24.333333
| 52
| 0.842466
| 13
| 146
| 9.230769
| 0.615385
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167939
| 0.10274
| 146
| 5
| 53
| 29.2
| 0.748092
| 0.253425
| 0
| 0
| 0
| 0
| 0.669811
| 0.669811
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5b11649b734e1e317d813a3787e6cdc71a1a2c85
| 248
|
py
|
Python
|
run.py
|
thewarsawpakt/PaktMonitor
|
06049994893f884260cf35f35cbf53e3974b18fb
|
[
"MIT"
] | null | null | null |
run.py
|
thewarsawpakt/PaktMonitor
|
06049994893f884260cf35f35cbf53e3974b18fb
|
[
"MIT"
] | null | null | null |
run.py
|
thewarsawpakt/PaktMonitor
|
06049994893f884260cf35f35cbf53e3974b18fb
|
[
"MIT"
] | null | null | null |
from paktmonitor import paktmonitor
import sys
if __name__ == "__main__":
try:
paktmonitor.run("127.0.0.1", sys.argv[1])
except IndexError: # there should be a neater way to do this
paktmonitor.run("127.0.0.1", 5000)
| 22.545455
| 64
| 0.649194
| 37
| 248
| 4.135135
| 0.675676
| 0.222222
| 0.222222
| 0.235294
| 0.261438
| 0.261438
| 0
| 0
| 0
| 0
| 0
| 0.089474
| 0.233871
| 248
| 10
| 65
| 24.8
| 0.715789
| 0.157258
| 0
| 0
| 0
| 0
| 0.125604
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d29fd2df283b4e5fb1fac45453d02c4664851c2d
| 185
|
py
|
Python
|
guest_user/apps.py
|
blag/django-guest-user
|
2e442b7d7ecab0a87d890cf3e6f63f35df9bacdc
|
[
"MIT"
] | 11
|
2021-11-03T01:22:36.000Z
|
2022-03-20T17:33:37.000Z
|
guest_user/apps.py
|
blag/django-guest-user
|
2e442b7d7ecab0a87d890cf3e6f63f35df9bacdc
|
[
"MIT"
] | 1
|
2021-12-26T09:46:43.000Z
|
2021-12-26T09:46:43.000Z
|
guest_user/apps.py
|
blag/django-guest-user
|
2e442b7d7ecab0a87d890cf3e6f63f35df9bacdc
|
[
"MIT"
] | 3
|
2021-12-26T08:40:07.000Z
|
2022-02-27T16:58:55.000Z
|
from django.apps import AppConfig
class GuestUserConfig(AppConfig):
name = "guest_user"
verbose_name = "Guest User"
def ready(self):
from . import checks # noqa
| 18.5
| 36
| 0.675676
| 22
| 185
| 5.590909
| 0.727273
| 0.146341
| 0.211382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243243
| 185
| 9
| 37
| 20.555556
| 0.878571
| 0.021622
| 0
| 0
| 0
| 0
| 0.111732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d2a9caa5807e67c7521e99e74d18fa12faae4875
| 133
|
py
|
Python
|
jsonpathTransformation/__init__.py
|
patricerosay/jsonpathTransformation
|
0effbc765a75d78cce29233a1840342e527e3587
|
[
"MIT"
] | null | null | null |
jsonpathTransformation/__init__.py
|
patricerosay/jsonpathTransformation
|
0effbc765a75d78cce29233a1840342e527e3587
|
[
"MIT"
] | null | null | null |
jsonpathTransformation/__init__.py
|
patricerosay/jsonpathTransformation
|
0effbc765a75d78cce29233a1840342e527e3587
|
[
"MIT"
] | null | null | null |
name = "jsonpathTransformation"
from .jsonpathTransformation import parseJsonPathTransformationFile,extractFromFileJsonPathExpression
| 66.5
| 101
| 0.917293
| 7
| 133
| 17.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045113
| 133
| 2
| 101
| 66.5
| 0.96063
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 0.164179
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
d2ceff7ff55e9097f11f6e1329a5263f1244d604
| 794
|
py
|
Python
|
pytorch_pfn_extras/onnx/__init__.py
|
belltailjp/pytorch-pfn-extras
|
8ce08d8e6bc67502be8e0fd5053a8c84e4df5e6a
|
[
"MIT"
] | null | null | null |
pytorch_pfn_extras/onnx/__init__.py
|
belltailjp/pytorch-pfn-extras
|
8ce08d8e6bc67502be8e0fd5053a8c84e4df5e6a
|
[
"MIT"
] | null | null | null |
pytorch_pfn_extras/onnx/__init__.py
|
belltailjp/pytorch-pfn-extras
|
8ce08d8e6bc67502be8e0fd5053a8c84e4df5e6a
|
[
"MIT"
] | null | null | null |
try:
from pytorch_pfn_extras.onnx.export_testcase import export # NOQA
from pytorch_pfn_extras.onnx.export_testcase import export_testcase # NOQA
from pytorch_pfn_extras.onnx.export_testcase import is_large_tensor # NOQA
from pytorch_pfn_extras.onnx.export_testcase import LARGE_TENSOR_DATA_THRESHOLD # NOQA
from pytorch_pfn_extras.onnx.annotate import annotate # NOQA
from pytorch_pfn_extras.onnx.annotate import apply_annotation # NOQA
from pytorch_pfn_extras.onnx.annotate import scoped_anchor # NOQA
from pytorch_pfn_extras.onnx._as_output import as_output # NOQA
from pytorch_pfn_extras.onnx._grad import grad # NOQA
from pytorch_pfn_extras.onnx.load import load_model # NOQA
available = True
except ImportError:
available = False
| 52.933333
| 91
| 0.797229
| 112
| 794
| 5.321429
| 0.267857
| 0.184564
| 0.234899
| 0.33557
| 0.687919
| 0.687919
| 0.54698
| 0.54698
| 0.33557
| 0
| 0
| 0
| 0.15995
| 794
| 14
| 92
| 56.714286
| 0.893553
| 0.061713
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.785714
| 0
| 0.785714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2ec2160dee0d73f30834b81d6bc397a3f80ec87
| 118
|
py
|
Python
|
api/models.py
|
Stepan91/utk_api
|
f917afc9019711f8d8643ebea88eed84f33c449a
|
[
"MIT"
] | null | null | null |
api/models.py
|
Stepan91/utk_api
|
f917afc9019711f8d8643ebea88eed84f33c449a
|
[
"MIT"
] | null | null | null |
api/models.py
|
Stepan91/utk_api
|
f917afc9019711f8d8643ebea88eed84f33c449a
|
[
"MIT"
] | null | null | null |
from django.db import models
class Picture(models.Model):
image = models.ImageField(verbose_name='Изображение')
| 19.666667
| 57
| 0.771186
| 15
| 118
| 6
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 118
| 5
| 58
| 23.6
| 0.873786
| 0
| 0
| 0
| 0
| 0
| 0.09322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d2f969bcdac465dc1662ebed4a289d897b9224e4
| 3,741
|
py
|
Python
|
examples/wmels_trive_on_diagonal.py
|
parkertomf/WrightTools
|
09642b868fb51dab032470e8a6bc9c9fd5a541d4
|
[
"MIT"
] | null | null | null |
examples/wmels_trive_on_diagonal.py
|
parkertomf/WrightTools
|
09642b868fb51dab032470e8a6bc9c9fd5a541d4
|
[
"MIT"
] | null | null | null |
examples/wmels_trive_on_diagonal.py
|
parkertomf/WrightTools
|
09642b868fb51dab032470e8a6bc9c9fd5a541d4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
WMELs: TRIVE on diagonal
=========================
Draw WMELs for TRIVE on diagonal.
"""
import matplotlib.pyplot as plt
import WrightTools.diagrams.WMEL as WMEL
artist = WMEL.Artist(size=[6, 3], energies=[0., .5, 1.], state_names=["g", "a", "b", "a+b"])
artist.label_rows([r"$\mathrm{\alpha}$", r"$\mathrm{\beta}$", r"$\mathrm{\gamma}$"])
artist.label_columns(["I", "II", "III", "IV", "V", "VI"])
artist.clear_diagram([1, 2])
artist.clear_diagram([3, 2])
# pw1 alpha
artist.add_arrow([0, 0], 0, [0, 1], "ket", "1")
artist.add_arrow([0, 0], 1, [0, 1], "bra", "-2")
artist.add_arrow([0, 0], 2, [1, 0], "bra", "2'")
artist.add_arrow([0, 0], 3, [1, 0], "out")
# pw1 beta
artist.add_arrow([0, 1], 0, [0, 1], "ket", "1")
artist.add_arrow([0, 1], 1, [0, 1], "bra", "-2")
artist.add_arrow([0, 1], 2, [1, 2], "ket", "2'")
artist.add_arrow([0, 1], 3, [2, 1], "out")
# pw1 gamma
artist.add_arrow([0, 2], 0, [0, 1], "ket", "1")
artist.add_arrow([0, 2], 1, [1, 0], "ket", "-2")
artist.add_arrow([0, 2], 2, [0, 1], "ket", "2'")
artist.add_arrow([0, 2], 3, [1, 0], "out")
# pw2 alpha
artist.add_arrow([1, 0], 0, [0, 1], "ket", "1")
artist.add_arrow([1, 0], 1, [1, 2], "ket", "2'")
artist.add_arrow([1, 0], 2, [2, 1], "ket", "-2")
artist.add_arrow([1, 0], 3, [1, 0], "out")
# pw2 beta
artist.add_arrow([1, 1], 0, [0, 1], "ket", "1")
artist.add_arrow([1, 1], 1, [1, 2], "ket", "2'")
artist.add_arrow([1, 1], 2, [0, 1], "bra", "-2")
artist.add_arrow([1, 1], 3, [2, 1], "out")
# pw3 alpha
artist.add_arrow([2, 0], 0, [0, 1], "bra", "-2")
artist.add_arrow([2, 0], 1, [0, 1], "ket", "1")
artist.add_arrow([2, 0], 2, [1, 0], "bra", "2'")
artist.add_arrow([2, 0], 3, [1, 0], "out")
# pw3 beta
artist.add_arrow([2, 1], 0, [0, 1], "bra", "-2")
artist.add_arrow([2, 1], 1, [0, 1], "ket", "1")
artist.add_arrow([2, 1], 2, [1, 2], "ket", "2'")
artist.add_arrow([2, 1], 3, [2, 1], "out")
# pw3 gamma
artist.add_arrow([2, 2], 0, [0, 1], "bra", "-2")
artist.add_arrow([2, 2], 1, [1, 0], "bra", "1")
artist.add_arrow([2, 2], 2, [0, 1], "ket", "2'")
artist.add_arrow([2, 2], 3, [1, 0], "out")
# pw4 alpha
artist.add_arrow([3, 0], 0, [0, 1], "ket", "2'")
artist.add_arrow([3, 0], 1, [1, 2], "ket", "1")
artist.add_arrow([3, 0], 2, [2, 1], "ket", "-2")
artist.add_arrow([3, 0], 3, [1, 0], "out")
# pw4 beta
artist.add_arrow([3, 1], 0, [0, 1], "ket", "2'")
artist.add_arrow([3, 1], 1, [1, 2], "ket", "1")
artist.add_arrow([3, 1], 2, [0, 1], "bra", "-2")
artist.add_arrow([3, 1], 3, [2, 1], "out")
# pw5 alpha
artist.add_arrow([4, 0], 0, [0, 1], "bra", "-2")
artist.add_arrow([4, 0], 1, [1, 0], "bra", "2'")
artist.add_arrow([4, 0], 2, [0, 1], "ket", "1")
artist.add_arrow([4, 0], 3, [1, 0], "out")
# pw5 beta
artist.add_arrow([4, 1], 0, [0, 1], "bra", "-2")
artist.add_arrow([4, 1], 1, [0, 1], "ket", "2'")
artist.add_arrow([4, 1], 2, [1, 2], "ket", "1")
artist.add_arrow([4, 1], 3, [2, 1], "out")
# pw5 gamma
artist.add_arrow([4, 2], 0, [0, 1], "bra", "-2")
artist.add_arrow([4, 2], 1, [0, 1], "ket", "2'")
artist.add_arrow([4, 2], 2, [1, 0], "bra", "1")
artist.add_arrow([4, 2], 3, [1, 0], "out")
# pw6 alpha
artist.add_arrow([5, 0], 0, [0, 1], "ket", "2'")
artist.add_arrow([5, 0], 1, [1, 0], "ket", "-2")
artist.add_arrow([5, 0], 2, [0, 1], "ket", "1")
artist.add_arrow([5, 0], 3, [1, 0], "out")
# pw6 beta
artist.add_arrow([5, 1], 0, [0, 1], "ket", "2'")
artist.add_arrow([5, 1], 1, [0, 1], "bra", "-2")
artist.add_arrow([5, 1], 2, [1, 2], "ket", "1")
artist.add_arrow([5, 1], 3, [2, 1], "out")
# pw6 beta
artist.add_arrow([5, 2], 0, [0, 1], "ket", "2'")
artist.add_arrow([5, 2], 1, [0, 1], "bra", "-2")
artist.add_arrow([5, 2], 2, [1, 0], "bra", "1")
artist.add_arrow([5, 2], 3, [1, 0], "out")
artist.plot()
plt.show()
| 31.436975
| 92
| 0.520984
| 732
| 3,741
| 2.568306
| 0.083333
| 0.306383
| 0.476596
| 0.255319
| 0.795213
| 0.632979
| 0.60266
| 0.54734
| 0.523404
| 0
| 0
| 0.123666
| 0.148356
| 3,741
| 118
| 93
| 31.70339
| 0.466416
| 0.069767
| 0
| 0
| 0
| 0
| 0.09809
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027397
| 0
| 0.027397
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9605f542aea4e680b1022bb43c8c9f60eb69f924
| 87
|
py
|
Python
|
src/postss/apps.py
|
codeforiraqprojects/-aqeel97---Wadhefa-
|
b0871c50a7e04357af2eecddea48df03464ce0bb
|
[
"bzip2-1.0.6"
] | null | null | null |
src/postss/apps.py
|
codeforiraqprojects/-aqeel97---Wadhefa-
|
b0871c50a7e04357af2eecddea48df03464ce0bb
|
[
"bzip2-1.0.6"
] | null | null | null |
src/postss/apps.py
|
codeforiraqprojects/-aqeel97---Wadhefa-
|
b0871c50a7e04357af2eecddea48df03464ce0bb
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.apps import AppConfig
class PostssConfig(AppConfig):
name = 'postss'
| 14.5
| 33
| 0.747126
| 10
| 87
| 6.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 87
| 5
| 34
| 17.4
| 0.902778
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.