hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4d89d9d7fd8d29364892c3b9f633da91e6542bba
| 226
|
py
|
Python
|
elastica/memory_block/__init__.py
|
yeonsu-jung/PyElastica
|
fee87b9da22e310ff925c16fdc839bf8405c51a4
|
[
"MIT"
] | null | null | null |
elastica/memory_block/__init__.py
|
yeonsu-jung/PyElastica
|
fee87b9da22e310ff925c16fdc839bf8405c51a4
|
[
"MIT"
] | 1
|
2022-01-06T11:30:20.000Z
|
2022-02-07T07:11:22.000Z
|
elastica/memory_block/__init__.py
|
yeonsu-jung/PyElastica
|
fee87b9da22e310ff925c16fdc839bf8405c51a4
|
[
"MIT"
] | null | null | null |
__all__ = [
"MemoryBlockCosseratRod",
"MemoryBlockRigidBody",
]
from elastica.memory_block.memory_block_rod import MemoryBlockCosseratRod
from elastica.memory_block.memory_block_rigid_body import MemoryBlockRigidBody
| 28.25
| 78
| 0.840708
| 22
| 226
| 8.136364
| 0.5
| 0.24581
| 0.201117
| 0.256983
| 0.379888
| 0.379888
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10177
| 226
| 7
| 79
| 32.285714
| 0.881773
| 0
| 0
| 0
| 0
| 0
| 0.185841
| 0.097345
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
4d95d74b01c940a61c9183519a82bb88e1d1ca37
| 27
|
py
|
Python
|
autoscalingsim/scaling/policiesbuilder/metric/scaling_aspect_calculation/calculators/learning_based/quality_metrics/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | 6
|
2021-03-10T16:23:10.000Z
|
2022-01-14T04:57:46.000Z
|
autoscalingsim/scaling/policiesbuilder/metric/scaling_aspect_calculation/calculators/learning_based/quality_metrics/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | null | null | null |
autoscalingsim/scaling/policiesbuilder/metric/scaling_aspect_calculation/calculators/learning_based/quality_metrics/__init__.py
|
Remit/autoscaling-simulator
|
091943c0e9eedf9543e9305682a067ab60f56def
|
[
"MIT"
] | 1
|
2022-01-14T04:57:55.000Z
|
2022-01-14T04:57:55.000Z
|
from . import scaled_error
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4db138296cf00b9bd3b1e6d520738906aa29187d
| 12,834
|
py
|
Python
|
test/organism_test.py
|
MoffMade/python-apollo
|
3cc61458cf5c20bd44fde656b8364417b915cfb8
|
[
"MIT"
] | null | null | null |
test/organism_test.py
|
MoffMade/python-apollo
|
3cc61458cf5c20bd44fde656b8364417b915cfb8
|
[
"MIT"
] | 1
|
2020-08-25T00:16:42.000Z
|
2020-08-25T00:16:42.000Z
|
test/organism_test.py
|
MoffMade/python-apollo
|
3cc61458cf5c20bd44fde656b8364417b915cfb8
|
[
"MIT"
] | null | null | null |
import json
import time
from . import ApolloTestCase, wa
class OrganismTest(ApolloTestCase):
def test_get_organisms(self):
orgs = wa.organisms.get_organisms()
assert len(orgs) >= 3
first_org = orgs[0]
assert 'nonDefaultTranslationTable' in first_org
assert 'annotationCount' in first_org
assert 'commonName' in first_org
assert 'obsolete' in first_org
assert 'id' in first_org
assert 'publicMode' in first_org
assert 'valid' in first_org
# deprecated
# assert 'currentOrganism' in first_org
assert 'sequences' in first_org
assert 'directory' in first_org
assert 'blatdb' in first_org
assert 'genus' in first_org
assert 'species' in first_org
assert 'metadata' in first_org
# when testing locally this could be something else
assert 'org' in first_org['directory']
# assert '/data/org' in first_org['directory']
assert first_org['commonName'] in ['test_organism', 'alt_org', 'org3', 'org4']
def test_get_organism_creator(self):
orgs = wa.organisms.get_organisms()
org_id = orgs[0]['id']
creator = wa.organisms.get_organism_creator(str(org_id))
assert 'creator' in creator
def test_show_organism(self):
orgs = wa.organisms.get_organisms()
org_id = orgs[0]['id']
org_info = wa.organisms.show_organism(org_id)
assert org_info == orgs[0]
def test_show_organism_cn(self):
orgs = wa.organisms.get_organisms()
org_cn = orgs[0]['commonName']
org_info = wa.organisms.show_organism(org_cn)
assert org_info == orgs[0]
def test_get_sequences(self):
orgs = wa.organisms.get_organisms()
org_id = orgs[0]['id']
seqs = wa.organisms.get_sequences(org_id)
assert 'sequences' in seqs
assert seqs['sequences'][0]['name'] == 'Merlin'
assert seqs['sequences'][0]['length'] == 172788
assert seqs['sequences'][0]['start'] == 0
assert seqs['sequences'][0]['end'] == 172788
def test_update_metadata(self):
orgs = wa.organisms.get_organisms()
org_id = orgs[0]['id']
res = wa.organisms.update_metadata(org_id, {'some': 'metadata'})
assert res == {}
time.sleep(3)
org_info = wa.organisms.show_organism(org_id)
assert json.loads(org_info['metadata']) == {'some': 'metadata'}
def test_delete_organism(self):
org_info = self.waitOrgCreated('temp_org')
wa.organisms.delete_organism(org_info['id'])
self.waitOrgDeleted('temp_org')
orgs = wa.organisms.get_organisms()
for org in orgs:
assert org['commonName'] != 'temp_org'
def test_delete_organism_cn(self):
wa.organisms.delete_organism('temp_org')
self.waitOrgDeleted('temp_org')
orgs = wa.organisms.get_organisms()
for org in orgs:
assert org['commonName'] != 'temp_org'
def test_delete_features(self):
wa.annotations.load_gff3('temp_org', 'test-data/merlin.gff')
org_info = wa.organisms.show_organism('temp_org')
feats_before = wa.annotations.get_features(org_info['id'], 'Merlin')
assert 'features' in feats_before
assert len(feats_before['features']) > 0
wa.organisms.delete_features(org_info['id'])
feats_after = wa.annotations.get_features(org_info['id'], 'Merlin')
assert 'features' in feats_after
assert len(feats_after['features']) == 0
def test_delete_features_cn(self):
wa.annotations.load_gff3('temp_org', 'test-data/merlin.gff')
org_info = wa.organisms.show_organism('temp_org')
feats_before = wa.annotations.get_features(org_info['id'], 'Merlin')
assert 'features' in feats_before
assert len(feats_before['features']) > 0
wa.organisms.delete_features('temp_org')
feats_after = wa.annotations.get_features(org_info['id'], 'Merlin')
assert 'features' in feats_after
assert len(feats_after['features']) == 0
def test_update_organism(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
wa.organisms.update_organism(org_info['id'], 'temp_org', other_org_info['directory'], species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 1
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 1
seq = seqs[0]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172788
def test_update_organism_noreload(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
wa.organisms.update_organism(org_info['id'], 'temp_org', other_org_info['directory'], species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False, no_reload_sequences=True)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 1
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 1
seq = seqs[0]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172788
def test_update_organism_newseq(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
new_dir = org_info['directory'].replace('org2', 'org_update_newseq')
wa.organisms.update_organism(org_info['id'], 'temp_org', new_dir, species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 2
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 2
seq = seqs[0]
assert seq['name'] == 'Anotherseq'
assert seq['length'] == 4730
seq = seqs[1]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172788
def test_update_organism_changedseq(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
new_dir = org_info['directory'].replace('org2', 'org_update_changedseq')
wa.organisms.update_organism(org_info['id'], 'temp_org', new_dir, species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 2
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 2
seq = seqs[0]
assert seq['name'] == 'Anotherseq'
assert seq['length'] == 4730
seq = seqs[1]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172188
def test_update_organism_newseq_noreload(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
new_dir = org_info['directory'].replace('org2', 'org_update_newseq')
wa.organisms.update_organism(org_info['id'], 'temp_org', new_dir, species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False, no_reload_sequences=True)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 1
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 1
seq = seqs[0]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172788
def test_update_organism_changedseq_noreload(self):
other_org_info = wa.organisms.show_organism('test_organism')
org_info = wa.organisms.show_organism('temp_org')
new_dir = org_info['directory'].replace('org2', 'org_update_changedseq')
wa.organisms.update_organism(org_info['id'], 'temp_org', new_dir, species='updatedspecies', genus='updatedgenus', blatdb=other_org_info['directory'] + "/seq/genome.2bit", public=False, no_reload_sequences=True)
# Returns useless stuff
time.sleep(3)
org_info = wa.organisms.show_organism('temp_org')
assert org_info['species'] == 'updatedspecies'
assert org_info['genus'] == 'updatedgenus'
assert org_info['blatdb'] == other_org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
assert org_info['sequences'] == 1
seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
assert len(seqs) == 1
seq = seqs[0]
assert seq['name'] == 'Merlin'
assert seq['length'] == 172788
def test_add_organism(self):
org_info = wa.organisms.show_organism('test_organism')
meta = {"bla": "bli"}
res = wa.organisms.add_organism('some_new_org', org_info['directory'], species='newspecies', genus='newgenus', blatdb=org_info['directory'] + "/seq/genome.2bit", metadata=meta)
assert res['species'] == 'newspecies'
assert res['genus'] == 'newgenus'
assert res['blatdb'] == org_info['directory'] + "/seq/genome.2bit"
meta_back = json.loads(res['metadata'])
assert 'bla' in meta_back and meta_back['bla'] == 'bli'
org_info = self.waitOrgCreated('some_new_org')
wa.organisms.delete_organism(org_info['id'])
assert org_info['species'] == 'newspecies'
assert org_info['genus'] == 'newgenus'
assert org_info['blatdb'] == org_info['directory'] + "/seq/genome.2bit"
assert not org_info['publicMode']
meta_back = json.loads(org_info['metadata'])
assert 'bla' in meta_back and meta_back['bla'] == 'bli'
def setUp(self):
# Make sure the organism is not already there
temp_org_info = wa.organisms.show_organism('temp_org')
if 'directory' in temp_org_info:
wa.organisms.delete_organism(temp_org_info['id'])
self.waitOrgDeleted('temp_org')
org_info = wa.organisms.show_organism('alt_org')
if 'directory' not in org_info:
# Should not happen, but let's be tolerant...
# Error received when it fails: {'error': 'No row with the given identifier exists: [org.bbop.apollo.Organism#1154]'}
time.sleep(1)
org_info = wa.organisms.show_organism('alt_org')
wa.organisms.add_organism('temp_org', org_info['directory'])
self.waitOrgCreated('temp_org')
def tearDown(self):
org_info = wa.organisms.show_organism('temp_org')
if org_info and 'id' in org_info:
wa.organisms.delete_organism(org_info['id'])
self.waitOrgDeleted('temp_org')
org_info = wa.organisms.show_organism('some_new_org')
if org_info and 'id' in org_info:
wa.organisms.delete_organism(org_info['id'])
self.waitOrgDeleted('some_new_org')
| 34.315508
| 238
| 0.639317
| 1,594
| 12,834
| 4.909034
| 0.088457
| 0.107348
| 0.036805
| 0.07361
| 0.781597
| 0.771246
| 0.747604
| 0.722684
| 0.706837
| 0.685751
| 0
| 0.012994
| 0.22643
| 12,834
| 373
| 239
| 34.407507
| 0.775181
| 0.037245
| 0
| 0.649351
| 0
| 0
| 0.190098
| 0.00551
| 0
| 0
| 0
| 0
| 0.419913
| 1
| 0.082251
| false
| 0
| 0.012987
| 0
| 0.099567
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4db88300c2802c32988838d07e14dc34b542410c
| 227
|
py
|
Python
|
worlddata/APISections/search.py
|
worlddata-ai/python-api
|
c4157a8a079993e4339c29dbb65a7e87390579b5
|
[
"MIT"
] | 1
|
2021-06-24T11:13:50.000Z
|
2021-06-24T11:13:50.000Z
|
worlddata/APISections/search.py
|
worlddata-ai/python-api
|
c4157a8a079993e4339c29dbb65a7e87390579b5
|
[
"MIT"
] | null | null | null |
worlddata/APISections/search.py
|
worlddata-ai/python-api
|
c4157a8a079993e4339c29dbb65a7e87390579b5
|
[
"MIT"
] | null | null | null |
from worlddata.APISections.base import WorldDataBase
class WorldDataSearch(WorldDataBase):
def search(self, search_text, **kwargs):
return self.call_api_post("search", search_text=search_text, kwargs=kwargs)
| 28.375
| 83
| 0.762115
| 27
| 227
| 6.222222
| 0.62963
| 0.178571
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145374
| 227
| 8
| 84
| 28.375
| 0.865979
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
150be877389d726657732105825bf14ad698f027
| 36
|
py
|
Python
|
LED2Net/Loss/__init__.py
|
zhigangjiang/LED2-Net
|
28528b2180d6af0caee54a60560b88dd0f218f1b
|
[
"MIT"
] | 57
|
2021-03-25T05:42:34.000Z
|
2022-03-30T02:50:30.000Z
|
LED2Net/Loss/__init__.py
|
zhigangjiang/LED2-Net
|
28528b2180d6af0caee54a60560b88dd0f218f1b
|
[
"MIT"
] | 8
|
2021-04-09T09:50:22.000Z
|
2022-02-17T17:36:27.000Z
|
LED2Net/Loss/__init__.py
|
zhigangjiang/LED2-Net
|
28528b2180d6af0caee54a60560b88dd0f218f1b
|
[
"MIT"
] | 6
|
2021-04-11T10:15:07.000Z
|
2022-03-31T06:56:56.000Z
|
from .DepthRender import RenderLoss
| 18
| 35
| 0.861111
| 4
| 36
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
128190e07054d033e666de598e662b7ee6ec7ca3
| 162
|
py
|
Python
|
mashupsim/strategy/StateMachine.py
|
irhawks/mashupsim
|
a990bd49c46d6d4f11a2570bd8862f59a2f22a7e
|
[
"Apache-2.0"
] | null | null | null |
mashupsim/strategy/StateMachine.py
|
irhawks/mashupsim
|
a990bd49c46d6d4f11a2570bd8862f59a2f22a7e
|
[
"Apache-2.0"
] | null | null | null |
mashupsim/strategy/StateMachine.py
|
irhawks/mashupsim
|
a990bd49c46d6d4f11a2570bd8862f59a2f22a7e
|
[
"Apache-2.0"
] | null | null | null |
class StrategyStateMachine :
def discovery() :
pass
def composition() :
pass
def usage() :
pass
def end() :
pass
| 14.727273
| 28
| 0.5
| 14
| 162
| 5.785714
| 0.571429
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.419753
| 162
| 10
| 29
| 16.2
| 0.861702
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| true
| 0.444444
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
421bed1489e9e50e1016293576bb387cd28072e7
| 39
|
py
|
Python
|
src/__init__.py
|
jackgoffinet/poe-vae
|
18ca2cd4cffe3259e19525c2dc65c84d7219e9d6
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
jackgoffinet/poe-vae
|
18ca2cd4cffe3259e19525c2dc65c84d7219e9d6
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
jackgoffinet/poe-vae
|
18ca2cd4cffe3259e19525c2dc65c84d7219e9d6
|
[
"MIT"
] | 1
|
2022-01-14T12:29:51.000Z
|
2022-01-14T12:29:51.000Z
|
from .param_maps import LIKELIHOOD_MAP
| 19.5
| 38
| 0.871795
| 6
| 39
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
42494cf229dca4e0978560eaed63edcbf96c7c64
| 1,529
|
py
|
Python
|
pychron/canvas/canvas2D/tests/calibration_item.py
|
ASUPychron/pychron
|
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
|
[
"Apache-2.0"
] | 31
|
2016-03-07T02:38:17.000Z
|
2022-02-14T18:23:43.000Z
|
pychron/canvas/canvas2D/tests/calibration_item.py
|
ASUPychron/pychron
|
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
|
[
"Apache-2.0"
] | 1,626
|
2015-01-07T04:52:35.000Z
|
2022-03-25T19:15:59.000Z
|
pychron/canvas/canvas2D/tests/calibration_item.py
|
UIllinoisHALPychron/pychron
|
f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc
|
[
"Apache-2.0"
] | 26
|
2015-05-23T00:10:06.000Z
|
2022-03-07T16:51:57.000Z
|
from __future__ import absolute_import
import unittest
from pychron.canvas.canvas2D.scene.primitives.calibration import CalibrationObject
class CalibrationObjectTestCase(unittest.TestCase):
def setUp(self):
self._cal_obj = CalibrationObject(cx=0, cy=0)
def test_calc_rotation_east_counter_clockwise(self):
rot = self._cal_obj.calculate_rotation(1, 1)
self.assertEqual(rot, 45.0)
def test_calc_rotation_west_counter_clockwise(self):
rot = self._cal_obj.calculate_rotation(-1, -1, "west")
self.assertEqual(rot, 45.0)
def test_calc_rotation_south_counter_clockwise(self):
rot = self._cal_obj.calculate_rotation(1, -1, "south")
self.assertEqual(rot, 45.0)
def test_calc_rotation_north_counter_clockwise(self):
rot = self._cal_obj.calculate_rotation(-1, 1, "north")
self.assertEqual(rot, 45.0)
def test_calc_rotation_east_clockwise(self):
rot = self._cal_obj.calculate_rotation(1, -1)
self.assertEqual(rot, -45.0)
def test_calc_rotation_west_clockwise(self):
rot = self._cal_obj.calculate_rotation(-1, 1, "west")
self.assertEqual(rot, -45.0)
def test_calc_rotation_south_clockwise(self):
rot = self._cal_obj.calculate_rotation(-1, -1, "south")
self.assertEqual(rot, -45.0)
def test_calc_rotation_north_clockwise(self):
rot = self._cal_obj.calculate_rotation(1, 1, "north")
self.assertEqual(rot, -45.0)
if __name__ == "__main__":
unittest.main()
| 33.23913
| 82
| 0.705036
| 206
| 1,529
| 4.868932
| 0.194175
| 0.062812
| 0.089731
| 0.095713
| 0.770688
| 0.770688
| 0.742772
| 0.742772
| 0.742772
| 0.723829
| 0
| 0.034483
| 0.184434
| 1,529
| 45
| 83
| 33.977778
| 0.769848
| 0
| 0
| 0.25
| 0
| 0
| 0.023545
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.28125
| false
| 0
| 0.09375
| 0
| 0.40625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
35eeea9b137490e3f8a0a82b973404020ef1d4f8
| 34
|
py
|
Python
|
src/minerva_ufrj/__init__.py
|
pedromxavier/minerva
|
304bec6b3cd85ab4ad123f0e14e18dfab8632cb2
|
[
"MIT"
] | 2
|
2020-07-11T20:04:52.000Z
|
2020-10-13T16:57:22.000Z
|
src/minerva_ufrj/__init__.py
|
pedromxavier/minerva
|
304bec6b3cd85ab4ad123f0e14e18dfab8632cb2
|
[
"MIT"
] | null | null | null |
src/minerva_ufrj/__init__.py
|
pedromxavier/minerva
|
304bec6b3cd85ab4ad123f0e14e18dfab8632cb2
|
[
"MIT"
] | null | null | null |
from .minerva import Minerva, main
| 34
| 34
| 0.823529
| 5
| 34
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c438a63bbe870f94b1f4a1a9d21f696bd40834a6
| 3,457
|
py
|
Python
|
keylib/keys.py
|
mdcovarr/encrypt-server
|
335f6417396418c73d19c9d8c804ae98cfab9fed
|
[
"MIT"
] | 1
|
2020-06-08T20:02:27.000Z
|
2020-06-08T20:02:27.000Z
|
keylib/keys.py
|
mdcovarr/encrypt-server
|
335f6417396418c73d19c9d8c804ae98cfab9fed
|
[
"MIT"
] | null | null | null |
keylib/keys.py
|
mdcovarr/encrypt-server
|
335f6417396418c73d19c9d8c804ae98cfab9fed
|
[
"MIT"
] | null | null | null |
"""
RSA Private/Public Key Parameters
----------------------------------
KEY_BIT_SIZE: bit size of keys
e: exponent
"""
KEY_BIT_SIZE = 4000
e = int("""130499359017053281556458431345533123778363781651270565436082977439613579949
5471732291742574936464294335429130852137501781519767363426995264206489
9070346494524853207171672967384456879272614309142468488188894961980326
4210652869136872341373046618500443845129257644095534241153647067619323
000920769040063242820133""".replace(" ", "").replace("\n", ""))
"""
Diffie Hellman Public keys
----------------------------------
g: generator
p: prime
"""
g = int("""9677178152764243356585979556264224589944191744979699073371576738861236
5663820546922607619786124954900448084138704336019707101781113070799068
5744514558595068941725067952556006237862391064159647193542530329259333
4424851756939418426847120076462424229265080004033026690789716709345894
8676163784692008959171172634206184380581278989999081666391528267108503
9813609522242829719587993249808317734238106660385861768230295679126590
8390972444782203928717828427457583267560097495187522617809715033399571
0124142927808606451916188467080375525692807503004072582957175996256741
6958199028585508053574180142683126826804771118716296486230523760774389
7157494791542352379311268259974895147341335235499016003307513390038990
1582196141853936279863966997543171337135092681583084518153432642302837
0436056697857918994988629688023563560002153140124962200937852164145182
1610847931627295268929335901602846813690082539801509776517015975714046
5455848263618069464889478247144935435822126939965077545376582476552939
5288811662441509565199205733657279155210616750060391443188845224391244
5982465119470715706942563826139640100216780957119233780885476576542097
8318327126238727841787217270826207296485682133095572761510633060271315""".replace(" ", "").replace("\n", ""))
p = int("""2773513095749167337576358874942831569385761553923082020361322269992944
8489006798120232791463013505228500900024049333039459029366992215417394
0703109337560451078293297821188778260938274928421790028940882569457077
8270715497001472804773372159699487464437256876108641279314813575799288
0353560828726390302647822163531592190925834713707675874151479095828997
9709275760692869280803757520668776451222054720062078905947201506921948
2248258148634825249349597280042484353178956233483223727571140311838306
9497997993896536595853659564600179648675284862073335665278820295284039
2441154268228992660874384047813295938635270043470524847835602162062324
6182957756186469188241103927864116660349640671385022766484753851141361
3324705366794734356249759513986782234719409680441184269264165474240174
7019497972779105025866714266206768504640255640079527841905839126323963
3600041551667467165519541808705130094613958692430907777974227738480151
9284479867895217795687886082284763600753200413473134257852188910038101
0022934537091672256327978299054218233790927484338926431601990283936699
4034965244475466733634646851920984543901636177633543005383561910647171
8158178526713140623881625988429186051133467385983636059069118372099145
33050012879383""".replace(" ", "").replace("\n", ""))
| 59.603448
| 117
| 0.832514
| 81
| 3,457
| 35.481481
| 0.753086
| 0.007307
| 0.015658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.889875
| 0.120046
| 3,457
| 57
| 118
| 60.649123
| 0.054898
| 0.032109
| 0
| 0
| 0
| 0
| 0.947123
| 0.836392
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c45070ee11eb8065c04150878e25b05069f38987
| 2,872
|
py
|
Python
|
vgg19.py
|
Azkel/NeuralNoodle
|
aa4710e18d6bdd77c0f5e7f5dd86f9b90f724e27
|
[
"MIT"
] | null | null | null |
vgg19.py
|
Azkel/NeuralNoodle
|
aa4710e18d6bdd77c0f5e7f5dd86f9b90f724e27
|
[
"MIT"
] | 5
|
2020-11-13T17:15:08.000Z
|
2022-02-09T23:26:48.000Z
|
vgg19.py
|
Azkel/NeuralNoodle
|
aa4710e18d6bdd77c0f5e7f5dd86f9b90f724e27
|
[
"MIT"
] | 1
|
2018-06-28T18:44:52.000Z
|
2018-06-28T18:44:52.000Z
|
# Taken from https://gist.github.com/baraldilorenzo/8d096f48a1be4a2d660d
# MaxPooling layers replated by AveragePooling layers, according to https://arxiv.org/abs/1508.06576
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, AveragePooling2D, ZeroPadding2D
from keras.optimizers import SGD
import cv2, numpy as np
def VGG_19(weights_path=None):
    """Build the 19-layer VGG network with average pooling.

    Layer names (``conv<block>_<index>``) match the original gist so that
    pretrained weights can be loaded by name.  MaxPooling is replaced by
    AveragePooling, as proposed for style transfer in arXiv:1508.06576.

    :param weights_path: optional path to a weights file to load into the model.
    :return: the assembled (uncompiled) ``Sequential`` model.
    """
    model = Sequential()
    # (filters, number of 3x3 conv layers) for each of the five VGG-19 blocks.
    block_spec = [(64, 2), (128, 2), (256, 4), (512, 4), (512, 4)]
    for block_no, (filters, n_convs) in enumerate(block_spec, start=1):
        for conv_no in range(1, n_convs + 1):
            if block_no == 1 and conv_no == 1:
                # Only the very first layer declares the input shape.
                model.add(ZeroPadding2D((1, 1), input_shape=(3, 224, 224)))
            else:
                model.add(ZeroPadding2D((1, 1)))
            model.add(Convolution2D(filters, 3, 3, activation='relu',
                                    name="conv%d_%d" % (block_no, conv_no)))
        model.add(AveragePooling2D((2, 2), strides=(2, 2)))
    # Fully-connected classifier head (1000-way softmax, ImageNet classes).
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(1000, activation='softmax'))
    if weights_path:
        model.load_weights(weights_path)
    return model
| 43.515152
| 101
| 0.680362
| 403
| 2,872
| 4.794045
| 0.183623
| 0.178054
| 0.093168
| 0.182195
| 0.754658
| 0.742754
| 0.742754
| 0.742754
| 0.742754
| 0.728778
| 0
| 0.100565
| 0.137883
| 2,872
| 65
| 102
| 44.184615
| 0.679725
| 0.059192
| 0
| 0.45283
| 0
| 0
| 0.070767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.09434
| 0
| 0.132075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c47679fda46f1f049313e1ff5866f97d4cf41485
| 115
|
py
|
Python
|
dassl/utils/__init__.py
|
weiliuxm/Dassl.pytorch
|
8084b66332623c7a2394ea1404f2d043ef415ebb
|
[
"MIT"
] | 563
|
2020-03-17T13:57:40.000Z
|
2022-03-31T02:38:47.000Z
|
dassl/utils/__init__.py
|
weiliuxm/Dassl.pytorch
|
8084b66332623c7a2394ea1404f2d043ef415ebb
|
[
"MIT"
] | 37
|
2020-05-21T02:12:47.000Z
|
2022-03-30T06:10:47.000Z
|
dassl/utils/__init__.py
|
weiliuxm/Dassl.pytorch
|
8084b66332623c7a2394ea1404f2d043ef415ebb
|
[
"MIT"
] | 99
|
2020-03-17T15:23:15.000Z
|
2022-03-27T14:52:30.000Z
|
from .tools import *
from .logger import *
from .meters import *
from .registry import *
from .torchtools import *
| 19.166667
| 25
| 0.73913
| 15
| 115
| 5.666667
| 0.466667
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 115
| 5
| 26
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c47da6485534db330586008c3a0111d74700d7c9
| 1,131
|
py
|
Python
|
test_op.py
|
HighCWu/denoising-diffusion-paddle
|
cdb75812e9d8ac29027ec0c482fb3c5b0ebbbcef
|
[
"MIT"
] | 10
|
2021-03-19T16:20:02.000Z
|
2021-05-19T10:57:16.000Z
|
test_op.py
|
HighCWu/denoising-diffusion-paddle
|
cdb75812e9d8ac29027ec0c482fb3c5b0ebbbcef
|
[
"MIT"
] | null | null | null |
test_op.py
|
HighCWu/denoising-diffusion-paddle
|
cdb75812e9d8ac29027ec0c482fb3c5b0ebbbcef
|
[
"MIT"
] | null | null | null |
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from model import UNet
from diffusion import make_beta_schedule, GaussianDiffusion
from config import config


def _smoke_test(conf, backward=False):
    """Run one diffusion training step for *conf* and print the loss.

    Builds the beta schedule, GaussianDiffusion wrapper and UNet from the
    given config, feeds a random batch at the configured resolution through
    ``p_loss`` with random timesteps, optionally backpropagates, and prints
    the scalar loss.

    :param conf: a config namespace with ``diffusion``, ``model``,
        ``training`` and ``dataset`` sections.
    :param backward: when True, also run ``loss.backward()`` to exercise
        the backward pass.
    """
    betas = make_beta_schedule(**conf.diffusion.beta_schedule)
    diffusion = GaussianDiffusion(betas)
    model = UNet(**conf.model)
    img = paddle.randn([
        conf.training.dataloader.batch_size,
        conf.model.in_channel,
        conf.dataset.resolution,
        conf.dataset.resolution,
    ])
    time = paddle.randint(
        0, conf.diffusion.beta_schedule.n_timestep, (img.shape[0],)
    )
    loss = diffusion.p_loss(model, img, time)
    if backward:
        loss.backward()
    print(loss.numpy())


# Forward-only check with the base diffusion config, then a forward+backward
# check with the improved config.  (Previously this was the same block of
# code copy-pasted twice; the duplication is now factored into _smoke_test.)
_smoke_test(config.diffusion)
_smoke_test(config.improved, backward=True)
| 27.585366
| 64
| 0.731211
| 148
| 1,131
| 5.466216
| 0.263514
| 0.103832
| 0.084054
| 0.123609
| 0.719407
| 0.719407
| 0.719407
| 0.719407
| 0.719407
| 0.719407
| 0
| 0.004175
| 0.152962
| 1,131
| 41
| 65
| 27.585366
| 0.840292
| 0
| 0
| 0.702703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.162162
| 0
| 0.162162
| 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
67919f574a12df10f9ff9cdda3eb3dfeae74018c
| 6,909
|
py
|
Python
|
test/test_table_reader.py
|
askainet/haystack
|
00aa1f41d7c21273d8c312a3fad0b51ddd446672
|
[
"Apache-2.0"
] | null | null | null |
test/test_table_reader.py
|
askainet/haystack
|
00aa1f41d7c21273d8c312a3fad0b51ddd446672
|
[
"Apache-2.0"
] | null | null | null |
test/test_table_reader.py
|
askainet/haystack
|
00aa1f41d7c21273d8c312a3fad0b51ddd446672
|
[
"Apache-2.0"
] | null | null | null |
import logging
import pandas as pd
import pytest
from haystack.schema import Document, Answer
from haystack.pipelines.base import Pipeline
def test_table_reader(table_reader):
    """A single query against a single table document returns the expected cell."""
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    doc = Document(content=pd.DataFrame(records), content_type="table")
    result = table_reader.predict(query="When was Di Caprio born?", documents=[doc])
    top_answer = result["answers"][0]
    assert top_answer.answer == "11 november 1974"
    assert top_answer.offsets_in_context[0].start == 7
    assert top_answer.offsets_in_context[0].end == 8
def test_table_reader_batch_single_query_single_doc_list(table_reader):
    """predict_batch with one query string and a flat doc list yields one list of answers."""
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    doc = Document(content=pd.DataFrame(records), content_type="table")
    result = table_reader.predict_batch(queries="When was Di Caprio born?", documents=[doc])
    answers = result["answers"]
    # Single query + flat document list -> a list holding one list of Answers.
    assert isinstance(answers, list)
    assert isinstance(answers[0], list)
    assert isinstance(answers[0][0], Answer)
    assert len(answers) == 1
def test_table_reader_batch_single_query_multiple_doc_lists(table_reader):
    """predict_batch with one query and a nested doc list yields one answer list per collection."""
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    doc = Document(content=pd.DataFrame(records), content_type="table")
    result = table_reader.predict_batch(queries="When was Di Caprio born?", documents=[[doc]])
    answers = result["answers"]
    # Single query + one nested collection -> one list of Answers.
    assert isinstance(answers, list)
    assert isinstance(answers[0], list)
    assert isinstance(answers[0][0], Answer)
    assert len(answers) == 1  # one collection of docs was passed
def test_table_reader_batch_multiple_queries_single_doc_list(table_reader):
    """predict_batch with several queries and a flat doc list nests answers one level deeper."""
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    doc = Document(content=pd.DataFrame(records), content_type="table")
    question = "When was Di Caprio born?"
    result = table_reader.predict_batch(
        queries=[question, question], documents=[doc]
    )
    answers = result["answers"]
    # Multiple queries + flat document list -> list of lists of lists of Answers.
    assert isinstance(answers, list)
    assert isinstance(answers[0], list)
    assert isinstance(answers[0][0], list)
    assert isinstance(answers[0][0][0], Answer)
    assert len(answers) == 2  # one entry per query
def test_table_reader_batch_multiple_queries_multiple_doc_lists(table_reader):
    """predict_batch with several queries, each paired with its own doc collection."""
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    table = pd.DataFrame(records)
    question = "When was Di Caprio born?"
    result = table_reader.predict_batch(
        queries=[question, question],
        documents=[
            [Document(content=table, content_type="table")],
            [Document(content=table, content_type="table")],
        ],
    )
    answers = result["answers"]
    # One query per document collection -> list of lists of Answers.
    assert isinstance(answers, list)
    assert isinstance(answers[0], list)
    assert isinstance(answers[0][0], Answer)
    assert len(answers) == 2  # one entry per (query, collection) pair
def test_table_reader_in_pipeline(table_reader):
    """The reader yields the same answer when wrapped as a Pipeline node."""
    pipe = Pipeline()
    pipe.add_node(table_reader, "TableReader", ["Query"])
    records = {
        "actors": ["brad pitt", "leonardo di caprio", "george clooney"],
        "age": ["58", "47", "60"],
        "number of movies": ["87", "53", "69"],
        "date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
    }
    doc = Document(content=pd.DataFrame(records), content_type="table")
    result = pipe.run(query="When was Di Caprio born?", documents=[doc])
    top_answer = result["answers"][0]
    assert top_answer.answer == "11 november 1974"
    assert top_answer.offsets_in_context[0].start == 7
    assert top_answer.offsets_in_context[0].end == 8
@pytest.mark.parametrize("table_reader", ["tapas"], indirect=True)
def test_table_reader_aggregation(table_reader):
    """TaPas aggregation queries: AVERAGE and SUM over the Height column."""
    heights = ["8848m", "8,611 m", "8 586m", "8 516 m", "8,485m"]
    records = {
        "Mountain": ["Mount Everest", "K2", "Kangchenjunga", "Lhotse", "Makalu"],
        "Height": heights,
    }
    docs = [Document(content=pd.DataFrame(records), content_type="table")]

    result = table_reader.predict(query="How tall are all mountains on average?", documents=docs)
    top_answer = result["answers"][0]
    assert top_answer.answer == "8609.2 m"
    assert top_answer.meta["aggregation_operator"] == "AVERAGE"
    assert top_answer.meta["answer_cells"] == heights

    result = table_reader.predict(query="How tall are all mountains together?", documents=docs)
    top_answer = result["answers"][0]
    assert top_answer.answer == "43046.0 m"
    assert top_answer.meta["aggregation_operator"] == "SUM"
    assert top_answer.meta["answer_cells"] == heights
def test_table_without_rows(caplog, table_reader):
    """A table document with no rows is skipped with a warning and yields no answers."""
    empty_table_doc = Document(content=pd.DataFrame(), content_type="table", id="no_rows")
    with caplog.at_level(logging.WARNING):
        predictions = table_reader.predict(query="test", documents=[empty_table_doc])
    assert "Skipping document with id 'no_rows'" in caplog.text
    assert len(predictions["answers"]) == 0
def test_text_document(caplog, table_reader):
    """A plain-text document is skipped with a warning and yields no answers."""
    text_doc = Document(content="text", id="text_doc")
    with caplog.at_level(logging.WARNING):
        predictions = table_reader.predict(query="test", documents=[text_doc])
    assert "Skipping document with id 'text_doc'" in caplog.text
    assert len(predictions["answers"]) == 0
| 42.913043
| 119
| 0.656824
| 874
| 6,909
| 5.078947
| 0.156751
| 0.111061
| 0.085154
| 0.096643
| 0.855373
| 0.850867
| 0.82879
| 0.796351
| 0.748141
| 0.748141
| 0
| 0.053695
| 0.183239
| 6,909
| 160
| 120
| 43.18125
| 0.732943
| 0.046027
| 0
| 0.624
| 0
| 0
| 0.255888
| 0
| 0
| 0
| 0
| 0
| 0.264
| 1
| 0.072
| false
| 0
| 0.04
| 0
| 0.112
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
67b8688a086df0dcfbc901847584a6dd4f4ce51f
| 254
|
py
|
Python
|
rados_deploy/internal/remoto/modules/package_install.py
|
MariskaIJpelaar/rados-deploy
|
4ffb467211c2b05d17d76c2423c72c0ee4d4ec99
|
[
"MIT"
] | null | null | null |
rados_deploy/internal/remoto/modules/package_install.py
|
MariskaIJpelaar/rados-deploy
|
4ffb467211c2b05d17d76c2423c72c0ee4d4ec99
|
[
"MIT"
] | 1
|
2022-02-08T10:07:18.000Z
|
2022-02-08T10:07:18.000Z
|
rados_deploy/internal/remoto/modules/package_install.py
|
MariskaIJpelaar/rados-deploy
|
4ffb467211c2b05d17d76c2423c72c0ee4d4ec99
|
[
"MIT"
] | 2
|
2021-10-05T12:24:53.000Z
|
2021-12-22T09:41:07.000Z
|
def remote_pip_install_simple(name, silent):
    """Install *name* with the defaults: user-mode pip3 under python3.

    Thin convenience wrapper around :func:`remote_pip_install`.
    """
    return remote_pip_install(name, usermode=True, py='python3', pip='pip3', silent=silent)
def remote_pip_install(name, usermode, py, pip, silent):
    """Install package *name* on the remote host via pip.

    Delegates to ``lib_install`` — not defined in this module; presumably
    injected by the remoto module-execution environment (verify against the
    calling side).
    """
    return lib_install(name, silent=silent, usermode=usermode, py=py, pip=pip)
| 31.75
| 79
| 0.748031
| 37
| 254
| 4.918919
| 0.378378
| 0.148352
| 0.263736
| 0.208791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00905
| 0.129921
| 254
| 7
| 80
| 36.285714
| 0.81448
| 0
| 0
| 0
| 0
| 0
| 0.043651
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
67beafab4df8830c8bb514c716b27dc1f02ee428
| 626
|
py
|
Python
|
MonteCarlo/tests/test_SingleDimensionHamiltonian.py
|
DRosen766/MonteCarlo
|
979767b3cdc1f716e84b3855fb31d5f49400eb30
|
[
"MIT"
] | null | null | null |
MonteCarlo/tests/test_SingleDimensionHamiltonian.py
|
DRosen766/MonteCarlo
|
979767b3cdc1f716e84b3855fb31d5f49400eb30
|
[
"MIT"
] | null | null | null |
MonteCarlo/tests/test_SingleDimensionHamiltonian.py
|
DRosen766/MonteCarlo
|
979767b3cdc1f716e84b3855fb31d5f49400eb30
|
[
"MIT"
] | null | null | null |
import sys
import pytest
from MonteCarlo.SingleDimensionHamiltonian import SingleDimensionHamiltonian
from MonteCarlo.SpinConfiguration import spinConfiguration
def testSingleDimensionHamiltonian():
    """Hamiltonian attribute for two known spin configurations."""
    assert SingleDimensionHamiltonian(-2, 1.1, spinConfiguration(5, 4)).Hamiltonian == -8
    assert SingleDimensionHamiltonian(-2, 1.1, spinConfiguration(0, 2)).Hamiltonian == 4.0
def testCalculateEnergy():
    """calculateEnergy for two known spin configurations.

    Uses pytest.approx instead of exact float equality: the first expected
    value (1.8) was previously written out as its accumulated binary
    representation (1.7999999999999998), which breaks as soon as the
    summation order inside calculateEnergy changes.
    """
    assert SingleDimensionHamiltonian(-2, 1.1, spinConfiguration(0, 2)).calculateEnergy() == pytest.approx(1.8)
    assert SingleDimensionHamiltonian(-2, 1.1, spinConfiguration(4, 7)).calculateEnergy() == pytest.approx(0.5)
| 41.733333
| 113
| 0.785942
| 61
| 626
| 8.065574
| 0.344262
| 0.260163
| 0.268293
| 0.276423
| 0.430894
| 0.430894
| 0.219512
| 0.219512
| 0
| 0
| 0
| 0.074468
| 0.099042
| 626
| 15
| 114
| 41.733333
| 0.797872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
db3511b2d7d44acb37c47b6b95b8f9549914d996
| 1,600
|
py
|
Python
|
tests/unit/test_is_best_response.py
|
11michalis11/Nashpy
|
f33a09fa6efd25d8aad965cf8ed907563b5f57d2
|
[
"MIT"
] | 212
|
2016-11-06T12:44:08.000Z
|
2022-03-10T03:05:27.000Z
|
tests/unit/test_is_best_response.py
|
11michalis11/Nashpy
|
f33a09fa6efd25d8aad965cf8ed907563b5f57d2
|
[
"MIT"
] | 93
|
2016-11-06T12:34:14.000Z
|
2022-03-25T10:57:17.000Z
|
tests/unit/test_is_best_response.py
|
11michalis11/Nashpy
|
f33a09fa6efd25d8aad965cf8ed907563b5f57d2
|
[
"MIT"
] | 51
|
2016-11-06T12:31:22.000Z
|
2022-03-29T10:45:53.000Z
|
"""
Tests for the best response check
"""
import numpy as np
from nashpy.utils.is_best_response import (
is_best_response,
)
def test_is_best_response_example_1():
    """
    Example from the discussion documentation: a pure row strategy that is a
    best response, while the corresponding column strategy is not.
    """
    payoff = np.array(((0, -1, 1), (1, 0, -1), (-1, 1, 0)))
    col_strategy = np.array((0, 1 / 2, 1 / 2))
    row_strategy = np.array((0, 0, 1))
    assert is_best_response(A=payoff, sigma_c=col_strategy, sigma_r=row_strategy) is True
    assert is_best_response(A=-payoff.T, sigma_c=row_strategy, sigma_r=col_strategy) is False
def test_is_best_response_example_2():
    """
    Example from the discussion documentation: the uniform row strategy is
    not a best response, but the column player's strategy is.
    """
    payoff = np.array(((0, -1, 1), (1, 0, -1), (-1, 1, 0)))
    col_strategy = np.array((0, 1 / 2, 1 / 2))
    row_strategy = np.array((1 / 3, 1 / 3, 1 / 3))
    assert is_best_response(A=payoff, sigma_c=col_strategy, sigma_r=row_strategy) is False
    assert is_best_response(A=-payoff.T, sigma_c=row_strategy, sigma_r=col_strategy) is True
def test_is_best_response_example_3():
    """
    Example from the discussion documentation: uniform strategies are best
    responses for both the row and the column player.
    """
    payoff = np.array(((0, -1, 1), (1, 0, -1), (-1, 1, 0)))
    col_strategy = np.array((1 / 3, 1 / 3, 1 / 3))
    row_strategy = np.array((1 / 3, 1 / 3, 1 / 3))
    assert is_best_response(A=payoff, sigma_c=col_strategy, sigma_r=row_strategy) is True
    assert is_best_response(A=-payoff.T, sigma_c=row_strategy, sigma_r=col_strategy) is True
| 33.333333
| 78
| 0.650625
| 291
| 1,600
| 3.367698
| 0.140893
| 0.091837
| 0.157143
| 0.02449
| 0.889796
| 0.889796
| 0.804082
| 0.804082
| 0.797959
| 0.797959
| 0
| 0.048298
| 0.210625
| 1,600
| 47
| 79
| 34.042553
| 0.727633
| 0.26625
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.136364
| false
| 0
| 0.090909
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e1fda1a71dc71dd76729ca32b1af93d7e0abe360
| 104,738
|
py
|
Python
|
src/comp_net_raw.py
|
raun1/Complementary_Segmentation_Network-Raw-Code-Available-Under-Construction-
|
6522812f2e25304d4c4dfa572cd0df6549a9ff47
|
[
"MIT"
] | 29
|
2018-10-17T23:30:47.000Z
|
2022-02-17T15:07:37.000Z
|
src/comp_net_raw.py
|
raun1/MICCAI2018---Complementary_Segmentation_Network-Raw-Code-Available-Under-Construction-
|
6522812f2e25304d4c4dfa572cd0df6549a9ff47
|
[
"MIT"
] | 2
|
2018-10-24T12:37:06.000Z
|
2020-08-20T14:02:44.000Z
|
src/comp_net_raw.py
|
raun1/MICCAI2018---Complementary_Segmentation_Network-Raw-Code-Available-Under-Construction-
|
6522812f2e25304d4c4dfa572cd0df6549a9ff47
|
[
"MIT"
] | 5
|
2018-11-12T06:44:01.000Z
|
2021-12-16T08:17:39.000Z
|
# coding: utf-8
# In[2]:
import keras
import scipy as sp
import scipy.misc, scipy.ndimage.interpolation
from medpy import metric
import numpy as np
import os
from keras import losses
import tensorflow as tf
from keras.models import Model
from keras.layers import Input,merge, concatenate, Conv2D, MaxPooling2D, Activation, UpSampling2D,Dropout,Conv2DTranspose,add,multiply
from keras.layers.normalization import BatchNormalization as bn
from keras.callbacks import ModelCheckpoint, TensorBoard
from keras.optimizers import RMSprop
from keras import regularizers
from keras import backend as K
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
import numpy as np
import nibabel as nib
# Restrict the visible GPUs for this process to the listed device indices.
CUDA_VISIBLE_DEVICES = [1]
os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(map(str, CUDA_VISIBLE_DEVICES))
#oasis files 1-457
# Please see line 1541 for the main essence of complementary network - i.e. summing up the intermediate outputs and then concatenating them for reconstruction layer
#Hyper parameters to be set -
#l2_Lambda - used for regularizing/penalizing parameters of the current layer
#Mainly used to prevent overfitting and is incorporated in the loss function
#Please see keras.io for more details
#DropP sets the % of dropout at the end of every dense block
#Kernel_size is the kernel size of the convolution filters
#Please see readme for additional resources.
#Lines 73 - 648 is the common encoder of the segmentation and complementary branches.
#Layers such as xconv1a,xmerge1........ belong to the complementary upsampling branch of the architecture.
#The convolution layers's number indicates its level and so up6 and xup6 are at the same level
#and are parallel to each other
#Layers such as xxconv1a,xxmerge1 .... belong to the reconstruction branch.
#for more details of the multi outputs please see my isbi repository here
#https://github.com/raun1/ISBI2018-Diagnostic-Classification-Of-Lung-Nodules-Using-3D-Neural-Networks
#Basically to summarize, we have two branches one which has negative dice with ground truth brain mask
#and is the segmentation branch
#We then have another branch with positive dice with ground truth masks
#The THEME of comp-net is to sum up the two sections, future works will provide a better way to do this and a generalized version :)
#We do this theme of summing at every stage of the intermediate outputs i.e. the first intermediate output of segmentation branch
#is summed with first intermediate output of the complementary branch.
#We obtain a final summary of the outputs of the segmentation branch and complementary branch and also sum these two new summaries
#Finally we concat all of these summations and send to the reconstruction branch
#reconstruction branch is a simple structure of dense multi-output U-Net and the ground truth is the input image and loss is MSE.
# In[3]:
import numpy as np
import cv2
# Dice coefficient: smoothing constant shared by the metrics below; keeps the
# ratio in dice_coef finite when both masks sum to zero.
smooth = 1.
def dice_coef(y_true, y_pred):
    """Soft Dice coefficient between two masks, smoothed by module-level `smooth`."""
    yt = K.flatten(y_true)
    yp = K.flatten(y_pred)
    overlap = K.sum(yt * yp)
    total = K.sum(yt) + K.sum(yp)
    return (2. * overlap + smooth) / (total + smooth)
# Negative Dice: minimised when the prediction overlaps the region of
# interest (ROI-branch loss).
def dice_coef_loss(y_true, y_pred):
    score = dice_coef(y_true, y_pred)
    return -score
# Positive Dice used as a loss: minimised when overlap with the region of
# interest is low (complementary-branch (CO) loss).
def neg_dice_coef_loss(y_true, y_pred):
    score = dice_coef(y_true, y_pred)
    return score
def CompNet(input_shape,learn_rate=1e-3):
l2_lambda = 0.0002
DropP = 0.3
kernel_size=3
inputs = Input(input_shape)
conv1a = Conv2D( 12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(inputs)
conv1a = bn()(conv1a)
conv1b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(conv1a)
conv1b = bn()(conv1b)
merge1=concatenate([conv1a,conv1b])
conv1c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv1c = bn()(conv1c)
merge2=concatenate([conv1a,conv1b,conv1c])
conv1d = Conv2D(32, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv1d = bn()(conv1d)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1d)
pool1 = Dropout(DropP)(pool1)
conv2a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(pool1)
conv2a = bn()(conv2a)
conv2b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(conv2a)
conv2b = bn()(conv2b)
merge1=concatenate([conv2a,conv2b])
conv2c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv2c = bn()(conv2c)
merge2=concatenate([conv2a,conv2b,conv2c])
conv2d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv2d = bn()(conv2d)
merge3=concatenate([conv2a,conv2b,conv2c,conv2d])
conv2e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv2e = bn()(conv2e)
merge4=concatenate([conv2a,conv2b,conv2c,conv2d,conv2e])
conv2f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv2f = bn()(conv2f)
merge5=concatenate([conv2a,conv2b,conv2c,conv2d,conv2e,conv2f])
conv2g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv2g = bn()(conv2g)
merge6=concatenate([conv2a,conv2b,conv2c,conv2d,conv2e,conv2f,conv2g])
conv2h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv2h = bn()(conv2h)
merge7=concatenate([conv2a,conv2b,conv2c,conv2d,conv2e,conv2f,conv2g,conv2h])
conv2i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv2i = bn()(conv2g)
merge8=concatenate([conv2a,conv2b,conv2c,conv2d,conv2e,conv2f,conv2g,conv2h,conv2i])
conv2j = Conv2D(64, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv2j = bn()(conv2g)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2j)
pool2 = Dropout(DropP)(pool2)
conv3a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(pool2)
conv3a = bn()(conv3a)
conv3b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(conv3a)
conv3b = bn()(conv3b)
merge1=concatenate([conv3a,conv3b])
conv3c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv3c = bn()(conv3c)
merge2=concatenate([conv3a,conv3b,conv3c])
conv3d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv3d = bn()(conv3d)
merge3=concatenate([conv3a,conv3b,conv3c,conv3d])
conv3e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv3e = bn()(conv3e)
merge4=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e])
conv3f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv3f = bn()(conv3f)
merge5=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f])
conv3g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv3g = bn()(conv3g)
merge6=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g])
conv3h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv3h = bn()(conv3h)
merge7=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h])
conv3i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv3i = bn()(conv3i)
merge8=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i])
conv3j = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv3j = bn()(conv3j)
merge9=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j])
conv3k = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge9)
conv3k = bn()(conv3k)
merge10=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k])
conv3l=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge10)
conv3l = bn()(conv3l)
merge11=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l])
conv3m=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge11)
conv3m = bn()(conv3m)
merge12=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m])
conv3n=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge12)
conv3n = bn()(conv3n)
merge13=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n])
conv3o=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge13)
conv3o = bn()(conv3o)
merge14=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o])
conv3p=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge14)
conv3p = bn()(conv3p)
merge15=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o,conv3p])
conv3q=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge15)
conv3q = bn()(conv3q)
merge16=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o,conv3p,conv3q])
conv3r=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge16)
conv3r = bn()(conv3r)
merge17=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o,conv3p,conv3q,conv3r])
conv3s=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge17)
conv3s = bn()(conv3s)
merge18=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o,conv3p,conv3q,conv3r,conv3s])
conv3t=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge18)
conv3t = bn()(conv3t)
merge19=concatenate([conv3a,conv3b,conv3c,conv3d,conv3e,conv3f,conv3g,conv3h,conv3i,conv3j,conv3k,conv3l,conv3m,conv3n,conv3o,conv3p,conv3q,conv3r,conv3s,conv3t])
conv3u=Conv2D(128, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge19)
conv3u = bn()(conv3u)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3u)
pool3 = Dropout(DropP)(pool3)
conv4a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(pool3)
conv4a = bn()(conv4a)
conv4b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(conv4a)
conv4b = bn()(conv4b)
merge1=concatenate([conv4a,conv4b])
conv4c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv4c = bn()(conv4c)
merge2=concatenate([conv4a,conv4b,conv4c])
conv4d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv4d = bn()(conv4d)
merge3=concatenate([conv4a,conv4b,conv4c,conv4d])
conv4e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv4e = bn()(conv4e)
merge4=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e])
conv4f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv4f = bn()(conv4f)
merge5=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f])
conv4g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv4g = bn()(conv4g)
merge6=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g])
conv4h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv4h = bn()(conv4h)
merge7=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h])
conv4i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv4i = bn()(conv4i)
merge8=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i])
conv4j = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv4j = bn()(conv4j)
merge9=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j])
conv4k = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge9)
conv4k = bn()(conv4k)
merge10=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k])
conv4l=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge10)
conv4l = bn()(conv4l)
merge11=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l])
conv4m=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge11)
conv4m = bn()(conv4m)
merge12=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m])
conv4n=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge12)
conv4n = bn()(conv4n)
merge13=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n])
conv4o=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge13)
conv4o = bn()(conv4o)
merge14=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o])
conv4p=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge14)
conv4p = bn()(conv4p)
merge15=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o,conv4p])
conv4q=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge15)
conv4q = bn()(conv4q)
merge16=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o,conv4p,conv4q])
conv4r=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge16)
conv4r = bn()(conv4r)
merge17=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o,conv4p,conv4q,conv4r])
conv4s=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge17)
conv4s = bn()(conv4s)
merge18=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o,conv4p,conv4q,conv4r,conv4s])
conv4t=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge18)
conv4t = bn()(conv4t)
merge19=concatenate([conv4a,conv4b,conv4c,conv4d,conv4e,conv4f,conv4g,conv4h,conv4i,conv4j,conv4k,conv4l,conv4m,conv4n,conv4o,conv4p,conv4q,conv4r,conv4s,conv4t])
conv4u=Conv2D(256, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge19)
conv4u = bn()(conv4u)
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4u)
pool4 = Dropout(DropP)(pool4)
conv5a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(pool4)
conv5a = bn()(conv5a)
conv5b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(conv5a)
conv5b = bn()(conv5b)
merge1=concatenate([conv5a,conv5b])
conv5c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv5c = bn()(conv5c)
merge2=concatenate([conv5a,conv5b,conv5c])
conv5d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv5d = bn()(conv5d)
merge3=concatenate([conv5a,conv5b,conv5c,conv5d])
conv5e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv5e = bn()(conv5e)
merge4=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e])
conv5f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv5f = bn()(conv5f)
merge5=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f])
conv5g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv5g = bn()(conv5g)
merge6=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g])
conv5h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv5h = bn()(conv5h)
merge7=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h])
conv5i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv5i = bn()(conv5i)
merge8=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i])
conv5j = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv5j = bn()(conv5j)
merge9=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j])
conv5k = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge9)
conv5k = bn()(conv5k)
merge10=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k])
conv5l=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge10)
conv5l = bn()(conv5l)
merge11=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l])
conv5m=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge11)
conv5m = bn()(conv5m)
merge12=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m])
conv5n=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge12)
conv5n = bn()(conv5n)
merge13=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n])
conv5o=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge13)
conv5o = bn()(conv5o)
merge14=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o])
conv5p=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge14)
conv5p = bn()(conv5p)
merge15=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o,conv5p])
conv5q=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge15)
conv5q = bn()(conv5q)
merge16=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o,conv5p,conv5q])
conv5r=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge16)
conv5r = bn()(conv5r)
merge17=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o,conv5p,conv5q,conv5r])
conv5s=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge17)
conv5s = bn()(conv5s)
merge18=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o,conv5p,conv5q,conv5r,conv5s])
conv5t=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge18)
conv5t = bn()(conv5t)
merge19=concatenate([conv5a,conv5b,conv5c,conv5d,conv5e,conv5f,conv5g,conv5h,conv5i,conv5j,conv5k,conv5l,conv5m,conv5n,conv5o,conv5p,conv5q,conv5r,conv5s,conv5t])
conv5u=Conv2D(512, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge19)
conv5u = bn()(conv5u)
up6 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(conv5u), conv4u],name='up6', axis=3)
out6=Conv2DTranspose(12,(2, 2), strides=(8, 8), padding='same')(up6)
out6 = bn()(out6)
output1 = Conv2D(1, (1, 1), activation='sigmoid',name='output1')(out6)
up6 = Dropout(DropP)(up6)
conv6a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(up6)
conv6a = bn()(conv6a)
merge0=concatenate([up6,conv6a])
conv6b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge0)
conv6b = bn()(conv6b)
merge1=concatenate([up6,conv6a,conv6b])
conv6c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv6c = bn()(conv6c)
merge2=concatenate([up6,conv6a,conv6b,conv6c])
conv6d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv6d = bn()(conv6d)
merge3=concatenate([up6,conv6a,conv6b,conv6c,conv6d])
conv6e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv6e = bn()(conv6e)
merge4=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e])
conv6f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv6f = bn()(conv6f)
merge5=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f])
conv6g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv6g = bn()(conv6g)
merge6=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g])
conv6h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv6h = bn()(conv6h)
merge7=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h])
conv6i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv6i = bn()(conv6i)
merge8=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i])
conv6j = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv6j = bn()(conv6j)
merge9=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j])
conv6k = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge9)
conv6k = bn()(conv6k)
merge10=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k])
conv6l=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge10)
conv6l = bn()(conv6l)
merge11=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l])
conv6m=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge11)
conv6m = bn()(conv6m)
merge12=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m])
conv6n=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge12)
conv6n = bn()(conv6n)
merge13=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n])
conv6o=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge13)
conv6o = bn()(conv6o)
merge14=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o])
conv6p=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge14)
conv6p = bn()(conv6p)
merge15=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o,conv6p])
conv6q=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge15)
conv6q = bn()(conv6q)
merge16=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o,conv6p,conv6q])
conv6r=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge16)
conv6r = bn()(conv6r)
merge17=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o,conv6p,conv6q,conv6r])
conv6s=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge17)
conv6s = bn()(conv6s)
merge18=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o,conv6p,conv6q,conv6r,conv6s])
conv6t=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge18)
conv6t = bn()(conv6t)
merge19=concatenate([up6,conv6a,conv6b,conv6c,conv6d,conv6e,conv6f,conv6g,conv6h,conv6i,conv6j,conv6k,conv6l,conv6m,conv6n,conv6o,conv6p,conv6q,conv6r,conv6s,conv6t])
conv6u=Conv2D(256, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge19)
conv6u = bn()(conv6u)
up7 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(conv6u), conv3u],name='up7', axis=3)
up7 = Dropout(DropP)(up7)
#add second output here
out7=Conv2DTranspose(12,(2, 2), strides=(4, 4), padding='same')(up7)
out7 = bn()(out7)
output2 = Conv2D(1, (1, 1), activation='sigmoid',name='output2')(out7)
conv7a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(up7)
conv7a = bn()(conv7a)
merge0=concatenate([up7,conv7a])
conv7b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge0)
conv7b = bn()(conv7b)
merge1=concatenate([up7,conv7a,conv7b])
conv7c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
conv7c = bn()(conv7c)
merge2=concatenate([up7,conv7a,conv7b,conv7c])
conv7d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
conv7d = bn()(conv7d)
merge3=concatenate([up7,conv7a,conv7b,conv7c,conv7d])
conv7e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
conv7e = bn()(conv7e)
merge4=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e])
conv7f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
conv7f = bn()(conv7f)
merge5=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f])
conv7g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
conv7g = bn()(conv7g)
merge6=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g])
conv7h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
conv7h = bn()(conv7h)
merge7=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h])
conv7i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
conv7i = bn()(conv7i)
merge8=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i])
conv7j = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
conv7j = bn()(conv7j)
merge9=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j])
conv7k = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge9)
conv7k = bn()(conv7k)
merge10=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k])
conv7l=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge10)
conv7l = bn()(conv7l)
merge11=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l])
conv7m=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge11)
conv7m = bn()(conv7m)
merge12=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m])
conv7n=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge12)
conv7n = bn()(conv7n)
merge13=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n])
conv7o=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge13)
conv7o = bn()(conv7o)
merge14=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o])
conv7p=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge14)
conv7p = bn()(conv7p)
merge15=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o,conv7p])
conv7q=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge15)
conv7q = bn()(conv7q)
merge16=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o,conv7p,conv7q])
conv7r=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge16)
conv7r = bn()(conv7r)
merge17=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o,conv7p,conv7q,conv7r])
conv7s=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge17)
conv7s = bn()(conv7s)
merge18=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o,conv7p,conv7q,conv7r,conv7s])
conv7t=Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge18)
conv7t = bn()(conv7t)
merge19=concatenate([up7,conv7a,conv7b,conv7c,conv7d,conv7e,conv7f,conv7g,conv7h,conv7i,conv7j,conv7k,conv7l,conv7m,conv7n,conv7o,conv7p,conv7q,conv7r,conv7s,conv7t])
conv7u=Conv2D(128, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge19)
conv7u = bn()(conv7u)
# Decoder step: 2x transposed-conv upsampling of conv7u, concatenated with
# the encoder skip connection conv2j along the channel axis.
up8 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(conv7u), conv2j],name='up8', axis=3)
up8 = Dropout(DropP)(up8)
# Third deep-supervision output: upsample 2x more so 'output3' is produced
# at the model's full output resolution.
out8=Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(up8)
out8 = bn()(out8)
output3 = Conv2D(1, (1, 1), activation='sigmoid',name='output3')(out8)
# DenseNet-style block at the up8 resolution: nine 12-filter convs, each
# reading the concatenation of up8 with every feature map produced so far,
# then a 64-filter transition conv (conv8j) feeding the next decoder stage.
# Replaces the hand-unrolled conv8a..conv8i / merge0..merge8 chain; layer
# creation order (Conv2D -> bn -> concatenate) matches the original, so the
# resulting graph wiring is identical.
dense8_feats = [up8]   # up8 plus every conv output so far
dense8_in = up8
for _ in range(9):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(dense8_in)
    d = bn()(d)
    dense8_feats.append(d)
    dense8_in = concatenate(dense8_feats)
# Transition conv: widen to 64 filters before the next Conv2DTranspose.
conv8j = Conv2D(64, (kernel_size, kernel_size), activation='relu', padding='same',
                kernel_regularizer=regularizers.l2(l2_lambda))(dense8_in)
conv8j = bn()(conv8j)
# Final decoder step: upsample conv8j 2x and concatenate with the top-level
# encoder skip connection conv1d.
up9 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(conv8j), conv1d],name='up9',axis=3)
up9 = Dropout(DropP)(up9)
# Fourth deep-supervision output; strides=(1, 1) because up9 is already at
# full resolution, so this transposed conv does not change spatial size.
out9=Conv2DTranspose(12,(2, 2), strides=(1, 1), padding='same')(up9)
out9 = bn()(out9)
output4 = Conv2D(1, (1, 1), activation='sigmoid',name='output4')(out9)
# Final dense block of this branch: three 12-filter convs with DenseNet
# connectivity on up9, then a 32-filter conv (conv9d) used both for the
# 'conv10' head and the branch's final feature merge. Replaces the unrolled
# conv9a..conv9c / merge0..merge2 chain; layer creation order matches the
# original, so the graph wiring is identical.
dense9_feats = [up9]
dense9_in = up9
for _ in range(3):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(dense9_in)
    d = bn()(d)
    dense9_feats.append(d)
    dense9_in = concatenate(dense9_feats)
conv9d = Conv2D(32, (kernel_size, kernel_size), activation='relu', padding='same',
                kernel_regularizer=regularizers.l2(l2_lambda))(dense9_in)
conv9d = bn()(conv9d)
# Main 1x1 sigmoid head of the first branch.
conv10 = Conv2D(1, (1, 1), activation='sigmoid',name='conv10')(conv9d)
# Fuse the deep-supervision feature maps (out6..out9) with the final
# features (conv9d — note: the features, not the conv10 prediction) and
# produce the branch's combined prediction.
finalmerge=concatenate([out6,out7,out8,out9,conv9d])
final_op=Conv2D(1, (1, 1), activation='sigmoid',name='final_op')(finalmerge)
# model = Model(inputs=inputs, outputs=[out6,out7,out8,out9,conv10,final_op])
# Second decoder branch ("brain") — mirrors the first branch's decoder,
# reusing the same encoder feature maps (conv5u, conv4u, conv3u, ...).
# Second branch, first decoder step: upsample the bottleneck conv5u 2x and
# concatenate with the encoder skip connection conv4u.
xup6 = concatenate([Conv2DTranspose(24,(2, 2), strides=(2, 2), padding='same')(conv5u), conv4u],name='xup6', axis=3)
# First deep-supervision output of this branch: 8x upsample to reach full
# output resolution. NOTE(review): taken BEFORE the dropout below, whereas
# the later heads (xout7, xout8) are taken after dropout — confirm this
# asymmetry is intentional.
xout6=Conv2DTranspose(24,(2, 2), strides=(8, 8), padding='same')(xup6)
xout6 = bn()(xout6)
xoutput1 = Conv2D(1, (1, 1), activation='sigmoid',name='xoutput1')(xout6)
xup6 = Dropout(DropP)(xup6)
# DenseNet-style block at the xup6 resolution: twenty 12-filter convs, each
# reading the concatenation of xup6 with every feature map produced so far,
# then a 256-filter transition conv (xconv6u) feeding the next decoder
# stage. Replaces the hand-unrolled xconv6a..xconv6t / merge0..merge19
# chain; layer creation order (Conv2D -> bn -> concatenate) matches the
# original, so the resulting graph wiring is identical.
xdense6_feats = [xup6]   # xup6 plus every conv output so far
xdense6_in = xup6
for _ in range(20):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(xdense6_in)
    d = bn()(d)
    xdense6_feats.append(d)
    xdense6_in = concatenate(xdense6_feats)
# Transition conv: widen to 256 filters before the next Conv2DTranspose.
xconv6u = Conv2D(256, (kernel_size, kernel_size), activation='relu', padding='same',
                 kernel_regularizer=regularizers.l2(l2_lambda))(xdense6_in)
xconv6u = bn()(xconv6u)
# Decoder step: upsample xconv6u 2x and concatenate with the encoder skip
# connection conv3u.
xup7 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xconv6u), conv3u],name='xup7', axis=3)
xup7 = Dropout(DropP)(xup7)
# Second deep-supervision output of this branch: 4x upsample to full
# output resolution.
xout7=Conv2DTranspose(12,(2, 2), strides=(4, 4), padding='same')(xup7)
xout7 = bn()(xout7)
xoutput2 = Conv2D(1, (1, 1), activation='sigmoid',name='xoutput2')(xout7)
# DenseNet-style block at the xup7 resolution: twenty 12-filter convs with
# dense connectivity on xup7, then a 128-filter transition conv (xconv7u).
# Replaces the hand-unrolled xconv7a..xconv7t / merge0..merge19 chain;
# layer creation order (Conv2D -> bn -> concatenate) matches the original,
# so the resulting graph wiring is identical.
xdense7_feats = [xup7]
xdense7_in = xup7
for _ in range(20):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(xdense7_in)
    d = bn()(d)
    xdense7_feats.append(d)
    xdense7_in = concatenate(xdense7_feats)
# Transition conv: widen to 128 filters before the next Conv2DTranspose.
xconv7u = Conv2D(128, (kernel_size, kernel_size), activation='relu', padding='same',
                 kernel_regularizer=regularizers.l2(l2_lambda))(xdense7_in)
xconv7u = bn()(xconv7u)
# Decoder step: upsample xconv7u 2x and concatenate with the encoder skip
# connection conv2j (shared with the first branch).
xup8 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xconv7u), conv2j],name='xup8', axis=3)
xup8 = Dropout(DropP)(xup8)
# Third deep-supervision output of this branch: 2x upsample to full
# output resolution.
xout8=Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xup8)
xout8 = bn()(xout8)
xoutput3 = Conv2D(1, (1, 1), activation='sigmoid',name='xoutput3')(xout8)
# DenseNet-style block at the xup8 resolution: nine 12-filter convs with
# dense connectivity on xup8, then a 64-filter transition conv (xconv8j).
# Replaces the hand-unrolled xconv8a..xconv8i / merge0..merge8 chain;
# layer creation order (Conv2D -> bn -> concatenate) matches the original,
# so the resulting graph wiring is identical.
xdense8_feats = [xup8]
xdense8_in = xup8
for _ in range(9):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(xdense8_in)
    d = bn()(d)
    xdense8_feats.append(d)
    xdense8_in = concatenate(xdense8_feats)
# Transition conv: widen to 64 filters before the next Conv2DTranspose.
xconv8j = Conv2D(64, (kernel_size, kernel_size), activation='relu', padding='same',
                 kernel_regularizer=regularizers.l2(l2_lambda))(xdense8_in)
xconv8j = bn()(xconv8j)
# Final decoder step of this branch: upsample xconv8j 2x and concatenate
# with the top-level encoder skip connection conv1d.
xup9 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xconv8j), conv1d],name='xup9',axis=3)
xup9 = Dropout(DropP)(xup9)
# Fourth deep-supervision output; strides=(1, 1) because xup9 is already at
# full resolution, so this transposed conv does not change spatial size.
xout9=Conv2DTranspose(12,(2, 2), strides=(1, 1), padding='same')(xup9)
xout9 = bn()(xout9)
xoutput4 = Conv2D(1, (1, 1), activation='sigmoid',name='xoutput4')(xout9)
# Final dense block of the second branch: three 12-filter convs with dense
# connectivity on xup9, then a 32-filter conv (xconv9d) used for both the
# 'xconv10' head and the branch's final feature merge. Replaces the
# unrolled xconv9a..xconv9c / merge0..merge2 chain; layer creation order
# matches the original, so the graph wiring is identical.
xdense9_feats = [xup9]
xdense9_in = xup9
for _ in range(3):
    d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
               kernel_regularizer=regularizers.l2(l2_lambda))(xdense9_in)
    d = bn()(d)
    xdense9_feats.append(d)
    xdense9_in = concatenate(xdense9_feats)
xconv9d = Conv2D(32, (kernel_size, kernel_size), activation='relu', padding='same',
                 kernel_regularizer=regularizers.l2(l2_lambda))(xdense9_in)
xconv9d = bn()(xconv9d)
# Main 1x1 sigmoid head of the second branch.
xconv10 = Conv2D(1, (1, 1), activation='sigmoid',name='xconv10')(xconv9d)
# Fuse the branch's deep-supervision maps with its final features (xconv9d,
# the features — not the xconv10 prediction) into a combined prediction.
xfinalmerge=concatenate([xout6,xout7,xout8,xout9,xconv9d])
xfinal_op=Conv2D(1, (1, 1), activation='sigmoid',name='xfinal_op')(xfinalmerge)
# Element-wise sums pairing each output of the first branch with the
# corresponding output of the second ("brain") branch.
u_net_op0=keras.layers.add([final_op,xfinal_op])
u_net_op1=keras.layers.add([conv10,xconv10])
u_net_op2=keras.layers.add([output4,xoutput4])
u_net_op3=keras.layers.add([output3,xoutput3])
u_net_op4=keras.layers.add([output2,xoutput2])
u_net_op5=keras.layers.add([output1,xoutput1])
# Concatenation of all fused outputs, fed to the reconstruction layers.
u_net_op_merge=concatenate([u_net_op0,u_net_op1,u_net_op2,u_net_op3,u_net_op4,u_net_op5])
# First dense block of the reconstruction network: two chained 12-filter
# convs, one densely-connected 12-filter conv, then a 32-filter transition
# conv followed by 2x2 max-pooling and dropout.
xxconv1a = bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(u_net_op_merge))
xxconv1b = bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(xxconv1a))
xxconv1c = bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate([xxconv1a, xxconv1b])))
xxconv1d = bn()(Conv2D(32, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate([xxconv1a, xxconv1b, xxconv1c])))
xxpool1 = Dropout(DropP)(MaxPooling2D(pool_size=(2, 2))(xxconv1d))
# Second dense block of the reconstruction encoder: nine densely-connected
# 12-filter convs (xxconv2a..xxconv2i) followed by a 64-filter transition
# conv (xxconv2j), 2x2 max-pooling and dropout.  Each conv receives the
# concatenation of every previous conv output in the block.
xxconv2a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(xxpool1)
xxconv2a = bn()(xxconv2a)
xxconv2b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(xxconv2a)
xxconv2b = bn()(xxconv2b)
merge1=concatenate([xxconv2a,xxconv2b])
xxconv2c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
xxconv2c = bn()(xxconv2c)
merge2=concatenate([xxconv2a,xxconv2b,xxconv2c])
xxconv2d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
xxconv2d = bn()(xxconv2d)
merge3=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d])
xxconv2e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
xxconv2e = bn()(xxconv2e)
merge4=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d,xxconv2e])
xxconv2f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
xxconv2f = bn()(xxconv2f)
merge5=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d,xxconv2e,xxconv2f])
xxconv2g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
xxconv2g = bn()(xxconv2g)
merge6=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d,xxconv2e,xxconv2f,xxconv2g])
xxconv2h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
xxconv2h = bn()(xxconv2h)
merge7=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d,xxconv2e,xxconv2f,xxconv2g,xxconv2h])
xxconv2i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
# BUG FIX: was bn()(xxconv2g), which discarded the conv computed just above.
xxconv2i = bn()(xxconv2i)
merge8=concatenate([xxconv2a,xxconv2b,xxconv2c,xxconv2d,xxconv2e,xxconv2f,xxconv2g,xxconv2h,xxconv2i])
xxconv2j = Conv2D(64, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
# BUG FIX: was bn()(xxconv2g), which dropped the 64-filter transition conv
# entirely (every sibling block, e.g. xxconv3i/xxconv3j, normalizes its own conv).
xxconv2j = bn()(xxconv2j)
xxpool2 = MaxPooling2D(pool_size=(2, 2))(xxconv2j)
xxpool2 = Dropout(DropP)(xxpool2)
# Third dense block of the reconstruction encoder: twenty densely-connected
# 12-filter convs followed by a 128-filter transition conv, 2x2 max-pooling
# and dropout.  Conv k receives the concatenation of all previous outputs;
# the first conv reads the pooled input and the second reads the first.
dense3_feats = [bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                            kernel_regularizer=regularizers.l2(l2_lambda))(xxpool2))]
dense3_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                                kernel_regularizer=regularizers.l2(l2_lambda))(dense3_feats[0])))
for _ in range(18):
    dense3_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size),
                                    activation='relu', padding='same',
                                    kernel_regularizer=regularizers.l2(l2_lambda))(
                                        concatenate(dense3_feats))))
xxconv3u = bn()(Conv2D(128, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate(dense3_feats)))
xxpool3 = Dropout(DropP)(MaxPooling2D(pool_size=(2, 2))(xxconv3u))
# Fourth dense block of the reconstruction encoder: twenty densely-connected
# 12-filter convs followed by a 256-filter transition conv, 2x2 max-pooling
# and dropout.  Same wiring as the xxconv3 block, one level deeper.
dense4_feats = [bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                            kernel_regularizer=regularizers.l2(l2_lambda))(xxpool3))]
dense4_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                                kernel_regularizer=regularizers.l2(l2_lambda))(dense4_feats[0])))
for _ in range(18):
    dense4_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size),
                                    activation='relu', padding='same',
                                    kernel_regularizer=regularizers.l2(l2_lambda))(
                                        concatenate(dense4_feats))))
xxconv4u = bn()(Conv2D(256, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate(dense4_feats)))
xxpool4 = Dropout(DropP)(MaxPooling2D(pool_size=(2, 2))(xxconv4u))
# Bottleneck dense block of the reconstruction network: twenty densely-
# connected 12-filter convs followed by a 512-filter transition conv.
# No pooling here — xxconv5u feeds the first decoder upsample.
dense5_feats = [bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                            kernel_regularizer=regularizers.l2(l2_lambda))(xxpool4))]
dense5_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
                                kernel_regularizer=regularizers.l2(l2_lambda))(dense5_feats[0])))
for _ in range(18):
    dense5_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size),
                                    activation='relu', padding='same',
                                    kernel_regularizer=regularizers.l2(l2_lambda))(
                                        concatenate(dense5_feats))))
xxconv5u = bn()(Conv2D(512, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate(dense5_feats)))
# First decoder stage of the reconstruction network: upsample the bottleneck
# by 2 and fuse it with the xxconv4u skip connection.
xxup6 = concatenate([Conv2DTranspose(12, (2, 2), strides=(2, 2), padding='same')(xxconv5u),
                     xxconv4u], name='xxup6', axis=3)
# First deep-supervision head: x8 upsample back to full resolution.
# NOTE: taken before dropout, matching the original graph order.
xxout6 = bn()(Conv2DTranspose(12, (2, 2), strides=(8, 8), padding='same')(xxup6))
xxoutput1 = Conv2D(1, (1, 1), activation='sigmoid', name='xxoutput1')(xxout6)
xxup6 = Dropout(DropP)(xxup6)
# Dense refinement: twenty 12-filter convs, each fed xxup6 concatenated with
# all earlier conv outputs, closed by a 256-filter transition conv.
dense6_feats = []
for _ in range(20):
    dense6_in = xxup6 if not dense6_feats else concatenate([xxup6] + dense6_feats)
    dense6_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size),
                                    activation='relu', padding='same',
                                    kernel_regularizer=regularizers.l2(l2_lambda))(dense6_in)))
xxconv6u = bn()(Conv2D(256, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate([xxup6] + dense6_feats)))
# Second decoder stage: upsample by 2 and fuse with the xxconv3u skip connection.
xxup7 = concatenate([Conv2DTranspose(12, (2, 2), strides=(2, 2), padding='same')(xxconv6u),
                     xxconv3u], name='xxup7', axis=3)
xxup7 = Dropout(DropP)(xxup7)
# Second deep-supervision head: x4 upsample back to full resolution.
xxout7 = bn()(Conv2DTranspose(12, (2, 2), strides=(4, 4), padding='same')(xxup7))
xxoutput2 = Conv2D(1, (1, 1), activation='sigmoid', name='xxoutput2')(xxout7)
# Dense refinement: twenty 12-filter convs, each fed xxup7 concatenated with
# all earlier conv outputs, closed by a 128-filter transition conv.
dense7_feats = []
for _ in range(20):
    dense7_in = xxup7 if not dense7_feats else concatenate([xxup7] + dense7_feats)
    dense7_feats.append(bn()(Conv2D(12, (kernel_size, kernel_size),
                                    activation='relu', padding='same',
                                    kernel_regularizer=regularizers.l2(l2_lambda))(dense7_in)))
xxconv7u = bn()(Conv2D(128, (kernel_size, kernel_size), activation='relu', padding='same',
                       kernel_regularizer=regularizers.l2(l2_lambda))(
                           concatenate([xxup7] + dense7_feats)))
# Third decoder stage: upsample by 2 and fuse with the xxconv2j skip connection.
xxup8 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xxconv7u), xxconv2j],name='xxup8', axis=3)
xxup8 = Dropout(DropP)(xxup8)
# Third deep-supervision head: x2 upsample back to full resolution.
xxout8=Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xxup8)
xxout8 = bn()(xxout8)
xxoutput3 = Conv2D(1, (1, 1), activation='sigmoid',name='xxoutput3')(xxout8)
# Dense refinement block: each 12-filter conv receives xxup8 concatenated
# with all previous conv outputs (block continues beyond this chunk).
xxconv8a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(xxup8)
xxconv8a = bn()(xxconv8a)
merge0=concatenate([xxup8,xxconv8a])
xxconv8b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge0)
xxconv8b = bn()(xxconv8b)
merge1=concatenate([xxup8,xxconv8a,xxconv8b])
xxconv8c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
xxconv8c = bn()(xxconv8c)
merge2=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c])
xxconv8d = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
xxconv8d = bn()(xxconv8d)
merge3=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d])
xxconv8e = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge3)
xxconv8e = bn()(xxconv8e)
merge4=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d,xxconv8e])
xxconv8f = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge4)
xxconv8f = bn()(xxconv8f)
merge5=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d,xxconv8e,xxconv8f])
xxconv8g = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge5)
xxconv8g = bn()(xxconv8g)
merge6=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d,xxconv8e,xxconv8f,xxconv8g])
xxconv8h = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge6)
xxconv8h = bn()(xxconv8h)
merge7=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d,xxconv8e,xxconv8f,xxconv8g,xxconv8h])
xxconv8i = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge7)
xxconv8i = bn()(xxconv8i)
merge8=concatenate([xxup8,xxconv8a,xxconv8b,xxconv8c,xxconv8d,xxconv8e,xxconv8f,xxconv8g,xxconv8h,xxconv8i])
xxconv8j = Conv2D(64, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge8)
xxconv8j = bn()(xxconv8j)
xxup9 = concatenate([Conv2DTranspose(12,(2, 2), strides=(2, 2), padding='same')(xxconv8j), xxconv1d],name='xxup9',axis=3)
xxup9 = Dropout(DropP)(xxup9)
xxout9=Conv2DTranspose(12,(2, 2), strides=(1, 1), padding='same')(xxup9)
xxout9 = bn()(xxout9)
xxoutput4 = Conv2D(1, (1, 1), activation='sigmoid',name='xxoutput4')(xxout9)
xxconv9a = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(xxup9)
xxconv9a = bn()(xxconv9a)
merge0=concatenate([xxup9,xxconv9a])
xxconv9b = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge0)
xxconv9b = bn()(xxconv9b)
merge1=concatenate([xxup9,xxconv9a,xxconv9b])
xxconv9c = Conv2D(12, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge1)
xxconv9c = bn()(xxconv9c)
merge2=concatenate([xxup9,xxconv9a,xxconv9b,xxconv9c])
xxconv9d = Conv2D(32, (kernel_size, kernel_size), activation='relu', padding='same',
kernel_regularizer=regularizers.l2(l2_lambda) )(merge2)
xxconv9d = bn()(xxconv9d)
xxconv10 = Conv2D(1, (1, 1), activation='sigmoid',name='xxconv10')(xxconv9d)
xxfinalmerge=concatenate([xxout6,xxout7,xxout8,xxout9,xxconv9d])
xxfinal_op=Conv2D(1, (1, 1), activation='sigmoid',name='xxfinal_op')(xxfinalmerge)
#model = Model(inputs=[inputs,input_prob,input_prob_inverse], outputs=[conv10,xconv10,third_out])
model = Model(inputs=inputs, outputs=[output1,output2,output3,output4,conv10,final_op,xoutput1,xoutput2,xoutput3,xoutput4,xconv10,xfinal_op,xxoutput1,xxoutput2,xxoutput3,xxoutput4,xxconv10,xxfinal_op])
model.compile(optimizer=Adam(lr=1e-5), loss={'output1':dice_coef_loss,'output2':dice_coef_loss,'output3':dice_coef_loss,'output4':dice_coef_loss,'conv10':dice_coef_loss,'final_op':dice_coef_loss,
'xoutput1':neg_dice_coef_loss,'xoutput2':neg_dice_coef_loss,'xoutput3':neg_dice_coef_loss,'xoutput4':neg_dice_coef_loss,'xconv10':neg_dice_coef_loss,'xfinal_op':neg_dice_coef_loss,
'xxoutput1':'mse','xxoutput2':'mse','xxoutput3':'mse','xxoutput4':'mse','xxconv10':'mse','xxfinal_op':'mse'})
#loss=[neg_dice_coef_loss,'mse',dice_coef_loss],
#metrics=[neg_dice_coef,'mae',dice_coef])
return model
# In[8]:
# Build the CompNet model defined above and show its layer summary.
model = CompNet(input_shape=(256, 256, 1))
print(model.summary())

# In[62]:
# Load training data; inputs get a trailing channel axis -> (N, H, W, 1).
X_train = np.load("X_train_new.npy")
X_train = X_train.reshape(X_train.shape + (1,))
# Labels are reshaped to the same (N, H, W, 1) layout as the inputs.
y_train = np.load("y_train_new.npy").reshape(X_train.shape)
# The model has 18 outputs (see the loss dict in CompNet): the first 12 heads
# are segmentation outputs trained against y_train, the last 6 ('xx*') are
# reconstruction outputs trained against the input images themselves.
model.fit([X_train],
          [y_train] * 12 + [X_train] * 6,
          batch_size=4,
          # `nb_epoch` was the Keras 1 spelling and is rejected by Keras 2;
          # the supported keyword is `epochs`.
          epochs=10,
          # validation_data=([X2_validate], [y_validate]),
          shuffle=True)
# callbacks=[xyz],
# class_weight=class_weightt)

# In[29]:
import h5py
# model.save_weights("basic_unet_weights.h5")
model.save('dense_comp_net_dsp.h5')
| 40.470634
| 228
| 0.701799
| 11,940
| 104,738
| 6.032998
| 0.059296
| 0.089819
| 0.071522
| 0.089402
| 0.806439
| 0.788253
| 0.777522
| 0.758836
| 0.743718
| 0.722326
| 0
| 0.079498
| 0.160152
| 104,738
| 2,587
| 229
| 40.486278
| 0.739405
| 0.026791
| 0
| 0.208987
| 0
| 0
| 0.031645
| 0.000206
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002853
| false
| 0
| 0.015692
| 0.001427
| 0.021398
| 0.000713
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c00822eb535df6cb4526d3459ad3dce8a5d53763
| 165
|
py
|
Python
|
content/admin.py
|
divyeshvala/Request_Content
|
5a2dcbbe7e590f1e28dea582ce0b4b77795eb16d
|
[
"Apache-2.0"
] | 1
|
2020-03-08T13:47:58.000Z
|
2020-03-08T13:47:58.000Z
|
content/admin.py
|
divyeshvala/Request_Content
|
5a2dcbbe7e590f1e28dea582ce0b4b77795eb16d
|
[
"Apache-2.0"
] | null | null | null |
content/admin.py
|
divyeshvala/Request_Content
|
5a2dcbbe7e590f1e28dea582ce0b4b77795eb16d
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin

from .models import Idea, Topic, Creator

# Expose every content model in the Django admin site.
for content_model in (Idea, Topic, Creator):
    admin.site.register(content_model)
| 20.625
| 41
| 0.769697
| 23
| 165
| 5.521739
| 0.478261
| 0.212598
| 0.401575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 165
| 7
| 42
| 23.571429
| 0.888112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
c022d41d9ec7ad745818d4f2a41325c881fa1934
| 51
|
py
|
Python
|
archeion/conf.py
|
ambhudia/archeion
|
0abccf58e498cc7de9b276fc4798df1a30ce0590
|
[
"Apache-2.0"
] | null | null | null |
archeion/conf.py
|
ambhudia/archeion
|
0abccf58e498cc7de9b276fc4798df1a30ce0590
|
[
"Apache-2.0"
] | null | null | null |
archeion/conf.py
|
ambhudia/archeion
|
0abccf58e498cc7de9b276fc4798df1a30ce0590
|
[
"Apache-2.0"
] | null | null | null |
# Application/client identifier used when authenticating with a remote
# service (which service is not visible from this file -- confirm at the
# call sites).
# NOTE(review): this is a credential-like value committed to source; consider
# loading it from an environment variable or a non-committed config instead.
CLIENT_ID = "57d1cc14-cb8e-4d52-a6ba-6f86445999f7"
| 25.5
| 50
| 0.803922
| 7
| 51
| 5.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0.058824
| 51
| 1
| 51
| 51
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.705882
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c02c1543fd904dc486a1a44fee1d58bafcb37435
| 1,787
|
py
|
Python
|
tests/write-tests/test_findpeaks.py
|
focolab/gcamp-extractor
|
5e47ab2cfb75e3f09cfd84d40d8be0739a75d39c
|
[
"MIT"
] | null | null | null |
tests/write-tests/test_findpeaks.py
|
focolab/gcamp-extractor
|
5e47ab2cfb75e3f09cfd84d40d8be0739a75d39c
|
[
"MIT"
] | 26
|
2022-03-01T17:34:45.000Z
|
2022-03-31T00:09:55.000Z
|
tests/write-tests/test_findpeaks.py
|
focolab/gcamp-extractor
|
5e47ab2cfb75e3f09cfd84d40d8be0739a75d39c
|
[
"MIT"
] | null | null | null |
import numpy as np
from eats_worm.segfunctions import *
from sklearn.datasets import make_blobs
# generate three points, use them as bright voxels in image, and verify that they are detected
def test_findpeaks2d():
    """Smoke test for findpeaks2d: planted 2D peaks are recovered.

    Each planted point gets a 3x3 plateau of value 1 with the center bumped
    to 2, so the center is a strict local maximum the peak finder must report.
    """
    image = np.zeros((25, 25))
    # center_box=(1, 20) keeps blob centers away from the array border.
    # NOTE(review): individual samples can still land slightly outside that
    # box; a negative index would then wrap around -- confirm this is intended.
    samples, labels, blob_centers = make_blobs(n_samples=3, centers=3, n_features=2, center_box=(1, 20), return_centers=True)
    # Column-wise sort makes the final comparison order-independent.
    bright_voxels = np.sort(samples.astype(int), axis=0)
    for bright_voxel in bright_voxels:
        # Paint the 3x3 plateau around the voxel...
        for x_index in range(bright_voxel[0] - 1, bright_voxel[0] + 2):
            for y_index in range(bright_voxel[1] - 1, bright_voxel[1] + 2):
                image[x_index, y_index] = 1
        # ...then raise the center so it is a strict maximum (value 2).
        image[tuple(bright_voxel)] += 1
    # findpeaks2d appears to expect a stack of 2D frames; add a frame axis.
    image = np.expand_dims(image, axis=0)
    # Columns 1:3 of the result are presumably the (x, y) coordinates, with
    # column 0 the frame index -- TODO confirm against findpeaks2d.
    detected_centers = np.sort(findpeaks2d(image)[:,1:3], axis=0)
    assert(np.array_equal(bright_voxels, detected_centers.astype(int)))
# generate three points, use them as bright voxels in image, and verify that they are detected
def test_findpeaks3d():
    """Smoke test for findpeaks3d: planted 3D peaks are recovered.

    Same construction as the 2D test, one dimension up: a 3x3x3 plateau of
    value 1 per point with the center bumped to 2 (a strict local maximum).
    """
    image = np.zeros((25, 25, 25))
    # NOTE(review): as in the 2D test, samples may fall outside center_box
    # and wrap via negative indexing -- confirm intended.
    samples, labels, blob_centers = make_blobs(n_samples=3, centers=3, n_features=3, center_box=(1, 20), return_centers=True)
    # Column-wise sort makes the final comparison order-independent.
    bright_voxels = np.sort(samples.astype(int), axis=0)
    for bright_voxel in bright_voxels:
        # Paint the 3x3x3 plateau around the voxel...
        for z_index in range(bright_voxel[0] - 1, bright_voxel[0] + 2):
            for x_index in range(bright_voxel[1] - 1, bright_voxel[1] + 2):
                for y_index in range(bright_voxel[2] - 1, bright_voxel[2] + 2):
                    image[z_index, x_index, y_index] = 1
        # ...then raise the center so it is a strict maximum (value 2).
        image[tuple(bright_voxel)] += 1
    # 3D variant returns (z, x, y) coordinates directly -- no frame column.
    detected_centers = np.sort(findpeaks3d(image), axis=0)
    assert(np.array_equal(bright_voxels, detected_centers.astype(int)))
if __name__ == '__main__':
    # Allow running the smoke tests directly, without invoking pytest.
    test_findpeaks2d()
    test_findpeaks3d()
| 51.057143
| 125
| 0.688864
| 277
| 1,787
| 4.216607
| 0.245487
| 0.131849
| 0.061644
| 0.077055
| 0.761986
| 0.739726
| 0.739726
| 0.738014
| 0.714041
| 0.714041
| 0
| 0.040972
| 0.19418
| 1,787
| 35
| 126
| 51.057143
| 0.770139
| 0.103525
| 0
| 0.266667
| 1
| 0
| 0.005
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.066667
| false
| 0
| 0.1
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2244c5918298088ea8deb02c94382d8e4f5b9c17
| 50
|
py
|
Python
|
icu_mortality/main.py
|
RJBeetel3/mimic3_analysis
|
5267a9cc9037da431bb257d157df8e00fab2d295
|
[
"MIT"
] | 2
|
2018-11-27T07:47:10.000Z
|
2020-03-02T07:45:06.000Z
|
icu_mortality/main.py
|
RJBeetel3/mimic3_analysis
|
5267a9cc9037da431bb257d157df8e00fab2d295
|
[
"MIT"
] | 1
|
2018-12-03T18:04:27.000Z
|
2018-12-05T20:38:14.000Z
|
icu_mortality/main.py
|
RJBeetel3/mimic3_analysis
|
5267a9cc9037da431bb257d157df8e00fab2d295
|
[
"MIT"
] | 1
|
2018-03-10T23:23:17.000Z
|
2018-03-10T23:23:17.000Z
|
import ptnt_demog

# Entry point: trigger the demographics import on execution.
# NOTE(review): "ptnt_demog" presumably means patient demographics (MIMIC-III
# analysis context) -- confirm against the ptnt_demog module.
ptnt_demog.import_demog_data()
| 12.5
| 30
| 0.86
| 8
| 50
| 4.875
| 0.5
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 50
| 4
| 30
| 12.5
| 0.847826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2257be6087e7bd6f98fa803ffb8471795796450b
| 100
|
py
|
Python
|
tienda/stores/models/__init__.py
|
Ricardokaro/tienda
|
3b94d4e661da583dc4026ed6fd422d1f03a25bc3
|
[
"MIT"
] | null | null | null |
tienda/stores/models/__init__.py
|
Ricardokaro/tienda
|
3b94d4e661da583dc4026ed6fd422d1f03a25bc3
|
[
"MIT"
] | 2
|
2022-03-01T10:04:17.000Z
|
2022-03-02T10:04:09.000Z
|
tienda/stores/models/__init__.py
|
Ricardokaro/tienda
|
3b94d4e661da583dc4026ed6fd422d1f03a25bc3
|
[
"MIT"
] | null | null | null |
# Package aggregator: re-export all model definitions from the submodules so
# callers can import them from the package root (e.g. `from ..models import X`).
# NOTE(review): wildcard imports make the public API implicit; consider
# defining __all__ in each submodule to pin what is exported.
from .stores import *
from .products import *
from .shopping import *
from .purchase_detail import *
| 25
| 30
| 0.77
| 13
| 100
| 5.846154
| 0.538462
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 100
| 4
| 30
| 25
| 0.894118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
97de1b28a40c0e13e41f29557408d1385695ddcc
| 38
|
py
|
Python
|
skypelib/__init__.py
|
delan/skrollback
|
fa86c4a984a02581980ff9aad5d3c18ffd1c828f
|
[
"0BSD"
] | null | null | null |
skypelib/__init__.py
|
delan/skrollback
|
fa86c4a984a02581980ff9aad5d3c18ffd1c828f
|
[
"0BSD"
] | null | null | null |
skypelib/__init__.py
|
delan/skrollback
|
fa86c4a984a02581980ff9aad5d3c18ffd1c828f
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
import monitor
| 9.5
| 21
| 0.736842
| 6
| 38
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 3
| 22
| 12.666667
| 0.848485
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3f09d40834a622200ad971def26d275c5b003034
| 89
|
py
|
Python
|
info/modules/index/__init__.py
|
moonbria/test1
|
05893bd91d416ca4093e4619ede427434fa665cc
|
[
"MIT"
] | null | null | null |
info/modules/index/__init__.py
|
moonbria/test1
|
05893bd91d416ca4093e4619ede427434fa665cc
|
[
"MIT"
] | null | null | null |
info/modules/index/__init__.py
|
moonbria/test1
|
05893bd91d416ca4093e4619ede427434fa665cc
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Blueprint for the "index" module; view functions register their routes on it.
index_blu = Blueprint("index", __name__)

# Imported after the blueprint exists so the views can decorate routes onto
# `index_blu` without a circular import.
from .views import *
| 29.666667
| 40
| 0.786517
| 12
| 89
| 5.416667
| 0.666667
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123596
| 89
| 3
| 41
| 29.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
3f115cb244168a74e406e9bf962b67347276cc64
| 788
|
py
|
Python
|
tests/test_get_month.py
|
datalab42/kyd-downloader
|
5de439d41998a6035527d0a6dfe8acded2798bfc
|
[
"MIT"
] | 2
|
2020-09-28T03:32:15.000Z
|
2020-12-13T02:38:17.000Z
|
tests/test_get_month.py
|
datalab42/kyd-downloader
|
5de439d41998a6035527d0a6dfe8acded2798bfc
|
[
"MIT"
] | 2
|
2020-08-17T22:30:42.000Z
|
2021-03-31T19:47:37.000Z
|
tests/test_get_month.py
|
datalab42/kyd-downloader
|
5de439d41998a6035527d0a6dfe8acded2798bfc
|
[
"MIT"
] | 5
|
2020-07-21T20:14:00.000Z
|
2021-09-09T07:59:09.000Z
|
import sys

# Make the functions package importable when the tests are run from this
# directory (path is relative to the working directory, not this file).
sys.path.append('../functions/')

from datetime import date
from kyd.data.downloaders import get_month
def test_get_month():
    """get_month(d, -k) returns the first day of the month k months before d."""
    anchor = date(2020, 7, 4)
    first_days = [
        date(2020, 7, 1), date(2020, 6, 1), date(2020, 5, 1), date(2020, 4, 1),
        date(2020, 3, 1), date(2020, 2, 1), date(2020, 1, 1), date(2019, 12, 1),
        date(2019, 11, 1), date(2019, 10, 1),
    ]
    # Offsets 0, -1, ..., -9 walk back month by month, crossing the year
    # boundary at -7.
    for months_back, expected in enumerate(first_days):
        assert get_month(anchor, -months_back) == expected
| 41.473684
| 64
| 0.596447
| 141
| 788
| 3.241135
| 0.219858
| 0.297593
| 0.21663
| 0.393873
| 0.544858
| 0.544858
| 0.544858
| 0.492341
| 0
| 0
| 0
| 0.213141
| 0.208122
| 788
| 18
| 65
| 43.777778
| 0.519231
| 0
| 0
| 0
| 0
| 0
| 0.016905
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.066667
| true
| 0
| 0.2
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
58bb964a449c2838da0974b566bb6067d7da8a97
| 102
|
py
|
Python
|
pyqt_image_file_explorer_table_widget/__init__.py
|
yjg30737/pyqt-image-file-explorer
|
1971a9e985e4d40b2ff5e8b196a696bb89b8e4f2
|
[
"MIT"
] | 2
|
2022-02-12T13:13:51.000Z
|
2022-02-23T12:08:57.000Z
|
pyqt_image_file_explorer_table_widget/__init__.py
|
yjg30737/pyqt-image-file-explorer
|
1971a9e985e4d40b2ff5e8b196a696bb89b8e4f2
|
[
"MIT"
] | null | null | null |
pyqt_image_file_explorer_table_widget/__init__.py
|
yjg30737/pyqt-image-file-explorer
|
1971a9e985e4d40b2ff5e8b196a696bb89b8e4f2
|
[
"MIT"
] | null | null | null |
from .imageFileExplorerTableWidget import *
from .imageLabelWidget import *
from .imageWidget import *
| 34
| 43
| 0.833333
| 9
| 102
| 9.444444
| 0.555556
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107843
| 102
| 3
| 44
| 34
| 0.934066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
18c9420c63433008ef5ed3442c15e9c6734641c1
| 10,417
|
py
|
Python
|
src/eve_esi_jobs/examples/work_orders.py
|
DonalChilde/eve-esi
|
8050e988a5460aa3dc97e573880fcda7243026da
|
[
"MIT"
] | null | null | null |
src/eve_esi_jobs/examples/work_orders.py
|
DonalChilde/eve-esi
|
8050e988a5460aa3dc97e573880fcda7243026da
|
[
"MIT"
] | null | null | null |
src/eve_esi_jobs/examples/work_orders.py
|
DonalChilde/eve-esi
|
8050e988a5460aa3dc97e573880fcda7243026da
|
[
"MIT"
] | null | null | null |
import logging
from eve_esi_jobs import models
from eve_esi_jobs.examples.jobs import get_markets_region_id_history
# Module-level logger; the NullHandler keeps this library silent unless the
# host application configures logging handlers itself.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
def example_workorder():
    """Build the showcase EsiWorkOrder containing four example jobs.

    The jobs demonstrate the available callbacks: saving raw results as
    json/yaml/csv, saving the job itself (with response metadata), and
    fetching a paged endpoint.

    Returns:
        models.EsiWorkOrder: a fully-populated workorder (not yet executed).
    """
    region_id = 10000002  # presumably The Forge region -- confirm
    type_id = 34  # presumably Tritanium -- confirm (descriptions spell it "Tritainium")
    work_order = models.EsiWorkOrder(
        name="example_workorder",
        output_path="samples/workorder_output/${ewo_name}",
        description=(
            "An example of a workorder, with a collection of "
            "jobs whose output is gathered under a file path defined in the workorder."
        ),
    )
    # Job 1: save the raw market-history result twice, as json and as yaml.
    # NOTE(review): both result files use an "-esi-job" suffix in their names
    # even though they hold results, not jobs -- confirm intended.
    callbacks = []
    callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_json_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_yaml_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}-esi-job.yaml"
            },
        )
    )
    job = get_markets_region_id_history(region_id, type_id, callbacks)
    job.name = "Save market history as json"
    job.id_ = 1
    job.description = (
        "Get the market history for Tritainium in The Forge "
        "region, and save it to a json file."
    )
    work_order.jobs.append(job)
    #####
    # Job 2: save the result AND the job (with response metadata) to
    # separate json files.
    callbacks = []
    callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_json_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}.json"
            },
        )
    )
    job_2 = get_markets_region_id_history(region_id, type_id, callbacks)
    job_2.name = "Save market history and job as json"
    job_2.id_ = 2
    job_2.description = (
        "Get the market history for Tritainium in The Forge "
        "region, and save it to a json file. Also save the job, "
        "including the response metadata, to a separate json file."
    )
    work_order.jobs.append(job_2)
    #####
    # Job 3: save the result as csv (with extra columns and a fixed column
    # order) plus the full job as json.
    callbacks = []
    callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    callbacks.append(
        models.JobCallback(
            callback_id="save_list_of_dict_result_to_csv_file",
            kwargs={
                # Constant columns appended to every csv row.
                "additional_fields": {"region_id": 10000002, "type_id": 34},
                # Explicit column order for the csv output.
                "field_names": [
                    "date",
                    "average",
                    "highest",
                    "lowest",
                    "order_count",
                    "volume",
                    "region_id",
                    "type_id",
                ],
                "file_path_template": "${esi_job_id_}/market-history-${region_id}-${type_id}.csv",
            },
        )
    )
    job_3 = get_markets_region_id_history(region_id, type_id, callbacks)
    job_3.name = "Save market history as csv and job with data as json"
    job_3.id_ = 3
    job_3.description = (
        "Get the market history for Tritainium in The Forge "
        "region, and save it to a csv file. The region_id and type_id added to each row, "
        "and the columns are given a custom order. "
        "Also save the job, including the response metadata and the result data, "
        "to a separate json file."
    )
    work_order.jobs.append(job_3)
    #####
    # Job 4: demonstrate a paged endpoint; all pages saved to one json file.
    callbacks = []
    callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_json_file",
            kwargs={
                "file_path_template": "${esi_job_id_}/public-contracts/${region_id}.json"
            },
        )
    )
    job_4 = models.EsiJob(
        name="get paged data",
        description="Get the all the pages from a paged api.",
        id_=4,
        op_id="get_contracts_public_region_id",
        parameters={"region_id": 10000002},
        callbacks=callbacks,
    )
    work_order.jobs.append(job_4)
    return work_order
def response_to_job_json_file():
    """Build a workorder whose single job is saved, with response metadata
    but without result data, to a json file.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    work_order = models.EsiWorkOrder(
        name="response_to_job_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving a completed job to a json file,"
            # Fixed typo in the user-facing text: "intentionaly".
            " including the response data. Result data intentionally left out."
        ),
    )
    job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    work_order.jobs.append(job)
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    return work_order
def result_to_job_json_file():
    """Build a workorder saving the completed job, with result data, to json.

    NOTE(review): the callback registered here is identical to the one in
    response_to_job_json_file (save_esi_job_to_json_file with no kwargs
    selecting result data); whether the result actually lands in the file
    depends on that callback's defaults -- confirm, possible copy-paste.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    work_order = models.EsiWorkOrder(
        name="result_to_job_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving a completed job to a json file, with result data"
        ),
    )
    job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    work_order.jobs.append(job)
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    return work_order
def result_to_json_file_and_response_to_json_file():
    """Build a workorder saving the raw result and the job (with response
    metadata) to two separate json files.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    work_order = models.EsiWorkOrder(
        name="result_to_json_file_and_response_to_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving the raw results to a json file,"
            " and the job with response data to a separate json file"
        ),
    )
    job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    work_order.jobs.append(job)
    # Job (with response metadata) -> *-esi-job.json
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    # Raw result -> *.json
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}.json"
            },
        )
    )
    return work_order
def result_and_response_to_job_json_file():
    """Build a workorder saving the completed job, with result and response
    data, to a json file.

    NOTE(review): the body is identical to response_to_job_json_file apart
    from name/description; nothing visible here requests result data --
    presumably save_esi_job_to_json_file's defaults differ per job state.
    Confirm against the callback implementation.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    work_order = models.EsiWorkOrder(
        name="result_and_response_to_job_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving a completed job to a json file,"
            " with result and response data"
        ),
    )
    job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    work_order.jobs.append(job)
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_esi_job_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}-esi-job.json"
            },
        )
    )
    return work_order
def result_to_json_file():
    """Build a workorder whose single job dumps its raw result to a json file.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    ewo = models.EsiWorkOrder(
        name="result_to_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=("An example of saving the raw results to a json file."),
    )
    history_job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    history_job.callbacks.append(
        models.JobCallback(
            callback_id="save_result_to_json_file",
            kwargs={
                "file_path_template": "data/market-history/${region_id}-${type_id}.json"
            },
        )
    )
    ewo.jobs.append(history_job)
    return ewo
def result_to_csv_file():
    """Build a workorder saving the json result rows to a csv file,
    demonstrating column reordering and extra constant columns.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    work_order = models.EsiWorkOrder(
        name="result_to_csv_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving the json results to a csv file. Also, shows "
            "reordering columns, and adding additional columns"
        ),
    )
    job = models.EsiJob(
        op_id="get_markets_region_id_history",
        parameters={"region_id": 10000002, "type_id": 34},
    )
    work_order.jobs.append(job)
    job.callbacks.append(
        models.JobCallback(
            callback_id="save_list_of_dict_result_to_csv_file",
            kwargs={
                # Constant columns appended to every csv row.
                "additional_fields": {"region_id": 10000002, "type_id": 34},
                # Explicit column order for the csv output.
                "field_names": [
                    "date",
                    "average",
                    "highest",
                    "lowest",
                    "order_count",
                    "volume",
                    "region_id",
                    "type_id",
                ],
                "file_path_template": "data/market-history/${region_id}-${type_id}.csv",
            },
        )
    )
    return work_order
def result_with_pages_to_json_file():
    """Build a workorder exercising a paged endpoint; the raw pages are
    saved to a single json file.

    Returns:
        models.EsiWorkOrder: a single-job workorder (not yet executed).
    """
    save_result = models.JobCallback(
        callback_id="save_result_to_json_file",
        kwargs={"file_path_template": "data/public-contracts/${region_id}.json"},
    )
    contracts_job = models.EsiJob(
        op_id="get_contracts_public_region_id",
        parameters={"region_id": 10000002},
    )
    contracts_job.callbacks.append(save_result)
    ewo = models.EsiWorkOrder(
        name="result_with_pages_to_json_file",
        output_path="samples/order_output/${ewo_name}",
        description=(
            "An example of saving the raw results with a paged api to a json file."
        ),
    )
    ewo.jobs.append(contracts_job)
    return ewo
| 31.953988
| 106
| 0.586157
| 1,221
| 10,417
| 4.670762
| 0.093366
| 0.061722
| 0.035069
| 0.044187
| 0.865159
| 0.829739
| 0.822199
| 0.809048
| 0.780992
| 0.768894
| 0
| 0.016867
| 0.305654
| 10,417
| 325
| 107
| 32.052308
| 0.771602
| 0
| 0
| 0.557047
| 0
| 0
| 0.378004
| 0.180446
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026846
| false
| 0
| 0.010067
| 0
| 0.063758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
18d3dcf9810646a4ddaf4314d906003756ef779d
| 5,089
|
py
|
Python
|
distla/distla_core/distla_core/blas/summa/test_summa.py
|
google/distla_core
|
7f0d8ab7b847a75e0fc713627488643a8984712a
|
[
"Apache-2.0"
] | 2
|
2021-12-19T21:17:06.000Z
|
2021-12-25T09:19:47.000Z
|
distla/distla_core/distla_core/blas/summa/test_summa.py
|
google/distla_core
|
7f0d8ab7b847a75e0fc713627488643a8984712a
|
[
"Apache-2.0"
] | null | null | null |
distla/distla_core/distla_core/blas/summa/test_summa.py
|
google/distla_core
|
7f0d8ab7b847a75e0fc713627488643a8984712a
|
[
"Apache-2.0"
] | 1
|
2021-12-25T09:19:56.000Z
|
2021-12-25T09:19:56.000Z
|
# Copyright 2021 The Distla Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# Lint as: python3
"""Contains tests of the functions in summa.py.
"""
import functools
import jax
import jax.numpy as jnp
from jax import lax
import numpy as np
import pytest
from distla_core.utils import pops
from distla_core.blas.summa import summa
# Working dtype for every test matrix.
DTYPE = jnp.float32
# Device-mesh constants shared with the pops utility module.
AXIS_NAME = pops.AXIS_NAME
NROW = pops.NROWS
NCOL = pops.NCOLS
# Parameter grids: matrix shapes, SUMMA panel sizes, and matmul precisions.
matrix_shapes = [(16, 16), (32, 16), (16, 32), (128, 128)]
p_szs = [3, 4, 8, 16]
precisions = [lax.Precision.DEFAULT, lax.Precision.HIGH, lax.Precision.HIGHEST]
@pytest.mark.parametrize("matrix_shape", matrix_shapes)
@pytest.mark.parametrize("p_sz", p_szs)
@pytest.mark.parametrize("precision", precisions)
def test_summa_TT(matrix_shape, p_sz, precision):
    """The A.T @ B.T case is unsupported: summa must raise NotImplementedError."""
    np.random.seed(10)  # fixed seed for reproducibility
    A = np.random.randn(*matrix_shape).astype(DTYPE)
    B = np.random.randn(*matrix_shape).astype(DTYPE)
    Ap = pops.distribute(A)
    Bp = pops.distribute(B)
    summa_f = functools.partial(
        summa.summa,
        p_sz=p_sz,
        transpose_A=True,
        transpose_B=True,
        precision=precision)
    # The error is raised under pmap when both transpose flags are set.
    with pytest.raises(NotImplementedError):
        _ = jax.pmap(summa_f, axis_name=AXIS_NAME)(Ap, Bp)
@pytest.mark.parametrize("matrix_shape", matrix_shapes)
@pytest.mark.parametrize("p_sz", p_szs)
@pytest.mark.parametrize("precision", precisions)
def test_summa_TN(matrix_shape, p_sz, precision):
    """Distributed A.T @ B agrees with the local reference product."""
    np.random.seed(10)  # fixed seed for reproducibility
    A = np.random.randn(*matrix_shape).astype(DTYPE)
    B = np.random.randn(*matrix_shape).astype(DTYPE)
    # Local (single-device) reference result.
    C = pops.dot(A.T, B, precision=precision)
    Ap = pops.distribute(A)
    Bp = pops.distribute(B)
    summa_f = functools.partial(
        summa.summa,
        p_sz=p_sz,
        transpose_A=True,
        transpose_B=False,
        precision=precision)
    Cp = jax.pmap(summa_f, axis_name=AXIS_NAME)(Ap, Bp)
    Cp = pops.undistribute(Cp)
    # Tolerance scales with the result's norm and the dtype epsilon.
    atol = jnp.finfo(DTYPE).eps * jnp.linalg.norm(C)
    np.testing.assert_allclose(C, Cp, atol=atol)
@pytest.mark.parametrize("matrix_shape", matrix_shapes)
@pytest.mark.parametrize("p_sz", p_szs)
@pytest.mark.parametrize("precision", precisions)
def test_summa_NT(matrix_shape, p_sz, precision):
    """Distributed A @ B.T agrees with the local reference product."""
    np.random.seed(10)  # fixed seed for reproducibility
    lhs = np.random.randn(*matrix_shape).astype(DTYPE)
    rhs = np.random.randn(*matrix_shape).astype(DTYPE)
    # Local (single-device) reference result.
    expected = pops.dot(lhs, rhs.T, precision=precision)
    multiply = functools.partial(
        summa.summa,
        p_sz=p_sz,
        transpose_A=False,
        transpose_B=True,
        precision=precision)
    product = jax.pmap(multiply, axis_name=AXIS_NAME)(
        pops.distribute(lhs), pops.distribute(rhs))
    product = pops.undistribute(product)
    # Tolerance scales with the result's norm and the dtype epsilon.
    tolerance = jnp.finfo(DTYPE).eps * jnp.linalg.norm(expected)
    np.testing.assert_allclose(expected, product, atol=tolerance)
@pytest.mark.parametrize("matrix_shape", matrix_shapes)
@pytest.mark.parametrize("p_sz", p_szs)
@pytest.mark.parametrize("precision", precisions)
def test_summa_NN(matrix_shape, p_sz, precision):
    """Distributed A @ B (no transposes) agrees with the local reference."""
    np.random.seed(10)  # fixed seed for reproducibility
    lhs = np.random.randn(*matrix_shape).astype(DTYPE)
    # Transposed here so the shapes conform for the plain product.
    rhs = np.random.randn(*matrix_shape).astype(DTYPE).T
    # Local (single-device) reference result.
    expected = pops.dot(lhs, rhs, precision=precision)
    multiply = functools.partial(
        summa.summa,
        p_sz=p_sz,
        transpose_A=False,
        transpose_B=False,
        precision=precision)
    product = jax.pmap(multiply, axis_name=AXIS_NAME)(
        pops.distribute(lhs), pops.distribute(rhs))
    product = pops.undistribute(product)
    # Tolerance scales with the result's norm and the dtype epsilon.
    tolerance = jnp.finfo(DTYPE).eps * jnp.linalg.norm(expected)
    np.testing.assert_allclose(expected, product, atol=tolerance)
def test_summa_TN_bad_shape():
    """Nonconforming shapes for A.T @ B must raise a TypeError."""
    matrix = np.ones((4, 8), dtype=DTYPE)
    lhs_dist = pops.distribute(matrix)
    rhs_dist = pops.distribute(matrix.T)
    multiply = jax.pmap(
        functools.partial(summa.summa, p_sz=1, transpose_A=True, transpose_B=False),
        axis_name=AXIS_NAME)
    with pytest.raises(TypeError):
        _ = multiply(lhs_dist, rhs_dist)
def test_summa_NT_bad_shape():
    """Nonconforming shapes for A @ B.T must raise a TypeError."""
    matrix = np.ones((4, 8), dtype=DTYPE)
    lhs_dist = pops.distribute(matrix)
    rhs_dist = pops.distribute(matrix.T)
    multiply = jax.pmap(
        functools.partial(summa.summa, p_sz=1, transpose_A=False, transpose_B=True),
        axis_name=AXIS_NAME)
    with pytest.raises(TypeError):
        _ = multiply(lhs_dist, rhs_dist)
def test_summa_NN_bad_shape():
    """summa with no transposes must reject non-conformable operand shapes."""
    full = np.ones((4, 8), dtype=DTYPE)
    pmapped = jax.pmap(
        functools.partial(
            summa.summa, p_sz=1, transpose_A=False, transpose_B=False),
        axis_name=AXIS_NAME)
    with pytest.raises(TypeError):
        _ = pmapped(pops.distribute(full), pops.distribute(full))
| 31.41358
| 79
| 0.706033
| 783
| 5,089
| 4.422733
| 0.195402
| 0.069882
| 0.072769
| 0.043893
| 0.743575
| 0.734334
| 0.730869
| 0.730869
| 0.730869
| 0.730869
| 0
| 0.0118
| 0.150717
| 5,089
| 161
| 80
| 31.608696
| 0.789449
| 0.141089
| 0
| 0.737705
| 0
| 0
| 0.022989
| 0
| 0
| 0
| 0
| 0
| 0.02459
| 1
| 0.057377
| false
| 0
| 0.065574
| 0
| 0.122951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e12f613748d1b4bd07846bff1d2a6754cba591af
| 2,231
|
py
|
Python
|
anndata/tests/test_inplace_subset.py
|
czbiohub/anndata
|
e65b3f7e11c304b75fbfc17956f5adc0d8715a2f
|
[
"BSD-3-Clause"
] | null | null | null |
anndata/tests/test_inplace_subset.py
|
czbiohub/anndata
|
e65b3f7e11c304b75fbfc17956f5adc0d8715a2f
|
[
"BSD-3-Clause"
] | null | null | null |
anndata/tests/test_inplace_subset.py
|
czbiohub/anndata
|
e65b3f7e11c304b75fbfc17956f5adc0d8715a2f
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pytest
from sklearn.utils.testing import assert_array_equal
from scipy import sparse
from anndata.tests.helpers import gen_adata, subset_func, asarray
@pytest.fixture(
    params=[np.array, sparse.csr_matrix, sparse.csc_matrix],
    ids=["np_array", "scipy_csr", "scipy_csc"],
)
def matrix_type(request):
    # Parametrized constructor for the AnnData ``X`` matrix: dense ndarray,
    # CSR sparse, or CSC sparse.  Every test that takes this fixture runs
    # once per storage format.
    return request.param
# TODO: Test values of .uns
def test_inplace_subset_var(matrix_type, subset_func):
    """``_inplace_subset_var`` must agree with copying a var-sliced view."""
    orig = gen_adata((30, 30), X_type=matrix_type)
    subset_idx = subset_func(orig.var_names)

    expected = orig[:, subset_idx].copy()
    actual = orig.copy()
    actual._inplace_subset_var(subset_idx)

    assert_array_equal(asarray(expected.X), asarray(actual.X))
    assert_array_equal(expected.obs, actual.obs)
    assert_array_equal(expected.var, actual.var)
    for key in expected.obsm:
        assert_array_equal(
            asarray(expected.obsm[key]), asarray(actual.obsm[key])
        )
        # Subsetting along var must leave obs-aligned arrays untouched.
        assert_array_equal(asarray(orig.obsm[key]), asarray(actual.obsm[key]))
    for key in expected.varm:
        assert_array_equal(
            asarray(expected.varm[key]), asarray(actual.varm[key])
        )
    for key in expected.layers:
        assert_array_equal(
            asarray(expected.layers[key]), asarray(actual.layers[key])
        )
def test_inplace_subset_obs(matrix_type, subset_func):
    """``_inplace_subset_obs`` must agree with copying an obs-sliced view."""
    orig = gen_adata((30, 30), X_type=matrix_type)
    subset_idx = subset_func(orig.obs_names)

    expected = orig[subset_idx, :].copy()
    actual = orig.copy()
    actual._inplace_subset_obs(subset_idx)

    assert_array_equal(asarray(expected.X), asarray(actual.X))
    assert_array_equal(expected.obs, actual.obs)
    assert_array_equal(expected.var, actual.var)
    for key in expected.obsm:
        assert_array_equal(
            asarray(expected.obsm[key]), asarray(actual.obsm[key])
        )
    for key in expected.varm:
        assert_array_equal(
            asarray(expected.varm[key]), asarray(actual.varm[key])
        )
        # Subsetting along obs must leave var-aligned arrays untouched.
        assert_array_equal(asarray(orig.varm[key]), asarray(actual.varm[key]))
    for key in expected.layers:
        assert_array_equal(
            asarray(expected.layers[key]), asarray(actual.layers[key])
        )
| 32.808824
| 76
| 0.69117
| 320
| 2,231
| 4.5375
| 0.171875
| 0.110193
| 0.165289
| 0.158402
| 0.763085
| 0.763085
| 0.725895
| 0.725895
| 0.725895
| 0.725895
| 0
| 0.004454
| 0.19498
| 2,231
| 67
| 77
| 33.298507
| 0.804009
| 0.011206
| 0
| 0.509091
| 0
| 0
| 0.011797
| 0
| 0
| 0
| 0
| 0.014925
| 0.272727
| 1
| 0.054545
| false
| 0
| 0.090909
| 0.018182
| 0.163636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e19976f39c9463b520075aa0c1e924fa5e5f158f
| 244
|
py
|
Python
|
tests/utils.py
|
DramatikMan/flask-practice-two
|
ab012172041127579f0c07296a3913ebe17f4e94
|
[
"MIT"
] | null | null | null |
tests/utils.py
|
DramatikMan/flask-practice-two
|
ab012172041127579f0c07296a3913ebe17f4e94
|
[
"MIT"
] | null | null | null |
tests/utils.py
|
DramatikMan/flask-practice-two
|
ab012172041127579f0c07296a3913ebe17f4e94
|
[
"MIT"
] | null | null | null |
def login(client, username, password):
    """POST the given credentials to ``/login``, following redirects.

    Returns whatever the test client's ``post`` returns (a response object).
    """
    return client.post(
        '/login',
        data={'username': username, 'password': password},
        follow_redirects=True,
    )
def logout(client):
    """GET ``/logout``, following redirects, and return the response."""
    response = client.get('/logout', follow_redirects=True)
    return response
| 30.5
| 69
| 0.741803
| 30
| 244
| 5.966667
| 0.5
| 0.178771
| 0.212291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127049
| 244
| 7
| 70
| 34.857143
| 0.840376
| 0
| 0
| 0
| 0
| 0
| 0.053279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
e1a55986a33ae9963d8d26156ba0f8b801b54e4f
| 118,870
|
py
|
Python
|
swagger_client/models/get_characters_character_id_stats_combat.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/models/get_characters_character_id_stats_combat.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
swagger_client/models/get_characters_character_id_stats_combat.py
|
rseichter/bootini-star
|
a80258f01a05e4df38748b8cb47dfadabd42c20d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online # noqa: E501
OpenAPI spec version: 0.8.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GetCharactersCharacterIdStatsCombat(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cap_drainedby_npc': 'int',
'cap_drainedby_pc': 'int',
'cap_draining_pc': 'int',
'criminal_flag_set': 'int',
'damage_from_np_cs_amount': 'int',
'damage_from_np_cs_num_shots': 'int',
'damage_from_players_bomb_amount': 'int',
'damage_from_players_bomb_num_shots': 'int',
'damage_from_players_combat_drone_amount': 'int',
'damage_from_players_combat_drone_num_shots': 'int',
'damage_from_players_energy_amount': 'int',
'damage_from_players_energy_num_shots': 'int',
'damage_from_players_fighter_bomber_amount': 'int',
'damage_from_players_fighter_bomber_num_shots': 'int',
'damage_from_players_fighter_drone_amount': 'int',
'damage_from_players_fighter_drone_num_shots': 'int',
'damage_from_players_hybrid_amount': 'int',
'damage_from_players_hybrid_num_shots': 'int',
'damage_from_players_missile_amount': 'int',
'damage_from_players_missile_num_shots': 'int',
'damage_from_players_projectile_amount': 'int',
'damage_from_players_projectile_num_shots': 'int',
'damage_from_players_smart_bomb_amount': 'int',
'damage_from_players_smart_bomb_num_shots': 'int',
'damage_from_players_super_amount': 'int',
'damage_from_players_super_num_shots': 'int',
'damage_from_structures_total_amount': 'int',
'damage_from_structures_total_num_shots': 'int',
'damage_to_players_bomb_amount': 'int',
'damage_to_players_bomb_num_shots': 'int',
'damage_to_players_combat_drone_amount': 'int',
'damage_to_players_combat_drone_num_shots': 'int',
'damage_to_players_energy_amount': 'int',
'damage_to_players_energy_num_shots': 'int',
'damage_to_players_fighter_bomber_amount': 'int',
'damage_to_players_fighter_bomber_num_shots': 'int',
'damage_to_players_fighter_drone_amount': 'int',
'damage_to_players_fighter_drone_num_shots': 'int',
'damage_to_players_hybrid_amount': 'int',
'damage_to_players_hybrid_num_shots': 'int',
'damage_to_players_missile_amount': 'int',
'damage_to_players_missile_num_shots': 'int',
'damage_to_players_projectile_amount': 'int',
'damage_to_players_projectile_num_shots': 'int',
'damage_to_players_smart_bomb_amount': 'int',
'damage_to_players_smart_bomb_num_shots': 'int',
'damage_to_players_super_amount': 'int',
'damage_to_players_super_num_shots': 'int',
'damage_to_structures_total_amount': 'int',
'damage_to_structures_total_num_shots': 'int',
'deaths_high_sec': 'int',
'deaths_low_sec': 'int',
'deaths_null_sec': 'int',
'deaths_pod_high_sec': 'int',
'deaths_pod_low_sec': 'int',
'deaths_pod_null_sec': 'int',
'deaths_pod_wormhole': 'int',
'deaths_wormhole': 'int',
'drone_engage': 'int',
'dscans': 'int',
'duel_requested': 'int',
'engagement_register': 'int',
'kills_assists': 'int',
'kills_high_sec': 'int',
'kills_low_sec': 'int',
'kills_null_sec': 'int',
'kills_pod_high_sec': 'int',
'kills_pod_low_sec': 'int',
'kills_pod_null_sec': 'int',
'kills_pod_wormhole': 'int',
'kills_wormhole': 'int',
'npc_flag_set': 'int',
'probe_scans': 'int',
'pvp_flag_set': 'int',
'repair_armor_by_remote_amount': 'int',
'repair_armor_remote_amount': 'int',
'repair_armor_self_amount': 'int',
'repair_capacitor_by_remote_amount': 'int',
'repair_capacitor_remote_amount': 'int',
'repair_capacitor_self_amount': 'int',
'repair_hull_by_remote_amount': 'int',
'repair_hull_remote_amount': 'int',
'repair_hull_self_amount': 'int',
'repair_shield_by_remote_amount': 'int',
'repair_shield_remote_amount': 'int',
'repair_shield_self_amount': 'int',
'self_destructs': 'int',
'warp_scramble_pc': 'int',
'warp_scrambledby_npc': 'int',
'warp_scrambledby_pc': 'int',
'weapon_flag_set': 'int',
'webifiedby_npc': 'int',
'webifiedby_pc': 'int',
'webifying_pc': 'int'
}
attribute_map = {
'cap_drainedby_npc': 'cap_drainedby_npc',
'cap_drainedby_pc': 'cap_drainedby_pc',
'cap_draining_pc': 'cap_draining_pc',
'criminal_flag_set': 'criminal_flag_set',
'damage_from_np_cs_amount': 'damage_from_np_cs_amount',
'damage_from_np_cs_num_shots': 'damage_from_np_cs_num_shots',
'damage_from_players_bomb_amount': 'damage_from_players_bomb_amount',
'damage_from_players_bomb_num_shots': 'damage_from_players_bomb_num_shots',
'damage_from_players_combat_drone_amount': 'damage_from_players_combat_drone_amount',
'damage_from_players_combat_drone_num_shots': 'damage_from_players_combat_drone_num_shots',
'damage_from_players_energy_amount': 'damage_from_players_energy_amount',
'damage_from_players_energy_num_shots': 'damage_from_players_energy_num_shots',
'damage_from_players_fighter_bomber_amount': 'damage_from_players_fighter_bomber_amount',
'damage_from_players_fighter_bomber_num_shots': 'damage_from_players_fighter_bomber_num_shots',
'damage_from_players_fighter_drone_amount': 'damage_from_players_fighter_drone_amount',
'damage_from_players_fighter_drone_num_shots': 'damage_from_players_fighter_drone_num_shots',
'damage_from_players_hybrid_amount': 'damage_from_players_hybrid_amount',
'damage_from_players_hybrid_num_shots': 'damage_from_players_hybrid_num_shots',
'damage_from_players_missile_amount': 'damage_from_players_missile_amount',
'damage_from_players_missile_num_shots': 'damage_from_players_missile_num_shots',
'damage_from_players_projectile_amount': 'damage_from_players_projectile_amount',
'damage_from_players_projectile_num_shots': 'damage_from_players_projectile_num_shots',
'damage_from_players_smart_bomb_amount': 'damage_from_players_smart_bomb_amount',
'damage_from_players_smart_bomb_num_shots': 'damage_from_players_smart_bomb_num_shots',
'damage_from_players_super_amount': 'damage_from_players_super_amount',
'damage_from_players_super_num_shots': 'damage_from_players_super_num_shots',
'damage_from_structures_total_amount': 'damage_from_structures_total_amount',
'damage_from_structures_total_num_shots': 'damage_from_structures_total_num_shots',
'damage_to_players_bomb_amount': 'damage_to_players_bomb_amount',
'damage_to_players_bomb_num_shots': 'damage_to_players_bomb_num_shots',
'damage_to_players_combat_drone_amount': 'damage_to_players_combat_drone_amount',
'damage_to_players_combat_drone_num_shots': 'damage_to_players_combat_drone_num_shots',
'damage_to_players_energy_amount': 'damage_to_players_energy_amount',
'damage_to_players_energy_num_shots': 'damage_to_players_energy_num_shots',
'damage_to_players_fighter_bomber_amount': 'damage_to_players_fighter_bomber_amount',
'damage_to_players_fighter_bomber_num_shots': 'damage_to_players_fighter_bomber_num_shots',
'damage_to_players_fighter_drone_amount': 'damage_to_players_fighter_drone_amount',
'damage_to_players_fighter_drone_num_shots': 'damage_to_players_fighter_drone_num_shots',
'damage_to_players_hybrid_amount': 'damage_to_players_hybrid_amount',
'damage_to_players_hybrid_num_shots': 'damage_to_players_hybrid_num_shots',
'damage_to_players_missile_amount': 'damage_to_players_missile_amount',
'damage_to_players_missile_num_shots': 'damage_to_players_missile_num_shots',
'damage_to_players_projectile_amount': 'damage_to_players_projectile_amount',
'damage_to_players_projectile_num_shots': 'damage_to_players_projectile_num_shots',
'damage_to_players_smart_bomb_amount': 'damage_to_players_smart_bomb_amount',
'damage_to_players_smart_bomb_num_shots': 'damage_to_players_smart_bomb_num_shots',
'damage_to_players_super_amount': 'damage_to_players_super_amount',
'damage_to_players_super_num_shots': 'damage_to_players_super_num_shots',
'damage_to_structures_total_amount': 'damage_to_structures_total_amount',
'damage_to_structures_total_num_shots': 'damage_to_structures_total_num_shots',
'deaths_high_sec': 'deaths_high_sec',
'deaths_low_sec': 'deaths_low_sec',
'deaths_null_sec': 'deaths_null_sec',
'deaths_pod_high_sec': 'deaths_pod_high_sec',
'deaths_pod_low_sec': 'deaths_pod_low_sec',
'deaths_pod_null_sec': 'deaths_pod_null_sec',
'deaths_pod_wormhole': 'deaths_pod_wormhole',
'deaths_wormhole': 'deaths_wormhole',
'drone_engage': 'drone_engage',
'dscans': 'dscans',
'duel_requested': 'duel_requested',
'engagement_register': 'engagement_register',
'kills_assists': 'kills_assists',
'kills_high_sec': 'kills_high_sec',
'kills_low_sec': 'kills_low_sec',
'kills_null_sec': 'kills_null_sec',
'kills_pod_high_sec': 'kills_pod_high_sec',
'kills_pod_low_sec': 'kills_pod_low_sec',
'kills_pod_null_sec': 'kills_pod_null_sec',
'kills_pod_wormhole': 'kills_pod_wormhole',
'kills_wormhole': 'kills_wormhole',
'npc_flag_set': 'npc_flag_set',
'probe_scans': 'probe_scans',
'pvp_flag_set': 'pvp_flag_set',
'repair_armor_by_remote_amount': 'repair_armor_by_remote_amount',
'repair_armor_remote_amount': 'repair_armor_remote_amount',
'repair_armor_self_amount': 'repair_armor_self_amount',
'repair_capacitor_by_remote_amount': 'repair_capacitor_by_remote_amount',
'repair_capacitor_remote_amount': 'repair_capacitor_remote_amount',
'repair_capacitor_self_amount': 'repair_capacitor_self_amount',
'repair_hull_by_remote_amount': 'repair_hull_by_remote_amount',
'repair_hull_remote_amount': 'repair_hull_remote_amount',
'repair_hull_self_amount': 'repair_hull_self_amount',
'repair_shield_by_remote_amount': 'repair_shield_by_remote_amount',
'repair_shield_remote_amount': 'repair_shield_remote_amount',
'repair_shield_self_amount': 'repair_shield_self_amount',
'self_destructs': 'self_destructs',
'warp_scramble_pc': 'warp_scramble_pc',
'warp_scrambledby_npc': 'warp_scrambledby_npc',
'warp_scrambledby_pc': 'warp_scrambledby_pc',
'weapon_flag_set': 'weapon_flag_set',
'webifiedby_npc': 'webifiedby_npc',
'webifiedby_pc': 'webifiedby_pc',
'webifying_pc': 'webifying_pc'
}
def __init__(self, cap_drainedby_npc=None, cap_drainedby_pc=None, cap_draining_pc=None, criminal_flag_set=None, damage_from_np_cs_amount=None, damage_from_np_cs_num_shots=None, damage_from_players_bomb_amount=None, damage_from_players_bomb_num_shots=None, damage_from_players_combat_drone_amount=None, damage_from_players_combat_drone_num_shots=None, damage_from_players_energy_amount=None, damage_from_players_energy_num_shots=None, damage_from_players_fighter_bomber_amount=None, damage_from_players_fighter_bomber_num_shots=None, damage_from_players_fighter_drone_amount=None, damage_from_players_fighter_drone_num_shots=None, damage_from_players_hybrid_amount=None, damage_from_players_hybrid_num_shots=None, damage_from_players_missile_amount=None, damage_from_players_missile_num_shots=None, damage_from_players_projectile_amount=None, damage_from_players_projectile_num_shots=None, damage_from_players_smart_bomb_amount=None, damage_from_players_smart_bomb_num_shots=None, damage_from_players_super_amount=None, damage_from_players_super_num_shots=None, damage_from_structures_total_amount=None, damage_from_structures_total_num_shots=None, damage_to_players_bomb_amount=None, damage_to_players_bomb_num_shots=None, damage_to_players_combat_drone_amount=None, damage_to_players_combat_drone_num_shots=None, damage_to_players_energy_amount=None, damage_to_players_energy_num_shots=None, damage_to_players_fighter_bomber_amount=None, damage_to_players_fighter_bomber_num_shots=None, damage_to_players_fighter_drone_amount=None, damage_to_players_fighter_drone_num_shots=None, damage_to_players_hybrid_amount=None, damage_to_players_hybrid_num_shots=None, damage_to_players_missile_amount=None, damage_to_players_missile_num_shots=None, damage_to_players_projectile_amount=None, damage_to_players_projectile_num_shots=None, damage_to_players_smart_bomb_amount=None, damage_to_players_smart_bomb_num_shots=None, damage_to_players_super_amount=None, damage_to_players_super_num_shots=None, 
damage_to_structures_total_amount=None, damage_to_structures_total_num_shots=None, deaths_high_sec=None, deaths_low_sec=None, deaths_null_sec=None, deaths_pod_high_sec=None, deaths_pod_low_sec=None, deaths_pod_null_sec=None, deaths_pod_wormhole=None, deaths_wormhole=None, drone_engage=None, dscans=None, duel_requested=None, engagement_register=None, kills_assists=None, kills_high_sec=None, kills_low_sec=None, kills_null_sec=None, kills_pod_high_sec=None, kills_pod_low_sec=None, kills_pod_null_sec=None, kills_pod_wormhole=None, kills_wormhole=None, npc_flag_set=None, probe_scans=None, pvp_flag_set=None, repair_armor_by_remote_amount=None, repair_armor_remote_amount=None, repair_armor_self_amount=None, repair_capacitor_by_remote_amount=None, repair_capacitor_remote_amount=None, repair_capacitor_self_amount=None, repair_hull_by_remote_amount=None, repair_hull_remote_amount=None, repair_hull_self_amount=None, repair_shield_by_remote_amount=None, repair_shield_remote_amount=None, repair_shield_self_amount=None, self_destructs=None, warp_scramble_pc=None, warp_scrambledby_npc=None, warp_scrambledby_pc=None, weapon_flag_set=None, webifiedby_npc=None, webifiedby_pc=None, webifying_pc=None): # noqa: E501
"""GetCharactersCharacterIdStatsCombat - a model defined in Swagger""" # noqa: E501
self._cap_drainedby_npc = None
self._cap_drainedby_pc = None
self._cap_draining_pc = None
self._criminal_flag_set = None
self._damage_from_np_cs_amount = None
self._damage_from_np_cs_num_shots = None
self._damage_from_players_bomb_amount = None
self._damage_from_players_bomb_num_shots = None
self._damage_from_players_combat_drone_amount = None
self._damage_from_players_combat_drone_num_shots = None
self._damage_from_players_energy_amount = None
self._damage_from_players_energy_num_shots = None
self._damage_from_players_fighter_bomber_amount = None
self._damage_from_players_fighter_bomber_num_shots = None
self._damage_from_players_fighter_drone_amount = None
self._damage_from_players_fighter_drone_num_shots = None
self._damage_from_players_hybrid_amount = None
self._damage_from_players_hybrid_num_shots = None
self._damage_from_players_missile_amount = None
self._damage_from_players_missile_num_shots = None
self._damage_from_players_projectile_amount = None
self._damage_from_players_projectile_num_shots = None
self._damage_from_players_smart_bomb_amount = None
self._damage_from_players_smart_bomb_num_shots = None
self._damage_from_players_super_amount = None
self._damage_from_players_super_num_shots = None
self._damage_from_structures_total_amount = None
self._damage_from_structures_total_num_shots = None
self._damage_to_players_bomb_amount = None
self._damage_to_players_bomb_num_shots = None
self._damage_to_players_combat_drone_amount = None
self._damage_to_players_combat_drone_num_shots = None
self._damage_to_players_energy_amount = None
self._damage_to_players_energy_num_shots = None
self._damage_to_players_fighter_bomber_amount = None
self._damage_to_players_fighter_bomber_num_shots = None
self._damage_to_players_fighter_drone_amount = None
self._damage_to_players_fighter_drone_num_shots = None
self._damage_to_players_hybrid_amount = None
self._damage_to_players_hybrid_num_shots = None
self._damage_to_players_missile_amount = None
self._damage_to_players_missile_num_shots = None
self._damage_to_players_projectile_amount = None
self._damage_to_players_projectile_num_shots = None
self._damage_to_players_smart_bomb_amount = None
self._damage_to_players_smart_bomb_num_shots = None
self._damage_to_players_super_amount = None
self._damage_to_players_super_num_shots = None
self._damage_to_structures_total_amount = None
self._damage_to_structures_total_num_shots = None
self._deaths_high_sec = None
self._deaths_low_sec = None
self._deaths_null_sec = None
self._deaths_pod_high_sec = None
self._deaths_pod_low_sec = None
self._deaths_pod_null_sec = None
self._deaths_pod_wormhole = None
self._deaths_wormhole = None
self._drone_engage = None
self._dscans = None
self._duel_requested = None
self._engagement_register = None
self._kills_assists = None
self._kills_high_sec = None
self._kills_low_sec = None
self._kills_null_sec = None
self._kills_pod_high_sec = None
self._kills_pod_low_sec = None
self._kills_pod_null_sec = None
self._kills_pod_wormhole = None
self._kills_wormhole = None
self._npc_flag_set = None
self._probe_scans = None
self._pvp_flag_set = None
self._repair_armor_by_remote_amount = None
self._repair_armor_remote_amount = None
self._repair_armor_self_amount = None
self._repair_capacitor_by_remote_amount = None
self._repair_capacitor_remote_amount = None
self._repair_capacitor_self_amount = None
self._repair_hull_by_remote_amount = None
self._repair_hull_remote_amount = None
self._repair_hull_self_amount = None
self._repair_shield_by_remote_amount = None
self._repair_shield_remote_amount = None
self._repair_shield_self_amount = None
self._self_destructs = None
self._warp_scramble_pc = None
self._warp_scrambledby_npc = None
self._warp_scrambledby_pc = None
self._weapon_flag_set = None
self._webifiedby_npc = None
self._webifiedby_pc = None
self._webifying_pc = None
self.discriminator = None
if cap_drainedby_npc is not None:
self.cap_drainedby_npc = cap_drainedby_npc
if cap_drainedby_pc is not None:
self.cap_drainedby_pc = cap_drainedby_pc
if cap_draining_pc is not None:
self.cap_draining_pc = cap_draining_pc
if criminal_flag_set is not None:
self.criminal_flag_set = criminal_flag_set
if damage_from_np_cs_amount is not None:
self.damage_from_np_cs_amount = damage_from_np_cs_amount
if damage_from_np_cs_num_shots is not None:
self.damage_from_np_cs_num_shots = damage_from_np_cs_num_shots
if damage_from_players_bomb_amount is not None:
self.damage_from_players_bomb_amount = damage_from_players_bomb_amount
if damage_from_players_bomb_num_shots is not None:
self.damage_from_players_bomb_num_shots = damage_from_players_bomb_num_shots
if damage_from_players_combat_drone_amount is not None:
self.damage_from_players_combat_drone_amount = damage_from_players_combat_drone_amount
if damage_from_players_combat_drone_num_shots is not None:
self.damage_from_players_combat_drone_num_shots = damage_from_players_combat_drone_num_shots
if damage_from_players_energy_amount is not None:
self.damage_from_players_energy_amount = damage_from_players_energy_amount
if damage_from_players_energy_num_shots is not None:
self.damage_from_players_energy_num_shots = damage_from_players_energy_num_shots
if damage_from_players_fighter_bomber_amount is not None:
self.damage_from_players_fighter_bomber_amount = damage_from_players_fighter_bomber_amount
if damage_from_players_fighter_bomber_num_shots is not None:
self.damage_from_players_fighter_bomber_num_shots = damage_from_players_fighter_bomber_num_shots
if damage_from_players_fighter_drone_amount is not None:
self.damage_from_players_fighter_drone_amount = damage_from_players_fighter_drone_amount
if damage_from_players_fighter_drone_num_shots is not None:
self.damage_from_players_fighter_drone_num_shots = damage_from_players_fighter_drone_num_shots
if damage_from_players_hybrid_amount is not None:
self.damage_from_players_hybrid_amount = damage_from_players_hybrid_amount
if damage_from_players_hybrid_num_shots is not None:
self.damage_from_players_hybrid_num_shots = damage_from_players_hybrid_num_shots
if damage_from_players_missile_amount is not None:
self.damage_from_players_missile_amount = damage_from_players_missile_amount
if damage_from_players_missile_num_shots is not None:
self.damage_from_players_missile_num_shots = damage_from_players_missile_num_shots
if damage_from_players_projectile_amount is not None:
self.damage_from_players_projectile_amount = damage_from_players_projectile_amount
if damage_from_players_projectile_num_shots is not None:
self.damage_from_players_projectile_num_shots = damage_from_players_projectile_num_shots
if damage_from_players_smart_bomb_amount is not None:
self.damage_from_players_smart_bomb_amount = damage_from_players_smart_bomb_amount
if damage_from_players_smart_bomb_num_shots is not None:
self.damage_from_players_smart_bomb_num_shots = damage_from_players_smart_bomb_num_shots
if damage_from_players_super_amount is not None:
self.damage_from_players_super_amount = damage_from_players_super_amount
if damage_from_players_super_num_shots is not None:
self.damage_from_players_super_num_shots = damage_from_players_super_num_shots
if damage_from_structures_total_amount is not None:
self.damage_from_structures_total_amount = damage_from_structures_total_amount
if damage_from_structures_total_num_shots is not None:
self.damage_from_structures_total_num_shots = damage_from_structures_total_num_shots
if damage_to_players_bomb_amount is not None:
self.damage_to_players_bomb_amount = damage_to_players_bomb_amount
if damage_to_players_bomb_num_shots is not None:
self.damage_to_players_bomb_num_shots = damage_to_players_bomb_num_shots
if damage_to_players_combat_drone_amount is not None:
self.damage_to_players_combat_drone_amount = damage_to_players_combat_drone_amount
if damage_to_players_combat_drone_num_shots is not None:
self.damage_to_players_combat_drone_num_shots = damage_to_players_combat_drone_num_shots
if damage_to_players_energy_amount is not None:
self.damage_to_players_energy_amount = damage_to_players_energy_amount
if damage_to_players_energy_num_shots is not None:
self.damage_to_players_energy_num_shots = damage_to_players_energy_num_shots
if damage_to_players_fighter_bomber_amount is not None:
self.damage_to_players_fighter_bomber_amount = damage_to_players_fighter_bomber_amount
if damage_to_players_fighter_bomber_num_shots is not None:
self.damage_to_players_fighter_bomber_num_shots = damage_to_players_fighter_bomber_num_shots
if damage_to_players_fighter_drone_amount is not None:
self.damage_to_players_fighter_drone_amount = damage_to_players_fighter_drone_amount
if damage_to_players_fighter_drone_num_shots is not None:
self.damage_to_players_fighter_drone_num_shots = damage_to_players_fighter_drone_num_shots
if damage_to_players_hybrid_amount is not None:
self.damage_to_players_hybrid_amount = damage_to_players_hybrid_amount
if damage_to_players_hybrid_num_shots is not None:
self.damage_to_players_hybrid_num_shots = damage_to_players_hybrid_num_shots
if damage_to_players_missile_amount is not None:
self.damage_to_players_missile_amount = damage_to_players_missile_amount
if damage_to_players_missile_num_shots is not None:
self.damage_to_players_missile_num_shots = damage_to_players_missile_num_shots
if damage_to_players_projectile_amount is not None:
self.damage_to_players_projectile_amount = damage_to_players_projectile_amount
if damage_to_players_projectile_num_shots is not None:
self.damage_to_players_projectile_num_shots = damage_to_players_projectile_num_shots
if damage_to_players_smart_bomb_amount is not None:
self.damage_to_players_smart_bomb_amount = damage_to_players_smart_bomb_amount
if damage_to_players_smart_bomb_num_shots is not None:
self.damage_to_players_smart_bomb_num_shots = damage_to_players_smart_bomb_num_shots
if damage_to_players_super_amount is not None:
self.damage_to_players_super_amount = damage_to_players_super_amount
if damage_to_players_super_num_shots is not None:
self.damage_to_players_super_num_shots = damage_to_players_super_num_shots
if damage_to_structures_total_amount is not None:
self.damage_to_structures_total_amount = damage_to_structures_total_amount
if damage_to_structures_total_num_shots is not None:
self.damage_to_structures_total_num_shots = damage_to_structures_total_num_shots
if deaths_high_sec is not None:
self.deaths_high_sec = deaths_high_sec
if deaths_low_sec is not None:
self.deaths_low_sec = deaths_low_sec
if deaths_null_sec is not None:
self.deaths_null_sec = deaths_null_sec
if deaths_pod_high_sec is not None:
self.deaths_pod_high_sec = deaths_pod_high_sec
if deaths_pod_low_sec is not None:
self.deaths_pod_low_sec = deaths_pod_low_sec
if deaths_pod_null_sec is not None:
self.deaths_pod_null_sec = deaths_pod_null_sec
if deaths_pod_wormhole is not None:
self.deaths_pod_wormhole = deaths_pod_wormhole
if deaths_wormhole is not None:
self.deaths_wormhole = deaths_wormhole
if drone_engage is not None:
self.drone_engage = drone_engage
if dscans is not None:
self.dscans = dscans
if duel_requested is not None:
self.duel_requested = duel_requested
if engagement_register is not None:
self.engagement_register = engagement_register
if kills_assists is not None:
self.kills_assists = kills_assists
if kills_high_sec is not None:
self.kills_high_sec = kills_high_sec
if kills_low_sec is not None:
self.kills_low_sec = kills_low_sec
if kills_null_sec is not None:
self.kills_null_sec = kills_null_sec
if kills_pod_high_sec is not None:
self.kills_pod_high_sec = kills_pod_high_sec
if kills_pod_low_sec is not None:
self.kills_pod_low_sec = kills_pod_low_sec
if kills_pod_null_sec is not None:
self.kills_pod_null_sec = kills_pod_null_sec
if kills_pod_wormhole is not None:
self.kills_pod_wormhole = kills_pod_wormhole
if kills_wormhole is not None:
self.kills_wormhole = kills_wormhole
if npc_flag_set is not None:
self.npc_flag_set = npc_flag_set
if probe_scans is not None:
self.probe_scans = probe_scans
if pvp_flag_set is not None:
self.pvp_flag_set = pvp_flag_set
if repair_armor_by_remote_amount is not None:
self.repair_armor_by_remote_amount = repair_armor_by_remote_amount
if repair_armor_remote_amount is not None:
self.repair_armor_remote_amount = repair_armor_remote_amount
if repair_armor_self_amount is not None:
self.repair_armor_self_amount = repair_armor_self_amount
if repair_capacitor_by_remote_amount is not None:
self.repair_capacitor_by_remote_amount = repair_capacitor_by_remote_amount
if repair_capacitor_remote_amount is not None:
self.repair_capacitor_remote_amount = repair_capacitor_remote_amount
if repair_capacitor_self_amount is not None:
self.repair_capacitor_self_amount = repair_capacitor_self_amount
if repair_hull_by_remote_amount is not None:
self.repair_hull_by_remote_amount = repair_hull_by_remote_amount
if repair_hull_remote_amount is not None:
self.repair_hull_remote_amount = repair_hull_remote_amount
if repair_hull_self_amount is not None:
self.repair_hull_self_amount = repair_hull_self_amount
if repair_shield_by_remote_amount is not None:
self.repair_shield_by_remote_amount = repair_shield_by_remote_amount
if repair_shield_remote_amount is not None:
self.repair_shield_remote_amount = repair_shield_remote_amount
if repair_shield_self_amount is not None:
self.repair_shield_self_amount = repair_shield_self_amount
if self_destructs is not None:
self.self_destructs = self_destructs
if warp_scramble_pc is not None:
self.warp_scramble_pc = warp_scramble_pc
if warp_scrambledby_npc is not None:
self.warp_scrambledby_npc = warp_scrambledby_npc
if warp_scrambledby_pc is not None:
self.warp_scrambledby_pc = warp_scrambledby_pc
if weapon_flag_set is not None:
self.weapon_flag_set = weapon_flag_set
if webifiedby_npc is not None:
self.webifiedby_npc = webifiedby_npc
if webifiedby_pc is not None:
self.webifiedby_pc = webifiedby_pc
if webifying_pc is not None:
self.webifying_pc = webifying_pc
@property
def cap_drainedby_npc(self):
"""Gets the cap_drainedby_npc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
cap_drainedby_npc integer # noqa: E501
:return: The cap_drainedby_npc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._cap_drainedby_npc
@cap_drainedby_npc.setter
def cap_drainedby_npc(self, cap_drainedby_npc):
"""Sets the cap_drainedby_npc of this GetCharactersCharacterIdStatsCombat.
cap_drainedby_npc integer # noqa: E501
:param cap_drainedby_npc: The cap_drainedby_npc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._cap_drainedby_npc = cap_drainedby_npc
@property
def cap_drainedby_pc(self):
"""Gets the cap_drainedby_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
cap_drainedby_pc integer # noqa: E501
:return: The cap_drainedby_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._cap_drainedby_pc
@cap_drainedby_pc.setter
def cap_drainedby_pc(self, cap_drainedby_pc):
"""Sets the cap_drainedby_pc of this GetCharactersCharacterIdStatsCombat.
cap_drainedby_pc integer # noqa: E501
:param cap_drainedby_pc: The cap_drainedby_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._cap_drainedby_pc = cap_drainedby_pc
@property
def cap_draining_pc(self):
"""Gets the cap_draining_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
cap_draining_pc integer # noqa: E501
:return: The cap_draining_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._cap_draining_pc
@cap_draining_pc.setter
def cap_draining_pc(self, cap_draining_pc):
"""Sets the cap_draining_pc of this GetCharactersCharacterIdStatsCombat.
cap_draining_pc integer # noqa: E501
:param cap_draining_pc: The cap_draining_pc of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._cap_draining_pc = cap_draining_pc
@property
def criminal_flag_set(self):
"""Gets the criminal_flag_set of this GetCharactersCharacterIdStatsCombat. # noqa: E501
criminal_flag_set integer # noqa: E501
:return: The criminal_flag_set of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._criminal_flag_set
@criminal_flag_set.setter
def criminal_flag_set(self, criminal_flag_set):
"""Sets the criminal_flag_set of this GetCharactersCharacterIdStatsCombat.
criminal_flag_set integer # noqa: E501
:param criminal_flag_set: The criminal_flag_set of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._criminal_flag_set = criminal_flag_set
@property
def damage_from_np_cs_amount(self):
"""Gets the damage_from_np_cs_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_np_cs_amount integer # noqa: E501
:return: The damage_from_np_cs_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_np_cs_amount
@damage_from_np_cs_amount.setter
def damage_from_np_cs_amount(self, damage_from_np_cs_amount):
"""Sets the damage_from_np_cs_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_np_cs_amount integer # noqa: E501
:param damage_from_np_cs_amount: The damage_from_np_cs_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_np_cs_amount = damage_from_np_cs_amount
@property
def damage_from_np_cs_num_shots(self):
"""Gets the damage_from_np_cs_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_np_cs_num_shots integer # noqa: E501
:return: The damage_from_np_cs_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_np_cs_num_shots
@damage_from_np_cs_num_shots.setter
def damage_from_np_cs_num_shots(self, damage_from_np_cs_num_shots):
"""Sets the damage_from_np_cs_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_np_cs_num_shots integer # noqa: E501
:param damage_from_np_cs_num_shots: The damage_from_np_cs_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_np_cs_num_shots = damage_from_np_cs_num_shots
@property
def damage_from_players_bomb_amount(self):
"""Gets the damage_from_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_bomb_amount integer # noqa: E501
:return: The damage_from_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_bomb_amount
@damage_from_players_bomb_amount.setter
def damage_from_players_bomb_amount(self, damage_from_players_bomb_amount):
"""Sets the damage_from_players_bomb_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_bomb_amount integer # noqa: E501
:param damage_from_players_bomb_amount: The damage_from_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_bomb_amount = damage_from_players_bomb_amount
@property
def damage_from_players_bomb_num_shots(self):
"""Gets the damage_from_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_bomb_num_shots integer # noqa: E501
:return: The damage_from_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_bomb_num_shots
@damage_from_players_bomb_num_shots.setter
def damage_from_players_bomb_num_shots(self, damage_from_players_bomb_num_shots):
"""Sets the damage_from_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_bomb_num_shots integer # noqa: E501
:param damage_from_players_bomb_num_shots: The damage_from_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_bomb_num_shots = damage_from_players_bomb_num_shots
@property
def damage_from_players_combat_drone_amount(self):
"""Gets the damage_from_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_combat_drone_amount integer # noqa: E501
:return: The damage_from_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_combat_drone_amount
@damage_from_players_combat_drone_amount.setter
def damage_from_players_combat_drone_amount(self, damage_from_players_combat_drone_amount):
"""Sets the damage_from_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_combat_drone_amount integer # noqa: E501
:param damage_from_players_combat_drone_amount: The damage_from_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_combat_drone_amount = damage_from_players_combat_drone_amount
@property
def damage_from_players_combat_drone_num_shots(self):
"""Gets the damage_from_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_combat_drone_num_shots integer # noqa: E501
:return: The damage_from_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_combat_drone_num_shots
@damage_from_players_combat_drone_num_shots.setter
def damage_from_players_combat_drone_num_shots(self, damage_from_players_combat_drone_num_shots):
"""Sets the damage_from_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_combat_drone_num_shots integer # noqa: E501
:param damage_from_players_combat_drone_num_shots: The damage_from_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_combat_drone_num_shots = damage_from_players_combat_drone_num_shots
@property
def damage_from_players_energy_amount(self):
"""Gets the damage_from_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_energy_amount integer # noqa: E501
:return: The damage_from_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_energy_amount
@damage_from_players_energy_amount.setter
def damage_from_players_energy_amount(self, damage_from_players_energy_amount):
"""Sets the damage_from_players_energy_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_energy_amount integer # noqa: E501
:param damage_from_players_energy_amount: The damage_from_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_energy_amount = damage_from_players_energy_amount
@property
def damage_from_players_energy_num_shots(self):
"""Gets the damage_from_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_energy_num_shots integer # noqa: E501
:return: The damage_from_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_energy_num_shots
@damage_from_players_energy_num_shots.setter
def damage_from_players_energy_num_shots(self, damage_from_players_energy_num_shots):
"""Sets the damage_from_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_energy_num_shots integer # noqa: E501
:param damage_from_players_energy_num_shots: The damage_from_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_energy_num_shots = damage_from_players_energy_num_shots
@property
def damage_from_players_fighter_bomber_amount(self):
"""Gets the damage_from_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_fighter_bomber_amount integer # noqa: E501
:return: The damage_from_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_fighter_bomber_amount
@damage_from_players_fighter_bomber_amount.setter
def damage_from_players_fighter_bomber_amount(self, damage_from_players_fighter_bomber_amount):
"""Sets the damage_from_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_fighter_bomber_amount integer # noqa: E501
:param damage_from_players_fighter_bomber_amount: The damage_from_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_fighter_bomber_amount = damage_from_players_fighter_bomber_amount
@property
def damage_from_players_fighter_bomber_num_shots(self):
"""Gets the damage_from_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_fighter_bomber_num_shots integer # noqa: E501
:return: The damage_from_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_fighter_bomber_num_shots
@damage_from_players_fighter_bomber_num_shots.setter
def damage_from_players_fighter_bomber_num_shots(self, damage_from_players_fighter_bomber_num_shots):
"""Sets the damage_from_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_fighter_bomber_num_shots integer # noqa: E501
:param damage_from_players_fighter_bomber_num_shots: The damage_from_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_fighter_bomber_num_shots = damage_from_players_fighter_bomber_num_shots
@property
def damage_from_players_fighter_drone_amount(self):
"""Gets the damage_from_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_fighter_drone_amount integer # noqa: E501
:return: The damage_from_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_fighter_drone_amount
@damage_from_players_fighter_drone_amount.setter
def damage_from_players_fighter_drone_amount(self, damage_from_players_fighter_drone_amount):
"""Sets the damage_from_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_fighter_drone_amount integer # noqa: E501
:param damage_from_players_fighter_drone_amount: The damage_from_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_fighter_drone_amount = damage_from_players_fighter_drone_amount
@property
def damage_from_players_fighter_drone_num_shots(self):
"""Gets the damage_from_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_fighter_drone_num_shots integer # noqa: E501
:return: The damage_from_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_fighter_drone_num_shots
@damage_from_players_fighter_drone_num_shots.setter
def damage_from_players_fighter_drone_num_shots(self, damage_from_players_fighter_drone_num_shots):
"""Sets the damage_from_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_fighter_drone_num_shots integer # noqa: E501
:param damage_from_players_fighter_drone_num_shots: The damage_from_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_fighter_drone_num_shots = damage_from_players_fighter_drone_num_shots
@property
def damage_from_players_hybrid_amount(self):
"""Gets the damage_from_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_hybrid_amount integer # noqa: E501
:return: The damage_from_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_hybrid_amount
@damage_from_players_hybrid_amount.setter
def damage_from_players_hybrid_amount(self, damage_from_players_hybrid_amount):
"""Sets the damage_from_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_hybrid_amount integer # noqa: E501
:param damage_from_players_hybrid_amount: The damage_from_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_hybrid_amount = damage_from_players_hybrid_amount
@property
def damage_from_players_hybrid_num_shots(self):
"""Gets the damage_from_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_hybrid_num_shots integer # noqa: E501
:return: The damage_from_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_hybrid_num_shots
@damage_from_players_hybrid_num_shots.setter
def damage_from_players_hybrid_num_shots(self, damage_from_players_hybrid_num_shots):
"""Sets the damage_from_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_hybrid_num_shots integer # noqa: E501
:param damage_from_players_hybrid_num_shots: The damage_from_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_hybrid_num_shots = damage_from_players_hybrid_num_shots
@property
def damage_from_players_missile_amount(self):
"""Gets the damage_from_players_missile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_missile_amount integer # noqa: E501
:return: The damage_from_players_missile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_missile_amount
@damage_from_players_missile_amount.setter
def damage_from_players_missile_amount(self, damage_from_players_missile_amount):
"""Sets the damage_from_players_missile_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_missile_amount integer # noqa: E501
:param damage_from_players_missile_amount: The damage_from_players_missile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_missile_amount = damage_from_players_missile_amount
@property
def damage_from_players_missile_num_shots(self):
"""Gets the damage_from_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_missile_num_shots integer # noqa: E501
:return: The damage_from_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_missile_num_shots
@damage_from_players_missile_num_shots.setter
def damage_from_players_missile_num_shots(self, damage_from_players_missile_num_shots):
"""Sets the damage_from_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_missile_num_shots integer # noqa: E501
:param damage_from_players_missile_num_shots: The damage_from_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_missile_num_shots = damage_from_players_missile_num_shots
@property
def damage_from_players_projectile_amount(self):
"""Gets the damage_from_players_projectile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_projectile_amount integer # noqa: E501
:return: The damage_from_players_projectile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_projectile_amount
@damage_from_players_projectile_amount.setter
def damage_from_players_projectile_amount(self, damage_from_players_projectile_amount):
"""Sets the damage_from_players_projectile_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_projectile_amount integer # noqa: E501
:param damage_from_players_projectile_amount: The damage_from_players_projectile_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_projectile_amount = damage_from_players_projectile_amount
@property
def damage_from_players_projectile_num_shots(self):
"""Gets the damage_from_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_projectile_num_shots integer # noqa: E501
:return: The damage_from_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_projectile_num_shots
@damage_from_players_projectile_num_shots.setter
def damage_from_players_projectile_num_shots(self, damage_from_players_projectile_num_shots):
"""Sets the damage_from_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_projectile_num_shots integer # noqa: E501
:param damage_from_players_projectile_num_shots: The damage_from_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_projectile_num_shots = damage_from_players_projectile_num_shots
@property
def damage_from_players_smart_bomb_amount(self):
"""Gets the damage_from_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_smart_bomb_amount integer # noqa: E501
:return: The damage_from_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_smart_bomb_amount
@damage_from_players_smart_bomb_amount.setter
def damage_from_players_smart_bomb_amount(self, damage_from_players_smart_bomb_amount):
"""Sets the damage_from_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_smart_bomb_amount integer # noqa: E501
:param damage_from_players_smart_bomb_amount: The damage_from_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_smart_bomb_amount = damage_from_players_smart_bomb_amount
@property
def damage_from_players_smart_bomb_num_shots(self):
"""Gets the damage_from_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_smart_bomb_num_shots integer # noqa: E501
:return: The damage_from_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_smart_bomb_num_shots
@damage_from_players_smart_bomb_num_shots.setter
def damage_from_players_smart_bomb_num_shots(self, damage_from_players_smart_bomb_num_shots):
"""Sets the damage_from_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_smart_bomb_num_shots integer # noqa: E501
:param damage_from_players_smart_bomb_num_shots: The damage_from_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_smart_bomb_num_shots = damage_from_players_smart_bomb_num_shots
@property
def damage_from_players_super_amount(self):
"""Gets the damage_from_players_super_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_super_amount integer # noqa: E501
:return: The damage_from_players_super_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_super_amount
@damage_from_players_super_amount.setter
def damage_from_players_super_amount(self, damage_from_players_super_amount):
"""Sets the damage_from_players_super_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_players_super_amount integer # noqa: E501
:param damage_from_players_super_amount: The damage_from_players_super_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_super_amount = damage_from_players_super_amount
@property
def damage_from_players_super_num_shots(self):
"""Gets the damage_from_players_super_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_players_super_num_shots integer # noqa: E501
:return: The damage_from_players_super_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_players_super_num_shots
@damage_from_players_super_num_shots.setter
def damage_from_players_super_num_shots(self, damage_from_players_super_num_shots):
"""Sets the damage_from_players_super_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_players_super_num_shots integer # noqa: E501
:param damage_from_players_super_num_shots: The damage_from_players_super_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_players_super_num_shots = damage_from_players_super_num_shots
@property
def damage_from_structures_total_amount(self):
"""Gets the damage_from_structures_total_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_structures_total_amount integer # noqa: E501
:return: The damage_from_structures_total_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_structures_total_amount
@damage_from_structures_total_amount.setter
def damage_from_structures_total_amount(self, damage_from_structures_total_amount):
"""Sets the damage_from_structures_total_amount of this GetCharactersCharacterIdStatsCombat.
damage_from_structures_total_amount integer # noqa: E501
:param damage_from_structures_total_amount: The damage_from_structures_total_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_structures_total_amount = damage_from_structures_total_amount
@property
def damage_from_structures_total_num_shots(self):
"""Gets the damage_from_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_from_structures_total_num_shots integer # noqa: E501
:return: The damage_from_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_from_structures_total_num_shots
@damage_from_structures_total_num_shots.setter
def damage_from_structures_total_num_shots(self, damage_from_structures_total_num_shots):
"""Sets the damage_from_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_from_structures_total_num_shots integer # noqa: E501
:param damage_from_structures_total_num_shots: The damage_from_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_from_structures_total_num_shots = damage_from_structures_total_num_shots
@property
def damage_to_players_bomb_amount(self):
"""Gets the damage_to_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_bomb_amount integer # noqa: E501
:return: The damage_to_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_bomb_amount
@damage_to_players_bomb_amount.setter
def damage_to_players_bomb_amount(self, damage_to_players_bomb_amount):
"""Sets the damage_to_players_bomb_amount of this GetCharactersCharacterIdStatsCombat.
damage_to_players_bomb_amount integer # noqa: E501
:param damage_to_players_bomb_amount: The damage_to_players_bomb_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_bomb_amount = damage_to_players_bomb_amount
@property
def damage_to_players_bomb_num_shots(self):
"""Gets the damage_to_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_bomb_num_shots integer # noqa: E501
:return: The damage_to_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_bomb_num_shots
@damage_to_players_bomb_num_shots.setter
def damage_to_players_bomb_num_shots(self, damage_to_players_bomb_num_shots):
"""Sets the damage_to_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_to_players_bomb_num_shots integer # noqa: E501
:param damage_to_players_bomb_num_shots: The damage_to_players_bomb_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_bomb_num_shots = damage_to_players_bomb_num_shots
@property
def damage_to_players_combat_drone_amount(self):
"""Gets the damage_to_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_combat_drone_amount integer # noqa: E501
:return: The damage_to_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_combat_drone_amount
@damage_to_players_combat_drone_amount.setter
def damage_to_players_combat_drone_amount(self, damage_to_players_combat_drone_amount):
"""Sets the damage_to_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat.
damage_to_players_combat_drone_amount integer # noqa: E501
:param damage_to_players_combat_drone_amount: The damage_to_players_combat_drone_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_combat_drone_amount = damage_to_players_combat_drone_amount
@property
def damage_to_players_combat_drone_num_shots(self):
"""Gets the damage_to_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_combat_drone_num_shots integer # noqa: E501
:return: The damage_to_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_combat_drone_num_shots
@damage_to_players_combat_drone_num_shots.setter
def damage_to_players_combat_drone_num_shots(self, damage_to_players_combat_drone_num_shots):
"""Sets the damage_to_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_to_players_combat_drone_num_shots integer # noqa: E501
:param damage_to_players_combat_drone_num_shots: The damage_to_players_combat_drone_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_combat_drone_num_shots = damage_to_players_combat_drone_num_shots
@property
def damage_to_players_energy_amount(self):
"""Gets the damage_to_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_energy_amount integer # noqa: E501
:return: The damage_to_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_energy_amount
@damage_to_players_energy_amount.setter
def damage_to_players_energy_amount(self, damage_to_players_energy_amount):
"""Sets the damage_to_players_energy_amount of this GetCharactersCharacterIdStatsCombat.
damage_to_players_energy_amount integer # noqa: E501
:param damage_to_players_energy_amount: The damage_to_players_energy_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_energy_amount = damage_to_players_energy_amount
@property
def damage_to_players_energy_num_shots(self):
"""Gets the damage_to_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
damage_to_players_energy_num_shots integer # noqa: E501
:return: The damage_to_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._damage_to_players_energy_num_shots
@damage_to_players_energy_num_shots.setter
def damage_to_players_energy_num_shots(self, damage_to_players_energy_num_shots):
"""Sets the damage_to_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat.
damage_to_players_energy_num_shots integer # noqa: E501
:param damage_to_players_energy_num_shots: The damage_to_players_energy_num_shots of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._damage_to_players_energy_num_shots = damage_to_players_energy_num_shots
@property
def damage_to_players_fighter_bomber_amount(self):
    """int: The damage_to_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_fighter_bomber_amount

@damage_to_players_fighter_bomber_amount.setter
def damage_to_players_fighter_bomber_amount(self, value):
    """Set the damage_to_players_fighter_bomber_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_fighter_bomber_amount (int)
    """
    self._damage_to_players_fighter_bomber_amount = value

@property
def damage_to_players_fighter_bomber_num_shots(self):
    """int: The damage_to_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_fighter_bomber_num_shots

@damage_to_players_fighter_bomber_num_shots.setter
def damage_to_players_fighter_bomber_num_shots(self, value):
    """Set the damage_to_players_fighter_bomber_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_fighter_bomber_num_shots (int)
    """
    self._damage_to_players_fighter_bomber_num_shots = value

@property
def damage_to_players_fighter_drone_amount(self):
    """int: The damage_to_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_fighter_drone_amount

@damage_to_players_fighter_drone_amount.setter
def damage_to_players_fighter_drone_amount(self, value):
    """Set the damage_to_players_fighter_drone_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_fighter_drone_amount (int)
    """
    self._damage_to_players_fighter_drone_amount = value

@property
def damage_to_players_fighter_drone_num_shots(self):
    """int: The damage_to_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_fighter_drone_num_shots

@damage_to_players_fighter_drone_num_shots.setter
def damage_to_players_fighter_drone_num_shots(self, value):
    """Set the damage_to_players_fighter_drone_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_fighter_drone_num_shots (int)
    """
    self._damage_to_players_fighter_drone_num_shots = value
@property
def damage_to_players_hybrid_amount(self):
    """int: The damage_to_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_hybrid_amount

@damage_to_players_hybrid_amount.setter
def damage_to_players_hybrid_amount(self, value):
    """Set the damage_to_players_hybrid_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_hybrid_amount (int)
    """
    self._damage_to_players_hybrid_amount = value

@property
def damage_to_players_hybrid_num_shots(self):
    """int: The damage_to_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_hybrid_num_shots

@damage_to_players_hybrid_num_shots.setter
def damage_to_players_hybrid_num_shots(self, value):
    """Set the damage_to_players_hybrid_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_hybrid_num_shots (int)
    """
    self._damage_to_players_hybrid_num_shots = value

@property
def damage_to_players_missile_amount(self):
    """int: The damage_to_players_missile_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_missile_amount

@damage_to_players_missile_amount.setter
def damage_to_players_missile_amount(self, value):
    """Set the damage_to_players_missile_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_missile_amount (int)
    """
    self._damage_to_players_missile_amount = value

@property
def damage_to_players_missile_num_shots(self):
    """int: The damage_to_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_missile_num_shots

@damage_to_players_missile_num_shots.setter
def damage_to_players_missile_num_shots(self, value):
    """Set the damage_to_players_missile_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_missile_num_shots (int)
    """
    self._damage_to_players_missile_num_shots = value
@property
def damage_to_players_projectile_amount(self):
    """int: The damage_to_players_projectile_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_projectile_amount

@damage_to_players_projectile_amount.setter
def damage_to_players_projectile_amount(self, value):
    """Set the damage_to_players_projectile_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_projectile_amount (int)
    """
    self._damage_to_players_projectile_amount = value

@property
def damage_to_players_projectile_num_shots(self):
    """int: The damage_to_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_projectile_num_shots

@damage_to_players_projectile_num_shots.setter
def damage_to_players_projectile_num_shots(self, value):
    """Set the damage_to_players_projectile_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_projectile_num_shots (int)
    """
    self._damage_to_players_projectile_num_shots = value

@property
def damage_to_players_smart_bomb_amount(self):
    """int: The damage_to_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_smart_bomb_amount

@damage_to_players_smart_bomb_amount.setter
def damage_to_players_smart_bomb_amount(self, value):
    """Set the damage_to_players_smart_bomb_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_smart_bomb_amount (int)
    """
    self._damage_to_players_smart_bomb_amount = value

@property
def damage_to_players_smart_bomb_num_shots(self):
    """int: The damage_to_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_smart_bomb_num_shots

@damage_to_players_smart_bomb_num_shots.setter
def damage_to_players_smart_bomb_num_shots(self, value):
    """Set the damage_to_players_smart_bomb_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_smart_bomb_num_shots (int)
    """
    self._damage_to_players_smart_bomb_num_shots = value
@property
def damage_to_players_super_amount(self):
    """int: The damage_to_players_super_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_super_amount

@damage_to_players_super_amount.setter
def damage_to_players_super_amount(self, value):
    """Set the damage_to_players_super_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_super_amount (int)
    """
    self._damage_to_players_super_amount = value

@property
def damage_to_players_super_num_shots(self):
    """int: The damage_to_players_super_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_players_super_num_shots

@damage_to_players_super_num_shots.setter
def damage_to_players_super_num_shots(self, value):
    """Set the damage_to_players_super_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_players_super_num_shots (int)
    """
    self._damage_to_players_super_num_shots = value

@property
def damage_to_structures_total_amount(self):
    """int: The damage_to_structures_total_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_structures_total_amount

@damage_to_structures_total_amount.setter
def damage_to_structures_total_amount(self, value):
    """Set the damage_to_structures_total_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_structures_total_amount (int)
    """
    self._damage_to_structures_total_amount = value

@property
def damage_to_structures_total_num_shots(self):
    """int: The damage_to_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat."""
    return self._damage_to_structures_total_num_shots

@damage_to_structures_total_num_shots.setter
def damage_to_structures_total_num_shots(self, value):
    """Set the damage_to_structures_total_num_shots of this GetCharactersCharacterIdStatsCombat.

    :param value: the new damage_to_structures_total_num_shots (int)
    """
    self._damage_to_structures_total_num_shots = value
@property
def deaths_high_sec(self):
    """int: The deaths_high_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_high_sec

@deaths_high_sec.setter
def deaths_high_sec(self, value):
    """Set the deaths_high_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_high_sec (int)
    """
    self._deaths_high_sec = value

@property
def deaths_low_sec(self):
    """int: The deaths_low_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_low_sec

@deaths_low_sec.setter
def deaths_low_sec(self, value):
    """Set the deaths_low_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_low_sec (int)
    """
    self._deaths_low_sec = value

@property
def deaths_null_sec(self):
    """int: The deaths_null_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_null_sec

@deaths_null_sec.setter
def deaths_null_sec(self, value):
    """Set the deaths_null_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_null_sec (int)
    """
    self._deaths_null_sec = value
@property
def deaths_pod_high_sec(self):
    """int: The deaths_pod_high_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_pod_high_sec

@deaths_pod_high_sec.setter
def deaths_pod_high_sec(self, value):
    """Set the deaths_pod_high_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_pod_high_sec (int)
    """
    self._deaths_pod_high_sec = value

@property
def deaths_pod_low_sec(self):
    """int: The deaths_pod_low_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_pod_low_sec

@deaths_pod_low_sec.setter
def deaths_pod_low_sec(self, value):
    """Set the deaths_pod_low_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_pod_low_sec (int)
    """
    self._deaths_pod_low_sec = value

@property
def deaths_pod_null_sec(self):
    """int: The deaths_pod_null_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_pod_null_sec

@deaths_pod_null_sec.setter
def deaths_pod_null_sec(self, value):
    """Set the deaths_pod_null_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_pod_null_sec (int)
    """
    self._deaths_pod_null_sec = value
@property
def deaths_pod_wormhole(self):
    """int: The deaths_pod_wormhole of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_pod_wormhole

@deaths_pod_wormhole.setter
def deaths_pod_wormhole(self, value):
    """Set the deaths_pod_wormhole of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_pod_wormhole (int)
    """
    self._deaths_pod_wormhole = value

@property
def deaths_wormhole(self):
    """int: The deaths_wormhole of this GetCharactersCharacterIdStatsCombat."""
    return self._deaths_wormhole

@deaths_wormhole.setter
def deaths_wormhole(self, value):
    """Set the deaths_wormhole of this GetCharactersCharacterIdStatsCombat.

    :param value: the new deaths_wormhole (int)
    """
    self._deaths_wormhole = value
@property
def drone_engage(self):
    """int: The drone_engage of this GetCharactersCharacterIdStatsCombat."""
    return self._drone_engage

@drone_engage.setter
def drone_engage(self, value):
    """Set the drone_engage of this GetCharactersCharacterIdStatsCombat.

    :param value: the new drone_engage (int)
    """
    self._drone_engage = value

@property
def dscans(self):
    """int: The dscans of this GetCharactersCharacterIdStatsCombat."""
    return self._dscans

@dscans.setter
def dscans(self, value):
    """Set the dscans of this GetCharactersCharacterIdStatsCombat.

    :param value: the new dscans (int)
    """
    self._dscans = value

@property
def duel_requested(self):
    """int: The duel_requested of this GetCharactersCharacterIdStatsCombat."""
    return self._duel_requested

@duel_requested.setter
def duel_requested(self, value):
    """Set the duel_requested of this GetCharactersCharacterIdStatsCombat.

    :param value: the new duel_requested (int)
    """
    self._duel_requested = value

@property
def engagement_register(self):
    """int: The engagement_register of this GetCharactersCharacterIdStatsCombat."""
    return self._engagement_register

@engagement_register.setter
def engagement_register(self, value):
    """Set the engagement_register of this GetCharactersCharacterIdStatsCombat.

    :param value: the new engagement_register (int)
    """
    self._engagement_register = value
@property
def kills_assists(self):
    """int: The kills_assists of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_assists

@kills_assists.setter
def kills_assists(self, value):
    """Set the kills_assists of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_assists (int)
    """
    self._kills_assists = value

@property
def kills_high_sec(self):
    """int: The kills_high_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_high_sec

@kills_high_sec.setter
def kills_high_sec(self, value):
    """Set the kills_high_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_high_sec (int)
    """
    self._kills_high_sec = value

@property
def kills_low_sec(self):
    """int: The kills_low_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_low_sec

@kills_low_sec.setter
def kills_low_sec(self, value):
    """Set the kills_low_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_low_sec (int)
    """
    self._kills_low_sec = value

@property
def kills_null_sec(self):
    """int: The kills_null_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_null_sec

@kills_null_sec.setter
def kills_null_sec(self, value):
    """Set the kills_null_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_null_sec (int)
    """
    self._kills_null_sec = value
@property
def kills_pod_high_sec(self):
    """int: The kills_pod_high_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_pod_high_sec

@kills_pod_high_sec.setter
def kills_pod_high_sec(self, value):
    """Set the kills_pod_high_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_pod_high_sec (int)
    """
    self._kills_pod_high_sec = value

@property
def kills_pod_low_sec(self):
    """int: The kills_pod_low_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_pod_low_sec

@kills_pod_low_sec.setter
def kills_pod_low_sec(self, value):
    """Set the kills_pod_low_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_pod_low_sec (int)
    """
    self._kills_pod_low_sec = value

@property
def kills_pod_null_sec(self):
    """int: The kills_pod_null_sec of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_pod_null_sec

@kills_pod_null_sec.setter
def kills_pod_null_sec(self, value):
    """Set the kills_pod_null_sec of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_pod_null_sec (int)
    """
    self._kills_pod_null_sec = value

@property
def kills_pod_wormhole(self):
    """int: The kills_pod_wormhole of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_pod_wormhole

@kills_pod_wormhole.setter
def kills_pod_wormhole(self, value):
    """Set the kills_pod_wormhole of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_pod_wormhole (int)
    """
    self._kills_pod_wormhole = value
@property
def kills_wormhole(self):
    """int: The kills_wormhole of this GetCharactersCharacterIdStatsCombat."""
    return self._kills_wormhole

@kills_wormhole.setter
def kills_wormhole(self, value):
    """Set the kills_wormhole of this GetCharactersCharacterIdStatsCombat.

    :param value: the new kills_wormhole (int)
    """
    self._kills_wormhole = value

@property
def npc_flag_set(self):
    """int: The npc_flag_set of this GetCharactersCharacterIdStatsCombat."""
    return self._npc_flag_set

@npc_flag_set.setter
def npc_flag_set(self, value):
    """Set the npc_flag_set of this GetCharactersCharacterIdStatsCombat.

    :param value: the new npc_flag_set (int)
    """
    self._npc_flag_set = value

@property
def probe_scans(self):
    """int: The probe_scans of this GetCharactersCharacterIdStatsCombat."""
    return self._probe_scans

@probe_scans.setter
def probe_scans(self, value):
    """Set the probe_scans of this GetCharactersCharacterIdStatsCombat.

    :param value: the new probe_scans (int)
    """
    self._probe_scans = value

@property
def pvp_flag_set(self):
    """int: The pvp_flag_set of this GetCharactersCharacterIdStatsCombat."""
    return self._pvp_flag_set

@pvp_flag_set.setter
def pvp_flag_set(self, value):
    """Set the pvp_flag_set of this GetCharactersCharacterIdStatsCombat.

    :param value: the new pvp_flag_set (int)
    """
    self._pvp_flag_set = value
@property
def repair_armor_by_remote_amount(self):
    """int: The repair_armor_by_remote_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._repair_armor_by_remote_amount

@repair_armor_by_remote_amount.setter
def repair_armor_by_remote_amount(self, value):
    """Set the repair_armor_by_remote_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new repair_armor_by_remote_amount (int)
    """
    self._repair_armor_by_remote_amount = value

@property
def repair_armor_remote_amount(self):
    """int: The repair_armor_remote_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._repair_armor_remote_amount

@repair_armor_remote_amount.setter
def repair_armor_remote_amount(self, value):
    """Set the repair_armor_remote_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new repair_armor_remote_amount (int)
    """
    self._repair_armor_remote_amount = value

@property
def repair_armor_self_amount(self):
    """int: The repair_armor_self_amount of this GetCharactersCharacterIdStatsCombat."""
    return self._repair_armor_self_amount

@repair_armor_self_amount.setter
def repair_armor_self_amount(self, value):
    """Set the repair_armor_self_amount of this GetCharactersCharacterIdStatsCombat.

    :param value: the new repair_armor_self_amount (int)
    """
    self._repair_armor_self_amount = value
@property
def repair_capacitor_by_remote_amount(self):
"""Gets the repair_capacitor_by_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
repair_capacitor_by_remote_amount integer # noqa: E501
:return: The repair_capacitor_by_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._repair_capacitor_by_remote_amount
@repair_capacitor_by_remote_amount.setter
def repair_capacitor_by_remote_amount(self, repair_capacitor_by_remote_amount):
"""Sets the repair_capacitor_by_remote_amount of this GetCharactersCharacterIdStatsCombat.
repair_capacitor_by_remote_amount integer # noqa: E501
:param repair_capacitor_by_remote_amount: The repair_capacitor_by_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._repair_capacitor_by_remote_amount = repair_capacitor_by_remote_amount
@property
def repair_capacitor_remote_amount(self):
"""Gets the repair_capacitor_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
repair_capacitor_remote_amount integer # noqa: E501
:return: The repair_capacitor_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._repair_capacitor_remote_amount
@repair_capacitor_remote_amount.setter
def repair_capacitor_remote_amount(self, repair_capacitor_remote_amount):
"""Sets the repair_capacitor_remote_amount of this GetCharactersCharacterIdStatsCombat.
repair_capacitor_remote_amount integer # noqa: E501
:param repair_capacitor_remote_amount: The repair_capacitor_remote_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:type: int
"""
self._repair_capacitor_remote_amount = repair_capacitor_remote_amount
@property
def repair_capacitor_self_amount(self):
"""Gets the repair_capacitor_self_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
repair_capacitor_self_amount integer # noqa: E501
:return: The repair_capacitor_self_amount of this GetCharactersCharacterIdStatsCombat. # noqa: E501
:rtype: int
"""
return self._repair_capacitor_self_amount
    @repair_capacitor_self_amount.setter
    def repair_capacitor_self_amount(self, repair_capacitor_self_amount):
        """Sets the repair_capacitor_self_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_capacitor_self_amount: The repair_capacitor_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_capacitor_self_amount = repair_capacitor_self_amount
    @property
    def repair_hull_by_remote_amount(self):
        """Gets the repair_hull_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_hull_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_hull_by_remote_amount
    @repair_hull_by_remote_amount.setter
    def repair_hull_by_remote_amount(self, repair_hull_by_remote_amount):
        """Sets the repair_hull_by_remote_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_hull_by_remote_amount: The repair_hull_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_hull_by_remote_amount = repair_hull_by_remote_amount
    @property
    def repair_hull_remote_amount(self):
        """Gets the repair_hull_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_hull_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_hull_remote_amount
    @repair_hull_remote_amount.setter
    def repair_hull_remote_amount(self, repair_hull_remote_amount):
        """Sets the repair_hull_remote_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_hull_remote_amount: The repair_hull_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_hull_remote_amount = repair_hull_remote_amount
    @property
    def repair_hull_self_amount(self):
        """Gets the repair_hull_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_hull_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_hull_self_amount
    @repair_hull_self_amount.setter
    def repair_hull_self_amount(self, repair_hull_self_amount):
        """Sets the repair_hull_self_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_hull_self_amount: The repair_hull_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_hull_self_amount = repair_hull_self_amount
    @property
    def repair_shield_by_remote_amount(self):
        """Gets the repair_shield_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_shield_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_shield_by_remote_amount
    @repair_shield_by_remote_amount.setter
    def repair_shield_by_remote_amount(self, repair_shield_by_remote_amount):
        """Sets the repair_shield_by_remote_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_shield_by_remote_amount: The repair_shield_by_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_shield_by_remote_amount = repair_shield_by_remote_amount
    @property
    def repair_shield_remote_amount(self):
        """Gets the repair_shield_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_shield_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_shield_remote_amount
    @repair_shield_remote_amount.setter
    def repair_shield_remote_amount(self, repair_shield_remote_amount):
        """Sets the repair_shield_remote_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_shield_remote_amount: The repair_shield_remote_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_shield_remote_amount = repair_shield_remote_amount
    @property
    def repair_shield_self_amount(self):
        """Gets the repair_shield_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The repair_shield_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._repair_shield_self_amount
    @repair_shield_self_amount.setter
    def repair_shield_self_amount(self, repair_shield_self_amount):
        """Sets the repair_shield_self_amount of this GetCharactersCharacterIdStatsCombat.

        :param repair_shield_self_amount: The repair_shield_self_amount of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._repair_shield_self_amount = repair_shield_self_amount
    @property
    def self_destructs(self):
        """Gets the self_destructs of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The self_destructs of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._self_destructs
    @self_destructs.setter
    def self_destructs(self, self_destructs):
        """Sets the self_destructs of this GetCharactersCharacterIdStatsCombat.

        :param self_destructs: The self_destructs of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._self_destructs = self_destructs
    @property
    def warp_scramble_pc(self):
        """Gets the warp_scramble_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The warp_scramble_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._warp_scramble_pc
    @warp_scramble_pc.setter
    def warp_scramble_pc(self, warp_scramble_pc):
        """Sets the warp_scramble_pc of this GetCharactersCharacterIdStatsCombat.

        :param warp_scramble_pc: The warp_scramble_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._warp_scramble_pc = warp_scramble_pc
    @property
    def warp_scrambledby_npc(self):
        """Gets the warp_scrambledby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The warp_scrambledby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._warp_scrambledby_npc
    @warp_scrambledby_npc.setter
    def warp_scrambledby_npc(self, warp_scrambledby_npc):
        """Sets the warp_scrambledby_npc of this GetCharactersCharacterIdStatsCombat.

        :param warp_scrambledby_npc: The warp_scrambledby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._warp_scrambledby_npc = warp_scrambledby_npc
    @property
    def warp_scrambledby_pc(self):
        """Gets the warp_scrambledby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The warp_scrambledby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._warp_scrambledby_pc
    @warp_scrambledby_pc.setter
    def warp_scrambledby_pc(self, warp_scrambledby_pc):
        """Sets the warp_scrambledby_pc of this GetCharactersCharacterIdStatsCombat.

        :param warp_scrambledby_pc: The warp_scrambledby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._warp_scrambledby_pc = warp_scrambledby_pc
    @property
    def weapon_flag_set(self):
        """Gets the weapon_flag_set of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The weapon_flag_set of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._weapon_flag_set
    @weapon_flag_set.setter
    def weapon_flag_set(self, weapon_flag_set):
        """Sets the weapon_flag_set of this GetCharactersCharacterIdStatsCombat.

        :param weapon_flag_set: The weapon_flag_set of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._weapon_flag_set = weapon_flag_set
    @property
    def webifiedby_npc(self):
        """Gets the webifiedby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The webifiedby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._webifiedby_npc
    @webifiedby_npc.setter
    def webifiedby_npc(self, webifiedby_npc):
        """Sets the webifiedby_npc of this GetCharactersCharacterIdStatsCombat.

        :param webifiedby_npc: The webifiedby_npc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._webifiedby_npc = webifiedby_npc
    @property
    def webifiedby_pc(self):
        """Gets the webifiedby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The webifiedby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._webifiedby_pc
    @webifiedby_pc.setter
    def webifiedby_pc(self, webifiedby_pc):
        """Sets the webifiedby_pc of this GetCharactersCharacterIdStatsCombat.

        :param webifiedby_pc: The webifiedby_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._webifiedby_pc = webifiedby_pc
    @property
    def webifying_pc(self):
        """Gets the webifying_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501

        :return: The webifying_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :rtype: int
        """
        return self._webifying_pc
    @webifying_pc.setter
    def webifying_pc(self, webifying_pc):
        """Sets the webifying_pc of this GetCharactersCharacterIdStatsCombat.

        :param webifying_pc: The webifying_pc of this GetCharactersCharacterIdStatsCombat.  # noqa: E501
        :type: int
        """
        self._webifying_pc = webifying_pc
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint` — delegates to to_str()."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GetCharactersCharacterIdStatsCombat):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal (negation of __eq__)."""
        return not self == other
| 43.718279
| 3,211
| 0.744662
| 14,623
| 118,870
| 5.537851
| 0.011489
| 0.065201
| 0.092368
| 0.156705
| 0.942023
| 0.887787
| 0.813324
| 0.70673
| 0.591245
| 0.473808
| 0
| 0.01505
| 0.201245
| 118,870
| 2,718
| 3,212
| 43.734364
| 0.837838
| 0.424195
| 0
| 0.089385
| 1
| 0
| 0.128785
| 0.096896
| 0
| 0
| 0
| 0
| 0
| 1
| 0.180633
| false
| 0
| 0.002793
| 0
| 0.27933
| 0.001862
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e1c9d1b82c4d4a0f8f08691c430078353e4e4c36
| 35
|
py
|
Python
|
webevents/__init__.py
|
Zamony/webevents
|
af1e2ef0d62c11e547b83e47613fe4aae953d322
|
[
"MIT"
] | 1
|
2019-10-07T10:57:22.000Z
|
2019-10-07T10:57:22.000Z
|
webevents/__init__.py
|
Zamony/webevents
|
af1e2ef0d62c11e547b83e47613fe4aae953d322
|
[
"MIT"
] | null | null | null |
webevents/__init__.py
|
Zamony/webevents
|
af1e2ef0d62c11e547b83e47613fe4aae953d322
|
[
"MIT"
] | null | null | null |
from webevents.webevents import run
| 35
| 35
| 0.885714
| 5
| 35
| 6.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bedf24ab7e6134db4a8bfc7ca111875a9b7fc542
| 69
|
py
|
Python
|
pygrep/classes/__init__.py
|
sstadick/pygrep
|
13c53ac427adda9974ee9e62c22391bf0682008c
|
[
"Apache-2.0"
] | null | null | null |
pygrep/classes/__init__.py
|
sstadick/pygrep
|
13c53ac427adda9974ee9e62c22391bf0682008c
|
[
"Apache-2.0"
] | null | null | null |
pygrep/classes/__init__.py
|
sstadick/pygrep
|
13c53ac427adda9974ee9e62c22391bf0682008c
|
[
"Apache-2.0"
] | null | null | null |
from .boyerMoore import *
from .naive import *
from .helpers import *
| 23
| 25
| 0.753623
| 9
| 69
| 5.777778
| 0.555556
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15942
| 69
| 3
| 26
| 23
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
830f300591565e591198ac648100f9292af09d0c
| 16
|
py
|
Python
|
eds/openmtc-gevent/common/openmtc/lib/pyio.py
|
piyush82/elastest-device-emulator-service
|
b4d6b393d6042c54a7b3dfb5f58cad5efd00f0e7
|
[
"Apache-2.0"
] | 2
|
2021-05-27T13:32:16.000Z
|
2022-03-30T01:23:34.000Z
|
simulator/uio.py
|
ondiiik/meteoink
|
9bc7af929de12ed5eb2fafd64fcfe447f07b6eeb
|
[
"MIT"
] | null | null | null |
simulator/uio.py
|
ondiiik/meteoink
|
9bc7af929de12ed5eb2fafd64fcfe447f07b6eeb
|
[
"MIT"
] | null | null | null |
from io import *
| 16
| 16
| 0.75
| 3
| 16
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 16
| 1
| 16
| 16
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
55c44922049c54857887dfdd26f888d41447d9f7
| 144
|
py
|
Python
|
notes/Rest_Flask_Example/controllers/default_controller.py
|
microservice-tools/pixis
|
ce5a1ecc70732677518d21a0e876440af1245eac
|
[
"MIT"
] | null | null | null |
notes/Rest_Flask_Example/controllers/default_controller.py
|
microservice-tools/pixis
|
ce5a1ecc70732677518d21a0e876440af1245eac
|
[
"MIT"
] | 21
|
2018-04-25T19:07:41.000Z
|
2018-07-18T06:04:56.000Z
|
notes/Rest_Flask_Example/controllers/default_controller.py
|
microservice-tools/pixis
|
ce5a1ecc70732677518d21a0e876440af1245eac
|
[
"MIT"
] | 1
|
2018-04-23T14:44:00.000Z
|
2018-04-23T14:44:00.000Z
|
from flask import Blueprint
# Blueprint collecting the default (root) routes of the service.
default_api = Blueprint('default_api', __name__)


@default_api.route('/')
def index():
    # Root endpoint: returns a plain greeting string (handy as a liveness check).
    return "Hello, World!"
| 16
| 48
| 0.715278
| 18
| 144
| 5.333333
| 0.722222
| 0.3125
| 0.395833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 144
| 8
| 49
| 18
| 0.780488
| 0
| 0
| 0
| 0
| 0
| 0.173611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3606848770d0838d3271f54ca89863b5c629b85a
| 7,014
|
py
|
Python
|
src/gae/fit.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 5
|
2020-03-13T02:54:03.000Z
|
2022-03-18T02:33:12.000Z
|
src/gae/fit.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 2
|
2021-11-10T19:47:00.000Z
|
2022-02-10T01:24:59.000Z
|
src/gae/fit.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 1
|
2021-05-24T21:54:44.000Z
|
2021-05-24T21:54:44.000Z
|
import os
# Silence TensorFlow's C++ logging (3 = errors only).
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Train on CPU (hide GPU) due to memory constraints
# NOTE(review): the comment above says "hide GPU", but "0" actually exposes
# GPU 0; an empty string ("") would hide all GPUs — confirm the intent.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
import tensorflow as tf
import numpy as np
import scipy.sparse as sp
from collections import namedtuple
from src.gae.gae.optimizer import OptimizerAE, OptimizerVAE
from src.gae.gae.model import GCNModelAE, GCNModelVAE
from src.gae.gae.preprocessing import preprocess_graph, construct_feed_dict, sparse_to_tuple, mask_test_edges

# Settings: an immutable stand-in for the original tf.app.flags block,
# which is kept below (commented out) for reference.
flags = namedtuple('FLAGS', 'learning_rate epochs hidden1 hidden2 weight_decay dropout model dataset features')
FLAGS = flags(0.01, 200, 32, 16, 0., 0., 'gcn_ae', 'cora', 1)
# flags = tf.app.flags
# FLAGS = flags.FLAGS
# flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
# flags.DEFINE_integer('epochs', 200, 'Number of epochs to train.')
# flags.DEFINE_integer('hidden1', 32, 'Number of units in hidden layer 1.')
# flags.DEFINE_integer('hidden2', 16, 'Number of units in hidden layer 2.')
# flags.DEFINE_float('weight_decay', 0., 'Weight for L2 loss on embedding matrix.')
# flags.DEFINE_float('dropout', 0., 'Dropout rate (1 - keep probability).')
#
# flags.DEFINE_string('model', 'gcn_ae', 'Model string.')
# flags.DEFINE_string('dataset', 'cora', 'Dataset string.')
# flags.DEFINE_integer('features', 1, 'Whether to use features (1) or not (0).')
def _fit_autoencoder(adj_matrix, epochs, variational):
    """Shared training loop for both graph-autoencoder variants.

    Parameters
    ----------
    adj_matrix : scipy sparse matrix / ndarray
        adjacency matrix of the graph
    epochs : int
        how many iterations to train the model for
    variational : bool
        True -> GCNModelVAE/OptimizerVAE, False -> GCNModelAE/OptimizerAE

    Returns
    -------
    a matrix of edge probabilities with the same shape as ``adj_matrix``
    """
    # load data; identity features (featureless setting)
    adj = adj_matrix
    features = sp.identity(adj.shape[0])

    # store original adjacency matrix (without diagonal entries) for later
    # NOTE(review): adj_orig is never read below — kept from upstream code.
    adj_orig = adj - sp.dia_matrix((adj.diagonal()[np.newaxis, :], [0]), shape=adj.shape)
    adj_orig.eliminate_zeros()

    # compute train/test/validation splits; resample until the split succeeds
    # NOTE: loops forever if mask_test_edges never stops raising AssertionError
    while True:
        try:
            adj_train, train_edges, val_edges, val_edges_false, test_edges, test_edges_false = mask_test_edges(adj)
        except AssertionError:
            continue
        else:
            break
    adj = adj_train

    # some preprocessing
    adj_norm = preprocess_graph(adj)

    # define placeholders
    placeholders = {
        'features': tf.sparse_placeholder(tf.float32),
        'adj': tf.sparse_placeholder(tf.float32),
        'adj_orig': tf.sparse_placeholder(tf.float32),
        'dropout': tf.placeholder_with_default(0., shape=())
    }

    features = sparse_to_tuple(features.tocoo())
    num_features = features[2][1]
    features_nonzero = features[1].shape[0]

    # define the model (VAE needs the node count, AE does not)
    num_nodes = adj.shape[0]
    if variational:
        model = GCNModelVAE(placeholders, num_features, num_nodes, features_nonzero)
    else:
        model = GCNModelAE(placeholders, num_features, features_nonzero)

    # class-imbalance reweighting for the reconstruction loss
    pos_weight = float(adj.shape[0] * adj.shape[0] - adj.sum()) / adj.sum()
    norm = adj.shape[0] * adj.shape[0] / float((adj.shape[0] * adj.shape[0] - adj.sum()) * 2)

    # define the optimizer
    with tf.name_scope('optimizer'):
        labels = tf.reshape(tf.sparse_tensor_to_dense(placeholders['adj_orig'], validate_indices=False), [-1])
        if variational:
            opt = OptimizerVAE(preds=model.reconstructions,
                               labels=labels,
                               model=model, num_nodes=num_nodes,
                               pos_weight=pos_weight,
                               norm=norm)
        else:
            opt = OptimizerAE(preds=model.reconstructions,
                              labels=labels,
                              pos_weight=pos_weight,
                              norm=norm)

    # start up TensorFlow session
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())

    adj_label = sparse_to_tuple(adj_train + sp.eye(adj_train.shape[0]))

    # train the model
    for epoch in range(epochs):
        # construct feed dictionary
        feed_dict = construct_feed_dict(adj_norm, adj_label, features, placeholders)
        feed_dict.update({placeholders['dropout']: FLAGS.dropout})
        # run single weight update
        outs = sess.run([opt.opt_op, opt.cost, opt.accuracy, opt.preds_sub], feed_dict=feed_dict)

    # edge probabilities from the final reconstruction logits
    probs = sess.run(tf.nn.sigmoid(outs[3])).reshape(adj_matrix.shape)
    sess.close()
    return probs


def fit_ae(adj_matrix, epochs=200):
    ''' trains a non-variational graph autoencoder on a given input graph
    parameters:
        adj_matrix (ndarray): adjacency matrix of the graph
        epochs (int): how many iterations to train the model for
    output:
        a matrix containing probabilities corresponding to edges in an adjacency matrix
    '''
    return _fit_autoencoder(adj_matrix, epochs, variational=False)


def fit_vae(adj_matrix, epochs=200):
    ''' trains a variational graph autoencoder on a given input graph
    parameters:
        adj_matrix (ndarray): adjacency matrix of the graph
        epochs (int): how many iterations to train the model for
    output:
        a matrix containing probabilities corresponding to edges in an adjacency matrix
    '''
    return _fit_autoencoder(adj_matrix, epochs, variational=True)
| 37.508021
| 128
| 0.666239
| 926
| 7,014
| 4.86933
| 0.211663
| 0.025283
| 0.02994
| 0.026613
| 0.757596
| 0.750499
| 0.727878
| 0.727878
| 0.727878
| 0.727878
| 0
| 0.016691
| 0.222697
| 7,014
| 186
| 129
| 37.709677
| 0.810345
| 0.276305
| 0
| 0.78
| 0
| 0
| 0.048005
| 0
| 0
| 0
| 0
| 0
| 0.02
| 1
| 0.02
| false
| 0
| 0.08
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
363b12f7744ea8bb315ebd17d2182e64416292bb
| 296
|
py
|
Python
|
eventsourcing/tests/example_application_tests/test_example_application_with_cassandra.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | 1
|
2020-02-10T08:12:31.000Z
|
2020-02-10T08:12:31.000Z
|
eventsourcing/tests/example_application_tests/test_example_application_with_cassandra.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | null | null | null |
eventsourcing/tests/example_application_tests/test_example_application_with_cassandra.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | null | null | null |
from eventsourcing.tests.example_application_tests import base
from eventsourcing.tests.sequenced_item_tests.test_cassandra_record_manager import \
WithCassandraRecordManagers
class TestExampleApplicationWithCassandra(WithCassandraRecordManagers, base.ExampleApplicationTestCase):
    """Runs the shared example-application test case against Cassandra record managers."""
    pass
| 37
| 104
| 0.885135
| 26
| 296
| 9.807692
| 0.692308
| 0.133333
| 0.172549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077703
| 296
| 7
| 105
| 42.285714
| 0.934066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
7fd5e0f0499eab07ccf2febf733bb79921633a30
| 140
|
py
|
Python
|
scrapli_community/alcatel/aos/__init__.py
|
ikievite/scrapli_community
|
b160ae6c21177c949a0b8210810ba2584b31861f
|
[
"MIT"
] | 37
|
2020-11-13T20:50:30.000Z
|
2022-03-25T16:15:28.000Z
|
scrapli_community/alcatel/aos/__init__.py
|
ikievite/scrapli_community
|
b160ae6c21177c949a0b8210810ba2584b31861f
|
[
"MIT"
] | 84
|
2020-08-02T16:20:15.000Z
|
2022-03-02T14:38:26.000Z
|
scrapli_community/alcatel/aos/__init__.py
|
ikievite/scrapli_community
|
b160ae6c21177c949a0b8210810ba2584b31861f
|
[
"MIT"
] | 25
|
2020-08-01T23:51:37.000Z
|
2022-02-21T10:06:33.000Z
|
"""scrapli_community.alcatel.aos"""
from scrapli_community.alcatel.aos.alcatel_aos import SCRAPLI_PLATFORM
__all__ = ("SCRAPLI_PLATFORM",)
| 28
| 70
| 0.814286
| 17
| 140
| 6.176471
| 0.470588
| 0.285714
| 0.438095
| 0.495238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064286
| 140
| 4
| 71
| 35
| 0.801527
| 0.207143
| 0
| 0
| 0
| 0
| 0.152381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1829ef31efdf5d293a75daffce85af63e4d2218d
| 2,256
|
py
|
Python
|
domaci-zadaci/06/test_trim_str.py
|
lukin155/skola-programiranja
|
481eea1bc7429d13e006952162a8c76074fcd4dc
|
[
"MIT"
] | 2
|
2019-04-29T09:09:05.000Z
|
2019-09-22T10:40:54.000Z
|
domaci-zadaci/06/test_trim_str.py
|
lukin155/skola-programiranja
|
481eea1bc7429d13e006952162a8c76074fcd4dc
|
[
"MIT"
] | null | null | null |
domaci-zadaci/06/test_trim_str.py
|
lukin155/skola-programiranja
|
481eea1bc7429d13e006952162a8c76074fcd4dc
|
[
"MIT"
] | null | null | null |
from solutions import trim_str
import random
import unittest
class TestTrimStr(unittest.TestCase):
    """Exercises trim_str with 0 / 1 / n leading and trailing spaces.

    Every test expects the bare sentence back once the surrounding
    whitespace has been stripped.
    """

    EXPECTED = "Test string."

    def _check(self, teststr):
        # Shared assertion: trimming must recover the bare sentence.
        actual = trim_str(teststr)
        self.assertEqual(self.EXPECTED, actual)

    @staticmethod
    def _spaces():
        # Random-length run of spaces (may be empty, up to 100 chars).
        return random.randint(0, 100) * " "

    # 0 leading spaces, 0 trailing spaces
    def test_00(self):
        self._check("Test string.")

    # 0 leading spaces, 1 trailing space
    def test_01(self):
        self._check("Test string. ")

    # 0 leading spaces, n trailing spaces
    def test_0n(self):
        self._check("Test string." + self._spaces())

    # 1 leading space, 0 trailing spaces
    def test_10(self):
        self._check(" Test string.")

    # 1 leading space, 1 trailing space
    def test_11(self):
        self._check(" Test string. ")

    # 1 leading space, n trailing spaces
    def test_1n(self):
        self._check(" Test string." + self._spaces())

    # n leading spaces, 0 trailing spaces
    def test_n0(self):
        self._check(self._spaces() + "Test string.")

    # n leading spaces, 1 trailing space
    def test_n1(self):
        self._check(self._spaces() + "Test string. ")

    # n leading spaces, n trailing spaces
    def test_nn(self):
        self._check(self._spaces() + "Test string." + self._spaces())


if __name__ == '__main__':
    unittest.main()
| 25.066667
| 98
| 0.602837
| 257
| 2,256
| 5.18677
| 0.151751
| 0.127532
| 0.087772
| 0.135034
| 0.900975
| 0.830458
| 0.830458
| 0.723181
| 0.702176
| 0.702176
| 0
| 0.029832
| 0.286791
| 2,256
| 89
| 99
| 25.348315
| 0.798633
| 0.140514
| 0
| 0.666667
| 0
| 0
| 0.116122
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 1
| 0.176471
| false
| 0
| 0.058824
| 0
| 0.254902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a10223934e49c46a5f8fa785e9479044ec4407eb
| 4,968
|
py
|
Python
|
radbm/utils/torch/multi_bernoulli/log_arithmetic.py
|
duchesneaumathieu/radbm
|
3d9dbad51e1bfc0bbb1a60d0aa03c99340f6930c
|
[
"MIT"
] | null | null | null |
radbm/utils/torch/multi_bernoulli/log_arithmetic.py
|
duchesneaumathieu/radbm
|
3d9dbad51e1bfc0bbb1a60d0aa03c99340f6930c
|
[
"MIT"
] | null | null | null |
radbm/utils/torch/multi_bernoulli/log_arithmetic.py
|
duchesneaumathieu/radbm
|
3d9dbad51e1bfc0bbb1a60d0aa03c99340f6930c
|
[
"MIT"
] | null | null | null |
import torch
from radbm.utils.torch import torch_lse, torch_logsumexp
from .poisson_binomial import log_poisson_binomial
# Module-level log-sigmoid used below to turn logits into log probabilities.
logsigmoid = torch.nn.LogSigmoid()
def multi_bernoulli_equality(xz, yz):
    """Bitwise log probability that two Multi-Bernoulli variables are equal.

    Parameters
    ----------
    xz, yz : torch.tensor
        Logits (pre-sigmoid) of the two Multi-Bernoulli variables. Shapes
        need not match but must be broadcastable.

    Returns
    -------
    log_p0 : torch.tensor
        Bitwise log probability of inequality.
    log_p1 : torch.tensor
        Bitwise log probability of equality.
    """
    log_x1 = logsigmoid(xz)
    log_y1 = logsigmoid(yz)
    log_x0 = logsigmoid(-xz)
    log_y0 = logsigmoid(-yz)
    # unequal: (1,0) or (0,1); equal: (1,1) or (0,0)
    log_p0 = torch_logsumexp(log_x1 + log_y0, log_x0 + log_y1)
    log_p1 = torch_logsumexp(log_x1 + log_y1, log_x0 + log_y0)
    return log_p0, log_p1
def multi_bernoulli_subset(xz, yz):
    """Bitwise log probability that the first Multi-Bernoulli is <= the second.

    Parameters
    ----------
    xz, yz : torch.tensor
        Logits (pre-sigmoid) of the two Multi-Bernoulli variables. Shapes
        need not match but must be broadcastable.

    Returns
    -------
    log_p0 : torch.tensor
        Bitwise log probability of not-subset (x=1 while y=0).
    log_p1 : torch.tensor
        Bitwise log probability of subset.
    """
    log_x1 = logsigmoid(xz)
    log_y1 = logsigmoid(yz)
    log_x0 = logsigmoid(-xz)
    log_y0 = logsigmoid(-yz)
    # the single violating configuration is x=1, y=0
    log_p0 = log_x1 + log_y0
    log_p1 = torch_logsumexp(log_x1 + log_y1, log_x0 + log_y0, log_x0 + log_y1)
    return log_p0, log_p1
def multi_bernoulli_activated_equality(xz, yz, az):
    """Bitwise log probability that X == Y, OR the activation bit A is one.

    Parameters
    ----------
    xz, yz : torch.tensor
        Logits (pre-sigmoid) of the two compared Multi-Bernoulli variables.
    az : torch.tensor
        Logits of the activation Multi-Bernoulli gating the equality.

    Returns
    -------
    log_p0 : torch.tensor
        Bitwise log probability that A=0 and X != Y.
    log_p1 : torch.tensor
        Bitwise log probability that A=1, or A=0 and X == Y.

    Notes
    -----
    Shapes need not match but must be broadcastable.
    """
    log_x1, log_y1, log_a1 = logsigmoid(xz), logsigmoid(yz), logsigmoid(az)
    log_x0, log_y0, log_a0 = logsigmoid(-xz), logsigmoid(-yz), logsigmoid(-az)
    log_p0 = torch_logsumexp(log_a0 + log_x1 + log_y0,
                             log_a0 + log_x0 + log_y1)
    log_p1 = torch_logsumexp(log_a1,
                             log_a0 + log_x1 + log_y1,
                             log_a0 + log_x0 + log_y0)
    return log_p0, log_p1
def multi_bernoulli_activated_subset(xz, yz, az):
    """Bitwise log probability that X <= Y, OR the activation bit A is one.

    Parameters
    ----------
    xz, yz : torch.tensor
        Logits (pre-sigmoid) of the two compared Multi-Bernoulli variables.
    az : torch.tensor
        Logits of the activation Multi-Bernoulli gating the "subset" test.

    Returns
    -------
    log_p0 : torch.tensor
        Bitwise log probability that A=0 and x=1 while y=0.
    log_p1 : torch.tensor
        Bitwise log probability of the complement.

    Notes
    -----
    Shapes need not match but must be broadcastable.
    """
    log_x1, log_y1, log_a1 = logsigmoid(xz), logsigmoid(yz), logsigmoid(az)
    log_x0, log_y0, log_a0 = logsigmoid(-xz), logsigmoid(-yz), logsigmoid(-az)
    # the single violating configuration (x=1, y=0) must also have A=0
    log_p0 = log_a0 + log_x1 + log_y0
    log_p1 = torch_logsumexp(log_a1,
                             log_a0 + log_x1 + log_y1,
                             log_a0 + log_x0 + log_y0,
                             log_a0 + log_x0 + log_y1)
    return log_p0, log_p1
def torch_log_prob_any(log_q0, log_q1):
    """
    Log-space analogue of ``tensor.any()`` over the last dimension.

    Parameters
    ----------
    log_q0 : torch.tensor (dtype=torch.float)
        log probability of each bit being zero; shape (a1, ..., am, n) where
        n is the number of independent Bernoullis. Leading dims must match
        log_q1.
    log_q1 : torch.tensor (dtype=torch.float)
        log probability of each bit being one; same shape convention as
        log_q0.

    Returns
    -------
    log_nor : torch.tensor (dtype=torch.float)
        log probability that all n bits are zero.
    log_or : torch.tensor (dtype=torch.float)
        log probability that at least one bit is one.
    """
    # Distribution over the count of ones among the n Bernoullis.
    count_log_probs = log_poisson_binomial(log_q0, log_q1)
    log_nor = count_log_probs[..., 0]
    log_or = torch_lse(count_log_probs[..., 1:], dim=-1)
    return log_nor, log_or
| 33.12
| 88
| 0.628623
| 726
| 4,968
| 4.217631
| 0.14876
| 0.109732
| 0.073155
| 0.07838
| 0.838668
| 0.814174
| 0.808295
| 0.806989
| 0.766166
| 0.745591
| 0
| 0.013037
| 0.274356
| 4,968
| 150
| 89
| 33.12
| 0.836338
| 0.648551
| 0
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.185185
| false
| 0
| 0.111111
| 0
| 0.481481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a1527b32af1140ac3ee943f03e38174f0cc29cd2
| 206
|
py
|
Python
|
compile.py
|
smathot/stimulus-sets
|
c4d3eca2ef0a4d82e5518a0bd26b47c7f8dd0f13
|
[
"CC-BY-3.0"
] | 6
|
2015-05-20T04:28:50.000Z
|
2021-05-12T23:14:51.000Z
|
compile.py
|
smathot/stimulus-sets
|
c4d3eca2ef0a4d82e5518a0bd26b47c7f8dd0f13
|
[
"CC-BY-3.0"
] | null | null | null |
compile.py
|
smathot/stimulus-sets
|
c4d3eca2ef0a4d82e5518a0bd26b47c7f8dd0f13
|
[
"CC-BY-3.0"
] | 1
|
2022-01-01T13:12:17.000Z
|
2022-01-01T13:12:17.000Z
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# Build script: renders the stimulus-sets Markdown source into HTML and PDF
# using the third-party academicmarkdown package.
from academicmarkdown import build
# standalone=False renders an HTML fragment rather than a complete document.
build.HTML(u'stimulus-sets.md', u'stimulus-sets.html', standalone=False)
build.PDF(u'stimulus-sets.md', u'stimulus-sets.pdf')
| 29.428571
| 72
| 0.73301
| 33
| 206
| 4.575758
| 0.575758
| 0.238411
| 0.344371
| 0.198676
| 0.370861
| 0.370861
| 0.370861
| 0
| 0
| 0
| 0
| 0.005236
| 0.072816
| 206
| 6
| 73
| 34.333333
| 0.78534
| 0.194175
| 0
| 0
| 0
| 0
| 0.408537
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a1ec7295eb172a4376794f7f571a9f67a8313d45
| 154
|
py
|
Python
|
tests/test_pyqt.py
|
yueranyuan/pyscreenshot
|
3287b798691de8791bc3b3314f2545f7b0b1cb99
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_pyqt.py
|
yueranyuan/pyscreenshot
|
3287b798691de8791bc3b3314f2545f7b0b1cb99
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_pyqt.py
|
yueranyuan/pyscreenshot
|
3287b798691de8791bc3b3314f2545f7b0b1cb99
|
[
"BSD-2-Clause"
] | null | null | null |
from ref import backend_ref
from size import backend_size
def test_size_pyqt():
    """Run the shared backend size check against the 'pyqt' backend."""
    backend_size('pyqt')
def test_ref_pyqt():
    """Run the shared reference-image check against the 'pyqt' backend."""
    backend_ref('pyqt')
| 14
| 29
| 0.74026
| 24
| 154
| 4.416667
| 0.333333
| 0.245283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 154
| 10
| 30
| 15.4
| 0.828125
| 0
| 0
| 0
| 0
| 0
| 0.051948
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6294c25d3011f74d3e215080a6eab6a89131999c
| 102
|
py
|
Python
|
kwat/grid/get_1d_grid_resolution.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T17:50:28.000Z
|
2019-01-30T19:23:02.000Z
|
kwat/grid/get_1d_grid_resolution.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-05-05T01:52:31.000Z
|
2019-04-20T21:06:05.000Z
|
kwat/grid/get_1d_grid_resolution.py
|
KwatME/ccal
|
d96dfa811482eee067f346386a2181ec514625f4
|
[
"MIT"
] | 5
|
2017-07-17T18:55:54.000Z
|
2019-02-02T04:46:19.000Z
|
from numpy import diff, unique
def get_1d_grid_resolution(co_):
    """Return the smallest gap between distinct coordinate values in co_."""
    # unique() sorts and deduplicates, so adjacent differences are positive.
    distinct = unique(co_)
    return (distinct[1:] - distinct[:-1]).min()
| 14.571429
| 34
| 0.735294
| 16
| 102
| 4.375
| 0.8125
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.156863
| 102
| 6
| 35
| 17
| 0.802326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
629cd812cedd27e70311fe2b2ad3749f50427eca
| 13,006
|
py
|
Python
|
inventory/inventory/doctype/inventory_validator/inventory_validator.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
inventory/inventory/doctype/inventory_validator/inventory_validator.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
inventory/inventory/doctype/inventory_validator/inventory_validator.py
|
riconova92/inventory
|
7cc4f49bda31f802af36ee4ea6eb43092b5094a7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Myme and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document

# Map each child-table fieldname of this DocType to the Jinja template used
# to render its grid rows; all three tables share one packing-list template.
form_grid_templates = {
    "data_inventory_unchecked": "templates/includes/item_grid_packing_list_inventory_validator.html",
    "data_inventory_checked": "templates/includes/item_grid_packing_list_inventory_validator.html",
    "data_inventory_missing": "templates/includes/item_grid_packing_list_inventory_validator.html",
}
class InventoryValidator(Document):
"""Frappe DocType controller that reconciles scanned stock against recorded inventory, moving rows between the unchecked / checked / missing child tables."""
# NOTE(review): leading indentation was lost when this file was captured, so
# the nesting of the statements below is ambiguous; restore the original
# structure from version control before modifying this code.
# Loads matching rows from `tabMaster Inventory` / `tabData Inventory` into
# the "data_inventory_unchecked" child table, filtered by the entered item
# code and optional colour/group.
def get_button(self):
if not self.get("get_item_code") :
frappe.throw("Masukkan Item Code")
# NOTE(review): these WHERE clauses are built with str.format from user
# input, which permits SQL injection; frappe.db.sql accepts parameterized
# values (%s / %(name)s) — confirm and switch to those.
item_clause = """ AND mi.`item_code`="{0}" """.format(self.get("get_item_code"))
colour_clause = ""
if self.get("get_colour") :
colour_clause = """ AND di.`colour`="{0}" """.format(self.get("get_colour"))
group_clause = ""
if self.get("get_group") :
group_clause = """ AND di.`group`="{0}" """.format(self.get("get_group"))
data = frappe.db.sql(""" SELECT mi.`name`,mi.`item_code`,di.`colour`,di.`group`,di.`total_roll`,
di.`yard_atau_meter_per_roll`,di.`warehouse`,di.`inventory_uom`,di.`total_yard_atau_meter`
FROM `tabMaster Inventory`mi JOIN `tabData Inventory`di ON di.`parent`=mi.`name`
WHERE di.`total_roll` != 0
{0} {1} {2}
ORDER BY di.`idx`
""".format(item_clause,colour_clause,group_clause),as_dict = 1)
# frappe.msgprint(str(data))
# Copy every fetched row into the unchecked child table.
for item in data :
new_row = self.append("data_inventory_unchecked")
new_row.item_code_variant = item.item_code
new_row.colour = item.colour
new_row.warehouse = item.warehouse
new_row.inventory_uom = item.inventory_uom
new_row.yard_atau_meter_per_roll = item.yard_atau_meter_per_roll
new_row.total_yard_atau_meter = item.total_yard_atau_meter
new_row.total_roll = item.total_roll
new_row.group = item.group
# Matches the currently entered item (code / length-per-roll / colour /
# warehouse, optionally group) against the unchecked rows and transfers
# quantities into "checked" (positive qty) or "missing" (negative qty).
def validate_item(self):
if self.item_code_variant_depan and self.yard_atau_meter and self.colour and self.warehouse and self.qty_roll :
checker = False
for d in self.get("data_inventory_unchecked"):
if d.item_code_variant == self.item_code_variant_depan and d.yard_atau_meter_per_roll == self.yard_atau_meter and d.colour == self.colour and d.warehouse == self.warehouse :
# Branch with a group code entered: rows are matched on
# group_prefix + "." + group_code as well.
if self.group_code :
if self.qty_roll > 0 and ((not self.group_prefix+"."+self.group_code) or (self.group_prefix+"."+self.group_code) == d.group) :
# Find an existing matching "checked" row, if any.
ch = ""
for item_checked in self.get("data_inventory_checked") :
if (item_checked.item_code_variant == self.item_code_variant_depan and
item_checked.yard_atau_meter_per_roll == self.yard_atau_meter and
item_checked.colour == self.colour and
item_checked.warehouse == self.warehouse
and ((not self.group_prefix+"."+self.group_code) or (self.group_prefix+"."+self.group_code) == item_checked.group)) :
ch = item_checked
if ch == "" :
# NOTE(review): if frappe.throw raises here, the append
# below is unreachable — verify the intended nesting.
frappe.throw("Beda")
ch = self.append('data_inventory_checked')
ch.item_code_variant = d.item_code_variant
ch.yard_atau_meter_per_roll = d.yard_atau_meter_per_roll
ch.colour = d.colour
ch.inventory_uom = d.inventory_uom
ch.group = d.group
ch.warehouse = d.warehouse
ch.total_roll = 0
ch.total_yard_atau_meter = 0
checker = True
# NOTE(review): this re-append discards the `ch` selected above —
# looks suspicious; verify the intent.
ch = self.append('data_inventory_checked',{})
# Move up to qty_roll rolls from the unchecked row into `ch`.
if self.qty_roll >= d.total_roll :
ch.total_roll = ch.total_roll + d.total_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + d.total_yard_atau_meter
self.qty_roll = self.qty_roll - ch.total_roll
self.remove(d)
else :
ch.total_roll = ch.total_roll + self.qty_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + (self.qty_roll * self.yard_atau_meter)
d.total_roll = d.total_roll - self.qty_roll
d.total_yard_atau_meter = d.total_yard_atau_meter - (self.qty_roll * self.yard_atau_meter)
self.qty_roll = 0
# Negative quantity: route the rolls into the "missing" table instead.
elif self.qty_roll < 0 :
ch = ""
for item_miss in self.get("data_inventory_missing") :
if (item_miss.item_code_variant == self.item_code_variant_depan and
item_miss.yard_atau_meter_per_roll == self.yard_atau_meter and
item_miss.colour == self.colour and
item_miss.warehouse == self.warehouse
and ((not self.group_prefix+"."+self.group_code) or (self.group_prefix+"."+self.group_code) == item_miss.group)) :
ch = item_miss
if ch == "" :
# NOTE(review): same throw-then-append pattern as above — verify.
frappe.throw("Beda")
ch = self.append('data_inventory_missing')
ch.item_code_variant = d.item_code_variant
ch.yard_atau_meter_per_roll = d.yard_atau_meter_per_roll
ch.colour = d.colour
ch.inventory_uom = d.inventory_uom
ch.group = d.group
ch.warehouse = d.warehouse
ch.total_roll = 0
ch.total_yard_atau_meter = 0
if self.qty_roll <= d.total_roll :
ch.total_roll = ch.total_roll + d.total_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + d.total_yard_atau_meter
self.qty_roll = self.qty_roll - d.total_roll
self.remove(d)
else :
ch.total_roll = ch.total_roll + self.qty_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + (self.qty_roll * self.yard_atau_meter)
d.total_roll = d.total_roll - self.qty_roll
d.total_yard_atau_meter = d.total_yard_atau_meter - (self.qty_roll * self.yard_atau_meter)
self.qty_roll = 0
# Branch without a group code: matching ignores groups entirely.
else :
if self.qty_roll > 0 :
ch = ""
for item_checked in self.get("data_inventory_checked") :
if (item_checked.item_code_variant == self.item_code_variant_depan and
item_checked.yard_atau_meter_per_roll == self.yard_atau_meter and
item_checked.colour == self.colour and
item_checked.warehouse == self.warehouse
and not item_checked.group ) :
ch = item_checked
# NOTE(review): `continue` followed by frappe.throw("BEDA") —
# one of the two is dead code at this nesting; verify intent.
continue
frappe.throw("BEDA")
if ch == "" :
ch = self.append('data_inventory_checked')
ch.item_code_variant = d.item_code_variant
ch.yard_atau_meter_per_roll = d.yard_atau_meter_per_roll
ch.colour = d.colour
ch.inventory_uom = d.inventory_uom
ch.warehouse = d.warehouse
ch.total_roll = 0
ch.total_yard_atau_meter = 0
checker = True
if self.qty_roll >= d.total_roll :
ch.total_roll = ch.total_roll + d.total_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + d.total_yard_atau_meter
self.qty_roll = self.qty_roll - d.total_roll
self.remove(d)
else :
ch.total_roll = ch.total_roll + self.qty_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + (self.qty_roll * self.yard_atau_meter)
d.total_roll = d.total_roll - self.qty_roll
d.total_yard_atau_meter = d.total_yard_atau_meter - (self.qty_roll * self.yard_atau_meter)
self.qty_roll = 0
elif self.qty_roll < 0 :
ch = ""
for item_miss in self.get("data_inventory_missing") :
if (item_miss.item_code_variant == self.item_code_variant_depan and
item_miss.yard_atau_meter_per_roll == self.yard_atau_meter and
item_miss.colour == self.colour and
item_miss.warehouse == self.warehouse
and not item_miss.group) :
ch = item_miss
if ch == "" :
ch = self.append('data_inventory_missing')
ch.item_code_variant = d.item_code_variant
ch.yard_atau_meter_per_roll = d.yard_atau_meter_per_roll
ch.colour = d.colour
ch.inventory_uom = d.inventory_uom
ch.warehouse = d.warehouse
ch.total_roll = 0
ch.total_yard_atau_meter = 0
if self.qty_roll <= d.total_roll :
ch.total_roll = ch.total_roll + d.total_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + d.total_yard_atau_meter
self.qty_roll = self.qty_roll - d.total_roll
self.remove(d)
else :
ch.total_roll = ch.total_roll + self.qty_roll
ch.total_yard_atau_meter = ch.total_yard_atau_meter + (self.qty_roll * self.yard_atau_meter)
d.total_roll = d.total_roll - self.qty_roll
d.total_yard_atau_meter = d.total_yard_atau_meter - (self.qty_roll * self.yard_atau_meter)
self.qty_roll = 0
# Leftover quantity not found in inventory is recorded as missing.
if self.qty_roll > 0 :
if checker :
frappe.msgprint("Jumlah item melebihi yang tercatat pada Inventory. Kelebihan akan dimasukkan ke Missing")
frappe.msgprint("Item tidak ada di dalam Inventory")
# NOTE(review): `add_item` is defined as a method below; this bare call
# relies on a global of the same name — likely should be self.add_item().
add_item(self)
# Reset the entry fields for the next scan.
self.yard_atau_meter = 0
self.qty_roll = 1
self.colour = self.get_colour
else :
frappe.throw("Data Item belum terisi dengan lengkap")
pass
# Adds the currently entered quantity to the "missing" child table, merging
# into an existing matching row or appending a new one.
def add_item(self):
count = 0
if self.item_code_variant_depan and self.yard_atau_meter and self.colour and self.warehouse :
master_item = frappe.get_doc("Item", self.item_code_variant_depan)
if self.get("data_inventory_missing") :
# First pass: detect whether a matching missing row already exists.
for i in self.data_inventory_missing :
if self.group_prefix and self.group_code :
if i.item_code_variant == self.item_code_variant_depan and i.yard_atau_meter_per_roll == self.yard_atau_meter and i.warehouse == self.warehouse and i.colour == self.colour and i.group == (self.group_prefix+"."+self.group_code) :
count = 1
else :
if i.item_code_variant == self.item_code_variant_depan and i.yard_atau_meter_per_roll == self.yard_atau_meter and i.warehouse == self.warehouse and i.colour == self.colour and not i.group :
count = 1
if count == 1 :
# Second pass: accumulate quantities onto the matching row.
for i in self.data_inventory_missing :
if self.group_prefix and self.group_code :
if i.item_code_variant == self.item_code_variant_depan and i.yard_atau_meter_per_roll == self.yard_atau_meter and i.warehouse == self.warehouse and i.colour == self.colour and i.group == (self.group_prefix+"."+self.group_code) :
i.total_roll = i.total_roll + self.qty_roll
i.total_yard_atau_meter = i.total_yard_atau_meter + (self.yard_atau_meter * self.qty_roll)
else :
if i.item_code_variant == self.item_code_variant_depan and i.yard_atau_meter_per_roll == self.yard_atau_meter and i.warehouse == self.warehouse and i.colour == self.colour and not i.group :
i.total_roll = i.total_roll + self.qty_roll
i.total_yard_atau_meter = i.total_yard_atau_meter + (self.yard_atau_meter * self.qty_roll)
else :
# No existing match: append a new missing row (with/without group).
if self.group_prefix and self.group_code :
pp_so = self.append('data_inventory_missing', {})
pp_so.item_code_variant = self.item_code_variant_depan
pp_so.yard_atau_meter_per_roll = self.yard_atau_meter
pp_so.total_yard_atau_meter = (self.yard_atau_meter * self.qty_roll)
pp_so.total_roll = self.qty_roll
pp_so.group = self.group_prefix+"."+self.group_code
pp_so.warehouse = self.warehouse
pp_so.colour = self.colour
pp_so.inventory_uom = master_item.stock_uom
else :
pp_so = self.append('data_inventory_missing', {})
pp_so.item_code_variant = self.item_code_variant_depan
pp_so.yard_atau_meter_per_roll = self.yard_atau_meter
pp_so.total_yard_atau_meter = (self.yard_atau_meter * self.qty_roll)
pp_so.total_roll = self.qty_roll
pp_so.warehouse = self.warehouse
pp_so.colour = self.colour
pp_so.inventory_uom = master_item.stock_uom
else :
# Missing table empty: append the first row (with/without group).
if self.group_prefix and self.group_code :
pp_so = self.append('data_inventory_missing', {})
pp_so.item_code_variant = self.item_code_variant_depan
pp_so.yard_atau_meter_per_roll = self.yard_atau_meter
pp_so.total_yard_atau_meter = (self.yard_atau_meter * self.qty_roll)
pp_so.total_roll = self.qty_roll
pp_so.group = self.group_prefix+"."+self.group_code
pp_so.warehouse = self.warehouse
pp_so.colour = self.colour
pp_so.inventory_uom = master_item.stock_uom
else :
pp_so = self.append('data_inventory_missing', {})
pp_so.item_code_variant = self.item_code_variant_depan
pp_so.yard_atau_meter_per_roll = self.yard_atau_meter
pp_so.total_yard_atau_meter = (self.yard_atau_meter * self.qty_roll)
pp_so.total_roll = self.qty_roll
pp_so.warehouse = self.warehouse
pp_so.colour = self.colour
pp_so.inventory_uom = master_item.stock_uom
else :
frappe.throw("Item Code / Colour / Warehouse / Yard / Meter tidak terisi")
@frappe.whitelist()
def save_inventory_validator(doc, method):
    """Recompute roll totals for the unchecked and missing child tables.

    Sums total_roll over each populated child table and stores the result on
    the parent document (total_uncheck / total_missing). Tables that are
    empty or absent are left untouched.
    """
    # Total of the rows not yet validated.
    if doc.data_inventory_unchecked:
        doc.total_uncheck = sum(float(row.total_roll) for row in doc.data_inventory_unchecked)
    # Total of the rows recorded as missing.
    if doc.data_inventory_missing:
        doc.total_missing = sum(float(row.total_roll) for row in doc.data_inventory_missing)
| 41.685897
| 235
| 0.69368
| 1,978
| 13,006
| 4.199191
| 0.06724
| 0.093426
| 0.151818
| 0.093186
| 0.80785
| 0.77438
| 0.744642
| 0.737178
| 0.73164
| 0.7196
| 0
| 0.003692
| 0.208673
| 13,006
| 312
| 236
| 41.685897
| 0.803342
| 0.011994
| 0
| 0.690196
| 0
| 0.007843
| 0.104251
| 0.062052
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015686
| false
| 0.003922
| 0.011765
| 0
| 0.031373
| 0.007843
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
62b0ff2afff3705e24659e9cff703915faab119e
| 513
|
py
|
Python
|
projecteuler/#5.py
|
droiddoes9/USACO
|
d63d07d225cebb86f3a93b5b52995fa7e81af8ee
|
[
"MIT"
] | null | null | null |
projecteuler/#5.py
|
droiddoes9/USACO
|
d63d07d225cebb86f3a93b5b52995fa7e81af8ee
|
[
"MIT"
] | null | null | null |
projecteuler/#5.py
|
droiddoes9/USACO
|
d63d07d225cebb86f3a93b5b52995fa7e81af8ee
|
[
"MIT"
] | null | null | null |
"""Project Euler-style search: print every multiple of 20 below the bound
that is divisible by all of 11..20 (divisibility by 1..10 follows).

Fixes: the original used Python-2-only ``print`` statements and a ten-deep
nested-if chain; ``print(...)`` calls and an ``all()`` test are equivalent
and run under both Python 2 and 3. Wrapped in ``main()`` behind a
``__main__`` guard so the module can be imported without running the
(very long) search; executing the script behaves as before.
"""


def divisible_by_11_to_20(num):
    """Return True if num is divisible by every integer from 11 to 20."""
    return all(num % d == 0 for d in range(11, 21))


def main():
    # Step by 20 since any qualifying number must be a multiple of 20.
    num = 20
    while num < 670442572800:
        if divisible_by_11_to_20(num):
            print(num)
            print("YES")
        num += 20


if __name__ == "__main__":
    main()
| 32.0625
| 55
| 0.245614
| 51
| 513
| 2.470588
| 0.333333
| 0.396825
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.265896
| 0.662768
| 513
| 15
| 56
| 34.2
| 0.462428
| 0
| 0
| 0
| 0
| 0
| 0.005848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.133333
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c53d9f85e6ecf13aef41aafe1ed282baee089c3f
| 256
|
py
|
Python
|
src/sample/Env.py
|
TestowanieAutomatyczneUG/laboratorium-9-wgulan
|
b69d4a04acadde0f2609fe51096d64d725da1c71
|
[
"MIT"
] | null | null | null |
src/sample/Env.py
|
TestowanieAutomatyczneUG/laboratorium-9-wgulan
|
b69d4a04acadde0f2609fe51096d64d725da1c71
|
[
"MIT"
] | null | null | null |
src/sample/Env.py
|
TestowanieAutomatyczneUG/laboratorium-9-wgulan
|
b69d4a04acadde0f2609fe51096d64d725da1c71
|
[
"MIT"
] | null | null | null |
class Env:
    """Test double standing in for an audio/environment backend."""

    def __init__(self):
        # Flag recording whether a wav file was reported as played.
        self.played = False

    def getTime(self):
        """Stub: provides no time source; always returns None."""
        return None

    def playWavFile(self, file):
        """Stub: accepts a file argument but performs no playback."""
        return None

    def wavWasPlayed(self):
        """Mark that a wav file has been played."""
        self.played = True

    def resetWav(self):
        """Clear the played flag back to its initial state."""
        self.played = False
| 16
| 32
| 0.566406
| 29
| 256
| 4.862069
| 0.482759
| 0.170213
| 0.297872
| 0.269504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.34375
| 256
| 15
| 33
| 17.066667
| 0.839286
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0.181818
| 0
| 0
| 0.545455
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
c5622f45fb59bddf809b61abe045fa63fe69ef6b
| 37
|
py
|
Python
|
lxmls/my_tools.py
|
jnobre/lxmls-toolkit-2017
|
528da3377723cb9a048d13ac80786408d16df88d
|
[
"MIT"
] | null | null | null |
lxmls/my_tools.py
|
jnobre/lxmls-toolkit-2017
|
528da3377723cb9a048d13ac80786408d16df88d
|
[
"MIT"
] | null | null | null |
lxmls/my_tools.py
|
jnobre/lxmls-toolkit-2017
|
528da3377723cb9a048d13ac80786408d16df88d
|
[
"MIT"
] | null | null | null |
def my_print(input):
    """Print the given value to stdout.

    Fix: the original ``print input`` statement is Python-2-only syntax;
    the ``print(...)`` call form behaves identically on both Python 2 and 3
    for a single argument. The parameter keeps its original name (it shadows
    the ``input`` builtin, but renaming would break keyword callers).
    """
    print(input)
| 12.333333
| 20
| 0.702703
| 6
| 37
| 4.166667
| 0.666667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 3
| 21
| 12.333333
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
3d678eb4ce540aea29f0a656acdc78871b2309e2
| 183
|
py
|
Python
|
src/graph_transpiler/webdnn/backend/webgpu/kernels/atanh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | 1
|
2021-04-09T15:55:35.000Z
|
2021-04-09T15:55:35.000Z
|
src/graph_transpiler/webdnn/backend/webgpu/kernels/atanh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/backend/webgpu/kernels/atanh.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
# Register the WebGPU elementwise kernel for the Atanh operator. The string
# argument is shader source (not Python): each output element y is the
# inverse hyperbolic tangent of the corresponding input element x0.
from webdnn.backend.webgpu.kernels.elementwise import register_elementwise_kernel
from webdnn.graph.operators.atanh import Atanh

register_elementwise_kernel(Atanh, "y = atanh(x0);")
| 36.6
| 81
| 0.84153
| 24
| 183
| 6.25
| 0.583333
| 0.133333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005882
| 0.071038
| 183
| 4
| 82
| 45.75
| 0.876471
| 0
| 0
| 0
| 0
| 0
| 0.076503
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ded8c56091de597a0ad6ada23cca960bff7fe72
| 117
|
py
|
Python
|
waffles/view/util/resource.py
|
IonTeLOS/wasf
|
2e77dd65afffbbf1545e9ced2296dcbd0ab3c8e4
|
[
"Zlib"
] | null | null | null |
waffles/view/util/resource.py
|
IonTeLOS/wasf
|
2e77dd65afffbbf1545e9ced2296dcbd0ab3c8e4
|
[
"Zlib"
] | null | null | null |
waffles/view/util/resource.py
|
IonTeLOS/wasf
|
2e77dd65afffbbf1545e9ced2296dcbd0ab3c8e4
|
[
"Zlib"
] | null | null | null |
from waffles import ROOT_DIR
def get_path(resource_path):
    """Return the full path of a file under waffles' view/resources directory."""
    resources_dir = ROOT_DIR + '/view/resources/'
    return resources_dir + resource_path
| 19.5
| 56
| 0.760684
| 17
| 117
| 4.941176
| 0.705882
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 117
| 5
| 57
| 23.4
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
b18cd19402d66356499413f2e5fd1199b44f4519
| 3,005
|
py
|
Python
|
commands.py
|
Treeniks/Vinimum
|
77c484125b38aca3a8d8010e60a0a5b1f6a769f9
|
[
"Vim"
] | null | null | null |
commands.py
|
Treeniks/Vinimum
|
77c484125b38aca3a8d8010e60a0a5b1f6a769f9
|
[
"Vim"
] | null | null | null |
commands.py
|
Treeniks/Vinimum
|
77c484125b38aca3a8d8010e60a0a5b1f6a769f9
|
[
"Vim"
] | null | null | null |
import sublime
import Vinimum.vinimum as vinimum
class Command:
    """Base class for Vinimum normal-mode commands bound to a view."""

    def __init__(self, view):
        # The editor view this command operates on.
        self.view = view

    def run(self):
        """Execute the command; the base implementation does nothing."""
        pass

    def repeatable(self):
        """Whether the repeat mechanism may replay this command (default True)."""
        return True
# i
class InsertCommand(Command):
    """Enter sublime (insert) mode at the caret."""

    def run(self):
        vinimum.enter_sublime_mode()

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# I
class InsertBOLCommand(Command):
    """Enter sublime (insert) mode at the beginning of the line."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("move_to", {"to": "bol"})

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# a
class AppendCommand(Command):
    """Enter sublime (insert) mode one character to the right of the caret."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("move", {"by": "characters", "forward": True})

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# A
class AppendEOLCommand(Command):
    """Enter sublime (insert) mode at the end of the line."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("move_to", {"to": "eol"})

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# o
class NewLineAfterCommand(Command):
    """Open a new line below the current one and enter sublime (insert) mode."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("run_macro_file", {"file": "res://Packages/Default/Add Line.sublime-macro"})

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# O
class NewLineBeforeCommand(Command):
    """Open a new line above the current one and enter sublime (insert) mode."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("run_macro_file", {"file": "res://Packages/Default/Add Line Before.sublime-macro"})

    # technically repeatable
    # but only once proper repeat engine is in place
    # better not make it repeatable until then
    def repeatable(self):
        return False
# x
class RemoveCharacterCommand(Command):
    """Delete the character under the caret (repeatable via the base class)."""

    def run(self):
        self.view.run_command("right_delete")
# D
class DeleteToEOLCommand(Command):
    """Delete from the caret to the end of the line."""

    def run(self):
        self.view.run_command("run_macro_file", {"file": "res://Packages/Default/Delete to Hard EOL.sublime-macro"})
# C
class ChangeToEOLCommand(Command):
    """Delete to the end of the line, then enter sublime (insert) mode."""

    def run(self):
        vinimum.enter_sublime_mode()
        self.view.run_command("run_macro_file", {"file": "res://Packages/Default/Delete to Hard EOL.sublime-macro"})
# Dispatch table mapping a normal-mode keystroke to its Command subclass.
commands = {
    "i": InsertCommand,
    "I": InsertBOLCommand,
    "a": AppendCommand,
    "A": AppendEOLCommand,
    "o": NewLineAfterCommand,
    "O": NewLineBeforeCommand,
    "x": RemoveCharacterCommand,
    "D": DeleteToEOLCommand,
    "C": ChangeToEOLCommand,
}
| 26.830357
| 116
| 0.671547
| 369
| 3,005
| 5.368564
| 0.184282
| 0.05048
| 0.05048
| 0.077234
| 0.734982
| 0.734982
| 0.734982
| 0.734982
| 0.688541
| 0.688541
| 0
| 0
| 0.225624
| 3,005
| 111
| 117
| 27.072072
| 0.851311
| 0.227288
| 0
| 0.492308
| 0
| 0
| 0.151264
| 0.047951
| 0
| 0
| 0
| 0
| 0
| 1
| 0.276923
| false
| 0.015385
| 0.030769
| 0.107692
| 0.569231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
491699f8e4d47893e3e5ee5661274f798a5ac2d2
| 1,299
|
py
|
Python
|
migration/migrator/migrations/course/20200131192137_office_hours_queue_queue_tokens.py
|
zeez2030/Submitty
|
7118944ff4adc6f15d76984eb10a1e862926d724
|
[
"BSD-3-Clause"
] | 411
|
2016-06-14T20:52:25.000Z
|
2022-03-31T21:20:25.000Z
|
migration/migrator/migrations/course/20200131192137_office_hours_queue_queue_tokens.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 5,730
|
2016-05-23T21:04:32.000Z
|
2022-03-31T10:08:06.000Z
|
migration/migrator/migrations/course/20200131192137_office_hours_queue_queue_tokens.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 423
|
2016-09-22T21:11:30.000Z
|
2022-03-29T18:55:28.000Z
|
"""Migration for a given Submitty course database."""
def up(config, database, semester, course):
    """
    Run up migration.

    Adds a ``token`` column to ``queue_settings`` (defaulting to the
    placeholder 'temp_token' so existing rows stay valid), then seeds each
    row's token from its existing ``code`` value.

    :param config: Object holding configuration details about Submitty
    :type config: migrator.config.Config
    :param database: Object for interacting with given database for environment
    :type database: migrator.db.Database
    :param semester: Semester of the course being migrated
    :type semester: str
    :param course: Code of course being migrated
    :type course: str
    """
    database.execute("ALTER TABLE queue_settings ADD IF NOT EXISTS token TEXT NOT null DEFAULT 'temp_token'");
    # Replace the placeholder with the queue's access code.
    database.execute("Update queue_settings SET token = code Where token = 'temp_token';");
def down(config, database, semester, course):
    """
    Run down migration (rollback).

    Drops the ``token`` column added by :func:`up`.

    :param config: Object holding configuration details about Submitty
    :type config: migrator.config.Config
    :param database: Object for interacting with given database for environment
    :type database: migrator.db.Database
    :param semester: Semester of the course being migrated
    :type semester: str
    :param course: Code of course being migrated
    :type course: str
    """
    database.execute("ALTER TABLE queue_settings DROP COLUMN IF EXISTS token;");
| 35.108108
| 110
| 0.724403
| 164
| 1,299
| 5.707317
| 0.310976
| 0.047009
| 0.081197
| 0.098291
| 0.767094
| 0.700855
| 0.700855
| 0.700855
| 0.700855
| 0.700855
| 0
| 0
| 0.197845
| 1,299
| 36
| 111
| 36.083333
| 0.898273
| 0.622017
| 0
| 0
| 0
| 0
| 0.526854
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
493669c55b08d9d78d4f4133d5c02379f318d478
| 28
|
py
|
Python
|
src/infrastructure/queries/__init__.py
|
nedagarmo/Libraries
|
7618bd329593684d475a9e64a38409ffb30697df
|
[
"Apache-2.0"
] | null | null | null |
src/infrastructure/queries/__init__.py
|
nedagarmo/Libraries
|
7618bd329593684d475a9e64a38409ffb30697df
|
[
"Apache-2.0"
] | null | null | null |
src/infrastructure/queries/__init__.py
|
nedagarmo/Libraries
|
7618bd329593684d475a9e64a38409ffb30697df
|
[
"Apache-2.0"
] | null | null | null |
from .book import BookQuery
| 14
| 27
| 0.821429
| 4
| 28
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4941cb411d40a7842537c22584e826b47c2972c5
| 35
|
py
|
Python
|
negate.py
|
bourneagain/pythonBytes
|
be115162147e52718aacbfb9cd2763aa02754f28
|
[
"MIT"
] | 1
|
2017-05-29T02:02:27.000Z
|
2017-05-29T02:02:27.000Z
|
negate.py
|
bourneagain/pythonBytes
|
be115162147e52718aacbfb9cd2763aa02754f28
|
[
"MIT"
] | null | null | null |
negate.py
|
bourneagain/pythonBytes
|
be115162147e52718aacbfb9cd2763aa02754f28
|
[
"MIT"
] | null | null | null |
def negate(n):
print negate(n)
| 5.833333
| 15
| 0.628571
| 6
| 35
| 3.666667
| 0.666667
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 35
| 5
| 16
| 7
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
494bcfb5124aa77ecfc60bc145d664dd5077a142
| 2,210
|
py
|
Python
|
pypeln/task/api/each_task_test.py
|
quarckster/pypeln
|
f4160d0f4d4718b67f79a0707d7261d249459a4b
|
[
"MIT"
] | 1,281
|
2018-09-20T05:35:27.000Z
|
2022-03-30T01:29:48.000Z
|
pypeln/task/api/each_task_test.py
|
webclinic017/pypeln
|
5231806f2cac9d2019dacbbcf913484fd268b8c1
|
[
"MIT"
] | 78
|
2018-09-18T20:38:12.000Z
|
2022-03-30T20:16:02.000Z
|
pypeln/task/api/each_task_test.py
|
webclinic017/pypeln
|
5231806f2cac9d2019dacbbcf913484fd268b8c1
|
[
"MIT"
] | 88
|
2018-09-24T10:46:14.000Z
|
2022-03-28T09:34:50.000Z
|
import sys
import time
import typing as tp
from unittest import TestCase
import hypothesis as hp
from hypothesis import strategies as st
import pypeln as pl
# Cap on hypothesis example generation to keep the suite fast.
MAX_EXAMPLES = 10
# Generic type variable used in this module's annotations.
T = tp.TypeVar("T")
class TestEach(TestCase):
"""Property-based tests for pl.task.each (identity worker over random int lists)."""
# NOTE(review): leading indentation was lost when this file was captured;
# the statement nesting below (notably the final `assert nums_pl == []` in
# the list variants) is ambiguous — restore from version control before use.
# each() without run: the returned stage can be consumed via pl.task.run.
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_each(self, nums: tp.List[int]):
nums_pl = pl.task.each(lambda x: x, nums)
assert nums is not None
if nums_pl is not None:
pl.task.run(nums_pl)
# Consuming the stage with list() yields an empty result (each discards output).
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_each_list(self, nums: tp.List[int]):
nums_pl = pl.task.each(lambda x: x, nums)
assert nums is not None
if nums_pl is not None:
nums_pl = list(nums_pl)
if nums:
assert nums_pl != nums
else:
assert nums_pl == nums
assert nums_pl == []
# run=True executes eagerly and returns None.
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_each_run(self, nums: tp.List[int]):
nums_pl = pl.task.each(lambda x: x, nums, run=True)
assert nums_pl is None
# Awaiting the stage inside a coroutine also yields an empty result.
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_each_list_2(self, nums: tp.List[int]):
nums_pl = pl.task.each(lambda x: x, nums)
assert nums is not None
if nums_pl is not None:
nums_pl = await nums_pl
if nums:
assert nums_pl != nums
else:
assert nums_pl == nums
assert nums_pl == []
# Direct await of the each(...) expression.
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_each_list_3(self, nums: tp.List[int]):
nums_pl = await pl.task.each(lambda x: x, nums)
assert nums_pl == []
# Same as above with an extra parenthesized form.
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_each_list_4(self, nums: tp.List[int]):
nums_pl = await (pl.task.each(lambda x: x, nums))
assert nums_pl == []
| 24.285714
| 59
| 0.614027
| 336
| 2,210
| 3.872024
| 0.14881
| 0.106072
| 0.096849
| 0.059954
| 0.823982
| 0.823982
| 0.823982
| 0.823982
| 0.823982
| 0.823982
| 0
| 0.003109
| 0.272398
| 2,210
| 90
| 60
| 24.555556
| 0.80597
| 0
| 0
| 0.610169
| 0
| 0
| 0.000452
| 0
| 0
| 0
| 0
| 0
| 0.20339
| 1
| 0.050847
| false
| 0
| 0.118644
| 0
| 0.186441
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
49679d9e5f4642bf7fa2b1598dcba6c0d3c2e196
| 318
|
py
|
Python
|
Codewars/8kyu/tip-calculator/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/tip-calculator/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/tip-calculator/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 2.7.6
Test.assert_equals(calculate_tip(30, 'poor'), 2)
Test.assert_equals(calculate_tip(20, 'Excellent'), 4)
Test.assert_equals(calculate_tip(20, 'hi'), 'Rating not recognised')
Test.assert_equals(calculate_tip(107.65, 'GReat'), 17)
Test.assert_equals(calculate_tip(20, 'great!'), 'Rating not recognised')
| 39.75
| 72
| 0.751572
| 49
| 318
| 4.673469
| 0.44898
| 0.218341
| 0.349345
| 0.545852
| 0.637555
| 0.393013
| 0
| 0
| 0
| 0
| 0
| 0.068027
| 0.075472
| 318
| 7
| 73
| 45.428571
| 0.710884
| 0.044025
| 0
| 0
| 0
| 0
| 0.225166
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b8fdaf2a00677033fbbf3dca34355d8b3b057cf2
| 170
|
py
|
Python
|
holocron/nn/modules/__init__.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
holocron/nn/modules/__init__.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
holocron/nn/modules/__init__.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
from .activation import *
from .attention import *
from .conv import *
from .downsample import *
from .dropblock import *
from .lambda_layer import *
from .loss import *
| 21.25
| 27
| 0.752941
| 22
| 170
| 5.772727
| 0.454545
| 0.472441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164706
| 170
| 7
| 28
| 24.285714
| 0.894366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7702a30764fc1236bf22446e9cd8a3b920aaa84b
| 35
|
py
|
Python
|
samples/import_lib.py
|
codes1gn/gemini
|
4b173ea583f2578244d1d0fb482ccb77818f7558
|
[
"MIT"
] | null | null | null |
samples/import_lib.py
|
codes1gn/gemini
|
4b173ea583f2578244d1d0fb482ccb77818f7558
|
[
"MIT"
] | null | null | null |
samples/import_lib.py
|
codes1gn/gemini
|
4b173ea583f2578244d1d0fb482ccb77818f7558
|
[
"MIT"
] | null | null | null |
def do_print():
print('hello')
| 11.666667
| 18
| 0.6
| 5
| 35
| 4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 2
| 19
| 17.5
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
773e1e8274d2f21928ab32b326dbd5f1422bbaef
| 201
|
py
|
Python
|
pyaop/aspects/aspect_manager.py
|
stonewell/python-aop
|
563f41187f8f4d84e09344569541f985ffe90c6f
|
[
"MIT"
] | null | null | null |
pyaop/aspects/aspect_manager.py
|
stonewell/python-aop
|
563f41187f8f4d84e09344569541f985ffe90c6f
|
[
"MIT"
] | null | null | null |
pyaop/aspects/aspect_manager.py
|
stonewell/python-aop
|
563f41187f8f4d84e09344569541f985ffe90c6f
|
[
"MIT"
] | null | null | null |
class AspectManager(object):
def __init__(self):
super(AspectManager, self).__init__()
def get_module_hooker(self, name):
return None
def load_aspects(self):
pass
| 20.1
| 45
| 0.646766
| 23
| 201
| 5.173913
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.258706
| 201
| 9
| 46
| 22.333333
| 0.798658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
775e1a04595ea17b577520e917d68cc6ebd99e11
| 25
|
py
|
Python
|
cflearn/misc/__init__.py
|
carefree0910/carefree-learn
|
2043812afbe9c56f01ec1639961736313ee062ba
|
[
"MIT"
] | 400
|
2020-07-05T18:55:49.000Z
|
2022-02-21T02:33:08.000Z
|
cflow/misc/__init__.py
|
carefree0910/carefree-flow
|
7035015a072cf8142074d01683889f90950d2939
|
[
"MIT"
] | 82
|
2020-08-01T13:29:38.000Z
|
2021-10-09T07:13:44.000Z
|
cflearn/misc/__init__.py
|
carefree0910/carefree-learn
|
2043812afbe9c56f01ec1639961736313ee062ba
|
[
"MIT"
] | 34
|
2020-07-05T21:15:34.000Z
|
2021-12-20T08:45:17.000Z
|
from .internal_ import *
| 12.5
| 24
| 0.76
| 3
| 25
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
620a5ac5dd61aa27da74339e0a7d30b1abcfc2f6
| 2,604
|
py
|
Python
|
rul_features/computed_features/basic_statistical.py
|
inovex/RCIS2021-degradation-bearing-vessels
|
27bd1a2e3f08c5b42011596aa98e5ac627a416d6
|
[
"MIT"
] | 2
|
2021-06-21T11:40:38.000Z
|
2021-12-29T02:40:30.000Z
|
rul_features/computed_features/basic_statistical.py
|
chenzhengkun7/RCIS2021-degradation-estimation-bearing-vessels
|
27bd1a2e3f08c5b42011596aa98e5ac627a416d6
|
[
"MIT"
] | 2
|
2021-04-08T11:30:28.000Z
|
2021-04-12T06:41:31.000Z
|
rul_features/computed_features/basic_statistical.py
|
chenzhengkun7/RCIS2021-degradation-estimation-bearing-vessels
|
27bd1a2e3f08c5b42011596aa98e5ac627a416d6
|
[
"MIT"
] | 2
|
2021-06-21T11:40:43.000Z
|
2021-12-29T02:36:51.000Z
|
"""
Contains all basic statistical features that can be computed from one observation.
"""
import pandas as pd
import numpy as np
import tsfresh.feature_extraction.feature_calculators as tsf
import math
np.seterr('raise')
"""
Vibration Features:
- Basic statistical
- Entropy features
- frequency features
"""
# Basic statistical features #
def mean(current_observation: pd.DataFrame, raw_key: str):
return current_observation[raw_key].mean()
# Feature list taken from Mao et al. 2020
def maximum(current_observation: pd.DataFrame, raw_key: str):
return current_observation[raw_key].max()
def minimum(current_observation: pd.DataFrame, raw_key: str):
return current_observation[raw_key].min()
def root_mean_square(current_observation: pd.DataFrame, raw_key: str):
return math.sqrt(current_observation[raw_key].pow(2).mean())
def abs_avg(current_observation: pd.DataFrame, raw_key: str):
return root_mean_square(current_observation, raw_key)
def peak_to_peak_value(current_observation: pd.DataFrame, raw_key: str):
return maximum(current_observation, raw_key) - minimum(current_observation, raw_key)
def standard_deviation(current_observation: pd.DataFrame, raw_key: str):
return np.std(current_observation[raw_key])
def skewness(current_observation: pd.DataFrame, raw_key: str):
return tsf.skewness(current_observation[raw_key])
def kurtosis(current_observation: pd.DataFrame, raw_key: str):
return tsf.kurtosis(current_observation[raw_key])
def variance(current_observation: pd.DataFrame, raw_key: str):
return tsf.variance(current_observation[raw_key])
def peak_factor(current_observation: pd.DataFrame, raw_key: str):
root_mean_square_val = root_mean_square(current_observation, raw_key)
if root_mean_square_val == 0:
return 0
return maximum(current_observation, raw_key) / root_mean_square_val
def change_coefficient(current_observation: pd.DataFrame, raw_key: str):
standard_deviation_val = standard_deviation(current_observation, raw_key)
if standard_deviation_val == 0:
return 0
return mean(current_observation, raw_key) / standard_deviation_val
def clearance_factor(current_observation: pd.DataFrame, raw_key: str):
mean_val = current_observation[raw_key].pow(2).mean()
if mean_val == 0:
return 0
return maximum(current_observation, raw_key) / mean_val
def abs_energy(current_observation: pd.DataFrame, raw_key: str):
return tsf.abs_energy(current_observation[raw_key])
if __name__ == '__main__':
signal = pd.DataFrame([{1: 1}, {1: 2}, {1: 3}, {1: 4}])
| 29.931034
| 88
| 0.767665
| 363
| 2,604
| 5.206612
| 0.212121
| 0.304762
| 0.2
| 0.228571
| 0.634921
| 0.528571
| 0.492063
| 0.397884
| 0.258201
| 0.15873
| 0
| 0.008889
| 0.135945
| 2,604
| 87
| 89
| 29.931034
| 0.831111
| 0.057988
| 0
| 0.068182
| 0
| 0
| 0.005525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.318182
| false
| 0
| 0.090909
| 0.25
| 0.795455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
62374556130d0603903be3bb4706b60820b9639b
| 48,191
|
py
|
Python
|
tests/hikari/impl/test_event_manager.py
|
IkBenOlie5/hikari
|
09502f05427ad92b05103bd1a56533296a593755
|
[
"MIT"
] | null | null | null |
tests/hikari/impl/test_event_manager.py
|
IkBenOlie5/hikari
|
09502f05427ad92b05103bd1a56533296a593755
|
[
"MIT"
] | 34
|
2021-10-01T17:08:11.000Z
|
2022-03-29T02:21:07.000Z
|
tests/hikari/impl/test_event_manager.py
|
IkBenOlie5/hikari
|
09502f05427ad92b05103bd1a56533296a593755
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Nekokatt
# Copyright (c) 2021 davfsa
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import base64
import contextlib
import random
import mock
import pytest
from hikari import channels
from hikari import errors
from hikari import intents
from hikari import presences
from hikari.impl import event_manager
from hikari.internal import time
from tests.hikari import hikari_test_helpers
def test_fixed_size_nonce():
stack = contextlib.ExitStack()
monotonic = stack.enter_context(mock.patch.object(time, "monotonic_ns"))
monotonic.return_value.to_bytes = mock.Mock(return_value="foo")
randbits = stack.enter_context(mock.patch.object(random, "getrandbits"))
randbits.return_value.to_bytes = mock.Mock(return_value="bar")
encode = stack.enter_context(mock.patch.object(base64, "b64encode"))
encode.return_value.decode = mock.Mock(return_value="nonce")
with stack:
assert event_manager._fixed_size_nonce() == "nonce"
monotonic.assert_called_once_with()
monotonic.return_value.to_bytes.assert_called_once_with(8, "big")
randbits.assert_called_once_with(92)
randbits.return_value.to_bytes.assert_called_once_with(12, "big")
encode.assert_called_once_with("foobar")
encode.return_value.decode.assert_called_once_with("ascii")
@pytest.fixture()
def shard():
return mock.Mock(id=987)
@pytest.mark.asyncio()
async def test__request_guild_members(shard):
shard.request_guild_members = mock.AsyncMock()
await event_manager._request_guild_members(shard, 123, include_presences=True, nonce="okokok")
shard.request_guild_members.assert_awaited_once_with(123, include_presences=True, nonce="okokok")
@pytest.mark.asyncio()
async def test__request_guild_members_handles_state_conflict_error(shard):
shard.request_guild_members = mock.AsyncMock(side_effect=errors.ComponentStateConflictError(reason="OK"))
await event_manager._request_guild_members(shard, 123, include_presences=True, nonce="okokok")
shard.request_guild_members.assert_awaited_once_with(123, include_presences=True, nonce="okokok")
class TestEventManagerImpl:
@pytest.fixture()
def event_factory(self):
return mock.Mock()
@pytest.fixture()
def event_manager(self, event_factory):
obj = hikari_test_helpers.mock_class_namespace(event_manager.EventManagerImpl, slots_=False)(
event_factory, intents.Intents.ALL, cache=mock.Mock()
)
obj.dispatch = mock.AsyncMock()
return obj
@pytest.fixture()
def stateless_event_manager(self, event_factory):
obj = hikari_test_helpers.mock_class_namespace(event_manager.EventManagerImpl, slots_=False)(
event_factory, intents.Intents.ALL, cache=None
)
obj.dispatch = mock.AsyncMock()
return obj
@pytest.mark.asyncio()
async def test_on_ready_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(my_user=mock.Mock())
event_factory.deserialize_ready_event.return_value = event
await event_manager.on_ready(shard, payload)
event_manager._cache.update_me.assert_called_once_with(event.my_user)
event_factory.deserialize_ready_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_ready_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_ready(shard, payload)
event_factory.deserialize_ready_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_ready_event.return_value)
@pytest.mark.asyncio()
async def test_on_resumed(self, event_manager, shard, event_factory):
payload = {}
await event_manager.on_resumed(shard, payload)
event_factory.deserialize_resumed_event.assert_called_once_with(shard)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_resumed_event.return_value)
@pytest.mark.asyncio()
async def test_on_channel_create_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(channel=mock.Mock(channels.GuildChannel))
event_factory.deserialize_channel_create_event.return_value = event
await event_manager.on_channel_create(shard, payload)
event_manager._cache.set_guild_channel.assert_called_once_with(event.channel)
event_factory.deserialize_channel_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_channel_create_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_channel_create(shard, payload)
event_factory.deserialize_channel_create_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_channel_create_event.return_value
)
@pytest.mark.asyncio()
async def test_on_channel_update_stateful(self, event_manager, shard, event_factory):
payload = {"id": 123}
old_channel = object()
event = mock.Mock(channel=mock.Mock(channels.GuildChannel))
event_factory.deserialize_channel_update_event.return_value = event
event_manager._cache.get_guild_channel.return_value = old_channel
await event_manager.on_channel_update(shard, payload)
event_manager._cache.get_guild_channel.assert_called_once_with(123)
event_manager._cache.update_guild_channel.assert_called_once_with(event.channel)
event_factory.deserialize_channel_update_event.assert_called_once_with(shard, payload, old_channel=old_channel)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_channel_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"id": 123}
await stateless_event_manager.on_channel_update(shard, payload)
event_factory.deserialize_channel_update_event.assert_called_once_with(shard, payload, old_channel=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_channel_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_channel_delete_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(channel=mock.Mock(id=123))
event_factory.deserialize_channel_delete_event.return_value = event
await event_manager.on_channel_delete(shard, payload)
event_manager._cache.delete_guild_channel.assert_called_once_with(123)
event_factory.deserialize_channel_delete_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_channel_delete_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_channel_delete(shard, payload)
event_factory.deserialize_channel_delete_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_channel_delete_event.return_value
)
@pytest.mark.asyncio()
async def test_on_channel_pins_update(self, stateless_event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_channel_pins_update_event.return_value = event
await stateless_event_manager.on_channel_pins_update(shard, payload)
event_factory.deserialize_channel_pins_update_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_create_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(
guild=mock.Mock(id=123, is_large=False),
channels={"TestChannel": 456},
emojis={"TestEmoji": 789},
roles={"TestRole": 1234},
members={"TestMember": 5678},
presences={"TestPresence": 9012},
voice_states={"TestState": 345},
chunk_nonce=None,
)
event_factory.deserialize_guild_create_event.return_value = event
shard.request_guild_members = mock.AsyncMock()
await event_manager.on_guild_create(shard, payload)
assert event.chunk_nonce is None
shard.request_guild_members.assert_not_called()
event_manager._cache.update_guild.assert_called_once_with(event.guild)
event_manager._cache.clear_guild_channels_for_guild.assert_called_once_with(123)
event_manager._cache.set_guild_channel.assert_called_once_with(456)
event_manager._cache.clear_emojis_for_guild.assert_called_once_with(123)
event_manager._cache.set_emoji.assert_called_once_with(789)
event_manager._cache.clear_roles_for_guild.assert_called_once_with(123)
event_manager._cache.set_role.assert_called_once_with(1234)
event_manager._cache.clear_members_for_guild.assert_called_once_with(123)
event_manager._cache.set_member.assert_called_once_with(5678)
event_manager._cache.clear_presences_for_guild.assert_called_once_with(123)
event_manager._cache.set_presence.assert_called_once_with(9012)
event_manager._cache.clear_voice_states_for_guild.assert_called_once_with(123)
event_manager._cache.set_voice_state.assert_called_once_with(345)
event_factory.deserialize_guild_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_create_when_request_chunks(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(
guild=mock.Mock(id=123, is_large=True),
channels={"TestChannel": 456},
emojis={"TestEmoji": 789},
roles={"TestRole": 1234},
members={"TestMember": 5678},
presences={"TestPresence": 9012},
voice_states={"TestState": 345},
chunk_nonce=None,
)
event_factory.deserialize_guild_create_event.return_value = event
shard.request_guild_members = mock.Mock()
stack = contextlib.ExitStack()
create_task = stack.enter_context(mock.patch.object(asyncio, "create_task"))
uuid = stack.enter_context(mock.patch("hikari.impl.event_manager._fixed_size_nonce", return_value="uuid"))
_request_guild_members = stack.enter_context(
mock.patch("hikari.impl.event_manager._request_guild_members", new_callable=mock.Mock)
)
with stack:
await event_manager.on_guild_create(shard, payload)
uuid.assert_called_once_with()
nonce = "987.uuid"
assert event.chunk_nonce == nonce
_request_guild_members.assert_called_once_with(shard, event.guild, include_presences=True, nonce=nonce)
create_task.assert_called_once_with(
_request_guild_members.return_value, name="987:123 guild create members request"
)
event_manager._cache.update_guild.assert_called_once_with(event.guild)
event_manager._cache.clear_guild_channels_for_guild.assert_called_once_with(123)
event_manager._cache.set_guild_channel.assert_called_once_with(456)
event_manager._cache.clear_emojis_for_guild.assert_called_once_with(123)
event_manager._cache.set_emoji.assert_called_once_with(789)
event_manager._cache.clear_roles_for_guild.assert_called_once_with(123)
event_manager._cache.set_role.assert_called_once_with(1234)
event_manager._cache.clear_members_for_guild.assert_called_once_with(123)
event_manager._cache.set_member.assert_called_once_with(5678)
event_manager._cache.clear_presences_for_guild.assert_called_once_with(123)
event_manager._cache.set_presence.assert_called_once_with(9012)
event_manager._cache.clear_voice_states_for_guild.assert_called_once_with(123)
event_manager._cache.set_voice_state.assert_called_once_with(345)
event_factory.deserialize_guild_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_create_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
shard.request_guild_members = mock.AsyncMock()
await stateless_event_manager.on_guild_create(shard, payload)
event_factory.deserialize_guild_create_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_create_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_update_stateful(self, event_manager, shard, event_factory):
payload = {"id": 123}
old_guild = object()
mock_role = object()
mock_emoji = object()
event = mock.Mock(roles={555: mock_role}, emojis={333: mock_emoji}, guild=mock.Mock(id=123))
event_factory.deserialize_guild_update_event.return_value = event
event_manager._cache.get_guild.return_value = old_guild
await event_manager.on_guild_update(shard, payload)
event_manager._cache.get_guild.assert_called_once_with(123)
event_manager._cache.update_guild.assert_called_once_with(event.guild)
event_manager._cache.clear_roles_for_guild.assert_called_once_with(123)
event_manager._cache.set_role.assert_called_once_with(mock_role)
event_manager._cache.clear_emojis_for_guild.assert_called_once_with(123)
event_manager._cache.set_emoji.assert_called_once_with(mock_emoji)
event_factory.deserialize_guild_update_event.assert_called_once_with(shard, payload, old_guild=old_guild)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"id": 123}
await stateless_event_manager.on_guild_update(shard, payload)
event_factory.deserialize_guild_update_event.assert_called_once_with(shard, payload, old_guild=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_delete_stateful_when_available(self, event_manager, shard, event_factory):
payload = {"unavailable": False}
event = mock.Mock(guild_id=123)
event_factory.deserialize_guild_leave_event.return_value = event
await event_manager.on_guild_delete(shard, payload)
event_manager._cache.delete_guild.assert_called_once_with(123)
event_manager._cache.clear_voice_states_for_guild.assert_called_once_with(123)
event_manager._cache.clear_invites_for_guild.assert_called_once_with(123)
event_manager._cache.clear_members_for_guild.assert_called_once_with(123)
event_manager._cache.clear_presences_for_guild.assert_called_once_with(123)
event_manager._cache.clear_guild_channels_for_guild.assert_called_once_with(123)
event_manager._cache.clear_emojis_for_guild.assert_called_once_with(123)
event_manager._cache.clear_roles_for_guild.assert_called_once_with(123)
event_factory.deserialize_guild_leave_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_delete_stateful_when_unavailable(self, event_manager, shard, event_factory):
payload = {"unavailable": True}
event = mock.Mock(guild_id=123)
event_factory.deserialize_guild_unavailable_event.return_value = event
await event_manager.on_guild_delete(shard, payload)
event_manager._cache.set_guild_availability.assert_called_once_with(event.guild_id, False)
event_factory.deserialize_guild_unavailable_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_delete_stateless_when_available(self, stateless_event_manager, shard, event_factory):
payload = {"unavailable": False}
await stateless_event_manager.on_guild_delete(shard, payload)
event_factory.deserialize_guild_leave_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_leave_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_delete_stateless_when_unavailable(self, stateless_event_manager, shard, event_factory):
payload = {"unavailable": True}
await stateless_event_manager.on_guild_delete(shard, payload)
event_factory.deserialize_guild_unavailable_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_unavailable_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_ban_add(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_guild_ban_add_event.return_value = event
await event_manager.on_guild_ban_add(shard, payload)
event_factory.deserialize_guild_ban_add_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_ban_remove(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_guild_ban_remove_event.return_value = event
await event_manager.on_guild_ban_remove(shard, payload)
event_factory.deserialize_guild_ban_remove_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_emojis_update_stateful(self, event_manager, shard, event_factory):
payload = {"guild_id": 123}
old_emojis = {"Test": 123}
mock_emoji = object()
event = mock.Mock(emojis=[mock_emoji], guild_id=123)
event_factory.deserialize_guild_emojis_update_event.return_value = event
event_manager._cache.clear_emojis_for_guild.return_value = old_emojis
await event_manager.on_guild_emojis_update(shard, payload)
event_manager._cache.clear_emojis_for_guild.assert_called_once_with(123)
event_manager._cache.set_emoji.assert_called_once_with(mock_emoji)
event_factory.deserialize_guild_emojis_update_event.assert_called_once_with(shard, payload, old_emojis=[123])
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_emojis_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"guild_id": 123}
await stateless_event_manager.on_guild_emojis_update(shard, payload)
event_factory.deserialize_guild_emojis_update_event.assert_called_once_with(shard, payload, old_emojis=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_emojis_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_integrations_update(self, event_manager, shard):
assert await event_manager.on_guild_integrations_update(shard, {}) is None
event_manager.dispatch.assert_not_called()
@pytest.mark.asyncio()
async def test_on_integration_create(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_integration_create_event.return_value = event
await event_manager.on_integration_create(shard, payload)
event_factory.deserialize_integration_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_integration_delete(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_integration_delete_event.return_value = event
await event_manager.on_integration_delete(shard, payload)
event_factory.deserialize_integration_delete_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_integration_update(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_integration_update_event.return_value = event
await event_manager.on_integration_update(shard, payload)
event_factory.deserialize_integration_update_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_member_add_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(user=object(), member=object())
event_factory.deserialize_guild_member_add_event.return_value = event
await event_manager.on_guild_member_add(shard, payload)
event_manager._cache.update_member.assert_called_once_with(event.member)
event_factory.deserialize_guild_member_add_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_member_add_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_guild_member_add(shard, payload)
event_factory.deserialize_guild_member_add_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_member_add_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_member_remove_stateful(self, event_manager, shard, event_factory):
payload = {"guild_id": "456", "user": {"id": "123"}}
await event_manager.on_guild_member_remove(shard, payload)
event_manager._cache.delete_member.assert_called_once_with(456, 123)
event_factory.deserialize_guild_member_remove_event.assert_called_once_with(
shard, payload, old_member=event_manager._cache.delete_member.return_value
)
event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_member_remove_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_member_remove_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_guild_member_remove(shard, payload)
event_factory.deserialize_guild_member_remove_event.assert_called_once_with(shard, payload, old_member=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_member_remove_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_member_update_stateful(self, event_manager, shard, event_factory):
payload = {"user": {"id": 123}, "guild_id": 456}
old_member = object()
event = mock.Mock(member=mock.Mock())
event_factory.deserialize_guild_member_update_event.return_value = event
event_manager._cache.get_member.return_value = old_member
await event_manager.on_guild_member_update(shard, payload)
event_manager._cache.get_member.assert_called_once_with(456, 123)
event_manager._cache.update_member.assert_called_once_with(event.member)
event_factory.deserialize_guild_member_update_event.assert_called_once_with(
shard, payload, old_member=old_member
)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_member_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"user": {"id": 123}, "guild_id": 456}
await stateless_event_manager.on_guild_member_update(shard, payload)
event_factory.deserialize_guild_member_update_event.assert_called_once_with(shard, payload, old_member=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_member_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_members_chunk_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(members={"TestMember": 123}, presences={"TestPresences": 456})
event_factory.deserialize_guild_member_chunk_event.return_value = event
await event_manager.on_guild_members_chunk(shard, payload)
event_manager._cache.set_member.assert_called_once_with(123)
event_manager._cache.set_presence.assert_called_once_with(456)
event_factory.deserialize_guild_member_chunk_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_members_chunk_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_guild_members_chunk(shard, payload)
event_factory.deserialize_guild_member_chunk_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_member_chunk_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_role_create_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(role=object())
event_factory.deserialize_guild_role_create_event.return_value = event
await event_manager.on_guild_role_create(shard, payload)
event_manager._cache.set_role.assert_called_once_with(event.role)
event_factory.deserialize_guild_role_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_role_create_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_guild_role_create(shard, payload)
event_factory.deserialize_guild_role_create_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_role_create_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_role_update_stateful(self, event_manager, shard, event_factory):
payload = {"role": {"id": 123}}
old_role = object()
event = mock.Mock(role=mock.Mock())
event_factory.deserialize_guild_role_update_event.return_value = event
event_manager._cache.get_role.return_value = old_role
await event_manager.on_guild_role_update(shard, payload)
event_manager._cache.get_role.assert_called_once_with(123)
event_manager._cache.update_role.assert_called_once_with(event.role)
event_factory.deserialize_guild_role_update_event.assert_called_once_with(shard, payload, old_role=old_role)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_guild_role_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"role": {"id": 123}}
await stateless_event_manager.on_guild_role_update(shard, payload)
event_factory.deserialize_guild_role_update_event.assert_called_once_with(shard, payload, old_role=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_role_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_guild_role_delete_stateful(self, event_manager, shard, event_factory):
payload = {"role_id": "123"}
await event_manager.on_guild_role_delete(shard, payload)
event_manager._cache.delete_role.assert_called_once_with(123)
event_factory.deserialize_guild_role_delete_event.assert_called_once_with(
shard, payload, old_role=event_manager._cache.delete_role.return_value
)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_guild_role_delete_event.return_value)
@pytest.mark.asyncio()
async def test_on_guild_role_delete_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_guild_role_delete(shard, payload)
event_factory.deserialize_guild_role_delete_event.assert_called_once_with(shard, payload, old_role=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_guild_role_delete_event.return_value
)
@pytest.mark.asyncio()
async def test_on_invite_create_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(invite="qwerty")
event_factory.deserialize_invite_create_event.return_value = event
await event_manager.on_invite_create(shard, payload)
event_manager._cache.set_invite.assert_called_once_with("qwerty")
event_factory.deserialize_invite_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_invite_create_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_invite_create(shard, payload)
event_factory.deserialize_invite_create_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_invite_create_event.return_value
)
@pytest.mark.asyncio()
async def test_on_invite_delete_stateful(self, event_manager, shard, event_factory):
payload = {"code": "qwerty"}
await event_manager.on_invite_delete(shard, payload)
event_manager._cache.delete_invite.assert_called_once_with("qwerty")
event_factory.deserialize_invite_delete_event.assert_called_once_with(
shard, payload, old_invite=event_manager._cache.delete_invite.return_value
)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_invite_delete_event.return_value)
@pytest.mark.asyncio()
async def test_on_invite_delete_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_invite_delete(shard, payload)
event_factory.deserialize_invite_delete_event.assert_called_once_with(shard, payload, old_invite=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_invite_delete_event.return_value
)
@pytest.mark.asyncio()
async def test_on_message_create_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(message=object())
event_factory.deserialize_message_create_event.return_value = event
await event_manager.on_message_create(shard, payload)
event_manager._cache.set_message.assert_called_once_with(event.message)
event_factory.deserialize_message_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_create_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_message_create(shard, payload)
event_factory.deserialize_message_create_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_message_create_event.return_value
)
@pytest.mark.asyncio()
async def test_on_message_update_stateful(self, event_manager, shard, event_factory):
payload = {"id": 123}
old_message = object()
event = mock.Mock(message=mock.Mock())
event_factory.deserialize_message_update_event.return_value = event
event_manager._cache.get_message.return_value = old_message
await event_manager.on_message_update(shard, payload)
event_manager._cache.get_message.assert_called_once_with(123)
event_manager._cache.update_message.assert_called_once_with(event.message)
event_factory.deserialize_message_update_event.assert_called_once_with(shard, payload, old_message=old_message)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"id": 123}
await stateless_event_manager.on_message_update(shard, payload)
event_factory.deserialize_message_update_event.assert_called_once_with(shard, payload, old_message=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_message_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_message_delete_stateless(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(message_id=123)
event_factory.deserialize_message_delete_event.return_value = event
await event_manager.on_message_delete(shard, payload)
event_manager._cache.delete_message.assert_called_once_with(123)
event_factory.deserialize_message_delete_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_delete_stateful(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_message_delete(shard, payload)
event_factory.deserialize_message_delete_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_message_delete_event.return_value
)
@pytest.mark.asyncio()
async def test_on_message_delete_bulk_stateful(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock(message_ids=[123, 456, 789])
event_factory.deserialize_message_delete_bulk_event.return_value = event
await event_manager.on_message_delete_bulk(shard, payload)
event_manager._cache.delete_message.assert_has_calls([mock.call(123), mock.call(456), mock.call(789)])
event_factory.deserialize_message_delete_bulk_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_delete_bulk_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_message_delete_bulk(shard, payload)
event_factory.deserialize_message_delete_bulk_event.assert_called_once_with(shard, payload)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_message_delete_bulk_event.return_value
)
@pytest.mark.asyncio()
async def test_on_message_reaction_add(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_message_reaction_add_event.return_value = event
await event_manager.on_message_reaction_add(shard, payload)
event_factory.deserialize_message_reaction_add_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_reaction_remove(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_message_reaction_remove_event.return_value = event
await event_manager.on_message_reaction_remove(shard, payload)
event_factory.deserialize_message_reaction_remove_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_reaction_remove_all(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_message_reaction_remove_all_event.return_value = event
await event_manager.on_message_reaction_remove_all(shard, payload)
event_factory.deserialize_message_reaction_remove_all_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_message_reaction_remove_emoji(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_message_reaction_remove_emoji_event.return_value = event
await event_manager.on_message_reaction_remove_emoji(shard, payload)
event_factory.deserialize_message_reaction_remove_emoji_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_presence_update_stateful_update(self, event_manager, shard, event_factory):
payload = {"user": {"id": 123}, "guild_id": 456}
old_presence = object()
event = mock.Mock(presence=mock.Mock(visible_status=presences.Status.ONLINE))
event_factory.deserialize_presence_update_event.return_value = event
event_manager._cache.get_presence.return_value = old_presence
await event_manager.on_presence_update(shard, payload)
event_manager._cache.get_presence.assert_called_once_with(456, 123)
event_manager._cache.update_presence.assert_called_once_with(event.presence)
event_factory.deserialize_presence_update_event.assert_called_once_with(
shard, payload, old_presence=old_presence
)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_presence_update_stateful_delete(self, event_manager, shard, event_factory):
payload = {"user": {"id": 123}, "guild_id": 456}
old_presence = object()
event = mock.Mock(presence=mock.Mock(visible_status=presences.Status.OFFLINE))
event_factory.deserialize_presence_update_event.return_value = event
event_manager._cache.get_presence.return_value = old_presence
await event_manager.on_presence_update(shard, payload)
event_manager._cache.get_presence.assert_called_once_with(456, 123)
event_manager._cache.delete_presence.assert_called_once_with(event.presence.guild_id, event.presence.user_id)
event_factory.deserialize_presence_update_event.assert_called_once_with(
shard, payload, old_presence=old_presence
)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_presence_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"user": {"id": 123}, "guild_id": 456}
await stateless_event_manager.on_presence_update(shard, payload)
event_factory.deserialize_presence_update_event.assert_called_once_with(shard, payload, old_presence=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_presence_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_typing_start(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_typing_start_event.return_value = event
await event_manager.on_typing_start(shard, payload)
event_factory.deserialize_typing_start_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_user_update_stateful(self, event_manager, shard, event_factory):
payload = {}
old_user = object()
event = mock.Mock(user=mock.Mock())
event_factory.deserialize_own_user_update_event.return_value = event
event_manager._cache.get_me.return_value = old_user
await event_manager.on_user_update(shard, payload)
event_manager._cache.update_me.assert_called_once_with(event.user)
event_factory.deserialize_own_user_update_event.assert_called_once_with(shard, payload, old_user=old_user)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_user_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {}
await stateless_event_manager.on_user_update(shard, payload)
event_factory.deserialize_own_user_update_event.assert_called_once_with(shard, payload, old_user=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_own_user_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_voice_state_update_stateful_update(self, event_manager, shard, event_factory):
payload = {"user_id": 123, "guild_id": 456}
old_state = object()
event = mock.Mock(state=mock.Mock(channel_id=123))
event_factory.deserialize_voice_state_update_event.return_value = event
event_manager._cache.get_voice_state.return_value = old_state
await event_manager.on_voice_state_update(shard, payload)
event_manager._cache.get_voice_state.assert_called_once_with(456, 123)
event_manager._cache.update_voice_state.assert_called_once_with(event.state)
event_factory.deserialize_voice_state_update_event.assert_called_once_with(shard, payload, old_state=old_state)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_voice_state_update_stateful_delete(self, event_manager, shard, event_factory):
payload = {"user_id": 123, "guild_id": 456}
old_state = object()
event = mock.Mock(state=mock.Mock(channel_id=None))
event_factory.deserialize_voice_state_update_event.return_value = event
event_manager._cache.get_voice_state.return_value = old_state
await event_manager.on_voice_state_update(shard, payload)
event_manager._cache.get_voice_state.assert_called_once_with(456, 123)
event_manager._cache.delete_voice_state.assert_called_once_with(event.state.guild_id, event.state.user_id)
event_factory.deserialize_voice_state_update_event.assert_called_once_with(shard, payload, old_state=old_state)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_voice_state_update_stateless(self, stateless_event_manager, shard, event_factory):
payload = {"user_id": 123, "guild_id": 456}
await stateless_event_manager.on_voice_state_update(shard, payload)
event_factory.deserialize_voice_state_update_event.assert_called_once_with(shard, payload, old_state=None)
stateless_event_manager.dispatch.assert_awaited_once_with(
event_factory.deserialize_voice_state_update_event.return_value
)
@pytest.mark.asyncio()
async def test_on_voice_server_update(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_voice_server_update_event.return_value = event
await event_manager.on_voice_server_update(shard, payload)
event_factory.deserialize_voice_server_update_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_webhooks_update(self, event_manager, shard, event_factory):
payload = {}
event = mock.Mock()
event_factory.deserialize_webhook_update_event.return_value = event
await event_manager.on_webhooks_update(shard, payload)
event_factory.deserialize_webhook_update_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event)
@pytest.mark.asyncio()
async def test_on_application_command_create(self, event_manager, shard, event_factory):
payload = {"id": "4544333334dd44"}
await event_manager.on_application_command_create(shard, payload)
event_factory.deserialize_command_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_command_create_event.return_value)
@pytest.mark.asyncio()
async def test_on_application_command_update(self, event_manager, shard, event_factory):
payload = {"id": "454433333444"}
await event_manager.on_application_command_update(shard, payload)
event_factory.deserialize_command_update_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_command_update_event.return_value)
@pytest.mark.asyncio()
async def test_on_application_command_delete(self, event_manager, shard, event_factory):
payload = {"id": "4544444"}
await event_manager.on_application_command_delete(shard, payload)
event_factory.deserialize_command_delete_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_command_delete_event.return_value)
@pytest.mark.asyncio()
async def test_on_interaction_create(self, event_manager, shard, event_factory):
payload = {"id": "123"}
await event_manager.on_interaction_create(shard, payload)
event_factory.deserialize_interaction_create_event.assert_called_once_with(shard, payload)
event_manager.dispatch.assert_awaited_once_with(event_factory.deserialize_interaction_create_event.return_value)
| 44.497692
| 120
| 0.760204
| 6,134
| 48,191
| 5.514998
| 0.044506
| 0.111384
| 0.072364
| 0.090455
| 0.909516
| 0.897071
| 0.866948
| 0.841467
| 0.785096
| 0.711963
| 0
| 0.011018
| 0.163765
| 48,191
| 1,082
| 121
| 44.538817
| 0.828432
| 0.022743
| 0
| 0.468586
| 0
| 0
| 0.014594
| 0.001933
| 0
| 0
| 0
| 0
| 0.304974
| 1
| 0.006545
| false
| 0
| 0.017016
| 0.002618
| 0.030105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6251ec0dcf2dd0fe1afe67fa6c9fb6c4b36072f0
| 93
|
py
|
Python
|
src/__init__.py
|
spitis/PyIndex
|
b4719440a4ff8e439d570a79067fd5e0eb66b7c0
|
[
"MIT"
] | 14
|
2016-12-30T16:51:37.000Z
|
2022-02-13T17:27:48.000Z
|
src/__init__.py
|
spitis/PyIndex
|
b4719440a4ff8e439d570a79067fd5e0eb66b7c0
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
spitis/PyIndex
|
b4719440a4ff8e439d570a79067fd5e0eb66b7c0
|
[
"MIT"
] | 3
|
2018-10-11T18:59:55.000Z
|
2019-05-31T12:47:57.000Z
|
from .indices import *
from .postings import *
from .manager import *
from .swhoosh import *
| 18.6
| 23
| 0.741935
| 12
| 93
| 5.75
| 0.5
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172043
| 93
| 4
| 24
| 23.25
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
625562a8891aaf23e953f5dc08e988873f0a439c
| 10,314
|
py
|
Python
|
python/stg/models.py
|
runopti/s
|
6e642a46de625cbd7a333c1c70fa7a9bee82f717
|
[
"MIT"
] | 55
|
2020-08-06T22:31:53.000Z
|
2022-03-21T03:21:10.000Z
|
python/stg/models.py
|
runopti/s
|
6e642a46de625cbd7a333c1c70fa7a9bee82f717
|
[
"MIT"
] | 1
|
2020-09-20T07:05:00.000Z
|
2020-09-20T07:05:00.000Z
|
python/stg/models.py
|
runopti/s
|
6e642a46de625cbd7a333c1c70fa7a9bee82f717
|
[
"MIT"
] | 10
|
2020-07-10T23:21:14.000Z
|
2022-01-28T20:35:45.000Z
|
import torch.nn as nn
import torch
import math
import numpy as np
from torch.autograd import Variable
from .layers import MLPLayer, FeatureSelector, GatingLayer
from .losses import PartialLogLikelihood
__all__ = ['MLPModel', 'MLPRegressionModel', 'MLPClassificationModel', 'LinearRegressionModel', 'LinearClassificationModel']
class ModelIOKeysMixin(object):
def _get_input(self, feed_dict):
return feed_dict['input']
def _get_label(self, feed_dict):
return feed_dict['label']
def _get_covariate(self, feed_dict):
'''For cox'''
return feed_dict['X']
def _get_fail_indicator(self, feed_dict):
'''For cox'''
return feed_dict['E'].reshape(-1, 1)
def _get_failure_time(self, feed_dict):
'''For cox'''
return feed_dict['T']
def _compose_output(self, value):
return dict(pred=value)
class MLPModel(MLPLayer):
def freeze_weights(self):
for name, p in self.named_parameters():
if name != 'mu':
p.requires_grad = False
def get_gates(self, mode):
if mode == 'raw':
return self.mu.detach().cpu().numpy()
elif mode == 'prob':
return np.minimum(1.0, np.maximum(0.0, self.mu.detach().cpu().numpy() + 0.5))
else:
raise NotImplementedError()
class L1RegressionModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, output_dim, hidden_dims, device, batch_norm=None, dropout=None, activation='relu',
sigma=1.0, lam=0.1):
super().__init__(input_dim, output_dim, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.loss = nn.MSELoss()
self.lam = lam
def forward(self, feed_dict):
pred = super().forward(self._get_input(feed_dict))
if self.training:
loss = self.loss(pred, self._get_label(feed_dict))
reg = torch.mean(torch.abs(self.mlp[0][0].weight))
total_loss = loss + self.lam * reg
return total_loss, dict(), dict()
else:
return self._compose_output(pred)
class L1GateRegressionModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, output_dim, hidden_dims, device, batch_norm=None, dropout=None, activation='relu',
sigma=1.0, lam=0.1):
super().__init__(input_dim, output_dim, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.GateingLayer = GatingLayer(input_dim, device)
self.reg = self.GateingLayer.regularizer
self.mu = self.GateingLayer.mu
self.loss = nn.MSELoss()
self.lam = lam
def forward(self, feed_dict):
x = self.GateingLayer(self._get_input(feed_dict))
pred = super().forward(x)
if self.training:
loss = self.loss(pred, self._get_label(feed_dict))
reg = torch.mean(self.reg(self.mu))
total_loss = loss + self.lam * reg
return total_loss, dict(), dict()
else:
return self._compose_output(pred)
class SoftThreshRegressionModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, output_dim, hidden_dims, device, batch_norm=None, dropout=None, activation='relu',
sigma=1.0, lam=0.1):
super().__init__(input_dim, output_dim, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.loss = nn.MSELoss()
self.lam = lam
def prox_plus(self, w):
"""Projection onto non-negative numbers
"""
below = w < 0
w[below] = 0
return w
def prox_op(self, w):
return torch.sign(w) * self.prox_plus(torch.abs(w) - self.lam)
def forward(self, feed_dict):
pred = super().forward(self._get_input(feed_dict))
if self.training:
loss = self.loss(pred, self._get_label(feed_dict))
total_loss = loss
return total_loss, dict(), dict()
else:
return self._compose_output(pred)
class STGRegressionModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, output_dim, hidden_dims, device, batch_norm=None, dropout=None, activation='relu',
sigma=1.0, lam=0.1):
super().__init__(input_dim, output_dim, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.FeatureSelector = FeatureSelector(input_dim, sigma, device)
self.loss = nn.MSELoss()
self.reg = self.FeatureSelector.regularizer
self.lam = lam
self.mu = self.FeatureSelector.mu
self.sigma = self.FeatureSelector.sigma
def forward(self, feed_dict):
x = self.FeatureSelector(self._get_input(feed_dict))
pred = super().forward(x)
if self.training:
loss = self.loss(pred, self._get_label(feed_dict))
reg = torch.mean(self.reg((self.mu + 0.5)/self.sigma))
total_loss = loss + self.lam * reg
return total_loss, dict(), dict()
else:
return self._compose_output(pred)
class STGClassificationModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, nr_classes, hidden_dims, device, batch_norm=None, dropout=None, activation='relu',
sigma=1.0, lam=0.1):
super().__init__(input_dim, nr_classes, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.FeatureSelector = FeatureSelector(input_dim, sigma, device)
self.softmax = nn.Softmax()
self.loss = nn.CrossEntropyLoss()
self.reg = self.FeatureSelector.regularizer
self.lam = lam
self.mu = self.FeatureSelector.mu
self.sigma = self.FeatureSelector.sigma
def forward(self, feed_dict):
x = self.FeatureSelector(self._get_input(feed_dict))
logits = super().forward(x)
if self.training:
loss = self.loss(logits, self._get_label(feed_dict))
reg = torch.mean(self.reg((self.mu + 0.5)/self.sigma))
total_loss = loss + self.lam * reg
return total_loss, dict(), dict()
else:
return self._compose_output(logits)
def _compose_output(self, logits):
value = self.softmax(logits)
_, pred = value.max(dim=1)
return dict(prob=value, pred=pred, logits=logits)
class STGCoxModel(MLPModel, ModelIOKeysMixin):
#TODO: Finish impl cox model.
def __init__(self, input_dim, nr_classes, hidden_dims, device, lam, batch_norm=None, dropout=None, activation='relu',
sigma=1.0):
super().__init__(input_dim, nr_classes, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.FeatureSelector = FeatureSelector(input_dim, sigma, device)
self.loss = PartialLogLikelihood
self.noties = 'noties'
self.lam = lam
self.reg = self.FeatureSelector.regularizer
self.mu = self.FeatureSelector.mu
self.sigma = self.FeatureSelector.sigma
def forward(self, feed_dict):
x = self.FeatureSelector(self._get_covariate(feed_dict))
logits = super().forward(x)
if self.training:
loss = self.loss(logits, self._get_fail_indicator(feed_dict), self.noties)
reg = torch.sum(self.reg((self.mu + 0.5)/self.sigma))
total_loss = loss + reg
return total_loss, logits, dict()
else:
return self._compose_output(logits)
def _compose_output(self, logits):
return dict(logits=logits)
class MLPCoxModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, nr_classes, hidden_dims, batch_norm=None, dropout=None, activation='relu'):
super().__init__(input_dim, nr_classes, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.loss = PartialLogLikelihood
self.noties = 'noties'
def forward(self, feed_dict):
logits = super().forward(self._get_covariate(feed_dict))
if self.training:
loss = self.loss(logits, self._get_fail_indicator(feed_dict), self.noties)
return loss, logits, dict()
else:
return self._compose_output(logits)
def _compose_output(self, logits):
return dict(logits=logits)
class MLPRegressionModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, output_dim, hidden_dims, batch_norm=None, dropout=None, activation='relu'):
super().__init__(input_dim, output_dim, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.loss = nn.MSELoss()
def forward(self, feed_dict):
pred = super().forward(self._get_input(feed_dict))
if self.training:
loss = self.loss(pred, self._get_label(feed_dict))
return loss, dict(), dict()
else:
return self._compose_output(pred)
class MLPClassificationModel(MLPModel, ModelIOKeysMixin):
def __init__(self, input_dim, nr_classes, hidden_dims, batch_norm=None, dropout=None, activation='relu'):
super().__init__(input_dim, nr_classes, hidden_dims,
batch_norm=batch_norm, dropout=dropout, activation=activation)
self.softmax = nn.Softmax()
self.loss = nn.CrossEntropyLoss()
def forward(self, feed_dict):
logits = super().forward(self._get_input(feed_dict))
if self.training:
loss = self.loss(logits, self._get_label(feed_dict))
return loss, dict(), dict()
else:
return self._compose_output(logits)
def _compose_output(self, logits):
value = self.softmax(logits)
_, pred = value.max(dim=1)
return dict(prob=value, pred=pred, logits=logits)
class LinearRegressionModel(MLPRegressionModel):
def __init__(self, input_dim, output_dim):
super().__init__(input_dim, output_dim, [])
class LinearClassificationModel(MLPClassificationModel):
    """Logistic-regression-style classifier: an MLP classifier with no hidden layers."""

    def __init__(self, input_dim, nr_classes):
        # An empty hidden_dims list collapses the MLP to a single linear map.
        super().__init__(input_dim, nr_classes, hidden_dims=[])
| 38.059041
| 124
| 0.63632
| 1,234
| 10,314
| 5.058347
| 0.106159
| 0.047421
| 0.026914
| 0.032682
| 0.800064
| 0.78933
| 0.755207
| 0.745434
| 0.716597
| 0.716597
| 0
| 0.005714
| 0.253345
| 10,314
| 271
| 125
| 38.059041
| 0.804831
| 0.009502
| 0
| 0.663507
| 0
| 0
| 0.016085
| 0.006669
| 0
| 0
| 0
| 0.00369
| 0
| 1
| 0.161137
| false
| 0
| 0.033175
| 0.028436
| 0.407583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
627b26ea291145821c22507edb77dbb60f152f3b
| 50
|
py
|
Python
|
aparent/predictor/__init__.py
|
876lkj/APARENT
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 20
|
2019-04-23T20:35:23.000Z
|
2022-02-02T02:07:06.000Z
|
aparent/predictor/__init__.py
|
JoshuaChou2018/aparent
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 6
|
2019-10-14T16:35:00.000Z
|
2021-03-24T17:55:07.000Z
|
aparent/predictor/__init__.py
|
JoshuaChou2018/aparent
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 11
|
2019-06-10T08:53:57.000Z
|
2021-01-25T00:54:59.000Z
|
from aparent.predictor.aparent_predictor import *
| 25
| 49
| 0.86
| 6
| 50
| 7
| 0.666667
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 50
| 1
| 50
| 50
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
659ee65a9d696fac693364cd7f98e79b8f70d9a8
| 41
|
py
|
Python
|
pycow/__init__.py
|
p2k/PyCow
|
1e2c9a16a9402d9ad4ade8d2fe352bf80606d5e2
|
[
"Apache-2.0"
] | 9
|
2015-04-10T10:54:34.000Z
|
2019-08-21T23:18:42.000Z
|
pycow/__init__.py
|
p2k/PyCow
|
1e2c9a16a9402d9ad4ade8d2fe352bf80606d5e2
|
[
"Apache-2.0"
] | null | null | null |
pycow/__init__.py
|
p2k/PyCow
|
1e2c9a16a9402d9ad4ade8d2fe352bf80606d5e2
|
[
"Apache-2.0"
] | 3
|
2017-06-13T08:01:01.000Z
|
2021-06-30T08:06:09.000Z
|
from pycow import *
from utils import *
| 10.25
| 19
| 0.731707
| 6
| 41
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 41
| 3
| 20
| 13.666667
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
65c432a2e3d4f38ce8106294845b190fecfba0a3
| 88
|
py
|
Python
|
utilities.py
|
epf02013/5sale
|
2ad7efbaa57be37368e8b3d99104511295b570c9
|
[
"Apache-2.0"
] | null | null | null |
utilities.py
|
epf02013/5sale
|
2ad7efbaa57be37368e8b3d99104511295b570c9
|
[
"Apache-2.0"
] | null | null | null |
utilities.py
|
epf02013/5sale
|
2ad7efbaa57be37368e8b3d99104511295b570c9
|
[
"Apache-2.0"
] | null | null | null |
from flask import session
def calc_index(time) :
return time.hour*2+(time.minute/30)
| 14.666667
| 36
| 0.75
| 15
| 88
| 4.333333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039474
| 0.136364
| 88
| 5
| 37
| 17.6
| 0.815789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
02b2d44fc78cb452246348774b786d1ace97a208
| 207
|
py
|
Python
|
src/sage/dynamics/complex_dynamics/all.py
|
bopopescu/sage
|
2d495be78e0bdc7a0a635454290b27bb4f5f70f0
|
[
"BSL-1.0"
] | 3
|
2019-07-15T13:48:24.000Z
|
2019-11-08T12:31:43.000Z
|
src/sage/dynamics/complex_dynamics/all.py
|
bopopescu/sage
|
2d495be78e0bdc7a0a635454290b27bb4f5f70f0
|
[
"BSL-1.0"
] | 2
|
2018-10-30T13:40:20.000Z
|
2020-07-23T12:13:30.000Z
|
src/sage/dynamics/complex_dynamics/all.py
|
bopopescu/sage
|
2d495be78e0bdc7a0a635454290b27bb4f5f70f0
|
[
"BSL-1.0"
] | 7
|
2021-11-08T10:01:59.000Z
|
2022-03-03T11:25:52.000Z
|
from __future__ import absolute_import
from sage.misc.lazy_import import lazy_import
lazy_import("sage.dynamics.complex_dynamics.mandel_julia",
["mandelbrot_plot", "external_ray", "julia_plot"])
| 41.4
| 62
| 0.78744
| 27
| 207
| 5.555556
| 0.555556
| 0.2
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 207
| 4
| 63
| 51.75
| 0.819672
| 0
| 0
| 0
| 0
| 0
| 0.386473
| 0.207729
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f30613926c3b4a0d6d5052d965cbb9eb37a99caf
| 59
|
py
|
Python
|
onyx/database/guild.py
|
mudkipdev/onyx
|
333d23c1f83bb2f69a9f570ce874b9d05dc2edda
|
[
"MIT"
] | null | null | null |
onyx/database/guild.py
|
mudkipdev/onyx
|
333d23c1f83bb2f69a9f570ce874b9d05dc2edda
|
[
"MIT"
] | null | null | null |
onyx/database/guild.py
|
mudkipdev/onyx
|
333d23c1f83bb2f69a9f570ce874b9d05dc2edda
|
[
"MIT"
] | null | null | null |
import discord
class CustomGuild(discord.Guild):
    """Application-specific guild type; currently identical to discord.Guild."""
    # Placeholder subclass — extend here when custom guild behavior is needed.
    pass
| 11.8
| 33
| 0.762712
| 7
| 59
| 6.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 59
| 4
| 34
| 14.75
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
f31e08d6345a417d5c8f81582a0d47a1deb30000
| 42
|
py
|
Python
|
brutejudge/http/base.py
|
sleirsgoevy/brutejudge
|
7ffe685c2e424e1b14ae1c27cc2fd0a25751c40e
|
[
"MIT"
] | 1
|
2021-02-04T00:56:17.000Z
|
2021-02-04T00:56:17.000Z
|
brutejudge/http/base.py
|
sleirsgoevy/brutejudge
|
7ffe685c2e424e1b14ae1c27cc2fd0a25751c40e
|
[
"MIT"
] | 1
|
2019-11-11T00:31:03.000Z
|
2019-12-24T19:57:04.000Z
|
brutejudge/http/base.py
|
sleirsgoevy/brutejudge
|
7ffe685c2e424e1b14ae1c27cc2fd0a25751c40e
|
[
"MIT"
] | null | null | null |
from brutejudge._http.base import Backend
| 21
| 41
| 0.857143
| 6
| 42
| 5.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b8359c25087cbb4d85d635f015a274407d344c13
| 3,803
|
py
|
Python
|
tests/utils_tests/test_inspect.py
|
jpmallarino/django
|
659d2421c7adbbcd205604002d521d82d6b0b465
|
[
"BSD-3-Clause",
"0BSD"
] | 16
|
2019-08-10T12:24:06.000Z
|
2020-05-21T09:11:14.000Z
|
tests/utils_tests/test_inspect.py
|
jpmallarino/django
|
659d2421c7adbbcd205604002d521d82d6b0b465
|
[
"BSD-3-Clause",
"0BSD"
] | 12
|
2019-08-10T11:55:29.000Z
|
2020-05-21T04:46:30.000Z
|
tests/utils_tests/test_inspect.py
|
jpmallarino/django
|
659d2421c7adbbcd205604002d521d82d6b0b465
|
[
"BSD-3-Clause",
"0BSD"
] | 3
|
2019-08-20T13:29:34.000Z
|
2020-01-30T22:05:10.000Z
|
import unittest
from django.utils import inspect
class Person:
    """Fixture whose method signatures cover every argument-passing style.

    NOTE: the signatures themselves are the subject of the introspection
    tests below — do not change parameter names, defaults, or ordering.
    """

    def no_arguments(self):
        """No parameters beyond self."""
        return None

    def one_argument(self, something):
        """Exactly one positional parameter; echoes it back."""
        return something

    def just_args(self, *args):
        """Varargs only; returns the collected tuple."""
        return args

    def all_kinds(self, name, address="home", age=25, *args, **kwargs):
        """Positional, defaulted, *args and **kwargs parameters combined."""
        return kwargs

    @classmethod
    def cls_all_kinds(cls, name, address="home", age=25, *args, **kwargs):
        """Classmethod twin of all_kinds for bound-classmethod introspection."""
        return kwargs
class TestInspectMethods(unittest.TestCase):
    """Exercise django.utils.inspect helpers against the Person fixture.

    Each helper is checked on both the unbound function (``Person.x``) and
    the bound method (``Person().x``) form.
    """

    def test_get_callable_parameters(self):
        # _get_callable_parameters is cached: repeated calls on the same
        # function (or bound method) must return the identical object.
        self.assertIs(
            inspect._get_callable_parameters(Person.no_arguments),
            inspect._get_callable_parameters(Person.no_arguments),
        )
        self.assertIs(
            inspect._get_callable_parameters(Person().no_arguments),
            inspect._get_callable_parameters(Person().no_arguments),
        )

    def test_get_func_full_args_no_arguments(self):
        self.assertEqual(inspect.get_func_full_args(Person.no_arguments), [])
        self.assertEqual(inspect.get_func_full_args(Person().no_arguments), [])

    def test_get_func_full_args_one_argument(self):
        self.assertEqual(
            inspect.get_func_full_args(Person.one_argument), [("something",)]
        )
        self.assertEqual(
            inspect.get_func_full_args(Person().one_argument),
            [("something",)],
        )

    def test_get_func_full_args_all_arguments_method(self):
        # Expected encoding: (name,) for plain params, (name, default) for
        # defaulted ones, and literal "*args" / "**kwargs" markers.
        arguments = [
            ("name",),
            ("address", "home"),
            ("age", 25),
            ("*args",),
            ("**kwargs",),
        ]
        self.assertEqual(inspect.get_func_full_args(Person.all_kinds), arguments)
        self.assertEqual(inspect.get_func_full_args(Person().all_kinds), arguments)

    def test_get_func_full_args_all_arguments_classmethod(self):
        arguments = [
            ("name",),
            ("address", "home"),
            ("age", 25),
            ("*args",),
            ("**kwargs",),
        ]
        self.assertEqual(inspect.get_func_full_args(Person.cls_all_kinds), arguments)
        self.assertEqual(inspect.get_func_full_args(Person().cls_all_kinds), arguments)

    def test_func_accepts_var_args_has_var_args(self):
        self.assertIs(inspect.func_accepts_var_args(Person.just_args), True)
        self.assertIs(inspect.func_accepts_var_args(Person().just_args), True)

    def test_func_accepts_var_args_no_var_args(self):
        self.assertIs(inspect.func_accepts_var_args(Person.one_argument), False)
        self.assertIs(inspect.func_accepts_var_args(Person().one_argument), False)

    def test_method_has_no_args(self):
        self.assertIs(inspect.method_has_no_args(Person.no_arguments), True)
        self.assertIs(inspect.method_has_no_args(Person().no_arguments), True)
        self.assertIs(inspect.method_has_no_args(Person.one_argument), False)
        self.assertIs(inspect.method_has_no_args(Person().one_argument), False)

    def test_func_supports_parameter(self):
        self.assertIs(
            inspect.func_supports_parameter(Person.all_kinds, "address"), True
        )
        self.assertIs(
            inspect.func_supports_parameter(Person().all_kinds, "address"),
            True,
        )
        self.assertIs(inspect.func_supports_parameter(Person.all_kinds, "zone"), False)
        self.assertIs(
            inspect.func_supports_parameter(Person().all_kinds, "zone"),
            False,
        )

    def test_func_accepts_kwargs(self):
        self.assertIs(inspect.func_accepts_kwargs(Person.just_args), False)
        self.assertIs(inspect.func_accepts_kwargs(Person().just_args), False)
        self.assertIs(inspect.func_accepts_kwargs(Person.all_kinds), True)
        # FIX: the original repeated the Person().just_args/False assertion
        # here. Every case in this class checks both unbound and bound forms,
        # so the bound all_kinds must be asserted True.
        self.assertIs(inspect.func_accepts_kwargs(Person().all_kinds), True)
| 36.92233
| 87
| 0.66579
| 452
| 3,803
| 5.236726
| 0.106195
| 0.091255
| 0.144487
| 0.076046
| 0.841994
| 0.836924
| 0.812421
| 0.806506
| 0.777778
| 0.720321
| 0
| 0.002697
| 0.220089
| 3,803
| 102
| 88
| 37.284314
| 0.795347
| 0
| 0
| 0.321429
| 0
| 0
| 0.028925
| 0
| 0
| 0
| 0
| 0
| 0.309524
| 1
| 0.178571
| false
| 0
| 0.02381
| 0.059524
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b83ee05d564e08e39f4a4637329d66b925d383e1
| 32
|
py
|
Python
|
tests/test_training.py
|
gabrielelanaro/food-pytorch
|
4b59d6ac2433ec0dba13cfece11aea1e5c1e8e80
|
[
"MIT"
] | null | null | null |
tests/test_training.py
|
gabrielelanaro/food-pytorch
|
4b59d6ac2433ec0dba13cfece11aea1e5c1e8e80
|
[
"MIT"
] | null | null | null |
tests/test_training.py
|
gabrielelanaro/food-pytorch
|
4b59d6ac2433ec0dba13cfece11aea1e5c1e8e80
|
[
"MIT"
] | null | null | null |
def test_training():
    # TODO: placeholder — no training assertions implemented yet.
    pass
| 6.4
| 20
| 0.625
| 4
| 32
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28125
| 32
| 4
| 21
| 8
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
b85e5ca2b645fd1831e9e580a3e8ef912e91075e
| 27,614
|
py
|
Python
|
ndcube/tests/test_sequence_plotting.py
|
BaptistePellorceAstro/ndcube
|
eaa2841a6bf90ac2fb2f901747c9a297c0810862
|
[
"BSD-2-Clause"
] | null | null | null |
ndcube/tests/test_sequence_plotting.py
|
BaptistePellorceAstro/ndcube
|
eaa2841a6bf90ac2fb2f901747c9a297c0810862
|
[
"BSD-2-Clause"
] | null | null | null |
ndcube/tests/test_sequence_plotting.py
|
BaptistePellorceAstro/ndcube
|
eaa2841a6bf90ac2fb2f901747c9a297c0810862
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import pytest
import datetime
import numpy as np
import astropy.units as u
import matplotlib
from ndcube import NDCube, NDCubeSequence
from ndcube.utils.wcs import WCS
import ndcube.mixins.sequence_plotting
# Set matplotlib display for testing
#matplotlib.use('Agg')
# sample data for tests
# TODO: use a fixture reading from a test file. file TBD.
data = np.array([[[1, 2, 3, 4], [2, 4, 5, 3], [0, -1, 2, 3]],
[[2, 4, 5, 1], [10, 5, 2, 2], [10, 3, 3, 0]]])
data2 = np.array([[[11, 22, 33, 44], [22, 44, 55, 33], [0, -1, 22, 33]],
[[22, 44, 55, 11], [10, 55, 22, 22], [10, 33, 33, 0]]])
ht = {'CTYPE3': 'HPLT-TAN', 'CUNIT3': 'deg', 'CDELT3': 0.5, 'CRPIX3': 0, 'CRVAL3': 0, 'NAXIS3': 2,
'CTYPE2': 'WAVE ', 'CUNIT2': 'Angstrom', 'CDELT2': 0.2, 'CRPIX2': 0, 'CRVAL2': 0,
'NAXIS2': 3,
'CTYPE1': 'TIME ', 'CUNIT1': 'min', 'CDELT1': 0.4, 'CRPIX1': 0, 'CRVAL1': 0, 'NAXIS1': 4}
wt = WCS(header=ht, naxis=3)
cube1 = NDCube(
data, wt, missing_axis=[False, False, False, True],
extra_coords=[
('pix', 0, u.Quantity(range(data.shape[0]), unit=u.pix)),
('hi', 1, u.Quantity(range(data.shape[1]), unit=u.s)),
('distance', None, u.Quantity(0, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 0))])
cube1_with_unit = NDCube(
data, wt, missing_axis=[False, False, False, True],
unit=u.km,
extra_coords=[
('pix', 0, u.Quantity(range(data.shape[0]), unit=u.pix)),
('hi', 1, u.Quantity(range(data.shape[1]), unit=u.s)),
('distance', None, u.Quantity(0, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 0))])
cube1_with_mask = NDCube(
data, wt, missing_axis=[False, False, False, True],
mask=np.zeros_like(data, dtype=bool),
extra_coords=[
('pix', 0, u.Quantity(range(data.shape[0]), unit=u.pix)),
('hi', 1, u.Quantity(range(data.shape[1]), unit=u.s)),
('distance', None, u.Quantity(0, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 0))])
cube1_with_uncertainty = NDCube(
data, wt, missing_axis=[False, False, False, True],
uncertainty=np.sqrt(data),
extra_coords=[
('pix', 0, u.Quantity(range(data.shape[0]), unit=u.pix)),
('hi', 1, u.Quantity(range(data.shape[1]), unit=u.s)),
('distance', None, u.Quantity(0, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 0))])
cube1_with_unit_and_uncertainty = NDCube(
data, wt, missing_axis=[False, False, False, True],
unit=u.km, uncertainty=np.sqrt(data),
extra_coords=[
('pix', 0, u.Quantity(range(data.shape[0]), unit=u.pix)),
('hi', 1, u.Quantity(range(data.shape[1]), unit=u.s)),
('distance', None, u.Quantity(0, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 0))])
cube3 = NDCube(
data2, wt, missing_axis=[False, False, False, True],
extra_coords=[
('pix', 0, u.Quantity(np.arange(1, data2.shape[0]+1), unit=u.pix) +
cube1.extra_coords['pix']['value'][-1]),
('hi', 1, u.Quantity(range(data2.shape[1]), unit=u.s)),
('distance', None, u.Quantity(2, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 2))])
cube3_with_unit = NDCube(
data2, wt, missing_axis=[False, False, False, True],
unit=u.m,
extra_coords=[
('pix', 0, u.Quantity(np.arange(1, data2.shape[0]+1), unit=u.pix) +
cube1.extra_coords['pix']['value'][-1]),
('hi', 1, u.Quantity(range(data2.shape[1]), unit=u.s)),
('distance', None, u.Quantity(2, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 2))])
cube3_with_mask = NDCube(
data2, wt, missing_axis=[False, False, False, True],
mask=np.zeros_like(data2, dtype=bool),
extra_coords=[
('pix', 0, u.Quantity(np.arange(1, data2.shape[0]+1), unit=u.pix) +
cube1.extra_coords['pix']['value'][-1]),
('hi', 1, u.Quantity(range(data2.shape[1]), unit=u.s)),
('distance', None, u.Quantity(2, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 2))])
cube3_with_uncertainty = NDCube(
data2, wt, missing_axis=[False, False, False, True],
uncertainty=np.sqrt(data2),
extra_coords=[
('pix', 0, u.Quantity(np.arange(1, data2.shape[0]+1), unit=u.pix) +
cube1.extra_coords['pix']['value'][-1]),
('hi', 1, u.Quantity(range(data2.shape[1]), unit=u.s)),
('distance', None, u.Quantity(2, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 2))])
cube3_with_unit_and_uncertainty = NDCube(
data2, wt, missing_axis=[False, False, False, True],
unit=u.m, uncertainty=np.sqrt(data2),
extra_coords=[
('pix', 0, u.Quantity(np.arange(1, data2.shape[0]+1), unit=u.pix) +
cube1.extra_coords['pix']['value'][-1]),
('hi', 1, u.Quantity(range(data2.shape[1]), unit=u.s)),
('distance', None, u.Quantity(2, unit=u.cm)),
('time', None, datetime.datetime(2000, 1, 1, 0, 2))])
# Define some test NDCubeSequences.
common_axis = 0
seq = NDCubeSequence(data_list=[cube1, cube3, cube1, cube3], common_axis=common_axis)
seq_no_common_axis = NDCubeSequence(data_list=[cube1, cube3, cube1, cube3])
seq_with_units = NDCubeSequence(
data_list=[cube1_with_unit, cube3_with_unit, cube1_with_unit, cube3_with_unit],
common_axis=common_axis)
seq_with_masks = NDCubeSequence(
data_list=[cube1_with_mask, cube3_with_mask, cube1_with_mask, cube3_with_mask],
common_axis=common_axis)
seq_with_unit0 = NDCubeSequence(data_list=[cube1_with_unit, cube3,
cube1_with_unit, cube3], common_axis=common_axis)
seq_with_mask0 = NDCubeSequence(data_list=[cube1_with_mask, cube3,
cube1_with_mask, cube3], common_axis=common_axis)
seq_with_uncertainty = NDCubeSequence(data_list=[cube1_with_uncertainty, cube3_with_uncertainty,
cube1_with_uncertainty, cube3_with_uncertainty],
common_axis=common_axis)
seq_with_some_uncertainty = NDCubeSequence(
data_list=[cube1_with_uncertainty, cube3, cube1, cube3_with_uncertainty],
common_axis=common_axis)
seq_with_units_and_uncertainty = NDCubeSequence(
data_list=[cube1_with_unit_and_uncertainty, cube3_with_unit_and_uncertainty,
cube1_with_unit_and_uncertainty, cube3_with_unit_and_uncertainty],
common_axis=common_axis)
seq_with_units_and_some_uncertainty = NDCubeSequence(
data_list=[cube1_with_unit_and_uncertainty, cube3_with_unit,
cube1_with_unit, cube3_with_unit_and_uncertainty],
common_axis=common_axis)
# Derive some expected data arrays in plot objects.
seq_data_stack = np.stack([cube.data for cube in seq_with_masks.data])
seq_mask_stack = np.stack([cube.mask for cube in seq_with_masks.data])
seq_stack = np.ma.masked_array(seq_data_stack, seq_mask_stack)
seq_stack_km = np.ma.masked_array(
np.stack([(cube.data * cube.unit).to(u.km).value for cube in seq_with_units.data]),
seq_mask_stack)
seq_data_concat = np.concatenate([cube.data for cube in seq_with_masks.data], axis=common_axis)
seq_mask_concat = np.concatenate([cube.mask for cube in seq_with_masks.data], axis=common_axis)
seq_concat = np.ma.masked_array(seq_data_concat, seq_mask_concat)
seq_concat_km = np.ma.masked_array(
np.concatenate([(cube.data * cube.unit).to(u.km).value
for cube in seq_with_units.data], axis=common_axis),
seq_mask_concat)
# Derive expected axis_ranges
x_axis_coords = np.array([0.4, 0.8, 1.2, 1.6]).reshape((1, 1, 4))
new_x_axis_coords_shape = u.Quantity(seq.dimensions, unit=u.pix).value.astype(int)
new_x_axis_coords_shape[-1] = 1
none_axis_ranges_axis3 = [np.arange(len(seq.data)), np.array([0., 2.]), np.array([0., 1.5, 3.]),
np.tile(np.array(x_axis_coords), new_x_axis_coords_shape)]
# Derive expected extents
seq_axis1_lim_deg = [0.49998731, 0.99989848]
seq_axis1_lim_arcsec = [(axis1_xlim*u.deg).to(u.arcsec).value for axis1_xlim in seq_axis1_lim_deg]
seq_axis2_lim_m = [seq[:, :, :, 0].data[0].axis_world_coords()[-1][0].value,
seq[:, :, :, 0].data[0].axis_world_coords()[-1][-1].value]
@pytest.mark.parametrize("test_input, test_kwargs, expected_values", [
(seq[:, 0, 0, 0], {},
(np.arange(len(seq.data)), np.array([1, 11, 1, 11]),
"meta.obs.sequence [None]", "Data [None]", (0, len(seq[:, 0, 0, 0].data)-1),
(min([cube.data.min() for cube in seq[:, 0, 0, 0].data]),
max([cube.data.max() for cube in seq[:, 0, 0, 0].data])))),
(seq_with_units[:, 0, 0, 0], {},
(np.arange(len(seq_with_units.data)), np.array([1, 0.011, 1, 0.011]),
"meta.obs.sequence [None]", "Data [km]", (0, len(seq_with_units[:, 0, 0, 0].data)-1),
(min([(cube.data * cube.unit).to(seq_with_units[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units[:, 0, 0, 0].data]),
max([(cube.data * cube.unit).to(seq_with_units[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units[:, 0, 0, 0].data])))),
(seq_with_uncertainty[:, 0, 0, 0], {},
(np.arange(len(seq_with_uncertainty.data)), np.array([1, 11, 1, 11]),
"meta.obs.sequence [None]", "Data [None]", (0, len(seq_with_uncertainty[:, 0, 0, 0].data)-1),
(min([cube.data for cube in seq_with_uncertainty[:, 0, 0, 0].data]),
max([cube.data for cube in seq_with_uncertainty[:, 0, 0, 0].data])))),
(seq_with_units_and_uncertainty[:, 0, 0, 0], {},
(np.arange(len(seq_with_units_and_uncertainty.data)), np.array([1, 0.011, 1, 0.011]),
"meta.obs.sequence [None]", "Data [km]",
(0, len(seq_with_units_and_uncertainty[:, 0, 0, 0].data)-1),
(min([(cube.data*cube.unit).to(seq_with_units_and_uncertainty[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units_and_uncertainty[:, 0, 0, 0].data]),
max([(cube.data*cube.unit).to(seq_with_units_and_uncertainty[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units_and_uncertainty[:, 0, 0, 0].data])))),
(seq_with_units_and_some_uncertainty[:, 0, 0, 0], {},
(np.arange(len(seq_with_units_and_some_uncertainty.data)), np.array([1, 0.011, 1, 0.011]),
"meta.obs.sequence [None]", "Data [km]",
(0, len(seq_with_units_and_some_uncertainty[:, 0, 0, 0].data)-1),
(min([(cube.data*cube.unit).to(
seq_with_units_and_some_uncertainty[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units_and_some_uncertainty[:, 0, 0, 0].data]),
max([(cube.data*cube.unit).to(
seq_with_units_and_some_uncertainty[:, 0, 0, 0].data[0].unit).value
for cube in seq_with_units_and_some_uncertainty[:, 0, 0, 0].data])))),
(seq[:, 0, 0, 0], {"axes_coordinates": "distance"},
((seq.sequence_axis_extra_coords["distance"]), np.array([1, 11, 1, 11]),
"distance [{0}]".format(seq.sequence_axis_extra_coords["distance"].unit), "Data [None]",
(min(seq.sequence_axis_extra_coords["distance"].value),
max(seq.sequence_axis_extra_coords["distance"].value)),
(min([cube.data.min() for cube in seq[:, 0, 0, 0].data]),
max([cube.data.max() for cube in seq[:, 0, 0, 0].data])))),
(seq[:, 0, 0, 0], {"axes_coordinates": u.Quantity(np.arange(len(seq.data)), unit=u.cm),
"axes_units": u.km},
(u.Quantity(np.arange(len(seq.data)), unit=u.cm).to(u.km), np.array([1, 11, 1, 11]),
"meta.obs.sequence [km]", "Data [None]",
(min((u.Quantity(np.arange(len(seq.data)), unit=u.cm).to(u.km).value)),
max((u.Quantity(np.arange(len(seq.data)), unit=u.cm).to(u.km).value))),
(min([cube.data.min() for cube in seq[:, 0, 0, 0].data]),
max([cube.data.max() for cube in seq[:, 0, 0, 0].data]))))
])
def test_sequence_plot_1D_plot(test_input, test_kwargs, expected_values):
    """Check NDCubeSequence 1-D ``plot`` output: line data, labels, limits."""
    # Unpack expected values
    expected_x_data, expected_y_data, expected_xlabel, expected_ylabel, \
        expected_xlim, expected_ylim = expected_values
    # Run plot method
    output = test_input.plot(**test_kwargs)
    # Check values are correct
    assert isinstance(output, matplotlib.axes.Axes)
    np.testing.assert_array_equal(output.lines[0].get_xdata(), expected_x_data)
    np.testing.assert_array_equal(output.lines[0].get_ydata(), expected_y_data)
    assert output.axes.get_xlabel() == expected_xlabel
    assert output.axes.get_ylabel() == expected_ylabel
    # Axis limits only need to contain the data range, not equal it exactly.
    output_xlim = output.axes.get_xlim()
    assert output_xlim[0] <= expected_xlim[0]
    assert output_xlim[1] >= expected_xlim[1]
    output_ylim = output.axes.get_ylim()
    assert output_ylim[0] <= expected_ylim[0]
    assert output_ylim[1] >= expected_ylim[1]
@pytest.mark.parametrize("test_input, test_kwargs, expected_values", [
(seq[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 11, 22, 1, 2, 11, 22]),
"{0} [{1}]".format(seq[:, :, 0, 0].cube_like_world_axis_physical_types[common_axis], "deg"),
"Data [None]", tuple(seq_axis1_lim_deg),
(min([cube.data.min() for cube in seq[:, :, 0, 0].data]),
max([cube.data.max() for cube in seq[:, :, 0, 0].data])))),
(seq_with_units[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 0.011, 0.022, 1, 2, 0.011, 0.022]),
"{0} [{1}]".format(seq[:, :, 0, 0].cube_like_world_axis_physical_types[common_axis], "deg"),
"Data [km]", tuple(seq_axis1_lim_deg),
(min([min((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data]),
max([max((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data])))),
(seq_with_uncertainty[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 11, 22, 1, 2, 11, 22]),
"{0} [{1}]".format(
seq_with_uncertainty[:, :, 0, 0].cube_like_world_axis_physical_types[
common_axis], "deg"),
"Data [None]", tuple(seq_axis1_lim_deg),
(min([cube.data.min() for cube in seq_with_uncertainty[:, :, 0, 0].data]),
max([cube.data.max() for cube in seq_with_uncertainty[:, :, 0, 0].data])))),
(seq_with_some_uncertainty[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 11, 22, 1, 2, 11, 22]),
"{0} [{1}]".format(
seq_with_some_uncertainty[:, :, 0, 0].cube_like_world_axis_physical_types[
common_axis], "deg"),
"Data [None]", tuple(seq_axis1_lim_deg),
(min([cube.data.min() for cube in seq_with_some_uncertainty[:, :, 0, 0].data]),
max([cube.data.max() for cube in seq_with_some_uncertainty[:, :, 0, 0].data])))),
(seq_with_units_and_uncertainty[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 0.011, 0.022, 1, 2, 0.011, 0.022]),
"{0} [{1}]".format(
seq_with_units_and_uncertainty[:, :, 0, 0].cube_like_world_axis_physical_types[
common_axis], "deg"),
"Data [km]", tuple(seq_axis1_lim_deg),
(min([min((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data]),
max([max((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data])))),
(seq_with_units_and_some_uncertainty[:, :, 0, 0], {},
(np.array([0.49998731, 0.99989848, 0.49998731, 0.99989848,
0.49998731, 0.99989848, 0.49998731, 0.99989848]),
np.array([1, 2, 0.011, 0.022, 1, 2, 0.011, 0.022]),
"{0} [{1}]".format(
seq_with_units_and_some_uncertainty[:, :, 0, 0].cube_like_world_axis_physical_types[
common_axis], "deg"),
"Data [km]", tuple(seq_axis1_lim_deg),
(min([min((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data]),
max([max((cube.data * cube.unit).to(u.km).value)
for cube in seq_with_units[:, :, 0, 0].data])))),
(seq[:, :, 0, 0], {"axes_coordinates": "pix"},
(seq[:, :, 0, 0].common_axis_extra_coords["pix"].value,
np.array([1, 2, 11, 22, 1, 2, 11, 22]), "pix [pix]", "Data [None]",
(min(seq[:, :, 0, 0].common_axis_extra_coords["pix"].value),
max(seq[:, :, 0, 0].common_axis_extra_coords["pix"].value)),
(min([cube.data.min() for cube in seq[:, :, 0, 0].data]),
max([cube.data.max() for cube in seq[:, :, 0, 0].data])))),
(seq[:, :, 0, 0],
{"axes_coordinates": np.arange(10, 10+seq[:, :, 0, 0].cube_like_dimensions[0].value)},
(np.arange(10, 10 + seq[:, :, 0, 0].cube_like_dimensions[0].value),
np.array([1, 2, 11, 22, 1, 2, 11, 22]),
"{0} [{1}]".format("", None), "Data [None]",
(10, 10 + seq[:, :, 0, 0].cube_like_dimensions[0].value - 1),
(min([cube.data.min() for cube in seq[:, :, 0, 0].data]),
max([cube.data.max() for cube in seq[:, :, 0, 0].data]))))
])
def test_sequence_plot_as_cube_1D_plot(test_input, test_kwargs, expected_values):
    """Check NDCubeSequence 1-D ``plot_as_cube`` output: data, labels, limits."""
    # Unpack expected values
    expected_x_data, expected_y_data, expected_xlabel, expected_ylabel, \
        expected_xlim, expected_ylim = expected_values
    # Run plot method
    output = test_input.plot_as_cube(**test_kwargs)
    # Check values are correct
    # Check type of ouput plot object
    assert isinstance(output, matplotlib.axes.Axes)
    # Check x and y data are correct.
    assert np.allclose(output.lines[0].get_xdata(), expected_x_data)
    assert np.allclose(output.lines[0].get_ydata(), expected_y_data)
    # Check x and y axis labels are correct.
    assert output.axes.get_xlabel() == expected_xlabel
    assert output.axes.get_ylabel() == expected_ylabel
    # Check all data is contained within x and y axes limits.
    output_xlim = output.axes.get_xlim()
    assert output_xlim[0] <= expected_xlim[0]
    assert output_xlim[1] >= expected_xlim[1]
    output_ylim = output.axes.get_ylim()
    assert output_ylim[0] <= expected_ylim[0]
    assert output_ylim[1] >= expected_ylim[1]
def test_sequence_plot_as_cube_error():
    """plot_as_cube must raise TypeError when the sequence has no common axis."""
    with pytest.raises(TypeError):
        seq_no_common_axis.plot_as_cube()
@pytest.mark.parametrize("test_input, test_kwargs, expected_values", [
(seq[:, :, 0, 0], {},
(seq_stack[:, :, 0, 0],
"custom:pos.helioprojective.lat [deg]", "meta.obs.sequence [None]",
tuple(seq_axis1_lim_deg + [0, len(seq.data)-1]))),
(seq_with_units[:, :, 0, 0], {},
(seq_stack_km[:, :, 0, 0],
"custom:pos.helioprojective.lat [deg]", "meta.obs.sequence [None]",
tuple(seq_axis1_lim_deg + [0, len(seq.data)-1]))),
(seq[:, :, 0, 0], {"plot_axis_indices": [0, 1]},
(seq_stack[:, :, 0, 0].transpose(),
"meta.obs.sequence [None]", "custom:pos.helioprojective.lat [deg]",
tuple([0, len(seq.data)-1] + seq_axis1_lim_deg))),
(seq[:, :, 0, 0], {"axes_coordinates": ["pix", "distance"]},
(seq_stack[:, :, 0, 0],
"pix [pix]", "distance [cm]",
(min(seq[0, :, 0, 0].extra_coords["pix"]["value"].value),
max(seq[0, :, 0, 0].extra_coords["pix"]["value"].value),
min(seq[:, :, 0, 0].sequence_axis_extra_coords["distance"].value),
max(seq[:, :, 0, 0].sequence_axis_extra_coords["distance"].value)))),
# This example shows weakness of current extra coord axis values on 2D plotting!
# Only the coordinates from the first cube are shown.
(seq[:, :, 0, 0], {"axes_coordinates": [np.arange(
10, 10+seq[:, :, 0, 0].dimensions[-1].value), "distance"], "axes_units": [None, u.m]},
(seq_stack[:, :, 0, 0],
" [None]", "distance [m]",
(10, 10+seq[:, :, 0, 0].dimensions[-1].value-1,
min(seq[:, :, 0, 0].sequence_axis_extra_coords["distance"].to(u.m).value),
max(seq[:, :, 0, 0].sequence_axis_extra_coords["distance"].to(u.m).value)))),
(seq[:, :, 0, 0], {"axes_coordinates": [np.arange(
10, 10+seq[:, :, 0, 0].dimensions[-1].value)*u.deg, None], "axes_units": [u.arcsec, None]},
(seq_stack[:, :, 0, 0],
" [arcsec]", "meta.obs.sequence [None]",
tuple(list(
(np.arange(10, 10+seq[:, :, 0, 0].dimensions[-1].value)*u.deg).to(u.arcsec).value) \
+ [0, len(seq.data)-1])))
])
def test_sequence_plot_2D_image(test_input, test_kwargs, expected_values):
    """Check NDCubeSequence 2-D ``plot`` output: image array, labels, extent."""
    # Unpack expected values
    expected_data, expected_xlabel, expected_ylabel, expected_extent = expected_values
    # Run plot method
    output = test_input.plot(**test_kwargs)
    # Check values are correct
    assert isinstance(output, matplotlib.axes.Axes)
    np.testing.assert_array_equal(output.images[0].get_array(), expected_data)
    assert output.xaxis.get_label_text() == expected_xlabel
    assert output.yaxis.get_label_text() == expected_ylabel
    # Extent compared with a tolerance: expected values are float-derived.
    assert np.allclose(output.images[0].get_extent(), expected_extent, rtol=1e-3)
    # Also check x and y values?????
@pytest.mark.parametrize("test_input, test_kwargs, expected_error", [
(seq[:, :, 0, 0], {"axes_coordinates": [
np.arange(10, 10+seq[:, :, 0, 0].dimensions[-1].value), None],
"axes_units": [u.m, None]}, ValueError),
(seq[:, :, 0, 0], {"axes_coordinates": [
None, np.arange(10, 10+seq[:, :, 0, 0].dimensions[0].value)],
"axes_units": [None, u.m]}, ValueError)
])
def test_sequence_plot_2D_image_errors(test_input, test_kwargs, expected_error):
    """Invalid axes_coordinates/axes_units combinations must raise."""
    with pytest.raises(expected_error):
        output = test_input.plot(**test_kwargs)
@pytest.mark.parametrize("test_input, test_kwargs, expected_values", [
    (seq[:, :, :, 0], {},
     (seq_concat[:, :, 0],
      "em.wl [m]", "custom:pos.helioprojective.lat [deg]",
      tuple(seq_axis2_lim_m + seq_axis1_lim_deg))),
    (seq_with_units[:, :, :, 0], {},
     (seq_concat_km[:, :, 0],
      "em.wl [m]", "custom:pos.helioprojective.lat [deg]",
      tuple(seq_axis2_lim_m + seq_axis1_lim_deg))),
    (seq[:, :, :, 0], {"plot_axis_indices": [0, 1],
                       "axes_coordinates": ["pix", "hi"]},
     (seq_concat[:, :, 0].transpose(), "pix [pix]", "hi [s]",
      ((seq[:, :, :, 0].common_axis_extra_coords["pix"][0].value,
        seq[:, :, :, 0].common_axis_extra_coords["pix"][-1].value,
        seq[:, :, :, 0].data[0].extra_coords["hi"]["value"][0].value,
        seq[:, :, :, 0].data[0].extra_coords["hi"]["value"][-1].value)))),
    (seq[:, :, :, 0], {"axes_coordinates": [
        np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[-1].value) * u.m,
        np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[0].value) * u.m]},
     (seq_concat[:, :, 0], " [m]", " [m]",
      (10, 10+seq[:, :, :, 0].cube_like_dimensions[-1].value-1,
       10, 10+seq[:, :, :, 0].cube_like_dimensions[0].value-1))),
    (seq[:, :, :, 0], {"axes_coordinates": [
        np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[-1].value) * u.m,
        np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[0].value) * u.m],
        "axes_units": ["cm", u.cm]},
     (seq_concat[:, :, 0], " [cm]", " [cm]",
      (10*100, (10+seq[:, :, :, 0].cube_like_dimensions[-1].value-1)*100,
       10*100, (10+seq[:, :, :, 0].cube_like_dimensions[0].value-1)*100)))
    ])
def test_sequence_plot_as_cube_2D_image(test_input, test_kwargs, expected_values):
    """Check ``plot_as_cube`` 2D image output: data, axis labels and extent.

    Cases cover default WCS coordinates, per-cube data units, extra-coord
    axis names, explicit Quantity coordinate arrays, and unit conversion
    via ``axes_units`` (m -> cm scales the extent by 100).
    """
    # Unpack expected values
    expected_data, expected_xlabel, expected_ylabel, expected_extent = expected_values
    # Run plot method
    output = test_input.plot_as_cube(**test_kwargs)
    # Check values are correct
    assert isinstance(output, matplotlib.axes.Axes)
    np.testing.assert_array_equal(output.images[0].get_array(), expected_data)
    assert output.xaxis.get_label_text() == expected_xlabel
    assert output.yaxis.get_label_text() == expected_ylabel
    # rtol tolerates small float noise from the unit conversions above.
    assert np.allclose(output.images[0].get_extent(), expected_extent, rtol=1e-3)
    # Also check x and y values?????
@pytest.mark.parametrize("test_input, test_kwargs, expected_error", [
    (seq[:, :, :, 0], {"axes_coordinates": [
        np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[-1].value), None],
        "axes_units": [u.m, None]}, ValueError),
    (seq[:, :, :, 0], {"axes_coordinates": [
        None, np.arange(10, 10+seq[:, :, :, 0].cube_like_dimensions[0].value)],
        "axes_units": [None, u.m]}, ValueError)
])
def test_sequence_plot_as_cube_2D_image_errors(test_input, test_kwargs, expected_error):
    """``plot_as_cube`` must raise when a unit is named for a unitless coordinate array."""
    # The return value was previously bound to an unused local; pytest.raises
    # already guarantees the call never returns normally, so discard it.
    with pytest.raises(expected_error):
        test_input.plot_as_cube(**test_kwargs)
@pytest.mark.parametrize("test_input, test_kwargs, expected_data", [
    (seq, {}, seq_stack.reshape(4, 1, 2, 3, 4)),
    (seq_with_units, {}, seq_stack_km.reshape(4, 1, 2, 3, 4))
])
def test_sequence_plot_ImageAnimator(test_input, test_kwargs, expected_data):
    """Plotting a >2D sequence should produce an ImageAnimatorNDCubeSequence."""
    animation = test_input.plot(**test_kwargs)
    # The animator must be the sequence-specific subclass and wrap the full
    # stacked data array (sequence axis prepended).
    assert isinstance(animation, ndcube.mixins.sequence_plotting.ImageAnimatorNDCubeSequence)
    np.testing.assert_array_equal(animation.data, expected_data)
@pytest.mark.parametrize("test_input, test_kwargs, expected_data", [
    (seq, {}, seq_concat.reshape(1, 8, 3, 4)),
    (seq_with_units, {}, seq_concat_km.reshape(1, 8, 3, 4))
])
def test_sequence_plot_as_cube_ImageAnimator(test_input, test_kwargs, expected_data):
    """``plot_as_cube`` on a >2D sequence yields a cube-like animator."""
    animation = test_input.plot_as_cube(**test_kwargs)
    # Cube-like plotting concatenates cubes along the common axis, so the
    # animator wraps the concatenated (not stacked) data.
    assert isinstance(animation,
                      ndcube.mixins.sequence_plotting.ImageAnimatorCubeLikeNDCubeSequence)
    np.testing.assert_array_equal(animation.data, expected_data)
@pytest.mark.parametrize("test_input, expected", [
    ((seq_with_unit0.data, None), (None, None)),
    ((seq_with_unit0.data, u.km), (None, None)),
    ((seq_with_units.data, None), ([u.km, u.m, u.km, u.m], u.km)),
    ((seq_with_units.data, u.cm), ([u.km, u.m, u.km, u.m], u.cm))])
def test_determine_sequence_units(test_input, expected):
    """_determine_sequence_units returns (per-cube units, overall unit)."""
    cubes, requested_unit = test_input
    expected_cube_units, expected_overall_unit = expected
    cube_units, overall_unit = ndcube.mixins.sequence_plotting._determine_sequence_units(
        cubes, unit=requested_unit)
    assert cube_units == expected_cube_units
    assert overall_unit == expected_overall_unit
def test_determine_sequence_units_errors():
    """Asking for a unit when some cubes have none must raise ValueError.

    Renamed from ``test_determine_sequence_units``: the original definition
    reused the name of the parametrized test directly above it, shadowing
    that test so pytest silently never collected or ran it.
    """
    with pytest.raises(ValueError):
        ndcube.mixins.sequence_plotting._determine_sequence_units(seq.data, u.m)
@pytest.mark.parametrize("test_input, expected", [
    ((3, 1, "time", u.s), ([1], [None, 'time', None], [None, u.s, None])),
    ((3, None, None, None), ([-1, -2], None, None))])
def test_prep_axes_kwargs(test_input, expected):
    """_prep_axes_kwargs normalizes plot_axis_indices, coords and units."""
    result = ndcube.mixins.sequence_plotting._prep_axes_kwargs(*test_input)
    # The helper returns a 3-tuple; verify each element independently so a
    # failure pinpoints which of the three was wrong.
    for idx in range(3):
        assert result[idx] == expected[idx]
@pytest.mark.parametrize("test_input, expected_error", [
    ((3, [0, 1, 2], ["time", "pix"], u.s), ValueError),
    ((3, 0, ["time", "pix"], u.s), ValueError),
    ((3, 0, "time", [u.s, u.pix]), ValueError),
    ((3, 0, 0, u.s), TypeError),
    ((3, 0, "time", 0), TypeError)])
def test_prep_axes_kwargs_errors(test_input, expected_error):
    """Mismatched lengths raise ValueError; wrong element types raise TypeError."""
    # The return value was previously bound to an unused local; pytest.raises
    # already asserts the call never returns normally, so discard it.
    with pytest.raises(expected_error):
        ndcube.mixins.sequence_plotting._prep_axes_kwargs(*test_input)
| 47.692573
| 99
| 0.623307
| 4,132
| 27,614
| 3.953533
| 0.059293
| 0.016405
| 0.014079
| 0.026445
| 0.859329
| 0.839128
| 0.79285
| 0.758815
| 0.725331
| 0.66736
| 0
| 0.066145
| 0.184798
| 27,614
| 578
| 100
| 47.775087
| 0.659544
| 0.036757
| 0
| 0.5
| 0
| 0
| 0.076266
| 0.005647
| 0
| 0
| 0
| 0.00173
| 0.075758
| 1
| 0.028139
| false
| 0
| 0.017316
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b87e5aeb3f5ebc501a9acdf3179a27b173b631c9
| 420
|
py
|
Python
|
build-sys/build_sys/__init__.py
|
lukas-ke/faint-graphics-editor
|
33eb9e6a3f2216fb2cf6ef9709a14f3d20b78fbf
|
[
"Apache-2.0"
] | 10
|
2016-12-28T22:06:31.000Z
|
2021-05-24T13:42:30.000Z
|
build-sys/build_sys/__init__.py
|
lukas-ke/faint-graphics-editor
|
33eb9e6a3f2216fb2cf6ef9709a14f3d20b78fbf
|
[
"Apache-2.0"
] | 4
|
2015-10-09T23:55:10.000Z
|
2020-04-04T08:09:22.000Z
|
build-sys/build_sys/__init__.py
|
lukas-ke/faint-graphics-editor
|
33eb9e6a3f2216fb2cf6ef9709a14f3d20b78fbf
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from . build_sys import build
from . build_sys import unknown_version_str
from . build_sys import build_installer
from . build_sys import parse_command_line
from build_sys.opts import BuildOptions
import build_sys.gen_method_def as gen_method_def
import build_sys.gen_resource as gen_resource
import build_sys.gen_text_expressions as gen_text_expressions
import build_sys.util as util
| 35
| 62
| 0.838095
| 69
| 420
| 4.753623
| 0.362319
| 0.219512
| 0.182927
| 0.219512
| 0.140244
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002732
| 0.128571
| 420
| 11
| 63
| 38.181818
| 0.893443
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b8a4d55eea9894e2226fc3251703985f3a867c6c
| 37,489
|
py
|
Python
|
usaspending_api/search/tests/unit/test_spending_by_category.py
|
millerjoey975/usaspending-api
|
66dd6b231087e92696d0ac09ef7700b6069829ad
|
[
"CC0-1.0"
] | 1
|
2022-01-28T16:08:04.000Z
|
2022-01-28T16:08:04.000Z
|
usaspending_api/search/tests/unit/test_spending_by_category.py
|
millerjoey975/usaspending-api
|
66dd6b231087e92696d0ac09ef7700b6069829ad
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/search/tests/unit/test_spending_by_category.py
|
millerjoey975/usaspending-api
|
66dd6b231087e92696d0ac09ef7700b6069829ad
|
[
"CC0-1.0"
] | null | null | null |
import pytest
from model_mommy import mommy
from usaspending_api.common.helpers.generic_helper import get_time_period_message
from usaspending_api.search.tests.data.utilities import setup_elasticsearch_test
from usaspending_api.search.v2.views.spending_by_category_views.spending_by_agency_types import (
AwardingAgencyViewSet,
AwardingSubagencyViewSet,
FundingAgencyViewSet,
FundingSubagencyViewSet,
)
from usaspending_api.search.v2.views.spending_by_category_views.spending_by_federal_account import FederalAccountViewSet
from usaspending_api.search.v2.views.spending_by_category_views.spending_by_industry_codes import (
CfdaViewSet,
PSCViewSet,
NAICSViewSet,
)
from usaspending_api.search.v2.views.spending_by_category_views.spending_by_locations import (
CountyViewSet,
DistrictViewSet,
StateTerritoryViewSet,
CountryViewSet,
)
from usaspending_api.search.v2.views.spending_by_category_views.spending_by_recipient_duns import RecipientDunsViewSet
@pytest.fixture
def psc_test_data(db):
    """Create four FPDS awards split across two PSC codes.

    Codes "1234" (obligation 1 each) and "9876" (obligation 2 each), so the
    PSC category endpoint should report totals of 2 and 4 respectively.
    """
    # (transaction/award id, obligation, PSC code, PSC description, action date)
    rows = [
        (1, 1, "1234", "PSC DESCRIPTION UP", "2020-01-01"),
        (2, 1, "1234", "PSC DESCRIPTION UP", "2020-01-02"),
        (3, 2, "9876", "PSC DESCRIPTION DOWN", "2020-01-03"),
        (4, 2, "9876", "PSC DESCRIPTION DOWN", "2020-01-04"),
    ]
    for row_id, obligation, code, description, date in rows:
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            is_fpds=True,
            federal_action_obligation=obligation,
            action_date=date,
        )
        mommy.make(
            "awards.TransactionFPDS",
            transaction_id=row_id,
            product_or_service_code=code,
            product_or_service_co_desc=description,
        )
    mommy.make("references.PSC", code="1234", description="PSC DESCRIPTION UP")
    mommy.make("references.PSC", code="9876", description="PSC DESCRIPTION DOWN")
@pytest.fixture
def cfda_test_data(db):
    """Create two awards/subawards that share a single CFDA program."""
    for row_id, date in ((1, "2020-01-01"), (2, "2020-01-02")):
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.Subaward",
            id=row_id,
            award_id=row_id,
            amount=1,
            cfda_id=1,
            cfda_number="CFDA1234",
            cfda_title="CFDA TITLE 1234",
        )
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            federal_action_obligation=1,
            action_date=date,
        )
        mommy.make(
            "awards.TransactionFABS",
            transaction_id=row_id,
            cfda_number="CFDA1234",
            cfda_title="CFDA TITLE 1234",
        )
    mommy.make("references.Cfda", id=1, program_number="CFDA1234", program_title="CFDA TITLE 1234")
@pytest.fixture
def naics_test_data(db):
    """Create four FPDS awards split across two NAICS codes.

    "NAICS 1234" carries obligation 1 per award and "NAICS 9876" carries 2,
    giving the NAICS category endpoint totals of 2 and 4.
    """
    # (transaction/award id, obligation, NAICS code, transaction-level description, date)
    rows = [
        (1, 1, "NAICS 1234", "NAICS DESC 1234", "2020-01-01"),
        (2, 1, "NAICS 1234", "NAICS DESC 1234", "2020-01-02"),
        (3, 2, "NAICS 9876", "NAICS DESC 9876", "2020-01-03"),
        (4, 2, "NAICS 9876", "NAICS DESC 9876", "2020-01-04"),
    ]
    for row_id, obligation, code, description, date in rows:
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            is_fpds=True,
            federal_action_obligation=obligation,
            action_date=date,
        )
        mommy.make("awards.TransactionFPDS", transaction_id=row_id, naics=code, naics_description=description)
    # Reference table rows carry the authoritative ("SOURCE ...") descriptions.
    mommy.make("references.NAICS", code="NAICS 1234", description="SOURCE NAICS DESC 1234", year=1955)
    mommy.make("references.NAICS", code="NAICS 9876", description="SOURCE NAICS DESC 9876", year=1985)
@pytest.fixture
def agency_test_data(db):
    """Create prime awards (agencies 1/2) and subawards (agencies 3/4).

    Prime transactions (obligations 5 + 10 = 15) are awarded by agency 1001
    and funded by 1002; subawards (amounts 50 + 100 = 150) are awarded by
    1003 and funded by 1004.
    """
    for award_id, obligation, date in ((1, 5, "2020-01-01"), (2, 10, "2020-01-02")):
        mommy.make("awards.Award", id=award_id, latest_transaction_id=award_id)
        mommy.make(
            "awards.TransactionNormalized",
            id=award_id,
            award_id=award_id,
            awarding_agency_id=1001,
            funding_agency_id=1002,
            federal_action_obligation=obligation,
            action_date=date,
        )
    for sub_id, amount in ((1, 50), (2, 100)):
        mommy.make(
            "awards.Subaward",
            id=sub_id,
            latest_transaction_id=sub_id,
            amount=amount,
            awarding_agency_id=1003,
            funding_agency_id=1004,
            awarding_toptier_agency_name="Awarding Toptier Agency 3",
            awarding_subtier_agency_name="Awarding Subtier Agency 3",
            funding_toptier_agency_name="Funding Toptier Agency 4",
            funding_subtier_agency_name="Funding Subtier Agency 4",
            awarding_toptier_agency_abbreviation="TA3",
            awarding_subtier_agency_abbreviation="SA3",
            funding_toptier_agency_abbreviation="TA4",
            funding_subtier_agency_abbreviation="SA4",
        )
    # Agencies 1 and 3 play the awarding role, 2 and 4 the funding role;
    # toptier ids are 2000+n, subtier ids 3000+n, agency ids 1000+n.
    for n, role in ((1, "Awarding"), (2, "Funding"), (3, "Awarding"), (4, "Funding")):
        mommy.make(
            "references.ToptierAgency",
            toptier_agency_id=2000 + n,
            name=f"{role} Toptier Agency {n}",
            abbreviation=f"TA{n}",
        )
        mommy.make(
            "references.SubtierAgency",
            subtier_agency_id=3000 + n,
            name=f"{role} Subtier Agency {n}",
            abbreviation=f"SA{n}",
        )
        mommy.make(
            "references.Agency",
            id=1000 + n,
            toptier_agency_id=2000 + n,
            subtier_agency_id=3000 + n,
            toptier_flag=True,
        )
@pytest.fixture
def recipient_test_data(db):
    """Create awards for three recipients: two with DUNS, one "MULTIPLE RECIPIENTS".

    Prime obligations: Pawnee 1+1=2, John Doe 1+10=11, MULTIPLE 15.
    Subaward amounts: Pawnee 1+10=11, John Doe 100+1000=1100, MULTIPLE 10000.
    """
    # (id, subaward amount, prime obligation, recipient name, DUNS)
    rows = [
        (1, 1, 1, "University of Pawnee", "00UOP00"),
        (2, 10, 1, "University of Pawnee", "00UOP00"),
        (3, 100, 1, "John Doe", "1234JD4321"),
        (4, 1000, 10, "John Doe", "1234JD4321"),
        (5, 10000, 15, "MULTIPLE RECIPIENTS", None),
    ]
    for row_id, sub_amount, obligation, name, duns in rows:
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.Subaward",
            id=row_id,
            award_id=row_id,
            amount=sub_amount,
            recipient_name=name,
            recipient_unique_id=duns,
        )
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            federal_action_obligation=obligation,
            action_date=f"2020-01-0{row_id}",
            is_fpds=True,
        )
        mommy.make(
            "awards.TransactionFPDS",
            transaction_id=row_id,
            awardee_or_recipient_legal=name,
            awardee_or_recipient_uniqu=duns,
        )
    # One lookup + profile row per distinct recipient.
    # (DUNS, legal name, recipient hash, recipient level)
    recipients = [
        ("00UOP00", "University of Pawnee", "2af2a5a5-3126-2c76-3681-dec2cf148f1a", "P"),
        ("1234JD4321", "John Doe", "0b54895d-2393-ea12-48e3-deae990614d9", "C"),
        (None, "MULTIPLE RECIPIENTS", "64af1cb7-993c-b64b-1c58-f5289af014c0", "R"),
    ]
    for duns, name, recipient_hash, level in recipients:
        mommy.make(
            "recipient.RecipientLookup",
            duns=duns,
            legal_business_name=name,
            recipient_hash=recipient_hash,
        )
        mommy.make(
            "recipient.RecipientProfile",
            recipient_unique_id=duns,
            recipient_level=level,
            recipient_hash=recipient_hash,
            recipient_name=name,
        )
@pytest.fixture
def geo_test_data(db):
    """Create four awards placed in two counties of state "XY".

    County "04"/COUNTYSVILLE receives obligations 1+2=3 and subawards 1+10=11;
    county "01"/SOMEWHEREVILLE receives obligations 3+4=7 and subawards
    100+1000=1100.
    """
    # (id, subaward amount, obligation, county code, county name, zip5, congressional district)
    rows = [
        (1, 1, 1, "04", "COUNTYSVILLE", "12345", "06"),
        (2, 10, 2, "04", "COUNTYSVILLE", "12345", "06"),
        (3, 100, 3, "01", "SOMEWHEREVILLE", "98765", "90"),
        (4, 1000, 4, "01", "SOMEWHEREVILLE", "98765", "90"),
    ]
    for row_id, sub_amount, obligation, county_code, county_name, zip5, district in rows:
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.Subaward",
            id=row_id,
            award_id=row_id,
            amount=sub_amount,
            pop_country_name=None,
            pop_country_code="US",
            pop_state_code="XY",
            pop_county_code=county_code,
            pop_county_name=county_name,
            pop_zip4=zip5,
            pop_congressional_code=district,
        )
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            federal_action_obligation=obligation,
            action_date=f"2020-01-0{row_id}",
            is_fpds=True,
        )
        mommy.make(
            "awards.TransactionFPDS",
            transaction_id=row_id,
            place_of_perf_country_desc=None,
            place_of_perform_country_c="US",
            place_of_performance_state="XY",
            place_of_perform_county_co=county_code,
            place_of_perform_county_na=county_name,
            place_of_performance_zip5=zip5,
            place_of_performance_congr=district,
        )
    mommy.make("recipient.StateData", name="Test State", code="XY")
    mommy.make("references.RefCountryCode", country_name="UNITED STATES", country_code="US")
@pytest.fixture
def federal_accounts_test_data(db):
    """Create two awards from one recipient, both tied to a single federal account."""
    for row_id, obligation, date in ((1, 1, "2020-01-01"), (2, 2, "2020-01-02")):
        mommy.make("awards.Award", id=row_id, latest_transaction_id=row_id)
        mommy.make(
            "awards.TransactionNormalized",
            id=row_id,
            award_id=row_id,
            federal_action_obligation=obligation,
            action_date=date,
            is_fpds=True,
        )
        mommy.make(
            "awards.TransactionFPDS",
            transaction_id=row_id,
            awardee_or_recipient_legal="Sample Recipient",
            awardee_or_recipient_uniqu="000000000",
        )
        # Both awards point at the same treasury account (and thus the same
        # federal account below).
        mommy.make(
            "awards.FinancialAccountsByAwards",
            financial_accounts_by_awards_id=row_id,
            award_id=row_id,
            treasury_account_id=1,
        )
    mommy.make(
        "recipient.RecipientLookup",
        duns="000000000",
        legal_business_name="Sample Recipient",
        recipient_hash="dece8b43-c2a8-d056-7e82-0fc2f1c7c4e4",
    )
    mommy.make(
        "recipient.RecipientProfile",
        recipient_unique_id="000000000",
        recipient_level="R",
        recipient_hash="dece8b43-c2a8-d056-7e82-0fc2f1c7c4e4",
        recipient_name="Sample Recipient",
    )
    mommy.make("accounts.TreasuryAppropriationAccount", treasury_account_identifier=1, federal_account_id=10)
    mommy.make(
        "accounts.FederalAccount",
        id=10,
        agency_identifier="020",
        main_account_code="0001",
        account_title="Test Federal Account",
        federal_account_code="020-0001",
    )
def test_category_awarding_agency_awards(agency_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by awarding toptier agency (Elasticsearch path)."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "awarding_agency", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "awarding_agency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 15, "name": "Awarding Toptier Agency 1", "code": "TA1", "id": 1001}],
        "messages": [get_time_period_message()],
    }
    assert AwardingAgencyViewSet().perform_search(payload, {}) == expected
def test_category_awarding_agency_subawards(agency_test_data):
    """Subawards grouped by awarding toptier agency."""
    payload = {"category": "awarding_agency", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "awarding_agency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 150, "name": "Awarding Toptier Agency 3", "code": "TA3", "id": 1003}],
        "messages": [get_time_period_message()],
    }
    assert AwardingAgencyViewSet().perform_search(payload, {}) == expected
def test_category_awarding_subagency_awards(agency_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by awarding subtier agency (Elasticsearch path)."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "awarding_subagency", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "awarding_subagency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 15, "name": "Awarding Subtier Agency 1", "code": "SA1", "id": 1001}],
        "messages": [get_time_period_message()],
    }
    assert AwardingSubagencyViewSet().perform_search(payload, {}) == expected
def test_category_awarding_subagency_subawards(agency_test_data):
    """Subawards grouped by awarding subtier agency."""
    payload = {"category": "awarding_subagency", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "awarding_subagency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 150, "name": "Awarding Subtier Agency 3", "code": "SA3", "id": 1003}],
        "messages": [get_time_period_message()],
    }
    assert AwardingSubagencyViewSet().perform_search(payload, {}) == expected
def test_category_funding_agency_awards(agency_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by funding toptier agency (Elasticsearch path)."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "funding_agency", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "funding_agency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 15, "name": "Funding Toptier Agency 2", "code": "TA2", "id": 1002}],
        "messages": [get_time_period_message()],
    }
    assert FundingAgencyViewSet().perform_search(payload, {}) == expected
def test_category_funding_agency_subawards(agency_test_data):
    """Subawards grouped by funding toptier agency."""
    payload = {"category": "funding_agency", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "funding_agency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 150, "name": "Funding Toptier Agency 4", "code": "TA4", "id": 1004}],
        "messages": [get_time_period_message()],
    }
    assert FundingAgencyViewSet().perform_search(payload, {}) == expected
def test_category_funding_subagency_awards(agency_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by funding subtier agency (Elasticsearch path)."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "funding_subagency", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "funding_subagency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 15, "name": "Funding Subtier Agency 2", "code": "SA2", "id": 1002}],
        "messages": [get_time_period_message()],
    }
    assert FundingSubagencyViewSet().perform_search(payload, {}) == expected
def test_category_funding_subagency_subawards(agency_test_data):
    """Subawards grouped by funding subtier agency."""
    payload = {"category": "funding_subagency", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "funding_subagency",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 150, "name": "Funding Subtier Agency 4", "code": "SA4", "id": 1004}],
        "messages": [get_time_period_message()],
    }
    assert FundingSubagencyViewSet().perform_search(payload, {}) == expected
@pytest.mark.django_db
def test_category_recipient_duns_awards(recipient_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by recipient DUNS, ordered by descending amount."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "recipient_duns", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "recipient_duns",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [
            # The DUNS-less recipient gets a placeholder code and no recipient_id.
            {"amount": 15, "name": "MULTIPLE RECIPIENTS", "code": "DUNS Number not provided", "recipient_id": None},
            {
                "amount": 11,
                "name": "JOHN DOE",
                "code": "1234JD4321",
                "recipient_id": "0b54895d-2393-ea12-48e3-deae990614d9-C",
            },
            {
                "amount": 2,
                "name": "UNIVERSITY OF PAWNEE",
                "code": "00UOP00",
                "recipient_id": "2af2a5a5-3126-2c76-3681-dec2cf148f1a-P",
            },
        ],
        "messages": [get_time_period_message()],
    }
    assert RecipientDunsViewSet().perform_search(payload, {}) == expected
@pytest.mark.django_db
def test_category_recipient_duns_subawards(recipient_test_data):
    """Subawards grouped by recipient DUNS, ordered by descending amount."""
    payload = {"category": "recipient_duns", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "recipient_duns",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [
            # Unlike the prime-award path, the missing DUNS surfaces as None here.
            {"amount": 10000, "code": None, "name": "MULTIPLE RECIPIENTS", "recipient_id": None},
            {
                "amount": 1100,
                "code": "1234JD4321",
                "recipient_id": "0b54895d-2393-ea12-48e3-deae990614d9-C",
                "name": "JOHN DOE",
            },
            {
                "amount": 11,
                "code": "00UOP00",
                "recipient_id": "2af2a5a5-3126-2c76-3681-dec2cf148f1a-P",
                "name": "UNIVERSITY OF PAWNEE",
            },
        ],
        "messages": [get_time_period_message()],
    }
    assert RecipientDunsViewSet().perform_search(payload, {}) == expected
def test_category_cfda_awards(cfda_test_data, monkeypatch, elasticsearch_transaction_index):
    """Prime awards grouped by CFDA program (Elasticsearch path)."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "cfda", "subawards": False, "page": 1, "limit": 50}
    expected = {
        "category": "cfda",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 2, "code": "CFDA1234", "name": "CFDA TITLE 1234", "id": 1}],
        "messages": [get_time_period_message()],
    }
    assert CfdaViewSet().perform_search(payload, {}) == expected
def test_category_cfda_subawards(cfda_test_data):
    """Subawards grouped by CFDA program."""
    payload = {"category": "cfda", "subawards": True, "page": 1, "limit": 50}
    expected = {
        "category": "cfda",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 2, "code": "CFDA1234", "name": "CFDA TITLE 1234", "id": 1}],
        "messages": [get_time_period_message()],
    }
    assert CfdaViewSet().perform_search(payload, {}) == expected
def test_category_psc_awards(psc_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by PSC is returned sorted by descending amount."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "psc", "subawards": False, "page": 1, "limit": 50}
    actual = PSCViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 4, "code": "9876", "id": None, "name": "PSC DESCRIPTION DOWN"},
        {"amount": 2, "code": "1234", "id": None, "name": "PSC DESCRIPTION UP"},
    ]
    assert actual == {
        "category": "psc",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
def test_category_naics_awards(naics_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by NAICS is returned sorted by descending amount."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "naics", "subawards": False, "page": 1, "limit": 50}
    actual = NAICSViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 4, "code": "NAICS 9876", "name": "SOURCE NAICS DESC 9876", "id": None},
        {"amount": 2, "code": "NAICS 1234", "name": "SOURCE NAICS DESC 1234", "id": None},
    ]
    assert actual == {
        "category": "naics",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
def test_category_county_awards(geo_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by county is returned sorted by descending amount."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "county", "subawards": False, "page": 1, "limit": 50}
    actual = CountyViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 7, "code": "001", "name": "SOMEWHEREVILLE", "id": None},
        {"amount": 3, "code": "004", "name": "COUNTYSVILLE", "id": None},
    ]
    assert actual == {
        "category": "county",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
def test_category_county_subawards(geo_test_data):
    """Subaward spending grouped by county is returned sorted by descending amount."""
    payload = {"category": "county", "subawards": True, "page": 1, "limit": 50}
    actual = CountyViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 1100, "code": "001", "id": None, "name": "SOMEWHEREVILLE"},
        {"amount": 11, "code": "004", "id": None, "name": "COUNTYSVILLE"},
    ]
    assert actual == {
        "category": "county",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
def test_category_district_awards(geo_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by congressional district matches the fixture data."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "district", "subawards": False, "page": 1, "limit": 50}
    actual = DistrictViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 7, "code": "90", "name": "XY-MULTIPLE DISTRICTS", "id": None},
        {"amount": 3, "code": "06", "name": "XY-06", "id": None},
    ]
    assert actual == {
        "category": "district",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
def test_category_district_subawards(geo_test_data):
    """Subaward spending grouped by congressional district matches the fixture data."""
    payload = {"category": "district", "subawards": True, "page": 1, "limit": 50}
    actual = DistrictViewSet().perform_search(payload, {})
    expected_results = [
        {"amount": 1100, "code": "90", "id": None, "name": "XY-MULTIPLE DISTRICTS"},
        {"amount": 11, "code": "06", "id": None, "name": "XY-06"},
    ]
    assert actual == {
        "category": "district",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": expected_results,
        "messages": [get_time_period_message()],
    }
@pytest.mark.django_db
def test_category_state_territory(geo_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by state/territory returns the single seeded state."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "state_territory", "subawards": False, "page": 1, "limit": 50}
    actual = StateTerritoryViewSet().perform_search(payload, {})
    assert actual == {
        "category": "state_territory",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 10, "code": "XY", "name": "Test State", "id": None}],
        "messages": [get_time_period_message()],
    }
@pytest.mark.django_db
def test_category_state_territory_subawards(geo_test_data):
    """Subaward spending grouped by state/territory returns the single seeded state."""
    payload = {"category": "state_territory", "subawards": True, "page": 1, "limit": 50}
    actual = StateTerritoryViewSet().perform_search(payload, {})
    assert actual == {
        "category": "state_territory",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 1111, "code": "XY", "id": None, "name": "Test State"}],
        "messages": [get_time_period_message()],
    }
@pytest.mark.django_db
def test_category_country(geo_test_data, monkeypatch, elasticsearch_transaction_index):
    """Award spending grouped by country returns the single seeded country."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    payload = {"category": "country", "subawards": False, "page": 1, "limit": 50}
    actual = CountryViewSet().perform_search(payload, {})
    assert actual == {
        "category": "country",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 10, "code": "US", "name": "UNITED STATES", "id": None}],
        "messages": [get_time_period_message()],
    }
@pytest.mark.django_db
def test_category_country_subawards(geo_test_data):
    """Subaward spending grouped by country returns the single seeded country."""
    payload = {"category": "country", "subawards": True, "page": 1, "limit": 50}
    actual = CountryViewSet().perform_search(payload, {})
    assert actual == {
        "category": "country",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 1111, "code": "US", "id": None, "name": "UNITED STATES"}],
        "messages": [get_time_period_message()],
    }
@pytest.mark.django_db
def test_category_federal_accounts(federal_accounts_test_data, monkeypatch, elasticsearch_transaction_index):
    """Spending grouped by federal account, filtered to one recipient, matches the fixture."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    actual = FederalAccountViewSet().perform_search(
        {
            "category": "federal_account",
            "filters": {"recipient_id": "dece8b43-c2a8-d056-7e82-0fc2f1c7c4e4-R"},
            "subawards": False,
            "page": 1,
            "limit": 50,
        },
        {},
    )
    assert actual == {
        "category": "federal_account",
        "limit": 50,
        "page_metadata": {"page": 1, "next": None, "previous": None, "hasNext": False, "hasPrevious": False},
        "results": [{"amount": 3, "code": "020-0001", "name": "Test Federal Account", "id": 10}],
        "messages": [get_time_period_message()],
    }
| 35.635932
| 120
| 0.655045
| 4,209
| 37,489
| 5.537182
| 0.059872
| 0.042865
| 0.052776
| 0.045396
| 0.876169
| 0.838025
| 0.82601
| 0.767871
| 0.751824
| 0.744701
| 0
| 0.05013
| 0.211956
| 37,489
| 1,051
| 121
| 35.669838
| 0.738754
| 0
| 0
| 0.690286
| 0
| 0
| 0.233428
| 0.05679
| 0
| 0
| 0
| 0
| 0.026286
| 1
| 0.034286
| false
| 0
| 0.010286
| 0
| 0.044571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b28a8078f564deffd83a21b58f3a6d6aa2b0c0d8
| 17,402
|
py
|
Python
|
wikiwho_wrapper/views.py
|
robertour/wikiwho_wrapper
|
5390341d6ee8dbdd91168f61a53dfe2cd1d8e9ff
|
[
"MIT"
] | 3
|
2020-11-26T03:33:47.000Z
|
2021-03-12T17:24:31.000Z
|
wikiwho_wrapper/views.py
|
robertour/wikiwho_wrapper
|
5390341d6ee8dbdd91168f61a53dfe2cd1d8e9ff
|
[
"MIT"
] | 1
|
2020-04-13T23:06:56.000Z
|
2020-04-13T23:06:56.000Z
|
wikiwho_wrapper/views.py
|
robertour/wikiwho_wrapper
|
5390341d6ee8dbdd91168f61a53dfe2cd1d8e9ff
|
[
"MIT"
] | null | null | null |
"""Summary
"""
import pandas as pd
import itertools
from typing import Union
import deprecation
from .api import WikiWhoAPI
from . import __version__
class DataView:
    """Query methods mirroring the WikiWhoAPI methods, each returning a
    flat pandas DataFrame built from the API's nested JSON response.

    Attributes:
        api (WikiWhoAPI): the low-level API client used to fetch raw JSON.
    """

    def __init__(self, api):
        """Constructor of the DataView.

        Args:
            api (WikiWhoAPI): the WikiWhoAPI client to query.
        """
        self.api = api

    def __get_iterator(self, token_dict, _in, out):
        # Pair each token's inbound and outbound revision ids positionally,
        # yielding (index, (in_rev, out_rev)) tuples. The "in" list is
        # prefixed with -1 and the "out" list is suffixed with -1 so both
        # sides align; a side that was not requested collapses to (None,).
        if _in and out:
            # Both sides requested: after padding they have equal length,
            # so a plain zip suffices.
            return enumerate(zip(
                itertools.chain((-1,), token_dict["in"]) if _in else (None,),
                itertools.chain(token_dict["out"], (-1,)) if out else (None,)
            ))
        elif _in:
            # Only "in" requested: the out side is (None,) and zip_longest
            # pads it with None up to the length of the in side.
            return enumerate(itertools.zip_longest(
                itertools.chain((-1,), token_dict["in"]) if _in else (None,),
                itertools.chain(token_dict["out"], (-1,)) if out else (None,)
            ))
        elif out:
            # Only "out" requested. NOTE(review): unlike every other branch
            # the out chain is NOT suffixed with -1 here — presumably
            # intentional, but worth confirming against the API response.
            return enumerate(itertools.zip_longest(
                itertools.chain((-1,), token_dict["in"]) if _in else (None,),
                itertools.chain(token_dict["out"]) if out else (None,)
            ))
        else:
            # Neither side requested: both conditionals yield (None,), so
            # this produces a single (0, (None, None)) pair.
            return enumerate(itertools.zip_longest(
                itertools.chain((-1,), token_dict["in"]) if _in else (None,),
                itertools.chain(token_dict["out"], (-1,)) if out else (None,)
            ))

    def all_content(self,
                    article: Union[int, str],
                    o_rev_id: bool=True,
                    editor: bool=True,
                    token_id: bool=True,
                    out: bool=True,
                    _in: bool=True) -> pd.DataFrame:
        """Get all content on an article, i.e. outputs all tokens that have ever existed
        in a given article, including their change history for each.

        Args:
            article (Union[int, str]): page id (int) or title (str) of the page.
            o_rev_id (bool, optional): Origin revision ID per token
            editor (bool, optional): Editor ID/Name per token
            token_id (bool, optional): Token ID per token
            out (bool, optional): Outbound revision IDs per token
            _in (bool, optional): Inbound revision IDs per token

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in 2 - All content in
                https://api.wikiwho.net/en/api/v1.0.0-beta/
        """
        response = self.api.all_content(
            article, o_rev_id, editor, token_id, out, _in)
        # One row per (token, in/out revision pair); unrequested fields are
        # filled with None and their columns dropped below.
        rows = ((response["article_title"],
                 response["page_id"],
                 token_dict["o_rev_id"] if o_rev_id else None,
                 token_dict["editor"] if editor else None,
                 token_dict["str"],
                 token_dict["token_id"] if token_id else None,
                 _i,
                 _o)
                for token_dict in response["all_tokens"]
                for i, (_i, _o) in self.__get_iterator(token_dict, _in, out)
                )
        df = pd.DataFrame(data=rows, columns=[
            'article_title', 'page_id', 'o_rev_id', 'o_editor', 'token', 'token_id', 'in', 'out'
        ])
        # Keep only the columns the caller asked for.
        return df.drop(columns=[name for name, include in zip(
            ['o_rev_id', 'o_editor', 'token_id', 'in', 'out'],
            [o_rev_id, editor, token_id, _in, out]) if not include
        ])

    def last_rev_content(self,
                         article: Union[int, str],
                         o_rev_id: bool=True,
                         editor: bool=True,
                         token_id: bool=True,
                         out: bool=False,
                         _in: bool=False) -> pd.DataFrame:
        """Get the content of the most recent (last) revision of the given article, as available on Wikipedia.

        Args:
            article (Union[int, str]): page id (int) or title (str) of the page.
            o_rev_id (bool, optional): Origin revision ID per token
            editor (bool, optional): Editor ID/Name per token
            token_id (bool, optional): Token ID per token
            out (bool, optional): Outbound revision IDs per token
            _in (bool, optional): Inbound revision IDs per token

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in 1 - Content per revision for GET /rev_content/{article_title}/ and GET /rev_content/page_id/{page_id}/ in
                https://api.wikiwho.net/en/api/v1.0.0-beta/
        """
        response = self.api.last_rev_content(
            article, o_rev_id, editor, token_id, out, _in)
        # Each revision is a one-entry {rev_id: rev_dict} mapping; flatten
        # to one row per (revision, token, in/out pair).
        rows = ((response["article_title"],
                 response["page_id"],
                 token_dict["o_rev_id"] if o_rev_id else None,
                 token_dict["editor"] if editor else None,
                 rev_id,
                 rev_dict['editor'] if editor else None,
                 rev_dict['time'],
                 token_dict["str"],
                 token_dict["token_id"] if token_id else None,
                 _i,
                 _o)
                for dummy_rev in response["revisions"]
                for rev_id, rev_dict in dummy_rev.items()
                for token_dict in rev_dict['tokens']
                for i, (_i, _o) in self.__get_iterator(token_dict, _in, out)
                )
        df = pd.DataFrame(data=rows, columns=[
            'article_title', 'page_id', 'o_rev_id', 'o_editor', 'rev_id',
            'rev_editor', 'rev_time', 'token', 'token_id', 'in', 'out'
        ])
        # 'editor' controls both o_editor and rev_editor columns.
        return df.drop(columns=[name for name, include in zip(
            ['o_rev_id', 'o_editor', 'rev_editor', 'token_id', 'in', 'out'],
            [o_rev_id, editor, editor, token_id, _in, out]) if not include
        ])

    def specific_rev_content_by_rev_id(self,
                                       rev_id: int,
                                       article: Union[int, str]=None,
                                       o_rev_id: bool=True,
                                       editor: bool=True,
                                       token_id: bool=True,
                                       out: bool=False,
                                       _in: bool=False) -> pd.DataFrame:
        """Get the content of the given revision id.

        Args:
            rev_id (int): Revision ID to get content for.
            article (Union[int, str]): page id (int) or title (str) of the page.
            o_rev_id (bool, optional): Origin revision ID per token
            editor (bool, optional): Editor ID/Name per token
            token_id (bool, optional): Token ID per token
            out (bool, optional): Outbound revision IDs per token
            _in (bool, optional): Inbound revision IDs per token

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in 1 - Content per revision for GET /rev_content/rev_id/{rev_id}/ in
                https://api.wikiwho.net/en/api/v1.0.0-beta/

        Raises:
            ValueError: if the API response contains an 'Error' key.
        """
        response = self.api.specific_rev_content_by_rev_id(
            rev_id, article, o_rev_id, editor, token_id, out, _in)
        if 'Error' in response:
            raise ValueError(response['Error'])
        # NOTE: the comprehension's `rev_id` loop variable shadows the
        # `rev_id` parameter inside the row expression.
        rows = ((response["article_title"],
                 response["page_id"],
                 token_dict["o_rev_id"] if o_rev_id else None,
                 token_dict["editor"] if editor else None,
                 rev_id,
                 rev_dict['editor'] if editor else None,
                 rev_dict['time'],
                 token_dict["str"],
                 token_dict["token_id"] if token_id else None,
                 _i,
                 _o
                 )
                for dummy_rev in response["revisions"]
                for rev_id, rev_dict in dummy_rev.items()
                for token_dict in rev_dict['tokens']
                for i, (_i, _o) in self.__get_iterator(token_dict, _in, out)
                )
        df = pd.DataFrame(data=rows, columns=[
            'article_title', 'page_id', 'o_rev_id', 'o_editor', 'rev_id',
            'rev_editor', 'rev_time', 'token', 'token_id', 'in', 'out'
        ])
        return df.drop(columns=[name for name, include in zip(
            ['o_rev_id', 'o_editor', 'rev_editor', 'token_id', 'in', 'out'],
            [o_rev_id, editor, editor, token_id, _in, out]) if not include
        ])

    def range_rev_content_by_article_title(self,
                                           article: Union[int, str],
                                           start_rev_id: int,
                                           end_rev_id: int,
                                           o_rev_id: bool=True,
                                           editor: bool=True,
                                           token_id: bool=True,
                                           out: bool=False,
                                           _in: bool=False) -> pd.DataFrame:
        """Get the content of a range of revisions of an article, by given article title, start revision id and end revision id.

        Args:
            article (Union[int, str]): page id (int) or title (str) of the page.
            start_rev_id (int): Start revision ID
            end_rev_id (int): End revision ID
            o_rev_id (bool, optional): Origin revision ID per token
            editor (bool, optional): Editor ID/Name per token
            token_id (bool, optional): Token ID per token
            out (bool, optional): Outbound revision IDs per token
            _in (bool, optional): Inbound revision IDs per token

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in 1 - Content per revision for GET /rev_content/{article_title}/{start_rev_id}/{end_rev_id}/ in
                https://api.wikiwho.net/en/api/v1.0.0-beta/
        """
        response = self.api.range_rev_content_by_article_title(
            article, start_rev_id, end_rev_id, o_rev_id, editor, token_id, out, _in)
        # Same flattening scheme as last_rev_content: one row per
        # (revision, token, in/out pair).
        rows = ((response["article_title"],
                 response["page_id"],
                 token_dict["o_rev_id"] if o_rev_id else None,
                 token_dict["editor"] if editor else None,
                 rev_id,
                 rev_dict['editor'] if editor else None,
                 rev_dict['time'],
                 token_dict["str"],
                 token_dict["token_id"] if token_id else None,
                 _i,
                 _o
                 )
                for dummy_rev in response["revisions"]
                for rev_id, rev_dict in dummy_rev.items()
                for token_dict in rev_dict['tokens']
                for i, (_i, _o) in self.__get_iterator(token_dict, _in, out)
                )
        df = pd.DataFrame(data=rows, columns=[
            'article_title', 'page_id', 'o_rev_id', 'o_editor', 'rev_id',
            'rev_editor', 'rev_time', 'token', 'token_id', 'in', 'out'
        ])
        return df.drop(columns=[name for name, include in zip(
            ['o_rev_id', 'o_editor', 'rev_editor', 'token_id', 'in', 'out'],
            [o_rev_id, editor, editor, token_id, _in, out]) if not include
        ])

    def rev_ids_of_article(self,
                           article: Union[int, str],
                           editor: bool=True,
                           timestamp: bool=True) -> pd.DataFrame:
        """Get revision IDs of an article by given article title or page id.

        Args:
            article (Union[int, str]): page id (int) or title (str) of the page.
            editor (bool, optional): Editor ID/Name per revision
            timestamp (bool, optional): timestamp of each revision

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in 1 - Content per revision for GET /rev_ids/{article_title}/ and GET /rev_ids/page_id/{page_id}/ in
                https://api.wikiwho.net/en/api/v1.0.0-beta/
        """
        response = self.api.rev_ids_of_article(article, editor, timestamp)
        # One row per revision; unrequested fields become None columns
        # which are dropped below.
        rows = ((response["article_title"],
                 response["page_id"],
                 rev['timestamp'] if timestamp else None,
                 rev['id'],
                 rev['editor'] if editor else None
                 )
                for rev in response["revisions"]
                )
        df = pd.DataFrame(data=rows, columns=[
            'article_title', 'page_id', 'rev_time', 'rev_id', 'o_editor'
        ])
        return df.drop(columns=[name for name, include in zip(
            ['rev_time', 'o_editor'],
            [timestamp, editor]) if not include
        ])

    @deprecation.deprecated(deprecated_in="1.5", removed_in="1.6",
                            current_version=__version__,
                            details="Use the edit_persistence function instead.")
    def actions(self,
                page_id: int=None,
                editor_id: int=None,
                start: str=None,
                end: str=None) -> pd.DataFrame:
        """Get monthly editions for the given page id and/or editor id.

        Deprecated: use :meth:`edit_persistence` instead.

        Args:
            page_id (int, optional): page id (int).
            editor_id (int, optional): editor id (int).
            start (str, optional): start of the date range to query.
            end (str, optional): end of the date range to query.

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in /editor/{editor_id}/ in
                https://www.wikiwho.net/en/edit_persistence/v1.0.0-beta/
        """
        response = self.api.edit_persistence(page_id, editor_id, start, end)
        # One row per monthly edition record returned by the API.
        rows = ((element['year_month'],
                 element["page_id"],
                 element["editor_id"],
                 element["adds"],
                 element["adds_surv_48h"],
                 element["adds_persistent"],
                 element["adds_stopword_count"],
                 element["dels"],
                 element["dels_surv_48h"],
                 element["dels_persistent"],
                 element["dels_stopword_count"],
                 element["reins"],
                 element["reins_surv_48h"],
                 element["reins_persistent"],
                 element["reins_stopword_count"],
                 )
                for element in response["editions"]
                )
        df = pd.DataFrame(data=rows, columns=[
            'year_month', 'page_id', 'editor_id',
            'adds', 'adds_surv_48h', 'adds_persistent', 'adds_stopword_count',
            'dels', 'dels_surv_48h', 'dels_persistent', 'dels_stopword_count',
            'reins', 'reins_surv_48h', 'reins_persistent', 'reins_stopword_count'
        ])
        return df

    @deprecation.deprecated(deprecated_in="1.5", removed_in="1.6",
                            current_version=__version__,
                            details="Use the edit_persistence function instead.")
    def actions_as_table(self,
                         page_id: int=None,
                         editor_id: int=None,
                         start: str=None,
                         end: str=None) -> pd.DataFrame:
        """Get monthly editions in tabular format for given page id or editor id or both.

        Deprecated: use :meth:`edit_persistence` instead (same implementation).

        Args:
            page_id (int, optional): page id (int).
            editor_id (int, optional): editor id (int).
            start (str, optional): start of the date range to query.
            end (str, optional): end of the date range to query.

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in /editor/{editor_id}/ in
                https://www.wikiwho.net/en/edit_persistence/v1.0.0-beta/
        """
        response = self.api.edit_persistence_as_table(
            page_id, editor_id, start, end)
        # The tabular endpoint already returns column names and row data.
        df = pd.DataFrame(data=response['editions_data'], columns=response[
            'editions_columns'])
        return df

    def edit_persistence(self,
                         page_id: int=None,
                         editor_id: int=None,
                         start: str=None,
                         end: str=None) -> pd.DataFrame:
        """Get monthly editions for the given page id and/or editor id.

        Args:
            page_id (int, optional): page id (int).
            editor_id (int, optional): editor id (int).
            start (str, optional): start of the date range to query.
            end (str, optional): end of the date range to query.

        Returns:
            pd.DataFrame: Return a Pandas DataFrame of the api query as documented in /editor/{editor_id}/ in
                https://www.wikiwho.net/en/edit_persistence/v1.0.0-beta/
        """
        response = self.api.edit_persistence_as_table(
            page_id, editor_id, start, end)
        # Identical body to actions_as_table; this is the non-deprecated name.
        df = pd.DataFrame(data=response['editions_data'], columns=response[
            'editions_columns'])
        return df
| 42.756757
| 192
| 0.514941
| 2,027
| 17,402
| 4.211643
| 0.076961
| 0.03397
| 0.02249
| 0.016868
| 0.805318
| 0.787045
| 0.764906
| 0.753426
| 0.748975
| 0.73937
| 0
| 0.005224
| 0.384036
| 17,402
| 406
| 193
| 42.862069
| 0.791212
| 0.275773
| 0
| 0.658333
| 0
| 0
| 0.119073
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.025
| 0
| 0.120833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b2a05a157e8340ebdd20fdda596698afe6cf8cf8
| 202
|
py
|
Python
|
colour/colorimetry/datasets/light_sources/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 2
|
2020-05-03T20:15:42.000Z
|
2021-04-09T18:19:06.000Z
|
colour/colorimetry/datasets/light_sources/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | null | null | null |
colour/colorimetry/datasets/light_sources/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 1
|
2019-12-11T19:48:27.000Z
|
2019-12-11T19:48:27.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .chromaticity_coordinates import LIGHT_SOURCES
from .sds import LIGHT_SOURCES_SDS
# Explicit public API of the light-sources datasets sub-package.
__all__ = ['LIGHT_SOURCES', 'LIGHT_SOURCES_SDS']
| 22.444444
| 51
| 0.787129
| 26
| 202
| 5.5
| 0.5
| 0.335664
| 0.251748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005618
| 0.118812
| 202
| 8
| 52
| 25.25
| 0.797753
| 0.10396
| 0
| 0
| 0
| 0
| 0.167598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a24a386fb31b926f4a3f2624fcb3841bbc0fbd5c
| 93
|
py
|
Python
|
migration.py
|
FromSi/TP_bot
|
2a3487f517d3a9d13a95607e5c7b3157b37154cb
|
[
"MIT"
] | 1
|
2020-11-23T12:23:12.000Z
|
2020-11-23T12:23:12.000Z
|
migration.py
|
FromSi/TP_bot
|
2a3487f517d3a9d13a95607e5c7b3157b37154cb
|
[
"MIT"
] | 2
|
2021-06-01T23:59:48.000Z
|
2021-12-13T20:06:41.000Z
|
migration.py
|
FromSi/TP_bot
|
2a3487f517d3a9d13a95607e5c7b3157b37154cb
|
[
"MIT"
] | 1
|
2019-08-30T06:04:12.000Z
|
2019-08-30T06:04:12.000Z
|
from bot import manager
from bot import models
# NOTE(review): `models` is imported for its side effects only (presumably
# to register the ORM models before the manager runs) — confirm against bot/.
if __name__ == '__main__':
    manager.run()
| 15.5
| 26
| 0.72043
| 13
| 93
| 4.538462
| 0.692308
| 0.237288
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 93
| 6
| 27
| 15.5
| 0.786667
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a29dfe8c7ef29c7b4b4f9bc08dfe3e25b01760ee
| 25,611
|
py
|
Python
|
mayan/apps/document_states/tests/test_api.py
|
nadwiabd/insight_edms
|
90a09d7ca77cb111c791e307b55a603e82042dfe
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/document_states/tests/test_api.py
|
nadwiabd/insight_edms
|
90a09d7ca77cb111c791e307b55a603e82042dfe
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/document_states/tests/test_api.py
|
nadwiabd/insight_edms
|
90a09d7ca77cb111c791e307b55a603e82042dfe
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import, unicode_literals
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from django.test import override_settings
from rest_framework.test import APITestCase
from acls.models import AccessControlList
from documents.models import DocumentType
from documents.tests.literals import (
TEST_DOCUMENT_TYPE, TEST_SMALL_DOCUMENT_PATH
)
from permissions import Permission
from permissions.models import Role
from permissions.tests.literals import TEST_ROLE_LABEL
from rest_api.tests import BaseAPITestCase
from user_management.tests import (
TEST_ADMIN_EMAIL, TEST_ADMIN_PASSWORD, TEST_ADMIN_USERNAME,
TEST_GROUP_NAME, TEST_USER_EMAIL, TEST_USER_USERNAME, TEST_USER_PASSWORD
)
from ..models import Workflow
from ..permissions import permission_workflow_transition
from .literals import (
TEST_WORKFLOW_LABEL, TEST_WORKFLOW_LABEL_EDITED,
TEST_WORKFLOW_INITIAL_STATE_COMPLETION, TEST_WORKFLOW_INITIAL_STATE_LABEL,
TEST_WORKFLOW_INSTANCE_LOG_ENTRY_COMMENT, TEST_WORKFLOW_STATE_COMPLETION,
TEST_WORKFLOW_STATE_LABEL, TEST_WORKFLOW_STATE_LABEL_EDITED,
TEST_WORKFLOW_TRANSITION_LABEL, TEST_WORKFLOW_TRANSITION_LABEL_EDITED
)
@override_settings(OCR_AUTO_OCR=False)
class WorkflowAPITestCase(BaseAPITestCase):
    """API tests for workflow CRUD endpoints and the workflow/document-type
    relationship. Runs as a superuser; OCR is disabled so the document
    upload in setUp stays fast.
    """

    def setUp(self):
        super(WorkflowAPITestCase, self).setUp()
        # Authenticate as a superuser so permission checks never interfere.
        self.admin_user = get_user_model().objects.create_superuser(
            username=TEST_ADMIN_USERNAME, email=TEST_ADMIN_EMAIL,
            password=TEST_ADMIN_PASSWORD
        )
        self.client.login(
            username=TEST_ADMIN_USERNAME, password=TEST_ADMIN_PASSWORD
        )
        self.document_type = DocumentType.objects.create(
            label=TEST_DOCUMENT_TYPE
        )
        with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
            self.document = self.document_type.new_document(
                file_object=file_object
            )

    def tearDown(self):
        # Guard with hasattr in case setUp failed before creating it.
        if hasattr(self, 'document_type'):
            self.document_type.delete()
        super(WorkflowAPITestCase, self).tearDown()

    def _create_workflow(self):
        """Create and return a workflow with the canonical test label."""
        return Workflow.objects.create(label=TEST_WORKFLOW_LABEL)

    def test_workflow_create_view(self):
        response = self.client.post(
            reverse('rest_api:workflow-list'), {
                'label': TEST_WORKFLOW_LABEL
            }
        )
        workflow = Workflow.objects.first()
        self.assertEqual(Workflow.objects.count(), 1)
        self.assertEqual(response.data['id'], workflow.pk)

    def test_workflow_create_with_document_type_view(self):
        response = self.client.post(
            reverse('rest_api:workflow-list'), {
                'label': TEST_WORKFLOW_LABEL,
                'document_types_pk_list': '{}'.format(self.document_type.pk)
            }
        )
        workflow = Workflow.objects.first()
        self.assertEqual(Workflow.objects.count(), 1)
        self.assertQuerysetEqual(
            workflow.document_types.all(), (repr(self.document_type),)
        )
        self.assertEqual(response.data['id'], workflow.pk)

    def test_workflow_delete_view(self):
        workflow = self._create_workflow()
        self.client.delete(
            reverse('rest_api:workflow-detail', args=(workflow.pk,))
        )
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_detail_view(self):
        workflow = self._create_workflow()
        response = self.client.get(
            reverse('rest_api:workflow-detail', args=(workflow.pk,))
        )
        self.assertEqual(response.data['label'], workflow.label)

    def test_workflow_document_type_create_view(self):
        workflow = self._create_workflow()
        self.client.post(
            reverse(
                'rest_api:workflow-document-type-list',
                args=(workflow.pk,)
            ), data={'document_type_pk': self.document_type.pk}
        )
        self.assertQuerysetEqual(
            workflow.document_types.all(), (repr(self.document_type),)
        )

    def test_workflow_document_type_delete_view(self):
        workflow = self._create_workflow()
        workflow.document_types.add(self.document_type)
        self.client.delete(
            reverse(
                'rest_api:workflow-document-type-detail',
                args=(workflow.pk, self.document_type.pk)
            )
        )
        workflow.refresh_from_db()
        self.assertQuerysetEqual(workflow.document_types.all(), ())
        # The workflow document type entry was deleted and not the document
        # type itself.
        self.assertQuerysetEqual(
            DocumentType.objects.all(), (repr(self.document_type),)
        )

    def test_workflow_document_type_detail_view(self):
        workflow = self._create_workflow()
        workflow.document_types.add(self.document_type)
        response = self.client.get(
            reverse(
                'rest_api:workflow-document-type-detail',
                args=(workflow.pk, self.document_type.pk)
            )
        )
        self.assertEqual(response.data['label'], self.document_type.label)

    def test_workflow_document_type_list_view(self):
        workflow = self._create_workflow()
        workflow.document_types.add(self.document_type)
        response = self.client.get(
            reverse(
                'rest_api:workflow-document-type-list', args=(workflow.pk,)
            )
        )
        self.assertEqual(
            response.data['results'][0]['label'], self.document_type.label
        )

    def test_workflow_list_view(self):
        workflow = self._create_workflow()
        response = self.client.get(reverse('rest_api:workflow-list'))
        self.assertEqual(response.data['results'][0]['label'], workflow.label)

    def test_workflow_put_view(self):
        workflow = self._create_workflow()
        self.client.put(
            reverse('rest_api:workflow-detail', args=(workflow.pk,)),
            data={'label': TEST_WORKFLOW_LABEL_EDITED}
        )
        workflow.refresh_from_db()
        self.assertEqual(workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_workflow_patch_view(self):
        workflow = self._create_workflow()
        self.client.patch(
            reverse('rest_api:workflow-detail', args=(workflow.pk,)),
            data={'label': TEST_WORKFLOW_LABEL_EDITED}
        )
        workflow.refresh_from_db()
        self.assertEqual(workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_document_type_workflow_list(self):
        workflow = self._create_workflow()
        workflow.document_types.add(self.document_type)
        response = self.client.get(
            reverse(
                'rest_api:documenttype-workflow-list',
                args=(self.document_type.pk,)
            ),
        )
        self.assertEqual(response.data['results'][0]['label'], workflow.label)
@override_settings(OCR_AUTO_OCR=False)
class WorkflowStatesAPITestCase(BaseAPITestCase):
def setUp(self):
super(WorkflowStatesAPITestCase, self).setUp()
self.admin_user = get_user_model().objects.create_superuser(
username=TEST_ADMIN_USERNAME, email=TEST_ADMIN_EMAIL,
password=TEST_ADMIN_PASSWORD
)
self.client.login(
username=TEST_ADMIN_USERNAME, password=TEST_ADMIN_PASSWORD
)
self.document_type = DocumentType.objects.create(
label=TEST_DOCUMENT_TYPE
)
with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
self.document = self.document_type.new_document(
file_object=file_object
)
def tearDown(self):
if hasattr(self, 'document_type'):
self.document_type.delete()
super(WorkflowStatesAPITestCase, self).tearDown()
def _create_workflow(self):
self.workflow = Workflow.objects.create(label=TEST_WORKFLOW_LABEL)
def _create_workflow_state(self):
self._create_workflow()
self.workflow_state = self.workflow.states.create(
completion=TEST_WORKFLOW_STATE_COMPLETION,
label=TEST_WORKFLOW_STATE_LABEL
)
def test_workflow_state_create_view(self):
self._create_workflow()
self.client.post(
reverse(
'rest_api:workflowstate-list', args=(self.workflow.pk,)
), data={
'completion': TEST_WORKFLOW_STATE_COMPLETION,
'label': TEST_WORKFLOW_STATE_LABEL
}
)
self.workflow.refresh_from_db()
self.assertEqual(
self.workflow.states.first().label, TEST_WORKFLOW_STATE_LABEL
)
def test_workflow_state_delete_view(self):
self._create_workflow_state()
self.client.delete(
reverse(
'rest_api:workflowstate-detail',
args=(self.workflow.pk, self.workflow_state.pk)
),
)
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.states.count(), 0)
def test_workflow_state_detail_view(self):
self._create_workflow_state()
response = self.client.get(
reverse(
'rest_api:workflowstate-detail',
args=(self.workflow.pk, self.workflow_state.pk)
),
)
self.assertEqual(
response.data['label'], TEST_WORKFLOW_STATE_LABEL
)
def test_workflow_state_list_view(self):
self._create_workflow_state()
response = self.client.get(
reverse('rest_api:workflowstate-list', args=(self.workflow.pk,)),
)
self.assertEqual(
response.data['results'][0]['label'], TEST_WORKFLOW_STATE_LABEL
)
def test_workflow_state_patch_view(self):
    """PATCH on the state detail endpoint edits the state's label."""
    self._create_workflow_state()
    self.client.patch(
        reverse(
            'rest_api:workflowstate-detail',
            args=(self.workflow.pk, self.workflow_state.pk)
        ),
        data={'label': TEST_WORKFLOW_STATE_LABEL_EDITED}
    )
    self.workflow_state.refresh_from_db()
    self.assertEqual(
        self.workflow_state.label,
        TEST_WORKFLOW_STATE_LABEL_EDITED
    )
def test_workflow_state_put_view(self):
    """PUT on the state detail endpoint replaces the state's label."""
    self._create_workflow_state()
    self.client.put(
        reverse(
            'rest_api:workflowstate-detail',
            args=(self.workflow.pk, self.workflow_state.pk)
        ),
        data={'label': TEST_WORKFLOW_STATE_LABEL_EDITED}
    )
    self.workflow_state.refresh_from_db()
    self.assertEqual(
        self.workflow_state.label,
        TEST_WORKFLOW_STATE_LABEL_EDITED
    )
@override_settings(OCR_AUTO_OCR=False)
class WorkflowTransitionsAPITestCase(BaseAPITestCase):
    """REST API tests for the workflow transition endpoints.

    Runs as a superuser; OCR is disabled so document upload does not
    queue background OCR work during tests.
    """

    def setUp(self):
        super(WorkflowTransitionsAPITestCase, self).setUp()
        self.admin_user = get_user_model().objects.create_superuser(
            username=TEST_ADMIN_USERNAME, email=TEST_ADMIN_EMAIL,
            password=TEST_ADMIN_PASSWORD
        )
        self.client.login(
            username=TEST_ADMIN_USERNAME, password=TEST_ADMIN_PASSWORD
        )
        self.document_type = DocumentType.objects.create(
            label=TEST_DOCUMENT_TYPE
        )
        with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
            self.document = self.document_type.new_document(
                file_object=file_object
            )

    def tearDown(self):
        """Delete the document type (cascades to its documents)."""
        if hasattr(self, 'document_type'):
            self.document_type.delete()
        super(WorkflowTransitionsAPITestCase, self).tearDown()

    def _create_workflow(self):
        """Fixture helper: create a bare workflow."""
        self.workflow = Workflow.objects.create(label=TEST_WORKFLOW_LABEL)

    def _create_workflow_states(self):
        """Fixture helper: create a workflow with two states."""
        self._create_workflow()
        self.workflow_state_1 = self.workflow.states.create(
            completion=TEST_WORKFLOW_INITIAL_STATE_COMPLETION,
            label=TEST_WORKFLOW_INITIAL_STATE_LABEL
        )
        self.workflow_state_2 = self.workflow.states.create(
            completion=TEST_WORKFLOW_STATE_COMPLETION,
            label=TEST_WORKFLOW_STATE_LABEL
        )

    def _create_workflow_transition(self):
        """Fixture helper: create a transition between the two states."""
        self._create_workflow_states()
        self.workflow_transition = self.workflow.transitions.create(
            label=TEST_WORKFLOW_TRANSITION_LABEL,
            origin_state=self.workflow_state_1,
            destination_state=self.workflow_state_2,
        )

    def test_workflow_transition_create_view(self):
        """POST to the transition list endpoint creates a transition."""
        self._create_workflow_states()
        self.client.post(
            reverse(
                'rest_api:workflowtransition-list', args=(self.workflow.pk,)
            ), data={
                'label': TEST_WORKFLOW_TRANSITION_LABEL,
                'origin_state_pk': self.workflow_state_1.pk,
                'destination_state_pk': self.workflow_state_2.pk,
            }
        )
        self.workflow.refresh_from_db()
        self.assertEqual(
            self.workflow.transitions.first().label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_delete_view(self):
        """DELETE on the transition detail endpoint removes it."""
        self._create_workflow_transition()
        self.client.delete(
            reverse(
                'rest_api:workflowtransition-detail',
                args=(self.workflow.pk, self.workflow_transition.pk)
            ),
        )
        self.workflow.refresh_from_db()
        self.assertEqual(self.workflow.transitions.count(), 0)

    def test_workflow_transition_detail_view(self):
        """GET on the transition detail endpoint returns its label."""
        self._create_workflow_transition()
        response = self.client.get(
            reverse(
                'rest_api:workflowtransition-detail',
                args=(self.workflow.pk, self.workflow_transition.pk)
            ),
        )
        self.assertEqual(
            response.data['label'], TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_list_view(self):
        """GET on the transition list endpoint includes the transition."""
        self._create_workflow_transition()
        response = self.client.get(
            reverse(
                'rest_api:workflowtransition-list', args=(self.workflow.pk,)
            ),
        )
        self.assertEqual(
            response.data['results'][0]['label'],
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_patch_view(self):
        """PATCH edits the transition's label and swaps its states."""
        self._create_workflow_transition()
        self.client.patch(
            reverse(
                'rest_api:workflowtransition-detail',
                args=(self.workflow.pk, self.workflow_transition.pk)
            ),
            data={
                'label': TEST_WORKFLOW_TRANSITION_LABEL_EDITED,
                'origin_state_pk': self.workflow_state_2.pk,
                'destination_state_pk': self.workflow_state_1.pk,
            }
        )
        self.workflow_transition.refresh_from_db()
        self.assertEqual(
            self.workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL_EDITED
        )
        self.assertEqual(
            self.workflow_transition.origin_state,
            self.workflow_state_2
        )
        self.assertEqual(
            self.workflow_transition.destination_state,
            self.workflow_state_1
        )

    def test_workflow_transition_put_view(self):
        """PUT replaces the transition's label and swaps its states."""
        self._create_workflow_transition()
        self.client.put(
            reverse(
                'rest_api:workflowtransition-detail',
                args=(self.workflow.pk, self.workflow_transition.pk)
            ),
            data={
                'label': TEST_WORKFLOW_TRANSITION_LABEL_EDITED,
                'origin_state_pk': self.workflow_state_2.pk,
                'destination_state_pk': self.workflow_state_1.pk,
            }
        )
        self.workflow_transition.refresh_from_db()
        self.assertEqual(
            self.workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL_EDITED
        )
        self.assertEqual(
            self.workflow_transition.origin_state,
            self.workflow_state_2
        )
        self.assertEqual(
            self.workflow_transition.destination_state,
            self.workflow_state_1
        )
@override_settings(OCR_AUTO_OCR=False)
class DocumentWorkflowsAPITestCase(BaseAPITestCase):
    """REST API tests for per-document workflow instances and log entries.

    Documents are created AFTER the workflow is linked to the document
    type so that a workflow instance is launched automatically on upload.
    """

    def setUp(self):
        super(DocumentWorkflowsAPITestCase, self).setUp()
        self.admin_user = get_user_model().objects.create_superuser(
            username=TEST_ADMIN_USERNAME, email=TEST_ADMIN_EMAIL,
            password=TEST_ADMIN_PASSWORD
        )
        self.client.login(
            username=TEST_ADMIN_USERNAME, password=TEST_ADMIN_PASSWORD
        )
        self.document_type = DocumentType.objects.create(
            label=TEST_DOCUMENT_TYPE
        )

    def tearDown(self):
        """Delete the document type (cascades to its documents)."""
        if hasattr(self, 'document_type'):
            self.document_type.delete()
        super(DocumentWorkflowsAPITestCase, self).tearDown()

    def _create_document(self):
        """Upload the small test document; spawns the workflow instance."""
        with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
            self.document = self.document_type.new_document(
                file_object=file_object
            )

    def _create_workflow(self):
        """Create a workflow and attach it to the test document type."""
        self.workflow = Workflow.objects.create(label=TEST_WORKFLOW_LABEL)
        self.workflow.document_types.add(self.document_type)

    def _create_workflow_states(self):
        """Create two states; the first is marked as the initial state."""
        self._create_workflow()
        self.workflow_state_1 = self.workflow.states.create(
            completion=TEST_WORKFLOW_INITIAL_STATE_COMPLETION,
            initial=True, label=TEST_WORKFLOW_INITIAL_STATE_LABEL
        )
        self.workflow_state_2 = self.workflow.states.create(
            completion=TEST_WORKFLOW_STATE_COMPLETION,
            label=TEST_WORKFLOW_STATE_LABEL
        )

    def _create_workflow_transition(self):
        """Create a transition between the two states."""
        self._create_workflow_states()
        self.workflow_transition = self.workflow.transitions.create(
            label=TEST_WORKFLOW_TRANSITION_LABEL,
            origin_state=self.workflow_state_1,
            destination_state=self.workflow_state_2,
        )

    def _create_workflow_instance_log_entry(self):
        """Record one transition in the document's workflow instance log."""
        self.document.workflows.first().log_entries.create(
            comment=TEST_WORKFLOW_INSTANCE_LOG_ENTRY_COMMENT, transition=self.workflow_transition,
            user=self.admin_user
        )

    def test_workflow_instance_detail_view(self):
        """GET on the instance detail endpoint returns its workflow label."""
        self._create_workflow_transition()
        self._create_document()
        response = self.client.get(
            reverse(
                'rest_api:workflowinstance-detail', args=(
                    self.document.pk, self.document.workflows.first().pk
                )
            ),
        )
        self.assertEqual(
            response.data['workflow']['label'],
            TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_list_view(self):
        """GET on the instance list endpoint includes the instance."""
        self._create_workflow_transition()
        self._create_document()
        response = self.client.get(
            reverse(
                'rest_api:workflowinstance-list', args=(self.document.pk,)
            ),
        )
        self.assertEqual(
            response.data['results'][0]['workflow']['label'],
            TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_log_entries_create_view(self):
        """POST to the log entry endpoint transitions the instance."""
        self._create_workflow_transition()
        self._create_document()
        workflow_instance = self.document.workflows.first()
        self.client.post(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, workflow_instance.pk
                ),
            ), data={'transition_pk': self.workflow_transition.pk}
        )
        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_instance_log_entries_list_view(self):
        """GET on the log entry endpoint lists the recorded transition."""
        self._create_workflow_transition()
        self._create_document()
        self._create_workflow_instance_log_entry()
        response = self.client.get(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, self.document.workflows.first().pk
                )
            ),
        )
        self.assertEqual(
            response.data['results'][0]['transition']['label'],
            TEST_WORKFLOW_TRANSITION_LABEL
        )
@override_settings(OCR_AUTO_OCR=False)
class DocumentWorkflowsTransitionACLsAPITestCase(APITestCase):
    """ACL tests for transitioning a document's workflow over the API.

    Unlike the other cases this one logs in a NON-superuser, so access
    must come from an explicit permission or an ACL grant.
    """

    def setUp(self):
        self.user = get_user_model().objects.create_user(
            username=TEST_USER_USERNAME, email=TEST_USER_EMAIL,
            password=TEST_USER_PASSWORD
        )
        self.client.login(
            username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD
        )
        self.document_type = DocumentType.objects.create(
            label=TEST_DOCUMENT_TYPE
        )
        self.group = Group.objects.create(name=TEST_GROUP_NAME)
        self.role = Role.objects.create(label=TEST_ROLE_LABEL)
        self.group.user_set.add(self.user)
        self.role.groups.add(self.group)
        # Drop the cached permission table so new grants take effect.
        Permission.invalidate_cache()

    def tearDown(self):
        """Delete the document type (cascades to its documents)."""
        if hasattr(self, 'document_type'):
            self.document_type.delete()

    def _create_document(self):
        """Upload the small test document; spawns the workflow instance."""
        with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
            self.document = self.document_type.new_document(
                file_object=file_object
            )

    def _create_workflow(self):
        """Create a workflow and attach it to the test document type."""
        self.workflow = Workflow.objects.create(label=TEST_WORKFLOW_LABEL)
        self.workflow.document_types.add(self.document_type)

    def _create_workflow_states(self):
        """Create two states; the first is marked as the initial state."""
        self._create_workflow()
        self.workflow_state_1 = self.workflow.states.create(
            completion=TEST_WORKFLOW_INITIAL_STATE_COMPLETION,
            initial=True, label=TEST_WORKFLOW_INITIAL_STATE_LABEL
        )
        self.workflow_state_2 = self.workflow.states.create(
            completion=TEST_WORKFLOW_STATE_COMPLETION,
            label=TEST_WORKFLOW_STATE_LABEL
        )

    def _create_workflow_transition(self):
        """Create a transition between the two states."""
        self._create_workflow_states()
        self.workflow_transition = self.workflow.transitions.create(
            label=TEST_WORKFLOW_TRANSITION_LABEL,
            origin_state=self.workflow_state_1,
            destination_state=self.workflow_state_2,
        )

    def test_workflow_transition_view_no_permission(self):
        """Without any grant, the transition POST must be a no-op."""
        self._create_workflow_transition()
        self._create_document()
        workflow_instance = self.document.workflows.first()
        self.client.post(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, workflow_instance.pk
                ),
            ), data={'transition_pk': self.workflow_transition.pk}
        )
        workflow_instance.refresh_from_db()
        self.assertEqual(workflow_instance.log_entries.count(), 0)

    def test_workflow_transition_view_with_permission(self):
        """A global role permission allows the transition."""
        self._create_workflow_transition()
        self._create_document()
        workflow_instance = self.document.workflows.first()
        self.role.permissions.add(
            permission_workflow_transition.stored_permission
        )
        self.client.post(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, workflow_instance.pk
                ),
            ), data={'transition_pk': self.workflow_transition.pk}
        )
        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_view_with_workflow_acl(self):
        """An ACL on the WORKFLOW object allows the transition."""
        self._create_workflow_transition()
        self._create_document()
        workflow_instance = self.document.workflows.first()
        acl = AccessControlList.objects.create(
            content_object=self.workflow, role=self.role
        )
        acl.permissions.add(permission_workflow_transition.stored_permission)
        self.client.post(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, workflow_instance.pk
                ),
            ), data={'transition_pk': self.workflow_transition.pk}
        )
        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_view_transition_acl(self):
        """An ACL on the TRANSITION object alone also allows it."""
        self._create_workflow_transition()
        self._create_document()
        workflow_instance = self.document.workflows.first()
        acl = AccessControlList.objects.create(
            content_object=self.workflow_transition, role=self.role
        )
        acl.permissions.add(permission_workflow_transition.stored_permission)
        self.client.post(
            reverse(
                'rest_api:workflowinstancelogentry-list', args=(
                    self.document.pk, workflow_instance.pk
                ),
            ), data={'transition_pk': self.workflow_transition.pk}
        )
        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )
| 32.134253
| 98
| 0.639608
| 2,639
| 25,611
| 5.859038
| 0.050777
| 0.07761
| 0.052775
| 0.038417
| 0.879317
| 0.8528
| 0.812508
| 0.788708
| 0.73755
| 0.683029
| 0
| 0.001881
| 0.273398
| 25,611
| 796
| 99
| 32.174623
| 0.829007
| 0.003046
| 0
| 0.628389
| 0
| 0
| 0.057932
| 0.040227
| 0
| 0
| 0
| 0
| 0.063796
| 1
| 0.090909
| false
| 0.019139
| 0.027113
| 0.001595
| 0.127592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a2b2f4a6d82acdb4737c1aa10ddfde50d69630b8
| 45
|
py
|
Python
|
stringcluster/__init__.py
|
chris-santiago/stringcluster
|
c3971e8ab585e422d022870aaf42539d3f2f7503
|
[
"MIT"
] | null | null | null |
stringcluster/__init__.py
|
chris-santiago/stringcluster
|
c3971e8ab585e422d022870aaf42539d3f2f7503
|
[
"MIT"
] | null | null | null |
stringcluster/__init__.py
|
chris-santiago/stringcluster
|
c3971e8ab585e422d022870aaf42539d3f2f7503
|
[
"MIT"
] | null | null | null |
from .base import STOP_TOKENS, StringCluster
| 22.5
| 44
| 0.844444
| 6
| 45
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 1
| 45
| 45
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0c440055e91cb43ad7881803b3e421f0b435fed0
| 259
|
py
|
Python
|
django_productline/features/multilanguage_switcher/feature.py
|
henzk/django-productline
|
24ff156924c1a8c07b99cbb8a1de0a42b8d81f60
|
[
"MIT"
] | 5
|
2015-06-16T17:36:33.000Z
|
2017-10-17T19:22:59.000Z
|
django_productline/features/multilanguage_switcher/feature.py
|
henzk/django-productline
|
24ff156924c1a8c07b99cbb8a1de0a42b8d81f60
|
[
"MIT"
] | 8
|
2016-03-14T09:02:13.000Z
|
2017-11-16T16:00:31.000Z
|
django_productline/features/djpladmin/feature.py
|
henzk/django-productline
|
24ff156924c1a8c07b99cbb8a1de0a42b8d81f60
|
[
"MIT"
] | 17
|
2015-08-04T18:45:18.000Z
|
2017-11-16T14:52:46.000Z
|
def select(composer):
    """Compose this feature's settings and URLs onto the product line's.

    Called by django-productline when the feature is selected; the
    composition order (settings first, then urls) is preserved.
    """
    from . import settings
    import django_productline.settings
    composer.compose(settings, django_productline.settings)
    from . import urls
    import django_productline.urls
    composer.compose(urls, django_productline.urls)
| 28.777778
| 59
| 0.76834
| 29
| 259
| 6.724138
| 0.344828
| 0.348718
| 0.235897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166023
| 259
| 8
| 60
| 32.375
| 0.902778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.571429
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0c4b6b5bd509a00536dc3aa59fdbd0961f16a0a2
| 655
|
py
|
Python
|
artssat/scattering/psd/__init__.py
|
simonpf/pARTS
|
b4d9f4c2ceac594273c5589e44fe6a3a4f8d7028
|
[
"MIT"
] | 3
|
2020-09-02T08:20:42.000Z
|
2020-12-18T17:19:38.000Z
|
artssat/scattering/psd/__init__.py
|
simonpf/pARTS
|
b4d9f4c2ceac594273c5589e44fe6a3a4f8d7028
|
[
"MIT"
] | null | null | null |
artssat/scattering/psd/__init__.py
|
simonpf/pARTS
|
b4d9f4c2ceac594273c5589e44fe6a3a4f8d7028
|
[
"MIT"
] | null | null | null |
"""
The PSD Submodule
=================
The PSD submodule provides implementations of various particle size
distributions for the use in scattering calculations.
In addition to that, :code:`artssat.scattering.psd.arts` subpackage defines
the interface for PSDs in ARTS, while the :code:`artssat.scattering.psd.data`
subpackage provides functionality for the handling of PSD data.
"""
from artssat.scattering.psd.d14 import D14, D14N, D14MN
from artssat.scattering.psd.my05 import MY05
from artssat.scattering.psd.ab12 import AB12
from artssat.scattering.psd.binned import Binned
from artssat.scattering.psd.fixed_shape import FixedShape
| 36.388889
| 77
| 0.778626
| 90
| 655
| 5.655556
| 0.444444
| 0.233792
| 0.275049
| 0.235756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028169
| 0.132824
| 655
| 17
| 78
| 38.529412
| 0.867958
| 0.575573
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0c629c3c1d17c38c68d1c9fa864f0987276a879a
| 377
|
py
|
Python
|
stapy/sta/entities/__init__.py
|
zMoooooritz/STApy
|
022183e0a35ba0d73b97986a695b2a1e6bd0c77c
|
[
"MIT"
] | 8
|
2021-09-02T18:53:19.000Z
|
2022-03-10T13:40:57.000Z
|
stapy/sta/entities/__init__.py
|
zMoooooritz/stapy
|
022183e0a35ba0d73b97986a695b2a1e6bd0c77c
|
[
"MIT"
] | 20
|
2021-08-30T19:06:30.000Z
|
2022-03-15T21:16:53.000Z
|
stapy/sta/entities/__init__.py
|
zMoooooritz/stapy
|
022183e0a35ba0d73b97986a695b2a1e6bd0c77c
|
[
"MIT"
] | null | null | null |
from stapy.sta.entities.location import Location
from stapy.sta.entities.featureofinterest import FeatureOfInterest
from stapy.sta.entities.observation import Observation
from stapy.sta.entities.observedproperty import ObservedProperty
from stapy.sta.entities.datastream import Datastream
from stapy.sta.entities.sensor import Sensor
from stapy.sta.entities.thing import Thing
| 47.125
| 66
| 0.870027
| 49
| 377
| 6.693878
| 0.244898
| 0.192073
| 0.256098
| 0.426829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074271
| 377
| 7
| 67
| 53.857143
| 0.939828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a7c66e10994a2dd58f333b10e6983d307afdee7b
| 40
|
py
|
Python
|
src/models/segmentation/standalone/__init__.py
|
Alicegaz/torchok
|
7b8f95df466a25b1ad8ee93bed1a3c7516440cf4
|
[
"Apache-2.0"
] | 8
|
2021-10-12T05:39:20.000Z
|
2022-03-31T10:55:01.000Z
|
src/models/segmentation/standalone/__init__.py
|
Alicegaz/torchok
|
7b8f95df466a25b1ad8ee93bed1a3c7516440cf4
|
[
"Apache-2.0"
] | 1
|
2022-03-30T19:23:42.000Z
|
2022-03-30T19:23:42.000Z
|
src/models/segmentation/standalone/__init__.py
|
Alicegaz/torchok
|
7b8f95df466a25b1ad8ee93bed1a3c7516440cf4
|
[
"Apache-2.0"
] | 5
|
2021-11-17T07:38:28.000Z
|
2022-01-31T10:46:36.000Z
|
from . import u2net
from . import hrnet
| 13.333333
| 19
| 0.75
| 6
| 40
| 5
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.2
| 40
| 2
| 20
| 20
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ac25b06598f224a845ed47e3f2bc9b9bda656305
| 251
|
py
|
Python
|
examples/user_defined_threads_example/test3.py
|
egineering-llc/egat
|
63a172276b554ae1c7d0f13ba305881201c49d55
|
[
"MIT"
] | 4
|
2016-01-15T13:23:59.000Z
|
2020-07-01T19:00:51.000Z
|
examples/auto_threaded_example/test3.py
|
egineering-llc/egat
|
63a172276b554ae1c7d0f13ba305881201c49d55
|
[
"MIT"
] | null | null | null |
examples/auto_threaded_example/test3.py
|
egineering-llc/egat
|
63a172276b554ae1c7d0f13ba305881201c49d55
|
[
"MIT"
] | 5
|
2015-09-17T17:56:12.000Z
|
2019-02-11T16:19:18.000Z
|
import egat.testset as testset
class Test3(testset.UnorderedTestSet):
    """Example egat test set; members may run in any order (UnorderedTestSet).

    The test bodies are intentionally empty placeholders.
    """

    def test3_1(self):
        pass

    def test3_2(self):
        pass

    def test3_3(self):
        pass

    def test3_4(self):
        pass

    def test3_5(self):
        pass
| 17.928571
| 38
| 0.593625
| 34
| 251
| 4.235294
| 0.441176
| 0.277778
| 0.305556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065089
| 0.326693
| 251
| 13
| 39
| 19.307692
| 0.786982
| 0
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.416667
| false
| 0.416667
| 0.083333
| 0
| 0.583333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
ac2ba40b8460517dd9ea356db585c57c3dd61ad8
| 138
|
py
|
Python
|
scripts/npc/autogen_9000133.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
scripts/npc/autogen_9000133.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
scripts/npc/autogen_9000133.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
# Character field ID when accessed: 820000000
# ObjectID: 1000035
# ParentID: 9000133
# Object Position Y: -281
# Object Position X: -257
| 23
| 45
| 0.746377
| 18
| 138
| 5.722222
| 0.888889
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252174
| 0.166667
| 138
| 5
| 46
| 27.6
| 0.643478
| 0.92029
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ac5dc56292104fdbfd0c699ae4c8b0d75852e922
| 41
|
py
|
Python
|
8_kyu/Third_Angle_of_a_Triangle.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
8_kyu/Third_Angle_of_a_Triangle.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
8_kyu/Third_Angle_of_a_Triangle.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
def other_angle(a, b):
    """Return the third interior angle of a triangle given the other two.

    Relies on the angle-sum property: interior angles total 180 degrees.
    """
    return 180 - (a + b)
| 20.5
| 22
| 0.658537
| 9
| 41
| 2.888889
| 0.777778
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.195122
| 41
| 2
| 23
| 20.5
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3bb2135912f7995a7cc118a09b005cefc64443fc
| 48
|
py
|
Python
|
main.py
|
43trh/emtweb
|
018748ec9ec5e7df7a22e4d97334eec1fee9c8c4
|
[
"MIT"
] | 1
|
2021-02-25T17:31:19.000Z
|
2021-02-25T17:31:19.000Z
|
main.py
|
43trh/emtweb
|
018748ec9ec5e7df7a22e4d97334eec1fee9c8c4
|
[
"MIT"
] | null | null | null |
main.py
|
43trh/emtweb
|
018748ec9ec5e7df7a22e4d97334eec1fee9c8c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# TODO
# print('noe')
| 8
| 23
| 0.458333
| 6
| 48
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.229167
| 48
| 5
| 24
| 9.6
| 0.567568
| 0.8125
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.2
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3bb5714e348f9467cc05831143042906e3a541f3
| 114
|
py
|
Python
|
cnn-text-classification-zh/tests/__init__.py
|
chufucun/deep-learning
|
1c98b0c9c74111d7c34ef81f82ac3b5d2f8560ed
|
[
"MIT"
] | 2
|
2019-01-23T07:03:10.000Z
|
2019-01-23T07:05:18.000Z
|
cnn-text-classification-zh/tests/__init__.py
|
chufucun/deep-learning
|
1c98b0c9c74111d7c34ef81f82ac3b5d2f8560ed
|
[
"MIT"
] | null | null | null |
cnn-text-classification-zh/tests/__init__.py
|
chufucun/deep-learning
|
1c98b0c9c74111d7c34ef81f82ac3b5d2f8560ed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
import logging
from logger_helper import setup_logging

# Configure logging handlers once, at package import time.
setup_logging()
| 12.666667
| 39
| 0.780702
| 17
| 114
| 5.058824
| 0.764706
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.131579
| 114
| 8
| 40
| 14.25
| 0.858586
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3bdc07eb40c5085d605b4b0cc466d9a81c50c820
| 113
|
py
|
Python
|
boa3_test/test_sc/interop_test/contract/UpdateContractTooFewArguments.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/interop_test/contract/UpdateContractTooFewArguments.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/interop_test/contract/UpdateContractTooFewArguments.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.builtin.interop.contract import update_contract
def Main(script: bytes):
    # NOTE(review): per the file name (UpdateContractTooFewArguments), this
    # appears to deliberately call update_contract with too few arguments to
    # exercise the compiler's arity checking — confirm before "fixing".
    update_contract(script)
| 18.833333
| 57
| 0.79646
| 15
| 113
| 5.866667
| 0.733333
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.123894
| 113
| 5
| 58
| 22.6
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3befbf56f400916524b0a6def92ad55db6623077
| 199
|
py
|
Python
|
stub_map.py
|
liderrick/Text-Based-Adventure-Puzzle-Game--New-San-Diego-Saga
|
862004b65657875042a6246f89d3b4c2dae08f06
|
[
"MIT"
] | null | null | null |
stub_map.py
|
liderrick/Text-Based-Adventure-Puzzle-Game--New-San-Diego-Saga
|
862004b65657875042a6246f89d3b4c2dae08f06
|
[
"MIT"
] | null | null | null |
stub_map.py
|
liderrick/Text-Based-Adventure-Puzzle-Game--New-San-Diego-Saga
|
862004b65657875042a6246f89d3b4c2dae08f06
|
[
"MIT"
] | null | null | null |
from loaders import CityLoader
def get_map_stub():
    """Load the New San Diego city fixture from its JSON file.

    Returns the (map_arr, legendary_items, boss_puzzles) triple produced
    by CityLoader.parse_json.
    """
    map_arr, legendary_items, boss_puzzles = CityLoader.parse_json('etc/city/new_san_diego.json')
    return map_arr, legendary_items, boss_puzzles
| 39.8
| 97
| 0.80402
| 30
| 199
| 4.966667
| 0.7
| 0.080537
| 0.201342
| 0.268456
| 0.416107
| 0.416107
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115578
| 199
| 4
| 98
| 49.75
| 0.846591
| 0
| 0
| 0
| 0
| 0
| 0.135678
| 0.135678
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
ce1cc6275eca5bdb079ff79b683266e11cea5db0
| 25
|
py
|
Python
|
pyaztec/__init__.py
|
DGX2000/PyAztec
|
b6284bb9dbadc954b5e877dcfc204056705b8205
|
[
"MIT"
] | null | null | null |
pyaztec/__init__.py
|
DGX2000/PyAztec
|
b6284bb9dbadc954b5e877dcfc204056705b8205
|
[
"MIT"
] | null | null | null |
pyaztec/__init__.py
|
DGX2000/PyAztec
|
b6284bb9dbadc954b5e877dcfc204056705b8205
|
[
"MIT"
] | null | null | null |
from .core import decode
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cbeb2606f00414f4b8e43c4f92a6c4931d3b5651
| 3,545
|
py
|
Python
|
test/t1000/unit/application/result/test_events.py
|
helcerion/T1000
|
25684e88dc8adb37fe07ff358f84f797f7b9c716
|
[
"MIT"
] | 1
|
2021-08-23T01:33:03.000Z
|
2021-08-23T01:33:03.000Z
|
test/t1000/unit/application/result/test_events.py
|
helcerion/T1000
|
25684e88dc8adb37fe07ff358f84f797f7b9c716
|
[
"MIT"
] | 20
|
2019-10-29T10:55:27.000Z
|
2022-03-12T00:04:50.000Z
|
test/t1000/unit/application/result/test_events.py
|
helcerion/T1000
|
25684e88dc8adb37fe07ff358f84f797f7b9c716
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import Mock
from src.t1000.application.result.events import ConsoleEventsResult, HtmlEventsResult
class ConsoleEventsResultTestCase(unittest.TestCase):
    """Tests for ConsoleEventsResult: (payload, exit_code) tuples."""

    def test_get_result_ok(self):
        """With a command and a resource, get() returns (data, 0)."""
        command = Mock()
        resource = Mock()
        resource.get.return_value = {}
        console_event_result = ConsoleEventsResult(command, resource)
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({}, 0))

    def test_get_result_no_command_no_resource(self):
        """Missing command yields an error payload with exit code 1."""
        console_event_result = ConsoleEventsResult(None, None)
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'message': 'Result needs a command.'}, 1))

    def test_get_result_setting_resource_after_and_no_command(self):
        """Setting only the resource later still fails on the command."""
        resource_mock = Mock()
        resource_mock.get.return_value = {'body': ''}
        console_event_result = ConsoleEventsResult(None, None)
        console_event_result.set_resource(resource_mock)
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'message': 'Result needs a command.'}, 1))

    def test_get_result_ok_setting_resource_after(self):
        """Resource may be injected after construction."""
        resource = Mock()
        resource.get.return_value = {'body': ''}
        console_event_result = ConsoleEventsResult(Mock(), None)
        console_event_result.set_resource(resource)
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'body': ''}, 0))

    def test_get_result_ok_setting_resource_and_command_after(self):
        """Both collaborators may be injected after construction."""
        resource = Mock()
        resource.get.return_value = {'body': ''}
        console_event_result = ConsoleEventsResult(None, None)
        console_event_result.set_command(Mock())
        console_event_result.set_resource(resource)
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'body': ''}, 0))

    def test_get_result_ko(self):
        """A command that raises maps to ({'message': ...}, 1)."""
        command = Mock()
        command.execute.side_effect = Exception('Nooooooo')
        command.set_params.return_value = command
        console_event_result = ConsoleEventsResult(command, Mock())
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'message': 'Nooooooo'}, 1))

    def test_with_exception_no_command(self):
        """Resource alone is not enough; the command check fires first."""
        console_event_result = ConsoleEventsResult(None, None)
        console_event_result.set_resource(Mock())
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'message': 'Result needs a command.'}, 1))

    def test_with_exception_no_resource(self):
        """Command alone is not enough; a resource is also required."""
        console_event_result = ConsoleEventsResult(None, None)
        console_event_result.set_command(Mock())
        console_event = console_event_result.get()
        self.assertEqual(console_event, ({'message': 'Result needs resource.'}, 1))
class HtmlEventResultTestCase(unittest.TestCase):
    """Tests for HtmlEventsResult: (payload, HTTP status) tuples."""

    def test_get_result_ok(self):
        """Success wraps the data under 'events' with status 200."""
        command = Mock()
        resource = Mock()
        resource.get.return_value = {}
        html_event_result = HtmlEventsResult(command, resource)
        html_event = html_event_result.get()
        self.assertEqual(html_event, ({'events': {}}, 200))

    def test_get_result_ko(self):
        """A command that raises maps to an empty payload with status 500."""
        command = Mock()
        command.execute.side_effect = Exception('Nooooo')
        command.set_params.return_value = command
        html_event_result = HtmlEventsResult(command, Mock())
        html_event = html_event_result.get()
        self.assertEqual(html_event, ({}, 500))
| 43.765432
| 85
| 0.696474
| 400
| 3,545
| 5.8225
| 0.1275
| 0.195792
| 0.17003
| 0.077286
| 0.860455
| 0.785745
| 0.741091
| 0.724775
| 0.724775
| 0.724775
| 0
| 0.006329
| 0.197743
| 3,545
| 80
| 86
| 44.3125
| 0.812588
| 0
| 0
| 0.608696
| 0
| 0
| 0.049083
| 0
| 0
| 0
| 0
| 0
| 0.144928
| 1
| 0.144928
| false
| 0
| 0.043478
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5a63dfc5976ce769be39f7c4a2aace7ad9bdd945
| 41,322
|
py
|
Python
|
objects.py
|
jin-hao-chen/yank
|
8f6b4f70157ac94cb580373a7c2fbdbce7f781f9
|
[
"MIT"
] | null | null | null |
objects.py
|
jin-hao-chen/yank
|
8f6b4f70157ac94cb580373a7c2fbdbce7f781f9
|
[
"MIT"
] | null | null | null |
objects.py
|
jin-hao-chen/yank
|
8f6b4f70157ac94cb580373a7c2fbdbce7f781f9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
import copy
from ytypes import *
from color_print import fatal_print
class NilObj(object):
    """Interpreter runtime object for the nil value.

    The wrapped Python value (`self.nil`) is always None.
    """

    def __init__(self):
        self.obj_header = ObjHeader(OT_NIL, nil_cls, self)
        self.nil = None

    def __hash__(self):
        return hash(self.nil)

    def __eq__(self, other):
        # Bug fix: compare the wrapped values, not their hashes — equal
        # hashes do not imply equal values (hash collisions).
        return self.nil == other.nil
class BoolObj(object):
    """Runtime boolean object; wraps a Python bool."""
    def __init__(self, boolean):
        self.obj_header = ObjHeader(OT_BOOL, bool_cls, self)
        self.bool = boolean
    def __hash__(self):
        return hash(self.bool)
    def __eq__(self, other):
        # Compares hashes; assumes `other` also has a .bool attribute -- TODO confirm.
        return hash(self.bool) == hash(other.bool)
class StrObj(object):
    """Runtime string object; wraps a Python str."""
    def __init__(self, string):
        self.obj_header = ObjHeader(OT_STR, str_cls, self)
        self.str = str(string)
    def __hash__(self):
        return hash(self.str)
    def __eq__(self, other):
        # Compares hashes; assumes `other` also has a .str attribute -- TODO confirm.
        return hash(self.str) == hash(other.str)
class IntObj(object):
    """Runtime integer object; wraps a Python int (coerces via int())."""
    def __init__(self, integer):
        self.obj_header = ObjHeader(OT_INT, int_cls, self)
        self.int = int(integer)
    def __hash__(self):
        return hash(self.int)
    def __eq__(self, other):
        # Compares hashes; assumes `other` also has a .int attribute -- TODO confirm.
        return hash(self.int) == hash(other.int)
class FloatObj(object):
    """Runtime float object; wraps a Python float (coerces via float())."""
    def __init__(self, float_):
        self.obj_header = ObjHeader(OT_FLOAT, float_cls, self)
        self.float = float(float_)
    def __hash__(self):
        return hash(self.float)
    def __eq__(self, other):
        # Compares hashes; assumes `other` also has a .float attribute -- TODO confirm.
        return hash(self.float) == hash(other.float)
class ListObj(object):
    """Runtime list object; wraps a shallow copy of the given sequence."""
    def __init__(self, list_=None):
        # The default used to be a mutable [] literal (shared across calls);
        # a None sentinel is the safe, behavior-compatible form.
        self.obj_header = ObjHeader(OT_LIST, list_cls, self)
        if not list_:
            list_ = []
        self.list = list(list_)
class MapObj(object):
    """Runtime map object; wraps a shallow copy of the given mapping."""
    def __init__(self, map_=None):
        self.obj_header = ObjHeader(OT_MAP, map_cls, self)
        if not map_:
            map_ = {}
        self.map = dict(map_)
class ModuleObj(object):
    """A module: a name plus bookkeeping for module-level variables."""
    def __init__(self, name):
        self.obj_header = ObjHeader(OT_MODULE, module_cls, self)
        self.name = name
        # Names, their count, and a (currently unpopulated) values list.
        self.module_var_names = []
        self.module_var_name_len = 0
        self.module_var_values = []
    def add_module_var(self, name):
        """Return the index of `name`, appending it first if not yet present."""
        for i in range(len(self.module_var_names)):
            if self.module_var_names[i] == name:
                return i
        self.module_var_names.append(name)
        # self.module_var_values.append(value)
        self.module_var_name_len += 1
        return self.module_var_name_len - 1
class FunObj(object):
    """A compiled function: instruction stream, constant pool, slot counters."""
    def __init__(self, name, scope=1, arg_num=0):
        self.obj_header = ObjHeader(OT_FUN, fun_cls, self)
        self.name = name
        self.stream = []
        self.stream_num = 0
        # Holds Python-level values, including number and string literals.
        self.constants = []
        self.constant_num = 0
        self.max_used_slots = 0
        self.cur_idx = 0
        self.scope = scope
        self.arg_num = arg_num
    def add_constant(self, value):
        """Append `value` to the constant pool and return its index."""
        self.constants.append(value)
        self.constant_num += 1
        return self.constant_num - 1
def call(obj, method_name):
    """Look up a method implementation in obj's class-object method table."""
    return obj.obj_header.cls_obj.methods[method_name]
def call_by_value(value, method_name):
    """Like call(), but starting from a Value wrapper."""
    return call(value.obj(), method_name)
def exit_if_false(cond):
    """Terminate the process when cond is falsy; otherwise return True."""
    if not cond:
        sys.exit(1)
    return True
def _type_to_pystr(obj):
    """Convert any runtime object to a Python str via its _*_to_str helper.

    Returns None for an unrecognized type tag (same as the old if/elif chain).
    """
    converters = {
        OT_INT: _int_to_str,
        OT_FLOAT: _float_to_str,
        OT_STR: _str_to_str,
        OT_LIST: _list_to_str,
        OT_MAP: _map_to_str,
        OT_NIL: _nil_to_str,
        OT_BOOL: _bool_to_str,
        OT_FUN: _fun_to_str,
        OT_MODULE: _module_to_str,
    }
    converter = converters.get(obj.obj_header.obj_type)
    if converter is not None:
        return converter(obj).str
def type_to_pystr(start, args):
    """Convert args[start] to a Python str via the public tostr handlers.

    The public handlers (int_to_str & co.) write a StrObj into args[start]
    and return True/False (see return_true), so the string must be read back
    from the slot; the old code called .str on the boolean return value,
    which raised AttributeError on every branch. Returns None for an
    unrecognized type tag or a failed conversion.
    """
    obj = args[start].obj()
    converters = {
        OT_INT: int_to_str,
        OT_FLOAT: float_to_str,
        OT_STR: str_to_str,
        OT_LIST: list_to_str,
        OT_MAP: map_to_str,
        OT_NIL: nil_to_str,
        OT_BOOL: bool_to_str,
        OT_FUN: fun_to_str,
        OT_MODULE: module_to_str,
    }
    converter = converters.get(obj.obj_header.obj_type)
    if converter is not None and converter(start, args):
        return args[start].obj().str
def is_type(obj, obj_type):
    """Return True when obj's header carries the given type tag."""
    header = obj.obj_header
    return header.obj_type == obj_type
def args_num(pystr):
    """Count comma-separated arguments in a signature string like 'f(a,b)'.

    An empty argument list still counts as 1 because ''.split(',') yields
    [''] -- preserved deliberately to match existing callers.
    """
    inner = pystr[pystr.find('(') + 1: pystr.rfind(')')]
    return len(inner.split(','))
class ObjHeader(object):
    """Header shared by every runtime object: type tag, class object, payload."""
    def __init__(self, obj_type, cls_obj, obj):
        self.obj_type = obj_type
        self.cls_obj = cls_obj
        self.obj = obj
class ClsObj(object):
    """A class object: a name plus its method table and method-name list."""
    def __init__(self, name):
        self.name = name
        self.methods = {}
        self.method_names = []
# One shared class object per built-in runtime type.
module_cls = ClsObj('module_cls')
fun_cls = ClsObj('fun_cls')
nil_cls = ClsObj('nil_cls')
bool_cls = ClsObj('bool_cls')
str_cls = ClsObj('str_cls')
int_cls = ClsObj('int_cls')
float_cls = ClsObj('float_cls')
list_cls = ClsObj('list_cls')
# map objects are special: in yank a map is itself an object, so remove/put/get
# are also exposed internally as @remove/@put/@get -- yank builds objects out
# of maps, mimicking JavaScript.
map_cls = ClsObj('map_cls')
def return_true(start, args, obj):
    """Write obj into the args[start] result slot and report success."""
    args[start].to_value(obj)
    return True
def return_false():
    """Report failure to the interpreter."""
    return False
# Arguments are wrapped into a yank_list.
def fun_call(obj, args):
    # Not implemented yet.
    pass
def nil_to_str(start, args):
    """tostr() for nil: writes StrObj(str(None)) into the result slot."""
    obj = args[start].obj()
    return return_true(start, args, StrObj(str(obj.nil)))
def nil_equ(start, args):
    """==(_) for nil: true exactly when the other operand is also nil.

    The old body bound args[start].obj() to an unused local; only the
    right-hand operand's type matters.
    """
    obj2 = args[start + 1].obj()
    return return_true(start, args, BoolObj(obj2.obj_header.obj_type == OT_NIL))
def nil_hash(start, args):
    """hash() for nil: always a runtime error."""
    fatal_print('Runtime error, nil cannot be hashed!')
    return return_false()
def nil_bind_methods():
    """Bind nil methods onto nil_cls.

    Fixes two key inconsistencies: the public key was 'hash(_)' although
    method_names (and every other type) uses 'hash()', and the internal keys
    lacked the arity suffix used by int_cls/float_cls and looked up by
    Value.__hash__ ('_hash(_)').
    """
    nil_cls.methods['tostr()'] = nil_to_str
    nil_cls.methods['==(_)'] = nil_equ
    nil_cls.methods['hash()'] = nil_hash
    nil_cls.method_names = ['tostr()', '==(_)', 'hash()']
    nil_cls.methods['_tostr(_)'] = _nil_to_str
    nil_cls.methods['_==(_,_)'] = _nil_equ
    nil_cls.methods['_hash(_)'] = _nil_hash
def bool_to_str(start, args):
    """tostr() for bool: writes StrObj('True'/'False') into the result slot."""
    obj = args[start].obj()
    return return_true(start, args, StrObj(str(obj.bool)))
def bool_equ(start, args):
    """==(_) for bool: compares the wrapped Python bools."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    return return_true(start, args, BoolObj(obj1.bool == obj2.bool))
def bool_hash(start, args):
    """hash() for bool: wraps Python's hash of the bool."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(hash(obj.bool)))
def bool_bind_methods():
    """Bind bool methods onto bool_cls.

    Internal keys now carry the arity suffix ('_hash(_)', '_==(_,_)', ...)
    used by int_cls/float_cls and looked up by Value.__hash__.
    """
    bool_cls.methods['tostr()'] = bool_to_str
    bool_cls.methods['==(_)'] = bool_equ
    bool_cls.methods['hash()'] = bool_hash
    bool_cls.method_names = ['tostr()', '==(_)', 'hash()']
    bool_cls.methods['_tostr(_)'] = _bool_to_str
    bool_cls.methods['_==(_,_)'] = _bool_equ
    bool_cls.methods['_hash(_)'] = _bool_hash
def str_to_str(start, args):
    """tostr() for str: writes a fresh StrObj copy into the result slot."""
    obj = args[start].obj()
    return return_true(start, args, StrObj(str(obj.str)))
def str_equ(start, args):
    """==(_) for str: compares the wrapped Python strings."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    return return_true(start, args, BoolObj(obj1.str == obj2.str))
def str_hash(start, args):
    """hash() for str: wraps Python's hash of the string."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(hash(obj.str)))
def str_add(start, args):
    """+(_) for str: concatenation; the right operand must be a string."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type != OT_STR:
        fatal_print('Runtime error, arg2 must be string')
        return return_false()
    return return_true(start, args, StrObj(obj1.str + obj2.str))
def str_numbers(start, args):
    """numbers(): parse as IntObj when all digits, otherwise try FloatObj."""
    obj = args[start].obj()
    if obj.str.isdigit():
        ret = IntObj(int(obj.str))
    else:
        try:
            ret = FloatObj(float(obj.str))
        except:
            fatal_print('Runtime error, cannot convert %s to numbers' % obj.str)
            return return_false()
    return return_true(start, args, ret)
def str_at(start, args):
    """at(i): one-character string at index i; the index must be an int.

    The old guard tested for OT_STR, which rejected every valid int index
    (the body reads obj2.int and the message says the index must be int).
    """
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        return return_false()
    return return_true(start, args, StrObj(obj1.str[obj2.int]))
def str_len(start, args):
    """len(): the string's length as an IntObj."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(len(obj.str)))
def str_emtpy(start, args):
    """empty(): true when the string has length 0. (Name typo kept: bound below.)"""
    obj = args[start].obj()
    return return_true(start, args, BoolObj(len(obj.str) == 0))
def _str_numbers(obj):
    """Internal numbers(): IntObj when all digits, else FloatObj; exits on failure."""
    if obj.str.isdigit():
        ret = IntObj(int(obj.str))
    else:
        try:
            ret = FloatObj(float(obj.str))
        except:
            fatal_print('Runtime error, cannot convert %s to numbers' % obj.str)
            sys.exit(1)
    return ret
def str_bind_methods():
    """Bind str methods onto str_cls.

    Internal keys now carry the arity suffix ('_hash(_)', '_==(_,_)', ...)
    used by int_cls/float_cls; Value.__hash__ looks up '_hash(_)', so string
    map keys would previously have raised KeyError.
    """
    str_cls.methods['tostr()'] = str_to_str
    str_cls.methods['==(_)'] = str_equ
    str_cls.methods['hash()'] = str_hash
    str_cls.methods['+(_)'] = str_add
    str_cls.methods['at(_)'] = str_at
    str_cls.methods['len()'] = str_len
    str_cls.methods['empty()'] = str_emtpy
    str_cls.methods['numbers()'] = str_numbers
    str_cls.method_names = ['tostr()', '==(_)', 'hash()', '+(_)', 'at(_)', 'len()', 'empty()', 'numbers()']
    str_cls.methods['_tostr(_)'] = _str_to_str
    str_cls.methods['_==(_,_)'] = _str_equ
    str_cls.methods['_hash(_)'] = _str_hash
    str_cls.methods['_+(_,_)'] = _str_add
    str_cls.methods['_at(_,_)'] = _str_at
    str_cls.methods['_len(_)'] = _str_len
    str_cls.methods['_empty(_)'] = _str_emtpy
    str_cls.methods['_numbers(_)'] = _str_numbers
def int_to_str(start, args):
    """tostr() for int: decimal string form."""
    obj = args[start].obj()
    return return_true(start, args, StrObj(str(obj.int)))
def int_equ(start, args):
    """==(_) for int: compares the wrapped Python ints."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    return return_true(start, args, BoolObj(obj1.int == obj2.int))
def int_hash(start, args):
    """hash() for int: wraps Python's hash of the int."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(hash(obj.int)))
def int_to_float(start, args):
    """float(): convert the int to a FloatObj."""
    obj = args[start].obj()
    return return_true(start, args, FloatObj(float(obj.int)))
def int_add(start, args):
    """+(_): int addition; promotes to float when the right operand is float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj1.obj_header.obj_type == OT_FLOAT:
        return return_true(start, args, FloatObj(obj1.float + obj2.float))
    if obj1.obj_header.obj_type == OT_INT:
        return return_true(start, args, IntObj(obj1.int + obj2.int))
def int_sub(start, args):
    """-(_): int subtraction; promotes to float when the right operand is float.

    The promotion used to call the public int_to_float(start, args) handler
    with a single object -- wrong signature; the internal _int_to_float(obj)
    helper is what int_add/int_mul/int_div use.
    """
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj1.obj_header.obj_type == OT_FLOAT:
        return return_true(start, args, FloatObj(obj1.float - obj2.float))
    if obj1.obj_header.obj_type == OT_INT:
        return return_true(start, args, IntObj(obj1.int - obj2.int))
def int_mul(start, args):
    """*(_): int multiplication; promotes to float when the right operand is float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj1.obj_header.obj_type == OT_FLOAT:
        return return_true(start, args, FloatObj(obj1.float * obj2.float))
    if obj1.obj_header.obj_type == OT_INT:
        return return_true(start, args, IntObj(obj1.int * obj2.int))
def int_div(start, args):
    """/(_): int division; float division when either operand is float.

    The float branch used to call return_true(FloatObj(...)) without the
    (start, args) slot arguments, a guaranteed TypeError at runtime.
    """
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj1.obj_header.obj_type == OT_FLOAT:
        if obj2.float == 0.0:
            fatal_print('Runtime error, arg2 cannot be 0')
            return return_false()
        return return_true(start, args, FloatObj(obj1.float / obj2.float))
    if obj1.obj_header.obj_type == OT_INT:
        if obj2.int == 0:
            fatal_print('Runtime error, arg2 cannot be 0')
            return return_false()
        return return_true(start, args, IntObj(obj1.int / obj2.int))
def int_mod(start, args):
    """%(_): int modulo; the right operand must be a non-zero int."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, arg2 must be int')
        return return_false()
    if obj2.int == 0:
        fatal_print('Runtime error, arg2 cannot be 0')
        return return_false()
    return return_true(start, args, IntObj(obj1.int % obj2.int))
def int_gt(start, args):
    """>(_): compares after promoting both operands to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float > obj2.float))
def int_ge(start, args):
    """>=(_): compares after promoting both operands to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    # NOTE(review): error message says 'args' unlike int_gt's 'arg2' -- inconsistent wording.
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, args is not a number')
        return return_false()
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float >= obj2.float))
def int_lt(start, args):
    """<(_): compares after promoting both operands to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, args is not a number')
        return return_false()
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float < obj2.float))
def int_le(start, args):
    """<=(_): compares after promoting both operands to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, obj2 is not a number')
        return return_false()
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float <= obj2.float))
def int_bind_methods():
    """Bind int methods onto int_cls.

    The public '<=' key was registered as '<=(_,_)' although every other
    public binary operator (and method_names) uses the single-underscore
    form '<=(_)', so '<=' could never be dispatched.
    """
    int_cls.methods['tostr()'] = int_to_str
    int_cls.methods['==(_)'] = int_equ
    int_cls.methods['hash()'] = int_hash
    int_cls.methods['float()'] = int_to_float
    int_cls.methods['+(_)'] = int_add
    int_cls.methods['-(_)'] = int_sub
    int_cls.methods['*(_)'] = int_mul
    int_cls.methods['/(_)'] = int_div
    int_cls.methods['%(_)'] = int_mod
    int_cls.methods['>(_)'] = int_gt
    int_cls.methods['>=(_)'] = int_ge
    int_cls.methods['<(_)'] = int_lt
    int_cls.methods['<=(_)'] = int_le
    int_cls.method_names = ['tostr()', '==(_)', 'hash()', 'float()',
                            '+(_)', '-(_)', '*(_)', '/(_)', '%(_)',
                            '>(_)', '>=(_)', '<(_)', '<=(_)']
    int_cls.methods['_tostr(_)'] = _int_to_str
    int_cls.methods['_==(_,_)'] = _int_equ
    int_cls.methods['_hash(_)'] = _int_hash
    int_cls.methods['_float(_)'] = _int_to_float
    int_cls.methods['_+(_,_)'] = _int_add
    int_cls.methods['_-(_,_)'] = _int_sub
    int_cls.methods['_*(_,_)'] = _int_mul
    int_cls.methods['_/(_,_)'] = _int_div
    int_cls.methods['_%(_,_)'] = _int_mod
    int_cls.methods['_>(_,_)'] = _int_gt
    int_cls.methods['_>=(_,_)'] = _int_ge
    int_cls.methods['_<(_,_)'] = _int_lt
    int_cls.methods['_<=(_,_)'] = _int_le
def float_to_str(start, args):
    """tostr() for float: decimal string form."""
    obj = args[start].obj()
    return return_true(start, args, StrObj(str(obj.float)))
def float_equ(start, args):
    """==(_) for float: compares the wrapped Python floats."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    return return_true(start, args, BoolObj(obj1.float == obj2.float))
def float_hash(start, args):
    """hash() for float: wraps Python's hash of the float."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(hash(obj.float)))
def float_to_int(start, args):
    """int(): truncate the float to an IntObj."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(int(obj.float)))
def float_add(start, args):
    """+(_): float addition; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    return return_true(start, args, FloatObj(obj1.float + obj2.float))
def float_sub(start, args):
    """-(_): float subtraction; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    return return_true(start, args, FloatObj(obj1.float - obj2.float))
def float_mul(start, args):
    """*(_): float multiplication; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_INT, OT_FLOAT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    return return_true(start, args, FloatObj(obj1.float * obj2.float))
def float_div(start, args):
    """/(_): float division; rejects a zero divisor."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj2.float == 0:
        fatal_print('Runtime error, arg2 cannot be 0')
        return return_false()
    return return_true(start, args, FloatObj(obj1.float / obj2.float))
def float_gt(start, args):
    """>(_): float comparison; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float > obj2.float))
def float_ge(start, args):
    """>=(_): float comparison; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float >= obj2.float))
def float_lt(start, args):
    """<(_): float comparison; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float < obj2.float))
def float_le(start, args):
    """<=(_): float comparison; int right operand is promoted to float."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        return return_false()
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return return_true(start, args, BoolObj(obj1.float <= obj2.float))
def float_bind_methods():
    """Bind the public and internal float methods onto float_cls."""
    public = [
        ('tostr()', float_to_str), ('==(_)', float_equ), ('hash()', float_hash),
        ('int()', float_to_int), ('+(_)', float_add), ('-(_)', float_sub),
        ('*(_)', float_mul), ('/(_)', float_div), ('>(_)', float_gt),
        ('>=(_)', float_ge), ('<(_)', float_lt), ('<=(_)', float_le),
    ]
    internal = [
        ('_tostr(_)', _float_to_str), ('_==(_,_)', _float_equ),
        ('_hash(_)', _float_hash), ('_int(_)', _float_to_int),
        ('_+(_,_)', _float_add), ('_-(_,_)', _float_sub),
        ('_*(_,_)', _float_mul), ('_/(_,_)', _float_div),
        ('_>(_,_)', _float_gt), ('_>=(_,_)', _float_ge),
        ('_<(_,_)', _float_lt), ('_<=(_,_)', _float_le),
    ]
    for signature, handler in public + internal:
        float_cls.methods[signature] = handler
    # Only the public signatures are advertised.
    float_cls.method_names = [signature for signature, _ in public]
def list_len(start, args):
    """len(): the list's length as an IntObj."""
    obj = args[start].obj()
    return return_true(start, args, IntObj(len(obj.list)))
def list_to_str(start, args):
    """tostr() for list: '[a, b]' style; an empty list now yields '[]'.

    The old code built '[items, ' then sliced off the trailing ', ' with
    s[:-2], which turned an empty list into ']'. join() needs no trailing
    separator, so the empty case falls out naturally.
    """
    obj = args[start].obj()
    parts = [_type_to_pystr(item.obj()) for item in obj.list]
    return return_true(start, args, StrObj('[' + ', '.join(parts) + ']'))
def list_at(start, args):
    """at(i): copy the i-th stored Value into the result slot."""
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, arg2 must be int')
        return return_false()
    # Shallow-copy the stored Value, then splice its fields into the slot.
    ret = copy.copy(obj1.list[obj2.int])
    args[start].value_type = ret.value_type
    args[start].obj_header = ret.obj_header
    return True
def list_insert(start, args):
    """insert(i, v): insert a copy of the value slot at index i.

    The old body also bound args[start + 2].obj() to an unused local obj3;
    the insertion copies the Value wrapper itself, not the bare object.
    """
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    # obj2 is the index.
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        return return_false()
    obj1.list.insert(obj2.int, copy.copy(args[start + 2]))
    return return_true(start, args, NilObj())
def list_append(start, args):
    """append(v): append a copy of the value slot to the list.

    The old body bound args[start + 1].obj() to an unused local obj2.
    """
    obj1 = args[start].obj()
    obj1.list.append(copy.copy(args[start + 1]))
    return return_true(start, args, NilObj())
def list_remove(start, args):
    """remove(i): delete element i after bounds checking.

    The old length check called list_len(obj1) -- that helper takes
    (start, args), so this raised at runtime; len() is what was meant.
    Also fixes the 'out of rang' typo in the error message.
    """
    obj1 = args[start].obj()
    obj2 = args[start + 1].obj()
    # obj2 is the index.
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        return return_false()
    length = len(obj1.list)
    if obj2.int >= length or obj2.int < 0:
        fatal_print('Runtime error, index out of range')
        return return_false()
    del obj1.list[obj2.int]
    return return_true(start, args, NilObj())
def list_bind_methods():
    """Bind the public and internal list methods onto list_cls."""
    list_cls.methods['len()'] = list_len
    list_cls.methods['tostr()'] = list_to_str
    list_cls.methods['insert(_,_)'] = list_insert
    list_cls.methods['at(_)'] = list_at
    list_cls.methods['remove(_)'] = list_remove
    list_cls.methods['append(_)'] = list_append
    list_cls.methods['_len(_)'] = _list_len
    list_cls.methods['_tostr(_)'] = _list_to_str
    list_cls.methods['_insert(_,_,_)'] = _list_insert
    list_cls.methods['_at(_,_)'] = _list_at
    list_cls.methods['_remove(_,_)'] = _list_remove
    list_cls.methods['_append(_,_)'] = _list_append
    list_cls.method_names = ['len()', 'tostr()', 'insert(_,_)', 'at(_)', 'remove(_)', 'append(_)']
def map_put(start, args):
    """put(key, value): insert copies of the key/value slots into the map.

    The old body bound args[start + 2].obj() to an unused local val; only
    the key's type needs inspecting before the Value wrappers are copied in.
    """
    obj = args[start].obj()
    key = args[start + 1].obj()
    # Map and list objects cannot serve as keys (unhashable).
    if key.obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        return return_false()
    obj.map[copy.copy(args[start + 1])] = copy.copy(args[start + 2])
    return return_true(start, args, NilObj())
def map_get(start, args):
    """get(key): copy the mapped Value into the result slot, or nil when absent."""
    obj = args[start].obj()
    key = args[start + 1].obj()
    if key.obj_header.obj_type == OT_NIL:
        fatal_print('Runtime error, key cannot be nil')
        return return_false()
    if key.obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        return return_false()
    if args[start + 1] not in obj.map:
        return return_true(start, args, NilObj())
    # Shallow-copy the stored Value, then splice its fields into the slot.
    ret = copy.copy(obj.map[args[start + 1]])
    args[start].value_type = ret.value_type
    args[start].obj_header = ret.obj_header
    return True
def map_remove(start, args):
    """remove(key): delete the entry when present; always results in nil."""
    obj = args[start].obj()
    key = args[start + 1].obj()
    if key.obj_header.obj_type == OT_NIL:
        fatal_print('Runtime error, key cannot be nil')
        return return_false()
    if key.obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        return return_false()
    if args[start + 1] in obj.map:
        del obj.map[args[start + 1]]
    return return_true(start, args, NilObj())
def map_to_str(start, args):
    """tostr() for map: '{k: v, ...}' style; an empty map now yields '{}'.

    The old code sliced s[:-2] to drop the trailing ', ', which turned an
    empty map into '}'. join() makes the empty case correct.
    """
    obj = args[start].obj()
    parts = ['%s: %s' % (_type_to_pystr(key.obj()), _type_to_pystr(obj.map[key].obj()))
             for key in obj.map]
    return return_true(start, args, StrObj('{' + ', '.join(parts) + '}'))
def map_bind_methods():
    """Bind map methods; '@'-prefixed keys are the internal object-protocol aliases."""
    map_cls.methods['tostr()'] = map_to_str
    map_cls.methods['put(_,_)'] = map_put
    map_cls.methods['get(_)'] = map_get
    map_cls.methods['remove(_)'] = map_remove
    map_cls.methods['@put(_,_)'] = map_put
    map_cls.methods['@get(_)'] = map_get
    map_cls.methods['@remove(_)'] = map_remove
    map_cls.methods['@_tostr(_)'] = _map_to_str
    map_cls.methods['@_put(_,_,_)'] = _map_put
    map_cls.methods['@_get(_,_)'] = _map_get
    map_cls.methods['@_remove(_,_)'] = _map_remove
    map_cls.method_names = ['tostr()', 'put(_,_)', 'get(_)', 'remove(_)']
def module_to_str(start, args):
    """tostr() for module: '<Module(addr: ...) name>'."""
    obj = args[start].obj()
    addr = str(id(obj))
    return return_true(start, args, StrObj('<Module(addr: %s) %s>' % (addr, obj.name)))
def module_bind_methods():
    """Bind module methods onto module_cls."""
    # NOTE(review): key is 'tostr(_)' although method_names lists 'tostr()',
    # unlike the other types -- looks inconsistent; confirm against the dispatcher.
    module_cls.methods['tostr(_)'] = module_to_str
    module_cls.methods['_tostr(_)'] = _module_to_str
    module_cls.method_names = ['tostr()']
def fun_to_str(start, args):
    """tostr() for function: '<Function(addr: ...) name>'."""
    obj = args[start].obj()
    addr = str(id(obj))
    return return_true(start, args, StrObj('<Function(addr: %s) %s>' % (addr, obj.name)))
def fun_bind_methods():
    """Bind function methods onto fun_cls."""
    # NOTE(review): same 'tostr(_)' vs 'tostr()' inconsistency as module_cls.
    fun_cls.methods['tostr(_)'] = fun_to_str
    fun_cls.methods['_tostr(_)'] = _fun_to_str
def _bind_methods():
    """Populate every built-in class object's method table (called at startup)."""
    module_bind_methods()
    fun_bind_methods()
    nil_bind_methods()
    bool_bind_methods()
    str_bind_methods()
    int_bind_methods()
    float_bind_methods()
    list_bind_methods()
    map_bind_methods()
# Internal helpers: operate directly on objects instead of the (start, args)
# slot protocol, and terminate the process on error instead of returning False.
def _nil_to_str(obj):
    """Internal tostr() for nil."""
    return StrObj(str(obj.nil))
def _nil_equ(obj1, obj2):
    """Internal ==: true exactly when obj2 is also nil."""
    if obj2.obj_header.obj_type != OT_NIL:
        return BoolObj(False)
    return BoolObj(True)
def _nil_hash(obj):
    """Internal hash for nil: always a fatal runtime error.

    Message fixed from the garbled 'RuntimetimeError, ...' to match the
    'Runtime error, nil cannot be hashed!' wording used by nil_hash.
    """
    fatal_print('Runtime error, nil cannot be hashed!')
    sys.exit(1)
def _bool_to_str(obj):
    """Internal tostr() for bool."""
    return StrObj(str(obj.bool))
def _bool_equ(obj1, obj2):
    """Internal == for bool."""
    return BoolObj(obj1.bool == obj2.bool)
def _bool_hash(obj):
    """Internal hash for bool."""
    return IntObj(hash(obj.bool))
def _str_to_str(obj):
    """Internal tostr() for str: the object itself is already a StrObj."""
    return obj
def _str_equ(obj1, obj2):
    """Internal == for str."""
    return BoolObj(obj1.str == obj2.str)
def _str_hash(obj):
    """Internal hash for str."""
    return IntObj(hash(obj.str))
def _str_add(obj1, obj2):
    """Internal + for str: concatenation; exits when obj2 is not a string."""
    if obj2.obj_header.obj_type != OT_STR:
        fatal_print('Runtime error, arg2 must be string')
        sys.exit(1)
    return StrObj(obj1.str + obj2.str)
def _str_at(obj1, obj2):
    """Internal at(): one-character string at the given index.

    The old guard tested for OT_STR, which rejected every valid int index
    (the body reads obj2.int and the message says the index must be int).
    """
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        sys.exit(1)
    return StrObj(obj1.str[obj2.int])
def _str_len(obj):
    """Internal len() for str."""
    return IntObj(len(obj.str))
def _str_emtpy(obj):
    """Internal empty() for str. (Name typo kept: bound by str_bind_methods.)"""
    return BoolObj(len(obj.str) == 0)
def _int_to_str(obj):
    """Internal tostr() for int."""
    return StrObj(str(obj.int))
def _int_equ(obj1, obj2):
    """Internal == for int: compares the wrapped Python ints.

    The old body re-bound obj1/obj2 from undefined names `args`/`start`
    (copy-pasted from the public handler), raising NameError on every call.
    """
    return BoolObj(obj1.int == obj2.int)
def _int_hash(obj):
    """Internal hash for int."""
    return IntObj(hash(obj.int))
def _int_to_float(obj):
    """Internal int -> FloatObj promotion."""
    return FloatObj(float(obj.int))
def _int_add(obj1, obj2):
    """Internal +: int addition, promoting to float when obj2 is float."""
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj1.obj_header.obj_type == OT_FLOAT:
        return FloatObj(obj1.float + obj2.float)
    if obj1.obj_header.obj_type == OT_INT:
        return IntObj(obj1.int + obj2.int)
def _int_sub(obj1, obj2):
    """Internal -: int subtraction, promoting to float when obj2 is float."""
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj1.obj_header.obj_type == OT_FLOAT:
        return FloatObj(obj1.float - obj2.float)
    if obj1.obj_header.obj_type == OT_INT:
        return IntObj(obj1.int - obj2.int)
def _int_mul(obj1, obj2):
    """Internal *: int multiplication, promoting to float when obj2 is float."""
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj1.obj_header.obj_type == OT_FLOAT:
        return FloatObj(obj1.float * obj2.float)
    if obj1.obj_header.obj_type == OT_INT:
        return IntObj(obj1.int * obj2.int)
def _int_div(obj1, obj2):
    """Internal /: int division; exits on non-numeric or zero divisor."""
    if obj2.obj_header.obj_type == OT_FLOAT:
        obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj1.obj_header.obj_type == OT_FLOAT:
        if obj2.float == 0.0:
            fatal_print('Runtime error, arg2 cannot be 0')
            sys.exit(1)
        return FloatObj(obj1.float / obj2.float)
    if obj1.obj_header.obj_type == OT_INT:
        if obj2.int == 0:
            fatal_print('Runtime error, arg2 cannot be 0')
            sys.exit(1)
        return IntObj(obj1.int / obj2.int)
def _int_mod(obj1, obj2):
    """Internal %: modulo; obj2 must be a non-zero int."""
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, arg2 must be int')
        sys.exit(1)
    if obj2.int == 0:
        fatal_print('Runtime error, arg2 cannot be 0')
        sys.exit(1)
    return IntObj(obj1.int % obj2.int)
def _int_gt(obj1, obj2):
    """Internal >: compares after promoting both operands to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float > obj2.float)
def _int_ge(obj1, obj2):
    """Internal >=: compares after promoting both operands to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, args is not a number')
        sys.exit(1)
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float >= obj2.float)
def _int_lt(obj1, obj2):
    """Internal <: compares after promoting both operands to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, args is not a number')
        sys.exit(1)
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float < obj2.float)
def _int_le(obj1, obj2):
    """Internal <=: compares after promoting both operands to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, obj2 is not a number')
        sys.exit(1)
    obj1 = _int_to_float(obj1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float <= obj2.float)
def _float_to_str(obj):
    """Internal tostr() for float."""
    return StrObj(str(obj.float))
def _float_equ(obj1, obj2):
    """Internal == for float."""
    return BoolObj(obj1.float == obj2.float)
def _float_hash(obj):
    """Internal hash for float."""
    return IntObj(hash(obj.float))
def _float_to_int(obj):
    """Internal float -> IntObj truncation."""
    return IntObj(int(obj.float))
def _float_add(obj1, obj2):
    """Internal +: float addition; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    return FloatObj(obj1.float + obj2.float)
def _float_sub(obj1, obj2):
    """Internal -: float subtraction; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    return FloatObj(obj1.float - obj2.float)
def _float_mul(obj1, obj2):
    """Internal *: float multiplication; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_INT, OT_FLOAT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    return FloatObj(obj1.float * obj2.float)
def _float_div(obj1, obj2):
    """Internal /: float division; exits on non-numeric or zero divisor.

    The zero-divisor branch printed the fatal message but fell through into
    the division (ZeroDivisionError); it now exits like every other internal
    helper.
    """
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj2.float == 0:
        fatal_print('Runtime error, arg2 cannot be 0')
        sys.exit(1)
    return FloatObj(obj1.float / obj2.float)
def _float_gt(obj1, obj2):
    """Internal >: float comparison; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float > obj2.float)
def _float_ge(obj1, obj2):
    """Internal >=: float comparison; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float >= obj2.float)
def _float_lt(obj1, obj2):
    """Internal <: float comparison; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float < obj2.float)
def _float_le(obj1, obj2):
    """Internal <=: float comparison; an int obj2 is promoted to float."""
    if obj2.obj_header.obj_type not in [OT_FLOAT, OT_INT]:
        fatal_print('Runtime error, arg2 is not a number')
        sys.exit(1)
    if obj2.obj_header.obj_type == OT_INT:
        obj2 = _int_to_float(obj2)
    return BoolObj(obj1.float <= obj2.float)
def _list_len(obj):
    """Internal len() for list."""
    return IntObj(len(obj.list))
def _list_to_str(obj):
    """Internal tostr() for list: '[a, b]'; an empty list now yields '[]'.

    The old s[:-2] trailing-separator trim turned an empty list into ']'.
    """
    parts = [_type_to_pystr(item.obj()) for item in obj.list]
    return StrObj('[' + ', '.join(parts) + ']')
def _list_at(obj1, obj2):
    """Internal at(): return the stored Value at index obj2.int."""
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, arg2 must be int')
        sys.exit(1)
    return obj1.list[obj2.int]
def _list_insert(obj1, obj2, obj3):
    """Internal insert(): insert a copy of obj3 at index obj2.int."""
    # obj2 is the index.
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        sys.exit(1)
    obj1.list.insert(obj2.int, copy.copy(obj3))
def _list_append(obj1, obj2):
    """Internal append(): append a copy of obj2."""
    obj1.list.append(copy.copy(obj2))
def _list_remove(obj1, obj2):
    """Internal remove(): delete element obj2.int after bounds checking.

    The old length check called list_len(obj1) -- that helper takes
    (start, args), so this raised at runtime; len() is what was meant.
    Also fixes the 'out of rang' typo in the error message.
    """
    # obj2 is the index.
    if obj2.obj_header.obj_type != OT_INT:
        fatal_print('Runtime error, index must be int')
        sys.exit(1)
    length = len(obj1.list)
    if obj2.int >= length or obj2.int < 0:
        fatal_print('Runtime error, index out of range')
        sys.exit(1)
    del obj1.list[obj2.int]
def _map_put(obj1, key, val):
    """Internal put(): insert copies of key/val into obj1's map.

    The old body wrote to `obj.map` although the parameter is obj1 --
    a NameError on every call.
    """
    if key.obj().obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        sys.exit(1)
    obj1.map[copy.copy(key)] = copy.copy(val)
def _map_get(obj, key):
    """Internal get(): a copy of obj.map[key], or a fresh nil Value when absent.

    The miss path used to call Value.to_value(NilObj()) -- to_value is an
    instance method taking (self, obj), so that raised TypeError; the
    classmethod constructor Value.new_value is the intended call.
    """
    if key.obj().obj_header.obj_type == OT_NIL:
        fatal_print('Runtime error, key cannot be nil')
        sys.exit(1)
    if key.obj().obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        sys.exit(1)
    if key not in obj.map:
        return Value.new_value(NilObj())
    return copy.copy(obj.map[key])
def _map_remove(obj, key):
    """Internal remove(): delete the entry when present; exits on invalid keys."""
    if key.obj().obj_header.obj_type == OT_NIL:
        fatal_print('Runtime error, key cannot be nil')
        sys.exit(1)
    if key.obj().obj_header.obj_type in [OT_MAP, OT_LIST]:
        fatal_print('Runtime error, map or list cannot be hashed')
        sys.exit(1)
    if key in obj.map:
        del obj.map[key]
def _map_to_str(obj):
    """Internal tostr() for map: '{k: v, ...}'; an empty map now yields '{}'.

    The old s[:-2] trailing-separator trim turned an empty map into '}'.
    """
    parts = ['%s: %s' % (_type_to_pystr(key.obj()), _type_to_pystr(obj.map[key].obj()))
             for key in obj.map]
    return StrObj('{' + ', '.join(parts) + '}')
def _module_to_str(obj):
    """Internal tostr() for module: '<Module(addr: ...) name>'."""
    addr = str(id(obj))
    return StrObj('<Module(addr: %s) %s>' % (addr, obj.name))
def _fun_to_str(obj):
    """Internal tostr() for function: '<Function(addr: ...) name>'."""
    addr = str(id(obj))
    return StrObj('<Function(addr: %s) %s>' % (addr, obj.name))
class Value(object):
    """A tagged VM slot: an object header plus a VT_* value-type tag.

    ``value_type`` mirrors the wrapped object's OT_* type so the VM can
    switch on it cheaply; booleans split into VT_TRUE / VT_FALSE.
    """
    def __init__(self, obj_header=None, value_type=VT_NIL):
        # Bug fix: the old default `NilObj().obj_header` was evaluated once
        # at definition time, so every default-constructed Value shared the
        # same header object.  Build a fresh nil header per instance.
        if obj_header is None:
            obj_header = NilObj().obj_header
        self.obj_header = obj_header
        self.value_type = value_type

    @staticmethod
    def _value_type_of(obj):
        """Map *obj*'s runtime type to its VT_* tag (VT_NIL as fallback).

        Shared by to_value/new_value, which previously duplicated this
        entire dispatch ladder.
        """
        if is_type(obj, OT_INT):
            return VT_INT
        if is_type(obj, OT_FLOAT):
            return VT_FLOAT
        if is_type(obj, OT_STR):
            return VT_STR
        if is_type(obj, OT_FUN):
            return VT_FUN
        if is_type(obj, OT_MAP):
            return VT_MAP
        if is_type(obj, OT_LIST):
            return VT_LIST
        if is_type(obj, OT_BOOL):
            return VT_TRUE if obj.bool else VT_FALSE
        if is_type(obj, OT_MODULE):
            return VT_MODULE
        # OT_NIL and any unrecognized type map to nil (the old code left the
        # tag stale for unknown types, which could misreport the slot).
        return VT_NIL

    def to_value(self, obj):
        """Re-point this Value at *obj*, updating the type tag in place."""
        self.obj_header = obj.obj_header
        self.value_type = self._value_type_of(obj)

    @classmethod
    def new_value(cls, obj):
        """Alternate constructor: build a fresh Value wrapping *obj*."""
        return cls(obj.obj_header, cls._value_type_of(obj))

    def clear_value(self):
        """Reset this slot to nil."""
        self.obj_header = NilObj().obj_header
        self.value_type = VT_NIL

    def obj(self):
        """Return the underlying object behind the header."""
        return self.obj_header.obj

    def __eq__(self, other):
        # Equality is hash-based: both sides dispatch to the language-level
        # _hash(_) method of their wrapped objects.
        return self.__hash__() == other.__hash__()

    def __hash__(self):
        return call(self.obj(), '_hash(_)')(self.obj()).int
class Frame(object):
    """A contiguous window [start, end] into a Thread's value stack.

    Both bounds are inclusive; a freshly allocated frame spans one slot.
    Items are addressed relative to ``start``.
    """
    def __init__(self, thread, start):
        self.thread = thread
        self.start = start
        # Inclusive upper bound; a new frame covers exactly its start slot.
        self.end = start

    def extend(self, steps=1):
        """Grow the frame by *steps* slots, doubling the thread's stack when
        fewer than ~512 free slots would remain past the new end."""
        self.end += steps
        remaining = self.thread.size - 1 - self.end
        if remaining <= 512:
            fresh = [Value() for _ in range(self.thread.size)]
            self.thread.values.extend(fresh)
            self.thread.size *= 2

    def __getitem__(self, idx):
        return self.thread.values[self.start + idx]

    def __setitem__(self, idx, val):
        self.thread.values[self.start + idx] = val

    def __str__(self):
        return str((self.start, self.end))
class Thread(object):
    """Owns the flat value stack and the stack of active call frames."""

    def __init__(self, size=1024):
        self.values = [Value() for _ in range(size)]
        self.frames = []
        self.frame_num = 0
        self.start = 0
        self.size = size

    def alloc_frame(self):
        """Push and return a new Frame placed just past the current one."""
        if self.frames:
            top = self.frames[self.frame_num - 1]
            begin = top.end + 1
            # Keep at least ~512 free slots ahead of the new frame,
            # doubling the stack when running low.
            if self.size - 1 - begin <= 512:
                self.values.extend(Value() for _ in range(self.size))
                self.size *= 2
        else:
            # Very first frame starts at the thread's base offset.
            begin = self.start
        frame = Frame(self, begin)
        self.frames.append(frame)
        self.frame_num += 1
        return frame

    def recycle_frame(self):
        """Pop the current frame; return the frame below it, or None."""
        del self.frames[self.frame_num - 1]
        self.frame_num -= 1
        if self.frame_num >= 1:
            return self.frames[self.frame_num - 1]
        return None
# NOTE(review): presumably registers the built-in helpers defined above on
# their object types — _bind_methods is defined elsewhere in this file;
# confirm against its definition.
_bind_methods()
| 29.473609
| 107
| 0.626712
| 6,101
| 41,322
| 3.945255
| 0.030979
| 0.054965
| 0.062817
| 0.081762
| 0.841919
| 0.805276
| 0.773452
| 0.742086
| 0.717408
| 0.703365
| 0
| 0.0195
| 0.236775
| 41,322
| 1,401
| 108
| 29.494647
| 0.743706
| 0.007986
| 0
| 0.494737
| 0
| 0
| 0.087281
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15311
| false
| 0.000957
| 0.004785
| 0.03445
| 0.352153
| 0.066029
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5a87f6fff6d2285507da0b2482ecf18ffb90bab6
| 6,100
|
py
|
Python
|
script/sklearn_like_toolkit/warpper/xgboost_wrapper.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/sklearn_like_toolkit/warpper/xgboost_wrapper.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/sklearn_like_toolkit/warpper/xgboost_wrapper.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
from hyperopt import hp
from script.sklearn_like_toolkit.warpper.base.BaseWrapperClf import BaseWrapperClf
from script.sklearn_like_toolkit.warpper.base.MixIn import MetaBaseWrapperClfWithABC, MetaBaseWrapperRegWithABC
from script.sklearn_like_toolkit.warpper.base.BaseWrapperReg import BaseWrapperReg
import warnings
import xgboost as xgb
class XGBoostClf(xgb.XGBClassifier, BaseWrapperClf, metaclass=MetaBaseWrapperClfWithABC):
    """Toolkit wrapper around ``xgboost.XGBClassifier``.

    Attaches the hyper-parameter search spaces the surrounding sklearn-like
    toolkit expects (``HyperOpt_space``, ``tuning_grid``, ``tuning_params``)
    and a ``feature_importances`` alias property.
    """
    # hyperopt search space consumed by the toolkit's optimizer.
    HyperOpt_space = {
        'n_estimators': 10 + hp.randint('n_estimators', 400),
        'max_depth': 4 + hp.randint('max_depth', 11),
        'min_child_weight': 1 + hp.randint('min_child_weight', 3),
        'gamma': hp.uniform('gamma', 0, 1),
        'subsample': hp.uniform('subsample', 0, 1),
        'colsample_bytree': hp.uniform('colsample_bytree', 0, 1),
        'learning_rate': hp.loguniform('learning_rate', -6, 0),
    }
    # grid-search candidates (some axes disabled to keep the grid small).
    tuning_grid = {
        'max_depth': [4, 6, 8],
        # 'n_estimators': [128, 256],
        # 'min_child_weight': [1, 2, 3],
        'gamma': [i / 10.0 for i in range(2, 10 + 1, 2)],
        'subsample': [i / 10.0 for i in range(2, 10 + 1, 2)],
        'colsample_bytree': [i / 10.0 for i in range(2, 10 + 1, 2)],
        # 'learning_rate': [0.01, 0.1, 1],
    }
    # xgboost defaults used as the tuning starting point.
    tuning_params = {
        'max_depth': 3,
        'n_estimators': 100,
        'min_child_weight': 1,
        'gamma': 0,
        'subsample': 1,
        'colsample_bytree': 1,
        'learning_rate': 0.1,
    }
    # parameters deliberately excluded from tuning, with their defaults.
    remain_param = {
        'silent': True,
        'objective': 'binary:logistic',
        'booster': ['gbtree', 'gblinear', 'dart'],
        'colsample_bylevel': 1,
        'reg_alpha': 0,
        'reg_lambda': 1,
        'scale_pos_weight': 1,
        'max_delta_step': 0,
        'base_score': 0.5,
        'n_jobs': 1,
        'nthread': None,
        'random_state': 0,
        'seed': None,
        'missing': None,
    }
    def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="binary:logistic",
                 booster='gbtree', n_jobs=1, nthread=None, gamma=0, min_child_weight=1, max_delta_step=0, subsample=1,
                 colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5,
                 random_state=0, seed=None, missing=None, **kwargs):
        """Mirror of ``xgb.XGBClassifier.__init__``; see the xgboost
        scikit-learn API docs for parameter semantics."""
        # Forward by keyword: XGBClassifier's positional parameter order is
        # not stable across xgboost releases (newer releases make these
        # keyword-only), so positional forwarding can silently misassign.
        xgb.XGBClassifier.__init__(
            self, max_depth=max_depth, learning_rate=learning_rate,
            n_estimators=n_estimators, silent=silent, objective=objective,
            booster=booster, n_jobs=n_jobs, nthread=nthread, gamma=gamma,
            min_child_weight=min_child_weight, max_delta_step=max_delta_step,
            subsample=subsample, colsample_bytree=colsample_bytree,
            colsample_bylevel=colsample_bylevel, reg_alpha=reg_alpha,
            reg_lambda=reg_lambda, scale_pos_weight=scale_pos_weight,
            base_score=base_score, random_state=random_state, seed=seed,
            missing=missing, **kwargs)
        BaseWrapperClf.__init__(self)
        # sklearn emits deprecation noise during fit; silence it here.
        warnings.filterwarnings(module='sklearn*', action='ignore', category=DeprecationWarning)

    @property
    def feature_importances(self):
        """Alias for sklearn's trailing-underscore ``feature_importances_``."""
        return self.feature_importances_
class XGBoostReg(xgb.XGBRegressor, BaseWrapperReg, metaclass=MetaBaseWrapperRegWithABC):
    """Toolkit wrapper around ``xgboost.XGBRegressor``.

    Attaches the hyper-parameter search spaces the surrounding sklearn-like
    toolkit expects and a ``feature_importances`` alias property.
    """
    # hyperopt search space consumed by the toolkit's optimizer.
    HyperOpt_space = {
        'n_estimators': 10 + hp.randint('n_estimators', 400),
        'max_depth': 4 + hp.randint('max_depth', 11),
        'min_child_weight': 1 + hp.randint('min_child_weight', 3),
        'gamma': hp.uniform('gamma', 0, 1),
        'subsample': hp.uniform('subsample', 0, 1),
        'colsample_bytree': hp.uniform('colsample_bytree', 0, 1),
        'learning_rate': hp.loguniform('learning_rate', -6, 0),
    }
    # grid-search candidates (currently all disabled).
    tuning_grid = {
        # 'max_depth': [4, 6, 8],
        # 'n_estimators': [128, 256],
        # 'min_child_weight': [1, 2, 3],
        # 'gamma': [i / 10.0 for i in range(2, 10 + 1, 2)],
        # 'subsample': [i / 10.0 for i in range(2, 10 + 1, 2)],
        # 'colsample_bytree': [i / 10.0 for i in range(2, 10 + 1, 2)],
        # 'learning_rate': [0.01, 0.1, 1],
    }
    # parameters deliberately excluded from tuning, with their defaults.
    remain_param = {
        'silent': True,
        'objective': 'binary:logistic',
        'booster': ['gbtree', 'gblinear', 'dart'],
        'colsample_bylevel': 1,
        'reg_alpha': 0,
        'reg_lambda': 1,
        'scale_pos_weight': 1,
        'max_delta_step': 0,
        'base_score': 0.5,
        'n_jobs': 1,
        'nthread': None,
        'random_state': 0,
        'seed': None,
        'missing': None,
    }
    def __init__(
            self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="reg:linear",
            booster='gbtree', n_jobs=1, nthread=None, gamma=0, min_child_weight=1, max_delta_step=0, subsample=1,
            colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5,
            random_state=0, seed=None, missing=None, **kwargs):
        """Mirror of ``xgb.XGBRegressor.__init__``; see the xgboost
        scikit-learn API docs for parameter semantics."""
        # Forward by keyword: XGBRegressor's positional parameter order is
        # not stable across xgboost releases (newer releases make these
        # keyword-only), so positional forwarding can silently misassign.
        xgb.XGBRegressor.__init__(
            self, max_depth=max_depth, learning_rate=learning_rate,
            n_estimators=n_estimators, silent=silent, objective=objective,
            booster=booster, n_jobs=n_jobs, nthread=nthread, gamma=gamma,
            min_child_weight=min_child_weight, max_delta_step=max_delta_step,
            subsample=subsample, colsample_bytree=colsample_bytree,
            colsample_bylevel=colsample_bylevel, reg_alpha=reg_alpha,
            reg_lambda=reg_lambda, scale_pos_weight=scale_pos_weight,
            base_score=base_score, random_state=random_state, seed=seed,
            missing=missing, **kwargs)
        BaseWrapperReg.__init__(self)

    @property
    def feature_importances(self):
        """Alias for sklearn's trailing-underscore ``feature_importances_``."""
        return self.feature_importances_
| 41.496599
| 121
| 0.588197
| 724
| 6,100
| 4.703039
| 0.156077
| 0.049339
| 0.045228
| 0.064611
| 0.856094
| 0.856094
| 0.848164
| 0.809692
| 0.766814
| 0.766814
| 0
| 0.042506
| 0.267213
| 6,100
| 146
| 122
| 41.780822
| 0.719239
| 0.162131
| 0
| 0.594059
| 0
| 0
| 0.166835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039604
| false
| 0
| 0.09901
| 0.019802
| 0.247525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ce7c3f9d96ee9ab0d4a8126a558c11c2ce518f88
| 21
|
py
|
Python
|
logparser/IPLoM/__init__.py
|
CUHK-CSE/logalizer
|
e8d96cd4de1121c5d2b517982c6028cd06e643f1
|
[
"MIT"
] | 859
|
2017-05-06T03:06:22.000Z
|
2022-03-31T12:02:29.000Z
|
logparser/IPLoM/__init__.py
|
mandychenze/logparser
|
8f1f1face2c0e270fd9bcecdefe37ebc6ba76e9d
|
[
"MIT"
] | 71
|
2018-02-24T08:11:32.000Z
|
2022-03-15T11:44:29.000Z
|
logparser/IPLoM/__init__.py
|
mandychenze/logparser
|
8f1f1face2c0e270fd9bcecdefe37ebc6ba76e9d
|
[
"MIT"
] | 445
|
2017-06-19T01:26:16.000Z
|
2022-03-29T08:27:17.000Z
|
from .IPLoM import *
| 10.5
| 20
| 0.714286
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ce7e39143fff82302ee5c376d158906c8df89acc
| 171
|
py
|
Python
|
src/nspyre/config/__init__.py
|
AlexBourassa/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"MIT"
] | 8
|
2019-12-06T14:49:34.000Z
|
2020-07-03T18:46:45.000Z
|
src/nspyre/config/__init__.py
|
nspyre-org/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"BSD-3-Clause"
] | 31
|
2020-09-21T21:01:06.000Z
|
2021-12-10T03:27:26.000Z
|
src/nspyre/config/__init__.py
|
NSpyre-Dev/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"BSD-3-Clause"
] | 4
|
2020-10-07T23:58:13.000Z
|
2022-03-01T15:22:34.000Z
|
from .config_files import (
get_config_param,
load_config,
load_meta_config
)
# Public re-exports of the config-file helpers imported above.
__all__ = [
    'get_config_param',
    'load_config',
    'load_meta_config'
]
| 14.25
| 27
| 0.672515
| 21
| 171
| 4.761905
| 0.428571
| 0.18
| 0.28
| 0.36
| 0.76
| 0.76
| 0.76
| 0.76
| 0
| 0
| 0
| 0
| 0.233918
| 171
| 11
| 28
| 15.545455
| 0.763359
| 0
| 0
| 0
| 0
| 0
| 0.251462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cec9e1a08ed5c84d46a1b4022019287ae2110da7
| 6,672
|
py
|
Python
|
hw3/test3.py
|
rahul-pande/ds501
|
063453de9bf7bc634422a6710d36715175cbeebf
|
[
"MIT"
] | null | null | null |
hw3/test3.py
|
rahul-pande/ds501
|
063453de9bf7bc634422a6710d36715175cbeebf
|
[
"MIT"
] | null | null | null |
hw3/test3.py
|
rahul-pande/ds501
|
063453de9bf7bc634422a6710d36715175cbeebf
|
[
"MIT"
] | null | null | null |
from problem3 import *
import numpy as np
import sys
'''
Unit test 2:
This file includes unit tests for problem3.py.
You could test the correctness of your code by typing `nosetests test3.py` in the terminal.
'''
#-------------------------------------------------------------------------
def test_python_version():
    ''' ----------- Problem 2 (30 points in total)--------------'''
    # The assignment requires Python 3 (Python 2 is not supported).
    major_version = sys.version_info[0]
    assert major_version == 3
#-------------------------------------------------------------------------
def test_update_U():
    '''(10 points) update_U'''
    def run(R, V, U, beta=1., mu=1.):
        # Build fresh arrays per call and perform one U-update.
        return update_U(np.array(R), np.array(V), np.array(U), beta=beta, mu=mu)

    one_col = [[1.], [1.]]
    flat_v = [[1., 1.]]

    # uniform 2x2 rating matrix, k=1
    assert np.allclose(run([[2., 2.], [2., 2.]], flat_v, one_col), [[3.], [3.]])

    # uniform 3x2 rating matrix, k=1
    assert np.allclose(
        run([[2., 2.], [2., 2.], [2., 2.]], flat_v, [[1.], [1.], [1.]]),
        [[3.], [3.], [3.]])

    # non-uniform ratings, varying beta / mu
    R = [[1., 2.], [3., 4.]]
    assert np.allclose(run(R, flat_v, one_col), [[1.], [9.]])
    assert np.allclose(run(R, flat_v, one_col, beta=2.), [[1.], [17.]])
    assert np.allclose(run(R, flat_v, one_col, mu=0.), [[3.], [11.]])

    # zeros mark missing ratings: U must come back unchanged
    assert np.allclose(run([[2., 0.], [0., 2.]], flat_v, one_col), one_col)

    # k=2: the update flips the sign of U
    square_ones = [[1., 1.], [1., 1.]]
    assert np.allclose(run(square_ones, square_ones, square_ones),
                       -np.array(square_ones))
#-------------------------------------------------------------------------
def test_update_V():
    '''(10 points) update_V'''
    def run(R, U, V, beta=1., mu=1.):
        # Build fresh arrays per call and perform one V-update.
        return update_V(np.array(R), np.array(U), np.array(V), beta=beta, mu=mu)

    one_col = [[1.], [1.]]
    flat_v = [[1., 1.]]

    # uniform 2x2 rating matrix, k=1 (loose tolerance, as in the handout)
    assert np.allclose(run([[2., 2.], [2., 2.]], one_col, flat_v),
                       [[3., 3.]], atol=1e-1)

    # uniform 3x2 rating matrix, k=1
    assert np.allclose(
        run([[2., 2.], [2., 2.], [2., 2.]], [[1.], [1.], [1.]], flat_v),
        [[5., 5.]])

    # non-uniform ratings, varying beta / mu
    R = [[1., 2.], [3., 4.]]
    assert np.allclose(run(R, one_col, flat_v), [[3., 7.]])
    assert np.allclose(run(R, one_col, flat_v, beta=2.), [[5., 13.]])
    assert np.allclose(run(R, one_col, flat_v, mu=0.), [[5., 9.]])

    # k=2: the update flips the sign of V
    square_ones = [[1., 1.], [1., 1.]]
    assert np.allclose(run(square_ones, square_ones, square_ones),
                       -np.array(square_ones))
#-------------------------------------------------------------------------
def test_matrix_decoposition():
    '''(10 points) matrix decoposition'''
    def verify(R, k, atol):
        U, V = matrix_decoposition(R, k)
        n_rows, n_cols = R.shape
        # both factors must be plain ndarrays of the expected shapes
        assert type(U) == np.ndarray
        assert type(V) == np.ndarray
        assert U.shape == (n_rows, k)
        assert V.shape == (k, n_cols)
        # the product must reconstruct R within tolerance
        assert np.allclose(np.dot(U, V), R, atol=atol)

    # small all-ones matrix, rank 1
    verify(np.ones((2, 2)), 1, 0.1)

    # a random 10x5 rating matrix with ratings in [1, 6), full rank k=5
    R = np.random.randint(1, 6, (10, 5)).astype(float)
    verify(R, 5, .1)
| 24.988764
| 95
| 0.429406
| 922
| 6,672
| 3.029284
| 0.104121
| 0.095238
| 0.065879
| 0.061224
| 0.816685
| 0.816685
| 0.814178
| 0.797709
| 0.77193
| 0.751522
| 0
| 0.049793
| 0.313699
| 6,672
| 266
| 96
| 25.082707
| 0.560166
| 0.331535
| 0
| 0.681034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0.034483
| false
| 0
| 0.025862
| 0
| 0.060345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0c7d8c4d258238891b24cc1a0010bb8f38aef38e
| 36
|
py
|
Python
|
fython/test/hello/hello_test.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 41
|
2016-01-21T05:14:45.000Z
|
2021-11-24T20:37:21.000Z
|
fython/test/hello/hello_test.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 5
|
2016-01-21T05:36:37.000Z
|
2016-08-22T19:26:51.000Z
|
fython/test/hello/hello_test.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 3
|
2016-01-23T04:03:44.000Z
|
2016-08-21T15:58:38.000Z
|
# NOTE(review): smoke test — prints whatever fython.hello() returns;
# presumably a hello-world greeting, confirm against the fython package.
import fython
print(fython.hello())
| 12
| 21
| 0.777778
| 5
| 36
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 3
| 21
| 12
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
0cc876b9e696c890c7df0e4f6126ed57f9217c2a
| 24,797
|
py
|
Python
|
sdk/python/pulumi_gcp/appengine/standard_app_version.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/appengine/standard_app_version.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/appengine/standard_app_version.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['StandardAppVersion']
class StandardAppVersion(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
automatic_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionAutomaticScalingArgs']]] = None,
basic_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionBasicScalingArgs']]] = None,
delete_service_on_destroy: Optional[pulumi.Input[bool]] = None,
deployment: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionDeploymentArgs']]] = None,
entrypoint: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionEntrypointArgs']]] = None,
env_variables: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
handlers: Optional[pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionHandlerArgs']]]]] = None,
inbound_services: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
libraries: Optional[pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionLibraryArgs']]]]] = None,
manual_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionManualScalingArgs']]] = None,
noop_on_destroy: Optional[pulumi.Input[bool]] = None,
project: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
runtime_api_version: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None,
threadsafe: Optional[pulumi.Input[bool]] = None,
version_id: Optional[pulumi.Input[str]] = None,
vpc_access_connector: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionVpcAccessConnectorArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Standard App Version resource to create a new version of standard GAE Application.
Learn about the differences between the standard environment and the flexible environment
at https://cloud.google.com/appengine/docs/the-appengine-environments.
Currently supporting Zip and File Containers.
To get more information about StandardAppVersion, see:
* [API documentation](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions)
* How-to Guides
* [Official Documentation](https://cloud.google.com/appengine/docs/standard)
## Example Usage
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['StandardAppVersionAutomaticScalingArgs']] automatic_scaling: Automatic scaling is based on request rate, response latencies, and other application metrics.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionBasicScalingArgs']] basic_scaling: Basic scaling creates instances when your application receives requests. Each instance will be shut down when the application becomes idle. Basic scaling is ideal for work that is intermittent or driven by user activity.
Structure is documented below.
:param pulumi.Input[bool] delete_service_on_destroy: If set to `true`, the service will be deleted if it is the last version.
:param pulumi.Input[pulumi.InputType['StandardAppVersionDeploymentArgs']] deployment: Code and application artifacts that make up this version.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionEntrypointArgs']] entrypoint: The entrypoint for the application.
Structure is documented below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] env_variables: Environment variables available to the application.
:param pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionHandlerArgs']]]] handlers: An ordered list of URL-matching patterns that should be applied to incoming requests.
The first matching URL handles the request and other request handlers are not attempted.
Structure is documented below.
:param pulumi.Input[List[pulumi.Input[str]]] inbound_services: A list of the types of messages that this application is able to receive.
Each value may be one of `INBOUND_SERVICE_MAIL`, `INBOUND_SERVICE_MAIL_BOUNCE`, `INBOUND_SERVICE_XMPP_ERROR`, `INBOUND_SERVICE_XMPP_MESSAGE`, `INBOUND_SERVICE_XMPP_SUBSCRIBE`, `INBOUND_SERVICE_XMPP_PRESENCE`, `INBOUND_SERVICE_CHANNEL_PRESENCE`, and `INBOUND_SERVICE_WARMUP`.
:param pulumi.Input[str] instance_class: Instance class that is used to run this version. Valid values are
AutomaticScaling: F1, F2, F4, F4_1G
BasicScaling or ManualScaling: B1, B2, B4, B4_1G, B8
Defaults to F1 for AutomaticScaling and B2 for ManualScaling and BasicScaling. If no scaling is specified, AutomaticScaling is chosen.
:param pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionLibraryArgs']]]] libraries: Configuration for third-party Python runtime libraries that are required by the application.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionManualScalingArgs']] manual_scaling: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
:param pulumi.Input[bool] noop_on_destroy: If set to `true`, the application version will not be deleted.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] runtime: Desired runtime. Example python27.
:param pulumi.Input[str] runtime_api_version: The version of the API in the given runtime environment.
Please see the app.yaml reference for valid values at https://cloud.google.com/appengine/docs/standard//config/appref
:param pulumi.Input[str] service: AppEngine service resource
:param pulumi.Input[bool] threadsafe: Whether multiple requests can be dispatched to this version at once.
:param pulumi.Input[str] version_id: Relative name of the version within the service. For example, `v1`. Version names can contain only lowercase letters, numbers, or hyphens. Reserved names,"default", "latest", and any name with the prefix "ah-".
:param pulumi.Input[pulumi.InputType['StandardAppVersionVpcAccessConnectorArgs']] vpc_access_connector: Enables VPC connectivity for standard apps.
Structure is documented below.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['automatic_scaling'] = automatic_scaling
__props__['basic_scaling'] = basic_scaling
__props__['delete_service_on_destroy'] = delete_service_on_destroy
if deployment is None:
raise TypeError("Missing required property 'deployment'")
__props__['deployment'] = deployment
__props__['entrypoint'] = entrypoint
__props__['env_variables'] = env_variables
__props__['handlers'] = handlers
__props__['inbound_services'] = inbound_services
__props__['instance_class'] = instance_class
__props__['libraries'] = libraries
__props__['manual_scaling'] = manual_scaling
__props__['noop_on_destroy'] = noop_on_destroy
__props__['project'] = project
if runtime is None:
raise TypeError("Missing required property 'runtime'")
__props__['runtime'] = runtime
__props__['runtime_api_version'] = runtime_api_version
if service is None:
raise TypeError("Missing required property 'service'")
__props__['service'] = service
__props__['threadsafe'] = threadsafe
__props__['version_id'] = version_id
__props__['vpc_access_connector'] = vpc_access_connector
__props__['name'] = None
super(StandardAppVersion, __self__).__init__(
'gcp:appengine/standardAppVersion:StandardAppVersion',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
automatic_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionAutomaticScalingArgs']]] = None,
basic_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionBasicScalingArgs']]] = None,
delete_service_on_destroy: Optional[pulumi.Input[bool]] = None,
deployment: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionDeploymentArgs']]] = None,
entrypoint: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionEntrypointArgs']]] = None,
env_variables: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
handlers: Optional[pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionHandlerArgs']]]]] = None,
inbound_services: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
libraries: Optional[pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionLibraryArgs']]]]] = None,
manual_scaling: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionManualScalingArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
noop_on_destroy: Optional[pulumi.Input[bool]] = None,
project: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
runtime_api_version: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None,
threadsafe: Optional[pulumi.Input[bool]] = None,
version_id: Optional[pulumi.Input[str]] = None,
vpc_access_connector: Optional[pulumi.Input[pulumi.InputType['StandardAppVersionVpcAccessConnectorArgs']]] = None) -> 'StandardAppVersion':
"""
Get an existing StandardAppVersion resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['StandardAppVersionAutomaticScalingArgs']] automatic_scaling: Automatic scaling is based on request rate, response latencies, and other application metrics.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionBasicScalingArgs']] basic_scaling: Basic scaling creates instances when your application receives requests. Each instance will be shut down when the application becomes idle. Basic scaling is ideal for work that is intermittent or driven by user activity.
Structure is documented below.
:param pulumi.Input[bool] delete_service_on_destroy: If set to `true`, the service will be deleted if it is the last version.
:param pulumi.Input[pulumi.InputType['StandardAppVersionDeploymentArgs']] deployment: Code and application artifacts that make up this version.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionEntrypointArgs']] entrypoint: The entrypoint for the application.
Structure is documented below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] env_variables: Environment variables available to the application.
:param pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionHandlerArgs']]]] handlers: An ordered list of URL-matching patterns that should be applied to incoming requests.
The first matching URL handles the request and other request handlers are not attempted.
Structure is documented below.
:param pulumi.Input[List[pulumi.Input[str]]] inbound_services: A list of the types of messages that this application is able to receive.
Each value may be one of `INBOUND_SERVICE_MAIL`, `INBOUND_SERVICE_MAIL_BOUNCE`, `INBOUND_SERVICE_XMPP_ERROR`, `INBOUND_SERVICE_XMPP_MESSAGE`, `INBOUND_SERVICE_XMPP_SUBSCRIBE`, `INBOUND_SERVICE_XMPP_PRESENCE`, `INBOUND_SERVICE_CHANNEL_PRESENCE`, and `INBOUND_SERVICE_WARMUP`.
:param pulumi.Input[str] instance_class: Instance class that is used to run this version. Valid values are
AutomaticScaling: F1, F2, F4, F4_1G
BasicScaling or ManualScaling: B1, B2, B4, B4_1G, B8
Defaults to F1 for AutomaticScaling and B2 for ManualScaling and BasicScaling. If no scaling is specified, AutomaticScaling is chosen.
:param pulumi.Input[List[pulumi.Input[pulumi.InputType['StandardAppVersionLibraryArgs']]]] libraries: Configuration for third-party Python runtime libraries that are required by the application.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['StandardAppVersionManualScalingArgs']] manual_scaling: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
:param pulumi.Input[str] name: Full Serverless VPC Access Connector name e.g. /projects/my-project/locations/us-central1/connectors/c1.
:param pulumi.Input[bool] noop_on_destroy: If set to `true`, the application version will not be deleted.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] runtime: Desired runtime. Example python27.
:param pulumi.Input[str] runtime_api_version: The version of the API in the given runtime environment.
Please see the app.yaml reference for valid values at https://cloud.google.com/appengine/docs/standard//config/appref
:param pulumi.Input[str] service: AppEngine service resource
:param pulumi.Input[bool] threadsafe: Whether multiple requests can be dispatched to this version at once.
:param pulumi.Input[str] version_id: Relative name of the version within the service. For example, `v1`. Version names can contain only lowercase letters, numbers, or hyphens. Reserved names,"default", "latest", and any name with the prefix "ah-".
:param pulumi.Input[pulumi.InputType['StandardAppVersionVpcAccessConnectorArgs']] vpc_access_connector: Enables VPC connectivity for standard apps.
Structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["automatic_scaling"] = automatic_scaling
__props__["basic_scaling"] = basic_scaling
__props__["delete_service_on_destroy"] = delete_service_on_destroy
__props__["deployment"] = deployment
__props__["entrypoint"] = entrypoint
__props__["env_variables"] = env_variables
__props__["handlers"] = handlers
__props__["inbound_services"] = inbound_services
__props__["instance_class"] = instance_class
__props__["libraries"] = libraries
__props__["manual_scaling"] = manual_scaling
__props__["name"] = name
__props__["noop_on_destroy"] = noop_on_destroy
__props__["project"] = project
__props__["runtime"] = runtime
__props__["runtime_api_version"] = runtime_api_version
__props__["service"] = service
__props__["threadsafe"] = threadsafe
__props__["version_id"] = version_id
__props__["vpc_access_connector"] = vpc_access_connector
return StandardAppVersion(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="automaticScaling")
def automatic_scaling(self) -> pulumi.Output[Optional['outputs.StandardAppVersionAutomaticScaling']]:
    """
    Automatic scaling settings, driven by request rate, response
    latencies, and other application metrics.
    Structure is documented below.
    """
    value = pulumi.get(self, "automatic_scaling")
    return value
@property
@pulumi.getter(name="basicScaling")
def basic_scaling(self) -> pulumi.Output[Optional['outputs.StandardAppVersionBasicScaling']]:
    """
    Basic scaling settings: instances are created when the application
    receives requests and shut down once it becomes idle. Best suited
    to intermittent or user-driven workloads.
    Structure is documented below.
    """
    value = pulumi.get(self, "basic_scaling")
    return value
@property
@pulumi.getter(name="deleteServiceOnDestroy")
def delete_service_on_destroy(self) -> pulumi.Output[Optional[bool]]:
    """
    When `true`, deleting the last version also deletes its service.
    """
    value = pulumi.get(self, "delete_service_on_destroy")
    return value
@property
@pulumi.getter
def deployment(self) -> pulumi.Output['outputs.StandardAppVersionDeployment']:
    """
    The code and application artifacts that make up this version.
    Structure is documented below.
    """
    value = pulumi.get(self, "deployment")
    return value
@property
@pulumi.getter
def entrypoint(self) -> pulumi.Output[Optional['outputs.StandardAppVersionEntrypoint']]:
    """
    The entrypoint used to launch the application.
    Structure is documented below.
    """
    value = pulumi.get(self, "entrypoint")
    return value
@property
@pulumi.getter(name="envVariables")
def env_variables(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """
    Environment variables made available to the application.
    """
    value = pulumi.get(self, "env_variables")
    return value
@property
@pulumi.getter
def handlers(self) -> pulumi.Output[List['outputs.StandardAppVersionHandler']]:
    """
    Ordered URL-matching patterns applied to incoming requests; the
    first pattern that matches handles the request and later handlers
    are not attempted.
    Structure is documented below.
    """
    value = pulumi.get(self, "handlers")
    return value
@property
@pulumi.getter(name="inboundServices")
def inbound_services(self) -> pulumi.Output[Optional[List[str]]]:
    """
    The types of messages this application is able to receive.
    Each value may be one of `INBOUND_SERVICE_MAIL`, `INBOUND_SERVICE_MAIL_BOUNCE`, `INBOUND_SERVICE_XMPP_ERROR`, `INBOUND_SERVICE_XMPP_MESSAGE`, `INBOUND_SERVICE_XMPP_SUBSCRIBE`, `INBOUND_SERVICE_XMPP_PRESENCE`, `INBOUND_SERVICE_CHANNEL_PRESENCE`, and `INBOUND_SERVICE_WARMUP`.
    """
    value = pulumi.get(self, "inbound_services")
    return value
@property
@pulumi.getter(name="instanceClass")
def instance_class(self) -> pulumi.Output[str]:
    """
    Instance class used to run this version. Valid values:
    AutomaticScaling: F1, F2, F4, F4_1G;
    BasicScaling or ManualScaling: B1, B2, B4, B4_1G, B8.
    Defaults to F1 for AutomaticScaling and B2 for ManualScaling and
    BasicScaling; AutomaticScaling is assumed when no scaling is specified.
    """
    value = pulumi.get(self, "instance_class")
    return value
@property
@pulumi.getter
def libraries(self) -> pulumi.Output[Optional[List['outputs.StandardAppVersionLibrary']]]:
    """
    Third-party Python runtime libraries required by the application.
    Structure is documented below.
    """
    value = pulumi.get(self, "libraries")
    return value
@property
@pulumi.getter(name="manualScaling")
def manual_scaling(self) -> pulumi.Output[Optional['outputs.StandardAppVersionManualScaling']]:
    """
    Manual scaling settings: the service runs continuously, allowing
    complex initialization and reliance on in-memory state over time.
    Structure is documented below.
    """
    value = pulumi.get(self, "manual_scaling")
    return value
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    Full Serverless VPC Access Connector name e.g. /projects/my-project/locations/us-central1/connectors/c1.

    NOTE(review): this description appears copied from `vpc_access_connector`
    in the upstream generator; presumably this is the version's resource
    name — verify against the provider schema.
    """
    value = pulumi.get(self, "name")
    return value
@property
@pulumi.getter(name="noopOnDestroy")
def noop_on_destroy(self) -> pulumi.Output[Optional[bool]]:
    """
    When `true`, the application version is left in place rather than
    deleted on destroy.
    """
    value = pulumi.get(self, "noop_on_destroy")
    return value
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
    """
    The ID of the project the resource belongs to; the provider
    project is used when not provided.
    """
    value = pulumi.get(self, "project")
    return value
@property
@pulumi.getter
def runtime(self) -> pulumi.Output[str]:
    """
    The desired runtime, e.g. `python27`.
    """
    value = pulumi.get(self, "runtime")
    return value
@property
@pulumi.getter(name="runtimeApiVersion")
def runtime_api_version(self) -> pulumi.Output[Optional[str]]:
    """
    The API version within the chosen runtime environment. See the
    app.yaml reference for valid values at
    https://cloud.google.com/appengine/docs/standard//config/appref
    """
    value = pulumi.get(self, "runtime_api_version")
    return value
@property
@pulumi.getter
def service(self) -> pulumi.Output[str]:
    """
    The AppEngine service resource this version belongs to.
    """
    value = pulumi.get(self, "service")
    return value
@property
@pulumi.getter
def threadsafe(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether this version can have multiple requests dispatched to it
    at once.
    """
    value = pulumi.get(self, "threadsafe")
    return value
@property
@pulumi.getter(name="versionId")
def version_id(self) -> pulumi.Output[Optional[str]]:
    """
    Relative name of the version within the service (e.g. `v1`).
    Version names may contain only lowercase letters, numbers, or
    hyphens. Reserved names: "default", "latest", and any name with
    the prefix "ah-".
    """
    value = pulumi.get(self, "version_id")
    return value
@property
@pulumi.getter(name="vpcAccessConnector")
def vpc_access_connector(self) -> pulumi.Output[Optional['outputs.StandardAppVersionVpcAccessConnector']]:
    """
    VPC connectivity settings for standard apps.
    Structure is documented below.
    """
    value = pulumi.get(self, "vpc_access_connector")
    return value
def translate_output_property(self, prop):
    """Map a camelCase provider property name to its snake_case form, passing unknown names through unchanged."""
    snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
    return snake_name if snake_name else prop
def translate_input_property(self, prop):
    """Map a snake_case property name to its camelCase provider form, passing unknown names through unchanged."""
    camel_name = _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop)
    return camel_name if camel_name else prop
| 59.608173
| 319
| 0.692342
| 2,799
| 24,797
| 5.954269
| 0.122544
| 0.063363
| 0.038402
| 0.049922
| 0.794792
| 0.768391
| 0.75237
| 0.736709
| 0.725789
| 0.724829
| 0
| 0.002793
| 0.220349
| 24,797
| 415
| 320
| 59.751807
| 0.859256
| 0.478324
| 0
| 0.325472
| 1
| 0
| 0.188198
| 0.084779
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113208
| false
| 0.004717
| 0.033019
| 0.009434
| 0.259434
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.